Diffstat (limited to 'services')
-rw-r--r--services/actions/auth.go107
-rw-r--r--services/actions/auth_test.go65
-rw-r--r--services/actions/cleanup.go128
-rw-r--r--services/actions/cleanup_test.go31
-rw-r--r--services/actions/clear_tasks.go101
-rw-r--r--services/actions/commit_status.go167
-rw-r--r--services/actions/init.go26
-rw-r--r--services/actions/interface.go28
-rw-r--r--services/actions/job_emitter.go162
-rw-r--r--services/actions/job_emitter_test.go136
-rw-r--r--services/actions/main_test.go17
-rw-r--r--services/actions/notifier.go777
-rw-r--r--services/actions/notifier_helper.go590
-rw-r--r--services/actions/notifier_helper_test.go51
-rw-r--r--services/actions/rerun.go38
-rw-r--r--services/actions/rerun_test.go48
-rw-r--r--services/actions/schedule_tasks.go154
-rw-r--r--services/actions/variables.go100
-rw-r--r--services/actions/workflows.go173
-rw-r--r--services/agit/agit.go268
-rw-r--r--services/asymkey/deploy_key.go31
-rw-r--r--services/asymkey/main_test.go17
-rw-r--r--services/asymkey/sign.go404
-rw-r--r--services/asymkey/ssh_key.go50
-rw-r--r--services/asymkey/ssh_key_test.go88
-rw-r--r--services/attachment/attachment.go80
-rw-r--r--services/attachment/attachment_test.go47
-rw-r--r--services/auth/additional_scopes_test.go32
-rw-r--r--services/auth/auth.go106
-rw-r--r--services/auth/auth_test.go134
-rw-r--r--services/auth/basic.go180
-rw-r--r--services/auth/group.go72
-rw-r--r--services/auth/httpsign.go218
-rw-r--r--services/auth/interface.go47
-rw-r--r--services/auth/main_test.go14
-rw-r--r--services/auth/oauth2.go244
-rw-r--r--services/auth/oauth2_test.go55
-rw-r--r--services/auth/reverseproxy.go179
-rw-r--r--services/auth/reverseproxy_test.go67
-rw-r--r--services/auth/session.go60
-rw-r--r--services/auth/signin.go128
-rw-r--r--services/auth/source.go42
-rw-r--r--services/auth/source/db/assert_interface_test.go20
-rw-r--r--services/auth/source/db/authenticate.go87
-rw-r--r--services/auth/source/db/source.go35
-rw-r--r--services/auth/source/ldap/README.md131
-rw-r--r--services/auth/source/ldap/assert_interface_test.go27
-rw-r--r--services/auth/source/ldap/security_protocol.go31
-rw-r--r--services/auth/source/ldap/source.go122
-rw-r--r--services/auth/source/ldap/source_authenticate.go124
-rw-r--r--services/auth/source/ldap/source_search.go516
-rw-r--r--services/auth/source/ldap/source_sync.go232
-rw-r--r--services/auth/source/ldap/util.go18
-rw-r--r--services/auth/source/oauth2/assert_interface_test.go22
-rw-r--r--services/auth/source/oauth2/init.go86
-rw-r--r--services/auth/source/oauth2/jwtsigningkey.go422
-rw-r--r--services/auth/source/oauth2/jwtsigningkey_test.go116
-rw-r--r--services/auth/source/oauth2/providers.go190
-rw-r--r--services/auth/source/oauth2/providers_base.go51
-rw-r--r--services/auth/source/oauth2/providers_custom.go123
-rw-r--r--services/auth/source/oauth2/providers_openid.go58
-rw-r--r--services/auth/source/oauth2/providers_simple.go109
-rw-r--r--services/auth/source/oauth2/source.go51
-rw-r--r--services/auth/source/oauth2/source_authenticate.go19
-rw-r--r--services/auth/source/oauth2/source_callout.go68
-rw-r--r--services/auth/source/oauth2/source_name.go18
-rw-r--r--services/auth/source/oauth2/source_register.go50
-rw-r--r--services/auth/source/oauth2/store.go98
-rw-r--r--services/auth/source/oauth2/token.go100
-rw-r--r--services/auth/source/oauth2/urlmapping.go77
-rw-r--r--services/auth/source/pam/assert_interface_test.go21
-rw-r--r--services/auth/source/pam/source.go45
-rw-r--r--services/auth/source/pam/source_authenticate.go76
-rw-r--r--services/auth/source/remote/source.go33
-rw-r--r--services/auth/source/smtp/assert_interface_test.go24
-rw-r--r--services/auth/source/smtp/auth.go106
-rw-r--r--services/auth/source/smtp/source.go66
-rw-r--r--services/auth/source/smtp/source_authenticate.go92
-rw-r--r--services/auth/source/source_group_sync.go116
-rw-r--r--services/auth/source/sspi/assert_interface_test.go18
-rw-r--r--services/auth/source/sspi/source.go39
-rw-r--r--services/auth/sspi.go223
-rw-r--r--services/auth/sspiauth_posix.go30
-rw-r--r--services/auth/sspiauth_windows.go19
-rw-r--r--services/auth/sync.go43
-rw-r--r--services/automerge/automerge.go306
-rw-r--r--services/automerge/notify.go46
-rw-r--r--services/context/access_log.go101
-rw-r--r--services/context/api.go459
-rw-r--r--services/context/api_org.go12
-rw-r--r--services/context/api_test.go51
-rw-r--r--services/context/base.go315
-rw-r--r--services/context/base_test.go47
-rw-r--r--services/context/captcha.go118
-rw-r--r--services/context/context.go254
-rw-r--r--services/context/context_cookie.go56
-rw-r--r--services/context/context_model.go29
-rw-r--r--services/context/context_request.go32
-rw-r--r--services/context/context_response.go194
-rw-r--r--services/context/context_template.go35
-rw-r--r--services/context/context_test.go24
-rw-r--r--services/context/csrf.go171
-rw-r--r--services/context/org.go280
-rw-r--r--services/context/package.go165
-rw-r--r--services/context/pagination.go57
-rw-r--r--services/context/permission.go149
-rw-r--r--services/context/private.go85
-rw-r--r--services/context/quota.go200
-rw-r--r--services/context/repo.go1112
-rw-r--r--services/context/repository.go25
-rw-r--r--services/context/response.go103
-rw-r--r--services/context/upload/upload.go105
-rw-r--r--services/context/upload/upload_test.go194
-rw-r--r--services/context/user.go84
-rw-r--r--services/context/utils.go38
-rw-r--r--services/context/xsrf.go99
-rw-r--r--services/context/xsrf_test.go91
-rw-r--r--services/contexttest/context_tests.go208
-rw-r--r--services/convert/activity.go52
-rw-r--r--services/convert/attachment.go63
-rw-r--r--services/convert/convert.go510
-rw-r--r--services/convert/git_commit.go228
-rw-r--r--services/convert/git_commit_test.go42
-rw-r--r--services/convert/issue.go288
-rw-r--r--services/convert/issue_comment.go187
-rw-r--r--services/convert/issue_test.go58
-rw-r--r--services/convert/main_test.go16
-rw-r--r--services/convert/mirror.go27
-rw-r--r--services/convert/notification.go98
-rw-r--r--services/convert/package.go53
-rw-r--r--services/convert/pull.go261
-rw-r--r--services/convert/pull_review.go139
-rw-r--r--services/convert/pull_test.go78
-rw-r--r--services/convert/quota.go185
-rw-r--r--services/convert/release.go35
-rw-r--r--services/convert/release_test.go29
-rw-r--r--services/convert/repository.go254
-rw-r--r--services/convert/secret.go18
-rw-r--r--services/convert/status.go65
-rw-r--r--services/convert/user.go113
-rw-r--r--services/convert/user_test.go41
-rw-r--r--services/convert/utils.go44
-rw-r--r--services/convert/utils_test.go39
-rw-r--r--services/convert/wiki.go45
-rw-r--r--services/cron/cron.go130
-rw-r--r--services/cron/setting.go86
-rw-r--r--services/cron/tasks.go230
-rw-r--r--services/cron/tasks_actions.go76
-rw-r--r--services/cron/tasks_basic.go175
-rw-r--r--services/cron/tasks_extended.go243
-rw-r--r--services/cron/tasks_test.go68
-rw-r--r--services/doctor/authorizedkeys.go100
-rw-r--r--services/doctor/breaking.go97
-rw-r--r--services/doctor/checkOldArchives.go59
-rw-r--r--services/doctor/dbconsistency.go268
-rw-r--r--services/doctor/dbversion.go42
-rw-r--r--services/doctor/doctor.go138
-rw-r--r--services/doctor/fix16961.go328
-rw-r--r--services/doctor/fix16961_test.go271
-rw-r--r--services/doctor/fix8312.go61
-rw-r--r--services/doctor/heads.go88
-rw-r--r--services/doctor/lfs.go52
-rw-r--r--services/doctor/mergebase.go114
-rw-r--r--services/doctor/misc.go299
-rw-r--r--services/doctor/packages_nuget.go160
-rw-r--r--services/doctor/paths.go124
-rw-r--r--services/doctor/push_mirror_consistency.go91
-rw-r--r--services/doctor/repository.go80
-rw-r--r--services/doctor/storage.go270
-rw-r--r--services/doctor/usertype.go41
-rw-r--r--services/externalaccount/link.go30
-rw-r--r--services/externalaccount/user.go107
-rw-r--r--services/f3/driver/asset.go171
-rw-r--r--services/f3/driver/assets.go42
-rw-r--r--services/f3/driver/comment.go122
-rw-r--r--services/f3/driver/comments.go49
-rw-r--r--services/f3/driver/common.go48
-rw-r--r--services/f3/driver/container.go43
-rw-r--r--services/f3/driver/forge.go64
-rw-r--r--services/f3/driver/issue.go238
-rw-r--r--services/f3/driver/issues.go40
-rw-r--r--services/f3/driver/label.go113
-rw-r--r--services/f3/driver/labels.go37
-rw-r--r--services/f3/driver/main.go17
-rw-r--r--services/f3/driver/main_test.go30
-rw-r--r--services/f3/driver/milestone.go150
-rw-r--r--services/f3/driver/milestones.go40
-rw-r--r--services/f3/driver/options.go20
-rw-r--r--services/f3/driver/options/name.go7
-rw-r--r--services/f3/driver/options/options.go31
-rw-r--r--services/f3/driver/organization.go111
-rw-r--r--services/f3/driver/organizations.go50
-rw-r--r--services/f3/driver/project.go188
-rw-r--r--services/f3/driver/projects.go55
-rw-r--r--services/f3/driver/pullrequest.go320
-rw-r--r--services/f3/driver/pullrequests.go42
-rw-r--r--services/f3/driver/reaction.go133
-rw-r--r--services/f3/driver/reactions.go59
-rw-r--r--services/f3/driver/release.go161
-rw-r--r--services/f3/driver/releases.go42
-rw-r--r--services/f3/driver/repositories.go36
-rw-r--r--services/f3/driver/repository.go101
-rw-r--r--services/f3/driver/review.go179
-rw-r--r--services/f3/driver/reviewcomment.go142
-rw-r--r--services/f3/driver/reviewcomments.go43
-rw-r--r--services/f3/driver/reviews.go49
-rw-r--r--services/f3/driver/root.go41
-rw-r--r--services/f3/driver/tests/init.go15
-rw-r--r--services/f3/driver/tests/new.go39
-rw-r--r--services/f3/driver/tests/options.go21
-rw-r--r--services/f3/driver/topic.go111
-rw-r--r--services/f3/driver/topics.go41
-rw-r--r--services/f3/driver/tree.go104
-rw-r--r--services/f3/driver/user.go128
-rw-r--r--services/f3/driver/users.go48
-rw-r--r--services/f3/util/logger.go97
-rw-r--r--services/f3/util/logger_test.go89
-rw-r--r--services/federation/federation_service.go295
-rw-r--r--services/feed/action.go458
-rw-r--r--services/feed/action_test.go52
-rw-r--r--services/forgejo/main_test.go17
-rw-r--r--services/forgejo/sanity.go26
-rw-r--r--services/forgejo/sanity_test.go31
-rw-r--r--services/forgejo/sanity_v1TOv5_0_1Included.go91
-rw-r--r--services/forgejo/sanity_v1TOv5_0_1Included_test.go115
-rw-r--r--services/forms/admin.go74
-rw-r--r--services/forms/auth_form.go92
-rw-r--r--services/forms/org.go76
-rw-r--r--services/forms/package_form.go30
-rw-r--r--services/forms/repo_branch_form.go38
-rw-r--r--services/forms/repo_form.go751
-rw-r--r--services/forms/repo_form_test.go64
-rw-r--r--services/forms/repo_tag_form.go26
-rw-r--r--services/forms/runner.go24
-rw-r--r--services/forms/user_form.go455
-rw-r--r--services/forms/user_form_auth_openid.go49
-rw-r--r--services/forms/user_form_hidden_comments.go104
-rw-r--r--services/forms/user_form_test.go131
-rw-r--r--services/gitdiff/csv.go469
-rw-r--r--services/gitdiff/csv_test.go229
-rw-r--r--services/gitdiff/gitdiff.go1396
-rw-r--r--services/gitdiff/gitdiff_test.go671
-rw-r--r--services/gitdiff/highlightdiff.go227
-rw-r--r--services/gitdiff/highlightdiff_test.go125
-rw-r--r--services/gitdiff/main_test.go18
-rw-r--r--services/gitdiff/testdata/academic-module/HEAD1
-rw-r--r--services/gitdiff/testdata/academic-module/config10
-rw-r--r--services/gitdiff/testdata/academic-module/description1
-rw-r--r--services/gitdiff/testdata/academic-module/indexbin0 -> 46960 bytes
-rw-r--r--services/gitdiff/testdata/academic-module/info/exclude6
-rw-r--r--services/gitdiff/testdata/academic-module/logs/HEAD1
-rw-r--r--services/gitdiff/testdata/academic-module/logs/refs/heads/master1
-rw-r--r--services/gitdiff/testdata/academic-module/logs/refs/remotes/origin/HEAD1
-rw-r--r--services/gitdiff/testdata/academic-module/objects/pack/pack-597efbc3613c7ba790e33b178fd9fc1fe17b4245.idxbin0 -> 65332 bytes
-rw-r--r--services/gitdiff/testdata/academic-module/objects/pack/pack-597efbc3613c7ba790e33b178fd9fc1fe17b4245.packbin0 -> 1167905 bytes
-rw-r--r--services/gitdiff/testdata/academic-module/packed-refs2
-rw-r--r--services/gitdiff/testdata/academic-module/refs/heads/master1
-rw-r--r--services/gitdiff/testdata/academic-module/refs/remotes/origin/HEAD1
-rw-r--r--services/indexer/indexer.go20
-rw-r--r--services/indexer/notify.go170
-rw-r--r--services/issue/assignee.go314
-rw-r--r--services/issue/assignee_test.go48
-rw-r--r--services/issue/comments.go136
-rw-r--r--services/issue/comments_test.go147
-rw-r--r--services/issue/commit.go202
-rw-r--r--services/issue/commit_test.go301
-rw-r--r--services/issue/content.go25
-rw-r--r--services/issue/issue.go349
-rw-r--r--services/issue/issue_test.go87
-rw-r--r--services/issue/label.go95
-rw-r--r--services/issue/label_test.go62
-rw-r--r--services/issue/main_test.go23
-rw-r--r--services/issue/milestone.go110
-rw-r--r--services/issue/milestone_test.go35
-rw-r--r--services/issue/pull.go153
-rw-r--r--services/issue/reaction.go47
-rw-r--r--services/issue/status.go36
-rw-r--r--services/issue/template.go193
-rw-r--r--services/lfs/locks.go340
-rw-r--r--services/lfs/server.go633
-rw-r--r--services/mailer/incoming/incoming.go394
-rw-r--r--services/mailer/incoming/incoming_handler.go187
-rw-r--r--services/mailer/incoming/incoming_test.go191
-rw-r--r--services/mailer/incoming/payload/payload.go70
-rw-r--r--services/mailer/mail.go751
-rw-r--r--services/mailer/mail_admin_new_user.go78
-rw-r--r--services/mailer/mail_admin_new_user_test.go79
-rw-r--r--services/mailer/mail_auth_test.go62
-rw-r--r--services/mailer/mail_comment.go63
-rw-r--r--services/mailer/mail_issue.go201
-rw-r--r--services/mailer/mail_release.go98
-rw-r--r--services/mailer/mail_repo.go89
-rw-r--r--services/mailer/mail_team_invite.go76
-rw-r--r--services/mailer/mail_test.go540
-rw-r--r--services/mailer/mailer.go448
-rw-r--r--services/mailer/mailer_test.go128
-rw-r--r--services/mailer/main_test.go48
-rw-r--r--services/mailer/notify.go208
-rw-r--r--services/mailer/token/token.go138
-rw-r--r--services/markup/main_test.go16
-rw-r--r--services/markup/processorhelper.go87
-rw-r--r--services/markup/processorhelper_test.go55
-rw-r--r--services/migrations/codebase.go651
-rw-r--r--services/migrations/codebase_test.go151
-rw-r--r--services/migrations/common.go83
-rw-r--r--services/migrations/dump.go737
-rw-r--r--services/migrations/error.go26
-rw-r--r--services/migrations/forgejo_downloader.go20
-rw-r--r--services/migrations/forgejo_downloader_test.go16
-rw-r--r--services/migrations/git.go48
-rw-r--r--services/migrations/gitbucket.go90
-rw-r--r--services/migrations/gitea_downloader.go703
-rw-r--r--services/migrations/gitea_downloader_test.go314
-rw-r--r--services/migrations/gitea_uploader.go1031
-rw-r--r--services/migrations/gitea_uploader_test.go519
-rw-r--r--services/migrations/github.go885
-rw-r--r--services/migrations/github_test.go432
-rw-r--r--services/migrations/gitlab.go784
-rw-r--r--services/migrations/gitlab_test.go646
-rw-r--r--services/migrations/gogs.go330
-rw-r--r--services/migrations/gogs_test.go224
-rw-r--r--services/migrations/http_client.go29
-rw-r--r--services/migrations/main_test.go266
-rw-r--r--services/migrations/migrate.go510
-rw-r--r--services/migrations/migrate_test.go115
-rw-r--r--services/migrations/onedev.go634
-rw-r--r--services/migrations/onedev_test.go149
-rw-r--r--services/migrations/restore.go272
-rw-r--r--services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F6136367217
-rw-r--r--services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F1%2Faward_emoji%3Fpage=1&per_page=224
-rw-r--r--services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F1%2Faward_emoji%3Fpage=2&per_page=226
-rw-r--r--services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=1&per_page=224
-rw-r--r--services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=2&per_page=224
-rw-r--r--services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=3&per_page=224
-rw-r--r--services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=4&per_page=226
-rw-r--r--services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Fdiscussions%3Fpage=1&per_page=10024
-rw-r--r--services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Fresource_state_events%3Fpage=1&per_page=10024
-rw-r--r--services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%3Fpage=1&per_page=2&sort=asc&state=all24
-rw-r--r--services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Flabels%3Fpage=1&per_page=10024
-rw-r--r--services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F117
-rw-r--r--services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Fapprovals17
-rw-r--r--services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=1&per_page=124
-rw-r--r--services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=2&per_page=124
-rw-r--r--services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=3&per_page=126
-rw-r--r--services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F2%2Fapprovals16
-rw-r--r--services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%3Fpage=1&per_page=1&view=simple24
-rw-r--r--services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmilestones%3Fpage=1&per_page=100&state=all24
-rw-r--r--services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Freleases%3Fpage=1&per_page=10024
-rw-r--r--services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2Fforgejo%252Ftest_repo17
-rw-r--r--services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2Fgitea%252Ftest_repo17
-rw-r--r--services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fversion17
-rw-r--r--services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F659099622
-rw-r--r--services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fissues%2F2%2Faward_emoji%3Fpage=1&per_page=1031
-rw-r--r--services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fissues%3Fpage=1&per_page=10&sort=asc&state=all29
-rw-r--r--services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fmerge_requests%2F122
-rw-r--r--services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=1&per_page=1031
-rw-r--r--services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fmerge_requests%3Fpage=1&per_page=10&view=simple29
-rw-r--r--services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2Ftroyengel%252Farchbuild22
-rw-r--r--services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fversion22
-rw-r--r--services/migrations/update.go77
-rw-r--r--services/mirror/mirror.go146
-rw-r--r--services/mirror/mirror_pull.go628
-rw-r--r--services/mirror/mirror_push.go313
-rw-r--r--services/mirror/mirror_test.go46
-rw-r--r--services/mirror/notifier.go31
-rw-r--r--services/mirror/queue.go70
-rw-r--r--services/notify/notifier.go79
-rw-r--r--services/notify/notify.go376
-rw-r--r--services/notify/null.go213
-rw-r--r--services/org/org.go76
-rw-r--r--services/org/org_test.go39
-rw-r--r--services/org/repo.go27
-rw-r--r--services/org/repo_test.go34
-rw-r--r--services/org/team_invite.go22
-rw-r--r--services/packages/alpine/repository.go337
-rw-r--r--services/packages/arch/repository.go368
-rw-r--r--services/packages/auth.go75
-rw-r--r--services/packages/cargo/index.go315
-rw-r--r--services/packages/cleanup/cleanup.go198
-rw-r--r--services/packages/cleanup/cleanup_sha256_test.go116
-rw-r--r--services/packages/cleanup/main_test.go14
-rw-r--r--services/packages/container/blob_uploader.go133
-rw-r--r--services/packages/container/cleanup.go111
-rw-r--r--services/packages/container/cleanup_sha256.go158
-rw-r--r--services/packages/container/common.go35
-rw-r--r--services/packages/debian/repository.go413
-rw-r--r--services/packages/packages.go665
-rw-r--r--services/packages/rpm/repository.go674
-rw-r--r--services/pull/check.go404
-rw-r--r--services/pull/check_test.go70
-rw-r--r--services/pull/comment.go94
-rw-r--r--services/pull/commit_status.go171
-rw-r--r--services/pull/commit_status_test.go65
-rw-r--r--services/pull/edits.go40
-rw-r--r--services/pull/lfs.go135
-rw-r--r--services/pull/main_test.go17
-rw-r--r--services/pull/merge.go562
-rw-r--r--services/pull/merge_ff_only.go21
-rw-r--r--services/pull/merge_merge.go25
-rw-r--r--services/pull/merge_prepare.go288
-rw-r--r--services/pull/merge_rebase.go121
-rw-r--r--services/pull/merge_squash.go86
-rw-r--r--services/pull/merge_test.go67
-rw-r--r--services/pull/patch.go582
-rw-r--r--services/pull/patch_unmerged.go203
-rw-r--r--services/pull/pull.go1032
-rw-r--r--services/pull/pull_test.go94
-rw-r--r--services/pull/review.go465
-rw-r--r--services/pull/review_test.go49
-rw-r--r--services/pull/temp_repo.go196
-rw-r--r--services/pull/update.go180
-rw-r--r--services/pull/update_rebase.go107
-rw-r--r--services/release/release.go470
-rw-r--r--services/release/release_test.go475
-rw-r--r--services/release/tag.go61
-rw-r--r--services/remote/promote.go133
-rw-r--r--services/repository/adopt.go370
-rw-r--r--services/repository/adopt_test.go115
-rw-r--r--services/repository/archiver/archiver.go377
-rw-r--r--services/repository/archiver/archiver_test.go134
-rw-r--r--services/repository/avatar.go116
-rw-r--r--services/repository/avatar_test.go64
-rw-r--r--services/repository/branch.go604
-rw-r--r--services/repository/cache.go30
-rw-r--r--services/repository/check.go202
-rw-r--r--services/repository/collaboration.go52
-rw-r--r--services/repository/collaboration_test.go28
-rw-r--r--services/repository/commit.go55
-rw-r--r--services/repository/commitstatus/commitstatus.go202
-rw-r--r--services/repository/contributors_graph.go321
-rw-r--r--services/repository/contributors_graph_test.go101
-rw-r--r--services/repository/create.go318
-rw-r--r--services/repository/create_test.go149
-rw-r--r--services/repository/delete.go471
-rw-r--r--services/repository/files/cherry_pick.go128
-rw-r--r--services/repository/files/commit.go44
-rw-r--r--services/repository/files/content.go278
-rw-r--r--services/repository/files/content_test.go201
-rw-r--r--services/repository/files/diff.go42
-rw-r--r--services/repository/files/diff_test.go166
-rw-r--r--services/repository/files/file.go174
-rw-r--r--services/repository/files/file_test.go115
-rw-r--r--services/repository/files/patch.go199
-rw-r--r--services/repository/files/temp_repo.go406
-rw-r--r--services/repository/files/temp_repo_test.go28
-rw-r--r--services/repository/files/tree.go101
-rw-r--r--services/repository/files/tree_test.go52
-rw-r--r--services/repository/files/update.go501
-rw-r--r--services/repository/files/upload.go248
-rw-r--r--services/repository/fork.go248
-rw-r--r--services/repository/fork_test.go49
-rw-r--r--services/repository/generate.go391
-rw-r--r--services/repository/generate_test.go67
-rw-r--r--services/repository/hooks.go110
-rw-r--r--services/repository/init.go83
-rw-r--r--services/repository/lfs.go123
-rw-r--r--services/repository/lfs_test.go75
-rw-r--r--services/repository/main_test.go14
-rw-r--r--services/repository/migrate.go289
-rw-r--r--services/repository/push.go420
-rw-r--r--services/repository/repository.go153
-rw-r--r--services/repository/repository_test.go43
-rw-r--r--services/repository/review.go24
-rw-r--r--services/repository/review_test.go29
-rw-r--r--services/repository/setting.go57
-rw-r--r--services/repository/star.go27
-rw-r--r--services/repository/template.go135
-rw-r--r--services/repository/transfer.go434
-rw-r--r--services/repository/transfer_test.go124
-rw-r--r--services/secrets/secrets.go83
-rw-r--r--services/secrets/validation.go25
-rw-r--r--services/task/migrate.go154
-rw-r--r--services/task/task.go169
-rw-r--r--services/uinotification/notify.go261
-rw-r--r--services/user/TestPurgeUser/public_key.yml11
-rw-r--r--services/user/avatar.go73
-rw-r--r--services/user/avatar_test.go81
-rw-r--r--services/user/block.go95
-rw-r--r--services/user/block_test.go92
-rw-r--r--services/user/delete.go224
-rw-r--r--services/user/email.go232
-rw-r--r--services/user/email_test.go178
-rw-r--r--services/user/update.go233
-rw-r--r--services/user/update_test.go121
-rw-r--r--services/user/user.go332
-rw-r--r--services/user/user_test.go264
-rw-r--r--services/webhook/default.go160
-rw-r--r--services/webhook/default_test.go260
-rw-r--r--services/webhook/deliver.go258
-rw-r--r--services/webhook/deliver_test.go332
-rw-r--r--services/webhook/dingtalk.go232
-rw-r--r--services/webhook/dingtalk_test.go252
-rw-r--r--services/webhook/discord.go367
-rw-r--r--services/webhook/discord_test.go348
-rw-r--r--services/webhook/feishu.go200
-rw-r--r--services/webhook/feishu_test.go193
-rw-r--r--services/webhook/general.go354
-rw-r--r--services/webhook/general_test.go673
-rw-r--r--services/webhook/gogs.go42
-rw-r--r--services/webhook/main_test.go26
-rw-r--r--services/webhook/matrix.go316
-rw-r--r--services/webhook/matrix_test.go255
-rw-r--r--services/webhook/msteams.go377
-rw-r--r--services/webhook/msteams_test.go455
-rw-r--r--services/webhook/notifier.go887
-rw-r--r--services/webhook/packagist.go90
-rw-r--r--services/webhook/packagist_test.go70
-rw-r--r--services/webhook/shared/img.go15
-rw-r--r--services/webhook/shared/payloader.go161
-rw-r--r--services/webhook/slack.go361
-rw-r--r--services/webhook/slack_test.go265
-rw-r--r--services/webhook/sourcehut/builds.go301
-rw-r--r--services/webhook/sourcehut/builds_test.go386
-rw-r--r--services/webhook/sourcehut/testdata/repo.git/HEAD1
-rw-r--r--services/webhook/sourcehut/testdata/repo.git/config4
-rw-r--r--services/webhook/sourcehut/testdata/repo.git/description1
-rw-r--r--services/webhook/sourcehut/testdata/repo.git/info/exclude6
-rw-r--r--services/webhook/sourcehut/testdata/repo.git/objects/3c/3d4b799b3933ba687b263eeef2034300a5315ebin0 -> 83 bytes
-rw-r--r--services/webhook/sourcehut/testdata/repo.git/objects/58/771003157b81abc6bf41df0c5db4147a3e3c832
-rw-r--r--services/webhook/sourcehut/testdata/repo.git/objects/69/b217caa89166a02b8cd368b64fb83a44720e141
-rw-r--r--services/webhook/sourcehut/testdata/repo.git/objects/99/fb389b232e5497f0dcdb1c1065eac1d10d3794bin0 -> 57 bytes
-rw-r--r--services/webhook/sourcehut/testdata/repo.git/objects/9e/4b777f81b316a1c75a0797b33add68ee49b0d0bin0 -> 54 bytes
-rw-r--r--services/webhook/sourcehut/testdata/repo.git/objects/a5/4082fdb8e55055382725f10a81bb4dc2b130294
-rw-r--r--services/webhook/sourcehut/testdata/repo.git/objects/aa/3905af404394f576f88f00e7f0919b4b97453fbin0 -> 57 bytes
-rw-r--r--services/webhook/sourcehut/testdata/repo.git/objects/b0/404943256a1f5a50c3726f4378756b4c1e5704bin0 -> 160 bytes
-rw-r--r--services/webhook/sourcehut/testdata/repo.git/objects/d2/e0862c8b8097ba4bdd72946c20479751d307a04
-rw-r--r--services/webhook/sourcehut/testdata/repo.git/refs/heads/main1
-rw-r--r--services/webhook/telegram.go228
-rw-r--r--services/webhook/telegram_test.go212
-rw-r--r--services/webhook/webhook.go270
-rw-r--r--services/webhook/webhook_test.go100
-rw-r--r--services/webhook/wechatwork.go210
-rw-r--r--services/wiki/wiki.go449
-rw-r--r--services/wiki/wiki_path.go172
-rw-r--r--services/wiki/wiki_test.go327
535 files changed, 82780 insertions, 0 deletions
diff --git a/services/actions/auth.go b/services/actions/auth.go
new file mode 100644
index 0000000..1ef21f6
--- /dev/null
+++ b/services/actions/auth.go
@@ -0,0 +1,107 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "fmt"
+ "net/http"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/golang-jwt/jwt/v5"
+)
+
+type actionsClaims struct {
+ jwt.RegisteredClaims
+ Scp string `json:"scp"`
+ TaskID int64
+ RunID int64
+ JobID int64
+ Ac string `json:"ac"`
+}
+
+type actionsCacheScope struct {
+ Scope string
+ Permission actionsCachePermission
+}
+
+type actionsCachePermission int
+
+const (
+ actionsCachePermissionRead = 1 << iota
+ actionsCachePermissionWrite
+)
+
+func CreateAuthorizationToken(taskID, runID, jobID int64) (string, error) {
+ now := time.Now()
+
+ ac, err := json.Marshal(&[]actionsCacheScope{
+ {
+ Scope: "",
+ Permission: actionsCachePermissionWrite,
+ },
+ })
+ if err != nil {
+ return "", err
+ }
+
+ claims := actionsClaims{
+ RegisteredClaims: jwt.RegisteredClaims{
+ ExpiresAt: jwt.NewNumericDate(now.Add(24 * time.Hour)),
+ NotBefore: jwt.NewNumericDate(now),
+ },
+ Scp: fmt.Sprintf("Actions.Results:%d:%d", runID, jobID),
+ Ac: string(ac),
+ TaskID: taskID,
+ RunID: runID,
+ JobID: jobID,
+ }
+ token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
+
+ tokenString, err := token.SignedString(setting.GetGeneralTokenSigningSecret())
+ if err != nil {
+ return "", err
+ }
+
+ return tokenString, nil
+}
+
+func ParseAuthorizationToken(req *http.Request) (int64, error) {
+ h := req.Header.Get("Authorization")
+ if h == "" {
+ return 0, nil
+ }
+
+ parts := strings.SplitN(h, " ", 2)
+ if len(parts) != 2 {
+ log.Error("split token failed: %s", h)
+ return 0, fmt.Errorf("split token failed")
+ }
+
+ return TokenToTaskID(parts[1])
+}
+
+// TokenToTaskID returns the TaskID associated with the provided JWT token
+func TokenToTaskID(token string) (int64, error) {
+ parsedToken, err := jwt.ParseWithClaims(token, &actionsClaims{}, func(t *jwt.Token) (any, error) {
+ if _, ok := t.Method.(*jwt.SigningMethodHMAC); !ok {
+ return nil, fmt.Errorf("unexpected signing method: %v", t.Header["alg"])
+ }
+ return setting.GetGeneralTokenSigningSecret(), nil
+ })
+ if err != nil {
+ return 0, err
+ }
+
+ c, ok := parsedToken.Claims.(*actionsClaims)
+ if !parsedToken.Valid || !ok {
+ return 0, fmt.Errorf("invalid token claim")
+ }
+
+ return c.TaskID, nil
+}
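A minimal round-trip sketch of the two entry points above, assuming application settings (and therefore `setting.GetGeneralTokenSigningSecret()`) have been initialized; the IDs are placeholders:

```go
package main

import (
	"fmt"
	"net/http"

	"code.gitea.io/gitea/services/actions"
)

func main() {
	// Issue a cache-scoped token for task 5 of run 1, job 2 (placeholder IDs).
	token, err := actions.CreateAuthorizationToken(5, 1, 2)
	if err != nil {
		panic(err)
	}

	// Present it the way a runner would, then recover the task ID.
	req, _ := http.NewRequest(http.MethodGet, "/", nil)
	req.Header.Set("Authorization", "Bearer "+token)

	taskID, err := actions.ParseAuthorizationToken(req)
	fmt.Println(taskID, err) // 5 <nil>
}
```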
diff --git a/services/actions/auth_test.go b/services/actions/auth_test.go
new file mode 100644
index 0000000..1400e61
--- /dev/null
+++ b/services/actions/auth_test.go
@@ -0,0 +1,65 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "net/http"
+ "testing"
+
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/golang-jwt/jwt/v5"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestCreateAuthorizationToken(t *testing.T) {
+ var taskID int64 = 23
+ token, err := CreateAuthorizationToken(taskID, 1, 2)
+ require.NoError(t, err)
+ assert.NotEqual(t, "", token)
+ claims := jwt.MapClaims{}
+ _, err = jwt.ParseWithClaims(token, claims, func(t *jwt.Token) (any, error) {
+ return setting.GetGeneralTokenSigningSecret(), nil
+ })
+ require.NoError(t, err)
+ scp, ok := claims["scp"]
+ assert.True(t, ok, "Has scp claim in jwt token")
+ assert.Contains(t, scp, "Actions.Results:1:2")
+ taskIDClaim, ok := claims["TaskID"]
+ assert.True(t, ok, "Has TaskID claim in jwt token")
+ assert.InDelta(t, float64(taskID), taskIDClaim, 0, "Supplied taskid must match stored one")
+ acClaim, ok := claims["ac"]
+ assert.True(t, ok, "Has ac claim in jwt token")
+ ac, ok := acClaim.(string)
+ assert.True(t, ok, "ac claim is a string for buildx gha cache")
+ scopes := []actionsCacheScope{}
+ err = json.Unmarshal([]byte(ac), &scopes)
+ require.NoError(t, err, "ac claim is a json list for buildx gha cache")
+ assert.GreaterOrEqual(t, len(scopes), 1, "Expected at least one action cache scope for buildx gha cache")
+}
+
+func TestParseAuthorizationToken(t *testing.T) {
+ var taskID int64 = 23
+ token, err := CreateAuthorizationToken(taskID, 1, 2)
+ require.NoError(t, err)
+ assert.NotEqual(t, "", token)
+ headers := http.Header{}
+ headers.Set("Authorization", "Bearer "+token)
+ rTaskID, err := ParseAuthorizationToken(&http.Request{
+ Header: headers,
+ })
+ require.NoError(t, err)
+ assert.Equal(t, taskID, rTaskID)
+}
+
+func TestParseAuthorizationTokenNoAuthHeader(t *testing.T) {
+ headers := http.Header{}
+ rTaskID, err := ParseAuthorizationToken(&http.Request{
+ Header: headers,
+ })
+ require.NoError(t, err)
+ assert.Equal(t, int64(0), rTaskID)
+}
diff --git a/services/actions/cleanup.go b/services/actions/cleanup.go
new file mode 100644
index 0000000..34fa268
--- /dev/null
+++ b/services/actions/cleanup.go
@@ -0,0 +1,128 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "os"
+ "time"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ actions_module "code.gitea.io/gitea/modules/actions"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/timeutil"
+)
+
+// Cleanup removes expired actions logs, data and artifacts
+func Cleanup(ctx context.Context) error {
+ // clean up expired artifacts
+ if err := CleanupArtifacts(ctx); err != nil {
+ return fmt.Errorf("cleanup artifacts: %w", err)
+ }
+
+ // clean up old logs
+ if err := CleanupLogs(ctx); err != nil {
+ return fmt.Errorf("cleanup logs: %w", err)
+ }
+
+ return nil
+}
+
+// CleanupArtifacts removes expired and pending-deletion artifacts, and updates the status of their records accordingly
+func CleanupArtifacts(taskCtx context.Context) error {
+ if err := cleanExpiredArtifacts(taskCtx); err != nil {
+ return err
+ }
+ return cleanNeedDeleteArtifacts(taskCtx)
+}
+
+func cleanExpiredArtifacts(taskCtx context.Context) error {
+ artifacts, err := actions_model.ListNeedExpiredArtifacts(taskCtx)
+ if err != nil {
+ return err
+ }
+ log.Info("Found %d expired artifacts", len(artifacts))
+ for _, artifact := range artifacts {
+ if err := actions_model.SetArtifactExpired(taskCtx, artifact.ID); err != nil {
+ log.Error("Cannot set artifact %d expired: %v", artifact.ID, err)
+ continue
+ }
+ if err := storage.ActionsArtifacts.Delete(artifact.StoragePath); err != nil {
+ log.Error("Cannot delete artifact %d: %v", artifact.ID, err)
+ continue
+ }
+ log.Info("Artifact %d set expired", artifact.ID)
+ }
+ return nil
+}
+
+// deleteArtifactBatchSize is the batch size used when deleting artifacts
+const deleteArtifactBatchSize = 100
+
+func cleanNeedDeleteArtifacts(taskCtx context.Context) error {
+ for {
+ artifacts, err := actions_model.ListPendingDeleteArtifacts(taskCtx, deleteArtifactBatchSize)
+ if err != nil {
+ return err
+ }
+ log.Info("Found %d artifacts pending deletion", len(artifacts))
+ for _, artifact := range artifacts {
+ if err := actions_model.SetArtifactDeleted(taskCtx, artifact.ID); err != nil {
+ log.Error("Cannot set artifact %d deleted: %v", artifact.ID, err)
+ continue
+ }
+ if err := storage.ActionsArtifacts.Delete(artifact.StoragePath); err != nil {
+ log.Error("Cannot delete artifact %d: %v", artifact.ID, err)
+ continue
+ }
+ log.Info("Artifact %d set deleted", artifact.ID)
+ }
+ if len(artifacts) < deleteArtifactBatchSize {
+ log.Debug("No more artifacts pending deletion")
+ break
+ }
+ }
+ return nil
+}
+
+const deleteLogBatchSize = 100
+
+// CleanupLogs removes logs which are older than the configured retention time
+func CleanupLogs(ctx context.Context) error {
+ olderThan := timeutil.TimeStampNow().AddDuration(-time.Duration(setting.Actions.LogRetentionDays) * 24 * time.Hour)
+
+ count := 0
+ for {
+ tasks, err := actions_model.FindOldTasksToExpire(ctx, olderThan, deleteLogBatchSize)
+ if err != nil {
+ return fmt.Errorf("find old tasks: %w", err)
+ }
+ for _, task := range tasks {
+ if err := actions_module.RemoveLogs(ctx, task.LogInStorage, task.LogFilename); err != nil && !errors.Is(err, os.ErrNotExist) {
+ log.Error("Failed to remove log %s (in storage %v) of task %v: %v", task.LogFilename, task.LogInStorage, task.ID, err)
+ // do not return error here, continue to next task
+ continue
+ }
+ task.LogIndexes = nil // clear log indexes since it's a heavy field
+ task.LogExpired = true
+ if err := actions_model.UpdateTask(ctx, task, "log_indexes", "log_expired"); err != nil {
+ log.Error("Failed to update task %v: %v", task.ID, err)
+ // do not return error here, continue to next task
+ continue
+ }
+ count++
+ log.Trace("Removed log %s of task %v", task.LogFilename, task.ID)
+ }
+ if len(tasks) < deleteLogBatchSize {
+ break
+ }
+ }
+
+ log.Info("Removed %d logs", count)
+ return nil
+}
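Both `cleanNeedDeleteArtifacts` and `CleanupLogs` drain their backlog in fixed-size batches and stop when a short batch signals the end. A generic sketch of that pattern, with hypothetical `listBatch`/`process` callbacks standing in for the model calls:

```go
package cleanup

import "context"

// drainInBatches repeatedly fetches up to batchSize IDs and processes them,
// treating a short batch as the end of the backlog. Failures on individual
// items are skipped so that one bad record cannot stall the whole sweep.
func drainInBatches(ctx context.Context, batchSize int,
	listBatch func(ctx context.Context, limit int) ([]int64, error),
	process func(ctx context.Context, id int64) error,
) error {
	for {
		ids, err := listBatch(ctx, batchSize)
		if err != nil {
			return err
		}
		for _, id := range ids {
			if err := process(ctx, id); err != nil {
				continue // log-and-continue, as the cleanup code above does
			}
		}
		if len(ids) < batchSize {
			return nil // short batch: backlog drained
		}
	}
}
```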
diff --git a/services/actions/cleanup_test.go b/services/actions/cleanup_test.go
new file mode 100644
index 0000000..65fae84
--- /dev/null
+++ b/services/actions/cleanup_test.go
@@ -0,0 +1,31 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package actions
+
+import (
+ "testing"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestCleanup(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ t.Run("Deletes no longer existing logs", func(t *testing.T) {
+ unittest.AssertSuccessfulInsert(t, &actions_model.ActionTask{ID: 1001, LogExpired: false, LogIndexes: []int64{1, 2, 3, 4}, LogFilename: "does-not-exist", Stopped: timeutil.TimeStamp(1)})
+
+ require.NoError(t, CleanupLogs(db.DefaultContext))
+
+ task := unittest.AssertExistsAndLoadBean(t, &actions_model.ActionTask{ID: 1001})
+ assert.EqualValues(t, "does-not-exist", task.LogFilename)
+ assert.True(t, task.LogExpired)
+ assert.Nil(t, task.LogIndexes)
+ })
+}
diff --git a/services/actions/clear_tasks.go b/services/actions/clear_tasks.go
new file mode 100644
index 0000000..6737378
--- /dev/null
+++ b/services/actions/clear_tasks.go
@@ -0,0 +1,101 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "context"
+ "fmt"
+ "time"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/actions"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+)
+
+// StopZombieTasks stops tasks that are still marked as running but haven't been updated for a long time
+func StopZombieTasks(ctx context.Context) error {
+ return stopTasks(ctx, actions_model.FindTaskOptions{
+ Status: actions_model.StatusRunning,
+ UpdatedBefore: timeutil.TimeStamp(time.Now().Add(-setting.Actions.ZombieTaskTimeout).Unix()),
+ })
+}
+
+// StopEndlessTasks stops tasks that are still running and receiving updates, but have run for longer than the configured limit without finishing
+func StopEndlessTasks(ctx context.Context) error {
+ return stopTasks(ctx, actions_model.FindTaskOptions{
+ Status: actions_model.StatusRunning,
+ StartedBefore: timeutil.TimeStamp(time.Now().Add(-setting.Actions.EndlessTaskTimeout).Unix()),
+ })
+}
+
+func stopTasks(ctx context.Context, opts actions_model.FindTaskOptions) error {
+ tasks, err := db.Find[actions_model.ActionTask](ctx, opts)
+ if err != nil {
+ return fmt.Errorf("find tasks: %w", err)
+ }
+
+ jobs := make([]*actions_model.ActionRunJob, 0, len(tasks))
+ for _, task := range tasks {
+ if err := db.WithTx(ctx, func(ctx context.Context) error {
+ if err := actions_model.StopTask(ctx, task.ID, actions_model.StatusFailure); err != nil {
+ return err
+ }
+ if err := task.LoadJob(ctx); err != nil {
+ return err
+ }
+ jobs = append(jobs, task.Job)
+ return nil
+ }); err != nil {
+ log.Warn("Cannot stop task %v: %v", task.ID, err)
+ continue
+ }
+
+ remove, err := actions.TransferLogs(ctx, task.LogFilename)
+ if err != nil {
+ log.Warn("Cannot transfer logs of task %v: %v", task.ID, err)
+ continue
+ }
+ task.LogInStorage = true
+ if err := actions_model.UpdateTask(ctx, task, "log_in_storage"); err != nil {
+ log.Warn("Cannot update task %v: %v", task.ID, err)
+ continue
+ }
+ remove()
+ }
+
+ CreateCommitStatus(ctx, jobs...)
+
+ return nil
+}
+
+// CancelAbandonedJobs cancels jobs that are waiting or blocked but haven't been picked up by a runner for a long time
+func CancelAbandonedJobs(ctx context.Context) error {
+ jobs, err := db.Find[actions_model.ActionRunJob](ctx, actions_model.FindRunJobOptions{
+ Statuses: []actions_model.Status{actions_model.StatusWaiting, actions_model.StatusBlocked},
+ UpdatedBefore: timeutil.TimeStamp(time.Now().Add(-setting.Actions.AbandonedJobTimeout).Unix()),
+ })
+ if err != nil {
+ log.Warn("find abandoned tasks: %v", err)
+ return err
+ }
+
+ now := timeutil.TimeStampNow()
+ for _, job := range jobs {
+ job.Status = actions_model.StatusCancelled
+ job.Stopped = now
+ if err := db.WithTx(ctx, func(ctx context.Context) error {
+ _, err := actions_model.UpdateRunJob(ctx, job, nil, "status", "stopped")
+ return err
+ }); err != nil {
+ log.Warn("cancel abandoned job %v: %v", job.ID, err)
+ // go on
+ }
+ CreateCommitStatus(ctx, job)
+ }
+
+ return nil
+}
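The three sweeps above are driven by duration settings in `setting.Actions`. A rough sketch (not the actual definition) of the fields consumed here, with what I understand to be the documented `[actions]` ini keys and defaults; verify against your Forgejo version's configuration cheat sheet:

```go
package setting_sketch

import "time"

// Assumed shape of the relevant fields of setting.Actions, mapped from the
// [actions] keys ZOMBIE_TASK_TIMEOUT (running but silent),
// ENDLESS_TASK_TIMEOUT (running too long even with updates) and
// ABANDONED_JOB_TIMEOUT (waiting/blocked, never picked up).
var Actions = struct {
	ZombieTaskTimeout   time.Duration
	EndlessTaskTimeout  time.Duration
	AbandonedJobTimeout time.Duration
}{
	ZombieTaskTimeout:   10 * time.Minute,
	EndlessTaskTimeout:  3 * time.Hour,
	AbandonedJobTimeout: 24 * time.Hour,
}
```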
diff --git a/services/actions/commit_status.go b/services/actions/commit_status.go
new file mode 100644
index 0000000..04dffba
--- /dev/null
+++ b/services/actions/commit_status.go
@@ -0,0 +1,167 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "context"
+ "fmt"
+ "path"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ user_model "code.gitea.io/gitea/models/user"
+ actions_module "code.gitea.io/gitea/modules/actions"
+ "code.gitea.io/gitea/modules/log"
+ api "code.gitea.io/gitea/modules/structs"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ commitstatus_service "code.gitea.io/gitea/services/repository/commitstatus"
+
+ "github.com/nektos/act/pkg/jobparser"
+)
+
+// CreateCommitStatus creates a commit status for the given job.
+// It won't return an error on failure; it only logs the error, because creating a commit status is not critical.
+func CreateCommitStatus(ctx context.Context, jobs ...*actions_model.ActionRunJob) {
+ for _, job := range jobs {
+ if err := createCommitStatus(ctx, job); err != nil {
+ log.Error("Failed to create commit status for job %d: %v", job.ID, err)
+ }
+ }
+}
+
+func createCommitStatus(ctx context.Context, job *actions_model.ActionRunJob) error {
+ if err := job.LoadAttributes(ctx); err != nil {
+ return fmt.Errorf("load run: %w", err)
+ }
+
+ run := job.Run
+
+ var (
+ sha string
+ event string
+ )
+ switch run.Event {
+ case webhook_module.HookEventPush:
+ event = "push"
+ payload, err := run.GetPushEventPayload()
+ if err != nil {
+ return fmt.Errorf("GetPushEventPayload: %w", err)
+ }
+ if payload.HeadCommit == nil {
+ return fmt.Errorf("head commit is missing in event payload")
+ }
+ sha = payload.HeadCommit.ID
+ case webhook_module.HookEventPullRequest, webhook_module.HookEventPullRequestSync, webhook_module.HookEventPullRequestLabel, webhook_module.HookEventPullRequestAssign, webhook_module.HookEventPullRequestMilestone:
+ if run.TriggerEvent == actions_module.GithubEventPullRequestTarget {
+ event = "pull_request_target"
+ } else {
+ event = "pull_request"
+ }
+ payload, err := run.GetPullRequestEventPayload()
+ if err != nil {
+ return fmt.Errorf("GetPullRequestEventPayload: %w", err)
+ }
+ if payload.PullRequest == nil {
+ return fmt.Errorf("pull request is missing in event payload")
+ } else if payload.PullRequest.Head == nil {
+ return fmt.Errorf("head of pull request is missing in event payload")
+ }
+ sha = payload.PullRequest.Head.Sha
+ case webhook_module.HookEventRelease:
+ event = string(run.Event)
+ sha = run.CommitSHA
+ default:
+ return nil
+ }
+
+ repo := run.Repo
+ // TODO: store workflow name as a field in ActionRun to avoid parsing
+ runName := path.Base(run.WorkflowID)
+ if wfs, err := jobparser.Parse(job.WorkflowPayload); err == nil && len(wfs) > 0 {
+ runName = wfs[0].Name
+ }
+ ctxname := fmt.Sprintf("%s / %s (%s)", runName, job.Name, event)
+ state := toCommitStatus(job.Status)
+ if statuses, _, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptionsAll); err == nil {
+ for _, v := range statuses {
+ if v.Context == ctxname {
+ if v.State == state {
+ // no need to update
+ return nil
+ }
+ break
+ }
+ }
+ } else {
+ return fmt.Errorf("GetLatestCommitStatus: %w", err)
+ }
+
+ description := ""
+ switch job.Status {
+ // TODO: if we want support description in different languages, we need to support i18n placeholders in it
+ case actions_model.StatusSuccess:
+ description = fmt.Sprintf("Successful in %s", job.Duration())
+ case actions_model.StatusFailure:
+ description = fmt.Sprintf("Failing after %s", job.Duration())
+ case actions_model.StatusCancelled:
+ description = "Has been cancelled"
+ case actions_model.StatusSkipped:
+ description = "Has been skipped"
+ case actions_model.StatusRunning:
+ description = "Has started running"
+ case actions_model.StatusWaiting:
+ description = "Waiting to run"
+ case actions_model.StatusBlocked:
+ description = "Blocked by required conditions"
+ }
+
+ index, err := getIndexOfJob(ctx, job)
+ if err != nil {
+ return fmt.Errorf("getIndexOfJob: %w", err)
+ }
+
+ creator := user_model.NewActionsUser()
+ if err := commitstatus_service.CreateCommitStatus(ctx, repo, creator,
+ sha,
+ &git_model.CommitStatus{
+ SHA: sha,
+ TargetURL: fmt.Sprintf("%s/jobs/%d", run.Link(), index),
+ Description: description,
+ Context: ctxname,
+ CreatorID: creator.ID,
+ State: state,
+ }); err != nil {
+ return fmt.Errorf("NewCommitStatus: %w", err)
+ }
+
+ return nil
+}
+
+func toCommitStatus(status actions_model.Status) api.CommitStatusState {
+ switch status {
+ case actions_model.StatusSuccess, actions_model.StatusSkipped:
+ return api.CommitStatusSuccess
+ case actions_model.StatusFailure, actions_model.StatusCancelled:
+ return api.CommitStatusFailure
+ case actions_model.StatusWaiting, actions_model.StatusBlocked, actions_model.StatusRunning:
+ return api.CommitStatusPending
+ default:
+ return api.CommitStatusError
+ }
+}
+
+func getIndexOfJob(ctx context.Context, job *actions_model.ActionRunJob) (int, error) {
+ // TODO: store job index as a field in ActionRunJob to avoid this
+ jobs, err := actions_model.GetRunJobsByRunID(ctx, job.RunID)
+ if err != nil {
+ return 0, err
+ }
+ for i, v := range jobs {
+ if v.ID == job.ID {
+ return i, nil
+ }
+ }
+ return 0, nil
+}
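For a push-triggered job `build` in a workflow named `CI`, `createCommitStatus` above renders the status context `CI / build (push)`. A tiny sketch of that formatting:

```go
package commitstatus_sketch

import "fmt"

// statusContext mirrors the ctxname formatting used above: workflow name,
// job name, and the triggering event.
func statusContext(workflow, job, event string) string {
	return fmt.Sprintf("%s / %s (%s)", workflow, job, event)
}

// statusContext("CI", "build", "push") == "CI / build (push)"
```

Note the mapping in `toCommitStatus`: skipped jobs map to `CommitStatusSuccess`, so a skipped job reads as passing rather than pending or failed.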
diff --git a/services/actions/init.go b/services/actions/init.go
new file mode 100644
index 0000000..0f49cb6
--- /dev/null
+++ b/services/actions/init.go
@@ -0,0 +1,26 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/queue"
+ "code.gitea.io/gitea/modules/setting"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+func Init() {
+ if !setting.Actions.Enabled {
+ return
+ }
+
+ jobEmitterQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "actions_ready_job", jobEmitterQueueHandler)
+ if jobEmitterQueue == nil {
+ log.Fatal("Unable to create actions_ready_job queue")
+ }
+ go graceful.GetManager().RunWithCancel(jobEmitterQueue)
+
+ notify_service.RegisterNotifier(NewNotifier())
+}
diff --git a/services/actions/interface.go b/services/actions/interface.go
new file mode 100644
index 0000000..d4fa782
--- /dev/null
+++ b/services/actions/interface.go
@@ -0,0 +1,28 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import "code.gitea.io/gitea/services/context"
+
+// API for actions of a repository or organization
+type API interface {
+ // ListActionsSecrets list secrets
+ ListActionsSecrets(*context.APIContext)
+ // CreateOrUpdateSecret create or update a secret
+ CreateOrUpdateSecret(*context.APIContext)
+ // DeleteSecret delete a secret
+ DeleteSecret(*context.APIContext)
+ // ListVariables list variables
+ ListVariables(*context.APIContext)
+ // GetVariable get a variable
+ GetVariable(*context.APIContext)
+ // DeleteVariable delete a variable
+ DeleteVariable(*context.APIContext)
+ // CreateVariable create a variable
+ CreateVariable(*context.APIContext)
+ // UpdateVariable update a variable
+ UpdateVariable(*context.APIContext)
+ // GetRegistrationToken get registration token
+ GetRegistrationToken(*context.APIContext)
+}
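The tree includes several `assert_interface_test.go` files (see the diffstat) that pin implementations to their interfaces at compile time; the same idiom applies to `API`. A sketch, where `repoAPI` is a hypothetical stub type:

```go
package actions_sketch

import (
	"code.gitea.io/gitea/services/actions"
	"code.gitea.io/gitea/services/context"
)

// repoAPI is a hypothetical stub; the assertion below fails to compile
// if it does not implement every method of actions.API.
type repoAPI struct{}

func (repoAPI) ListActionsSecrets(*context.APIContext)   {}
func (repoAPI) CreateOrUpdateSecret(*context.APIContext) {}
func (repoAPI) DeleteSecret(*context.APIContext)         {}
func (repoAPI) ListVariables(*context.APIContext)        {}
func (repoAPI) GetVariable(*context.APIContext)          {}
func (repoAPI) DeleteVariable(*context.APIContext)       {}
func (repoAPI) CreateVariable(*context.APIContext)       {}
func (repoAPI) UpdateVariable(*context.APIContext)       {}
func (repoAPI) GetRegistrationToken(*context.APIContext) {}

var _ actions.API = repoAPI{}
```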
diff --git a/services/actions/job_emitter.go b/services/actions/job_emitter.go
new file mode 100644
index 0000000..1f859fc
--- /dev/null
+++ b/services/actions/job_emitter.go
@@ -0,0 +1,162 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "context"
+ "errors"
+ "fmt"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/queue"
+
+ "github.com/nektos/act/pkg/jobparser"
+ "xorm.io/builder"
+)
+
+var jobEmitterQueue *queue.WorkerPoolQueue[*jobUpdate]
+
+type jobUpdate struct {
+ RunID int64
+}
+
+func EmitJobsIfReady(runID int64) error {
+ err := jobEmitterQueue.Push(&jobUpdate{
+ RunID: runID,
+ })
+ if errors.Is(err, queue.ErrAlreadyInQueue) {
+ return nil
+ }
+ return err
+}
+
+func jobEmitterQueueHandler(items ...*jobUpdate) []*jobUpdate {
+ ctx := graceful.GetManager().ShutdownContext()
+ var ret []*jobUpdate
+ for _, update := range items {
+ if err := checkJobsOfRun(ctx, update.RunID); err != nil {
+ ret = append(ret, update)
+ }
+ }
+ return ret
+}
+
+func checkJobsOfRun(ctx context.Context, runID int64) error {
+ jobs, err := db.Find[actions_model.ActionRunJob](ctx, actions_model.FindRunJobOptions{RunID: runID})
+ if err != nil {
+ return err
+ }
+ if err := db.WithTx(ctx, func(ctx context.Context) error {
+ idToJobs := make(map[string][]*actions_model.ActionRunJob, len(jobs))
+ for _, job := range jobs {
+ idToJobs[job.JobID] = append(idToJobs[job.JobID], job)
+ }
+
+ updates := newJobStatusResolver(jobs).Resolve()
+ for _, job := range jobs {
+ if status, ok := updates[job.ID]; ok {
+ job.Status = status
+ if n, err := actions_model.UpdateRunJob(ctx, job, builder.Eq{"status": actions_model.StatusBlocked}, "status"); err != nil {
+ return err
+ } else if n != 1 {
+ return fmt.Errorf("no affected for updating blocked job %v", job.ID)
+ }
+ }
+ }
+ return nil
+ }); err != nil {
+ return err
+ }
+ CreateCommitStatus(ctx, jobs...)
+ return nil
+}
+
+type jobStatusResolver struct {
+ statuses map[int64]actions_model.Status
+ needs map[int64][]int64
+ jobMap map[int64]*actions_model.ActionRunJob
+}
+
+func newJobStatusResolver(jobs actions_model.ActionJobList) *jobStatusResolver {
+ idToJobs := make(map[string][]*actions_model.ActionRunJob, len(jobs))
+ jobMap := make(map[int64]*actions_model.ActionRunJob)
+ for _, job := range jobs {
+ idToJobs[job.JobID] = append(idToJobs[job.JobID], job)
+ jobMap[job.ID] = job
+ }
+
+ statuses := make(map[int64]actions_model.Status, len(jobs))
+ needs := make(map[int64][]int64, len(jobs))
+ for _, job := range jobs {
+ statuses[job.ID] = job.Status
+ for _, need := range job.Needs {
+ for _, v := range idToJobs[need] {
+ needs[job.ID] = append(needs[job.ID], v.ID)
+ }
+ }
+ }
+ return &jobStatusResolver{
+ statuses: statuses,
+ needs: needs,
+ jobMap: jobMap,
+ }
+}
+
+func (r *jobStatusResolver) Resolve() map[int64]actions_model.Status {
+ ret := map[int64]actions_model.Status{}
+ for i := 0; i < len(r.statuses); i++ {
+ updated := r.resolve()
+ if len(updated) == 0 {
+ return ret
+ }
+ for k, v := range updated {
+ ret[k] = v
+ r.statuses[k] = v
+ }
+ }
+ return ret
+}
+
+func (r *jobStatusResolver) resolve() map[int64]actions_model.Status {
+ ret := map[int64]actions_model.Status{}
+ for id, status := range r.statuses {
+ if status != actions_model.StatusBlocked {
+ continue
+ }
+ allDone, allSucceed := true, true
+ for _, need := range r.needs[id] {
+ needStatus := r.statuses[need]
+ if !needStatus.IsDone() {
+ allDone = false
+ }
+ if needStatus.In(actions_model.StatusFailure, actions_model.StatusCancelled, actions_model.StatusSkipped) {
+ allSucceed = false
+ }
+ }
+ if allDone {
+ if allSucceed {
+ ret[id] = actions_model.StatusWaiting
+ } else {
+ // Check if the job has an "if" condition
+ hasIf := false
+ if wfJobs, _ := jobparser.Parse(r.jobMap[id].WorkflowPayload); len(wfJobs) == 1 {
+ _, wfJob := wfJobs[0].Job()
+ hasIf = len(wfJob.If.Value) > 0
+ }
+
+ if hasIf {
+ // act_runner will check the "if" condition
+ ret[id] = actions_model.StatusWaiting
+ } else {
+ // If the "if" condition is empty and not all dependent jobs completed successfully,
+ // the job should be skipped.
+ ret[id] = actions_model.StatusSkipped
+ }
+ }
+ }
+ }
+ return ret
+}
diff --git a/services/actions/job_emitter_test.go b/services/actions/job_emitter_test.go
new file mode 100644
index 0000000..58c2dc3
--- /dev/null
+++ b/services/actions/job_emitter_test.go
@@ -0,0 +1,136 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "testing"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func Test_jobStatusResolver_Resolve(t *testing.T) {
+ tests := []struct {
+ name string
+ jobs actions_model.ActionJobList
+ want map[int64]actions_model.Status
+ }{
+ {
+ name: "no blocked",
+ jobs: actions_model.ActionJobList{
+ {ID: 1, JobID: "1", Status: actions_model.StatusWaiting, Needs: []string{}},
+ {ID: 2, JobID: "2", Status: actions_model.StatusWaiting, Needs: []string{}},
+ {ID: 3, JobID: "3", Status: actions_model.StatusWaiting, Needs: []string{}},
+ },
+ want: map[int64]actions_model.Status{},
+ },
+ {
+ name: "single blocked",
+ jobs: actions_model.ActionJobList{
+ {ID: 1, JobID: "1", Status: actions_model.StatusSuccess, Needs: []string{}},
+ {ID: 2, JobID: "2", Status: actions_model.StatusBlocked, Needs: []string{"1"}},
+ {ID: 3, JobID: "3", Status: actions_model.StatusWaiting, Needs: []string{}},
+ },
+ want: map[int64]actions_model.Status{
+ 2: actions_model.StatusWaiting,
+ },
+ },
+ {
+ name: "multiple blocked",
+ jobs: actions_model.ActionJobList{
+ {ID: 1, JobID: "1", Status: actions_model.StatusSuccess, Needs: []string{}},
+ {ID: 2, JobID: "2", Status: actions_model.StatusBlocked, Needs: []string{"1"}},
+ {ID: 3, JobID: "3", Status: actions_model.StatusBlocked, Needs: []string{"1"}},
+ },
+ want: map[int64]actions_model.Status{
+ 2: actions_model.StatusWaiting,
+ 3: actions_model.StatusWaiting,
+ },
+ },
+ {
+ name: "chain blocked",
+ jobs: actions_model.ActionJobList{
+ {ID: 1, JobID: "1", Status: actions_model.StatusFailure, Needs: []string{}},
+ {ID: 2, JobID: "2", Status: actions_model.StatusBlocked, Needs: []string{"1"}},
+ {ID: 3, JobID: "3", Status: actions_model.StatusBlocked, Needs: []string{"2"}},
+ },
+ want: map[int64]actions_model.Status{
+ 2: actions_model.StatusSkipped,
+ 3: actions_model.StatusSkipped,
+ },
+ },
+ {
+ name: "loop need",
+ jobs: actions_model.ActionJobList{
+ {ID: 1, JobID: "1", Status: actions_model.StatusBlocked, Needs: []string{"3"}},
+ {ID: 2, JobID: "2", Status: actions_model.StatusBlocked, Needs: []string{"1"}},
+ {ID: 3, JobID: "3", Status: actions_model.StatusBlocked, Needs: []string{"2"}},
+ },
+ want: map[int64]actions_model.Status{},
+ },
+ {
+ name: "`if` is not empty and all jobs in `needs` completed successfully",
+ jobs: actions_model.ActionJobList{
+ {ID: 1, JobID: "job1", Status: actions_model.StatusSuccess, Needs: []string{}},
+ {ID: 2, JobID: "job2", Status: actions_model.StatusBlocked, Needs: []string{"job1"}, WorkflowPayload: []byte(
+ `
+name: test
+on: push
+jobs:
+ job2:
+ runs-on: ubuntu-latest
+ needs: job1
+ if: ${{ always() && needs.job1.result == 'success' }}
+ steps:
+ - run: echo "will be checked by act_runner"
+`)},
+ },
+ want: map[int64]actions_model.Status{2: actions_model.StatusWaiting},
+ },
+ {
+ name: "`if` is not empty and not all jobs in `needs` completed successfully",
+ jobs: actions_model.ActionJobList{
+ {ID: 1, JobID: "job1", Status: actions_model.StatusFailure, Needs: []string{}},
+ {ID: 2, JobID: "job2", Status: actions_model.StatusBlocked, Needs: []string{"job1"}, WorkflowPayload: []byte(
+ `
+name: test
+on: push
+jobs:
+ job2:
+ runs-on: ubuntu-latest
+ needs: job1
+ if: ${{ always() && needs.job1.result == 'failure' }}
+ steps:
+ - run: echo "will be checked by act_runner"
+`)},
+ },
+ want: map[int64]actions_model.Status{2: actions_model.StatusWaiting},
+ },
+ {
+ name: "`if` is empty and not all jobs in `needs` completed successfully",
+ jobs: actions_model.ActionJobList{
+ {ID: 1, JobID: "job1", Status: actions_model.StatusFailure, Needs: []string{}},
+ {ID: 2, JobID: "job2", Status: actions_model.StatusBlocked, Needs: []string{"job1"}, WorkflowPayload: []byte(
+ `
+name: test
+on: push
+jobs:
+ job2:
+ runs-on: ubuntu-latest
+ needs: job1
+ steps:
+ - run: echo "should be skipped"
+`)},
+ },
+ want: map[int64]actions_model.Status{2: actions_model.StatusSkipped},
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ r := newJobStatusResolver(tt.jobs)
+ assert.Equal(t, tt.want, r.Resolve())
+ })
+ }
+}
diff --git a/services/actions/main_test.go b/services/actions/main_test.go
new file mode 100644
index 0000000..ea37ff5
--- /dev/null
+++ b/services/actions/main_test.go
@@ -0,0 +1,17 @@
+// Copyright 2024 The Forgejo Authors
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+
+ _ "code.gitea.io/gitea/models/actions"
+ _ "code.gitea.io/gitea/models/activities"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
diff --git a/services/actions/notifier.go b/services/actions/notifier.go
new file mode 100644
index 0000000..2dd8115
--- /dev/null
+++ b/services/actions/notifier.go
@@ -0,0 +1,777 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "context"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ packages_model "code.gitea.io/gitea/models/packages"
+ perm_model "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ "code.gitea.io/gitea/services/convert"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+type actionsNotifier struct {
+ notify_service.NullNotifier
+}
+
+var _ notify_service.Notifier = &actionsNotifier{}
+
+// NewNotifier creates a new actionsNotifier
+func NewNotifier() notify_service.Notifier {
+ return &actionsNotifier{}
+}
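+
+// The notifier is typically registered once during service startup (for
+// example via notify_service.RegisterNotifier(NewNotifier()) in this
+// package's Init); each method below then fires on the matching event.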
+
+// NewIssue notifies that a new issue was created
+func (n *actionsNotifier) NewIssue(ctx context.Context, issue *issues_model.Issue, _ []*user_model.User) {
+ ctx = withMethod(ctx, "NewIssue")
+ if err := issue.LoadRepo(ctx); err != nil {
+ log.Error("issue.LoadRepo: %v", err)
+ return
+ }
+ if err := issue.LoadPoster(ctx); err != nil {
+ log.Error("issue.LoadPoster: %v", err)
+ return
+ }
+ permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, issue.Poster)
+
+ newNotifyInputFromIssue(issue, webhook_module.HookEventIssues).WithPayload(&api.IssuePayload{
+ Action: api.HookIssueOpened,
+ Index: issue.Index,
+ Issue: convert.ToAPIIssue(ctx, issue.Poster, issue),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, issue.Poster, nil),
+	}).Notify(ctx)
+}
+
+func (n *actionsNotifier) IssueChangeTitle(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, _ string) {
+ ctx = withMethod(ctx, "IssueChangeTitle")
+
+ n.issueChange(ctx, doer, issue)
+}
+
+// IssueChangeContent notifies that the content of an issue changed
+func (n *actionsNotifier) IssueChangeContent(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, _ string) {
+ ctx = withMethod(ctx, "IssueChangeContent")
+
+ n.issueChange(ctx, doer, issue)
+}
+
+func (n *actionsNotifier) issueChange(ctx context.Context, doer *user_model.User, issue *issues_model.Issue) {
+ var err error
+ if err = issue.LoadRepo(ctx); err != nil {
+ log.Error("LoadRepo: %v", err)
+ return
+ }
+
+ permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, issue.Poster)
+ if issue.IsPull {
+ if err = issue.LoadPullRequest(ctx); err != nil {
+ log.Error("loadPullRequest: %v", err)
+ return
+ }
+ newNotifyInputFromIssue(issue, webhook_module.HookEventPullRequest).
+ WithDoer(doer).
+ WithPayload(&api.PullRequestPayload{
+ Action: api.HookIssueEdited,
+ Index: issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
+ Repository: convert.ToRepo(ctx, issue.Repo, access_model.Permission{AccessMode: perm_model.AccessModeNone}),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }).
+ WithPullRequest(issue.PullRequest).
+ Notify(ctx)
+ return
+ }
+ newNotifyInputFromIssue(issue, webhook_module.HookEventIssues).
+ WithDoer(doer).
+ WithPayload(&api.IssuePayload{
+ Action: api.HookIssueEdited,
+ Index: issue.Index,
+ Issue: convert.ToAPIIssue(ctx, doer, issue),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }).
+ Notify(ctx)
+}
+
+// IssueChangeStatus notifies that an issue was closed or reopened
+func (n *actionsNotifier) IssueChangeStatus(ctx context.Context, doer *user_model.User, commitID string, issue *issues_model.Issue, _ *issues_model.Comment, isClosed bool) {
+ ctx = withMethod(ctx, "IssueChangeStatus")
+ permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, issue.Poster)
+ if issue.IsPull {
+ if err := issue.LoadPullRequest(ctx); err != nil {
+ log.Error("LoadPullRequest: %v", err)
+ return
+ }
+		// Merging a pull request calls issue.changeStatus, so it needs to be handled separately.
+ apiPullRequest := &api.PullRequestPayload{
+ Index: issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ CommitID: commitID,
+ }
+ if isClosed {
+ apiPullRequest.Action = api.HookIssueClosed
+ } else {
+ apiPullRequest.Action = api.HookIssueReOpened
+ }
+ newNotifyInputFromIssue(issue, webhook_module.HookEventPullRequest).
+ WithDoer(doer).
+ WithPayload(apiPullRequest).
+ WithPullRequest(issue.PullRequest).
+ Notify(ctx)
+ return
+ }
+ apiIssue := &api.IssuePayload{
+ Index: issue.Index,
+ Issue: convert.ToAPIIssue(ctx, doer, issue),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }
+ if isClosed {
+ apiIssue.Action = api.HookIssueClosed
+ } else {
+ apiIssue.Action = api.HookIssueReOpened
+ }
+ newNotifyInputFromIssue(issue, webhook_module.HookEventIssues).
+ WithDoer(doer).
+ WithPayload(apiIssue).
+ Notify(ctx)
+}
+
+// IssueChangeAssignee notifies that an issue was assigned or unassigned
+func (n *actionsNotifier) IssueChangeAssignee(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, assignee *user_model.User, removed bool, comment *issues_model.Comment) {
+ ctx = withMethod(ctx, "IssueChangeAssignee")
+
+ var action api.HookIssueAction
+ if removed {
+ action = api.HookIssueUnassigned
+ } else {
+ action = api.HookIssueAssigned
+ }
+
+ hookEvent := webhook_module.HookEventIssueAssign
+ if issue.IsPull {
+ hookEvent = webhook_module.HookEventPullRequestAssign
+ }
+
+ notifyIssueChange(ctx, doer, issue, hookEvent, action, nil)
+}
+
+// IssueChangeMilestone notifies that the milestone of an issue changed
+func (n *actionsNotifier) IssueChangeMilestone(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldMilestoneID int64) {
+ ctx = withMethod(ctx, "IssueChangeMilestone")
+
+ var action api.HookIssueAction
+ if issue.MilestoneID > 0 {
+ action = api.HookIssueMilestoned
+ } else {
+ action = api.HookIssueDemilestoned
+ }
+
+ hookEvent := webhook_module.HookEventIssueMilestone
+ if issue.IsPull {
+ hookEvent = webhook_module.HookEventPullRequestMilestone
+ }
+
+ notifyIssueChange(ctx, doer, issue, hookEvent, action, nil)
+}
+
+func (n *actionsNotifier) IssueChangeLabels(ctx context.Context, doer *user_model.User, issue *issues_model.Issue,
+ addedLabels, removedLabels []*issues_model.Label,
+) {
+ ctx = withMethod(ctx, "IssueChangeLabels")
+
+ hookEvent := webhook_module.HookEventIssueLabel
+ if issue.IsPull {
+ hookEvent = webhook_module.HookEventPullRequestLabel
+ }
+
+ for _, added := range addedLabels {
+ notifyIssueChange(ctx, doer, issue, hookEvent, api.HookIssueLabelUpdated, added)
+ }
+ for _, removed := range removedLabels {
+ notifyIssueChange(ctx, doer, issue, hookEvent, api.HookIssueLabelCleared, removed)
+ }
+}
+
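+// notifyIssueChange builds the payload for issue and pull request changes
+// (assignees, milestones, labels) and dispatches it as either a pull
+// request or an issue event, depending on the issue type.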
+func notifyIssueChange(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, event webhook_module.HookEventType, action api.HookIssueAction, label *issues_model.Label) {
+ var err error
+ if err = issue.LoadRepo(ctx); err != nil {
+ log.Error("LoadRepo: %v", err)
+ return
+ }
+
+ if err = issue.LoadPoster(ctx); err != nil {
+ log.Error("LoadPoster: %v", err)
+ return
+ }
+
+ var apiLabel *api.Label
+ if action == api.HookIssueLabelUpdated || action == api.HookIssueLabelCleared {
+ apiLabel = convert.ToLabel(label, issue.Repo, nil)
+ }
+
+ if issue.IsPull {
+ if err = issue.LoadPullRequest(ctx); err != nil {
+ log.Error("loadPullRequest: %v", err)
+ return
+ }
+ newNotifyInputFromIssue(issue, event).
+ WithDoer(doer).
+ WithPayload(&api.PullRequestPayload{
+ Action: action,
+ Index: issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
+ Repository: convert.ToRepo(ctx, issue.Repo, access_model.Permission{AccessMode: perm_model.AccessModeNone}),
+ Sender: convert.ToUser(ctx, doer, nil),
+ Label: apiLabel,
+ }).
+ WithPullRequest(issue.PullRequest).
+ Notify(ctx)
+ return
+ }
+ permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, issue.Poster)
+ newNotifyInputFromIssue(issue, event).
+ WithDoer(doer).
+ WithPayload(&api.IssuePayload{
+ Action: action,
+ Index: issue.Index,
+ Issue: convert.ToAPIIssue(ctx, doer, issue),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ Label: apiLabel,
+ }).
+ Notify(ctx)
+}
+
+// CreateIssueComment notifies that a comment was created on an issue
+func (n *actionsNotifier) CreateIssueComment(ctx context.Context, doer *user_model.User, repo *repo_model.Repository,
+ issue *issues_model.Issue, comment *issues_model.Comment, _ []*user_model.User,
+) {
+ ctx = withMethod(ctx, "CreateIssueComment")
+
+ if issue.IsPull {
+ notifyIssueCommentChange(ctx, doer, comment, "", webhook_module.HookEventPullRequestComment, api.HookIssueCommentCreated)
+ return
+ }
+ notifyIssueCommentChange(ctx, doer, comment, "", webhook_module.HookEventIssueComment, api.HookIssueCommentCreated)
+}
+
+func (n *actionsNotifier) UpdateComment(ctx context.Context, doer *user_model.User, c *issues_model.Comment, oldContent string) {
+ ctx = withMethod(ctx, "UpdateComment")
+
+ if err := c.LoadIssue(ctx); err != nil {
+ log.Error("LoadIssue: %v", err)
+ return
+ }
+
+ if c.Issue.IsPull {
+ notifyIssueCommentChange(ctx, doer, c, oldContent, webhook_module.HookEventPullRequestComment, api.HookIssueCommentEdited)
+ return
+ }
+ notifyIssueCommentChange(ctx, doer, c, oldContent, webhook_module.HookEventIssueComment, api.HookIssueCommentEdited)
+}
+
+func (n *actionsNotifier) DeleteComment(ctx context.Context, doer *user_model.User, comment *issues_model.Comment) {
+ ctx = withMethod(ctx, "DeleteComment")
+
+ if err := comment.LoadIssue(ctx); err != nil {
+ log.Error("LoadIssue: %v", err)
+ return
+ }
+
+ if comment.Issue.IsPull {
+ notifyIssueCommentChange(ctx, doer, comment, "", webhook_module.HookEventPullRequestComment, api.HookIssueCommentDeleted)
+ return
+ }
+ notifyIssueCommentChange(ctx, doer, comment, "", webhook_module.HookEventIssueComment, api.HookIssueCommentDeleted)
+}
+
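+// notifyIssueCommentChange dispatches created/edited/deleted comment events,
+// attaching the previous body as a change payload for edits and routing the
+// event through the pull request when the comment belongs to one.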
+func notifyIssueCommentChange(ctx context.Context, doer *user_model.User, comment *issues_model.Comment, oldContent string, event webhook_module.HookEventType, action api.HookIssueCommentAction) {
+ if err := comment.LoadIssue(ctx); err != nil {
+ log.Error("LoadIssue: %v", err)
+ return
+ }
+ if err := comment.Issue.LoadAttributes(ctx); err != nil {
+ log.Error("LoadAttributes: %v", err)
+ return
+ }
+
+ permission, _ := access_model.GetUserRepoPermission(ctx, comment.Issue.Repo, doer)
+
+ payload := &api.IssueCommentPayload{
+ Action: action,
+ Issue: convert.ToAPIIssue(ctx, doer, comment.Issue),
+ Comment: convert.ToAPIComment(ctx, comment.Issue.Repo, comment),
+ Repository: convert.ToRepo(ctx, comment.Issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ IsPull: comment.Issue.IsPull,
+ }
+
+ if action == api.HookIssueCommentEdited {
+ payload.Changes = &api.ChangesPayload{
+ Body: &api.ChangesFromPayload{
+ From: oldContent,
+ },
+ }
+ }
+
+ if comment.Issue.IsPull {
+ if err := comment.Issue.LoadPullRequest(ctx); err != nil {
+ log.Error("LoadPullRequest: %v", err)
+ return
+ }
+ newNotifyInputFromIssue(comment.Issue, event).
+ WithDoer(doer).
+ WithPayload(payload).
+ WithPullRequest(comment.Issue.PullRequest).
+ Notify(ctx)
+ return
+ }
+
+ newNotifyInputFromIssue(comment.Issue, event).
+ WithDoer(doer).
+ WithPayload(payload).
+ Notify(ctx)
+}
+
+func (n *actionsNotifier) NewPullRequest(ctx context.Context, pull *issues_model.PullRequest, _ []*user_model.User) {
+ ctx = withMethod(ctx, "NewPullRequest")
+
+ if err := pull.LoadIssue(ctx); err != nil {
+ log.Error("pull.LoadIssue: %v", err)
+ return
+ }
+ if err := pull.Issue.LoadRepo(ctx); err != nil {
+ log.Error("pull.Issue.LoadRepo: %v", err)
+ return
+ }
+ if err := pull.Issue.LoadPoster(ctx); err != nil {
+ log.Error("pull.Issue.LoadPoster: %v", err)
+ return
+ }
+
+ permission, _ := access_model.GetUserRepoPermission(ctx, pull.Issue.Repo, pull.Issue.Poster)
+
+ newNotifyInputFromIssue(pull.Issue, webhook_module.HookEventPullRequest).
+ WithPayload(&api.PullRequestPayload{
+ Action: api.HookIssueOpened,
+ Index: pull.Issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, pull, nil),
+ Repository: convert.ToRepo(ctx, pull.Issue.Repo, permission),
+ Sender: convert.ToUser(ctx, pull.Issue.Poster, nil),
+ }).
+ WithPullRequest(pull).
+ Notify(ctx)
+}
+
+func (n *actionsNotifier) CreateRepository(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository) {
+ ctx = withMethod(ctx, "CreateRepository")
+
+ newNotifyInput(repo, doer, webhook_module.HookEventRepository).WithPayload(&api.RepositoryPayload{
+ Action: api.HookRepoCreated,
+ Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
+ Organization: convert.ToUser(ctx, u, nil),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }).Notify(ctx)
+}
+
+func (n *actionsNotifier) ForkRepository(ctx context.Context, doer *user_model.User, oldRepo, repo *repo_model.Repository) {
+ ctx = withMethod(ctx, "ForkRepository")
+
+ oldPermission, _ := access_model.GetUserRepoPermission(ctx, oldRepo, doer)
+ permission, _ := access_model.GetUserRepoPermission(ctx, repo, doer)
+
+	// fork webhook, sent on the base (original) repository
+ newNotifyInput(oldRepo, doer, webhook_module.HookEventFork).WithPayload(&api.ForkPayload{
+ Forkee: convert.ToRepo(ctx, oldRepo, oldPermission),
+ Repo: convert.ToRepo(ctx, repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }).Notify(ctx)
+
+ u := repo.MustOwner(ctx)
+
+	// Add the created repo to the hook queue after the session is committed.
+ if u.IsOrganization() {
+ newNotifyInput(repo, doer, webhook_module.HookEventRepository).
+ WithRef(git.RefNameFromBranch(oldRepo.DefaultBranch).String()).
+ WithPayload(&api.RepositoryPayload{
+ Action: api.HookRepoCreated,
+ Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
+ Organization: convert.ToUser(ctx, u, nil),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }).Notify(ctx)
+ }
+}
+
+func (n *actionsNotifier) PullRequestReview(ctx context.Context, pr *issues_model.PullRequest, review *issues_model.Review, _ *issues_model.Comment, _ []*user_model.User) {
+ ctx = withMethod(ctx, "PullRequestReview")
+
+ var reviewHookType webhook_module.HookEventType
+
+ switch review.Type {
+ case issues_model.ReviewTypeApprove:
+ reviewHookType = webhook_module.HookEventPullRequestReviewApproved
+ case issues_model.ReviewTypeComment:
+ reviewHookType = webhook_module.HookEventPullRequestReviewComment
+ case issues_model.ReviewTypeReject:
+ reviewHookType = webhook_module.HookEventPullRequestReviewRejected
+ default:
+		// unsupported review webhook type
+		log.Error("Unsupported review webhook type: %v", review.Type)
+ return
+ }
+
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("pr.LoadIssue: %v", err)
+ return
+ }
+
+ permission, err := access_model.GetUserRepoPermission(ctx, review.Issue.Repo, review.Issue.Poster)
+ if err != nil {
+ log.Error("models.GetUserRepoPermission: %v", err)
+ return
+ }
+
+ newNotifyInput(review.Issue.Repo, review.Reviewer, reviewHookType).
+ WithRef(review.CommitID).
+ WithPayload(&api.PullRequestPayload{
+ Action: api.HookIssueReviewed,
+ Index: review.Issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, pr, nil),
+ Repository: convert.ToRepo(ctx, review.Issue.Repo, permission),
+ Sender: convert.ToUser(ctx, review.Reviewer, nil),
+ Review: &api.ReviewPayload{
+ Type: string(reviewHookType),
+ Content: review.Content,
+ },
+ }).Notify(ctx)
+}
+
+func (n *actionsNotifier) PullRequestReviewRequest(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, reviewer *user_model.User, isRequest bool, comment *issues_model.Comment) {
+ if !issue.IsPull {
+ log.Warn("PullRequestReviewRequest: issue is not a pull request: %v", issue.ID)
+ return
+ }
+
+ ctx = withMethod(ctx, "PullRequestReviewRequest")
+
+ permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, doer)
+ if err := issue.LoadPullRequest(ctx); err != nil {
+ log.Error("LoadPullRequest failed: %v", err)
+ return
+ }
+ var action api.HookIssueAction
+ if isRequest {
+ action = api.HookIssueReviewRequested
+ } else {
+ action = api.HookIssueReviewRequestRemoved
+ }
+ newNotifyInputFromIssue(issue, webhook_module.HookEventPullRequestReviewRequest).
+ WithDoer(doer).
+ WithPayload(&api.PullRequestPayload{
+ Action: action,
+ Index: issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
+ RequestedReviewer: convert.ToUser(ctx, reviewer, nil),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }).
+ WithPullRequest(issue.PullRequest).
+ Notify(ctx)
+}
+
+func (*actionsNotifier) MergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
+ ctx = withMethod(ctx, "MergePullRequest")
+
+ // Reload pull request information.
+ if err := pr.LoadAttributes(ctx); err != nil {
+ log.Error("LoadAttributes: %v", err)
+ return
+ }
+
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("LoadAttributes: %v", err)
+ return
+ }
+
+ if err := pr.Issue.LoadRepo(ctx); err != nil {
+ log.Error("pr.Issue.LoadRepo: %v", err)
+ return
+ }
+
+ permission, err := access_model.GetUserRepoPermission(ctx, pr.Issue.Repo, doer)
+ if err != nil {
+ log.Error("models.GetUserRepoPermission: %v", err)
+ return
+ }
+
+	// Merging a pull request calls issue.changeStatus, so it needs to be handled separately.
+ apiPullRequest := &api.PullRequestPayload{
+ Index: pr.Issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, pr, nil),
+ Repository: convert.ToRepo(ctx, pr.Issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ Action: api.HookIssueClosed,
+ }
+
+ newNotifyInput(pr.Issue.Repo, doer, webhook_module.HookEventPullRequest).
+ WithRef(pr.MergedCommitID).
+ WithPayload(apiPullRequest).
+ WithPullRequest(pr).
+ Notify(ctx)
+}
+
+func (n *actionsNotifier) PushCommits(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, opts *repository.PushUpdateOptions, commits *repository.PushCommits) {
+ if git.IsEmptyCommitID(opts.NewCommitID, nil) {
+ log.Trace("new commitID is empty")
+ return
+ }
+
+ ctx = withMethod(ctx, "PushCommits")
+
+ apiPusher := convert.ToUser(ctx, pusher, nil)
+ apiCommits, apiHeadCommit, err := commits.ToAPIPayloadCommits(ctx, repo.RepoPath(), repo.HTMLURL())
+ if err != nil {
+ log.Error("commits.ToAPIPayloadCommits failed: %v", err)
+ return
+ }
+
+ newNotifyInput(repo, pusher, webhook_module.HookEventPush).
+ WithRef(opts.RefFullName.String()).
+ WithPayload(&api.PushPayload{
+ Ref: opts.RefFullName.String(),
+ Before: opts.OldCommitID,
+ After: opts.NewCommitID,
+ CompareURL: setting.AppURL + commits.CompareURL,
+ Commits: apiCommits,
+ HeadCommit: apiHeadCommit,
+ Repo: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
+ Pusher: apiPusher,
+ Sender: apiPusher,
+ }).
+ Notify(ctx)
+}
+
+func (n *actionsNotifier) CreateRef(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, refFullName git.RefName, refID string) {
+ ctx = withMethod(ctx, "CreateRef")
+
+ apiPusher := convert.ToUser(ctx, pusher, nil)
+ apiRepo := convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeNone})
+
+ newNotifyInput(repo, pusher, webhook_module.HookEventCreate).
+ WithRef(refFullName.String()).
+ WithPayload(&api.CreatePayload{
+ Ref: refFullName.String(),
+ Sha: refID,
+ RefType: refFullName.RefType(),
+ Repo: apiRepo,
+ Sender: apiPusher,
+ }).
+ Notify(ctx)
+}
+
+func (n *actionsNotifier) DeleteRef(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, refFullName git.RefName) {
+ ctx = withMethod(ctx, "DeleteRef")
+
+ apiPusher := convert.ToUser(ctx, pusher, nil)
+ apiRepo := convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeNone})
+
+ newNotifyInput(repo, pusher, webhook_module.HookEventDelete).
+ WithPayload(&api.DeletePayload{
+ Ref: refFullName.String(),
+ RefType: refFullName.RefType(),
+ PusherType: api.PusherTypeUser,
+ Repo: apiRepo,
+ Sender: apiPusher,
+ }).
+ Notify(ctx)
+}
+
+func (n *actionsNotifier) SyncPushCommits(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, opts *repository.PushUpdateOptions, commits *repository.PushCommits) {
+ ctx = withMethod(ctx, "SyncPushCommits")
+
+ apiPusher := convert.ToUser(ctx, pusher, nil)
+ apiCommits, apiHeadCommit, err := commits.ToAPIPayloadCommits(ctx, repo.RepoPath(), repo.HTMLURL())
+ if err != nil {
+ log.Error("commits.ToAPIPayloadCommits failed: %v", err)
+ return
+ }
+
+ newNotifyInput(repo, pusher, webhook_module.HookEventPush).
+ WithRef(opts.RefFullName.String()).
+ WithPayload(&api.PushPayload{
+ Ref: opts.RefFullName.String(),
+ Before: opts.OldCommitID,
+ After: opts.NewCommitID,
+ CompareURL: setting.AppURL + commits.CompareURL,
+ Commits: apiCommits,
+ TotalCommits: commits.Len,
+ HeadCommit: apiHeadCommit,
+ Repo: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
+ Pusher: apiPusher,
+ Sender: apiPusher,
+ }).
+ Notify(ctx)
+}
+
+func (n *actionsNotifier) SyncCreateRef(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, refFullName git.RefName, refID string) {
+ ctx = withMethod(ctx, "SyncCreateRef")
+ n.CreateRef(ctx, pusher, repo, refFullName, refID)
+}
+
+func (n *actionsNotifier) SyncDeleteRef(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, refFullName git.RefName) {
+ ctx = withMethod(ctx, "SyncDeleteRef")
+ n.DeleteRef(ctx, pusher, repo, refFullName)
+}
+
+func (n *actionsNotifier) NewRelease(ctx context.Context, rel *repo_model.Release) {
+ ctx = withMethod(ctx, "NewRelease")
+ notifyRelease(ctx, rel.Publisher, rel, api.HookReleasePublished)
+}
+
+func (n *actionsNotifier) UpdateRelease(ctx context.Context, doer *user_model.User, rel *repo_model.Release) {
+ ctx = withMethod(ctx, "UpdateRelease")
+ notifyRelease(ctx, doer, rel, api.HookReleaseUpdated)
+}
+
+func (n *actionsNotifier) DeleteRelease(ctx context.Context, doer *user_model.User, rel *repo_model.Release) {
+ if rel.IsTag {
+		// the same action has already been sent in `PushCommits`, so skip it.
+ return
+ }
+ ctx = withMethod(ctx, "DeleteRelease")
+ notifyRelease(ctx, doer, rel, api.HookReleaseDeleted)
+}
+
+func (n *actionsNotifier) PackageCreate(ctx context.Context, doer *user_model.User, pd *packages_model.PackageDescriptor) {
+ ctx = withMethod(ctx, "PackageCreate")
+ notifyPackage(ctx, doer, pd, api.HookPackageCreated)
+}
+
+func (n *actionsNotifier) PackageDelete(ctx context.Context, doer *user_model.User, pd *packages_model.PackageDescriptor) {
+ ctx = withMethod(ctx, "PackageDelete")
+ notifyPackage(ctx, doer, pd, api.HookPackageDeleted)
+}
+
+func (n *actionsNotifier) AutoMergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
+ ctx = withMethod(ctx, "AutoMergePullRequest")
+ n.MergePullRequest(ctx, doer, pr)
+}
+
+func (n *actionsNotifier) PullRequestSynchronized(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
+ ctx = withMethod(ctx, "PullRequestSynchronized")
+
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("LoadAttributes: %v", err)
+ return
+ }
+
+ if err := pr.Issue.LoadRepo(ctx); err != nil {
+ log.Error("pr.Issue.LoadRepo: %v", err)
+ return
+ }
+
+ newNotifyInput(pr.Issue.Repo, doer, webhook_module.HookEventPullRequestSync).
+ WithPayload(&api.PullRequestPayload{
+ Action: api.HookIssueSynchronized,
+ Index: pr.Issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, pr, nil),
+ Repository: convert.ToRepo(ctx, pr.Issue.Repo, access_model.Permission{AccessMode: perm_model.AccessModeNone}),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }).
+ WithPullRequest(pr).
+ Notify(ctx)
+}
+
+func (n *actionsNotifier) PullRequestChangeTargetBranch(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest, oldBranch string) {
+ ctx = withMethod(ctx, "PullRequestChangeTargetBranch")
+
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("LoadAttributes: %v", err)
+ return
+ }
+
+ if err := pr.Issue.LoadRepo(ctx); err != nil {
+ log.Error("pr.Issue.LoadRepo: %v", err)
+ return
+ }
+
+ permission, _ := access_model.GetUserRepoPermission(ctx, pr.Issue.Repo, pr.Issue.Poster)
+ newNotifyInput(pr.Issue.Repo, doer, webhook_module.HookEventPullRequest).
+ WithPayload(&api.PullRequestPayload{
+ Action: api.HookIssueEdited,
+ Index: pr.Issue.Index,
+ Changes: &api.ChangesPayload{
+ Ref: &api.ChangesFromPayload{
+ From: oldBranch,
+ },
+ },
+ PullRequest: convert.ToAPIPullRequest(ctx, pr, nil),
+ Repository: convert.ToRepo(ctx, pr.Issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }).
+ WithPullRequest(pr).
+ Notify(ctx)
+}
+
+func (n *actionsNotifier) NewWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, page, comment string) {
+ ctx = withMethod(ctx, "NewWikiPage")
+
+ newNotifyInput(repo, doer, webhook_module.HookEventWiki).WithPayload(&api.WikiPayload{
+ Action: api.HookWikiCreated,
+ Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
+ Sender: convert.ToUser(ctx, doer, nil),
+ Page: page,
+ Comment: comment,
+ }).Notify(ctx)
+}
+
+func (n *actionsNotifier) EditWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, page, comment string) {
+ ctx = withMethod(ctx, "EditWikiPage")
+
+ newNotifyInput(repo, doer, webhook_module.HookEventWiki).WithPayload(&api.WikiPayload{
+ Action: api.HookWikiEdited,
+ Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
+ Sender: convert.ToUser(ctx, doer, nil),
+ Page: page,
+ Comment: comment,
+ }).Notify(ctx)
+}
+
+func (n *actionsNotifier) DeleteWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, page string) {
+ ctx = withMethod(ctx, "DeleteWikiPage")
+
+ newNotifyInput(repo, doer, webhook_module.HookEventWiki).WithPayload(&api.WikiPayload{
+ Action: api.HookWikiDeleted,
+ Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
+ Sender: convert.ToUser(ctx, doer, nil),
+ Page: page,
+ }).Notify(ctx)
+}
+
+// MigrateRepository is used to detect workflows after a repository has been migrated
+func (n *actionsNotifier) MigrateRepository(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository) {
+ ctx = withMethod(ctx, "MigrateRepository")
+
+ newNotifyInput(repo, doer, webhook_module.HookEventRepository).WithPayload(&api.RepositoryPayload{
+ Action: api.HookRepoCreated,
+ Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
+ Organization: convert.ToUser(ctx, u, nil),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }).Notify(ctx)
+}
diff --git a/services/actions/notifier_helper.go b/services/actions/notifier_helper.go
new file mode 100644
index 0000000..0a1dbb1
--- /dev/null
+++ b/services/actions/notifier_helper.go
@@ -0,0 +1,590 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "bytes"
+ "context"
+ "errors"
+ "fmt"
+ "slices"
+ "strings"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ packages_model "code.gitea.io/gitea/models/packages"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ unit_model "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ actions_module "code.gitea.io/gitea/modules/actions"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ "code.gitea.io/gitea/services/convert"
+
+ "github.com/nektos/act/pkg/jobparser"
+ "github.com/nektos/act/pkg/model"
+)
+
+type methodCtx struct{}
+
+var methodCtxKey = methodCtx{}
+
+// withMethod sets the notification method that this context currently executes.
+// Used for debugging/troubleshooting purposes.
+func withMethod(ctx context.Context, method string) context.Context {
+ // don't overwrite
+ if v := ctx.Value(methodCtxKey); v != nil {
+ if _, ok := v.(string); ok {
+ return ctx
+ }
+ }
+ return context.WithValue(ctx, methodCtxKey, method)
+}
+
+// getMethod gets the notification method that this context currently executes.
+// Default: "notify"
+// Used for debugging/troubleshooting purposes.
+func getMethod(ctx context.Context) string {
+ if v := ctx.Value(methodCtxKey); v != nil {
+ if s, ok := v.(string); ok {
+ return s
+ }
+ }
+ return "notify"
+}
+
+type notifyInput struct {
+ // required
+ Repo *repo_model.Repository
+ Doer *user_model.User
+ Event webhook_module.HookEventType
+
+ // optional
+ Ref git.RefName
+ Payload api.Payloader
+ PullRequest *issues_model.PullRequest
+}
+
+func newNotifyInput(repo *repo_model.Repository, doer *user_model.User, event webhook_module.HookEventType) *notifyInput {
+ return &notifyInput{
+ Repo: repo,
+ Doer: doer,
+ Event: event,
+ }
+}
+
+func newNotifyInputForSchedules(repo *repo_model.Repository) *notifyInput {
+	// the doer here will be ignored, as we force using the actions user when handling schedules
+ return newNotifyInput(repo, user_model.NewActionsUser(), webhook_module.HookEventSchedule)
+}
+
+func (input *notifyInput) WithDoer(doer *user_model.User) *notifyInput {
+ input.Doer = doer
+ return input
+}
+
+func (input *notifyInput) WithRef(ref string) *notifyInput {
+ input.Ref = git.RefName(ref)
+ return input
+}
+
+func (input *notifyInput) WithPayload(payload api.Payloader) *notifyInput {
+ input.Payload = payload
+ return input
+}
+
+func (input *notifyInput) WithPullRequest(pr *issues_model.PullRequest) *notifyInput {
+ input.PullRequest = pr
+ if input.Ref == "" {
+ input.Ref = git.RefName(pr.GetGitRefName())
+ }
+ return input
+}
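+
+// A typical call chain, as a sketch mirroring the calls in notifier.go:
+//
+//	newNotifyInput(repo, pusher, webhook_module.HookEventPush).
+//		WithRef(opts.RefFullName.String()).
+//		WithPayload(&api.PushPayload{ /* ... */ }).
+//		Notify(ctx)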
+
+func (input *notifyInput) Notify(ctx context.Context) {
+ log.Trace("execute %v for event %v whose doer is %v", getMethod(ctx), input.Event, input.Doer.Name)
+
+ if err := notify(ctx, input); err != nil {
+ log.Error("an error occurred while executing the %s actions method: %v", getMethod(ctx), err)
+ }
+}
+
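+// notify is the common workflow-detection path: it ignores events produced
+// by the actions user (only refreshing schedules on default-branch pushes),
+// checks that the repo has Actions enabled, resolves the ref and commit to
+// scan, detects matching workflows (including pull_request_target workflows
+// on the base branch), and finally creates the runs and schedule tasks.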
+func notify(ctx context.Context, input *notifyInput) error {
+ shouldDetectSchedules := input.Event == webhook_module.HookEventPush && input.Ref.BranchName() == input.Repo.DefaultBranch
+ if input.Doer.IsActions() {
+		// avoid triggering cyclically; for example:
+		// a comment on an issue would trigger the runner to add a new comment as a reply,
+		// and the new comment would trigger the runner again.
+ log.Debug("ignore executing %v for event %v whose doer is %v", getMethod(ctx), input.Event, input.Doer.Name)
+
+ // we should update schedule tasks in this case, because
+ // 1. schedule tasks cannot be triggered by other events, so cyclic triggering will not occur
+ // 2. some schedule tasks may update the repo periodically, so the refs of schedule tasks need to be updated
+ if shouldDetectSchedules {
+ return DetectAndHandleSchedules(ctx, input.Repo)
+ }
+
+ return nil
+ }
+ if input.Repo.IsEmpty || input.Repo.IsArchived {
+ return nil
+ }
+ if unit_model.TypeActions.UnitGlobalDisabled() {
+ if err := actions_model.CleanRepoScheduleTasks(ctx, input.Repo, true); err != nil {
+ log.Error("CleanRepoScheduleTasks: %v", err)
+ }
+ return nil
+ }
+ if err := input.Repo.LoadUnits(ctx); err != nil {
+ return fmt.Errorf("repo.LoadUnits: %w", err)
+ } else if !input.Repo.UnitEnabled(ctx, unit_model.TypeActions) {
+ return nil
+ }
+
+ gitRepo, err := gitrepo.OpenRepository(context.Background(), input.Repo)
+ if err != nil {
+ return fmt.Errorf("git.OpenRepository: %w", err)
+ }
+ defer gitRepo.Close()
+
+ ref := input.Ref
+ if ref.BranchName() != input.Repo.DefaultBranch && actions_module.IsDefaultBranchWorkflow(input.Event) {
+ if ref != "" {
+ log.Warn("Event %q should only trigger workflows on the default branch, but its ref is %q. Will fall back to the default branch",
+ input.Event, ref)
+ }
+ ref = git.RefNameFromBranch(input.Repo.DefaultBranch)
+ }
+ if ref == "" {
+ log.Warn("Ref of event %q is empty, will fall back to the default branch", input.Event)
+ ref = git.RefNameFromBranch(input.Repo.DefaultBranch)
+ }
+
+ commitID, err := gitRepo.GetRefCommitID(ref.String())
+ if err != nil {
+ return fmt.Errorf("gitRepo.GetRefCommitID: %w", err)
+ }
+
+ // Get the commit object for the ref
+ commit, err := gitRepo.GetCommit(commitID)
+ if err != nil {
+ return fmt.Errorf("gitRepo.GetCommit: %w", err)
+ }
+
+ if skipWorkflows(input, commit) {
+ return nil
+ }
+
+ if SkipPullRequestEvent(ctx, input.Event, input.Repo.ID, commit.ID.String()) {
+ log.Trace("repo %s with commit %s skip event %v", input.Repo.RepoPath(), commit.ID, input.Event)
+ return nil
+ }
+
+ var detectedWorkflows []*actions_module.DetectedWorkflow
+ actionsConfig := input.Repo.MustGetUnit(ctx, unit_model.TypeActions).ActionsConfig()
+ workflows, schedules, err := actions_module.DetectWorkflows(gitRepo, commit,
+ input.Event,
+ input.Payload,
+ shouldDetectSchedules,
+ )
+ if err != nil {
+ return fmt.Errorf("DetectWorkflows: %w", err)
+ }
+
+ log.Trace("repo %s with commit %s event %s find %d workflows and %d schedules",
+ input.Repo.RepoPath(),
+ commit.ID,
+ input.Event,
+ len(workflows),
+ len(schedules),
+ )
+
+ for _, wf := range workflows {
+ if actionsConfig.IsWorkflowDisabled(wf.EntryName) {
+ log.Trace("repo %s has disable workflows %s", input.Repo.RepoPath(), wf.EntryName)
+ continue
+ }
+
+ if wf.TriggerEvent.Name != actions_module.GithubEventPullRequestTarget {
+ detectedWorkflows = append(detectedWorkflows, wf)
+ }
+ }
+
+ if input.PullRequest != nil {
+ // detect pull_request_target workflows
+ baseRef := git.BranchPrefix + input.PullRequest.BaseBranch
+ baseCommit, err := gitRepo.GetCommit(baseRef)
+ if err != nil {
+ if prp, ok := input.Payload.(*api.PullRequestPayload); ok && errors.Is(err, util.ErrNotExist) {
+ // the baseBranch was deleted and the PR closed: the action can be skipped
+ if prp.Action == api.HookIssueClosed {
+ return nil
+ }
+ }
+ return fmt.Errorf("gitRepo.GetCommit: %w", err)
+ }
+ baseWorkflows, _, err := actions_module.DetectWorkflows(gitRepo, baseCommit, input.Event, input.Payload, false)
+ if err != nil {
+ return fmt.Errorf("DetectWorkflows: %w", err)
+ }
+ if len(baseWorkflows) == 0 {
+ log.Trace("repo %s with commit %s couldn't find pull_request_target workflows", input.Repo.RepoPath(), baseCommit.ID)
+ } else {
+ for _, wf := range baseWorkflows {
+ if wf.TriggerEvent.Name == actions_module.GithubEventPullRequestTarget {
+ detectedWorkflows = append(detectedWorkflows, wf)
+ }
+ }
+ }
+ }
+
+ if shouldDetectSchedules {
+ if err := handleSchedules(ctx, schedules, commit, input, ref.String()); err != nil {
+ return err
+ }
+ }
+
+ return handleWorkflows(ctx, detectedWorkflows, commit, input, ref.String())
+}
+
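+// SkipPullRequestEvent returns true if a pull_request_sync event should be
+// skipped because a pull_request run already exists for the same repo and
+// commit SHA, which avoids running the same workflows twice for one commit.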
+func SkipPullRequestEvent(ctx context.Context, event webhook_module.HookEventType, repoID int64, commitSHA string) bool {
+ if event != webhook_module.HookEventPullRequestSync {
+ return false
+ }
+
+ run := actions_model.ActionRun{
+ Event: webhook_module.HookEventPullRequest,
+ RepoID: repoID,
+ CommitSHA: commitSHA,
+ }
+ exist, err := db.GetEngine(ctx).Exist(&run)
+ if err != nil {
+ log.Error("Exist ActionRun %v: %v", run, err)
+ return false
+ }
+ return exist
+}
+
+func skipWorkflows(input *notifyInput, commit *git.Commit) bool {
+	// skip workflow runs if the commit message or PR title contains a configured skip-ci string and the event is push or pull_request(_sync)
+ // https://docs.github.com/en/actions/managing-workflow-runs/skipping-workflow-runs
+ skipWorkflowEvents := []webhook_module.HookEventType{
+ webhook_module.HookEventPush,
+ webhook_module.HookEventPullRequest,
+ webhook_module.HookEventPullRequestSync,
+ }
+ if slices.Contains(skipWorkflowEvents, input.Event) {
+ for _, s := range setting.Actions.SkipWorkflowStrings {
+ if input.PullRequest != nil && strings.Contains(input.PullRequest.Issue.Title, s) {
+ log.Debug("repo %s: skipped run for pr %v because of %s string", input.Repo.RepoPath(), input.PullRequest.Issue.ID, s)
+ return true
+ }
+ if strings.Contains(commit.CommitMessage, s) {
+ log.Debug("repo %s with commit %s: skipped run because of %s string", input.Repo.RepoPath(), commit.ID, s)
+ return true
+ }
+ }
+ }
+ return false
+}
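+
+// For example, assuming the usual skip strings are configured (upstream
+// defaults include "[skip ci]" and "[ci skip]"), a push whose head commit
+// message is "fix typo [skip ci]" would produce no workflow runs.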
+
+func handleWorkflows(
+ ctx context.Context,
+ detectedWorkflows []*actions_module.DetectedWorkflow,
+ commit *git.Commit,
+ input *notifyInput,
+ ref string,
+) error {
+ if len(detectedWorkflows) == 0 {
+ log.Trace("repo %s with commit %s couldn't find workflows", input.Repo.RepoPath(), commit.ID)
+ return nil
+ }
+
+ p, err := json.Marshal(input.Payload)
+ if err != nil {
+ return fmt.Errorf("json.Marshal: %w", err)
+ }
+
+ isForkPullRequest := false
+ if pr := input.PullRequest; pr != nil {
+ switch pr.Flow {
+ case issues_model.PullRequestFlowGithub:
+ isForkPullRequest = pr.IsFromFork()
+ case issues_model.PullRequestFlowAGit:
+			// There is no fork concept in the AGit flow: anyone with read permission can push refs/for/<target-branch>/<topic-branch> to the repo.
+			// So we treat it as a fork pull request, because it may come from an untrusted user.
+ isForkPullRequest = true
+ default:
+ // unknown flow, assume it's a fork pull request to be safe
+ isForkPullRequest = true
+ }
+ }
+
+ for _, dwf := range detectedWorkflows {
+ run := &actions_model.ActionRun{
+ Title: strings.SplitN(commit.CommitMessage, "\n", 2)[0],
+ RepoID: input.Repo.ID,
+ OwnerID: input.Repo.OwnerID,
+ WorkflowID: dwf.EntryName,
+ TriggerUserID: input.Doer.ID,
+ Ref: ref,
+ CommitSHA: commit.ID.String(),
+ IsForkPullRequest: isForkPullRequest,
+ Event: input.Event,
+ EventPayload: string(p),
+ TriggerEvent: dwf.TriggerEvent.Name,
+ Status: actions_model.StatusWaiting,
+ }
+
+ need, err := ifNeedApproval(ctx, run, input.Repo, input.Doer)
+ if err != nil {
+ log.Error("check if need approval for repo %d with user %d: %v", input.Repo.ID, input.Doer.ID, err)
+ continue
+ }
+
+ run.NeedApproval = need
+
+ if err := run.LoadAttributes(ctx); err != nil {
+ log.Error("LoadAttributes: %v", err)
+ continue
+ }
+
+ vars, err := actions_model.GetVariablesOfRun(ctx, run)
+ if err != nil {
+ log.Error("GetVariablesOfRun: %v", err)
+ continue
+ }
+
+ jobs, err := jobparser.Parse(dwf.Content, jobparser.WithVars(vars))
+ if err != nil {
+ log.Error("jobparser.Parse: %v", err)
+ continue
+ }
+
+ // cancel running jobs if the event is push or pull_request_sync
+ if run.Event == webhook_module.HookEventPush ||
+ run.Event == webhook_module.HookEventPullRequestSync {
+ if err := actions_model.CancelPreviousJobs(
+ ctx,
+ run.RepoID,
+ run.Ref,
+ run.WorkflowID,
+ run.Event,
+ ); err != nil {
+ log.Error("CancelPreviousJobs: %v", err)
+ }
+ }
+
+ if err := actions_model.InsertRun(ctx, run, jobs); err != nil {
+ log.Error("InsertRun: %v", err)
+ continue
+ }
+
+ alljobs, err := db.Find[actions_model.ActionRunJob](ctx, actions_model.FindRunJobOptions{RunID: run.ID})
+ if err != nil {
+ log.Error("FindRunJobs: %v", err)
+ continue
+ }
+ CreateCommitStatus(ctx, alljobs...)
+ }
+ return nil
+}
+
+func newNotifyInputFromIssue(issue *issues_model.Issue, event webhook_module.HookEventType) *notifyInput {
+ return newNotifyInput(issue.Repo, issue.Poster, event)
+}
+
+func notifyRelease(ctx context.Context, doer *user_model.User, rel *repo_model.Release, action api.HookReleaseAction) {
+ if err := rel.LoadAttributes(ctx); err != nil {
+ log.Error("LoadAttributes: %v", err)
+ return
+ }
+
+ permission, _ := access_model.GetUserRepoPermission(ctx, rel.Repo, doer)
+
+ newNotifyInput(rel.Repo, doer, webhook_module.HookEventRelease).
+ WithRef(git.RefNameFromTag(rel.TagName).String()).
+ WithPayload(&api.ReleasePayload{
+ Action: action,
+ Release: convert.ToAPIRelease(ctx, rel.Repo, rel),
+ Repository: convert.ToRepo(ctx, rel.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }).
+ Notify(ctx)
+}
+
+func notifyPackage(ctx context.Context, sender *user_model.User, pd *packages_model.PackageDescriptor, action api.HookPackageAction) {
+ if pd.Repository == nil {
+		// A package uploaded to an organization may not belong to any repository,
+		// so the repository can be nil; however, actions can't support that yet.
+ // See https://github.com/go-gitea/gitea/pull/17940
+ return
+ }
+
+ apiPackage, err := convert.ToPackage(ctx, pd, sender)
+ if err != nil {
+ log.Error("Error converting package: %v", err)
+ return
+ }
+
+ newNotifyInput(pd.Repository, sender, webhook_module.HookEventPackage).
+ WithPayload(&api.PackagePayload{
+ Action: action,
+ Package: apiPackage,
+ Sender: convert.ToUser(ctx, sender, nil),
+ }).
+ Notify(ctx)
+}
+
+func ifNeedApproval(ctx context.Context, run *actions_model.ActionRun, repo *repo_model.Repository, user *user_model.User) (bool, error) {
+ // 1. don't need approval if it's not a fork PR
+	// 2. don't need approval if the event is `pull_request_target` since the workflow will run in the context of the base branch
+ // see https://docs.github.com/en/actions/managing-workflow-runs/approving-workflow-runs-from-public-forks#about-workflow-runs-from-public-forks
+ if !run.IsForkPullRequest || run.TriggerEvent == actions_module.GithubEventPullRequestTarget {
+ return false, nil
+ }
+
+ // always need approval if the user is restricted
+ if user.IsRestricted {
+ log.Trace("need approval because user %d is restricted", user.ID)
+ return true, nil
+ }
+
+ // don't need approval if the user can write
+ if perm, err := access_model.GetUserRepoPermission(ctx, repo, user); err != nil {
+ return false, fmt.Errorf("GetUserRepoPermission: %w", err)
+ } else if perm.CanWrite(unit_model.TypeActions) {
+ log.Trace("do not need approval because user %d can write", user.ID)
+ return false, nil
+ }
+
+ // don't need approval if the user has been approved before
+ if count, err := db.Count[actions_model.ActionRun](ctx, actions_model.FindRunOptions{
+ RepoID: repo.ID,
+ TriggerUserID: user.ID,
+ Approved: true,
+ }); err != nil {
+ return false, fmt.Errorf("CountRuns: %w", err)
+ } else if count > 0 {
+ log.Trace("do not need approval because user %d has been approved before", user.ID)
+ return false, nil
+ }
+
+ // otherwise, need approval
+ log.Trace("need approval because it's the first time user %d triggered actions", user.ID)
+ return true, nil
+}
+
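+// handleSchedules replaces the repo's schedule tasks with the schedule
+// workflows detected on the given commit: existing tasks are cleaned up
+// first, then one ActionSchedule row with its cron specs is created per
+// workflow that declares an `on: schedule` trigger.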
+func handleSchedules(
+ ctx context.Context,
+ detectedWorkflows []*actions_module.DetectedWorkflow,
+ commit *git.Commit,
+ input *notifyInput,
+ _ string,
+) error {
+ branch, err := commit.GetBranchName()
+ if err != nil {
+ return err
+ }
+ if branch != input.Repo.DefaultBranch {
+ log.Trace("commit branch is not default branch in repo")
+ return nil
+ }
+
+ if count, err := db.Count[actions_model.ActionSchedule](ctx, actions_model.FindScheduleOptions{RepoID: input.Repo.ID}); err != nil {
+ log.Error("CountSchedules: %v", err)
+ return err
+ } else if count > 0 {
+ if err := actions_model.CleanRepoScheduleTasks(ctx, input.Repo, false); err != nil {
+ log.Error("CleanRepoScheduleTasks: %v", err)
+ }
+ }
+
+ if len(detectedWorkflows) == 0 {
+ log.Trace("repo %s with commit %s couldn't find schedules", input.Repo.RepoPath(), commit.ID)
+ return nil
+ }
+
+ payload := &api.SchedulePayload{
+ Action: api.HookScheduleCreated,
+ }
+
+ p, err := json.Marshal(payload)
+ if err != nil {
+ return fmt.Errorf("json.Marshal: %w", err)
+ }
+
+ crons := make([]*actions_model.ActionSchedule, 0, len(detectedWorkflows))
+ for _, dwf := range detectedWorkflows {
+		// Check the cron job condition. Schedules only work on the default branch.
+ workflow, err := model.ReadWorkflow(bytes.NewReader(dwf.Content))
+ if err != nil {
+ log.Error("ReadWorkflow: %v", err)
+ continue
+ }
+ schedules := workflow.OnSchedule()
+ if len(schedules) == 0 {
+ log.Warn("no schedule event")
+ continue
+ }
+
+ run := &actions_model.ActionSchedule{
+ Title: strings.SplitN(commit.CommitMessage, "\n", 2)[0],
+ RepoID: input.Repo.ID,
+ OwnerID: input.Repo.OwnerID,
+ WorkflowID: dwf.EntryName,
+ TriggerUserID: user_model.ActionsUserID,
+ Ref: input.Repo.DefaultBranch,
+ CommitSHA: commit.ID.String(),
+ Event: input.Event,
+ EventPayload: string(p),
+ Specs: schedules,
+ Content: dwf.Content,
+ }
+ crons = append(crons, run)
+ }
+
+ return actions_model.CreateScheduleTask(ctx, crons)
+}
+
+// DetectAndHandleSchedules detects the schedule workflows on the default branch and create schedule tasks
+func DetectAndHandleSchedules(ctx context.Context, repo *repo_model.Repository) error {
+ if repo.IsEmpty || repo.IsArchived {
+ return nil
+ }
+
+ gitRepo, err := gitrepo.OpenRepository(context.Background(), repo)
+ if err != nil {
+ return fmt.Errorf("git.OpenRepository: %w", err)
+ }
+ defer gitRepo.Close()
+
+ // Only detect schedule workflows on the default branch
+ commit, err := gitRepo.GetCommit(repo.DefaultBranch)
+ if err != nil {
+ return fmt.Errorf("gitRepo.GetCommit: %w", err)
+ }
+ scheduleWorkflows, err := actions_module.DetectScheduledWorkflows(gitRepo, commit)
+ if err != nil {
+ return fmt.Errorf("detect schedule workflows: %w", err)
+ }
+ if len(scheduleWorkflows) == 0 {
+ return nil
+ }
+
+	// We need a notifyInput to call handleSchedules.
+	// If the repo is a mirror, the commit author may be an external user,
+	// so we use the actions user as the Doer of the notifyInput.
+ notifyInput := newNotifyInputForSchedules(repo)
+
+ return handleSchedules(ctx, scheduleWorkflows, commit, notifyInput, repo.DefaultBranch)
+}
diff --git a/services/actions/notifier_helper_test.go b/services/actions/notifier_helper_test.go
new file mode 100644
index 0000000..0fa40c0
--- /dev/null
+++ b/services/actions/notifier_helper_test.go
@@ -0,0 +1,51 @@
+// Copyright 2024 The Forgejo Authors
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "testing"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/unittest"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func Test_SkipPullRequestEvent(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ repoID := int64(1)
+ commitSHA := "1234"
+
+ // event is not webhook_module.HookEventPullRequestSync, never skip
+ assert.False(t, SkipPullRequestEvent(db.DefaultContext, webhook_module.HookEventPullRequest, repoID, commitSHA))
+
+ // event is webhook_module.HookEventPullRequestSync but there is nothing in the ActionRun table, do not skip
+ assert.False(t, SkipPullRequestEvent(db.DefaultContext, webhook_module.HookEventPullRequestSync, repoID, commitSHA))
+
+ // there is a webhook_module.HookEventPullRequest event but the SHA is different, do not skip
+ index := int64(1)
+ run := &actions_model.ActionRun{
+ Index: index,
+ Event: webhook_module.HookEventPullRequest,
+ RepoID: repoID,
+ CommitSHA: "othersha",
+ }
+ unittest.AssertSuccessfulInsert(t, run)
+ assert.False(t, SkipPullRequestEvent(db.DefaultContext, webhook_module.HookEventPullRequestSync, repoID, commitSHA))
+
+ // there already is a webhook_module.HookEventPullRequest with the same SHA, skip
+ index++
+ run = &actions_model.ActionRun{
+ Index: index,
+ Event: webhook_module.HookEventPullRequest,
+ RepoID: repoID,
+ CommitSHA: commitSHA,
+ }
+ unittest.AssertSuccessfulInsert(t, run)
+ assert.True(t, SkipPullRequestEvent(db.DefaultContext, webhook_module.HookEventPullRequestSync, repoID, commitSHA))
+}
diff --git a/services/actions/rerun.go b/services/actions/rerun.go
new file mode 100644
index 0000000..60f6650
--- /dev/null
+++ b/services/actions/rerun.go
@@ -0,0 +1,38 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/modules/container"
+)
+
+// GetAllRerunJobs returns the given job and all jobs that transitively depend
+// on it, i.e. every job that needs to be rerun when the given job is rerun.
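+// For example, given a chain where job2 needs job1 and job3 needs job2,
+// rerunning job1 returns job1, job2 and job3, while rerunning job3 returns
+// only job3 (a sketch of the traversal; see rerun_test.go for the full cases).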
+func GetAllRerunJobs(job *actions_model.ActionRunJob, allJobs []*actions_model.ActionRunJob) []*actions_model.ActionRunJob {
+ rerunJobs := []*actions_model.ActionRunJob{job}
+ rerunJobsIDSet := make(container.Set[string])
+ rerunJobsIDSet.Add(job.JobID)
+
+ for {
+ found := false
+ for _, j := range allJobs {
+ if rerunJobsIDSet.Contains(j.JobID) {
+ continue
+ }
+ for _, need := range j.Needs {
+ if rerunJobsIDSet.Contains(need) {
+ found = true
+ rerunJobs = append(rerunJobs, j)
+ rerunJobsIDSet.Add(j.JobID)
+ break
+ }
+ }
+ }
+ if !found {
+ break
+ }
+ }
+
+ return rerunJobs
+}
diff --git a/services/actions/rerun_test.go b/services/actions/rerun_test.go
new file mode 100644
index 0000000..a98de7b
--- /dev/null
+++ b/services/actions/rerun_test.go
@@ -0,0 +1,48 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "testing"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestGetAllRerunJobs(t *testing.T) {
+ job1 := &actions_model.ActionRunJob{JobID: "job1"}
+ job2 := &actions_model.ActionRunJob{JobID: "job2", Needs: []string{"job1"}}
+ job3 := &actions_model.ActionRunJob{JobID: "job3", Needs: []string{"job2"}}
+ job4 := &actions_model.ActionRunJob{JobID: "job4", Needs: []string{"job2", "job3"}}
+
+ jobs := []*actions_model.ActionRunJob{job1, job2, job3, job4}
+
+ testCases := []struct {
+ job *actions_model.ActionRunJob
+ rerunJobs []*actions_model.ActionRunJob
+ }{
+ {
+ job1,
+ []*actions_model.ActionRunJob{job1, job2, job3, job4},
+ },
+ {
+ job2,
+ []*actions_model.ActionRunJob{job2, job3, job4},
+ },
+ {
+ job3,
+ []*actions_model.ActionRunJob{job3, job4},
+ },
+ {
+ job4,
+ []*actions_model.ActionRunJob{job4},
+ },
+ }
+
+ for _, tc := range testCases {
+ rerunJobs := GetAllRerunJobs(tc.job, jobs)
+ assert.ElementsMatch(t, tc.rerunJobs, rerunJobs)
+ }
+}
diff --git a/services/actions/schedule_tasks.go b/services/actions/schedule_tasks.go
new file mode 100644
index 0000000..18f3324
--- /dev/null
+++ b/services/actions/schedule_tasks.go
@@ -0,0 +1,154 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "context"
+ "fmt"
+ "time"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/timeutil"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+
+ "github.com/nektos/act/pkg/jobparser"
+)
+
+// StartScheduleTasks starts all schedule tasks that are due to run
+func StartScheduleTasks(ctx context.Context) error {
+ return startTasks(ctx)
+}
+
+// startTasks retrieves specifications in pages, creates a schedule task for each specification,
+// and updates the specification's next run time and previous run time.
+// The function returns an error if there's an issue with finding or updating the specifications.
+func startTasks(ctx context.Context) error {
+ // Set the page size
+ pageSize := 50
+
+ // Retrieve specs in pages until all specs have been retrieved
+ now := time.Now()
+ for page := 1; ; page++ {
+ // Retrieve the specs for the current page
+ specs, _, err := actions_model.FindSpecs(ctx, actions_model.FindSpecOptions{
+ ListOptions: db.ListOptions{
+ Page: page,
+ PageSize: pageSize,
+ },
+ Next: now.Unix(),
+ })
+ if err != nil {
+ return fmt.Errorf("find specs: %w", err)
+ }
+
+ if err := specs.LoadRepos(ctx); err != nil {
+ return fmt.Errorf("LoadRepos: %w", err)
+ }
+
+ // Loop through each spec and create a schedule task for it
+ for _, row := range specs {
+ // cancel running jobs if the event is push
+ if row.Schedule.Event == webhook_module.HookEventPush {
+ // cancel running jobs of the same workflow
+ if err := actions_model.CancelPreviousJobs(
+ ctx,
+ row.RepoID,
+ row.Schedule.Ref,
+ row.Schedule.WorkflowID,
+ webhook_module.HookEventSchedule,
+ ); err != nil {
+ log.Error("CancelPreviousJobs: %v", err)
+ }
+ }
+
+ if row.Repo.IsArchived {
+ // Skip if the repo is archived
+ continue
+ }
+
+ cfg, err := row.Repo.GetUnit(ctx, unit.TypeActions)
+ if err != nil {
+ if repo_model.IsErrUnitTypeNotExist(err) {
+				// Skip if the Actions unit of this repo is disabled.
+ continue
+ }
+ return fmt.Errorf("GetUnit: %w", err)
+ }
+ if cfg.ActionsConfig().IsWorkflowDisabled(row.Schedule.WorkflowID) {
+ continue
+ }
+
+ if err := CreateScheduleTask(ctx, row.Schedule); err != nil {
+ log.Error("CreateScheduleTask: %v", err)
+ return err
+ }
+
+ // Parse the spec
+ schedule, err := row.Parse()
+ if err != nil {
+ log.Error("Parse: %v", err)
+ return err
+ }
+
+ // Update the spec's next run time and previous run time
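+		// (the one-minute offset presumably keeps a spec that just fired from
+		// being picked up again by the next poll before its window has passed)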
+ row.Prev = row.Next
+ row.Next = timeutil.TimeStamp(schedule.Next(now.Add(1 * time.Minute)).Unix())
+ if err := actions_model.UpdateScheduleSpec(ctx, row, "prev", "next"); err != nil {
+ log.Error("UpdateScheduleSpec: %v", err)
+ return err
+ }
+ }
+
+ // Stop if all specs have been retrieved
+ if len(specs) < pageSize {
+ break
+ }
+ }
+
+ return nil
+}
+
+// CreateScheduleTask creates a scheduled task from a cron action schedule.
+// It creates an action run based on the schedule, inserts it into the database, and creates commit statuses for each job.
+func CreateScheduleTask(ctx context.Context, cron *actions_model.ActionSchedule) error {
+ // Create a new action run based on the schedule
+ run := &actions_model.ActionRun{
+ Title: cron.Title,
+ RepoID: cron.RepoID,
+ OwnerID: cron.OwnerID,
+ WorkflowID: cron.WorkflowID,
+ TriggerUserID: cron.TriggerUserID,
+ Ref: cron.Ref,
+ CommitSHA: cron.CommitSHA,
+ Event: cron.Event,
+ EventPayload: cron.EventPayload,
+ TriggerEvent: string(webhook_module.HookEventSchedule),
+ ScheduleID: cron.ID,
+ Status: actions_model.StatusWaiting,
+ }
+
+ vars, err := actions_model.GetVariablesOfRun(ctx, run)
+ if err != nil {
+ log.Error("GetVariablesOfRun: %v", err)
+ return err
+ }
+
+ // Parse the workflow specification from the cron schedule
+ workflows, err := jobparser.Parse(cron.Content, jobparser.WithVars(vars))
+ if err != nil {
+ return err
+ }
+
+ // Insert the action run and its associated jobs into the database
+ if err := actions_model.InsertRun(ctx, run, workflows); err != nil {
+ return err
+ }
+
+ // Return nil if no errors occurred
+ return nil
+}
diff --git a/services/actions/variables.go b/services/actions/variables.go
new file mode 100644
index 0000000..8dde9c4
--- /dev/null
+++ b/services/actions/variables.go
@@ -0,0 +1,100 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "context"
+ "regexp"
+ "strings"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/util"
+ secret_service "code.gitea.io/gitea/services/secrets"
+)
+
+func CreateVariable(ctx context.Context, ownerID, repoID int64, name, data string) (*actions_model.ActionVariable, error) {
+ if err := secret_service.ValidateName(name); err != nil {
+ return nil, err
+ }
+
+ if err := envNameCIRegexMatch(name); err != nil {
+ return nil, err
+ }
+
+ v, err := actions_model.InsertVariable(ctx, ownerID, repoID, name, util.ReserveLineBreakForTextarea(data))
+ if err != nil {
+ return nil, err
+ }
+
+ return v, nil
+}
+
+func UpdateVariable(ctx context.Context, variableID int64, name, data string) (bool, error) {
+ if err := secret_service.ValidateName(name); err != nil {
+ return false, err
+ }
+
+ if err := envNameCIRegexMatch(name); err != nil {
+ return false, err
+ }
+
+ return actions_model.UpdateVariable(ctx, &actions_model.ActionVariable{
+ ID: variableID,
+ Name: strings.ToUpper(name),
+ Data: util.ReserveLineBreakForTextarea(data),
+ })
+}
+
+func DeleteVariableByID(ctx context.Context, variableID int64) error {
+ return actions_model.DeleteVariable(ctx, variableID)
+}
+
+func DeleteVariableByName(ctx context.Context, ownerID, repoID int64, name string) error {
+ if err := secret_service.ValidateName(name); err != nil {
+ return err
+ }
+
+ if err := envNameCIRegexMatch(name); err != nil {
+ return err
+ }
+
+ v, err := GetVariable(ctx, actions_model.FindVariablesOpts{
+ OwnerID: ownerID,
+ RepoID: repoID,
+ Name: name,
+ })
+ if err != nil {
+ return err
+ }
+
+ return actions_model.DeleteVariable(ctx, v.ID)
+}
+
+func GetVariable(ctx context.Context, opts actions_model.FindVariablesOpts) (*actions_model.ActionVariable, error) {
+ vars, err := actions_model.FindVariables(ctx, opts)
+ if err != nil {
+ return nil, err
+ }
+ if len(vars) != 1 {
+ return nil, util.NewNotExistErrorf("variable not found")
+ }
+ return vars[0], nil
+}
+
+// Regular expressions for validating `variables` and `secrets` names.
+// References:
+// https://docs.github.com/en/actions/learn-github-actions/variables#naming-conventions-for-configuration-variables
+// https://docs.github.com/en/actions/security-guides/encrypted-secrets#naming-your-secrets
+var (
+ forbiddenEnvNameCIRx = regexp.MustCompile("(?i)^CI")
+)
+
+func envNameCIRegexMatch(name string) error {
+ if forbiddenEnvNameCIRx.MatchString(name) {
+		log.Error("Env name must not start with CI")
+		return util.NewInvalidArgumentErrorf("env name must not start with CI")
+ }
+ return nil
+}
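
Reviewer note: the forbidden-name pattern above is anchored only at the start, so any name beginning with "CI" (case-insensitively) is rejected, not just the literal name "CI", while "CI" elsewhere in a name is allowed. A standalone sketch to demonstrate; the sample names are made up:

```go
package main

import (
	"fmt"
	"regexp"
)

// Same pattern as forbiddenEnvNameCIRx above.
var forbiddenCI = regexp.MustCompile("(?i)^CI")

func main() {
	for _, name := range []string{"CI", "ci_token", "CIRCLE_SHA", "MY_CI_FLAG", "DEPLOY_TARGET"} {
		// Only the first three start with "CI" and are therefore forbidden.
		fmt.Printf("%-14s forbidden=%v\n", name, forbiddenCI.MatchString(name))
	}
}
```
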
diff --git a/services/actions/workflows.go b/services/actions/workflows.go
new file mode 100644
index 0000000..e2fb316
--- /dev/null
+++ b/services/actions/workflows.go
@@ -0,0 +1,173 @@
+// Copyright The Forgejo Authors.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "bytes"
+ "context"
+ "errors"
+ "fmt"
+ "strconv"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/perm"
+ "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/actions"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/webhook"
+ "code.gitea.io/gitea/services/convert"
+
+ "github.com/nektos/act/pkg/jobparser"
+ act_model "github.com/nektos/act/pkg/model"
+)
+
+type InputRequiredErr struct {
+ Name string
+}
+
+func (err InputRequiredErr) Error() string {
+ return fmt.Sprintf("input required for '%s'", err.Name)
+}
+
+func IsInputRequiredErr(err error) bool {
+ _, ok := err.(InputRequiredErr)
+ return ok
+}
+
+type Workflow struct {
+ WorkflowID string
+ Ref string
+ Commit *git.Commit
+ GitEntry *git.TreeEntry
+}
+
+type InputValueGetter func(key string) string
+
+func (entry *Workflow) Dispatch(ctx context.Context, inputGetter InputValueGetter, repo *repo_model.Repository, doer *user.User) error {
+ content, err := actions.GetContentFromEntry(entry.GitEntry)
+ if err != nil {
+ return err
+ }
+
+ wf, err := act_model.ReadWorkflow(bytes.NewReader(content))
+ if err != nil {
+ return err
+ }
+
+ fullWorkflowID := ".forgejo/workflows/" + entry.WorkflowID
+
+ title := wf.Name
+ if len(title) < 1 {
+ title = fullWorkflowID
+ }
+
+ inputs := make(map[string]string)
+ if workflowDispatch := wf.WorkflowDispatchConfig(); workflowDispatch != nil {
+ for key, input := range workflowDispatch.Inputs {
+ val := inputGetter(key)
+ if len(val) == 0 {
+ val = input.Default
+ if len(val) == 0 {
+ if input.Required {
+ name := input.Description
+ if len(name) == 0 {
+ name = key
+ }
+ return InputRequiredErr{Name: name}
+ }
+ continue
+ }
+ } else if input.Type == "boolean" {
+				// "boolean" inputs are rendered as a checkbox in HTML, so the submitted form value is "on" when checked
+ val = strconv.FormatBool(val == "on")
+ }
+ inputs[key] = val
+ }
+ }
+
+ if int64(len(inputs)) > setting.Actions.LimitDispatchInputs {
+		return errors.New("too many inputs")
+ }
+
+ payload := &structs.WorkflowDispatchPayload{
+ Inputs: inputs,
+ Ref: entry.Ref,
+ Repository: convert.ToRepo(ctx, repo, access.Permission{AccessMode: perm.AccessModeNone}),
+ Sender: convert.ToUser(ctx, doer, nil),
+ Workflow: fullWorkflowID,
+ }
+
+ p, err := json.Marshal(payload)
+ if err != nil {
+ return err
+ }
+
+ run := &actions_model.ActionRun{
+ Title: title,
+ RepoID: repo.ID,
+ Repo: repo,
+ OwnerID: repo.OwnerID,
+ WorkflowID: entry.WorkflowID,
+ TriggerUserID: doer.ID,
+ TriggerUser: doer,
+ Ref: entry.Ref,
+ CommitSHA: entry.Commit.ID.String(),
+ Event: webhook.HookEventWorkflowDispatch,
+ EventPayload: string(p),
+ TriggerEvent: string(webhook.HookEventWorkflowDispatch),
+ Status: actions_model.StatusWaiting,
+ }
+
+ vars, err := actions_model.GetVariablesOfRun(ctx, run)
+ if err != nil {
+ return err
+ }
+
+ jobs, err := jobparser.Parse(content, jobparser.WithVars(vars))
+ if err != nil {
+ return err
+ }
+
+ return actions_model.InsertRun(ctx, run, jobs)
+}
+
+func GetWorkflowFromCommit(gitRepo *git.Repository, ref, workflowID string) (*Workflow, error) {
+ ref, err := gitRepo.ExpandRef(ref)
+ if err != nil {
+ return nil, err
+ }
+
+ commit, err := gitRepo.GetCommit(ref)
+ if err != nil {
+ return nil, err
+ }
+
+ entries, err := actions.ListWorkflows(commit)
+ if err != nil {
+ return nil, err
+ }
+
+ var workflowEntry *git.TreeEntry
+ for _, entry := range entries {
+ if entry.Name() == workflowID {
+ workflowEntry = entry
+ break
+ }
+ }
+ if workflowEntry == nil {
+ return nil, errors.New("workflow not found")
+ }
+
+ return &Workflow{
+ WorkflowID: workflowID,
+ Ref: ref,
+ Commit: commit,
+ GitEntry: workflowEntry,
+ }, nil
+}
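
Reviewer note: an InputValueGetter is just a key lookup, typically backed by the submitted form, and a "boolean" input arrives as the checkbox value "on". A small self-contained sketch of how a caller might wire this up; the form field names are invented for illustration:

```go
package main

import (
	"fmt"
	"net/url"
	"strconv"
)

func main() {
	// A form-backed getter, as a web handler might pass to Workflow.Dispatch.
	form := url.Values{"environment": {"staging"}, "debug": {"on"}}
	getter := func(key string) string { return form.Get(key) }

	// HTML checkboxes submit "on" when ticked, so a "boolean" input is
	// normalized to "true"/"false" exactly as Dispatch does above.
	debug := strconv.FormatBool(getter("debug") == "on")
	fmt.Println(getter("environment"), debug) // staging true
}
```
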
diff --git a/services/agit/agit.go b/services/agit/agit.go
new file mode 100644
index 0000000..a18f9ef
--- /dev/null
+++ b/services/agit/agit.go
@@ -0,0 +1,268 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package agit
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "strings"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/git/pushoptions"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/private"
+ notify_service "code.gitea.io/gitea/services/notify"
+ pull_service "code.gitea.io/gitea/services/pull"
+)
+
+// ProcReceive handles the proc-receive hook for AGit flow pushes
+func ProcReceive(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, opts *private.HookOptions) ([]private.HookProcReceiveRefResult, error) {
+ results := make([]private.HookProcReceiveRefResult, 0, len(opts.OldCommitIDs))
+
+ topicBranch, _ := opts.GetGitPushOptions().GetString(pushoptions.AgitTopic)
+ _, forcePush := opts.GetGitPushOptions().GetString(pushoptions.AgitForcePush)
+ title, hasTitle := opts.GetGitPushOptions().GetString(pushoptions.AgitTitle)
+ description, hasDesc := opts.GetGitPushOptions().GetString(pushoptions.AgitDescription)
+
+ objectFormat := git.ObjectFormatFromName(repo.ObjectFormatName)
+
+ pusher, err := user_model.GetUserByID(ctx, opts.UserID)
+ if err != nil {
+ return nil, fmt.Errorf("failed to get user[%d]: %w", opts.UserID, err)
+ }
+
+ for i := range opts.OldCommitIDs {
+		// Skip this change if the new commit ID is the empty object ID, i.e. the push is a deletion.
+ if opts.NewCommitIDs[i] == objectFormat.EmptyObjectID().String() {
+ results = append(results, private.HookProcReceiveRefResult{
+ OriginalRef: opts.RefFullNames[i],
+ OldOID: opts.OldCommitIDs[i],
+ NewOID: opts.NewCommitIDs[i],
+ Err: "Cannot delete a non-existent branch.",
+ })
+ continue
+ }
+
+ // Only process references that are in the form of refs/for/
+ if !opts.RefFullNames[i].IsFor() {
+ results = append(results, private.HookProcReceiveRefResult{
+ IsNotMatched: true,
+ OriginalRef: opts.RefFullNames[i],
+ })
+ continue
+ }
+
+		// Get everything after the refs/for/ prefix.
+		baseBranchName := opts.RefFullNames[i].ForBranchName()
+		currentTopicBranch := topicBranch
+
+ // If the reference was given in the format of refs/for/<target-branch>/<topic-branch>,
+ // where <target-branch> and <topic-branch> can contain slashes, we need to iteratively
+ // search for what the target and topic branch is.
+ if !gitRepo.IsBranchExist(baseBranchName) {
+ for p, v := range baseBranchName {
+ if v == '/' && gitRepo.IsBranchExist(baseBranchName[:p]) && p != len(baseBranchName)-1 {
+					currentTopicBranch = baseBranchName[p+1:]
+ baseBranchName = baseBranchName[:p]
+ break
+ }
+ }
+ }
+
+		if len(currentTopicBranch) == 0 {
+ results = append(results, private.HookProcReceiveRefResult{
+ OriginalRef: opts.RefFullNames[i],
+ OldOID: opts.OldCommitIDs[i],
+ NewOID: opts.NewCommitIDs[i],
+ Err: "The topic-branch option is not set",
+ })
+ continue
+ }
+
+ // Include the user's name in the head branch, to avoid conflicts
+ // with other users.
+		headBranch := currentTopicBranch
+		userName := strings.ToLower(opts.UserName)
+		if !strings.HasPrefix(currentTopicBranch, userName+"/") {
+			headBranch = userName + "/" + currentTopicBranch
+ }
+
+		// Check if an AGit pull request already exists for this branch.
+ pr, err := issues_model.GetUnmergedPullRequest(ctx, repo.ID, repo.ID, headBranch, baseBranchName, issues_model.PullRequestFlowAGit)
+ if err != nil {
+ if !issues_model.IsErrPullRequestNotExist(err) {
+ return nil, fmt.Errorf("failed to get unmerged AGit flow pull request in repository %q: %w", repo.FullName(), err)
+ }
+
+ // Check if the changes are already in the target branch.
+ stdout, _, gitErr := git.NewCommand(ctx, "branch", "--contains").AddDynamicArguments(opts.NewCommitIDs[i], baseBranchName).RunStdString(&git.RunOpts{Dir: repo.RepoPath()})
+ if gitErr != nil {
+				return nil, fmt.Errorf("failed to check if the target branch already contains the new commit in repository %q: %w", repo.FullName(), gitErr)
+ }
+ if len(stdout) > 0 {
+ results = append(results, private.HookProcReceiveRefResult{
+ OriginalRef: opts.RefFullNames[i],
+ OldOID: opts.OldCommitIDs[i],
+ NewOID: opts.NewCommitIDs[i],
+ Err: "The target branch already contains this commit",
+ })
+ continue
+ }
+
+ // Automatically fill out the title and the description from the first commit.
+ shouldGetCommit := len(title) == 0 || len(description) == 0
+
+ var commit *git.Commit
+ if shouldGetCommit {
+ commit, err = gitRepo.GetCommit(opts.NewCommitIDs[i])
+ if err != nil {
+ return nil, fmt.Errorf("failed to get commit %s in repository %q: %w", opts.NewCommitIDs[i], repo.FullName(), err)
+ }
+ }
+ if !hasTitle || len(title) == 0 {
+ title = strings.Split(commit.CommitMessage, "\n")[0]
+ }
+ if !hasDesc || len(description) == 0 {
+ _, description, _ = strings.Cut(commit.CommitMessage, "\n\n")
+ }
+
+ prIssue := &issues_model.Issue{
+ RepoID: repo.ID,
+ Title: title,
+ PosterID: pusher.ID,
+ Poster: pusher,
+ IsPull: true,
+ Content: description,
+ }
+
+ pr := &issues_model.PullRequest{
+ HeadRepoID: repo.ID,
+ BaseRepoID: repo.ID,
+ HeadBranch: headBranch,
+ HeadCommitID: opts.NewCommitIDs[i],
+ BaseBranch: baseBranchName,
+ HeadRepo: repo,
+ BaseRepo: repo,
+ MergeBase: "",
+ Type: issues_model.PullRequestGitea,
+ Flow: issues_model.PullRequestFlowAGit,
+ }
+
+ if err := pull_service.NewPullRequest(ctx, repo, prIssue, []int64{}, []string{}, pr, []int64{}); err != nil {
+ return nil, fmt.Errorf("unable to create new pull request: %w", err)
+ }
+
+ log.Trace("Pull request created: %d/%d", repo.ID, prIssue.ID)
+
+ results = append(results, private.HookProcReceiveRefResult{
+ Ref: pr.GetGitRefName(),
+ OriginalRef: opts.RefFullNames[i],
+ OldOID: objectFormat.EmptyObjectID().String(),
+ NewOID: opts.NewCommitIDs[i],
+ })
+ continue
+ }
+
+ // Update an existing pull request.
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ return nil, fmt.Errorf("unable to load base repository for PR[%d]: %w", pr.ID, err)
+ }
+
+ oldCommitID, err := gitRepo.GetRefCommitID(pr.GetGitRefName())
+ if err != nil {
+ return nil, fmt.Errorf("unable to get commit id of reference[%s] in base repository for PR[%d]: %w", pr.GetGitRefName(), pr.ID, err)
+ }
+
+ // Do not process this change if nothing was changed.
+ if oldCommitID == opts.NewCommitIDs[i] {
+ results = append(results, private.HookProcReceiveRefResult{
+ OriginalRef: opts.RefFullNames[i],
+ OldOID: opts.OldCommitIDs[i],
+ NewOID: opts.NewCommitIDs[i],
+ Err: "The new commit is the same as the old commit",
+ })
+ continue
+ }
+
+ // If the force push option was not set, ensure that this change isn't a force push.
+ if !forcePush {
+ output, _, err := git.NewCommand(ctx, "rev-list", "--max-count=1").AddDynamicArguments(oldCommitID, "^"+opts.NewCommitIDs[i]).RunStdString(&git.RunOpts{Dir: repo.RepoPath(), Env: os.Environ()})
+ if err != nil {
+ return nil, fmt.Errorf("failed to detect a force push: %w", err)
+ } else if len(output) > 0 {
+ results = append(results, private.HookProcReceiveRefResult{
+ OriginalRef: opts.RefFullNames[i],
+ OldOID: opts.OldCommitIDs[i],
+ NewOID: opts.NewCommitIDs[i],
+ Err: "Updates were rejected because the tip of your current branch is behind its remote counterpart. If this is intentional, set the `force-push` option by adding `-o force-push=true` to your `git push` command.",
+ })
+ continue
+ }
+ }
+
+ // Set the new commit as reference of the pull request.
+ pr.HeadCommitID = opts.NewCommitIDs[i]
+ if err = pull_service.UpdateRef(ctx, pr); err != nil {
+ return nil, fmt.Errorf("failed to update the reference of the pull request: %w", err)
+ }
+
+ // TODO: refactor to unify with `pull_service.AddTestPullRequestTask`
+
+ // Add the pull request to the merge conflicting checker queue.
+ pull_service.AddToTaskQueue(ctx, pr)
+
+ if err := pr.LoadIssue(ctx); err != nil {
+ return nil, fmt.Errorf("failed to load the issue of the pull request: %w", err)
+ }
+
+ // Validate pull request.
+ pull_service.ValidatePullRequest(ctx, pr, oldCommitID, opts.NewCommitIDs[i], pusher)
+
+ // TODO: call `InvalidateCodeComments`
+
+ // Create and notify about the new commits.
+ comment, err := pull_service.CreatePushPullComment(ctx, pusher, pr, oldCommitID, opts.NewCommitIDs[i])
+ if err == nil && comment != nil {
+ notify_service.PullRequestPushCommits(ctx, pusher, pr, comment)
+ }
+ notify_service.PullRequestSynchronized(ctx, pusher, pr)
+
+ // this always seems to be false
+ isForcePush := comment != nil && comment.IsForcePush
+
+ results = append(results, private.HookProcReceiveRefResult{
+ OldOID: oldCommitID,
+ NewOID: opts.NewCommitIDs[i],
+ Ref: pr.GetGitRefName(),
+ OriginalRef: opts.RefFullNames[i],
+ IsForcePush: isForcePush,
+ })
+ }
+
+ return results, nil
+}
+
+// UserNameChanged handles user name changes for AGit flow pull requests
+func UserNameChanged(ctx context.Context, user *user_model.User, newName string) error {
+ pulls, err := issues_model.GetAllUnmergedAgitPullRequestByPoster(ctx, user.ID)
+ if err != nil {
+ return err
+ }
+
+ newName = strings.ToLower(newName)
+
+ for _, pull := range pulls {
+ pull.HeadBranch = strings.TrimPrefix(pull.HeadBranch, user.LowerName+"/")
+ pull.HeadBranch = newName + "/" + pull.HeadBranch
+ if err = pull.UpdateCols(ctx, "head_branch"); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
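
Reviewer note: the base/topic resolution above is the subtle part of an AGit push (`git push origin HEAD:refs/for/<base> -o topic=<topic>`, or with the topic packed into the ref as refs/for/<base>/<topic>). When branch names contain slashes, the loop picks the first slash-delimited prefix that is an existing branch. A standalone sketch of that resolution; branchExists and the branch names are illustrative:

```go
package main

import "fmt"

// branchExists stands in for gitRepo.IsBranchExist.
func branchExists(name string) bool {
	return name == "main" || name == "release/v1.22"
}

// splitForRef resolves the part after refs/for/ the way ProcReceive does:
// if the whole string is a branch there is no embedded topic; otherwise the
// first slash-delimited prefix that is an existing branch becomes the base.
func splitForRef(afterFor string) (base, topic string) {
	if branchExists(afterFor) {
		return afterFor, ""
	}
	for p, c := range afterFor {
		if c == '/' && branchExists(afterFor[:p]) && p != len(afterFor)-1 {
			return afterFor[:p], afterFor[p+1:]
		}
	}
	return afterFor, ""
}

func main() {
	fmt.Println(splitForRef("main/fix-typo"))        // main fix-typo
	fmt.Println(splitForRef("release/v1.22/hotfix")) // release/v1.22 hotfix
}
```
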
diff --git a/services/asymkey/deploy_key.go b/services/asymkey/deploy_key.go
new file mode 100644
index 0000000..e127cbf
--- /dev/null
+++ b/services/asymkey/deploy_key.go
@@ -0,0 +1,31 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package asymkey
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models"
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+)
+
+// DeleteDeployKey deletes a deploy key from the database and rewrites the authorized_keys file if needed.
+func DeleteDeployKey(ctx context.Context, doer *user_model.User, id int64) error {
+ dbCtx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ if err := models.DeleteDeployKey(dbCtx, doer, id); err != nil {
+ return err
+ }
+ if err := committer.Commit(); err != nil {
+ return err
+ }
+
+ return asymkey_model.RewriteAllPublicKeys(ctx)
+}
diff --git a/services/asymkey/main_test.go b/services/asymkey/main_test.go
new file mode 100644
index 0000000..3505b26
--- /dev/null
+++ b/services/asymkey/main_test.go
@@ -0,0 +1,17 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package asymkey
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+
+ _ "code.gitea.io/gitea/models/actions"
+ _ "code.gitea.io/gitea/models/activities"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
diff --git a/services/asymkey/sign.go b/services/asymkey/sign.go
new file mode 100644
index 0000000..8fb5699
--- /dev/null
+++ b/services/asymkey/sign.go
@@ -0,0 +1,392 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package asymkey
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/process"
+ "code.gitea.io/gitea/modules/setting"
+)
+
+type signingMode string
+
+const (
+ never signingMode = "never"
+ always signingMode = "always"
+ pubkey signingMode = "pubkey"
+ twofa signingMode = "twofa"
+ parentSigned signingMode = "parentsigned"
+ baseSigned signingMode = "basesigned"
+ headSigned signingMode = "headsigned"
+ commitsSigned signingMode = "commitssigned"
+ approved signingMode = "approved"
+ noKey signingMode = "nokey"
+)
+
+func signingModeFromStrings(modeStrings []string) []signingMode {
+ returnable := make([]signingMode, 0, len(modeStrings))
+ for _, mode := range modeStrings {
+ signMode := signingMode(strings.ToLower(strings.TrimSpace(mode)))
+ switch signMode {
+ case never:
+ return []signingMode{never}
+ case always:
+ return []signingMode{always}
+		case pubkey, twofa, parentSigned, baseSigned, headSigned, approved, commitsSigned:
+			returnable = append(returnable, signMode)
+ }
+ }
+ if len(returnable) == 0 {
+ return []signingMode{never}
+ }
+ return returnable
+}
+
+// ErrWontSign explains the first reason why a commit would not be signed.
+// There may be other reasons; this is just the first reason found.
+type ErrWontSign struct {
+ Reason signingMode
+}
+
+func (e *ErrWontSign) Error() string {
+ return fmt.Sprintf("won't sign: %s", e.Reason)
+}
+
+// IsErrWontSign checks if an error is a ErrWontSign
+func IsErrWontSign(err error) bool {
+ _, ok := err.(*ErrWontSign)
+ return ok
+}
+
+// SigningKey returns the KeyID and git Signature for the repo
+func SigningKey(ctx context.Context, repoPath string) (string, *git.Signature) {
+ if setting.Repository.Signing.SigningKey == "none" {
+ return "", nil
+ }
+
+ if setting.Repository.Signing.SigningKey == "default" || setting.Repository.Signing.SigningKey == "" {
+ // Can ignore the error here as it means that commit.gpgsign is not set
+ value, _, _ := git.NewCommand(ctx, "config", "--get", "commit.gpgsign").RunStdString(&git.RunOpts{Dir: repoPath})
+ sign, valid := git.ParseBool(strings.TrimSpace(value))
+ if !sign || !valid {
+ return "", nil
+ }
+
+ signingKey, _, _ := git.NewCommand(ctx, "config", "--get", "user.signingkey").RunStdString(&git.RunOpts{Dir: repoPath})
+ signingName, _, _ := git.NewCommand(ctx, "config", "--get", "user.name").RunStdString(&git.RunOpts{Dir: repoPath})
+ signingEmail, _, _ := git.NewCommand(ctx, "config", "--get", "user.email").RunStdString(&git.RunOpts{Dir: repoPath})
+ return strings.TrimSpace(signingKey), &git.Signature{
+ Name: strings.TrimSpace(signingName),
+ Email: strings.TrimSpace(signingEmail),
+ }
+ }
+
+ return setting.Repository.Signing.SigningKey, &git.Signature{
+ Name: setting.Repository.Signing.SigningName,
+ Email: setting.Repository.Signing.SigningEmail,
+ }
+}
+
+// PublicSigningKey gets the public signing key within a provided repository directory
+func PublicSigningKey(ctx context.Context, repoPath string) (string, error) {
+ signingKey, _ := SigningKey(ctx, repoPath)
+ if signingKey == "" {
+ return "", nil
+ }
+
+ content, stderr, err := process.GetManager().ExecDir(ctx, -1, repoPath,
+ "gpg --export -a", "gpg", "--export", "-a", signingKey)
+ if err != nil {
+ log.Error("Unable to get default signing key in %s: %s, %s, %v", repoPath, signingKey, stderr, err)
+ return "", err
+ }
+ return content, nil
+}
+
+// SignInitialCommit determines if we should sign the initial commit to this repository
+func SignInitialCommit(ctx context.Context, repoPath string, u *user_model.User) (bool, string, *git.Signature, error) {
+ rules := signingModeFromStrings(setting.Repository.Signing.InitialCommit)
+ signingKey, sig := SigningKey(ctx, repoPath)
+ if signingKey == "" {
+ return false, "", nil, &ErrWontSign{noKey}
+ }
+
+Loop:
+ for _, rule := range rules {
+ switch rule {
+ case never:
+ return false, "", nil, &ErrWontSign{never}
+ case always:
+ break Loop
+ case pubkey:
+ keys, err := db.Find[asymkey_model.GPGKey](ctx, asymkey_model.FindGPGKeyOptions{
+ OwnerID: u.ID,
+ IncludeSubKeys: true,
+ })
+ if err != nil {
+ return false, "", nil, err
+ }
+ if len(keys) == 0 {
+ return false, "", nil, &ErrWontSign{pubkey}
+ }
+ case twofa:
+ twofaModel, err := auth.GetTwoFactorByUID(ctx, u.ID)
+ if err != nil && !auth.IsErrTwoFactorNotEnrolled(err) {
+ return false, "", nil, err
+ }
+ if twofaModel == nil {
+ return false, "", nil, &ErrWontSign{twofa}
+ }
+ }
+ }
+ return true, signingKey, sig, nil
+}
+
+// SignWikiCommit determines if we should sign the commits to this repository wiki
+func SignWikiCommit(ctx context.Context, repo *repo_model.Repository, u *user_model.User) (bool, string, *git.Signature, error) {
+ repoWikiPath := repo.WikiPath()
+ rules := signingModeFromStrings(setting.Repository.Signing.Wiki)
+ signingKey, sig := SigningKey(ctx, repoWikiPath)
+ if signingKey == "" {
+ return false, "", nil, &ErrWontSign{noKey}
+ }
+
+Loop:
+ for _, rule := range rules {
+ switch rule {
+ case never:
+ return false, "", nil, &ErrWontSign{never}
+ case always:
+ break Loop
+ case pubkey:
+ keys, err := db.Find[asymkey_model.GPGKey](ctx, asymkey_model.FindGPGKeyOptions{
+ OwnerID: u.ID,
+ IncludeSubKeys: true,
+ })
+ if err != nil {
+ return false, "", nil, err
+ }
+ if len(keys) == 0 {
+ return false, "", nil, &ErrWontSign{pubkey}
+ }
+ case twofa:
+ twofaModel, err := auth.GetTwoFactorByUID(ctx, u.ID)
+ if err != nil && !auth.IsErrTwoFactorNotEnrolled(err) {
+ return false, "", nil, err
+ }
+ if twofaModel == nil {
+ return false, "", nil, &ErrWontSign{twofa}
+ }
+ case parentSigned:
+ gitRepo, err := gitrepo.OpenWikiRepository(ctx, repo)
+ if err != nil {
+ return false, "", nil, err
+ }
+ defer gitRepo.Close()
+ commit, err := gitRepo.GetCommit("HEAD")
+ if err != nil {
+ return false, "", nil, err
+ }
+ if commit.Signature == nil {
+ return false, "", nil, &ErrWontSign{parentSigned}
+ }
+ verification := asymkey_model.ParseCommitWithSignature(ctx, commit)
+ if !verification.Verified {
+ return false, "", nil, &ErrWontSign{parentSigned}
+ }
+ }
+ }
+ return true, signingKey, sig, nil
+}
+
+// SignCRUDAction determines if we should sign a CRUD commit to this repository
+func SignCRUDAction(ctx context.Context, repoPath string, u *user_model.User, tmpBasePath, parentCommit string) (bool, string, *git.Signature, error) {
+ rules := signingModeFromStrings(setting.Repository.Signing.CRUDActions)
+ signingKey, sig := SigningKey(ctx, repoPath)
+ if signingKey == "" {
+ return false, "", nil, &ErrWontSign{noKey}
+ }
+
+Loop:
+ for _, rule := range rules {
+ switch rule {
+ case never:
+ return false, "", nil, &ErrWontSign{never}
+ case always:
+ break Loop
+ case pubkey:
+ keys, err := db.Find[asymkey_model.GPGKey](ctx, asymkey_model.FindGPGKeyOptions{
+ OwnerID: u.ID,
+ IncludeSubKeys: true,
+ })
+ if err != nil {
+ return false, "", nil, err
+ }
+ if len(keys) == 0 {
+ return false, "", nil, &ErrWontSign{pubkey}
+ }
+ case twofa:
+ twofaModel, err := auth.GetTwoFactorByUID(ctx, u.ID)
+ if err != nil && !auth.IsErrTwoFactorNotEnrolled(err) {
+ return false, "", nil, err
+ }
+ if twofaModel == nil {
+ return false, "", nil, &ErrWontSign{twofa}
+ }
+ case parentSigned:
+ gitRepo, err := git.OpenRepository(ctx, tmpBasePath)
+ if err != nil {
+ return false, "", nil, err
+ }
+ defer gitRepo.Close()
+ commit, err := gitRepo.GetCommit(parentCommit)
+ if err != nil {
+ return false, "", nil, err
+ }
+ if commit.Signature == nil {
+ return false, "", nil, &ErrWontSign{parentSigned}
+ }
+ verification := asymkey_model.ParseCommitWithSignature(ctx, commit)
+ if !verification.Verified {
+ return false, "", nil, &ErrWontSign{parentSigned}
+ }
+ }
+ }
+ return true, signingKey, sig, nil
+}
+
+// SignMerge determines if we should sign a PR merge commit to the base repository
+func SignMerge(ctx context.Context, pr *issues_model.PullRequest, u *user_model.User, tmpBasePath, baseCommit, headCommit string) (bool, string, *git.Signature, error) {
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ log.Error("Unable to get Base Repo for pull request")
+ return false, "", nil, err
+ }
+ repo := pr.BaseRepo
+
+ signingKey, signer := SigningKey(ctx, repo.RepoPath())
+ if signingKey == "" {
+ return false, "", nil, &ErrWontSign{noKey}
+ }
+ rules := signingModeFromStrings(setting.Repository.Signing.Merges)
+
+ var gitRepo *git.Repository
+ var err error
+
+Loop:
+ for _, rule := range rules {
+ switch rule {
+ case never:
+ return false, "", nil, &ErrWontSign{never}
+ case always:
+ break Loop
+ case pubkey:
+ keys, err := db.Find[asymkey_model.GPGKey](ctx, asymkey_model.FindGPGKeyOptions{
+ OwnerID: u.ID,
+ IncludeSubKeys: true,
+ })
+ if err != nil {
+ return false, "", nil, err
+ }
+ if len(keys) == 0 {
+ return false, "", nil, &ErrWontSign{pubkey}
+ }
+ case twofa:
+ twofaModel, err := auth.GetTwoFactorByUID(ctx, u.ID)
+ if err != nil && !auth.IsErrTwoFactorNotEnrolled(err) {
+ return false, "", nil, err
+ }
+ if twofaModel == nil {
+ return false, "", nil, &ErrWontSign{twofa}
+ }
+ case approved:
+ protectedBranch, err := git_model.GetFirstMatchProtectedBranchRule(ctx, repo.ID, pr.BaseBranch)
+ if err != nil {
+ return false, "", nil, err
+ }
+ if protectedBranch == nil {
+ return false, "", nil, &ErrWontSign{approved}
+ }
+ if issues_model.GetGrantedApprovalsCount(ctx, protectedBranch, pr) < 1 {
+ return false, "", nil, &ErrWontSign{approved}
+ }
+ case baseSigned:
+ if gitRepo == nil {
+ gitRepo, err = git.OpenRepository(ctx, tmpBasePath)
+ if err != nil {
+ return false, "", nil, err
+ }
+ defer gitRepo.Close()
+ }
+ commit, err := gitRepo.GetCommit(baseCommit)
+ if err != nil {
+ return false, "", nil, err
+ }
+ verification := asymkey_model.ParseCommitWithSignature(ctx, commit)
+ if !verification.Verified {
+ return false, "", nil, &ErrWontSign{baseSigned}
+ }
+ case headSigned:
+ if gitRepo == nil {
+ gitRepo, err = git.OpenRepository(ctx, tmpBasePath)
+ if err != nil {
+ return false, "", nil, err
+ }
+ defer gitRepo.Close()
+ }
+ commit, err := gitRepo.GetCommit(headCommit)
+ if err != nil {
+ return false, "", nil, err
+ }
+ verification := asymkey_model.ParseCommitWithSignature(ctx, commit)
+ if !verification.Verified {
+ return false, "", nil, &ErrWontSign{headSigned}
+ }
+ case commitsSigned:
+ if gitRepo == nil {
+ gitRepo, err = git.OpenRepository(ctx, tmpBasePath)
+ if err != nil {
+ return false, "", nil, err
+ }
+ defer gitRepo.Close()
+ }
+ commit, err := gitRepo.GetCommit(headCommit)
+ if err != nil {
+ return false, "", nil, err
+ }
+ verification := asymkey_model.ParseCommitWithSignature(ctx, commit)
+ if !verification.Verified {
+ return false, "", nil, &ErrWontSign{commitsSigned}
+ }
+ // need to work out merge-base
+ mergeBaseCommit, _, err := gitRepo.GetMergeBase("", baseCommit, headCommit)
+ if err != nil {
+ return false, "", nil, err
+ }
+ commitList, err := commit.CommitsBeforeUntil(mergeBaseCommit)
+ if err != nil {
+ return false, "", nil, err
+ }
+ for _, commit := range commitList {
+ verification := asymkey_model.ParseCommitWithSignature(ctx, commit)
+ if !verification.Verified {
+ return false, "", nil, &ErrWontSign{commitsSigned}
+ }
+ }
+ }
+ }
+ return true, signingKey, signer, nil
+}
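
Reviewer note: the rule parsing at the top of this file has precedence worth spelling out: "never" or "always" anywhere in the configured list wins outright, unknown entries are dropped, and an empty result falls back to "never". A trimmed-down, runnable copy using plain strings:

```go
package main

import (
	"fmt"
	"strings"
)

// A trimmed-down copy of signingModeFromStrings, using plain strings.
func parseRules(modes []string) []string {
	known := map[string]bool{
		"pubkey": true, "twofa": true, "parentsigned": true,
		"basesigned": true, "headsigned": true, "approved": true,
		"commitssigned": true,
	}
	out := make([]string, 0, len(modes))
	for _, m := range modes {
		m = strings.ToLower(strings.TrimSpace(m))
		switch {
		case m == "never":
			return []string{"never"} // short-circuits the whole list
		case m == "always":
			return []string{"always"}
		case known[m]:
			out = append(out, m)
		}
	}
	if len(out) == 0 {
		return []string{"never"} // nothing valid configured
	}
	return out
}

func main() {
	fmt.Println(parseRules([]string{"pubkey", "twofa"})) // [pubkey twofa]
	fmt.Println(parseRules([]string{"pubkey", "never"})) // [never]
	fmt.Println(parseRules([]string{"bogus"}))           // [never]
}
```
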
diff --git a/services/asymkey/ssh_key.go b/services/asymkey/ssh_key.go
new file mode 100644
index 0000000..83d7eda
--- /dev/null
+++ b/services/asymkey/ssh_key.go
@@ -0,0 +1,50 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package asymkey
+
+import (
+ "context"
+
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+)
+
+// DeletePublicKey deletes SSH key information both in database and authorized_keys file.
+func DeletePublicKey(ctx context.Context, doer *user_model.User, id int64) (err error) {
+ key, err := asymkey_model.GetPublicKeyByID(ctx, id)
+ if err != nil {
+ return err
+ }
+
+ // Check if user has access to delete this key.
+ if !doer.IsAdmin && doer.ID != key.OwnerID {
+ return asymkey_model.ErrKeyAccessDenied{
+ UserID: doer.ID,
+ KeyID: key.ID,
+ Note: "public",
+ }
+ }
+
+ dbCtx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ if _, err = db.DeleteByID[asymkey_model.PublicKey](dbCtx, id); err != nil {
+ return err
+ }
+
+ if err = committer.Commit(); err != nil {
+ return err
+ }
+ committer.Close()
+
+ if key.Type == asymkey_model.KeyTypePrincipal {
+ return asymkey_model.RewriteAllPrincipalKeys(ctx)
+ }
+
+ return asymkey_model.RewriteAllPublicKeys(ctx)
+}
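
Reviewer note: DeletePublicKey (like DeleteDeployKey earlier) follows the TxContext idiom: defer committer.Close() as a rollback net, Commit() on the success path, then do the slow authorized_keys rewrite outside the transaction; the explicit Close() after Commit() is a harmless early release. A toy model of that committer contract; the type and the prints are illustrative:

```go
package main

import "fmt"

// committer mimics db.TxContext's contract: Close rolls back
// unless Commit has already run.
type committer struct{ committed bool }

func (c *committer) Commit() error {
	c.committed = true
	fmt.Println("COMMIT")
	return nil
}

func (c *committer) Close() error {
	if !c.committed {
		fmt.Println("ROLLBACK")
	}
	return nil
}

func deleteKey(fail bool) error {
	c := &committer{}
	defer c.Close() // rollback net for every early return

	if fail {
		return fmt.Errorf("delete failed")
	}
	return c.Commit() // deferred Close becomes a no-op
}

func main() {
	_ = deleteKey(true)  // ROLLBACK
	_ = deleteKey(false) // COMMIT
}
```
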
diff --git a/services/asymkey/ssh_key_test.go b/services/asymkey/ssh_key_test.go
new file mode 100644
index 0000000..d667a02
--- /dev/null
+++ b/services/asymkey/ssh_key_test.go
@@ -0,0 +1,85 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package asymkey
+
+import (
+ "testing"
+
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestAddLdapSSHPublicKeys(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ s := &auth.Source{ID: 1}
+
+ testCases := []struct {
+ keyString string
+ number int
+ keyContents []string
+ }{
+ {
+ keyString: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM= nocomment\n",
+ number: 1,
+ keyContents: []string{
+ "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM=",
+ },
+ },
+ {
+ keyString: `ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM= nocomment
+ssh-dss AAAAB3NzaC1kc3MAAACBAOChCC7lf6Uo9n7BmZ6M8St19PZf4Tn59NriyboW2x/DZuYAz3ibZ2OkQ3S0SqDIa0HXSEJ1zaExQdmbO+Ux/wsytWZmCczWOVsaszBZSl90q8UnWlSH6P+/YA+RWJm5SFtuV9PtGIhyZgoNuz5kBQ7K139wuQsecdKktISwTakzAAAAFQCzKsO2JhNKlL+wwwLGOcLffoAmkwAAAIBpK7/3xvduajLBD/9vASqBQIHrgK2J+wiQnIb/Wzy0UsVmvfn8A+udRbBo+csM8xrSnlnlJnjkJS3qiM5g+eTwsLIV1IdKPEwmwB+VcP53Cw6lSyWyJcvhFb0N6s08NZysLzvj0N+ZC/FnhKTLzIyMtkHf/IrPCwlM+pV/M/96YgAAAIEAqQcGn9CKgzgPaguIZooTAOQdvBLMI5y0bQjOW6734XOpqQGf/Kra90wpoasLKZjSYKNPjE+FRUOrStLrxcNs4BeVKhy2PYTRnybfYVk1/dmKgH6P1YSRONsGKvTsH6c5IyCRG0ncCgYeF8tXppyd642982daopE7zQ/NPAnJfag= nocomment`,
+ number: 2,
+ keyContents: []string{
+ "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM=",
+ "ssh-dss AAAAB3NzaC1kc3MAAACBAOChCC7lf6Uo9n7BmZ6M8St19PZf4Tn59NriyboW2x/DZuYAz3ibZ2OkQ3S0SqDIa0HXSEJ1zaExQdmbO+Ux/wsytWZmCczWOVsaszBZSl90q8UnWlSH6P+/YA+RWJm5SFtuV9PtGIhyZgoNuz5kBQ7K139wuQsecdKktISwTakzAAAAFQCzKsO2JhNKlL+wwwLGOcLffoAmkwAAAIBpK7/3xvduajLBD/9vASqBQIHrgK2J+wiQnIb/Wzy0UsVmvfn8A+udRbBo+csM8xrSnlnlJnjkJS3qiM5g+eTwsLIV1IdKPEwmwB+VcP53Cw6lSyWyJcvhFb0N6s08NZysLzvj0N+ZC/FnhKTLzIyMtkHf/IrPCwlM+pV/M/96YgAAAIEAqQcGn9CKgzgPaguIZooTAOQdvBLMI5y0bQjOW6734XOpqQGf/Kra90wpoasLKZjSYKNPjE+FRUOrStLrxcNs4BeVKhy2PYTRnybfYVk1/dmKgH6P1YSRONsGKvTsH6c5IyCRG0ncCgYeF8tXppyd642982daopE7zQ/NPAnJfag=",
+ },
+ },
+ {
+ keyString: `ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM= nocomment
+# comment asmdna,ndp
+ssh-dss AAAAB3NzaC1kc3MAAACBAOChCC7lf6Uo9n7BmZ6M8St19PZf4Tn59NriyboW2x/DZuYAz3ibZ2OkQ3S0SqDIa0HXSEJ1zaExQdmbO+Ux/wsytWZmCczWOVsaszBZSl90q8UnWlSH6P+/YA+RWJm5SFtuV9PtGIhyZgoNuz5kBQ7K139wuQsecdKktISwTakzAAAAFQCzKsO2JhNKlL+wwwLGOcLffoAmkwAAAIBpK7/3xvduajLBD/9vASqBQIHrgK2J+wiQnIb/Wzy0UsVmvfn8A+udRbBo+csM8xrSnlnlJnjkJS3qiM5g+eTwsLIV1IdKPEwmwB+VcP53Cw6lSyWyJcvhFb0N6s08NZysLzvj0N+ZC/FnhKTLzIyMtkHf/IrPCwlM+pV/M/96YgAAAIEAqQcGn9CKgzgPaguIZooTAOQdvBLMI5y0bQjOW6734XOpqQGf/Kra90wpoasLKZjSYKNPjE+FRUOrStLrxcNs4BeVKhy2PYTRnybfYVk1/dmKgH6P1YSRONsGKvTsH6c5IyCRG0ncCgYeF8tXppyd642982daopE7zQ/NPAnJfag= nocomment`,
+ number: 2,
+ keyContents: []string{
+ "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM=",
+ "ssh-dss AAAAB3NzaC1kc3MAAACBAOChCC7lf6Uo9n7BmZ6M8St19PZf4Tn59NriyboW2x/DZuYAz3ibZ2OkQ3S0SqDIa0HXSEJ1zaExQdmbO+Ux/wsytWZmCczWOVsaszBZSl90q8UnWlSH6P+/YA+RWJm5SFtuV9PtGIhyZgoNuz5kBQ7K139wuQsecdKktISwTakzAAAAFQCzKsO2JhNKlL+wwwLGOcLffoAmkwAAAIBpK7/3xvduajLBD/9vASqBQIHrgK2J+wiQnIb/Wzy0UsVmvfn8A+udRbBo+csM8xrSnlnlJnjkJS3qiM5g+eTwsLIV1IdKPEwmwB+VcP53Cw6lSyWyJcvhFb0N6s08NZysLzvj0N+ZC/FnhKTLzIyMtkHf/IrPCwlM+pV/M/96YgAAAIEAqQcGn9CKgzgPaguIZooTAOQdvBLMI5y0bQjOW6734XOpqQGf/Kra90wpoasLKZjSYKNPjE+FRUOrStLrxcNs4BeVKhy2PYTRnybfYVk1/dmKgH6P1YSRONsGKvTsH6c5IyCRG0ncCgYeF8tXppyd642982daopE7zQ/NPAnJfag=",
+ },
+ },
+ {
+ keyString: `ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM= nocomment
+382488320jasdj1lasmva/vasodifipi4193-fksma.cm
+ssh-dss AAAAB3NzaC1kc3MAAACBAOChCC7lf6Uo9n7BmZ6M8St19PZf4Tn59NriyboW2x/DZuYAz3ibZ2OkQ3S0SqDIa0HXSEJ1zaExQdmbO+Ux/wsytWZmCczWOVsaszBZSl90q8UnWlSH6P+/YA+RWJm5SFtuV9PtGIhyZgoNuz5kBQ7K139wuQsecdKktISwTakzAAAAFQCzKsO2JhNKlL+wwwLGOcLffoAmkwAAAIBpK7/3xvduajLBD/9vASqBQIHrgK2J+wiQnIb/Wzy0UsVmvfn8A+udRbBo+csM8xrSnlnlJnjkJS3qiM5g+eTwsLIV1IdKPEwmwB+VcP53Cw6lSyWyJcvhFb0N6s08NZysLzvj0N+ZC/FnhKTLzIyMtkHf/IrPCwlM+pV/M/96YgAAAIEAqQcGn9CKgzgPaguIZooTAOQdvBLMI5y0bQjOW6734XOpqQGf/Kra90wpoasLKZjSYKNPjE+FRUOrStLrxcNs4BeVKhy2PYTRnybfYVk1/dmKgH6P1YSRONsGKvTsH6c5IyCRG0ncCgYeF8tXppyd642982daopE7zQ/NPAnJfag= nocomment`,
+ number: 2,
+ keyContents: []string{
+ "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM=",
+ "ssh-dss AAAAB3NzaC1kc3MAAACBAOChCC7lf6Uo9n7BmZ6M8St19PZf4Tn59NriyboW2x/DZuYAz3ibZ2OkQ3S0SqDIa0HXSEJ1zaExQdmbO+Ux/wsytWZmCczWOVsaszBZSl90q8UnWlSH6P+/YA+RWJm5SFtuV9PtGIhyZgoNuz5kBQ7K139wuQsecdKktISwTakzAAAAFQCzKsO2JhNKlL+wwwLGOcLffoAmkwAAAIBpK7/3xvduajLBD/9vASqBQIHrgK2J+wiQnIb/Wzy0UsVmvfn8A+udRbBo+csM8xrSnlnlJnjkJS3qiM5g+eTwsLIV1IdKPEwmwB+VcP53Cw6lSyWyJcvhFb0N6s08NZysLzvj0N+ZC/FnhKTLzIyMtkHf/IrPCwlM+pV/M/96YgAAAIEAqQcGn9CKgzgPaguIZooTAOQdvBLMI5y0bQjOW6734XOpqQGf/Kra90wpoasLKZjSYKNPjE+FRUOrStLrxcNs4BeVKhy2PYTRnybfYVk1/dmKgH6P1YSRONsGKvTsH6c5IyCRG0ncCgYeF8tXppyd642982daopE7zQ/NPAnJfag=",
+ },
+ },
+ }
+
+ for i, kase := range testCases {
+ s.ID = int64(i) + 20
+ asymkey_model.AddPublicKeysBySource(db.DefaultContext, user, s, []string{kase.keyString})
+ keys, err := db.Find[asymkey_model.PublicKey](db.DefaultContext, asymkey_model.FindPublicKeyOptions{
+ OwnerID: user.ID,
+ LoginSourceID: s.ID,
+ })
+		require.NoError(t, err)
+ assert.Len(t, keys, kase.number)
+
+ for _, key := range keys {
+ assert.Contains(t, kase.keyContents, key.Content)
+ }
+ for _, key := range keys {
+			require.NoError(t, DeletePublicKey(db.DefaultContext, user, key.ID))
+ }
+ }
+}
diff --git a/services/attachment/attachment.go b/services/attachment/attachment.go
new file mode 100644
index 0000000..c911945
--- /dev/null
+++ b/services/attachment/attachment.go
@@ -0,0 +1,80 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package attachment
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "io"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/validation"
+ "code.gitea.io/gitea/services/context/upload"
+
+ "github.com/google/uuid"
+)
+
+// NewAttachment creates a new attachment object, but does not verify the uploaded content.
+func NewAttachment(ctx context.Context, attach *repo_model.Attachment, file io.Reader, size int64) (*repo_model.Attachment, error) {
+ if attach.RepoID == 0 {
+ return nil, fmt.Errorf("attachment %s should belong to a repository", attach.Name)
+ }
+
+ err := db.WithTx(ctx, func(ctx context.Context) error {
+ attach.UUID = uuid.New().String()
+ size, err := storage.Attachments.Save(attach.RelativePath(), file, size)
+ if err != nil {
+ return fmt.Errorf("Create: %w", err)
+ }
+ attach.Size = size
+
+ eng := db.GetEngine(ctx)
+ if attach.NoAutoTime {
+ eng.NoAutoTime()
+ }
+ _, err = eng.Insert(attach)
+ return err
+ })
+
+ return attach, err
+}
+
+func NewExternalAttachment(ctx context.Context, attach *repo_model.Attachment) (*repo_model.Attachment, error) {
+ if attach.RepoID == 0 {
+ return nil, fmt.Errorf("attachment %s should belong to a repository", attach.Name)
+ }
+ if attach.ExternalURL == "" {
+		return nil, fmt.Errorf("attachment %s should have an external URL", attach.Name)
+ }
+ if !validation.IsValidExternalURL(attach.ExternalURL) {
+ return nil, repo_model.ErrInvalidExternalURL{ExternalURL: attach.ExternalURL}
+ }
+
+ attach.UUID = uuid.New().String()
+
+ eng := db.GetEngine(ctx)
+ if attach.NoAutoTime {
+ eng.NoAutoTime()
+ }
+ _, err := eng.Insert(attach)
+
+ return attach, err
+}
+
+// UploadAttachment upload new attachment into storage and update database
+func UploadAttachment(ctx context.Context, file io.Reader, allowedTypes string, fileSize int64, attach *repo_model.Attachment) (*repo_model.Attachment, error) {
+ buf := make([]byte, 1024)
+ n, _ := util.ReadAtMost(file, buf)
+ buf = buf[:n]
+
+ if err := upload.Verify(buf, attach.Name, allowedTypes); err != nil {
+ return nil, err
+ }
+
+ return NewAttachment(ctx, attach, io.MultiReader(bytes.NewReader(buf), file), fileSize)
+}
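
Reviewer note: UploadAttachment consumes up to 1KB of the stream for the content check, then splices the sniffed prefix back in front of the unread remainder with io.MultiReader, so storage still receives the complete file. The same pattern in isolation:

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"strings"
)

func main() {
	src := strings.NewReader("hello, attachment body")

	// Read a small prefix for content sniffing (ReadAtMost in Gitea's
	// util package behaves like this for inputs shorter than the buffer).
	buf := make([]byte, 5)
	n, _ := io.ReadFull(src, buf)
	buf = buf[:n]
	fmt.Printf("sniffed prefix: %q\n", buf)

	// Splice the prefix back so downstream sees the whole stream.
	all, _ := io.ReadAll(io.MultiReader(bytes.NewReader(buf), src))
	fmt.Printf("reassembled:    %q\n", all)
}
```
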
diff --git a/services/attachment/attachment_test.go b/services/attachment/attachment_test.go
new file mode 100644
index 0000000..fe861c6
--- /dev/null
+++ b/services/attachment/attachment_test.go
@@ -0,0 +1,47 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package attachment
+
+import (
+ "os"
+ "path/filepath"
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+
+ _ "code.gitea.io/gitea/models/actions"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
+
+func TestUploadAttachment(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+
+ fPath := "./attachment_test.go"
+ f, err := os.Open(fPath)
+ require.NoError(t, err)
+ defer f.Close()
+
+ attach, err := NewAttachment(db.DefaultContext, &repo_model.Attachment{
+ RepoID: 1,
+ UploaderID: user.ID,
+ Name: filepath.Base(fPath),
+ }, f, -1)
+ require.NoError(t, err)
+
+ attachment, err := repo_model.GetAttachmentByUUID(db.DefaultContext, attach.UUID)
+ require.NoError(t, err)
+ assert.EqualValues(t, user.ID, attachment.UploaderID)
+ assert.Equal(t, int64(0), attachment.DownloadCount)
+}
diff --git a/services/auth/additional_scopes_test.go b/services/auth/additional_scopes_test.go
new file mode 100644
index 0000000..9ab4e6e
--- /dev/null
+++ b/services/auth/additional_scopes_test.go
@@ -0,0 +1,32 @@
+package auth
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestGrantAdditionalScopes(t *testing.T) {
+ tests := []struct {
+ grantScopes string
+ expectedScopes string
+ }{
+ {"openid profile email", ""},
+ {"openid profile email groups", ""},
+ {"openid profile email all", "all"},
+ {"openid profile email read:user all", "read:user,all"},
+ {"openid profile email groups read:user", "read:user"},
+ {"read:user read:repository", "read:user,read:repository"},
+ {"read:user write:issue public-only", "read:user,write:issue,public-only"},
+ {"openid profile email read:user", "read:user"},
+ {"read:invalid_scope", ""},
+ {"read:invalid_scope,write:scope_invalid,just-plain-wrong", ""},
+ }
+
+ for _, test := range tests {
+ t.Run(test.grantScopes, func(t *testing.T) {
+ result := grantAdditionalScopes(test.grantScopes)
+ assert.Equal(t, test.expectedScopes, result)
+ })
+ }
+}
diff --git a/services/auth/auth.go b/services/auth/auth.go
new file mode 100644
index 0000000..c108723
--- /dev/null
+++ b/services/auth/auth.go
@@ -0,0 +1,106 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "fmt"
+ "net/http"
+ "regexp"
+ "strings"
+
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/auth/webauthn"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/session"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/web/middleware"
+ gitea_context "code.gitea.io/gitea/services/context"
+ user_service "code.gitea.io/gitea/services/user"
+)
+
+// Init should be called exactly once when the application starts to allow plugins
+// to allocate necessary resources
+func Init() {
+ webauthn.Init()
+}
+
+// isAttachmentDownload checks if the request is a GET file download of an attachment
+func isAttachmentDownload(req *http.Request) bool {
+ return strings.HasPrefix(req.URL.Path, "/attachments/") && req.Method == "GET"
+}
+
+// isContainerPath checks if the request targets the container endpoint
+func isContainerPath(req *http.Request) bool {
+ return strings.HasPrefix(req.URL.Path, "/v2/")
+}
+
+var (
+ gitRawOrAttachPathRe = regexp.MustCompile(`^/[a-zA-Z0-9_.-]+/[a-zA-Z0-9_.-]+/(?:(?:git-(?:(?:upload)|(?:receive))-pack$)|(?:info/refs$)|(?:HEAD$)|(?:objects/)|(?:raw/)|(?:releases/download/)|(?:attachments/))`)
+ lfsPathRe = regexp.MustCompile(`^/[a-zA-Z0-9_.-]+/[a-zA-Z0-9_.-]+/info/lfs/`)
+ archivePathRe = regexp.MustCompile(`^/[a-zA-Z0-9_.-]+/[a-zA-Z0-9_.-]+/archive/`)
+)
+
+func isGitRawOrAttachPath(req *http.Request) bool {
+ return gitRawOrAttachPathRe.MatchString(req.URL.Path)
+}
+
+func isGitRawOrAttachOrLFSPath(req *http.Request) bool {
+ if isGitRawOrAttachPath(req) {
+ return true
+ }
+ if setting.LFS.StartServer {
+ return lfsPathRe.MatchString(req.URL.Path)
+ }
+ return false
+}
+
+func isArchivePath(req *http.Request) bool {
+ return archivePathRe.MatchString(req.URL.Path)
+}
+
+// handleSignIn clears existing session variables and stores new ones for the specified user object
+func handleSignIn(resp http.ResponseWriter, req *http.Request, sess SessionStore, user *user_model.User) {
+ // We need to regenerate the session...
+ newSess, err := session.RegenerateSession(resp, req)
+ if err != nil {
+ log.Error(fmt.Sprintf("Error regenerating session: %v", err))
+ } else {
+ sess = newSess
+ }
+
+ _ = sess.Delete("openid_verified_uri")
+ _ = sess.Delete("openid_signin_remember")
+ _ = sess.Delete("openid_determined_email")
+ _ = sess.Delete("openid_determined_username")
+ _ = sess.Delete("twofaUid")
+ _ = sess.Delete("twofaRemember")
+ _ = sess.Delete("webauthnAssertion")
+ _ = sess.Delete("linkAccount")
+ err = sess.Set("uid", user.ID)
+ if err != nil {
+ log.Error(fmt.Sprintf("Error setting session: %v", err))
+ }
+
+ // Language setting of the user overwrites the one previously set
+ // If the user does not have a locale set, we save the current one.
+ if len(user.Language) == 0 {
+ lc := middleware.Locale(resp, req)
+ opts := &user_service.UpdateOptions{
+ Language: optional.Some(lc.Language()),
+ }
+ if err := user_service.UpdateUser(req.Context(), user, opts); err != nil {
+			log.Error(fmt.Sprintf("Error updating user language [user: %d, locale: %s]", user.ID, lc.Language()))
+ return
+ }
+ }
+
+ middleware.SetLocaleCookie(resp, user.Language, 0)
+
+ // Clear whatever CSRF has right now, force to generate a new one
+ if ctx := gitea_context.GetWebContext(req); ctx != nil {
+ ctx.Csrf.DeleteCookie(ctx)
+ }
+}
diff --git a/services/auth/auth_test.go b/services/auth/auth_test.go
new file mode 100644
index 0000000..3adaa28
--- /dev/null
+++ b/services/auth/auth_test.go
@@ -0,0 +1,134 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "net/http"
+ "testing"
+
+ "code.gitea.io/gitea/modules/setting"
+)
+
+func Test_isGitRawOrAttachOrLFSPath(t *testing.T) {
+ tests := []struct {
+ path string
+
+ want bool
+ }{
+ {
+ "/owner/repo/git-upload-pack",
+ true,
+ },
+ {
+ "/owner/repo/git-receive-pack",
+ true,
+ },
+ {
+ "/owner/repo/info/refs",
+ true,
+ },
+ {
+ "/owner/repo/HEAD",
+ true,
+ },
+ {
+ "/owner/repo/objects/info/alternates",
+ true,
+ },
+ {
+ "/owner/repo/objects/info/http-alternates",
+ true,
+ },
+ {
+ "/owner/repo/objects/info/packs",
+ true,
+ },
+ {
+ "/owner/repo/objects/info/blahahsdhsdkla",
+ true,
+ },
+ {
+ "/owner/repo/objects/01/23456789abcdef0123456789abcdef01234567",
+ true,
+ },
+ {
+ "/owner/repo/objects/pack/pack-123456789012345678921234567893124567894.pack",
+ true,
+ },
+ {
+ "/owner/repo/objects/pack/pack-0123456789abcdef0123456789abcdef0123456.idx",
+ true,
+ },
+ {
+ "/owner/repo/raw/branch/foo/fanaso",
+ true,
+ },
+ {
+ "/owner/repo/stars",
+ false,
+ },
+ {
+ "/notowner",
+ false,
+ },
+ {
+ "/owner/repo",
+ false,
+ },
+ {
+ "/owner/repo/commit/123456789012345678921234567893124567894",
+ false,
+ },
+ {
+ "/owner/repo/releases/download/tag/repo.tar.gz",
+ true,
+ },
+ {
+ "/owner/repo/attachments/6d92a9ee-5d8b-4993-97c9-6181bdaa8955",
+ true,
+ },
+ }
+ lfsTests := []string{
+ "/owner/repo/info/lfs/",
+ "/owner/repo/info/lfs/objects/batch",
+ "/owner/repo/info/lfs/objects/oid/filename",
+ "/owner/repo/info/lfs/objects/oid",
+ "/owner/repo/info/lfs/objects",
+ "/owner/repo/info/lfs/verify",
+ "/owner/repo/info/lfs/locks",
+ "/owner/repo/info/lfs/locks/verify",
+ "/owner/repo/info/lfs/locks/123/unlock",
+ }
+
+ origLFSStartServer := setting.LFS.StartServer
+
+ for _, tt := range tests {
+ t.Run(tt.path, func(t *testing.T) {
+ req, _ := http.NewRequest("POST", "http://localhost"+tt.path, nil)
+ setting.LFS.StartServer = false
+ if got := isGitRawOrAttachOrLFSPath(req); got != tt.want {
+				t.Errorf("isGitRawOrAttachOrLFSPath() = %v, want %v", got, tt.want)
+ }
+ setting.LFS.StartServer = true
+ if got := isGitRawOrAttachOrLFSPath(req); got != tt.want {
+				t.Errorf("isGitRawOrAttachOrLFSPath() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+ for _, tt := range lfsTests {
+ t.Run(tt, func(t *testing.T) {
+ req, _ := http.NewRequest("POST", tt, nil)
+ setting.LFS.StartServer = false
+ if got := isGitRawOrAttachOrLFSPath(req); got != setting.LFS.StartServer {
+				t.Errorf("isGitRawOrAttachOrLFSPath(%q) = %v, want %v, %v", tt, got, setting.LFS.StartServer, gitRawOrAttachPathRe.MatchString(tt))
+ }
+ setting.LFS.StartServer = true
+ if got := isGitRawOrAttachOrLFSPath(req); got != setting.LFS.StartServer {
+				t.Errorf("isGitRawOrAttachOrLFSPath(%q) = %v, want %v", tt, got, setting.LFS.StartServer)
+ }
+ })
+ }
+ setting.LFS.StartServer = origLFSStartServer
+}
diff --git a/services/auth/basic.go b/services/auth/basic.go
new file mode 100644
index 0000000..d489164
--- /dev/null
+++ b/services/auth/basic.go
@@ -0,0 +1,180 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "errors"
+ "net/http"
+ "strings"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ auth_model "code.gitea.io/gitea/models/auth"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web/middleware"
+)
+
+// Ensure the struct implements the interface.
+var (
+ _ Method = &Basic{}
+)
+
+// BasicMethodName is the constant name of the basic authentication method
+const BasicMethodName = "basic"
+
+// Basic implements the Method interface and authenticates requests (API requests
+// only) by looking for Basic authentication data or "x-oauth-basic" token in the "Authorization"
+// header.
+type Basic struct{}
+
+// Name represents the name of auth method
+func (b *Basic) Name() string {
+ return BasicMethodName
+}
+
+// Verify extracts and validates Basic data (username and password/token) from the
+// "Authorization" header of the request and returns the corresponding user object for that
+// name/token on successful validation.
+// Returns nil if header is empty or validation fails.
+func (b *Basic) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) (*user_model.User, error) {
+	// Basic authentication should only fire on API, container, attachment download, or Git/LFS paths
+ if !middleware.IsAPIPath(req) && !isContainerPath(req) && !isAttachmentDownload(req) && !isGitRawOrAttachOrLFSPath(req) {
+ return nil, nil
+ }
+
+ baHead := req.Header.Get("Authorization")
+ if len(baHead) == 0 {
+ return nil, nil
+ }
+
+ auths := strings.SplitN(baHead, " ", 2)
+ if len(auths) != 2 || (strings.ToLower(auths[0]) != "basic") {
+ return nil, nil
+ }
+
+ uname, passwd, _ := base.BasicAuthDecode(auths[1])
+
+ // Check if username or password is a token
+ isUsernameToken := len(passwd) == 0 || passwd == "x-oauth-basic"
+ // Assume username is token
+ authToken := uname
+ if !isUsernameToken {
+ log.Trace("Basic Authorization: Attempting login for: %s", uname)
+ // Assume password is token
+ authToken = passwd
+ } else {
+ log.Trace("Basic Authorization: Attempting login with username as token")
+ }
+
+ // check oauth2 token
+ uid, _ := CheckOAuthAccessToken(req.Context(), authToken)
+ if uid != 0 {
+ log.Trace("Basic Authorization: Valid OAuthAccessToken for user[%d]", uid)
+
+ u, err := user_model.GetUserByID(req.Context(), uid)
+ if err != nil {
+ log.Error("GetUserByID: %v", err)
+ return nil, err
+ }
+
+ store.GetData()["IsApiToken"] = true
+ return u, nil
+ }
+
+ // check personal access token
+ token, err := auth_model.GetAccessTokenBySHA(req.Context(), authToken)
+ if err == nil {
+		log.Trace("Basic Authorization: Valid AccessToken for user[%d]", token.UID)
+ u, err := user_model.GetUserByID(req.Context(), token.UID)
+ if err != nil {
+ log.Error("GetUserByID: %v", err)
+ return nil, err
+ }
+
+ token.UpdatedUnix = timeutil.TimeStampNow()
+ if err = auth_model.UpdateAccessToken(req.Context(), token); err != nil {
+ log.Error("UpdateAccessToken: %v", err)
+ }
+
+ store.GetData()["IsApiToken"] = true
+ store.GetData()["ApiTokenScope"] = token.Scope
+ return u, nil
+ } else if !auth_model.IsErrAccessTokenNotExist(err) && !auth_model.IsErrAccessTokenEmpty(err) {
+		log.Error("GetAccessTokenBySHA: %v", err)
+ }
+
+ // check task token
+ task, err := actions_model.GetRunningTaskByToken(req.Context(), authToken)
+ if err == nil && task != nil {
+ log.Trace("Basic Authorization: Valid AccessToken for task[%d]", task.ID)
+
+ store.GetData()["IsActionsToken"] = true
+ store.GetData()["ActionsTaskID"] = task.ID
+
+ return user_model.NewActionsUser(), nil
+ }
+
+ if !setting.Service.EnableBasicAuth {
+ return nil, nil
+ }
+
+ log.Trace("Basic Authorization: Attempting SignIn for %s", uname)
+ u, source, err := UserSignIn(req.Context(), uname, passwd)
+ if err != nil {
+ if !user_model.IsErrUserNotExist(err) {
+ log.Error("UserSignIn: %v", err)
+ }
+ return nil, err
+ }
+
+ hasWebAuthn, err := auth_model.HasWebAuthnRegistrationsByUID(req.Context(), u.ID)
+ if err != nil {
+ log.Error("HasWebAuthnRegistrationsByUID: %v", err)
+ return nil, err
+ }
+
+ if hasWebAuthn {
+ return nil, errors.New("basic authorization is not allowed while having security keys enrolled")
+ }
+
+ if skipper, ok := source.Cfg.(LocalTwoFASkipper); !ok || !skipper.IsSkipLocalTwoFA() {
+ if err := validateTOTP(req, u); err != nil {
+ return nil, err
+ }
+ }
+
+ log.Trace("Basic Authorization: Logged in user %-v", u)
+
+ return u, nil
+}
+
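+// getOtpHeader returns the OTP code from the request headers, preferring
+// X-Forgejo-OTP over X-Gitea-OTP when both are set.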
+func getOtpHeader(header http.Header) string {
+ otpHeader := header.Get("X-Gitea-OTP")
+ if forgejoHeader := header.Get("X-Forgejo-OTP"); forgejoHeader != "" {
+ otpHeader = forgejoHeader
+ }
+ return otpHeader
+}
+
+func validateTOTP(req *http.Request, u *user_model.User) error {
+ twofa, err := auth_model.GetTwoFactorByUID(req.Context(), u.ID)
+ if err != nil {
+ if auth_model.IsErrTwoFactorNotEnrolled(err) {
+ // No 2FA enrollment for this user
+ return nil
+ }
+ return err
+ }
+ if ok, err := twofa.ValidateTOTP(getOtpHeader(req.Header)); err != nil {
+ return err
+ } else if !ok {
+ return util.NewInvalidArgumentErrorf("invalid provided OTP")
+ }
+ return nil
+}
diff --git a/services/auth/group.go b/services/auth/group.go
new file mode 100644
index 0000000..aecf43c
--- /dev/null
+++ b/services/auth/group.go
@@ -0,0 +1,72 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "net/http"
+ "strings"
+
+ user_model "code.gitea.io/gitea/models/user"
+)
+
+// Ensure the struct implements the interface.
+var (
+ _ Method = &Group{}
+)
+
+// Group implements the Auth interface by chaining several authentication methods.
+type Group struct {
+ methods []Method
+}
+
+// NewGroup creates a new auth group
+func NewGroup(methods ...Method) *Group {
+ return &Group{
+ methods: methods,
+ }
+}
+
+// Add adds a new method to group
+func (b *Group) Add(method Method) {
+ b.methods = append(b.methods, method)
+}
+
+// Name returns group's methods name
+func (b *Group) Name() string {
+ names := make([]string, 0, len(b.methods))
+ for _, m := range b.methods {
+ names = append(names, m.Name())
+ }
+ return strings.Join(names, ",")
+}
+
+func (b *Group) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) (*user_model.User, error) {
+ // Try to sign in with each of the enabled plugins
+ var retErr error
+ for _, m := range b.methods {
+ user, err := m.Verify(req, w, store, sess)
+ if err != nil {
+ if retErr == nil {
+ retErr = err
+ }
+ // Try other methods if this one failed.
+ // Some methods may share the same protocol when detecting whether they match.
+ // For example, OAuth2 and conan.Auth both read the token from the "Authorization: Bearer <token>" header;
+ // if OAuth2 returns an error, we should give conan.Auth a chance to try.
+ continue
+ }
+
+ // If any method returns a user, we can stop trying.
+ // Return the user and ignore any error returned by previous methods.
+ if user != nil {
+ if store.GetData()["AuthedMethod"] == nil {
+ store.GetData()["AuthedMethod"] = m.Name()
+ }
+ return user, nil
+ }
+ }
+
+ // If no method returns a user, return the error returned by the first method.
+ return nil, retErr
+}
diff --git a/services/auth/httpsign.go b/services/auth/httpsign.go
new file mode 100644
index 0000000..b604349
--- /dev/null
+++ b/services/auth/httpsign.go
@@ -0,0 +1,218 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "bytes"
+ "encoding/base64"
+ "errors"
+ "fmt"
+ "net/http"
+ "strings"
+
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/go-fed/httpsig"
+ "golang.org/x/crypto/ssh"
+)
+
+// Ensure the struct implements the interface.
+var (
+ _ Method = &HTTPSign{}
+)
+
+// HTTPSign implements the Auth interface and authenticates requests (API requests
+// only) by looking for HTTP signature data in the "Signature" header.
+// More information can be found at https://github.com/go-fed/httpsig
+type HTTPSign struct{}
+
+// Name represents the name of auth method
+func (h *HTTPSign) Name() string {
+ return "httpsign"
+}
+
+// Verify extracts and validates the HTTP signature from the "Signature" header of the request and returns
+// the corresponding user object on successful validation.
+// Returns nil if header is empty or validation fails.
+func (h *HTTPSign) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) (*user_model.User, error) {
+ sigHead := req.Header.Get("Signature")
+ if len(sigHead) == 0 {
+ return nil, nil
+ }
+
+ var (
+ publicKey *asymkey_model.PublicKey
+ err error
+ )
+
+ if len(req.Header.Get("X-Ssh-Certificate")) != 0 {
+ // Handle Signature signed by SSH certificates
+ if len(setting.SSH.TrustedUserCAKeys) == 0 {
+ return nil, nil
+ }
+
+ publicKey, err = VerifyCert(req)
+ if err != nil {
+ log.Debug("VerifyCert on request from %s: failed: %v", req.RemoteAddr, err)
+ log.Warn("Failed authentication attempt from %s", req.RemoteAddr)
+ return nil, nil
+ }
+ } else {
+ // Handle Signature signed by Public Key
+ publicKey, err = VerifyPubKey(req)
+ if err != nil {
+ log.Debug("VerifyPubKey on request from %s: failed: %v", req.RemoteAddr, err)
+ log.Warn("Failed authentication attempt from %s", req.RemoteAddr)
+ return nil, nil
+ }
+ }
+
+ u, err := user_model.GetUserByID(req.Context(), publicKey.OwnerID)
+ if err != nil {
+ log.Error("GetUserByID: %v", err)
+ return nil, err
+ }
+
+ store.GetData()["IsApiToken"] = true
+
+ log.Trace("HTTP Sign: Logged in user %-v", u)
+
+ return u, nil
+}
+
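+// VerifyPubKey verifies that the request is signed by a known public key,
+// looked up by the fingerprint given in the signature's keyId parameter.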
+func VerifyPubKey(r *http.Request) (*asymkey_model.PublicKey, error) {
+ verifier, err := httpsig.NewVerifier(r)
+ if err != nil {
+ return nil, fmt.Errorf("httpsig.NewVerifier failed: %s", err)
+ }
+
+ keyID := verifier.KeyId()
+
+ publicKeys, err := db.Find[asymkey_model.PublicKey](r.Context(), asymkey_model.FindPublicKeyOptions{
+ Fingerprint: keyID,
+ })
+ if err != nil {
+ return nil, err
+ }
+
+ if len(publicKeys) == 0 {
+ return nil, fmt.Errorf("no public key found for keyid %s", keyID)
+ }
+
+ sshPublicKey, _, _, _, err := ssh.ParseAuthorizedKey([]byte(publicKeys[0].Content))
+ if err != nil {
+ return nil, err
+ }
+
+ if err := doVerify(verifier, []ssh.PublicKey{sshPublicKey}); err != nil {
+ return nil, err
+ }
+
+ return publicKeys[0], nil
+}
+
+// VerifyCert verifies the validity of the SSH certificate and returns the public key of the signer.
+// We verify that the certificate is signed by the correct CA,
+// and that the HTTP request is signed with the private key belonging to the public key mentioned in the certificate.
+func VerifyCert(r *http.Request) (*asymkey_model.PublicKey, error) {
+ // Get our certificate from the header
+ bcert, err := base64.RawStdEncoding.DecodeString(r.Header.Get("x-ssh-certificate"))
+ if err != nil {
+ return nil, err
+ }
+
+ pk, err := ssh.ParsePublicKey(bcert)
+ if err != nil {
+ return nil, err
+ }
+
+ // Check if it's really a ssh certificate
+ cert, ok := pk.(*ssh.Certificate)
+ if !ok {
+ return nil, fmt.Errorf("no certificate found")
+ }
+
+ c := &ssh.CertChecker{
+ IsUserAuthority: func(auth ssh.PublicKey) bool {
+ marshaled := auth.Marshal()
+
+ for _, k := range setting.SSH.TrustedUserCAKeysParsed {
+ if bytes.Equal(marshaled, k.Marshal()) {
+ return true
+ }
+ }
+
+ return false
+ },
+ }
+
+ // check the CA of the cert
+ if !c.IsUserAuthority(cert.SignatureKey) {
+ return nil, fmt.Errorf("CA check failed")
+ }
+
+ // Create a verifier
+ verifier, err := httpsig.NewVerifier(r)
+ if err != nil {
+ return nil, fmt.Errorf("httpsig.NewVerifier failed: %s", err)
+ }
+
+ // now verify that this request was signed with the private key that matches the certificate public key
+ if err := doVerify(verifier, []ssh.PublicKey{cert.Key}); err != nil {
+ return nil, err
+ }
+
+ // Now for each of the certificate's valid principals
+ for _, principal := range cert.ValidPrincipals {
+ // Look in the db for the public key
+ publicKey, err := asymkey_model.SearchPublicKeyByContentExact(r.Context(), principal)
+ if asymkey_model.IsErrKeyNotExist(err) {
+ // No public key matches this principal - try the next principal
+ continue
+ } else if err != nil {
+ // this error will be a db error therefore we can't solve this and we should abort
+ log.Error("SearchPublicKeyByContentExact: %v", err)
+ return nil, err
+ }
+
+ // Validate the cert for this principal
+ if err := c.CheckCert(principal, cert); err != nil {
+ // however, because principal is a member of ValidPrincipals - if this fails then the certificate itself is invalid
+ return nil, err
+ }
+
+ // OK we have a public key for a principal matching a valid certificate whose key has signed this request.
+ return publicKey, nil
+ }
+
+ // No public key matching a principal in the certificate is registered in gitea
+ return nil, fmt.Errorf("no valid principal found")
+}
+
+// doVerify iterates across the provided public keys, attempting to verify the current request against each key in turn
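+// (only ssh-ed25519 and ssh-rsa key types are mapped to signature algorithms
+// below; any other key type yields no candidates and verification fails).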
+func doVerify(verifier httpsig.Verifier, sshPublicKeys []ssh.PublicKey) error {
+ for _, publicKey := range sshPublicKeys {
+ cryptoPubkey := publicKey.(ssh.CryptoPublicKey).CryptoPublicKey()
+
+ var algos []httpsig.Algorithm
+
+ switch {
+ case strings.HasPrefix(publicKey.Type(), "ssh-ed25519"):
+ algos = []httpsig.Algorithm{httpsig.ED25519}
+ case strings.HasPrefix(publicKey.Type(), "ssh-rsa"):
+ algos = []httpsig.Algorithm{httpsig.RSA_SHA1, httpsig.RSA_SHA256, httpsig.RSA_SHA512}
+ }
+ for _, algo := range algos {
+ if err := verifier.Verify(cryptoPubkey, algo); err == nil {
+ return nil
+ }
+ }
+ }
+
+ return errors.New("verification failed")
+}
diff --git a/services/auth/interface.go b/services/auth/interface.go
new file mode 100644
index 0000000..ece28af
--- /dev/null
+++ b/services/auth/interface.go
@@ -0,0 +1,47 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "context"
+ "net/http"
+
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/session"
+ "code.gitea.io/gitea/modules/web/middleware"
+)
+
+// DataStore represents a data store
+type DataStore middleware.ContextDataStore
+
+// SessionStore represents a session store
+type SessionStore session.Store
+
+// Method represents an authentication method (plugin) for HTTP requests.
+type Method interface {
+ // Verify tries to verify the authentication data contained in the request.
+ // If verification is successful it returns either an existing user object (with id > 0)
+ // or a new user object (with id = 0) populated with the information that was found
+ // in the authentication data (username or email).
+ // The error return value is non-nil if verification fails; the user return
+ // value is nil if no verification condition matched.
+ Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) (*user_model.User, error)
+
+ Name() string
+}
+
+// PasswordAuthenticator represents a source of authentication
+type PasswordAuthenticator interface {
+ Authenticate(ctx context.Context, user *user_model.User, login, password string) (*user_model.User, error)
+}
+
+// LocalTwoFASkipper represents a source of authentication that can skip local 2fa
+type LocalTwoFASkipper interface {
+ IsSkipLocalTwoFA() bool
+}
+
+// SynchronizableSource represents a source that can synchronize users
+type SynchronizableSource interface {
+ Sync(ctx context.Context, updateExisting bool) error
+}
diff --git a/services/auth/main_test.go b/services/auth/main_test.go
new file mode 100644
index 0000000..b81c39a
--- /dev/null
+++ b/services/auth/main_test.go
@@ -0,0 +1,14 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
diff --git a/services/auth/oauth2.go b/services/auth/oauth2.go
new file mode 100644
index 0000000..b983e57
--- /dev/null
+++ b/services/auth/oauth2.go
@@ -0,0 +1,244 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "context"
+ "net/http"
+ "slices"
+ "strings"
+ "time"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ auth_model "code.gitea.io/gitea/models/auth"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/web/middleware"
+ "code.gitea.io/gitea/services/actions"
+ "code.gitea.io/gitea/services/auth/source/oauth2"
+)
+
+// Ensure the struct implements the interface.
+var (
+ _ Method = &OAuth2{}
+)
+
+// grantAdditionalScopes returns valid scopes coming from grant
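+// (for example, a grant of "openid profile read:user" yields "read:user",
+// while a grant containing only OIDC scopes yields the empty string).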
+func grantAdditionalScopes(grantScopes string) string {
+ // scopes_supported from templates/user/auth/oidc_wellknown.tmpl
+ scopesSupported := []string{
+ "openid",
+ "profile",
+ "email",
+ "groups",
+ }
+
+ var apiTokenScopes []string
+ for _, apiTokenScope := range strings.Split(grantScopes, " ") {
+ if !slices.Contains(scopesSupported, apiTokenScope) {
+ apiTokenScopes = append(apiTokenScopes, apiTokenScope)
+ }
+ }
+
+ if len(apiTokenScopes) == 0 {
+ return ""
+ }
+
+ var additionalGrantScopes []string
+ allScopes := auth_model.AccessTokenScope("all")
+
+ for _, apiTokenScope := range apiTokenScopes {
+ grantScope := auth_model.AccessTokenScope(apiTokenScope)
+ if ok, _ := allScopes.HasScope(grantScope); ok {
+ additionalGrantScopes = append(additionalGrantScopes, apiTokenScope)
+ } else if apiTokenScope == "public-only" {
+ additionalGrantScopes = append(additionalGrantScopes, apiTokenScope)
+ }
+ }
+ if len(additionalGrantScopes) > 0 {
+ return strings.Join(additionalGrantScopes, ",")
+ }
+
+ return ""
+}
+
+// CheckOAuthAccessToken returns the uid of the user identified by the OAuth
+// token, plus any non-default OpenID scopes that were requested.
+func CheckOAuthAccessToken(ctx context.Context, accessToken string) (int64, string) {
+ if !setting.OAuth2.Enabled {
+ return 0, ""
+ }
+ // JWT tokens require a "."
+ if !strings.Contains(accessToken, ".") {
+ return 0, ""
+ }
+ token, err := oauth2.ParseToken(accessToken, oauth2.DefaultSigningKey)
+ if err != nil {
+ log.Trace("oauth2.ParseToken: %v", err)
+ return 0, ""
+ }
+ var grant *auth_model.OAuth2Grant
+ if grant, err = auth_model.GetOAuth2GrantByID(ctx, token.GrantID); err != nil || grant == nil {
+ return 0, ""
+ }
+ if token.Type != oauth2.TypeAccessToken {
+ return 0, ""
+ }
+ if token.ExpiresAt.Before(time.Now()) || token.IssuedAt.After(time.Now()) {
+ return 0, ""
+ }
+ grantScopes := grantAdditionalScopes(grant.Scope)
+ return grant.UserID, grantScopes
+}
+
+// CheckTaskIsRunning verifies that the TaskID corresponds to a running task
+func CheckTaskIsRunning(ctx context.Context, taskID int64) bool {
+ // Verify the task exists
+ task, err := actions_model.GetTaskByID(ctx, taskID)
+ if err != nil {
+ return false
+ }
+
+ // Verify that it's running
+ return task.Status == actions_model.StatusRunning
+}
+
+// OAuth2 implements the Auth interface and authenticates requests
+// (API requests only) by looking for an OAuth token in query parameters or the
+// "Authorization" header.
+type OAuth2 struct{}
+
+// Name represents the name of auth method
+func (o *OAuth2) Name() string {
+ return "oauth2"
+}
+
+// parseToken returns the token from request, and a boolean value
+// representing whether the token exists or not
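+// (accepted forms: a "token" or "access_token" query/form parameter, or an
+// "Authorization: token <value>" / "Authorization: Bearer <value>" header).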
+func parseToken(req *http.Request) (string, bool) {
+ _ = req.ParseForm()
+ if !setting.DisableQueryAuthToken {
+ // Check token.
+ if token := req.Form.Get("token"); token != "" {
+ return token, true
+ }
+ // Check access token.
+ if token := req.Form.Get("access_token"); token != "" {
+ return token, true
+ }
+ } else if req.Form.Get("token") != "" || req.Form.Get("access_token") != "" {
+ log.Warn("API token sent in query string but DISABLE_QUERY_AUTH_TOKEN=true")
+ }
+
+ // check header token
+ if auHead := req.Header.Get("Authorization"); auHead != "" {
+ auths := strings.Fields(auHead)
+ if len(auths) == 2 && (auths[0] == "token" || strings.ToLower(auths[0]) == "bearer") {
+ return auths[1], true
+ }
+ }
+ return "", false
+}
+
+// userIDFromToken returns the user id corresponding to the OAuth token.
+// It will set 'IsApiToken' to true if the token is an API token and
+// set 'ApiTokenScope' to the scope of the access token
+func (o *OAuth2) userIDFromToken(ctx context.Context, tokenSHA string, store DataStore) int64 {
+ // Let's see if token is valid.
+ if strings.Contains(tokenSHA, ".") {
+ // First attempt to decode an actions JWT, returning the actions user
+ if taskID, err := actions.TokenToTaskID(tokenSHA); err == nil {
+ if CheckTaskIsRunning(ctx, taskID) {
+ store.GetData()["IsActionsToken"] = true
+ store.GetData()["ActionsTaskID"] = taskID
+ return user_model.ActionsUserID
+ }
+ }
+
+ // Otherwise, check if this is an OAuth access token
+ uid, grantScopes := CheckOAuthAccessToken(ctx, tokenSHA)
+ if uid != 0 {
+ store.GetData()["IsApiToken"] = true
+ if grantScopes != "" {
+ store.GetData()["ApiTokenScope"] = auth_model.AccessTokenScope(grantScopes)
+ } else {
+ store.GetData()["ApiTokenScope"] = auth_model.AccessTokenScopeAll // fallback to all
+ }
+ }
+ return uid
+ }
+ t, err := auth_model.GetAccessTokenBySHA(ctx, tokenSHA)
+ if err != nil {
+ if auth_model.IsErrAccessTokenNotExist(err) {
+ // check task token
+ task, err := actions_model.GetRunningTaskByToken(ctx, tokenSHA)
+ if err == nil && task != nil {
+ log.Trace("Basic Authorization: Valid AccessToken for task[%d]", task.ID)
+
+ store.GetData()["IsActionsToken"] = true
+ store.GetData()["ActionsTaskID"] = task.ID
+
+ return user_model.ActionsUserID
+ }
+ } else if !auth_model.IsErrAccessTokenNotExist(err) && !auth_model.IsErrAccessTokenEmpty(err) {
+ log.Error("GetAccessTokenBySHA: %v", err)
+ }
+ return 0
+ }
+ t.UpdatedUnix = timeutil.TimeStampNow()
+ if err = auth_model.UpdateAccessToken(ctx, t); err != nil {
+ log.Error("UpdateAccessToken: %v", err)
+ }
+ store.GetData()["IsApiToken"] = true
+ store.GetData()["ApiTokenScope"] = t.Scope
+ return t.UID
+}
+
+// Verify extracts the user ID from the OAuth token in the query parameters
+// or the "Authorization" header and returns the corresponding user object for that ID.
+// If verification is successful returns an existing user object.
+// Returns nil if verification fails.
+func (o *OAuth2) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) (*user_model.User, error) {
+ // These paths are not API paths, but we still want to check for tokens because they may be in URLs returned by the API
+ if !middleware.IsAPIPath(req) && !isAttachmentDownload(req) && !isAuthenticatedTokenRequest(req) &&
+ !isGitRawOrAttachPath(req) && !isArchivePath(req) {
+ return nil, nil
+ }
+
+ token, ok := parseToken(req)
+ if !ok {
+ return nil, nil
+ }
+
+ id := o.userIDFromToken(req.Context(), token, store)
+
+ if id <= 0 && id != -2 { // -2 means actions, so we need to allow it.
+ return nil, user_model.ErrUserNotExist{}
+ }
+ log.Trace("OAuth2 Authorization: Found token for user[%d]", id)
+
+ user, err := user_model.GetPossibleUserByID(req.Context(), id)
+ if err != nil {
+ if !user_model.IsErrUserNotExist(err) {
+ log.Error("GetUserByName: %v", err)
+ }
+ return nil, err
+ }
+
+ log.Trace("OAuth2 Authorization: Logged in user %-v", user)
+ return user, nil
+}
+
+func isAuthenticatedTokenRequest(req *http.Request) bool {
+ switch req.URL.Path {
+ case "/login/oauth/userinfo", "/login/oauth/introspect":
+ return true
+ }
+ return false
+}
diff --git a/services/auth/oauth2_test.go b/services/auth/oauth2_test.go
new file mode 100644
index 0000000..c9b4ed0
--- /dev/null
+++ b/services/auth/oauth2_test.go
@@ -0,0 +1,55 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "context"
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/web/middleware"
+ "code.gitea.io/gitea/services/actions"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestUserIDFromToken(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ t.Run("Actions JWT", func(t *testing.T) {
+ const RunningTaskID = 47
+ token, err := actions.CreateAuthorizationToken(RunningTaskID, 1, 2)
+ require.NoError(t, err)
+
+ ds := make(middleware.ContextData)
+
+ o := OAuth2{}
+ uid := o.userIDFromToken(context.Background(), token, ds)
+ assert.Equal(t, int64(user_model.ActionsUserID), uid)
+ assert.Equal(t, true, ds["IsActionsToken"])
+ assert.Equal(t, ds["ActionsTaskID"], int64(RunningTaskID))
+ })
+}
+
+func TestCheckTaskIsRunning(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ cases := map[string]struct {
+ TaskID int64
+ Expected bool
+ }{
+ "Running": {TaskID: 47, Expected: true},
+ "Missing": {TaskID: 1, Expected: false},
+ "Cancelled": {TaskID: 46, Expected: false},
+ }
+
+ for name := range cases {
+ c := cases[name]
+ t.Run(name, func(t *testing.T) {
+ actual := CheckTaskIsRunning(context.Background(), c.TaskID)
+ assert.Equal(t, c.Expected, actual)
+ })
+ }
+}
diff --git a/services/auth/reverseproxy.go b/services/auth/reverseproxy.go
new file mode 100644
index 0000000..8a5a5dc
--- /dev/null
+++ b/services/auth/reverseproxy.go
@@ -0,0 +1,179 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "net/http"
+ "strings"
+
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/web/middleware"
+
+ gouuid "github.com/google/uuid"
+)
+
+// Ensure the struct implements the interface.
+var (
+ _ Method = &ReverseProxy{}
+)
+
+// ReverseProxyMethodName is the constant name of the ReverseProxy authentication method
+const ReverseProxyMethodName = "reverse_proxy"
+
+// ReverseProxy implements the Auth interface, but actually relies on
+// a reverse proxy for authentication of users.
+// On successful authentication the proxy is expected to populate the username in the
+// "setting.ReverseProxyAuthUser" header. Optionally it can also populate the email of the
+// user in the "setting.ReverseProxyAuthEmail" header.
+type ReverseProxy struct{}
+
+// getUserName extracts the username from the "setting.ReverseProxyAuthUser" header
+func (r *ReverseProxy) getUserName(req *http.Request) string {
+ return strings.TrimSpace(req.Header.Get(setting.ReverseProxyAuthUser))
+}
+
+// Name represents the name of auth method
+func (r *ReverseProxy) Name() string {
+ return ReverseProxyMethodName
+}
+
+// getUserFromAuthUser extracts the username from the "setting.ReverseProxyAuthUser" header
+// of the request and returns the corresponding user object for that name.
+// Verification of header data is not performed as it should have already been done by
+// the reverse proxy.
+// If a username is available in the "setting.ReverseProxyAuthUser" header an existing
+// user object is returned (populated with username or email found in header).
+// Returns nil if header is empty.
+func (r *ReverseProxy) getUserFromAuthUser(req *http.Request) (*user_model.User, error) {
+ username := r.getUserName(req)
+ if len(username) == 0 {
+ return nil, nil
+ }
+ log.Trace("ReverseProxy Authorization: Found username: %s", username)
+
+ user, err := user_model.GetUserByName(req.Context(), username)
+ if err != nil {
+ if !user_model.IsErrUserNotExist(err) || !r.isAutoRegisterAllowed() {
+ log.Error("GetUserByName: %v", err)
+ return nil, err
+ }
+ user = r.newUser(req)
+ }
+ return user, nil
+}
+
+// getEmail extracts the email from the "setting.ReverseProxyAuthEmail" header
+func (r *ReverseProxy) getEmail(req *http.Request) string {
+ return strings.TrimSpace(req.Header.Get(setting.ReverseProxyAuthEmail))
+}
+
+// getUserFromAuthEmail extracts the username from the "setting.ReverseProxyAuthEmail" header
+// of the request and returns the corresponding user object for that email.
+// Verification of header data is not performed as it should have already been done by
+// the reverse proxy.
+// If an email is available in the "setting.ReverseProxyAuthEmail" header an existing
+// user object is returned (populated with the email found in header).
+// Returns nil if header is empty or if "setting.EnableReverseProxyEmail" is disabled.
+func (r *ReverseProxy) getUserFromAuthEmail(req *http.Request) *user_model.User {
+ if !setting.Service.EnableReverseProxyEmail {
+ return nil
+ }
+ email := r.getEmail(req)
+ if len(email) == 0 {
+ return nil
+ }
+ log.Trace("ReverseProxy Authorization: Found email: %s", email)
+
+ user, err := user_model.GetUserByEmail(req.Context(), email)
+ if err != nil {
+ // Do not allow auto-registration, we don't have a username here
+ if !user_model.IsErrUserNotExist(err) {
+ log.Error("GetUserByEmail: %v", err)
+ }
+ return nil
+ }
+ return user
+}
+
+// Verify attempts to load a user object based on headers sent by the reverse proxy.
+// First it will attempt to load it based on the username (see docs for getUserFromAuthUser),
+// and failing that it will attempt to load it based on the email (see docs for getUserFromAuthEmail).
+// Returns nil if the headers are empty or the user is not found.
+func (r *ReverseProxy) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) (*user_model.User, error) {
+ user, err := r.getUserFromAuthUser(req)
+ if err != nil {
+ return nil, err
+ }
+ if user == nil {
+ user = r.getUserFromAuthEmail(req)
+ if user == nil {
+ return nil, nil
+ }
+ }
+
+ // Make sure requests to API paths, attachment downloads, git and LFS do not create a new session
+ if !middleware.IsAPIPath(req) && !isAttachmentDownload(req) && !isGitRawOrAttachOrLFSPath(req) {
+ if sess != nil && (sess.Get("uid") == nil || sess.Get("uid").(int64) != user.ID) {
+ handleSignIn(w, req, sess, user)
+ }
+ }
+ store.GetData()["IsReverseProxy"] = true
+
+ log.Trace("ReverseProxy Authorization: Logged in user %-v", user)
+ return user, nil
+}
+
+// isAutoRegisterAllowed checks if EnableReverseProxyAutoRegister setting is true
+func (r *ReverseProxy) isAutoRegisterAllowed() bool {
+ return setting.Service.EnableReverseProxyAutoRegister
+}
+
+// newUser creates a new user object for the purpose of automatic registration
+// and populates its name and email with the information present in request headers.
+func (r *ReverseProxy) newUser(req *http.Request) *user_model.User {
+ username := r.getUserName(req)
+ if len(username) == 0 {
+ return nil
+ }
+
+ email := gouuid.New().String() + "@localhost"
+ if setting.Service.EnableReverseProxyEmail {
+ webAuthEmail := req.Header.Get(setting.ReverseProxyAuthEmail)
+ if len(webAuthEmail) > 0 {
+ email = webAuthEmail
+ }
+ }
+
+ var fullname string
+ if setting.Service.EnableReverseProxyFullName {
+ fullname = req.Header.Get(setting.ReverseProxyAuthFullName)
+ }
+
+ user := &user_model.User{
+ Name: username,
+ Email: email,
+ FullName: fullname,
+ }
+
+ overwriteDefault := user_model.CreateUserOverwriteOptions{
+ IsActive: optional.Some(true),
+ }
+
+ // The first user created should be an admin.
+ if user_model.CountUsers(req.Context(), nil) == 0 {
+ user.IsAdmin = true
+ }
+
+ if err := user_model.CreateUser(req.Context(), user, &overwriteDefault); err != nil {
+ // FIXME: should I create a system notice?
+ log.Error("CreateUser: %v", err)
+ return nil
+ }
+
+ return user
+}
diff --git a/services/auth/reverseproxy_test.go b/services/auth/reverseproxy_test.go
new file mode 100644
index 0000000..7f1b2a7
--- /dev/null
+++ b/services/auth/reverseproxy_test.go
@@ -0,0 +1,67 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "net/http"
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/test"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestReverseProxyAuth(t *testing.T) {
+ defer test.MockVariableValue(&setting.Service.EnableReverseProxyEmail, true)()
+ defer test.MockVariableValue(&setting.Service.EnableReverseProxyFullName, true)()
+ defer test.MockVariableValue(&setting.Service.EnableReverseProxyFullName, true)()
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ require.NoError(t, db.TruncateBeans(db.DefaultContext, &user_model.User{}))
+ require.EqualValues(t, 0, user_model.CountUsers(db.DefaultContext, nil))
+
+ t.Run("First user should be admin", func(t *testing.T) {
+ req, err := http.NewRequest("GET", "/", nil)
+ require.NoError(t, err)
+
+ req.Header.Add(setting.ReverseProxyAuthUser, "Edgar")
+ req.Header.Add(setting.ReverseProxyAuthFullName, "Edgar Allan Poe")
+ req.Header.Add(setting.ReverseProxyAuthEmail, "edgar@example.org")
+
+ rp := &ReverseProxy{}
+ user := rp.newUser(req)
+
+ require.EqualValues(t, 1, user_model.CountUsers(db.DefaultContext, nil))
+ unittest.AssertExistsAndLoadBean(t, &user_model.User{Email: "edgar@example.org", Name: "Edgar", LowerName: "edgar", FullName: "Edgar Allan Poe", IsAdmin: true})
+ require.EqualValues(t, "edgar@example.org", user.Email)
+ require.EqualValues(t, "Edgar", user.Name)
+ require.EqualValues(t, "edgar", user.LowerName)
+ require.EqualValues(t, "Edgar Allan Poe", user.FullName)
+ require.True(t, user.IsAdmin)
+ })
+
+ t.Run("Second user shouldn't be admin", func(t *testing.T) {
+ req, err := http.NewRequest("GET", "/", nil)
+ require.NoError(t, err)
+
+ req.Header.Add(setting.ReverseProxyAuthUser, " Gusted ")
+ req.Header.Add(setting.ReverseProxyAuthFullName, "â¤â€¿â¤")
+ req.Header.Add(setting.ReverseProxyAuthEmail, "gusted@example.org")
+
+ rp := &ReverseProxy{}
+ user := rp.newUser(req)
+
+ require.EqualValues(t, 2, user_model.CountUsers(db.DefaultContext, nil))
+ unittest.AssertExistsAndLoadBean(t, &user_model.User{Email: "gusted@example.org", Name: "Gusted", LowerName: "gusted", FullName: "â¤â€¿â¤"}, "is_admin = false")
+ require.EqualValues(t, "gusted@example.org", user.Email)
+ require.EqualValues(t, "Gusted", user.Name)
+ require.EqualValues(t, "gusted", user.LowerName)
+ require.EqualValues(t, "â¤â€¿â¤", user.FullName)
+ require.False(t, user.IsAdmin)
+ })
+}
diff --git a/services/auth/session.go b/services/auth/session.go
new file mode 100644
index 0000000..35d97e4
--- /dev/null
+++ b/services/auth/session.go
@@ -0,0 +1,60 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "net/http"
+
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+)
+
+// Ensure the struct implements the interface.
+var (
+ _ Method = &Session{}
+)
+
+// Session checks if there is a user uid stored in the session and returns the user
+// object for that uid.
+type Session struct{}
+
+// Name represents the name of auth method
+func (s *Session) Name() string {
+ return "session"
+}
+
+// Verify checks if there is a user uid stored in the session and returns the user
+// object for that uid.
+// Returns nil if there is no user uid stored in the session.
+func (s *Session) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) (*user_model.User, error) {
+ if sess == nil {
+ return nil, nil
+ }
+
+ // Get user ID
+ uid := sess.Get("uid")
+ if uid == nil {
+ return nil, nil
+ }
+ log.Trace("Session Authorization: Found user[%d]", uid)
+
+ id, ok := uid.(int64)
+ if !ok {
+ return nil, nil
+ }
+
+ // Get user object
+ user, err := user_model.GetUserByID(req.Context(), id)
+ if err != nil {
+ if !user_model.IsErrUserNotExist(err) {
+ log.Error("GetUserByID: %v", err)
+ // Return the err as-is to keep the current signed-in session, in case the err is
+ // something like context.Canceled. Otherwise a non-existing user (nil, nil) would
+ // make the caller clear the signed-in session.
+ return nil, err
+ }
+ return nil, nil
+ }
+
+ log.Trace("Session Authorization: Logged in user %-v", user)
+ return user, nil
+}
diff --git a/services/auth/signin.go b/services/auth/signin.go
new file mode 100644
index 0000000..e116a08
--- /dev/null
+++ b/services/auth/signin.go
@@ -0,0 +1,128 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "context"
+ "strings"
+
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/services/auth/source/oauth2"
+ "code.gitea.io/gitea/services/auth/source/smtp"
+
+ _ "code.gitea.io/gitea/services/auth/source/db" // register the sources (and below)
+ _ "code.gitea.io/gitea/services/auth/source/ldap" // register the ldap source
+ _ "code.gitea.io/gitea/services/auth/source/pam" // register the pam source
+ _ "code.gitea.io/gitea/services/auth/source/sspi" // register the sspi source
+)
+
+// UserSignIn validates user name and password.
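+// It first tries the login source of the matched user; if no local user
+// matches, it falls back to trying every active authentication source in turn.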
+func UserSignIn(ctx context.Context, username, password string) (*user_model.User, *auth.Source, error) {
+ var user *user_model.User
+ isEmail := false
+ if strings.Contains(username, "@") {
+ isEmail = true
+ emailAddress := user_model.EmailAddress{LowerEmail: strings.ToLower(strings.TrimSpace(username))}
+ // check same email
+ has, err := db.GetEngine(ctx).Get(&emailAddress)
+ if err != nil {
+ return nil, nil, err
+ }
+ if has {
+ if !emailAddress.IsActivated {
+ return nil, nil, user_model.ErrEmailAddressNotExist{
+ Email: username,
+ }
+ }
+ user = &user_model.User{ID: emailAddress.UID}
+ }
+ } else {
+ trimmedUsername := strings.TrimSpace(username)
+ if len(trimmedUsername) == 0 {
+ return nil, nil, user_model.ErrUserNotExist{Name: username}
+ }
+
+ user = &user_model.User{LowerName: strings.ToLower(trimmedUsername)}
+ }
+
+ if user != nil {
+ hasUser, err := user_model.GetUser(ctx, user)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ if hasUser {
+ source, err := auth.GetSourceByID(ctx, user.LoginSource)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ if !source.IsActive {
+ return nil, nil, oauth2.ErrAuthSourceNotActivated
+ }
+
+ authenticator, ok := source.Cfg.(PasswordAuthenticator)
+ if !ok {
+ return nil, nil, smtp.ErrUnsupportedLoginType
+ }
+
+ user, err := authenticator.Authenticate(ctx, user, user.LoginName, password)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ // WARN: DON'T check user.IsActive, that will be checked on reqSign so that
+ // the user can be hinted to resend the confirmation email.
+ if user.ProhibitLogin {
+ return nil, nil, user_model.ErrUserProhibitLogin{UID: user.ID, Name: user.Name}
+ }
+
+ return user, source, nil
+ }
+ }
+
+ sources, err := db.Find[auth.Source](ctx, auth.FindSourcesOptions{
+ IsActive: optional.Some(true),
+ })
+ if err != nil {
+ return nil, nil, err
+ }
+
+ for _, source := range sources {
+ if !source.IsActive {
+ // don't try to authenticate non-active sources
+ continue
+ }
+
+ authenticator, ok := source.Cfg.(PasswordAuthenticator)
+ if !ok {
+ continue
+ }
+
+ authUser, err := authenticator.Authenticate(ctx, nil, username, password)
+
+ if err == nil {
+ if !authUser.ProhibitLogin {
+ return authUser, source, nil
+ }
+ err = user_model.ErrUserProhibitLogin{UID: authUser.ID, Name: authUser.Name}
+ }
+
+ if user_model.IsErrUserNotExist(err) {
+ log.Debug("Failed to login '%s' via '%s': %v", username, source.Name, err)
+ } else {
+ log.Warn("Failed to login '%s' via '%s': %v", username, source.Name, err)
+ }
+ }
+
+ if isEmail {
+ return nil, nil, user_model.ErrEmailAddressNotExist{Email: username}
+ }
+
+ return nil, nil, user_model.ErrUserNotExist{Name: username}
+}
diff --git a/services/auth/source.go b/services/auth/source.go
new file mode 100644
index 0000000..69b71a6
--- /dev/null
+++ b/services/auth/source.go
@@ -0,0 +1,42 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+)
+
+// DeleteSource deletes an AuthSource record in the DB.
+func DeleteSource(ctx context.Context, source *auth.Source) error {
+ count, err := db.GetEngine(ctx).Count(&user_model.User{LoginSource: source.ID})
+ if err != nil {
+ return err
+ } else if count > 0 {
+ return auth.ErrSourceInUse{
+ ID: source.ID,
+ }
+ }
+
+ count, err = db.GetEngine(ctx).Count(&user_model.ExternalLoginUser{LoginSourceID: source.ID})
+ if err != nil {
+ return err
+ } else if count > 0 {
+ return auth.ErrSourceInUse{
+ ID: source.ID,
+ }
+ }
+
+ if registerableSource, ok := source.Cfg.(auth.RegisterableSource); ok {
+ if err := registerableSource.UnregisterSource(); err != nil {
+ return err
+ }
+ }
+
+ _, err = db.GetEngine(ctx).ID(source.ID).Delete(new(auth.Source))
+ return err
+}
diff --git a/services/auth/source/db/assert_interface_test.go b/services/auth/source/db/assert_interface_test.go
new file mode 100644
index 0000000..62387c7
--- /dev/null
+++ b/services/auth/source/db/assert_interface_test.go
@@ -0,0 +1,20 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package db_test
+
+import (
+ auth_model "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/services/auth"
+ "code.gitea.io/gitea/services/auth/source/db"
+)
+
+// This test file exists to assert that our Source exposes the interfaces that we expect
+// It tightly binds the interfaces and implementation without breaking go import cycles
+
+type sourceInterface interface {
+ auth.PasswordAuthenticator
+ auth_model.Config
+}
+
+var _ (sourceInterface) = &db.Source{}
diff --git a/services/auth/source/db/authenticate.go b/services/auth/source/db/authenticate.go
new file mode 100644
index 0000000..8160141
--- /dev/null
+++ b/services/auth/source/db/authenticate.go
@@ -0,0 +1,87 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package db
+
+import (
+ "context"
+ "fmt"
+
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+)
+
+// ErrUserPasswordNotSet represents a "ErrUserPasswordNotSet" kind of error.
+type ErrUserPasswordNotSet struct {
+ UID int64
+ Name string
+}
+
+func (err ErrUserPasswordNotSet) Error() string {
+ return fmt.Sprintf("user's password isn't set [uid: %d, name: %s]", err.UID, err.Name)
+}
+
+// Unwrap unwraps this error as an ErrInvalidArgument error
+func (err ErrUserPasswordNotSet) Unwrap() error {
+ return util.ErrInvalidArgument
+}
+
+// ErrUserPasswordInvalid represents a "ErrUserPasswordInvalid" kind of error.
+type ErrUserPasswordInvalid struct {
+ UID int64
+ Name string
+}
+
+func (err ErrUserPasswordInvalid) Error() string {
+ return fmt.Sprintf("user's password is invalid [uid: %d, name: %s]", err.UID, err.Name)
+}
+
+// Unwrap unwraps this error as an ErrInvalidArgument error
+func (err ErrUserPasswordInvalid) Unwrap() error {
+ return util.ErrInvalidArgument
+}
+
+// Authenticate authenticates the provided user against the DB
+func Authenticate(ctx context.Context, user *user_model.User, login, password string) (*user_model.User, error) {
+ if user == nil {
+ return nil, user_model.ErrUserNotExist{Name: login}
+ }
+
+ if !user.IsPasswordSet() {
+ return nil, ErrUserPasswordNotSet{UID: user.ID, Name: user.Name}
+ } else if !user.ValidatePassword(password) {
+ return nil, ErrUserPasswordInvalid{UID: user.ID, Name: user.Name}
+ }
+
+ // Update the password hash if the server's password hash algorithm has changed,
+ // or when the salt length doesn't match the currently recommended length,
+ // in order to migrate users' salts to a more secure length.
+ if user.PasswdHashAlgo != setting.PasswordHashAlgo || len(user.Salt) != user_model.SaltByteLength*2 {
+ if err := user.SetPassword(password); err != nil {
+ return nil, err
+ }
+ if err := user_model.UpdateUserCols(ctx, user, "passwd", "passwd_hash_algo", "salt"); err != nil {
+ return nil, err
+ }
+ }
+
+ // WARN: DON'T check user.IsActive, that will be checked on reqSign so that
+ // the user can be hinted to resend the confirmation email.
+ if user.ProhibitLogin {
+ return nil, user_model.ErrUserProhibitLogin{
+ UID: user.ID,
+ Name: user.Name,
+ }
+ }
+
+ // attempting to log in as a non-user account
+ if user.Type != user_model.UserTypeIndividual {
+ return nil, user_model.ErrUserProhibitLogin{
+ UID: user.ID,
+ Name: user.Name,
+ }
+ }
+
+ return user, nil
+}
diff --git a/services/auth/source/db/source.go b/services/auth/source/db/source.go
new file mode 100644
index 0000000..bb2270c
--- /dev/null
+++ b/services/auth/source/db/source.go
@@ -0,0 +1,35 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package db
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/auth"
+ user_model "code.gitea.io/gitea/models/user"
+)
+
+// Source is a password authentication service
+type Source struct{}
+
+// FromDB fills up a DB source config from its serialized format (a no-op for the DB source).
+func (source *Source) FromDB(bs []byte) error {
+ return nil
+}
+
+// ToDB exports the config to a byte slice to be saved into the database (this method is just a dummy and does nothing for the DB source)
+func (source *Source) ToDB() ([]byte, error) {
+ return nil, nil
+}
+
+// Authenticate queries if the login/password is valid against the DB,
+// and returns the user on success.
+func (source *Source) Authenticate(ctx context.Context, user *user_model.User, login, password string) (*user_model.User, error) {
+ return Authenticate(ctx, user, login, password)
+}
+
+func init() {
+ auth.RegisterTypeConfig(auth.NoType, &Source{})
+ auth.RegisterTypeConfig(auth.Plain, &Source{})
+}
diff --git a/services/auth/source/ldap/README.md b/services/auth/source/ldap/README.md
new file mode 100644
index 0000000..34c8117
--- /dev/null
+++ b/services/auth/source/ldap/README.md
@@ -0,0 +1,131 @@
+# Gitea LDAP Authentication Module
+
+## About
+
+This authentication module attempts to authorize and authenticate a user
+against an LDAP server. It provides two methods of authentication: LDAP via
+BindDN, and LDAP simple authentication.
+
+LDAP via BindDN functions like most LDAP authentication systems. First, it
+queries the LDAP server using a Bind DN and searches for the user that is
+attempting to sign in. If the user is found, the module attempts to bind to the
+server using the user's supplied credentials. If this succeeds, the user has
+been authenticated, and their account information is retrieved and passed to the
+Gitea login infrastructure.
+
+LDAP simple authentication does not utilize a Bind DN. Instead, it binds
+directly with the LDAP server using the user's supplied credentials. If the bind
+succeeds and no filter rules out the user, the user is authenticated.
+
+LDAP via BindDN is recommended for most users. By using a Bind DN, the server
+can perform authorization by restricting which entries the Bind DN account can
+read. Further, using a Bind DN with reduced permissions can reduce security risk
+in the face of application bugs.
+
+## Usage
+
+To use this module, add an LDAP authentication source via the Authentications
+section in the admin panel. Both the LDAP via BindDN and the simple auth LDAP
+share the following fields:
+
+* Authorization Name **(required)**
+ * A name to assign to the new method of authorization.
+
+* Host **(required)**
+ * The address where the LDAP server can be reached.
+ * Example: mydomain.com
+
+* Port **(required)**
+ * The port to use when connecting to the server.
+ * Example: 636
+
+* Enable TLS Encryption (optional)
+ * Whether to use TLS when connecting to the LDAP server.
+
+* Admin Filter (optional)
+ * An LDAP filter specifying if a user should be given administrator
+ privileges. If a user account passes the filter, the user will be
+ granted administrator privileges.
+ * Example: (objectClass=adminAccount)
+
+* First name attribute (optional)
+ * The attribute of the user's LDAP record containing the user's first name.
+ This will be used to populate their account information.
+ * Example: givenName
+
+* Surname attribute (optional)
+ * The attribute of the user's LDAP record containing the user's surname. This
+ will be used to populate their account information.
+ * Example: sn
+
+* E-mail attribute **(required)**
+ * The attribute of the user's LDAP record containing the user's email
+ address. This will be used to populate their account information.
+ * Example: mail
+
+**LDAP via BindDN** adds the following fields:
+
+* Bind DN (optional)
+ * The DN to bind to the LDAP server with when searching for the user. This
+ may be left blank to perform an anonymous search.
+ * Example: cn=Search,dc=mydomain,dc=com
+
+* Bind Password (optional)
+ * The password for the Bind DN specified above, if any. _Note: The password
+ is stored in plaintext at the server. As such, ensure that your Bind DN
+ has as few privileges as possible._
+
+* User Search Base **(required)**
+ * The LDAP base at which user accounts will be searched for.
+ * Example: ou=Users,dc=mydomain,dc=com
+
+* User Filter **(required)**
+ * An LDAP filter declaring how to find the user record that is attempting to
+ authenticate. The '%[1]s' matching parameter will be substituted with the
+ user's username.
+ * Example: (&(objectClass=posixAccount)(|(uid=%[1]s)(mail=%[1]s)))
+
+**LDAP using simple auth** adds the following fields:
+
+* User DN **(required)**
+ * A template to use as the user's DN. The `%s` matching parameter will be
+ substituted with the user's username.
+ * Example: cn=%s,ou=Users,dc=mydomain,dc=com
+ * Example: uid=%s,ou=Users,dc=mydomain,dc=com
+
+* User Search Base (optional)
+ * The LDAP base at which user accounts will be searched for.
+ * Example: ou=Users,dc=mydomain,dc=com
+
+* User Filter **(required)**
+ * An LDAP filter declaring when a user should be allowed to log in. The `%[1]s`
+ matching parameter will be substituted with the user's username.
+ * Example: (&(objectClass=posixAccount)(|(cn=%[1]s)(mail=%[1]s)))
+ * Example: (&(objectClass=posixAccount)(|(uid=%[1]s)(mail=%[1]s)))
+
+**Verify group membership in LDAP** uses the following fields:
+
+* Group Search Base (optional)
+ * The LDAP DN used for groups.
+ * Example: ou=group,dc=mydomain,dc=com
+
+* Group Name Filter (optional)
+ * An LDAP filter declaring how to find valid groups in the above DN.
+ * Example: (|(cn=gitea_users)(cn=admins))
+
+* User Attribute in Group (optional)
+ * The user attribute that is used to reference a user in the group object.
+ * Example: uid if the group object contains a member: bender and the user object contains a uid: bender.
+ * Example: dn if the group object contains a member: uid=bender,ou=users,dc=planetexpress,dc=com.
+
+* Group Attribute for User (optional)
+ * The attribute of the group object that lists/contains the group members.
+ * Example: memberUid or member
+
+* Team group map (optional)
+ * Automatically add users to Organization teams, depending on LDAP group memberships.
+ * Note: this function only adds users to teams, it never removes users.
+ * Example: {"cn=MyGroup,cn=groups,dc=example,dc=org": {"MyGiteaOrganization": ["MyGiteaTeam1", "MyGiteaTeam2", ...], ...}, ...}
+
+* Team group map removal (optional)
+ * If set to true, users will be removed from teams if they are not members of the corresponding group.
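+
+For example, a typical BindDN setup against a hypothetical `dc=mydomain,dc=com`
+directory combines the fields above: Host `mydomain.com`, Port `636` with TLS
+enabled, Bind DN `cn=Search,dc=mydomain,dc=com`, User Search Base
+`ou=Users,dc=mydomain,dc=com`, and User Filter
+`(&(objectClass=posixAccount)(|(uid=%[1]s)(mail=%[1]s)))`.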
diff --git a/services/auth/source/ldap/assert_interface_test.go b/services/auth/source/ldap/assert_interface_test.go
new file mode 100644
index 0000000..3334768
--- /dev/null
+++ b/services/auth/source/ldap/assert_interface_test.go
@@ -0,0 +1,27 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package ldap_test
+
+import (
+ auth_model "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/services/auth"
+ "code.gitea.io/gitea/services/auth/source/ldap"
+)
+
+// This test file exists to assert that our Source exposes the interfaces that we expect
+// It tightly binds the interfaces and implementation without breaking go import cycles
+
+type sourceInterface interface {
+ auth.PasswordAuthenticator
+ auth.SynchronizableSource
+ auth.LocalTwoFASkipper
+ auth_model.SSHKeyProvider
+ auth_model.Config
+ auth_model.SkipVerifiable
+ auth_model.HasTLSer
+ auth_model.UseTLSer
+ auth_model.SourceSettable
+}
+
+var _ (sourceInterface) = &ldap.Source{}
diff --git a/services/auth/source/ldap/security_protocol.go b/services/auth/source/ldap/security_protocol.go
new file mode 100644
index 0000000..af83ce1
--- /dev/null
+++ b/services/auth/source/ldap/security_protocol.go
@@ -0,0 +1,31 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package ldap
+
+// SecurityProtocol protocol type
+type SecurityProtocol int
+
+// Note: new type must be added at the end of list to maintain compatibility.
+const (
+ SecurityProtocolUnencrypted SecurityProtocol = iota
+ SecurityProtocolLDAPS
+ SecurityProtocolStartTLS
+)
+
+// String returns the name of the SecurityProtocol
+func (s SecurityProtocol) String() string {
+ return SecurityProtocolNames[s]
+}
+
+// Int returns the int value of the SecurityProtocol
+func (s SecurityProtocol) Int() int {
+ return int(s)
+}
+
+// SecurityProtocolNames contains the name of SecurityProtocol values.
+var SecurityProtocolNames = map[SecurityProtocol]string{
+ SecurityProtocolUnencrypted: "Unencrypted",
+ SecurityProtocolLDAPS: "LDAPS",
+ SecurityProtocolStartTLS: "StartTLS",
+}
diff --git a/services/auth/source/ldap/source.go b/services/auth/source/ldap/source.go
new file mode 100644
index 0000000..ba407b3
--- /dev/null
+++ b/services/auth/source/ldap/source.go
@@ -0,0 +1,122 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package ldap
+
+import (
+ "strings"
+
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/secret"
+ "code.gitea.io/gitea/modules/setting"
+)
+
+// .____ ________ _____ __________
+// | | \______ \ / _ \\______ \
+// | | | | \ / /_\ \| ___/
+// | |___ | ` \/ | \ |
+// |_______ \/_______ /\____|__ /____|
+// \/ \/ \/
+
+// Package ldap provide functions & structure to query a LDAP ldap directory
+// For now, it's mainly tested again an MS Active Directory service, see README.md for more information
+
+// Source Basic LDAP authentication service
+type Source struct {
+ Name string // canonical name (i.e. corporate.ad)
+ Host string // LDAP host
+ Port int // port number
+ SecurityProtocol SecurityProtocol
+ SkipVerify bool
+ BindDN string // DN to bind with
+ BindPasswordEncrypt string // Encrypted Bind DN password
+ BindPassword string // Bind DN password
+ UserBase string // Base search path for users
+ UserDN string // Template for the DN of the user for simple auth
+ DefaultDomainName string // Domain name to use if none is supplied; defaults to "localhost.local"
+ AttributeUsername string // Username attribute
+ AttributeName string // First name attribute
+ AttributeSurname string // Surname attribute
+ AttributeMail string // E-mail attribute
+ AttributesInBind bool // fetch attributes in bind context (not user)
+ AttributeSSHPublicKey string // LDAP SSH Public Key attribute
+ AttributeAvatar string
+ SearchPageSize uint32 // Search with paging page size
+ Filter string // Query filter to validate entry
+ AdminFilter string // Query filter to check if user is admin
+ RestrictedFilter string // Query filter to check if user is restricted
+ Enabled bool // if this source is enabled
+ AllowDeactivateAll bool // Allow an empty search response to deactivate all users from this source
+ GroupsEnabled bool // if the group checking is enabled
+ GroupDN string // Group Search Base
+ GroupFilter string // Group Name Filter
+ GroupMemberUID string // Group Attribute containing array of UserUID
+ GroupTeamMap string // Map LDAP groups to teams
+ GroupTeamMapRemoval bool // Remove user from teams which are synchronized and user is not a member of the corresponding LDAP group
+ UserUID string // User Attribute listed in Group
+ SkipLocalTwoFA bool `json:",omitempty"` // Skip Local 2fa for users authenticated with this source
+
+ // reference to the authSource
+ authSource *auth.Source
+}
+
+// FromDB fills up an LDAPConfig from its serialized format.
+func (source *Source) FromDB(bs []byte) error {
+ err := json.UnmarshalHandleDoubleEncode(bs, &source)
+ if err != nil {
+ return err
+ }
+ if source.BindPasswordEncrypt != "" {
+ source.BindPassword, err = secret.DecryptSecret(setting.SecretKey, source.BindPasswordEncrypt)
+ source.BindPasswordEncrypt = ""
+ }
+ return err
+}
+
+// ToDB exports an LDAPConfig to a serialized format.
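+// The bind password is encrypted with setting.SecretKey before serialization
+// and is never stored in cleartext.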
+func (source *Source) ToDB() ([]byte, error) {
+ var err error
+ source.BindPasswordEncrypt, err = secret.EncryptSecret(setting.SecretKey, source.BindPassword)
+ if err != nil {
+ return nil, err
+ }
+ source.BindPassword = ""
+ return json.Marshal(source)
+}
+
+// SecurityProtocolName returns the name of configured security
+// protocol.
+func (source *Source) SecurityProtocolName() string {
+ return SecurityProtocolNames[source.SecurityProtocol]
+}
+
+// IsSkipVerify returns if SkipVerify is set
+func (source *Source) IsSkipVerify() bool {
+ return source.SkipVerify
+}
+
+// HasTLS returns if HasTLS
+func (source *Source) HasTLS() bool {
+ return source.SecurityProtocol > SecurityProtocolUnencrypted
+}
+
+// UseTLS returns if UseTLS
+func (source *Source) UseTLS() bool {
+ return source.SecurityProtocol != SecurityProtocolUnencrypted
+}
+
+// ProvidesSSHKeys returns if this source provides SSH Keys
+func (source *Source) ProvidesSSHKeys() bool {
+ return len(strings.TrimSpace(source.AttributeSSHPublicKey)) > 0
+}
+
+// SetAuthSource sets the related AuthSource
+func (source *Source) SetAuthSource(authSource *auth.Source) {
+ source.authSource = authSource
+}
+
+func init() {
+ auth.RegisterTypeConfig(auth.LDAP, &Source{})
+ auth.RegisterTypeConfig(auth.DLDAP, &Source{})
+}
diff --git a/services/auth/source/ldap/source_authenticate.go b/services/auth/source/ldap/source_authenticate.go
new file mode 100644
index 0000000..68ecd16
--- /dev/null
+++ b/services/auth/source/ldap/source_authenticate.go
@@ -0,0 +1,124 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package ldap
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/auth"
+ user_model "code.gitea.io/gitea/models/user"
+ auth_module "code.gitea.io/gitea/modules/auth"
+ "code.gitea.io/gitea/modules/optional"
+ source_service "code.gitea.io/gitea/services/auth/source"
+ user_service "code.gitea.io/gitea/services/user"
+)
+
+// Authenticate queries if login/password is valid against the LDAP directory pool,
+// and creates a local user on success when enabled.
+func (source *Source) Authenticate(ctx context.Context, user *user_model.User, userName, password string) (*user_model.User, error) {
+ loginName := userName
+ if user != nil {
+ loginName = user.LoginName
+ }
+ sr := source.SearchEntry(loginName, password, source.authSource.Type == auth.DLDAP)
+ if sr == nil {
+ // User not in LDAP, do nothing
+ return nil, user_model.ErrUserNotExist{Name: loginName}
+ }
+ // Fallback.
+ if len(sr.Username) == 0 {
+ sr.Username = userName
+ }
+ if len(sr.Mail) == 0 {
+ sr.Mail = fmt.Sprintf("%s@localhost.local", sr.Username)
+ }
+ isAttributeSSHPublicKeySet := len(strings.TrimSpace(source.AttributeSSHPublicKey)) > 0
+
+	// Update the user's admin flag if the user already exists
+ if isExist, err := user_model.IsUserExist(ctx, 0, sr.Username); err != nil {
+ return nil, err
+ } else if isExist {
+ if user == nil {
+ user, err = user_model.GetUserByName(ctx, sr.Username)
+ if err != nil {
+ return nil, err
+ }
+ }
+ if user != nil && !user.ProhibitLogin {
+ opts := &user_service.UpdateOptions{}
+ if len(source.AdminFilter) > 0 && user.IsAdmin != sr.IsAdmin {
+ // Change existing admin flag only if AdminFilter option is set
+ opts.IsAdmin = optional.Some(sr.IsAdmin)
+ }
+ if !sr.IsAdmin && len(source.RestrictedFilter) > 0 && user.IsRestricted != sr.IsRestricted {
+ // Change existing restricted flag only if RestrictedFilter option is set
+ opts.IsRestricted = optional.Some(sr.IsRestricted)
+ }
+ if opts.IsAdmin.Has() || opts.IsRestricted.Has() {
+ if err := user_service.UpdateUser(ctx, user, opts); err != nil {
+ return nil, err
+ }
+ }
+ }
+ }
+
+ if user != nil {
+ if isAttributeSSHPublicKeySet && asymkey_model.SynchronizePublicKeys(ctx, user, source.authSource, sr.SSHPublicKey) {
+ if err := asymkey_model.RewriteAllPublicKeys(ctx); err != nil {
+ return user, err
+ }
+ }
+ } else {
+ user = &user_model.User{
+ LowerName: strings.ToLower(sr.Username),
+ Name: sr.Username,
+ FullName: composeFullName(sr.Name, sr.Surname, sr.Username),
+ Email: sr.Mail,
+ LoginType: source.authSource.Type,
+ LoginSource: source.authSource.ID,
+ LoginName: userName,
+ IsAdmin: sr.IsAdmin,
+ }
+ overwriteDefault := &user_model.CreateUserOverwriteOptions{
+ IsRestricted: optional.Some(sr.IsRestricted),
+ IsActive: optional.Some(true),
+ }
+
+ err := user_model.CreateUser(ctx, user, overwriteDefault)
+ if err != nil {
+ return user, err
+ }
+
+ if isAttributeSSHPublicKeySet && asymkey_model.AddPublicKeysBySource(ctx, user, source.authSource, sr.SSHPublicKey) {
+ if err := asymkey_model.RewriteAllPublicKeys(ctx); err != nil {
+ return user, err
+ }
+ }
+ if len(source.AttributeAvatar) > 0 {
+ if err := user_service.UploadAvatar(ctx, user, sr.Avatar); err != nil {
+ return user, err
+ }
+ }
+ }
+
+ if source.GroupsEnabled && (source.GroupTeamMap != "" || source.GroupTeamMapRemoval) {
+ groupTeamMapping, err := auth_module.UnmarshalGroupTeamMapping(source.GroupTeamMap)
+ if err != nil {
+ return user, err
+ }
+ if err := source_service.SyncGroupsToTeams(ctx, user, sr.Groups, groupTeamMapping, source.GroupTeamMapRemoval); err != nil {
+ return user, err
+ }
+ }
+
+ return user, nil
+}
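+
+// The group-team mapping consumed above is a JSON document; an illustrative
+// sketch of its shape (the DN, organization and team names are assumptions):
+//
+//	{"cn=dev,ou=groups,dc=example,dc=org": {"MyOrg": ["MyTeam1", "MyTeam2"]}}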
+
+// IsSkipLocalTwoFA returns whether this source should skip local 2FA for password authentication
+func (source *Source) IsSkipLocalTwoFA() bool {
+ return source.SkipLocalTwoFA
+}
diff --git a/services/auth/source/ldap/source_search.go b/services/auth/source/ldap/source_search.go
new file mode 100644
index 0000000..2a61386
--- /dev/null
+++ b/services/auth/source/ldap/source_search.go
@@ -0,0 +1,516 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package ldap
+
+import (
+ "crypto/tls"
+ "fmt"
+ "net"
+ "strconv"
+ "strings"
+
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/log"
+
+ "github.com/go-ldap/ldap/v3"
+)
+
+// SearchResult contains the user data returned by an LDAP search
+type SearchResult struct {
+ Username string // Username
+ Name string // Name
+ Surname string // Surname
+ Mail string // E-mail address
+ SSHPublicKey []string // SSH Public Key
+ IsAdmin bool // if user is administrator
+ IsRestricted bool // if user is restricted
+ LowerName string // LowerName
+ Avatar []byte
+ Groups container.Set[string]
+}
+
+func (source *Source) sanitizedUserQuery(username string) (string, bool) {
+ // See http://tools.ietf.org/search/rfc4515
+ badCharacters := "\x00()*\\"
+ if strings.ContainsAny(username, badCharacters) {
+ log.Debug("'%s' contains invalid query characters. Aborting.", username)
+ return "", false
+ }
+
+ return fmt.Sprintf(source.Filter, username), true
+}
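+
+// For example, with an illustrative Filter of "(&(objectClass=posixAccount)(uid=%s))",
+// sanitizedUserQuery("jdoe") yields "(&(objectClass=posixAccount)(uid=jdoe))",
+// while an input such as "jdoe)(uid=*" is rejected because it contains filter
+// metacharacters.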
+
+func (source *Source) sanitizedUserDN(username string) (string, bool) {
+ // See http://tools.ietf.org/search/rfc4514: "special characters"
+ badCharacters := "\x00()*\\,='\"#+;<>"
+ if strings.ContainsAny(username, badCharacters) {
+ log.Debug("'%s' contains invalid DN characters. Aborting.", username)
+ return "", false
+ }
+
+ return fmt.Sprintf(source.UserDN, username), true
+}
+
+func (source *Source) sanitizedGroupFilter(group string) (string, bool) {
+ // See http://tools.ietf.org/search/rfc4515
+ badCharacters := "\x00*\\"
+ if strings.ContainsAny(group, badCharacters) {
+ log.Trace("Group filter invalid query characters: %s", group)
+ return "", false
+ }
+
+ return group, true
+}
+
+func (source *Source) sanitizedGroupDN(groupDn string) (string, bool) {
+ // See http://tools.ietf.org/search/rfc4514: "special characters"
+ badCharacters := "\x00()*\\'\"#+;<>"
+ if strings.ContainsAny(groupDn, badCharacters) || strings.HasPrefix(groupDn, " ") || strings.HasSuffix(groupDn, " ") {
+ log.Trace("Group DN contains invalid query characters: %s", groupDn)
+ return "", false
+ }
+
+ return groupDn, true
+}
+
+func (source *Source) findUserDN(l *ldap.Conn, name string) (string, bool) {
+ log.Trace("Search for LDAP user: %s", name)
+
+ // A search for the user.
+ userFilter, ok := source.sanitizedUserQuery(name)
+ if !ok {
+ return "", false
+ }
+
+ log.Trace("Searching for DN using filter %s and base %s", userFilter, source.UserBase)
+ search := ldap.NewSearchRequest(
+ source.UserBase, ldap.ScopeWholeSubtree, ldap.NeverDerefAliases, 0, 0,
+ false, userFilter, []string{}, nil)
+
+ // Ensure we found a user
+ sr, err := l.Search(search)
+ if err != nil || len(sr.Entries) < 1 {
+ log.Debug("Failed search using filter[%s]: %v", userFilter, err)
+ return "", false
+ } else if len(sr.Entries) > 1 {
+ log.Debug("Filter '%s' returned more than one user.", userFilter)
+ return "", false
+ }
+
+ userDN := sr.Entries[0].DN
+ if userDN == "" {
+ log.Error("LDAP search was successful, but found no DN!")
+ return "", false
+ }
+
+ return userDN, true
+}
+
+func dial(source *Source) (*ldap.Conn, error) {
+	log.Trace("Dialing LDAP: security protocol %v, skip TLS verify: %v", source.SecurityProtocol, source.SkipVerify)
+
+ tlsConfig := &tls.Config{
+ ServerName: source.Host,
+ InsecureSkipVerify: source.SkipVerify,
+ }
+
+ if source.SecurityProtocol == SecurityProtocolLDAPS {
+ return ldap.DialTLS("tcp", net.JoinHostPort(source.Host, strconv.Itoa(source.Port)), tlsConfig)
+ }
+
+ conn, err := ldap.Dial("tcp", net.JoinHostPort(source.Host, strconv.Itoa(source.Port)))
+ if err != nil {
+ return nil, fmt.Errorf("error during Dial: %w", err)
+ }
+
+ if source.SecurityProtocol == SecurityProtocolStartTLS {
+ if err = conn.StartTLS(tlsConfig); err != nil {
+ conn.Close()
+ return nil, fmt.Errorf("error during StartTLS: %w", err)
+ }
+ }
+
+ return conn, nil
+}
+
+func bindUser(l *ldap.Conn, userDN, passwd string) error {
+ log.Trace("Binding with userDN: %s", userDN)
+ err := l.Bind(userDN, passwd)
+ if err != nil {
+ log.Debug("LDAP auth. failed for %s, reason: %v", userDN, err)
+ return err
+ }
+ log.Trace("Bound successfully with userDN: %s", userDN)
+ return err
+}
+
+func checkAdmin(l *ldap.Conn, ls *Source, userDN string) bool {
+ if len(ls.AdminFilter) == 0 {
+ return false
+ }
+ log.Trace("Checking admin with filter %s and base %s", ls.AdminFilter, userDN)
+ search := ldap.NewSearchRequest(
+ userDN, ldap.ScopeWholeSubtree, ldap.NeverDerefAliases, 0, 0, false, ls.AdminFilter,
+ []string{ls.AttributeName},
+ nil)
+
+ sr, err := l.Search(search)
+
+ if err != nil {
+ log.Error("LDAP Admin Search with filter %s for %s failed unexpectedly! (%v)", ls.AdminFilter, userDN, err)
+ } else if len(sr.Entries) < 1 {
+ log.Trace("LDAP Admin Search found no matching entries.")
+ } else {
+ return true
+ }
+ return false
+}
+
+func checkRestricted(l *ldap.Conn, ls *Source, userDN string) bool {
+ if len(ls.RestrictedFilter) == 0 {
+ return false
+ }
+ if ls.RestrictedFilter == "*" {
+ return true
+ }
+ log.Trace("Checking restricted with filter %s and base %s", ls.RestrictedFilter, userDN)
+ search := ldap.NewSearchRequest(
+ userDN, ldap.ScopeWholeSubtree, ldap.NeverDerefAliases, 0, 0, false, ls.RestrictedFilter,
+ []string{ls.AttributeName},
+ nil)
+
+ sr, err := l.Search(search)
+
+ if err != nil {
+		log.Error("LDAP Restricted Search with filter %s for %s failed unexpectedly! (%v)", ls.RestrictedFilter, userDN, err)
+ } else if len(sr.Entries) < 1 {
+ log.Trace("LDAP Restricted Search found no matching entries.")
+ } else {
+ return true
+ }
+ return false
+}
+
+// listLdapGroupMemberships lists all group memberships of a user
+func (source *Source) listLdapGroupMemberships(l *ldap.Conn, uid string, applyGroupFilter bool) container.Set[string] {
+ ldapGroups := make(container.Set[string])
+
+ groupFilter, ok := source.sanitizedGroupFilter(source.GroupFilter)
+ if !ok {
+ return ldapGroups
+ }
+
+ groupDN, ok := source.sanitizedGroupDN(source.GroupDN)
+ if !ok {
+ return ldapGroups
+ }
+
+ var searchFilter string
+ if applyGroupFilter && groupFilter != "" {
+ searchFilter = fmt.Sprintf("(&(%s)(%s=%s))", groupFilter, source.GroupMemberUID, ldap.EscapeFilter(uid))
+ } else {
+ searchFilter = fmt.Sprintf("(%s=%s)", source.GroupMemberUID, ldap.EscapeFilter(uid))
+ }
+ result, err := l.Search(ldap.NewSearchRequest(
+ groupDN,
+ ldap.ScopeWholeSubtree,
+ ldap.NeverDerefAliases,
+ 0,
+ 0,
+ false,
+ searchFilter,
+ []string{},
+ nil,
+ ))
+ if err != nil {
+ log.Error("Failed group search in LDAP with filter [%s]: %v", searchFilter, err)
+ return ldapGroups
+ }
+
+ for _, entry := range result.Entries {
+ if entry.DN == "" {
+ log.Error("LDAP search was successful, but found no DN!")
+ continue
+ }
+ ldapGroups.Add(entry.DN)
+ }
+
+ return ldapGroups
+}
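+
+// For example, with illustrative settings GroupFilter "objectClass=posixGroup",
+// GroupMemberUID "memberUid" and uid "jdoe", the search filter becomes
+// "(&(objectClass=posixGroup)(memberUid=jdoe))" and is evaluated under GroupDN.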
+
+func (source *Source) getUserAttributeListedInGroup(entry *ldap.Entry) string {
+ if strings.ToLower(source.UserUID) == "dn" {
+ return entry.DN
+ }
+
+ return entry.GetAttributeValue(source.UserUID)
+}
+
+// SearchEntry searches an LDAP source to check whether an entry (name, passwd) is valid and matches the configured filter
+func (source *Source) SearchEntry(name, passwd string, directBind bool) *SearchResult {
+ // See https://tools.ietf.org/search/rfc4513#section-5.1.2
+ if len(passwd) == 0 {
+ log.Debug("Auth. failed for %s, password cannot be empty", name)
+ return nil
+ }
+ l, err := dial(source)
+ if err != nil {
+ log.Error("LDAP Connect error, %s:%v", source.Host, err)
+ source.Enabled = false
+ return nil
+ }
+ defer l.Close()
+
+ var userDN string
+ if directBind {
+ log.Trace("LDAP will bind directly via UserDN template: %s", source.UserDN)
+
+ var ok bool
+ userDN, ok = source.sanitizedUserDN(name)
+
+ if !ok {
+ return nil
+ }
+
+ err = bindUser(l, userDN, passwd)
+ if err != nil {
+ return nil
+ }
+
+ if source.UserBase != "" {
+		// not everyone has a CN compatible with the input name, so we need to
+		// find the real userDN in that case
+
+ userDN, ok = source.findUserDN(l, name)
+ if !ok {
+ return nil
+ }
+ }
+ } else {
+ log.Trace("LDAP will use BindDN.")
+
+ var found bool
+
+ if source.BindDN != "" && source.BindPassword != "" {
+ err := l.Bind(source.BindDN, source.BindPassword)
+ if err != nil {
+ log.Debug("Failed to bind as BindDN[%s]: %v", source.BindDN, err)
+ return nil
+ }
+ log.Trace("Bound as BindDN %s", source.BindDN)
+ } else {
+ log.Trace("Proceeding with anonymous LDAP search.")
+ }
+
+ userDN, found = source.findUserDN(l, name)
+ if !found {
+ return nil
+ }
+ }
+
+ if !source.AttributesInBind {
+		// bind as the user (checking the password) before looking up attributes in the user's context
+ err = bindUser(l, userDN, passwd)
+ if err != nil {
+ return nil
+ }
+ }
+
+ userFilter, ok := source.sanitizedUserQuery(name)
+ if !ok {
+ return nil
+ }
+
+ isAttributeSSHPublicKeySet := len(strings.TrimSpace(source.AttributeSSHPublicKey)) > 0
+	isAttributeAvatarSet := len(strings.TrimSpace(source.AttributeAvatar)) > 0
+
+ attribs := []string{source.AttributeUsername, source.AttributeName, source.AttributeSurname, source.AttributeMail}
+ if len(strings.TrimSpace(source.UserUID)) > 0 {
+ attribs = append(attribs, source.UserUID)
+ }
+ if isAttributeSSHPublicKeySet {
+ attribs = append(attribs, source.AttributeSSHPublicKey)
+ }
+	if isAttributeAvatarSet {
+ attribs = append(attribs, source.AttributeAvatar)
+ }
+
+ log.Trace("Fetching attributes '%v', '%v', '%v', '%v', '%v', '%v', '%v' with filter '%s' and base '%s'", source.AttributeUsername, source.AttributeName, source.AttributeSurname, source.AttributeMail, source.AttributeSSHPublicKey, source.AttributeAvatar, source.UserUID, userFilter, userDN)
+ search := ldap.NewSearchRequest(
+ userDN, ldap.ScopeWholeSubtree, ldap.NeverDerefAliases, 0, 0, false, userFilter,
+ attribs, nil)
+
+ sr, err := l.Search(search)
+ if err != nil {
+ log.Error("LDAP Search failed unexpectedly! (%v)", err)
+ return nil
+ } else if len(sr.Entries) < 1 {
+ if directBind {
+ log.Trace("User filter inhibited user login.")
+ } else {
+ log.Trace("LDAP Search found no matching entries.")
+ }
+
+ return nil
+ }
+
+ var sshPublicKey []string
+ var Avatar []byte
+
+ username := sr.Entries[0].GetAttributeValue(source.AttributeUsername)
+ firstname := sr.Entries[0].GetAttributeValue(source.AttributeName)
+ surname := sr.Entries[0].GetAttributeValue(source.AttributeSurname)
+ mail := sr.Entries[0].GetAttributeValue(source.AttributeMail)
+
+ if isAttributeSSHPublicKeySet {
+ sshPublicKey = sr.Entries[0].GetAttributeValues(source.AttributeSSHPublicKey)
+ }
+
+ isAdmin := checkAdmin(l, source, userDN)
+
+ var isRestricted bool
+ if !isAdmin {
+ isRestricted = checkRestricted(l, source, userDN)
+ }
+
+	if isAttributeAvatarSet {
+ Avatar = sr.Entries[0].GetRawAttributeValue(source.AttributeAvatar)
+ }
+
+ // Check group membership
+ var usersLdapGroups container.Set[string]
+ if source.GroupsEnabled {
+ userAttributeListedInGroup := source.getUserAttributeListedInGroup(sr.Entries[0])
+ usersLdapGroups = source.listLdapGroupMemberships(l, userAttributeListedInGroup, true)
+
+ if source.GroupFilter != "" && len(usersLdapGroups) == 0 {
+ return nil
+ }
+ }
+
+ if !directBind && source.AttributesInBind {
+		// bind as the user (checking the password) after looking up attributes in the BindDN's context
+ err = bindUser(l, userDN, passwd)
+ if err != nil {
+ return nil
+ }
+ }
+
+ return &SearchResult{
+ LowerName: strings.ToLower(username),
+ Username: username,
+ Name: firstname,
+ Surname: surname,
+ Mail: mail,
+ SSHPublicKey: sshPublicKey,
+ IsAdmin: isAdmin,
+ IsRestricted: isRestricted,
+ Avatar: Avatar,
+ Groups: usersLdapGroups,
+ }
+}
+
+// UsePagedSearch returns whether paged search should be used
+func (source *Source) UsePagedSearch() bool {
+ return source.SearchPageSize > 0
+}
+
+// SearchEntries searches an LDAP source for all users matching the user filter
+func (source *Source) SearchEntries() ([]*SearchResult, error) {
+ l, err := dial(source)
+ if err != nil {
+ log.Error("LDAP Connect error, %s:%v", source.Host, err)
+ source.Enabled = false
+ return nil, err
+ }
+ defer l.Close()
+
+ if source.BindDN != "" && source.BindPassword != "" {
+ err := l.Bind(source.BindDN, source.BindPassword)
+ if err != nil {
+ log.Debug("Failed to bind as BindDN[%s]: %v", source.BindDN, err)
+ return nil, err
+ }
+ log.Trace("Bound as BindDN %s", source.BindDN)
+ } else {
+ log.Trace("Proceeding with anonymous LDAP search.")
+ }
+
+ userFilter := fmt.Sprintf(source.Filter, "*")
+
+ isAttributeSSHPublicKeySet := len(strings.TrimSpace(source.AttributeSSHPublicKey)) > 0
+	isAttributeAvatarSet := len(strings.TrimSpace(source.AttributeAvatar)) > 0
+
+ attribs := []string{source.AttributeUsername, source.AttributeName, source.AttributeSurname, source.AttributeMail, source.UserUID}
+ if isAttributeSSHPublicKeySet {
+ attribs = append(attribs, source.AttributeSSHPublicKey)
+ }
+	if isAttributeAvatarSet {
+ attribs = append(attribs, source.AttributeAvatar)
+ }
+
+ log.Trace("Fetching attributes '%v', '%v', '%v', '%v', '%v', '%v' with filter %s and base %s", source.AttributeUsername, source.AttributeName, source.AttributeSurname, source.AttributeMail, source.AttributeSSHPublicKey, source.AttributeAvatar, userFilter, source.UserBase)
+ search := ldap.NewSearchRequest(
+ source.UserBase, ldap.ScopeWholeSubtree, ldap.NeverDerefAliases, 0, 0, false, userFilter,
+ attribs, nil)
+
+ var sr *ldap.SearchResult
+ if source.UsePagedSearch() {
+ sr, err = l.SearchWithPaging(search, source.SearchPageSize)
+ } else {
+ sr, err = l.Search(search)
+ }
+ if err != nil {
+ log.Error("LDAP Search failed unexpectedly! (%v)", err)
+ return nil, err
+ }
+
+ result := make([]*SearchResult, 0, len(sr.Entries))
+
+ for _, v := range sr.Entries {
+ var usersLdapGroups container.Set[string]
+ if source.GroupsEnabled {
+ userAttributeListedInGroup := source.getUserAttributeListedInGroup(v)
+
+ if source.GroupFilter != "" {
+ usersLdapGroups = source.listLdapGroupMemberships(l, userAttributeListedInGroup, true)
+ if len(usersLdapGroups) == 0 {
+ continue
+ }
+ }
+
+ if source.GroupTeamMap != "" || source.GroupTeamMapRemoval {
+ usersLdapGroups = source.listLdapGroupMemberships(l, userAttributeListedInGroup, false)
+ }
+ }
+
+ user := &SearchResult{
+ Username: v.GetAttributeValue(source.AttributeUsername),
+ Name: v.GetAttributeValue(source.AttributeName),
+ Surname: v.GetAttributeValue(source.AttributeSurname),
+ Mail: v.GetAttributeValue(source.AttributeMail),
+ IsAdmin: checkAdmin(l, source, v.DN),
+ Groups: usersLdapGroups,
+ }
+
+ if !user.IsAdmin {
+ user.IsRestricted = checkRestricted(l, source, v.DN)
+ }
+
+ if isAttributeSSHPublicKeySet {
+ user.SSHPublicKey = v.GetAttributeValues(source.AttributeSSHPublicKey)
+ }
+
+		if isAttributeAvatarSet {
+ user.Avatar = v.GetRawAttributeValue(source.AttributeAvatar)
+ }
+
+ user.LowerName = strings.ToLower(user.Username)
+
+ result = append(result, user)
+ }
+
+ return result, nil
+}
diff --git a/services/auth/source/ldap/source_sync.go b/services/auth/source/ldap/source_sync.go
new file mode 100644
index 0000000..1f70eda
--- /dev/null
+++ b/services/auth/source/ldap/source_sync.go
@@ -0,0 +1,232 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package ldap
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/organization"
+ user_model "code.gitea.io/gitea/models/user"
+ auth_module "code.gitea.io/gitea/modules/auth"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ source_service "code.gitea.io/gitea/services/auth/source"
+ user_service "code.gitea.io/gitea/services/user"
+)
+
+// Sync causes this LDAP source to synchronize its users with the database
+func (source *Source) Sync(ctx context.Context, updateExisting bool) error {
+ log.Trace("Doing: SyncExternalUsers[%s]", source.authSource.Name)
+
+ isAttributeSSHPublicKeySet := len(strings.TrimSpace(source.AttributeSSHPublicKey)) > 0
+ var sshKeysNeedUpdate bool
+
+ // Find all users with this login type - FIXME: Should this be an iterator?
+ users, err := user_model.GetUsersBySource(ctx, source.authSource)
+ if err != nil {
+ log.Error("SyncExternalUsers: %v", err)
+ return err
+ }
+ select {
+ case <-ctx.Done():
+ log.Warn("SyncExternalUsers: Cancelled before update of %s", source.authSource.Name)
+ return db.ErrCancelledf("Before update of %s", source.authSource.Name)
+ default:
+ }
+
+ usernameUsers := make(map[string]*user_model.User, len(users))
+ mailUsers := make(map[string]*user_model.User, len(users))
+ keepActiveUsers := make(container.Set[int64])
+
+ for _, u := range users {
+ usernameUsers[u.LowerName] = u
+ mailUsers[strings.ToLower(u.Email)] = u
+ }
+
+ sr, err := source.SearchEntries()
+ if err != nil {
+ log.Error("SyncExternalUsers LDAP source failure [%s], skipped", source.authSource.Name)
+ return nil
+ }
+
+ if len(sr) == 0 {
+ if !source.AllowDeactivateAll {
+ log.Error("LDAP search found no entries but did not report an error. Refusing to deactivate all users")
+ return nil
+ }
+ log.Warn("LDAP search found no entries but did not report an error. All users will be deactivated as per settings")
+ }
+
+ orgCache := make(map[string]*organization.Organization)
+ teamCache := make(map[string]*organization.Team)
+
+ groupTeamMapping, err := auth_module.UnmarshalGroupTeamMapping(source.GroupTeamMap)
+ if err != nil {
+ return err
+ }
+
+ for _, su := range sr {
+ select {
+ case <-ctx.Done():
+ log.Warn("SyncExternalUsers: Cancelled at update of %s before completed update of users", source.authSource.Name)
+ // Rewrite authorized_keys file if LDAP Public SSH Key attribute is set and any key was added or removed
+ if sshKeysNeedUpdate {
+ err = asymkey_model.RewriteAllPublicKeys(ctx)
+ if err != nil {
+ log.Error("RewriteAllPublicKeys: %v", err)
+ }
+ }
+ return db.ErrCancelledf("During update of %s before completed update of users", source.authSource.Name)
+ default:
+ }
+ if len(su.Username) == 0 && len(su.Mail) == 0 {
+ continue
+ }
+
+ var usr *user_model.User
+ if len(su.Username) > 0 {
+ usr = usernameUsers[su.LowerName]
+ }
+ if usr == nil && len(su.Mail) > 0 {
+ usr = mailUsers[strings.ToLower(su.Mail)]
+ }
+
+ if usr != nil {
+ keepActiveUsers.Add(usr.ID)
+ } else if len(su.Username) == 0 {
+ // we cannot create the user if su.Username is empty
+ continue
+ }
+
+ if len(su.Mail) == 0 {
+ domainName := source.DefaultDomainName
+ if len(domainName) == 0 {
+ domainName = "localhost.local"
+ }
+ su.Mail = fmt.Sprintf("%s@%s", su.Username, domainName)
+ }
+
+ fullName := composeFullName(su.Name, su.Surname, su.Username)
+ // If no existing user found, create one
+ if usr == nil {
+ log.Trace("SyncExternalUsers[%s]: Creating user %s", source.authSource.Name, su.Username)
+
+ usr = &user_model.User{
+ LowerName: su.LowerName,
+ Name: su.Username,
+ FullName: fullName,
+ LoginType: source.authSource.Type,
+ LoginSource: source.authSource.ID,
+ LoginName: su.Username,
+ Email: su.Mail,
+ IsAdmin: su.IsAdmin,
+ }
+ overwriteDefault := &user_model.CreateUserOverwriteOptions{
+ IsRestricted: optional.Some(su.IsRestricted),
+ IsActive: optional.Some(true),
+ }
+
+ err = user_model.CreateUser(ctx, usr, overwriteDefault)
+ if err != nil {
+ log.Error("SyncExternalUsers[%s]: Error creating user %s: %v", source.authSource.Name, su.Username, err)
+ }
+
+ if err == nil && isAttributeSSHPublicKeySet {
+ log.Trace("SyncExternalUsers[%s]: Adding LDAP Public SSH Keys for user %s", source.authSource.Name, usr.Name)
+ if asymkey_model.AddPublicKeysBySource(ctx, usr, source.authSource, su.SSHPublicKey) {
+ sshKeysNeedUpdate = true
+ }
+ }
+
+ if err == nil && len(source.AttributeAvatar) > 0 {
+ _ = user_service.UploadAvatar(ctx, usr, su.Avatar)
+ }
+ } else if updateExisting {
+ // Synchronize SSH Public Key if that attribute is set
+ if isAttributeSSHPublicKeySet && asymkey_model.SynchronizePublicKeys(ctx, usr, source.authSource, su.SSHPublicKey) {
+ sshKeysNeedUpdate = true
+ }
+
+ // Check if user data has changed
+ if (len(source.AdminFilter) > 0 && usr.IsAdmin != su.IsAdmin) ||
+ (len(source.RestrictedFilter) > 0 && usr.IsRestricted != su.IsRestricted) ||
+ !strings.EqualFold(usr.Email, su.Mail) ||
+ usr.FullName != fullName ||
+ !usr.IsActive {
+ log.Trace("SyncExternalUsers[%s]: Updating user %s", source.authSource.Name, usr.Name)
+
+ opts := &user_service.UpdateOptions{
+ FullName: optional.Some(fullName),
+ IsActive: optional.Some(true),
+ }
+ if source.AdminFilter != "" {
+ opts.IsAdmin = optional.Some(su.IsAdmin)
+ }
+ // Change existing restricted flag only if RestrictedFilter option is set
+ if !su.IsAdmin && source.RestrictedFilter != "" {
+ opts.IsRestricted = optional.Some(su.IsRestricted)
+ }
+
+ if err := user_service.UpdateUser(ctx, usr, opts); err != nil {
+ log.Error("SyncExternalUsers[%s]: Error updating user %s: %v", source.authSource.Name, usr.Name, err)
+ }
+
+ if err := user_service.ReplacePrimaryEmailAddress(ctx, usr, su.Mail); err != nil {
+ log.Error("SyncExternalUsers[%s]: Error updating user %s primary email %s: %v", source.authSource.Name, usr.Name, su.Mail, err)
+ }
+ }
+
+ if usr.IsUploadAvatarChanged(su.Avatar) {
+ if err == nil && len(source.AttributeAvatar) > 0 {
+ _ = user_service.UploadAvatar(ctx, usr, su.Avatar)
+ }
+ }
+ }
+ // Synchronize LDAP groups with organization and team memberships
+ if source.GroupsEnabled && (source.GroupTeamMap != "" || source.GroupTeamMapRemoval) {
+ if err := source_service.SyncGroupsToTeamsCached(ctx, usr, su.Groups, groupTeamMapping, source.GroupTeamMapRemoval, orgCache, teamCache); err != nil {
+ log.Error("SyncGroupsToTeamsCached: %v", err)
+ }
+ }
+ }
+
+ // Rewrite authorized_keys file if LDAP Public SSH Key attribute is set and any key was added or removed
+ if sshKeysNeedUpdate {
+ err = asymkey_model.RewriteAllPublicKeys(ctx)
+ if err != nil {
+ log.Error("RewriteAllPublicKeys: %v", err)
+ }
+ }
+
+ select {
+ case <-ctx.Done():
+ log.Warn("SyncExternalUsers: Cancelled during update of %s before delete users", source.authSource.Name)
+ return db.ErrCancelledf("During update of %s before delete users", source.authSource.Name)
+ default:
+ }
+
+ // Deactivate users not present in LDAP
+ if updateExisting {
+ for _, usr := range users {
+ if keepActiveUsers.Contains(usr.ID) {
+ continue
+ }
+
+ log.Trace("SyncExternalUsers[%s]: Deactivating user %s", source.authSource.Name, usr.Name)
+
+ opts := &user_service.UpdateOptions{
+ IsActive: optional.Some(false),
+ }
+ if err := user_service.UpdateUser(ctx, usr, opts); err != nil {
+ log.Error("SyncExternalUsers[%s]: Error deactivating user %s: %v", source.authSource.Name, usr.Name, err)
+ }
+ }
+ }
+ return nil
+}
diff --git a/services/auth/source/ldap/util.go b/services/auth/source/ldap/util.go
new file mode 100644
index 0000000..bd11e2d
--- /dev/null
+++ b/services/auth/source/ldap/util.go
@@ -0,0 +1,18 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package ldap
+
+// composeFullName composes a full name from the first name and surname, falling back to the username when both are empty
+func composeFullName(firstname, surname, username string) string {
+ switch {
+ case len(firstname) == 0 && len(surname) == 0:
+ return username
+ case len(firstname) == 0:
+ return surname
+ case len(surname) == 0:
+ return firstname
+ default:
+ return firstname + " " + surname
+ }
+}
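+
+// Usage sketch:
+//
+//	composeFullName("Jane", "Doe", "jdoe") // "Jane Doe"
+//	composeFullName("", "", "jdoe")        // "jdoe"
+//	composeFullName("", "Doe", "jdoe")     // "Doe"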
diff --git a/services/auth/source/oauth2/assert_interface_test.go b/services/auth/source/oauth2/assert_interface_test.go
new file mode 100644
index 0000000..56fe0e4
--- /dev/null
+++ b/services/auth/source/oauth2/assert_interface_test.go
@@ -0,0 +1,22 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package oauth2_test
+
+import (
+ auth_model "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/services/auth"
+ "code.gitea.io/gitea/services/auth/source/oauth2"
+)
+
+// This test file exists to assert that our Source exposes the interfaces that we expect.
+// It tightly binds the interfaces and implementation without breaking Go import cycles.
+
+type sourceInterface interface {
+ auth_model.Config
+ auth_model.SourceSettable
+ auth_model.RegisterableSource
+ auth.PasswordAuthenticator
+}
+
+var _ (sourceInterface) = &oauth2.Source{}
diff --git a/services/auth/source/oauth2/init.go b/services/auth/source/oauth2/init.go
new file mode 100644
index 0000000..5c25681
--- /dev/null
+++ b/services/auth/source/oauth2/init.go
@@ -0,0 +1,86 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package oauth2
+
+import (
+ "context"
+ "encoding/gob"
+ "net/http"
+ "sync"
+
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/google/uuid"
+ "github.com/gorilla/sessions"
+ "github.com/markbates/goth/gothic"
+)
+
+var gothRWMutex = sync.RWMutex{}
+
+// UsersStoreKey is the key for the OAuth2 sessions store
+const UsersStoreKey = "gitea-oauth2-sessions"
+
+// ProviderHeaderKey is the HTTP header key that carries the OAuth2 provider name
+const ProviderHeaderKey = "gitea-oauth2-provider"
+
+// Init initializes the OAuth2 subsystem: the JWT signing key, the gothic session store, and all active OAuth2 sources
+func Init(ctx context.Context) error {
+ if err := InitSigningKey(); err != nil {
+ return err
+ }
+
+ // Lock our mutex
+ gothRWMutex.Lock()
+
+ gob.Register(&sessions.Session{})
+
+ gothic.Store = &SessionsStore{
+ maxLength: int64(setting.OAuth2.MaxTokenLength),
+ }
+
+ gothic.SetState = func(req *http.Request) string {
+ return uuid.New().String()
+ }
+
+ gothic.GetProviderName = func(req *http.Request) (string, error) {
+ return req.Header.Get(ProviderHeaderKey), nil
+ }
+
+ // Unlock our mutex
+ gothRWMutex.Unlock()
+
+ return initOAuth2Sources(ctx)
+}
+
+// ResetOAuth2 clears existing OAuth2 providers and loads them from DB
+func ResetOAuth2(ctx context.Context) error {
+ ClearProviders()
+ return initOAuth2Sources(ctx)
+}
+
+// initOAuth2Sources is used to load and register all active OAuth2 providers
+func initOAuth2Sources(ctx context.Context) error {
+ authSources, err := db.Find[auth.Source](ctx, auth.FindSourcesOptions{
+ IsActive: optional.Some(true),
+ LoginType: auth.OAuth2,
+ })
+ if err != nil {
+ return err
+ }
+ for _, source := range authSources {
+ oauth2Source, ok := source.Cfg.(*Source)
+ if !ok {
+ continue
+ }
+ err := oauth2Source.RegisterSource()
+ if err != nil {
+ log.Critical("Unable to register source: %s due to Error: %v.", source.Name, err)
+ }
+ }
+ return nil
+}
diff --git a/services/auth/source/oauth2/jwtsigningkey.go b/services/auth/source/oauth2/jwtsigningkey.go
new file mode 100644
index 0000000..92adfc4
--- /dev/null
+++ b/services/auth/source/oauth2/jwtsigningkey.go
@@ -0,0 +1,422 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package oauth2
+
+import (
+ "crypto/ecdsa"
+ "crypto/ed25519"
+ "crypto/elliptic"
+ "crypto/rand"
+ "crypto/rsa"
+ "crypto/x509"
+ "encoding/base64"
+ "encoding/pem"
+ "fmt"
+ "math/big"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+
+ "github.com/golang-jwt/jwt/v5"
+)
+
+// ErrInvalidAlgorithmType represents an invalid algorithm error.
+type ErrInvalidAlgorithmType struct {
+ Algorithm string
+}
+
+func (err ErrInvalidAlgorithmType) Error() string {
+ return fmt.Sprintf("JWT signing algorithm is not supported: %s", err.Algorithm)
+}
+
+// JWTSigningKey represents an algorithm/key pair used to sign JWTs
+type JWTSigningKey interface {
+ IsSymmetric() bool
+ SigningMethod() jwt.SigningMethod
+ SignKey() any
+ VerifyKey() any
+ ToJWK() (map[string]string, error)
+ PreProcessToken(*jwt.Token)
+}
+
+type hmacSigningKey struct {
+ signingMethod jwt.SigningMethod
+ secret []byte
+}
+
+func (key hmacSigningKey) IsSymmetric() bool {
+ return true
+}
+
+func (key hmacSigningKey) SigningMethod() jwt.SigningMethod {
+ return key.signingMethod
+}
+
+func (key hmacSigningKey) SignKey() any {
+ return key.secret
+}
+
+func (key hmacSigningKey) VerifyKey() any {
+ return key.secret
+}
+
+func (key hmacSigningKey) ToJWK() (map[string]string, error) {
+ return map[string]string{
+ "kty": "oct",
+ "alg": key.SigningMethod().Alg(),
+ }, nil
+}
+
+func (key hmacSigningKey) PreProcessToken(*jwt.Token) {}
+
+type rsaSigningKey struct {
+ signingMethod jwt.SigningMethod
+ key *rsa.PrivateKey
+ id string
+}
+
+func newRSASigningKey(signingMethod jwt.SigningMethod, key *rsa.PrivateKey) (rsaSigningKey, error) {
+ kid, err := util.CreatePublicKeyFingerprint(key.Public().(*rsa.PublicKey))
+ if err != nil {
+		return rsaSigningKey{}, err
+ }
+
+	return rsaSigningKey{
+ signingMethod,
+ key,
+ base64.RawURLEncoding.EncodeToString(kid),
+ }, nil
+}
+
+func (key rsaSigningKey) IsSymmetric() bool {
+ return false
+}
+
+func (key rsaSigningKey) SigningMethod() jwt.SigningMethod {
+ return key.signingMethod
+}
+
+func (key rsaSigningKey) SignKey() any {
+ return key.key
+}
+
+func (key rsaSigningKey) VerifyKey() any {
+ return key.key.Public()
+}
+
+func (key rsaSigningKey) ToJWK() (map[string]string, error) {
+ pubKey := key.key.Public().(*rsa.PublicKey)
+
+ return map[string]string{
+ "kty": "RSA",
+ "alg": key.SigningMethod().Alg(),
+ "kid": key.id,
+ "e": base64.RawURLEncoding.EncodeToString(big.NewInt(int64(pubKey.E)).Bytes()),
+ "n": base64.RawURLEncoding.EncodeToString(pubKey.N.Bytes()),
+ }, nil
+}
+
+func (key rsaSigningKey) PreProcessToken(token *jwt.Token) {
+ token.Header["kid"] = key.id
+}
+
+type eddsaSigningKey struct {
+ signingMethod jwt.SigningMethod
+ key ed25519.PrivateKey
+ id string
+}
+
+func newEdDSASigningKey(signingMethod jwt.SigningMethod, key ed25519.PrivateKey) (eddsaSigningKey, error) {
+ kid, err := util.CreatePublicKeyFingerprint(key.Public().(ed25519.PublicKey))
+ if err != nil {
+ return eddsaSigningKey{}, err
+ }
+
+ return eddsaSigningKey{
+ signingMethod,
+ key,
+ base64.RawURLEncoding.EncodeToString(kid),
+ }, nil
+}
+
+func (key eddsaSigningKey) IsSymmetric() bool {
+ return false
+}
+
+func (key eddsaSigningKey) SigningMethod() jwt.SigningMethod {
+ return key.signingMethod
+}
+
+func (key eddsaSigningKey) SignKey() any {
+ return key.key
+}
+
+func (key eddsaSigningKey) VerifyKey() any {
+ return key.key.Public()
+}
+
+func (key eddsaSigningKey) ToJWK() (map[string]string, error) {
+ pubKey := key.key.Public().(ed25519.PublicKey)
+
+ return map[string]string{
+ "alg": key.SigningMethod().Alg(),
+ "kid": key.id,
+ "kty": "OKP",
+ "crv": "Ed25519",
+ "x": base64.RawURLEncoding.EncodeToString(pubKey),
+ }, nil
+}
+
+func (key eddsaSigningKey) PreProcessToken(token *jwt.Token) {
+ token.Header["kid"] = key.id
+}
+
+type ecdsaSigningKey struct {
+ signingMethod jwt.SigningMethod
+ key *ecdsa.PrivateKey
+ id string
+}
+
+func newECDSASigningKey(signingMethod jwt.SigningMethod, key *ecdsa.PrivateKey) (ecdsaSigningKey, error) {
+ kid, err := util.CreatePublicKeyFingerprint(key.Public().(*ecdsa.PublicKey))
+ if err != nil {
+		return ecdsaSigningKey{}, err
+ }
+
+	return ecdsaSigningKey{
+ signingMethod,
+ key,
+ base64.RawURLEncoding.EncodeToString(kid),
+ }, nil
+}
+
+func (key ecdsaSigningKey) IsSymmetric() bool {
+ return false
+}
+
+func (key ecdsaSigningKey) SigningMethod() jwt.SigningMethod {
+ return key.signingMethod
+}
+
+func (key ecdsaSigningKey) SignKey() any {
+ return key.key
+}
+
+func (key ecdsaSigningKey) VerifyKey() any {
+ return key.key.Public()
+}
+
+func (key ecdsaSigningKey) ToJWK() (map[string]string, error) {
+ pubKey := key.key.Public().(*ecdsa.PublicKey)
+
+ return map[string]string{
+ "kty": "EC",
+ "alg": key.SigningMethod().Alg(),
+ "kid": key.id,
+ "crv": pubKey.Params().Name,
+ "x": base64.RawURLEncoding.EncodeToString(pubKey.X.Bytes()),
+ "y": base64.RawURLEncoding.EncodeToString(pubKey.Y.Bytes()),
+ }, nil
+}
+
+func (key ecdsaSigningKey) PreProcessToken(token *jwt.Token) {
+ token.Header["kid"] = key.id
+}
+
+// CreateJWTSigningKey creates a signing key from an algorithm / key pair.
+func CreateJWTSigningKey(algorithm string, key any) (JWTSigningKey, error) {
+ var signingMethod jwt.SigningMethod
+ switch algorithm {
+ case "HS256":
+ signingMethod = jwt.SigningMethodHS256
+ case "HS384":
+ signingMethod = jwt.SigningMethodHS384
+ case "HS512":
+ signingMethod = jwt.SigningMethodHS512
+
+ case "RS256":
+ signingMethod = jwt.SigningMethodRS256
+ case "RS384":
+ signingMethod = jwt.SigningMethodRS384
+ case "RS512":
+ signingMethod = jwt.SigningMethodRS512
+
+ case "ES256":
+ signingMethod = jwt.SigningMethodES256
+ case "ES384":
+ signingMethod = jwt.SigningMethodES384
+ case "ES512":
+ signingMethod = jwt.SigningMethodES512
+ case "EdDSA":
+ signingMethod = jwt.SigningMethodEdDSA
+ default:
+ return nil, ErrInvalidAlgorithmType{algorithm}
+ }
+
+ switch signingMethod.(type) {
+ case *jwt.SigningMethodEd25519:
+ privateKey, ok := key.(ed25519.PrivateKey)
+ if !ok {
+ return nil, jwt.ErrInvalidKeyType
+ }
+		return newEdDSASigningKey(signingMethod, privateKey)
+ case *jwt.SigningMethodECDSA:
+ privateKey, ok := key.(*ecdsa.PrivateKey)
+ if !ok {
+ return nil, jwt.ErrInvalidKeyType
+ }
+		return newECDSASigningKey(signingMethod, privateKey)
+ case *jwt.SigningMethodRSA:
+ privateKey, ok := key.(*rsa.PrivateKey)
+ if !ok {
+ return nil, jwt.ErrInvalidKeyType
+ }
+		return newRSASigningKey(signingMethod, privateKey)
+ default:
+ secret, ok := key.([]byte)
+ if !ok {
+ return nil, jwt.ErrInvalidKeyType
+ }
+ return hmacSigningKey{signingMethod, secret}, nil
+ }
+}
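+
+// A minimal signing sketch (the HMAC secret below is an illustrative
+// assumption; asymmetric algorithms take the matching private key type
+// instead of a byte slice):
+//
+//	key, err := CreateJWTSigningKey("HS256", []byte("a-sufficiently-long-random-secret"))
+//	if err != nil { /* handle */ }
+//	token := jwt.New(key.SigningMethod())
+//	key.PreProcessToken(token) // sets the "kid" header for asymmetric keys
+//	signed, err := token.SignedString(key.SignKey())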
+
+// DefaultSigningKey is the default signing key for JWTs.
+var DefaultSigningKey JWTSigningKey
+
+// InitSigningKey creates the default signing key from settings or creates a random key.
+func InitSigningKey() error {
+ var err error
+ var key any
+
+ switch setting.OAuth2.JWTSigningAlgorithm {
+ case "HS256":
+ fallthrough
+ case "HS384":
+ fallthrough
+ case "HS512":
+ key = setting.GetGeneralTokenSigningSecret()
+ case "RS256":
+ fallthrough
+ case "RS384":
+ fallthrough
+ case "RS512":
+ fallthrough
+ case "ES256":
+ fallthrough
+ case "ES384":
+ fallthrough
+ case "ES512":
+ fallthrough
+ case "EdDSA":
+ key, err = loadOrCreateAsymmetricKey()
+ default:
+ return ErrInvalidAlgorithmType{setting.OAuth2.JWTSigningAlgorithm}
+ }
+
+ if err != nil {
+		return fmt.Errorf("error while loading or creating JWT key: %w", err)
+ }
+
+ signingKey, err := CreateJWTSigningKey(setting.OAuth2.JWTSigningAlgorithm, key)
+ if err != nil {
+ return err
+ }
+
+ DefaultSigningKey = signingKey
+
+ return nil
+}
+
+// loadOrCreateAsymmetricKey checks if the configured private key exists.
+// If it does not exist, a new random key is generated and saved at the configured path.
+func loadOrCreateAsymmetricKey() (any, error) {
+ keyPath := setting.OAuth2.JWTSigningPrivateKeyFile
+
+ isExist, err := util.IsExist(keyPath)
+ if err != nil {
+ log.Fatal("Unable to check if %s exists. Error: %v", keyPath, err)
+ }
+ if !isExist {
+ err := func() error {
+ key, err := func() (any, error) {
+ switch {
+ case strings.HasPrefix(setting.OAuth2.JWTSigningAlgorithm, "RS"):
+ var bits int
+ switch setting.OAuth2.JWTSigningAlgorithm {
+ case "RS256":
+ bits = 2048
+ case "RS384":
+ bits = 3072
+ case "RS512":
+ bits = 4096
+ }
+ return rsa.GenerateKey(rand.Reader, bits)
+ case setting.OAuth2.JWTSigningAlgorithm == "EdDSA":
+ _, pk, err := ed25519.GenerateKey(rand.Reader)
+ return pk, err
+ default:
+ var curve elliptic.Curve
+ switch setting.OAuth2.JWTSigningAlgorithm {
+ case "ES256":
+ curve = elliptic.P256()
+ case "ES384":
+ curve = elliptic.P384()
+ case "ES512":
+ curve = elliptic.P521()
+ }
+ return ecdsa.GenerateKey(curve, rand.Reader)
+ }
+ }()
+ if err != nil {
+ return err
+ }
+
+ bytes, err := x509.MarshalPKCS8PrivateKey(key)
+ if err != nil {
+ return err
+ }
+
+ privateKeyPEM := &pem.Block{Type: "PRIVATE KEY", Bytes: bytes}
+
+ if err := os.MkdirAll(filepath.Dir(keyPath), os.ModePerm); err != nil {
+ return err
+ }
+
+ f, err := os.OpenFile(keyPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0o600)
+ if err != nil {
+ return err
+ }
+ defer func() {
+ if err = f.Close(); err != nil {
+ log.Error("Close: %v", err)
+ }
+ }()
+
+ return pem.Encode(f, privateKeyPEM)
+ }()
+ if err != nil {
+ log.Fatal("Error generating private key: %v", err)
+ return nil, err
+ }
+ }
+
+ bytes, err := os.ReadFile(keyPath)
+ if err != nil {
+ return nil, err
+ }
+
+ block, _ := pem.Decode(bytes)
+ if block == nil {
+ return nil, fmt.Errorf("no valid PEM data found in %s", keyPath)
+ } else if block.Type != "PRIVATE KEY" {
+ return nil, fmt.Errorf("expected PRIVATE KEY, got %s in %s", block.Type, keyPath)
+ }
+
+ return x509.ParsePKCS8PrivateKey(block.Bytes)
+}
diff --git a/services/auth/source/oauth2/jwtsigningkey_test.go b/services/auth/source/oauth2/jwtsigningkey_test.go
new file mode 100644
index 0000000..4db538b
--- /dev/null
+++ b/services/auth/source/oauth2/jwtsigningkey_test.go
@@ -0,0 +1,116 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package oauth2
+
+import (
+ "crypto/ecdsa"
+ "crypto/ed25519"
+ "crypto/rsa"
+ "crypto/x509"
+ "encoding/pem"
+ "os"
+ "path/filepath"
+ "testing"
+
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/test"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestLoadOrCreateAsymmetricKey(t *testing.T) {
+ loadKey := func(t *testing.T) any {
+ t.Helper()
+ loadOrCreateAsymmetricKey()
+
+ fileContent, err := os.ReadFile(setting.OAuth2.JWTSigningPrivateKeyFile)
+ require.NoError(t, err)
+
+ block, _ := pem.Decode(fileContent)
+ assert.NotNil(t, block)
+ assert.EqualValues(t, "PRIVATE KEY", block.Type)
+
+ parsedKey, err := x509.ParsePKCS8PrivateKey(block.Bytes)
+ require.NoError(t, err)
+
+ return parsedKey
+ }
+ t.Run("RSA-2048", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.OAuth2.JWTSigningPrivateKeyFile, filepath.Join(t.TempDir(), "jwt-rsa-2048.priv"))()
+ defer test.MockVariableValue(&setting.OAuth2.JWTSigningAlgorithm, "RS256")()
+
+ parsedKey := loadKey(t)
+
+ rsaPrivateKey := parsedKey.(*rsa.PrivateKey)
+ assert.EqualValues(t, 2048, rsaPrivateKey.N.BitLen())
+
+		t.Run("Load key with a different specified algorithm", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.OAuth2.JWTSigningAlgorithm, "EdDSA")()
+
+ parsedKey := loadKey(t)
+ rsaPrivateKey := parsedKey.(*rsa.PrivateKey)
+ assert.EqualValues(t, 2048, rsaPrivateKey.N.BitLen())
+ })
+ })
+
+ t.Run("RSA-3072", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.OAuth2.JWTSigningPrivateKeyFile, filepath.Join(t.TempDir(), "jwt-rsa-3072.priv"))()
+ defer test.MockVariableValue(&setting.OAuth2.JWTSigningAlgorithm, "RS384")()
+
+ parsedKey := loadKey(t)
+
+ rsaPrivateKey := parsedKey.(*rsa.PrivateKey)
+ assert.EqualValues(t, 3072, rsaPrivateKey.N.BitLen())
+ })
+
+ t.Run("RSA-4096", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.OAuth2.JWTSigningPrivateKeyFile, filepath.Join(t.TempDir(), "jwt-rsa-4096.priv"))()
+ defer test.MockVariableValue(&setting.OAuth2.JWTSigningAlgorithm, "RS512")()
+
+ parsedKey := loadKey(t)
+
+ rsaPrivateKey := parsedKey.(*rsa.PrivateKey)
+ assert.EqualValues(t, 4096, rsaPrivateKey.N.BitLen())
+ })
+
+ t.Run("ECDSA-256", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.OAuth2.JWTSigningPrivateKeyFile, filepath.Join(t.TempDir(), "jwt-ecdsa-256.priv"))()
+ defer test.MockVariableValue(&setting.OAuth2.JWTSigningAlgorithm, "ES256")()
+
+ parsedKey := loadKey(t)
+
+ ecdsaPrivateKey := parsedKey.(*ecdsa.PrivateKey)
+ assert.EqualValues(t, 256, ecdsaPrivateKey.Params().BitSize)
+ })
+
+ t.Run("ECDSA-384", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.OAuth2.JWTSigningPrivateKeyFile, filepath.Join(t.TempDir(), "jwt-ecdsa-384.priv"))()
+ defer test.MockVariableValue(&setting.OAuth2.JWTSigningAlgorithm, "ES384")()
+
+ parsedKey := loadKey(t)
+
+ ecdsaPrivateKey := parsedKey.(*ecdsa.PrivateKey)
+ assert.EqualValues(t, 384, ecdsaPrivateKey.Params().BitSize)
+ })
+
+ t.Run("ECDSA-512", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.OAuth2.JWTSigningPrivateKeyFile, filepath.Join(t.TempDir(), "jwt-ecdsa-512.priv"))()
+ defer test.MockVariableValue(&setting.OAuth2.JWTSigningAlgorithm, "ES512")()
+
+ parsedKey := loadKey(t)
+
+ ecdsaPrivateKey := parsedKey.(*ecdsa.PrivateKey)
+ assert.EqualValues(t, 521, ecdsaPrivateKey.Params().BitSize)
+ })
+
+ t.Run("EdDSA", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.OAuth2.JWTSigningPrivateKeyFile, filepath.Join(t.TempDir(), "jwt-eddsa.priv"))()
+ defer test.MockVariableValue(&setting.OAuth2.JWTSigningAlgorithm, "EdDSA")()
+
+ parsedKey := loadKey(t)
+
+ assert.NotNil(t, parsedKey.(ed25519.PrivateKey))
+ })
+}
diff --git a/services/auth/source/oauth2/providers.go b/services/auth/source/oauth2/providers.go
new file mode 100644
index 0000000..f2c1bb4
--- /dev/null
+++ b/services/auth/source/oauth2/providers.go
@@ -0,0 +1,190 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package oauth2
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "html"
+ "html/template"
+ "net/url"
+ "sort"
+
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/markbates/goth"
+)
+
+// Provider is an interface for describing a single OAuth2 provider
+type Provider interface {
+ Name() string
+ DisplayName() string
+ IconHTML(size int) template.HTML
+ CustomURLSettings() *CustomURLSettings
+}
+
+// GothProviderCreator provides a function to create a goth.Provider
+type GothProviderCreator interface {
+ CreateGothProvider(providerName, callbackURL string, source *Source) (goth.Provider, error)
+}
+
+// GothProvider is an interface for describing a single OAuth2 provider
+type GothProvider interface {
+ Provider
+ GothProviderCreator
+}
+
+// AuthSourceProvider provides a provider for an AuthSource. Multiple auth sources could use the same registered GothProvider,
+// so each auth source should have its own DisplayName and IconHTML for display.
+// The Name is the GothProvider's name, used to find the GothProvider to sign in with.
+// The DisplayName is the auth source config's name, set by the site admin on the admin page; the IconURL can also be set there.
+type AuthSourceProvider struct {
+ GothProvider
+ sourceName, iconURL string
+}
+
+func (p *AuthSourceProvider) Name() string {
+ return p.GothProvider.Name()
+}
+
+func (p *AuthSourceProvider) DisplayName() string {
+ return p.sourceName
+}
+
+func (p *AuthSourceProvider) IconHTML(size int) template.HTML {
+ if p.iconURL != "" {
+ img := fmt.Sprintf(`<img class="tw-object-contain tw-mr-2" width="%d" height="%d" src="%s" alt="%s">`,
+ size,
+ size,
+ html.EscapeString(p.iconURL), html.EscapeString(p.DisplayName()),
+ )
+ return template.HTML(img)
+ }
+ return p.GothProvider.IconHTML(size)
+}
+
+// gothProviders contains the map of registered OAuth2 providers in Gitea (based on goth).
+// The key maps the OAuth2 provider to the goth provider type (also stored in AuthSource.OAuth2Config.Provider);
+// the value stores the display data.
+var gothProviders = map[string]GothProvider{}
+
+// RegisterGothProvider registers a GothProvider
+func RegisterGothProvider(provider GothProvider) {
+ if _, has := gothProviders[provider.Name()]; has {
+ log.Fatal("Duplicate oauth2provider type provided: %s", provider.Name())
+ }
+ gothProviders[provider.Name()] = provider
+}
+
+// GetSupportedOAuth2Providers returns the sorted list of supported OAuth2 providers.
+// The Name is used as the technical name (e.g. in the callbackURL);
+// the remaining fields are used for display.
+func GetSupportedOAuth2Providers() []Provider {
+ providers := make([]Provider, 0, len(gothProviders))
+
+ for _, provider := range gothProviders {
+ providers = append(providers, provider)
+ }
+ sort.Slice(providers, func(i, j int) bool {
+ return providers[i].Name() < providers[j].Name()
+ })
+ return providers
+}
+
+func CreateProviderFromSource(source *auth.Source) (Provider, error) {
+ oauth2Cfg, ok := source.Cfg.(*Source)
+ if !ok {
+ return nil, fmt.Errorf("invalid OAuth2 source config: %v", oauth2Cfg)
+ }
+ gothProv := gothProviders[oauth2Cfg.Provider]
+ return &AuthSourceProvider{GothProvider: gothProv, sourceName: source.Name, iconURL: oauth2Cfg.IconURL}, nil
+}
+
+// GetOAuth2Providers returns the list of configured OAuth2 providers
+func GetOAuth2Providers(ctx context.Context, isActive optional.Option[bool]) ([]Provider, error) {
+ authSources, err := db.Find[auth.Source](ctx, auth.FindSourcesOptions{
+ IsActive: isActive,
+ LoginType: auth.OAuth2,
+ })
+ if err != nil {
+ return nil, err
+ }
+
+ providers := make([]Provider, 0, len(authSources))
+ for _, source := range authSources {
+ provider, err := CreateProviderFromSource(source)
+ if err != nil {
+ return nil, err
+ }
+ providers = append(providers, provider)
+ }
+
+ sort.Slice(providers, func(i, j int) bool {
+ return providers[i].Name() < providers[j].Name()
+ })
+
+ return providers, nil
+}
+
+// RegisterProviderWithGothic registers an OAuth2 provider with the goth library
+func RegisterProviderWithGothic(providerName string, source *Source) error {
+ provider, err := createProvider(providerName, source)
+
+ if err == nil && provider != nil {
+ gothRWMutex.Lock()
+ defer gothRWMutex.Unlock()
+
+ goth.UseProviders(provider)
+ }
+
+ return err
+}
+
+// RemoveProviderFromGothic removes the given OAuth2 provider from the goth library
+func RemoveProviderFromGothic(providerName string) {
+ gothRWMutex.Lock()
+ defer gothRWMutex.Unlock()
+
+ delete(goth.GetProviders(), providerName)
+}
+
+// ClearProviders clears all OAuth2 providers from the goth library
+func ClearProviders() {
+ gothRWMutex.Lock()
+ defer gothRWMutex.Unlock()
+
+ goth.ClearProviders()
+}
+
+var ErrAuthSourceNotActivated = errors.New("auth source is not activated")
+
+// createProvider creates the appropriate type of goth provider for the given source
+func createProvider(providerName string, source *Source) (goth.Provider, error) {
+ callbackURL := setting.AppURL + "user/oauth2/" + url.PathEscape(providerName) + "/callback"
+
+ var provider goth.Provider
+ var err error
+
+ p, ok := gothProviders[source.Provider]
+ if !ok {
+ return nil, ErrAuthSourceNotActivated
+ }
+
+ provider, err = p.CreateGothProvider(providerName, callbackURL, source)
+ if err != nil {
+ return provider, err
+ }
+
+	// always set the name if the provider was created, so we can support multiple setups of one provider
+ if provider != nil {
+ provider.SetName(providerName)
+ }
+
+ return provider, err
+}
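+
+// Registration sketch (the provider name and credentials here are illustrative
+// assumptions):
+//
+//	source := &Source{Provider: "github", ClientID: "client-id", ClientSecret: "client-secret"}
+//	if err := RegisterProviderWithGothic("github", source); err != nil { /* handle */ }
+//	// ...and when the auth source is removed:
+//	RemoveProviderFromGothic("github")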
diff --git a/services/auth/source/oauth2/providers_base.go b/services/auth/source/oauth2/providers_base.go
new file mode 100644
index 0000000..9d4ab10
--- /dev/null
+++ b/services/auth/source/oauth2/providers_base.go
@@ -0,0 +1,51 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package oauth2
+
+import (
+ "html/template"
+
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/svg"
+)
+
+// BaseProvider represents a common base for Provider
+type BaseProvider struct {
+ name string
+ displayName string
+}
+
+// Name provides the technical name for this provider
+func (b *BaseProvider) Name() string {
+ return b.name
+}
+
+// DisplayName returns the friendly name for this provider
+func (b *BaseProvider) DisplayName() string {
+ return b.displayName
+}
+
+// IconHTML returns icon HTML for this provider
+func (b *BaseProvider) IconHTML(size int) template.HTML {
+ svgName := "gitea-" + b.name
+ switch b.name {
+ case "gplus":
+ svgName = "gitea-google"
+ case "github":
+ svgName = "octicon-mark-github"
+ }
+ svgHTML := svg.RenderHTML(svgName, size, "tw-mr-2")
+ if svgHTML == "" {
+ log.Error("No SVG icon for oauth2 provider %q", b.name)
+ svgHTML = svg.RenderHTML("gitea-openid", size, "tw-mr-2")
+ }
+ return svgHTML
+}
+
+// CustomURLSettings returns the custom url settings for this provider
+func (b *BaseProvider) CustomURLSettings() *CustomURLSettings {
+ return nil
+}
+
+var _ Provider = &BaseProvider{}
diff --git a/services/auth/source/oauth2/providers_custom.go b/services/auth/source/oauth2/providers_custom.go
new file mode 100644
index 0000000..65cf538
--- /dev/null
+++ b/services/auth/source/oauth2/providers_custom.go
@@ -0,0 +1,123 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package oauth2
+
+import (
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/markbates/goth"
+ "github.com/markbates/goth/providers/azureadv2"
+ "github.com/markbates/goth/providers/gitea"
+ "github.com/markbates/goth/providers/github"
+ "github.com/markbates/goth/providers/gitlab"
+ "github.com/markbates/goth/providers/mastodon"
+ "github.com/markbates/goth/providers/nextcloud"
+)
+
+// CustomProviderNewFn creates a goth.Provider using a custom URL mapping
+type CustomProviderNewFn func(clientID, secret, callbackURL string, custom *CustomURLMapping, scopes []string) (goth.Provider, error)
+
+// CustomProvider is a GothProvider that has CustomURL features
+type CustomProvider struct {
+ BaseProvider
+ customURLSettings *CustomURLSettings
+ newFn CustomProviderNewFn
+}
+
+// CustomURLSettings returns the CustomURLSettings for this provider
+func (c *CustomProvider) CustomURLSettings() *CustomURLSettings {
+ return c.customURLSettings
+}
+
+// CreateGothProvider creates a GothProvider from this Provider
+func (c *CustomProvider) CreateGothProvider(providerName, callbackURL string, source *Source) (goth.Provider, error) {
+ custom := c.customURLSettings.OverrideWith(source.CustomURLMapping)
+
+ return c.newFn(source.ClientID, source.ClientSecret, callbackURL, custom, source.Scopes)
+}
+
+// NewCustomProvider is a constructor function for custom providers
+func NewCustomProvider(name, displayName string, customURLSetting *CustomURLSettings, newFn CustomProviderNewFn) *CustomProvider {
+ return &CustomProvider{
+ BaseProvider: BaseProvider{
+ name: name,
+ displayName: displayName,
+ },
+ customURLSettings: customURLSetting,
+ newFn: newFn,
+ }
+}
+
+var _ GothProvider = &CustomProvider{}
+
+func init() {
+ RegisterGothProvider(NewCustomProvider(
+ "github", "GitHub", &CustomURLSettings{
+ TokenURL: availableAttribute(github.TokenURL),
+ AuthURL: availableAttribute(github.AuthURL),
+ ProfileURL: availableAttribute(github.ProfileURL),
+ EmailURL: availableAttribute(github.EmailURL),
+ },
+ func(clientID, secret, callbackURL string, custom *CustomURLMapping, scopes []string) (goth.Provider, error) {
+ if setting.OAuth2Client.EnableAutoRegistration {
+ scopes = append(scopes, "user:email")
+ }
+ return github.NewCustomisedURL(clientID, secret, callbackURL, custom.AuthURL, custom.TokenURL, custom.ProfileURL, custom.EmailURL, scopes...), nil
+ }))
+
+ RegisterGothProvider(NewCustomProvider(
+ "gitlab", "GitLab", &CustomURLSettings{
+ AuthURL: availableAttribute(gitlab.AuthURL),
+ TokenURL: availableAttribute(gitlab.TokenURL),
+ ProfileURL: availableAttribute(gitlab.ProfileURL),
+ }, func(clientID, secret, callbackURL string, custom *CustomURLMapping, scopes []string) (goth.Provider, error) {
+ scopes = append(scopes, "read_user")
+ return gitlab.NewCustomisedURL(clientID, secret, callbackURL, custom.AuthURL, custom.TokenURL, custom.ProfileURL, scopes...), nil
+ }))
+
+ RegisterGothProvider(NewCustomProvider(
+ "gitea", "Gitea", &CustomURLSettings{
+ TokenURL: requiredAttribute(gitea.TokenURL),
+ AuthURL: requiredAttribute(gitea.AuthURL),
+ ProfileURL: requiredAttribute(gitea.ProfileURL),
+ },
+ func(clientID, secret, callbackURL string, custom *CustomURLMapping, scopes []string) (goth.Provider, error) {
+ return gitea.NewCustomisedURL(clientID, secret, callbackURL, custom.AuthURL, custom.TokenURL, custom.ProfileURL, scopes...), nil
+ }))
+
+ RegisterGothProvider(NewCustomProvider(
+ "nextcloud", "Nextcloud", &CustomURLSettings{
+ TokenURL: requiredAttribute(nextcloud.TokenURL),
+ AuthURL: requiredAttribute(nextcloud.AuthURL),
+ ProfileURL: requiredAttribute(nextcloud.ProfileURL),
+ },
+ func(clientID, secret, callbackURL string, custom *CustomURLMapping, scopes []string) (goth.Provider, error) {
+ return nextcloud.NewCustomisedURL(clientID, secret, callbackURL, custom.AuthURL, custom.TokenURL, custom.ProfileURL, scopes...), nil
+ }))
+
+ RegisterGothProvider(NewCustomProvider(
+ "mastodon", "Mastodon", &CustomURLSettings{
+ AuthURL: requiredAttribute(mastodon.InstanceURL),
+ },
+ func(clientID, secret, callbackURL string, custom *CustomURLMapping, scopes []string) (goth.Provider, error) {
+ return mastodon.NewCustomisedURL(clientID, secret, callbackURL, custom.AuthURL, scopes...), nil
+ }))
+
+ RegisterGothProvider(NewCustomProvider(
+ "azureadv2", "Azure AD v2", &CustomURLSettings{
+ Tenant: requiredAttribute("organizations"),
+ },
+ func(clientID, secret, callbackURL string, custom *CustomURLMapping, scopes []string) (goth.Provider, error) {
+ azureScopes := make([]azureadv2.ScopeType, len(scopes))
+ for i, scope := range scopes {
+ azureScopes[i] = azureadv2.ScopeType(scope)
+ }
+
+ return azureadv2.New(clientID, secret, callbackURL, azureadv2.ProviderOptions{
+ Tenant: azureadv2.TenantType(custom.Tenant),
+ Scopes: azureScopes,
+ }), nil
+ },
+ ))
+}
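+
+// Additional providers with custom URLs follow the same pattern; a sketch for a
+// hypothetical provider (the name, URLs and constructor body are assumptions,
+// not a provider shipped by this change):
+//
+//	RegisterGothProvider(NewCustomProvider(
+//		"myidp", "My IdP", &CustomURLSettings{
+//			AuthURL:  requiredAttribute("https://idp.example.com/oauth/authorize"),
+//			TokenURL: requiredAttribute("https://idp.example.com/oauth/token"),
+//		},
+//		func(clientID, secret, callbackURL string, custom *CustomURLMapping, scopes []string) (goth.Provider, error) {
+//			// build and return the goth.Provider for the IdP here
+//			return nil, nil
+//		}))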
diff --git a/services/auth/source/oauth2/providers_openid.go b/services/auth/source/oauth2/providers_openid.go
new file mode 100644
index 0000000..285876d
--- /dev/null
+++ b/services/auth/source/oauth2/providers_openid.go
@@ -0,0 +1,58 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package oauth2
+
+import (
+ "html/template"
+
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/svg"
+
+ "github.com/markbates/goth"
+ "github.com/markbates/goth/providers/openidConnect"
+)
+
+// OpenIDProvider is a GothProvider for OpenID Connect
+type OpenIDProvider struct{}
+
+// Name provides the technical name for this provider
+func (o *OpenIDProvider) Name() string {
+ return "openidConnect"
+}
+
+// DisplayName returns the friendly name for this provider
+func (o *OpenIDProvider) DisplayName() string {
+ return "OpenID Connect"
+}
+
+// IconHTML returns icon HTML for this provider
+func (o *OpenIDProvider) IconHTML(size int) template.HTML {
+ return svg.RenderHTML("gitea-openid", size, "tw-mr-2")
+}
+
+// CreateGothProvider creates a GothProvider from this Provider
+func (o *OpenIDProvider) CreateGothProvider(providerName, callbackURL string, source *Source) (goth.Provider, error) {
+ scopes := setting.OAuth2Client.OpenIDConnectScopes
+ if len(scopes) == 0 {
+ scopes = append(scopes, source.Scopes...)
+ }
+
+ provider, err := openidConnect.New(source.ClientID, source.ClientSecret, callbackURL, source.OpenIDConnectAutoDiscoveryURL, scopes...)
+ if err != nil {
+ log.Warn("Failed to create OpenID Connect Provider with name '%s' with url '%s': %v", providerName, source.OpenIDConnectAutoDiscoveryURL, err)
+ }
+ return provider, err
+}
+
+// CustomURLSettings returns the custom url settings for this provider
+func (o *OpenIDProvider) CustomURLSettings() *CustomURLSettings {
+ return nil
+}
+
+var _ GothProvider = &OpenIDProvider{}
+
+func init() {
+ RegisterGothProvider(&OpenIDProvider{})
+}
diff --git a/services/auth/source/oauth2/providers_simple.go b/services/auth/source/oauth2/providers_simple.go
new file mode 100644
index 0000000..e95323a
--- /dev/null
+++ b/services/auth/source/oauth2/providers_simple.go
@@ -0,0 +1,109 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package oauth2
+
+import (
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/markbates/goth"
+ "github.com/markbates/goth/providers/azuread"
+ "github.com/markbates/goth/providers/bitbucket"
+ "github.com/markbates/goth/providers/discord"
+ "github.com/markbates/goth/providers/dropbox"
+ "github.com/markbates/goth/providers/facebook"
+ "github.com/markbates/goth/providers/google"
+ "github.com/markbates/goth/providers/microsoftonline"
+ "github.com/markbates/goth/providers/twitter"
+ "github.com/markbates/goth/providers/yandex"
+)
+
+// SimpleProviderNewFn creates a goth.Provider without custom URL features
+type SimpleProviderNewFn func(clientKey, secret, callbackURL string, scopes ...string) goth.Provider
+
+// SimpleProvider is a GothProvider which does not have custom URL features
+type SimpleProvider struct {
+ BaseProvider
+ scopes []string
+ newFn SimpleProviderNewFn
+}
+
+// CreateGothProvider creates a GothProvider from this Provider
+func (c *SimpleProvider) CreateGothProvider(providerName, callbackURL string, source *Source) (goth.Provider, error) {
+ scopes := make([]string, len(c.scopes)+len(source.Scopes))
+ copy(scopes, c.scopes)
+ copy(scopes[len(c.scopes):], source.Scopes)
+ return c.newFn(source.ClientID, source.ClientSecret, callbackURL, scopes...), nil
+}
+
+// NewSimpleProvider is a constructor function for simple providers
+func NewSimpleProvider(name, displayName string, scopes []string, newFn SimpleProviderNewFn) *SimpleProvider {
+ return &SimpleProvider{
+ BaseProvider: BaseProvider{
+ name: name,
+ displayName: displayName,
+ },
+ scopes: scopes,
+ newFn: newFn,
+ }
+}
+
+var _ GothProvider = &SimpleProvider{}
+
+func init() {
+ RegisterGothProvider(
+ NewSimpleProvider("bitbucket", "Bitbucket", []string{"account"},
+ func(clientKey, secret, callbackURL string, scopes ...string) goth.Provider {
+ return bitbucket.New(clientKey, secret, callbackURL, scopes...)
+ }))
+
+ RegisterGothProvider(
+ NewSimpleProvider("dropbox", "Dropbox", nil,
+ func(clientKey, secret, callbackURL string, scopes ...string) goth.Provider {
+ return dropbox.New(clientKey, secret, callbackURL, scopes...)
+ }))
+
+ RegisterGothProvider(NewSimpleProvider("facebook", "Facebook", nil,
+ func(clientKey, secret, callbackURL string, scopes ...string) goth.Provider {
+ return facebook.New(clientKey, secret, callbackURL, scopes...)
+ }))
+
+ // named gplus due to legacy gplus -> google migration (Google killed Google+). This ensures old connections still work
+ RegisterGothProvider(NewSimpleProvider("gplus", "Google", []string{"email"},
+ func(clientKey, secret, callbackURL string, scopes ...string) goth.Provider {
+ if setting.OAuth2Client.UpdateAvatar || setting.OAuth2Client.EnableAutoRegistration {
+ scopes = append(scopes, "profile")
+ }
+ return google.New(clientKey, secret, callbackURL, scopes...)
+ }))
+
+ RegisterGothProvider(NewSimpleProvider("twitter", "Twitter", nil,
+ func(clientKey, secret, callbackURL string, scopes ...string) goth.Provider {
+ return twitter.New(clientKey, secret, callbackURL)
+ }))
+
+ RegisterGothProvider(NewSimpleProvider("discord", "Discord", []string{discord.ScopeIdentify, discord.ScopeEmail},
+ func(clientKey, secret, callbackURL string, scopes ...string) goth.Provider {
+ return discord.New(clientKey, secret, callbackURL, scopes...)
+ }))
+
+ // See https://tech.yandex.com/passport/doc/dg/reference/response-docpage/
+ RegisterGothProvider(NewSimpleProvider("yandex", "Yandex", []string{"login:email", "login:info", "login:avatar"},
+ func(clientKey, secret, callbackURL string, scopes ...string) goth.Provider {
+ return yandex.New(clientKey, secret, callbackURL, scopes...)
+ }))
+
+ RegisterGothProvider(NewSimpleProvider(
+ "azuread", "Azure AD", nil,
+ func(clientID, secret, callbackURL string, scopes ...string) goth.Provider {
+ return azuread.New(clientID, secret, callbackURL, nil, scopes...)
+ },
+ ))
+
+ RegisterGothProvider(NewSimpleProvider(
+ "microsoftonline", "Microsoft Online", nil,
+ func(clientID, secret, callbackURL string, scopes ...string) goth.Provider {
+ return microsoftonline.New(clientID, secret, callbackURL, scopes...)
+ },
+ ))
+}
diff --git a/services/auth/source/oauth2/source.go b/services/auth/source/oauth2/source.go
new file mode 100644
index 0000000..675005e
--- /dev/null
+++ b/services/auth/source/oauth2/source.go
@@ -0,0 +1,51 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package oauth2
+
+import (
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/modules/json"
+)
+
+// Source holds configuration for the OAuth2 login source.
+type Source struct {
+ Provider string
+ ClientID string
+ ClientSecret string
+ OpenIDConnectAutoDiscoveryURL string
+ CustomURLMapping *CustomURLMapping
+ IconURL string
+
+ Scopes []string
+ RequiredClaimName string
+ RequiredClaimValue string
+ GroupClaimName string
+ AdminGroup string
+ GroupTeamMap string
+ GroupTeamMapRemoval bool
+ RestrictedGroup string
+ SkipLocalTwoFA bool `json:",omitempty"`
+
+ // reference to the authSource
+ authSource *auth.Source
+}
+
+// FromDB fills up an OAuth2Config from serialized format.
+func (source *Source) FromDB(bs []byte) error {
+ return json.UnmarshalHandleDoubleEncode(bs, &source)
+}
+
+// ToDB exports an OAuth2Config to a serialized format.
+func (source *Source) ToDB() ([]byte, error) {
+ return json.Marshal(source)
+}
+
+// SetAuthSource sets the related AuthSource
+func (source *Source) SetAuthSource(authSource *auth.Source) {
+ source.authSource = authSource
+}
+
+func init() {
+ auth.RegisterTypeConfig(auth.OAuth2, &Source{})
+}
diff --git a/services/auth/source/oauth2/source_authenticate.go b/services/auth/source/oauth2/source_authenticate.go
new file mode 100644
index 0000000..bbda35d
--- /dev/null
+++ b/services/auth/source/oauth2/source_authenticate.go
@@ -0,0 +1,19 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package oauth2
+
+import (
+ "context"
+
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/services/auth/source/db"
+)
+
+// Authenticate falls back to the db authenticator
+func (source *Source) Authenticate(ctx context.Context, user *user_model.User, login, password string) (*user_model.User, error) {
+ return db.Authenticate(ctx, user, login, password)
+}
+
+// NB: OAuth2 does not implement LocalTwoFASkipper for password authentication,
+// because its password authentication falls back to db authentication.
diff --git a/services/auth/source/oauth2/source_callout.go b/services/auth/source/oauth2/source_callout.go
new file mode 100644
index 0000000..f95a80f
--- /dev/null
+++ b/services/auth/source/oauth2/source_callout.go
@@ -0,0 +1,68 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package oauth2
+
+import (
+ "net/http"
+ "net/url"
+
+ "github.com/markbates/goth"
+ "github.com/markbates/goth/gothic"
+)
+
+// Callout redirects request/response pair to authenticate against the provider
+func (source *Source) Callout(request *http.Request, response http.ResponseWriter, codeChallengeS256 string) error {
+ // not sure if goth is thread safe (?) when using multiple providers
+ request.Header.Set(ProviderHeaderKey, source.authSource.Name)
+
+ var querySuffix string
+ if codeChallengeS256 != "" {
+ querySuffix = "&" + url.Values{
+ "code_challenge_method": []string{"S256"},
+ "code_challenge": []string{codeChallengeS256},
+ }.Encode()
+ }
+
+	// Don't use the default gothic begin handler, so that errors can be handled here;
+	// otherwise the gothic library would write its own output to the response instead of our error page:
+ // gothic.BeginAuthHandler(response, request)
+
+ gothRWMutex.RLock()
+ defer gothRWMutex.RUnlock()
+
+ url, err := gothic.GetAuthURL(response, request)
+ if err == nil {
+ // hacky way to set the code_challenge, but no better way until
+ // https://github.com/markbates/goth/issues/516 is resolved
+ http.Redirect(response, request, url+querySuffix, http.StatusTemporaryRedirect)
+ }
+ return err
+}
+
+// Callback handles the OAuth callback, resolves it to a goth user and sends the user back to the original URL.
+// This triggers a new authentication request, but since it is saved in the session it can be completed.
+func (source *Source) Callback(request *http.Request, response http.ResponseWriter, codeVerifier string) (goth.User, error) {
+ // not sure if goth is thread safe (?) when using multiple providers
+ request.Header.Set(ProviderHeaderKey, source.authSource.Name)
+
+ if codeVerifier != "" {
+ // hacky way to set the code_verifier...
+ // Will be picked up inside CompleteUserAuth: params := req.URL.Query()
+ // https://github.com/markbates/goth/pull/474/files
+ request = request.Clone(request.Context())
+ q := request.URL.Query()
+ q.Add("code_verifier", codeVerifier)
+ request.URL.RawQuery = q.Encode()
+ }
+
+ gothRWMutex.RLock()
+ defer gothRWMutex.RUnlock()
+
+	return gothic.CompleteUserAuth(response, request)
+}
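
Callout and Callback together implement PKCE on top of goth: the caller supplies a pre-computed S256 code challenge to Callout and the matching verifier to Callback. A minimal sketch, as a hypothetical helper that is not part of this patch, of deriving such a pair per RFC 7636:

```go
package oauth2

import (
	"crypto/rand"
	"crypto/sha256"
	"encoding/base64"
)

// pkceChallenge derives an S256 code challenge from a fresh random verifier:
// challenge = BASE64URL(SHA256(verifier)). The verifier is kept by the caller
// and later passed to Callback as codeVerifier.
func pkceChallenge() (verifier, challenge string, err error) {
	buf := make([]byte, 32)
	if _, err = rand.Read(buf); err != nil {
		return "", "", err
	}
	verifier = base64.RawURLEncoding.EncodeToString(buf)
	sum := sha256.Sum256([]byte(verifier))
	challenge = base64.RawURLEncoding.EncodeToString(sum[:])
	return verifier, challenge, nil
}
```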
diff --git a/services/auth/source/oauth2/source_name.go b/services/auth/source/oauth2/source_name.go
new file mode 100644
index 0000000..eee789e
--- /dev/null
+++ b/services/auth/source/oauth2/source_name.go
@@ -0,0 +1,18 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package oauth2
+
+// Name returns the provider name of this source
+func (source *Source) Name() string {
+ return source.Provider
+}
+
+// DisplayName returns the display name of this source
+func (source *Source) DisplayName() string {
+ provider, has := gothProviders[source.Provider]
+ if !has {
+ return source.Provider
+ }
+ return provider.DisplayName()
+}
diff --git a/services/auth/source/oauth2/source_register.go b/services/auth/source/oauth2/source_register.go
new file mode 100644
index 0000000..82a36ac
--- /dev/null
+++ b/services/auth/source/oauth2/source_register.go
@@ -0,0 +1,50 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package oauth2
+
+import (
+ "fmt"
+)
+
+// RegisterSource causes an OAuth2 configuration to be registered
+func (source *Source) RegisterSource() error {
+ err := RegisterProviderWithGothic(source.authSource.Name, source)
+ return wrapOpenIDConnectInitializeError(err, source.authSource.Name, source)
+}
+
+// UnregisterSource causes an OAuth2 configuration to be unregistered
+func (source *Source) UnregisterSource() error {
+ RemoveProviderFromGothic(source.authSource.Name)
+ return nil
+}
+
+// ErrOpenIDConnectInitialize represents a "OpenIDConnectInitialize" kind of error.
+type ErrOpenIDConnectInitialize struct {
+ OpenIDConnectAutoDiscoveryURL string
+ ProviderName string
+ Cause error
+}
+
+// IsErrOpenIDConnectInitialize checks if an error is an ErrOpenIDConnectInitialize.
+func IsErrOpenIDConnectInitialize(err error) bool {
+ _, ok := err.(ErrOpenIDConnectInitialize)
+ return ok
+}
+
+func (err ErrOpenIDConnectInitialize) Error() string {
+ return fmt.Sprintf("Failed to initialize OpenID Connect Provider with name '%s' with url '%s': %v", err.ProviderName, err.OpenIDConnectAutoDiscoveryURL, err.Cause)
+}
+
+func (err ErrOpenIDConnectInitialize) Unwrap() error {
+ return err.Cause
+}
+
+// wrapOpenIDConnectInitializeError wraps the error; this cannot be done inside modules/auth/oauth2
+// because it would create an import cycle: models -> modules/auth/oauth2 -> models
+func wrapOpenIDConnectInitializeError(err error, providerName string, source *Source) error {
+ if err != nil && source.Provider == "openidConnect" {
+ err = ErrOpenIDConnectInitialize{ProviderName: providerName, OpenIDConnectAutoDiscoveryURL: source.OpenIDConnectAutoDiscoveryURL, Cause: err}
+ }
+ return err
+}
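
Because ErrOpenIDConnectInitialize implements Unwrap, callers are not limited to the IsErrOpenIDConnectInitialize helper; errors.As works as well. A hypothetical caller-side sketch, not part of this patch:

```go
package oauth2

import "errors"

// discoveryURLFromErr extracts the auto-discovery URL from a wrapped
// initialization error, if one is present anywhere in the error chain.
func discoveryURLFromErr(err error) (string, bool) {
	var initErr ErrOpenIDConnectInitialize
	if errors.As(err, &initErr) {
		return initErr.OpenIDConnectAutoDiscoveryURL, true
	}
	return "", false
}
```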
diff --git a/services/auth/source/oauth2/store.go b/services/auth/source/oauth2/store.go
new file mode 100644
index 0000000..e031653
--- /dev/null
+++ b/services/auth/source/oauth2/store.go
@@ -0,0 +1,98 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package oauth2
+
+import (
+ "encoding/gob"
+ "fmt"
+ "net/http"
+
+ "code.gitea.io/gitea/modules/log"
+ session_module "code.gitea.io/gitea/modules/session"
+
+ chiSession "code.forgejo.org/go-chi/session"
+ "github.com/gorilla/sessions"
+)
+
+// SessionsStore creates a gothic store from our session
+type SessionsStore struct {
+ maxLength int64
+}
+
+// Get should return a cached session.
+func (st *SessionsStore) Get(r *http.Request, name string) (*sessions.Session, error) {
+ return st.getOrNew(r, name, false)
+}
+
+// New should create and return a new session.
+//
+// Note that New should never return a nil session, even in the case of
+// an error if using the Registry infrastructure to cache the session.
+func (st *SessionsStore) New(r *http.Request, name string) (*sessions.Session, error) {
+ return st.getOrNew(r, name, true)
+}
+
+// getOrNew gets the session from the chi-session if it exists. If override is set, an unexpected object stored under that name is replaced.
+func (st *SessionsStore) getOrNew(r *http.Request, name string, override bool) (*sessions.Session, error) {
+ chiStore := chiSession.GetSession(r)
+
+ session := sessions.NewSession(st, name)
+
+ rawData := chiStore.Get(name)
+ if rawData != nil {
+ oldSession, ok := rawData.(*sessions.Session)
+ if ok {
+ session.ID = oldSession.ID
+ session.IsNew = oldSession.IsNew
+ session.Options = oldSession.Options
+ session.Values = oldSession.Values
+
+ return session, nil
+ } else if !override {
+ log.Error("Unexpected object in session at name: %s: %v", name, rawData)
+ return nil, fmt.Errorf("unexpected object in session at name: %s", name)
+ }
+ }
+
+ session.IsNew = override
+ session.ID = chiStore.ID() // Simply copy the session id from the chi store
+
+ return session, chiStore.Set(name, session)
+}
+
+// Save should persist session to the underlying store implementation.
+func (st *SessionsStore) Save(r *http.Request, w http.ResponseWriter, session *sessions.Session) error {
+ chiStore := chiSession.GetSession(r)
+
+ if session.IsNew {
+ _, _ = session_module.RegenerateSession(w, r)
+ session.IsNew = false
+ }
+
+ if err := chiStore.Set(session.Name(), session); err != nil {
+ return err
+ }
+
+ if st.maxLength > 0 {
+ sizeWriter := &sizeWriter{}
+
+ _ = gob.NewEncoder(sizeWriter).Encode(session)
+ if sizeWriter.size > st.maxLength {
+ return fmt.Errorf("encode session: Data too long: %d > %d", sizeWriter.size, st.maxLength)
+ }
+ }
+
+ return chiStore.Release()
+}
+
+type sizeWriter struct {
+ size int64
+}
+
+func (s *sizeWriter) Write(data []byte) (int, error) {
+ s.size += int64(len(data))
+ return len(data), nil
+}
+
+var _ (sessions.Store) = &SessionsStore{}
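
The maxLength check in Save relies on gob-encoding into a counting writer rather than a buffer, so the session size is measured without keeping a second copy of the data. The same trick as a standalone sketch, assumed to live in this package:

```go
package oauth2

import "encoding/gob"

// encodedSize returns the gob-encoded size of v in bytes, writing only to a
// counter and never materializing the encoded form.
func encodedSize(v any) (int64, error) {
	sw := &sizeWriter{}
	if err := gob.NewEncoder(sw).Encode(v); err != nil {
		return 0, err
	}
	return sw.size, nil
}
```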
diff --git a/services/auth/source/oauth2/token.go b/services/auth/source/oauth2/token.go
new file mode 100644
index 0000000..3405619
--- /dev/null
+++ b/services/auth/source/oauth2/token.go
@@ -0,0 +1,100 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package oauth2
+
+import (
+ "fmt"
+ "time"
+
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "github.com/golang-jwt/jwt/v5"
+)
+
+// ___________ __
+// \__ ___/___ | | __ ____ ____
+// | | / _ \| |/ // __ \ / \
+// | |( <_> ) <\ ___/| | \
+// |____| \____/|__|_ \\___ >___| /
+// \/ \/ \/
+
+// TokenType represents the type of token for an oauth application
+type TokenType int
+
+const (
+	// TypeAccessToken is a short-lived token used to access the API
+	TypeAccessToken TokenType = iota
+	// TypeRefreshToken is a long-lived token used by the client to refresh access tokens
+	TypeRefreshToken
+)
+
+// Token represents a JWT token used to authenticate a client
+type Token struct {
+ GrantID int64 `json:"gnt"`
+ Type TokenType `json:"tt"`
+ Counter int64 `json:"cnt,omitempty"`
+ jwt.RegisteredClaims
+}
+
+// ParseToken parses a signed jwt string
+func ParseToken(jwtToken string, signingKey JWTSigningKey) (*Token, error) {
+ parsedToken, err := jwt.ParseWithClaims(jwtToken, &Token{}, func(token *jwt.Token) (any, error) {
+ if token.Method == nil || token.Method.Alg() != signingKey.SigningMethod().Alg() {
+ return nil, fmt.Errorf("unexpected signing algo: %v", token.Header["alg"])
+ }
+ return signingKey.VerifyKey(), nil
+ })
+ if err != nil {
+ return nil, err
+ }
+	token, ok := parsedToken.Claims.(*Token)
+	if !ok || !parsedToken.Valid {
+		return nil, fmt.Errorf("invalid token")
+	}
+	return token, nil
+}
+
+// SignToken signs the token with the JWT secret
+func (token *Token) SignToken(signingKey JWTSigningKey) (string, error) {
+ token.IssuedAt = jwt.NewNumericDate(time.Now())
+ jwtToken := jwt.NewWithClaims(signingKey.SigningMethod(), token)
+ signingKey.PreProcessToken(jwtToken)
+ return jwtToken.SignedString(signingKey.SignKey())
+}
+
+// OIDCToken represents an OpenID Connect id_token
+type OIDCToken struct {
+ jwt.RegisteredClaims
+ Nonce string `json:"nonce,omitempty"`
+
+ // Scope profile
+ Name string `json:"name,omitempty"`
+ PreferredUsername string `json:"preferred_username,omitempty"`
+ Profile string `json:"profile,omitempty"`
+ Picture string `json:"picture,omitempty"`
+ Website string `json:"website,omitempty"`
+ Locale string `json:"locale,omitempty"`
+ UpdatedAt timeutil.TimeStamp `json:"updated_at,omitempty"`
+
+ // Scope email
+ Email string `json:"email,omitempty"`
+ EmailVerified bool `json:"email_verified,omitempty"`
+
+ // Groups are generated by organization and team names
+ Groups []string `json:"groups,omitempty"`
+}
+
+// SignToken signs an id_token with the (symmetric) client secret key
+func (token *OIDCToken) SignToken(signingKey JWTSigningKey) (string, error) {
+ token.IssuedAt = jwt.NewNumericDate(time.Now())
+ jwtToken := jwt.NewWithClaims(signingKey.SigningMethod(), token)
+ signingKey.PreProcessToken(jwtToken)
+ return jwtToken.SignedString(signingKey.SignKey())
+}
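
SignToken and ParseToken are symmetric around a JWTSigningKey. A minimal round-trip sketch, assuming a throwaway HS256 key instead of the real JWTSigningKey plumbing:

```go
package oauth2

import (
	"time"

	"github.com/golang-jwt/jwt/v5"
)

// roundTrip signs a Token with a throwaway symmetric key and parses it back,
// mirroring what SignToken/ParseToken do via JWTSigningKey.
func roundTrip() (*Token, error) {
	key := []byte("example-secret") // placeholder; real keys come from the JWT signing key setup
	src := &Token{GrantID: 1, Type: TypeAccessToken}
	src.ExpiresAt = jwt.NewNumericDate(time.Now().Add(time.Hour))

	signed, err := jwt.NewWithClaims(jwt.SigningMethodHS256, src).SignedString(key)
	if err != nil {
		return nil, err
	}
	parsed, err := jwt.ParseWithClaims(signed, &Token{}, func(t *jwt.Token) (any, error) {
		return key, nil
	})
	if err != nil {
		return nil, err
	}
	return parsed.Claims.(*Token), nil
}
```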
diff --git a/services/auth/source/oauth2/urlmapping.go b/services/auth/source/oauth2/urlmapping.go
new file mode 100644
index 0000000..d0442d5
--- /dev/null
+++ b/services/auth/source/oauth2/urlmapping.go
@@ -0,0 +1,77 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package oauth2
+
+// CustomURLMapping describes the URL values to use when customizing OAuth2 provider URLs
+type CustomURLMapping struct {
+ AuthURL string `json:",omitempty"`
+ TokenURL string `json:",omitempty"`
+ ProfileURL string `json:",omitempty"`
+ EmailURL string `json:",omitempty"`
+ Tenant string `json:",omitempty"`
+}
+
+// CustomURLSettings describes the URL values and their availability when customizing OAuth2 provider URLs
+type CustomURLSettings struct {
+ AuthURL Attribute `json:",omitempty"`
+ TokenURL Attribute `json:",omitempty"`
+ ProfileURL Attribute `json:",omitempty"`
+ EmailURL Attribute `json:",omitempty"`
+ Tenant Attribute `json:",omitempty"`
+}
+
+// Attribute describes the availability and required status of a custom URL configuration value
+type Attribute struct {
+ Value string
+ Available bool
+ Required bool
+}
+
+func availableAttribute(value string) Attribute {
+ return Attribute{Value: value, Available: true}
+}
+
+func requiredAttribute(value string) Attribute {
+ return Attribute{Value: value, Available: true, Required: true}
+}
+
+// Required is true if any attribute is required
+func (c *CustomURLSettings) Required() bool {
+ if c == nil {
+ return false
+ }
+	return c.AuthURL.Required || c.EmailURL.Required || c.ProfileURL.Required || c.TokenURL.Required || c.Tenant.Required
+}
+
+// OverrideWith builds a CustomURLMapping from the default values, overriding each available attribute with the value from the provided mapping
+func (c *CustomURLSettings) OverrideWith(override *CustomURLMapping) *CustomURLMapping {
+ custom := &CustomURLMapping{
+ AuthURL: c.AuthURL.Value,
+ TokenURL: c.TokenURL.Value,
+ ProfileURL: c.ProfileURL.Value,
+ EmailURL: c.EmailURL.Value,
+ Tenant: c.Tenant.Value,
+ }
+ if override != nil {
+ if len(override.AuthURL) > 0 && c.AuthURL.Available {
+ custom.AuthURL = override.AuthURL
+ }
+ if len(override.TokenURL) > 0 && c.TokenURL.Available {
+ custom.TokenURL = override.TokenURL
+ }
+ if len(override.ProfileURL) > 0 && c.ProfileURL.Available {
+ custom.ProfileURL = override.ProfileURL
+ }
+ if len(override.EmailURL) > 0 && c.EmailURL.Available {
+ custom.EmailURL = override.EmailURL
+ }
+ if len(override.Tenant) > 0 && c.Tenant.Available {
+ custom.Tenant = override.Tenant
+ }
+ }
+ return custom
+}
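
OverrideWith only honours overrides for attributes marked Available; Required merely declares that the admin must supply a value. A sketch with hypothetical GitHub Enterprise URLs:

```go
package oauth2

// exampleGitHubEnterprise shows how admin-supplied URLs merge with defaults:
// AuthURL and TokenURL are overridden, while ProfileURL keeps its default.
func exampleGitHubEnterprise() *CustomURLMapping {
	settings := &CustomURLSettings{
		AuthURL:    availableAttribute("https://github.com/login/oauth/authorize"),
		TokenURL:   availableAttribute("https://github.com/login/oauth/access_token"),
		ProfileURL: availableAttribute("https://api.github.com/user"),
	}
	return settings.OverrideWith(&CustomURLMapping{
		AuthURL:  "https://ghe.example.com/login/oauth/authorize",
		TokenURL: "https://ghe.example.com/login/oauth/access_token",
	})
}
```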
diff --git a/services/auth/source/pam/assert_interface_test.go b/services/auth/source/pam/assert_interface_test.go
new file mode 100644
index 0000000..8e7648b
--- /dev/null
+++ b/services/auth/source/pam/assert_interface_test.go
@@ -0,0 +1,21 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pam_test
+
+import (
+ auth_model "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/services/auth"
+ "code.gitea.io/gitea/services/auth/source/pam"
+)
+
+// This test file exists to assert that our Source exposes the interfaces that we expect
+// It tightly binds the interfaces and implementation without breaking go import cycles
+
+type sourceInterface interface {
+ auth.PasswordAuthenticator
+ auth_model.Config
+ auth_model.SourceSettable
+}
+
+var _ (sourceInterface) = &pam.Source{}
diff --git a/services/auth/source/pam/source.go b/services/auth/source/pam/source.go
new file mode 100644
index 0000000..96b182e
--- /dev/null
+++ b/services/auth/source/pam/source.go
@@ -0,0 +1,45 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pam
+
+import (
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/modules/json"
+)
+
+// __________ _____ _____
+// \______ \/ _ \ / \
+// | ___/ /_\ \ / \ / \
+// | | / | \/ Y \
+// |____| \____|__ /\____|__ /
+// \/ \/
+
+// Source holds configuration for the PAM login source.
+type Source struct {
+ ServiceName string // pam service (e.g. system-auth)
+ EmailDomain string
+ SkipLocalTwoFA bool `json:",omitempty"` // Skip Local 2fa for users authenticated with this source
+
+ // reference to the authSource
+ authSource *auth.Source
+}
+
+// FromDB fills up a PAMConfig from serialized format.
+func (source *Source) FromDB(bs []byte) error {
+ return json.UnmarshalHandleDoubleEncode(bs, &source)
+}
+
+// ToDB exports a PAMConfig to a serialized format.
+func (source *Source) ToDB() ([]byte, error) {
+ return json.Marshal(source)
+}
+
+// SetAuthSource sets the related AuthSource
+func (source *Source) SetAuthSource(authSource *auth.Source) {
+ source.authSource = authSource
+}
+
+func init() {
+ auth.RegisterTypeConfig(auth.PAM, &Source{})
+}
diff --git a/services/auth/source/pam/source_authenticate.go b/services/auth/source/pam/source_authenticate.go
new file mode 100644
index 0000000..addd1bd
--- /dev/null
+++ b/services/auth/source/pam/source_authenticate.go
@@ -0,0 +1,76 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pam
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ "code.gitea.io/gitea/models/auth"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/auth/pam"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/google/uuid"
+)
+
+// Authenticate queries if login/password is valid against PAM,
+// and creates a local user on success if auto-registration is enabled.
+func (source *Source) Authenticate(ctx context.Context, user *user_model.User, userName, password string) (*user_model.User, error) {
+ pamLogin, err := pam.Auth(source.ServiceName, userName, password)
+ if err != nil {
+ if strings.Contains(err.Error(), "Authentication failure") {
+ return nil, user_model.ErrUserNotExist{Name: userName}
+ }
+ return nil, err
+ }
+
+ if user != nil {
+ return user, nil
+ }
+
+	// Allow PAM logins that contain "@", e.g. user principal names from Active Directory
+ username := pamLogin
+ email := pamLogin
+ idx := strings.Index(pamLogin, "@")
+ if idx > -1 {
+ username = pamLogin[:idx]
+ }
+ if user_model.ValidateEmail(email) != nil {
+ if source.EmailDomain != "" {
+ email = fmt.Sprintf("%s@%s", username, source.EmailDomain)
+ } else {
+ email = fmt.Sprintf("%s@%s", username, setting.Service.NoReplyAddress)
+ }
+ if user_model.ValidateEmail(email) != nil {
+ email = uuid.New().String() + "@localhost"
+ }
+ }
+
+ user = &user_model.User{
+ LowerName: strings.ToLower(username),
+ Name: username,
+ Email: email,
+ Passwd: password,
+ LoginType: auth.PAM,
+ LoginSource: source.authSource.ID,
+ LoginName: userName, // This is what the user typed in
+ }
+ overwriteDefault := &user_model.CreateUserOverwriteOptions{
+ IsActive: optional.Some(true),
+ }
+
+ if err := user_model.CreateUser(ctx, user, overwriteDefault); err != nil {
+ return user, err
+ }
+
+ return user, nil
+}
+
+// IsSkipLocalTwoFA returns if this source should skip local 2fa for password authentication
+func (source *Source) IsSkipLocalTwoFA() bool {
+ return source.SkipLocalTwoFA
+}
diff --git a/services/auth/source/remote/source.go b/services/auth/source/remote/source.go
new file mode 100644
index 0000000..4165858
--- /dev/null
+++ b/services/auth/source/remote/source.go
@@ -0,0 +1,33 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// SPDX-License-Identifier: MIT
+
+package remote
+
+import (
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/modules/json"
+)
+
+type Source struct {
+ URL string
+ MatchingSource string
+
+ // reference to the authSource
+ authSource *auth.Source
+}
+
+func (source *Source) FromDB(bs []byte) error {
+ return json.UnmarshalHandleDoubleEncode(bs, &source)
+}
+
+func (source *Source) ToDB() ([]byte, error) {
+ return json.Marshal(source)
+}
+
+func (source *Source) SetAuthSource(authSource *auth.Source) {
+ source.authSource = authSource
+}
+
+func init() {
+ auth.RegisterTypeConfig(auth.Remote, &Source{})
+}
diff --git a/services/auth/source/smtp/assert_interface_test.go b/services/auth/source/smtp/assert_interface_test.go
new file mode 100644
index 0000000..6c9cde6
--- /dev/null
+++ b/services/auth/source/smtp/assert_interface_test.go
@@ -0,0 +1,24 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package smtp_test
+
+import (
+ auth_model "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/services/auth"
+ "code.gitea.io/gitea/services/auth/source/smtp"
+)
+
+// This test file exists to assert that our Source exposes the interfaces that we expect
+// It tightly binds the interfaces and implementation without breaking go import cycles
+
+type sourceInterface interface {
+ auth.PasswordAuthenticator
+ auth_model.Config
+ auth_model.SkipVerifiable
+ auth_model.HasTLSer
+ auth_model.UseTLSer
+ auth_model.SourceSettable
+}
+
+var _ (sourceInterface) = &smtp.Source{}
diff --git a/services/auth/source/smtp/auth.go b/services/auth/source/smtp/auth.go
new file mode 100644
index 0000000..6446fcd
--- /dev/null
+++ b/services/auth/source/smtp/auth.go
@@ -0,0 +1,106 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package smtp
+
+import (
+ "crypto/tls"
+ "errors"
+ "fmt"
+ "net"
+ "net/smtp"
+ "os"
+ "strconv"
+)
+
+// _________ __________________________
+// / _____/ / \__ ___/\______ \
+// \_____ \ / \ / \| | | ___/
+// / \/ Y \ | | |
+// /_______ /\____|__ /____| |____|
+// \/ \/
+
+type loginAuthenticator struct {
+ username, password string
+}
+
+func (auth *loginAuthenticator) Start(server *smtp.ServerInfo) (string, []byte, error) {
+ return "LOGIN", []byte(auth.username), nil
+}
+
+func (auth *loginAuthenticator) Next(fromServer []byte, more bool) ([]byte, error) {
+ if more {
+ switch string(fromServer) {
+ case "Username:":
+ return []byte(auth.username), nil
+ case "Password:":
+ return []byte(auth.password), nil
+ }
+ }
+ return nil, nil
+}
+
+// SMTP authentication type names.
+const (
+ PlainAuthentication = "PLAIN"
+ LoginAuthentication = "LOGIN"
+ CRAMMD5Authentication = "CRAM-MD5"
+)
+
+// Authenticators contains available SMTP authentication type names.
+var Authenticators = []string{PlainAuthentication, LoginAuthentication, CRAMMD5Authentication}
+
+// ErrUnsupportedLoginType login source is unknown error
+var ErrUnsupportedLoginType = errors.New("Login source is unknown")
+
+// Authenticate performs an SMTP authentication.
+func Authenticate(a smtp.Auth, source *Source) error {
+ tlsConfig := &tls.Config{
+ InsecureSkipVerify: source.SkipVerify,
+ ServerName: source.Host,
+ }
+
+ conn, err := net.Dial("tcp", net.JoinHostPort(source.Host, strconv.Itoa(source.Port)))
+ if err != nil {
+ return err
+ }
+ defer conn.Close()
+
+ if source.UseTLS() {
+ conn = tls.Client(conn, tlsConfig)
+ }
+
+ client, err := smtp.NewClient(conn, source.Host)
+ if err != nil {
+ return fmt.Errorf("failed to create NewClient: %w", err)
+ }
+ defer client.Close()
+
+ if !source.DisableHelo {
+ hostname := source.HeloHostname
+ if len(hostname) == 0 {
+ hostname, err = os.Hostname()
+ if err != nil {
+ return fmt.Errorf("failed to find Hostname: %w", err)
+ }
+ }
+
+ if err = client.Hello(hostname); err != nil {
+ return fmt.Errorf("failed to send Helo: %w", err)
+ }
+ }
+
+ // If not using SMTPS, always use STARTTLS if available
+ hasStartTLS, _ := client.Extension("STARTTLS")
+ if !source.UseTLS() && hasStartTLS {
+ if err = client.StartTLS(tlsConfig); err != nil {
+ return fmt.Errorf("failed to start StartTLS: %w", err)
+ }
+ }
+
+ if ok, _ := client.Extension("AUTH"); ok {
+ return client.Auth(a)
+ }
+
+ return ErrUnsupportedLoginType
+}
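
Authenticate dials the configured host, upgrades the connection (SMTPS when forced or implied by the port, otherwise STARTTLS when offered) and then runs the chosen SASL mechanism. A usage sketch with hypothetical host values:

```go
package smtp

import "net/smtp"

// checkCredentials verifies user/pass against a submission server on port
// 587, where UseTLS is false and STARTTLS is attempted; port 465 would be
// treated as SMTPS instead.
func checkCredentials(user, pass string) error {
	src := &Source{Host: "mail.example.com", Port: 587, Auth: PlainAuthentication}
	return Authenticate(smtp.PlainAuth("", user, pass, src.Host), src)
}
```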
diff --git a/services/auth/source/smtp/source.go b/services/auth/source/smtp/source.go
new file mode 100644
index 0000000..2a648e4
--- /dev/null
+++ b/services/auth/source/smtp/source.go
@@ -0,0 +1,66 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package smtp
+
+import (
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/modules/json"
+)
+
+// _________ __________________________
+// / _____/ / \__ ___/\______ \
+// \_____ \ / \ / \| | | ___/
+// / \/ Y \ | | |
+// /_______ /\____|__ /____| |____|
+// \/ \/
+
+// Source holds configuration for the SMTP login source.
+type Source struct {
+ Auth string
+ Host string
+ Port int
+ AllowedDomains string `xorm:"TEXT"`
+ ForceSMTPS bool
+ SkipVerify bool
+ HeloHostname string
+ DisableHelo bool
+ SkipLocalTwoFA bool `json:",omitempty"`
+
+ // reference to the authSource
+ authSource *auth.Source
+}
+
+// FromDB fills up an SMTPConfig from serialized format.
+func (source *Source) FromDB(bs []byte) error {
+ return json.UnmarshalHandleDoubleEncode(bs, &source)
+}
+
+// ToDB exports an SMTPConfig to a serialized format.
+func (source *Source) ToDB() ([]byte, error) {
+ return json.Marshal(source)
+}
+
+// IsSkipVerify returns if SkipVerify is set
+func (source *Source) IsSkipVerify() bool {
+ return source.SkipVerify
+}
+
+// HasTLS returns true for SMTP
+func (source *Source) HasTLS() bool {
+ return true
+}
+
+// UseTLS returns if TLS is set
+func (source *Source) UseTLS() bool {
+ return source.ForceSMTPS || source.Port == 465
+}
+
+// SetAuthSource sets the related AuthSource
+func (source *Source) SetAuthSource(authSource *auth.Source) {
+ source.authSource = authSource
+}
+
+func init() {
+ auth.RegisterTypeConfig(auth.SMTP, &Source{})
+}
diff --git a/services/auth/source/smtp/source_authenticate.go b/services/auth/source/smtp/source_authenticate.go
new file mode 100644
index 0000000..1f0a61c
--- /dev/null
+++ b/services/auth/source/smtp/source_authenticate.go
@@ -0,0 +1,92 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package smtp
+
+import (
+ "context"
+ "errors"
+ "net/smtp"
+ "net/textproto"
+ "strings"
+
+ auth_model "code.gitea.io/gitea/models/auth"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/util"
+)
+
+// Authenticate queries if the provided login/password authenticates against the SMTP server.
+// Users will be auto-registered as required.
+func (source *Source) Authenticate(ctx context.Context, user *user_model.User, userName, password string) (*user_model.User, error) {
+ // Verify allowed domains.
+ if len(source.AllowedDomains) > 0 {
+ idx := strings.Index(userName, "@")
+ if idx == -1 {
+ return nil, user_model.ErrUserNotExist{Name: userName}
+ } else if !util.SliceContainsString(strings.Split(source.AllowedDomains, ","), userName[idx+1:], true) {
+ return nil, user_model.ErrUserNotExist{Name: userName}
+ }
+ }
+
+ var auth smtp.Auth
+ switch source.Auth {
+ case PlainAuthentication:
+ auth = smtp.PlainAuth("", userName, password, source.Host)
+ case LoginAuthentication:
+ auth = &loginAuthenticator{userName, password}
+ case CRAMMD5Authentication:
+ auth = smtp.CRAMMD5Auth(userName, password)
+ default:
+ return nil, errors.New("unsupported SMTP auth type")
+ }
+
+ if err := Authenticate(auth, source); err != nil {
+		// Check the standard error format first,
+		// then fall back to string matching for the worst case.
+ tperr, ok := err.(*textproto.Error)
+ if (ok && tperr.Code == 535) ||
+ strings.Contains(err.Error(), "Username and Password not accepted") {
+ return nil, user_model.ErrUserNotExist{Name: userName}
+ }
+ if (ok && tperr.Code == 534) ||
+ strings.Contains(err.Error(), "Application-specific password required") {
+ return nil, user_model.ErrUserNotExist{Name: userName}
+ }
+ return nil, err
+ }
+
+ if user != nil {
+ return user, nil
+ }
+
+ username := userName
+ idx := strings.Index(userName, "@")
+ if idx > -1 {
+ username = userName[:idx]
+ }
+
+ user = &user_model.User{
+ LowerName: strings.ToLower(username),
+ Name: strings.ToLower(username),
+ Email: userName,
+ Passwd: password,
+ LoginType: auth_model.SMTP,
+ LoginSource: source.authSource.ID,
+ LoginName: userName,
+ }
+ overwriteDefault := &user_model.CreateUserOverwriteOptions{
+ IsActive: optional.Some(true),
+ }
+
+ if err := user_model.CreateUser(ctx, user, overwriteDefault); err != nil {
+ return user, err
+ }
+
+ return user, nil
+}
+
+// IsSkipLocalTwoFA returns if this source should skip local 2fa for password authentication
+func (source *Source) IsSkipLocalTwoFA() bool {
+ return source.SkipLocalTwoFA
+}
diff --git a/services/auth/source/source_group_sync.go b/services/auth/source/source_group_sync.go
new file mode 100644
index 0000000..3a2411e
--- /dev/null
+++ b/services/auth/source/source_group_sync.go
@@ -0,0 +1,116 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package source
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/organization"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/log"
+)
+
+type syncType int
+
+const (
+ syncAdd syncType = iota
+ syncRemove
+)
+
+// SyncGroupsToTeams maps authentication source groups to organization and team memberships
+func SyncGroupsToTeams(ctx context.Context, user *user_model.User, sourceUserGroups container.Set[string], sourceGroupTeamMapping map[string]map[string][]string, performRemoval bool) error {
+ orgCache := make(map[string]*organization.Organization)
+ teamCache := make(map[string]*organization.Team)
+ return SyncGroupsToTeamsCached(ctx, user, sourceUserGroups, sourceGroupTeamMapping, performRemoval, orgCache, teamCache)
+}
+
+// SyncGroupsToTeamsCached maps authentication source groups to organization and team memberships
+func SyncGroupsToTeamsCached(ctx context.Context, user *user_model.User, sourceUserGroups container.Set[string], sourceGroupTeamMapping map[string]map[string][]string, performRemoval bool, orgCache map[string]*organization.Organization, teamCache map[string]*organization.Team) error {
+ membershipsToAdd, membershipsToRemove := resolveMappedMemberships(sourceUserGroups, sourceGroupTeamMapping)
+
+ if performRemoval {
+ if err := syncGroupsToTeamsCached(ctx, user, membershipsToRemove, syncRemove, orgCache, teamCache); err != nil {
+ return fmt.Errorf("could not sync[remove] user groups: %w", err)
+ }
+ }
+
+ if err := syncGroupsToTeamsCached(ctx, user, membershipsToAdd, syncAdd, orgCache, teamCache); err != nil {
+ return fmt.Errorf("could not sync[add] user groups: %w", err)
+ }
+
+ return nil
+}
+
+func resolveMappedMemberships(sourceUserGroups container.Set[string], sourceGroupTeamMapping map[string]map[string][]string) (map[string][]string, map[string][]string) {
+ membershipsToAdd := map[string][]string{}
+ membershipsToRemove := map[string][]string{}
+ for group, memberships := range sourceGroupTeamMapping {
+ isUserInGroup := sourceUserGroups.Contains(group)
+ if isUserInGroup {
+ for org, teams := range memberships {
+ membershipsToAdd[org] = append(membershipsToAdd[org], teams...)
+ }
+ } else {
+ for org, teams := range memberships {
+ membershipsToRemove[org] = append(membershipsToRemove[org], teams...)
+ }
+ }
+ }
+ return membershipsToAdd, membershipsToRemove
+}
+
+func syncGroupsToTeamsCached(ctx context.Context, user *user_model.User, orgTeamMap map[string][]string, action syncType, orgCache map[string]*organization.Organization, teamCache map[string]*organization.Team) error {
+ for orgName, teamNames := range orgTeamMap {
+ var err error
+ org, ok := orgCache[orgName]
+ if !ok {
+ org, err = organization.GetOrgByName(ctx, orgName)
+ if err != nil {
+ if organization.IsErrOrgNotExist(err) {
+ // organization must be created before group sync
+ log.Warn("group sync: Could not find organisation %s: %v", orgName, err)
+ continue
+ }
+ return err
+ }
+ orgCache[orgName] = org
+ }
+ for _, teamName := range teamNames {
+ team, ok := teamCache[orgName+teamName]
+ if !ok {
+ team, err = org.GetTeam(ctx, teamName)
+ if err != nil {
+ if organization.IsErrTeamNotExist(err) {
+ // team must be created before group sync
+ log.Warn("group sync: Could not find team %s: %v", teamName, err)
+ continue
+ }
+ return err
+ }
+ teamCache[orgName+teamName] = team
+ }
+
+ isMember, err := organization.IsTeamMember(ctx, org.ID, team.ID, user.ID)
+ if err != nil {
+ return err
+ }
+
+ if action == syncAdd && !isMember {
+ if err := models.AddTeamMember(ctx, team, user.ID); err != nil {
+ log.Error("group sync: Could not add user to team: %v", err)
+ return err
+ }
+ } else if action == syncRemove && isMember {
+ if err := models.RemoveTeamMember(ctx, team, user.ID); err != nil {
+ log.Error("group sync: Could not remove user from team: %v", err)
+ return err
+ }
+ }
+ }
+ }
+ return nil
+}
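
The mapping consumed here has the shape source group -> organization -> team names. A sketch with hypothetical group, org and team names, showing how memberships split into additions and removals (removals are applied only when performRemoval is set):

```go
package source

import "code.gitea.io/gitea/modules/container"

// exampleMemberships resolves a user who is only in "cn=devs": the "acme"
// teams mapped from that group are queued for addition, while teams mapped
// from "cn=ops" are queued for removal.
func exampleMemberships() (toAdd, toRemove map[string][]string) {
	userGroups := container.SetOf("cn=devs")
	mapping := map[string]map[string][]string{
		"cn=devs": {"acme": {"developers", "reviewers"}},
		"cn=ops":  {"acme": {"operators"}},
	}
	return resolveMappedMemberships(userGroups, mapping)
}
```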
diff --git a/services/auth/source/sspi/assert_interface_test.go b/services/auth/source/sspi/assert_interface_test.go
new file mode 100644
index 0000000..03d836d
--- /dev/null
+++ b/services/auth/source/sspi/assert_interface_test.go
@@ -0,0 +1,18 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package sspi_test
+
+import (
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/services/auth/source/sspi"
+)
+
+// This test file exists to assert that our Source exposes the interfaces that we expect
+// It tightly binds the interfaces and implementation without breaking go import cycles
+
+type sourceInterface interface {
+ auth.Config
+}
+
+var _ (sourceInterface) = &sspi.Source{}
diff --git a/services/auth/source/sspi/source.go b/services/auth/source/sspi/source.go
new file mode 100644
index 0000000..bdd6ef4
--- /dev/null
+++ b/services/auth/source/sspi/source.go
@@ -0,0 +1,39 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package sspi
+
+import (
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/modules/json"
+)
+
+// _________ ___________________.___
+// / _____// _____/\______ \ |
+// \_____ \ \_____ \ | ___/ |
+// / \/ \ | | | |
+// /_______ /_______ / |____| |___|
+// \/ \/
+
+// Source holds configuration for SSPI single sign-on.
+type Source struct {
+ AutoCreateUsers bool
+ AutoActivateUsers bool
+ StripDomainNames bool
+ SeparatorReplacement string
+ DefaultLanguage string
+}
+
+// FromDB fills up an SSPIConfig from serialized format.
+func (cfg *Source) FromDB(bs []byte) error {
+ return json.UnmarshalHandleDoubleEncode(bs, &cfg)
+}
+
+// ToDB exports an SSPIConfig to a serialized format.
+func (cfg *Source) ToDB() ([]byte, error) {
+ return json.Marshal(cfg)
+}
+
+func init() {
+ auth.RegisterTypeConfig(auth.SSPI, &Source{})
+}
diff --git a/services/auth/sspi.go b/services/auth/sspi.go
new file mode 100644
index 0000000..64a127e
--- /dev/null
+++ b/services/auth/sspi.go
@@ -0,0 +1,223 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "context"
+ "errors"
+ "net/http"
+ "strings"
+ "sync"
+
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/web/middleware"
+ "code.gitea.io/gitea/services/auth/source/sspi"
+ gitea_context "code.gitea.io/gitea/services/context"
+
+ gouuid "github.com/google/uuid"
+)
+
+const (
+ tplSignIn base.TplName = "user/auth/signin"
+)
+
+type SSPIAuth interface {
+ AppendAuthenticateHeader(w http.ResponseWriter, data string)
+ Authenticate(r *http.Request, w http.ResponseWriter) (userInfo *SSPIUserInfo, outToken string, err error)
+}
+
+var (
+ sspiAuth SSPIAuth // a global instance of the websspi authenticator to avoid acquiring the server credential handle on every request
+ sspiAuthOnce sync.Once
+ sspiAuthErrInit error
+
+ // Ensure the struct implements the interface.
+ _ Method = &SSPI{}
+)
+
+// SSPI implements the SingleSignOn interface and authenticates requests
+// via the built-in SSPI module in Windows for SPNEGO authentication.
+// The SSPI plugin is expected to be executed last, as it returns a 401 status code if negotiation
+// fails (or if negotiation should continue), which would prevent other authentication methods
+// from executing at all.
+type SSPI struct{}
+
+// Name represents the name of auth method
+func (s *SSPI) Name() string {
+ return "sspi"
+}
+
+// Verify uses SSPI (Windows implementation of SPNEGO) to authenticate the request.
+// If authentication is successful, returns the corresponding user object.
+// If negotiation should continue or authentication fails, immediately returns a 401 HTTP
+// response code, as required by the SPNEGO protocol.
+func (s *SSPI) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) (*user_model.User, error) {
+ sspiAuthOnce.Do(func() { sspiAuthErrInit = sspiAuthInit() })
+ if sspiAuthErrInit != nil {
+ return nil, sspiAuthErrInit
+ }
+ if !s.shouldAuthenticate(req) {
+ return nil, nil
+ }
+
+ cfg, err := s.getConfig(req.Context())
+ if err != nil {
+ log.Error("could not get SSPI config: %v", err)
+ return nil, err
+ }
+
+ log.Trace("SSPI Authorization: Attempting to authenticate")
+ userInfo, outToken, err := sspiAuth.Authenticate(req, w)
+ if err != nil {
+ log.Warn("Authentication failed with error: %v\n", err)
+ sspiAuth.AppendAuthenticateHeader(w, outToken)
+
+ // Include the user login page in the 401 response to allow the user
+ // to login with another authentication method if SSPI authentication
+ // fails
+ store.GetData()["Flash"] = map[string]string{
+ "ErrorMsg": err.Error(),
+ }
+ store.GetData()["EnableOpenIDSignIn"] = setting.Service.EnableOpenIDSignIn
+ store.GetData()["EnableSSPI"] = true
+ // in this case, the Verify function is called in Gitea's web context
+ // FIXME: it doesn't look good to render the page here, why not redirect?
+ gitea_context.GetWebContext(req).HTML(http.StatusUnauthorized, tplSignIn)
+ return nil, err
+ }
+ if outToken != "" {
+ sspiAuth.AppendAuthenticateHeader(w, outToken)
+ }
+
+ username := sanitizeUsername(userInfo.Username, cfg)
+ if len(username) == 0 {
+ return nil, nil
+ }
+ log.Info("Authenticated as %s\n", username)
+
+ user, err := user_model.GetUserByName(req.Context(), username)
+ if err != nil {
+ if !user_model.IsErrUserNotExist(err) {
+ log.Error("GetUserByName: %v", err)
+ return nil, err
+ }
+ if !cfg.AutoCreateUsers {
+ log.Error("User '%s' not found", username)
+ return nil, nil
+ }
+ user, err = s.newUser(req.Context(), username, cfg)
+ if err != nil {
+ log.Error("CreateUser: %v", err)
+ return nil, err
+ }
+ }
+
+ // Make sure requests to API paths and PWA resources do not create a new session
+ if !middleware.IsAPIPath(req) && !isAttachmentDownload(req) {
+ handleSignIn(w, req, sess, user)
+ }
+
+ log.Trace("SSPI Authorization: Logged in user %-v", user)
+ return user, nil
+}
+
+// getConfig retrieves the SSPI configuration from login sources
+func (s *SSPI) getConfig(ctx context.Context) (*sspi.Source, error) {
+ sources, err := db.Find[auth.Source](ctx, auth.FindSourcesOptions{
+ IsActive: optional.Some(true),
+ LoginType: auth.SSPI,
+ })
+ if err != nil {
+ return nil, err
+ }
+ if len(sources) == 0 {
+ return nil, errors.New("no active login sources of type SSPI found")
+ }
+ if len(sources) > 1 {
+ return nil, errors.New("more than one active login source of type SSPI found")
+ }
+ return sources[0].Cfg.(*sspi.Source), nil
+}
+
+func (s *SSPI) shouldAuthenticate(req *http.Request) (shouldAuth bool) {
+ shouldAuth = false
+ path := strings.TrimSuffix(req.URL.Path, "/")
+ if path == "/user/login" {
+ if req.FormValue("user_name") != "" && req.FormValue("password") != "" {
+ shouldAuth = false
+ } else if req.FormValue("auth_with_sspi") == "1" {
+ shouldAuth = true
+ }
+ } else if middleware.IsAPIPath(req) || isAttachmentDownload(req) {
+ shouldAuth = true
+ }
+ return shouldAuth
+}
+
+// newUser creates a new user object for the purpose of automatic registration,
+// using the authenticated username and a generated placeholder email address.
+func (s *SSPI) newUser(ctx context.Context, username string, cfg *sspi.Source) (*user_model.User, error) {
+ email := gouuid.New().String() + "@localhost.localdomain"
+ user := &user_model.User{
+ Name: username,
+ Email: email,
+ Language: cfg.DefaultLanguage,
+ }
+ emailNotificationPreference := user_model.EmailNotificationsDisabled
+ overwriteDefault := &user_model.CreateUserOverwriteOptions{
+ IsActive: optional.Some(cfg.AutoActivateUsers),
+ KeepEmailPrivate: optional.Some(true),
+ EmailNotificationsPreference: &emailNotificationPreference,
+ }
+ if err := user_model.CreateUser(ctx, user, overwriteDefault); err != nil {
+ return nil, err
+ }
+
+ return user, nil
+}
+
+// stripDomainNames removes NETBIOS domain name and separator from down-level logon names
+// (eg. "DOMAIN\user" becomes "user"), and removes the UPN suffix (domain name) and separator
+// from UPNs (eg. "user@domain.local" becomes "user")
+func stripDomainNames(username string) string {
+ if strings.Contains(username, "\\") {
+ parts := strings.SplitN(username, "\\", 2)
+ if len(parts) > 1 {
+ username = parts[1]
+ }
+ } else if strings.Contains(username, "@") {
+ parts := strings.Split(username, "@")
+ if len(parts) > 1 {
+ username = parts[0]
+ }
+ }
+ return username
+}
+
+func replaceSeparators(username string, cfg *sspi.Source) string {
+ newSep := cfg.SeparatorReplacement
+ username = strings.ReplaceAll(username, "\\", newSep)
+ username = strings.ReplaceAll(username, "/", newSep)
+ username = strings.ReplaceAll(username, "@", newSep)
+ return username
+}
+
+func sanitizeUsername(username string, cfg *sspi.Source) string {
+ if len(username) == 0 {
+ return ""
+ }
+ if cfg.StripDomainNames {
+ username = stripDomainNames(username)
+ }
+ // Replace separators even if we have already stripped the domain name part,
+ // as the username can contain several separators: eg. "MICROSOFT\useremail@live.com"
+ username = replaceSeparators(username, cfg)
+ return username
+}
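
sanitizeUsername combines both normalization steps. A sketch with hypothetical inputs showing the resulting transforms:

```go
package auth

import "code.gitea.io/gitea/services/auth/source/sspi"

// exampleSanitize: with StripDomainNames set, both down-level and UPN forms
// reduce to the bare user name; separators that survive stripping are then
// replaced with the configured replacement.
func exampleSanitize() []string {
	cfg := &sspi.Source{StripDomainNames: true, SeparatorReplacement: "_"}
	return []string{
		sanitizeUsername(`DOMAIN\jdoe`, cfg),             // "jdoe"
		sanitizeUsername("jdoe@domain.local", cfg),       // "jdoe"
		sanitizeUsername(`MICROSOFT\user@live.com`, cfg), // "user_live.com"
	}
}
```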
diff --git a/services/auth/sspiauth_posix.go b/services/auth/sspiauth_posix.go
new file mode 100644
index 0000000..49b0ed4
--- /dev/null
+++ b/services/auth/sspiauth_posix.go
@@ -0,0 +1,30 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+//go:build !windows
+
+package auth
+
+import (
+ "errors"
+ "net/http"
+)
+
+type SSPIUserInfo struct {
+ Username string // Name of user, usually in the form DOMAIN\User
+ Groups []string // The global groups the user is a member of
+}
+
+type sspiAuthMock struct{}
+
+func (s sspiAuthMock) AppendAuthenticateHeader(w http.ResponseWriter, data string) {
+}
+
+func (s sspiAuthMock) Authenticate(r *http.Request, w http.ResponseWriter) (userInfo *SSPIUserInfo, outToken string, err error) {
+ return nil, "", errors.New("not implemented")
+}
+
+func sspiAuthInit() error {
+ sspiAuth = &sspiAuthMock{} // TODO: we can mock the SSPI auth in tests
+ return nil
+}
diff --git a/services/auth/sspiauth_windows.go b/services/auth/sspiauth_windows.go
new file mode 100644
index 0000000..093caae
--- /dev/null
+++ b/services/auth/sspiauth_windows.go
@@ -0,0 +1,19 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+//go:build windows
+
+package auth
+
+import (
+ "github.com/quasoft/websspi"
+)
+
+type SSPIUserInfo = websspi.UserInfo
+
+func sspiAuthInit() error {
+ var err error
+ config := websspi.NewConfig()
+ sspiAuth, err = websspi.New(config)
+ return err
+}
diff --git a/services/auth/sync.go b/services/auth/sync.go
new file mode 100644
index 0000000..7562ac8
--- /dev/null
+++ b/services/auth/sync.go
@@ -0,0 +1,43 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/log"
+)
+
+// SyncExternalUsers is used to synchronize users with external authorization source
+func SyncExternalUsers(ctx context.Context, updateExisting bool) error {
+ log.Trace("Doing: SyncExternalUsers")
+
+ ls, err := db.Find[auth.Source](ctx, auth.FindSourcesOptions{})
+ if err != nil {
+ log.Error("SyncExternalUsers: %v", err)
+ return err
+ }
+
+ for _, s := range ls {
+ if !s.IsActive || !s.IsSyncEnabled {
+ continue
+ }
+ select {
+ case <-ctx.Done():
+ log.Warn("SyncExternalUsers: Cancelled before update of %s", s.Name)
+ return db.ErrCancelledf("Before update of %s", s.Name)
+ default:
+ }
+
+ if syncable, ok := s.Cfg.(SynchronizableSource); ok {
+ err := syncable.Sync(ctx, updateExisting)
+ if err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+}
diff --git a/services/automerge/automerge.go b/services/automerge/automerge.go
new file mode 100644
index 0000000..a1ee204
--- /dev/null
+++ b/services/automerge/automerge.go
@@ -0,0 +1,306 @@
+// Copyright 2021 Gitea. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package automerge
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "strconv"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ pull_model "code.gitea.io/gitea/models/pull"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/process"
+ "code.gitea.io/gitea/modules/queue"
+ notify_service "code.gitea.io/gitea/services/notify"
+ pull_service "code.gitea.io/gitea/services/pull"
+)
+
+// prAutoMergeQueue represents the queue that handles auto-merge checks for pull requests
+var prAutoMergeQueue *queue.WorkerPoolQueue[string]
+
+// Init starts the task queue that handles auto merges
+func Init() error {
+ notify_service.RegisterNotifier(NewNotifier())
+
+ prAutoMergeQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "pr_auto_merge", handler)
+ if prAutoMergeQueue == nil {
+ return fmt.Errorf("unable to create pr_auto_merge queue")
+ }
+ go graceful.GetManager().RunWithCancel(prAutoMergeQueue)
+ return nil
+}
+
+// handler parses the queued "ID_SHA" items and runs the auto-merge check for each pull request
+func handler(items ...string) []string {
+ for _, s := range items {
+ var id int64
+ var sha string
+ if _, err := fmt.Sscanf(s, "%d_%s", &id, &sha); err != nil {
+ log.Error("could not parse data from pr_auto_merge queue (%v): %v", s, err)
+ continue
+ }
+ handlePullRequestAutoMerge(id, sha)
+ }
+ return nil
+}
+
+func addToQueue(pr *issues_model.PullRequest, sha string) {
+ log.Trace("Adding pullID: %d to the pull requests patch checking queue with sha %s", pr.ID, sha)
+ if err := prAutoMergeQueue.Push(fmt.Sprintf("%d_%s", pr.ID, sha)); err != nil {
+ log.Error("Error adding pullID: %d to the pull requests patch checking queue %v", pr.ID, err)
+ }
+}
+
+// ScheduleAutoMerge schedules a pull request to be merged once all checks succeed; if scheduled is false and err is nil, the pull can be merged directly
+func ScheduleAutoMerge(ctx context.Context, doer *user_model.User, pull *issues_model.PullRequest, style repo_model.MergeStyle, message string) (scheduled bool, err error) {
+ err = db.WithTx(ctx, func(ctx context.Context) error {
+ if err := pull_model.ScheduleAutoMerge(ctx, doer, pull.ID, style, message); err != nil {
+ return err
+ }
+ scheduled = true
+
+ _, err = issues_model.CreateAutoMergeComment(ctx, issues_model.CommentTypePRScheduledToAutoMerge, pull, doer)
+ return err
+ })
+ return scheduled, err
+}
+
+// RemoveScheduledAutoMerge cancels a previously scheduled pull request
+func RemoveScheduledAutoMerge(ctx context.Context, doer *user_model.User, pull *issues_model.PullRequest) error {
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ if err := pull_model.DeleteScheduledAutoMerge(ctx, pull.ID); err != nil {
+ return err
+ }
+
+ _, err := issues_model.CreateAutoMergeComment(ctx, issues_model.CommentTypePRUnScheduledToAutoMerge, pull, doer)
+ return err
+ })
+}
+
+// StartPRCheckAndAutoMergeBySHA starts an auto-merge check task for all pull requests of the repository whose head matches the given SHA
+func StartPRCheckAndAutoMergeBySHA(ctx context.Context, sha string, repo *repo_model.Repository) error {
+ pulls, err := getPullRequestsByHeadSHA(ctx, sha, repo, func(pr *issues_model.PullRequest) bool {
+ return !pr.HasMerged && pr.CanAutoMerge()
+ })
+ if err != nil {
+ return err
+ }
+
+ for _, pr := range pulls {
+ addToQueue(pr, sha)
+ }
+
+ return nil
+}
+
+// StartPRCheckAndAutoMerge starts an auto-merge check task for a pull request
+func StartPRCheckAndAutoMerge(ctx context.Context, pull *issues_model.PullRequest) {
+ if pull == nil || pull.HasMerged || !pull.CanAutoMerge() {
+ return
+ }
+
+ if err := pull.LoadBaseRepo(ctx); err != nil {
+ log.Error("LoadBaseRepo: %v", err)
+ return
+ }
+
+ gitRepo, err := gitrepo.OpenRepository(ctx, pull.BaseRepo)
+ if err != nil {
+ log.Error("OpenRepository: %v", err)
+ return
+ }
+ defer gitRepo.Close()
+ commitID, err := gitRepo.GetRefCommitID(pull.GetGitRefName())
+ if err != nil {
+ log.Error("GetRefCommitID: %v", err)
+ return
+ }
+
+ addToQueue(pull, commitID)
+}
+
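+// getPullRequestsByHeadSHA resolves every refs/pull/<index>/... reference
+// pointing at the given SHA to its pull request and returns the ones accepted
+// by the filter, keyed by pull request ID.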
+func getPullRequestsByHeadSHA(ctx context.Context, sha string, repo *repo_model.Repository, filter func(*issues_model.PullRequest) bool) (map[int64]*issues_model.PullRequest, error) {
+ gitRepo, err := gitrepo.OpenRepository(ctx, repo)
+ if err != nil {
+ return nil, err
+ }
+ defer gitRepo.Close()
+
+ refs, err := gitRepo.GetRefsBySha(sha, "")
+ if err != nil {
+ return nil, err
+ }
+
+ pulls := make(map[int64]*issues_model.PullRequest)
+
+ for _, ref := range refs {
+ // Each pull request ref starts with refs/pull/; from there we extract the
+ // PR index and use it to look up the pull request.
+ if strings.HasPrefix(ref, git.PullPrefix) {
+ parts := strings.Split(ref[len(git.PullPrefix):], "/")
+
+ // e.g. 'refs/pull/1/head' would be []string{"1", "head"}
+ if len(parts) != 2 {
+ log.Error("getPullRequestsByHeadSHA found broken pull ref [%s] on repo [%-v]", ref, repo)
+ continue
+ }
+
+ prIndex, err := strconv.ParseInt(parts[0], 10, 64)
+ if err != nil {
+ log.Error("getPullRequestsByHeadSHA found broken pull ref [%s] on repo [%-v]", ref, repo)
+ continue
+ }
+
+ p, err := issues_model.GetPullRequestByIndex(ctx, repo.ID, prIndex)
+ if err != nil {
+ // If there is no pull request for this branch, we don't try to merge it.
+ if issues_model.IsErrPullRequestNotExist(err) {
+ continue
+ }
+ return nil, err
+ }
+
+ if filter(p) {
+ pulls[p.ID] = p
+ }
+ }
+ }
+
+ return pulls, nil
+}
+
+// handlePullRequestAutoMerge merges the pull request if all checks are successful
+func handlePullRequestAutoMerge(pullID int64, sha string) {
+ ctx, _, finished := process.GetManager().AddContext(graceful.GetManager().HammerContext(),
+ fmt.Sprintf("Handle AutoMerge of PR[%d] with sha[%s]", pullID, sha))
+ defer finished()
+
+ pr, err := issues_model.GetPullRequestByID(ctx, pullID)
+ if err != nil {
+ log.Error("GetPullRequestByID[%d]: %v", pullID, err)
+ return
+ }
+
+ // Check if there is a scheduled auto merge for this pull request in the database
+ exists, scheduledPRM, err := pull_model.GetScheduledMergeByPullID(ctx, pr.ID)
+ if err != nil {
+ log.Error("%-v GetScheduledMergeByPullID: %v", pr, err)
+ return
+ }
+ if !exists {
+ return
+ }
+
+ if err = pr.LoadBaseRepo(ctx); err != nil {
+ log.Error("%-v LoadBaseRepo: %v", pr, err)
+ return
+ }
+
+ // check the sha is the same as pull request head commit id
+ baseGitRepo, err := gitrepo.OpenRepository(ctx, pr.BaseRepo)
+ if err != nil {
+ log.Error("OpenRepository: %v", err)
+ return
+ }
+ defer baseGitRepo.Close()
+
+ headCommitID, err := baseGitRepo.GetRefCommitID(pr.GetGitRefName())
+ if err != nil {
+ log.Error("GetRefCommitID: %v", err)
+ return
+ }
+ if headCommitID != sha {
+ log.Warn("Head commit id of auto merge %-v does not match sha [%s], it may means the head branch has been updated. Just ignore this request because a new request expected in the queue", pr, sha)
+ return
+ }
+
+ // Get all checks for this PR.
+ // We fetch the latest commit hash again to handle the case where a check for
+ // a previous push did not succeed or has not finished yet.
+ if err = pr.LoadHeadRepo(ctx); err != nil {
+ log.Error("%-v LoadHeadRepo: %v", pr, err)
+ return
+ }
+
+ var headGitRepo *git.Repository
+ if pr.BaseRepoID == pr.HeadRepoID {
+ headGitRepo = baseGitRepo
+ } else {
+ headGitRepo, err = gitrepo.OpenRepository(ctx, pr.HeadRepo)
+ if err != nil {
+ log.Error("OpenRepository %-v: %v", pr.HeadRepo, err)
+ return
+ }
+ defer headGitRepo.Close()
+ }
+
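+ // For the GitHub flow the head branch lives in the (possibly forked) head
+ // repository; for the AGit flow the head only exists as a refs/pull/<index>/head
+ // reference inside the base repository.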
+ switch pr.Flow {
+ case issues_model.PullRequestFlowGithub:
+ headBranchExist := headGitRepo.IsBranchExist(pr.HeadBranch)
+ if pr.HeadRepo == nil || !headBranchExist {
+ log.Warn("Head branch of auto merge %-v does not exist [HeadRepoID: %d, Branch: %s]", pr, pr.HeadRepoID, pr.HeadBranch)
+ return
+ }
+ case issues_model.PullRequestFlowAGit:
+ headBranchExist := git.IsReferenceExist(ctx, baseGitRepo.Path, pr.GetGitRefName())
+ if !headBranchExist {
+ log.Warn("Head branch of auto merge %-v does not exist [HeadRepoID: %d, Branch(Agit): %s]", pr, pr.HeadRepoID, pr.HeadBranch)
+ return
+ }
+ default:
+ log.Error("wrong flow type %d", pr.Flow)
+ return
+ }
+
+ // Check if all checks succeeded
+ pass, err := pull_service.IsPullCommitStatusPass(ctx, pr)
+ if err != nil {
+ log.Error("%-v IsPullCommitStatusPass: %v", pr, err)
+ return
+ }
+ if !pass {
+ log.Info("Scheduled auto merge %-v has unsuccessful status checks", pr)
+ return
+ }
+
+ // Merge if all checks succeeded
+ doer, err := user_model.GetUserByID(ctx, scheduledPRM.DoerID)
+ if err != nil {
+ log.Error("Unable to get scheduled User[%d]: %v", scheduledPRM.DoerID, err)
+ return
+ }
+
+ perm, err := access_model.GetUserRepoPermission(ctx, pr.HeadRepo, doer)
+ if err != nil {
+ log.Error("GetUserRepoPermission %-v: %v", pr.HeadRepo, err)
+ return
+ }
+
+ if err := pull_service.CheckPullMergeable(ctx, doer, &perm, pr, pull_service.MergeCheckTypeGeneral, false); err != nil {
+ if errors.Is(err, pull_service.ErrUserNotAllowedToMerge) {
+ log.Info("%-v was scheduled to automerge by an unauthorized user", pr)
+ return
+ }
+ log.Error("%-v CheckPullMergeable: %v", pr, err)
+ return
+ }
+
+ if err := pull_service.Merge(ctx, pr, doer, baseGitRepo, scheduledPRM.MergeStyle, "", scheduledPRM.Message, true); err != nil {
+ log.Error("pull_service.Merge: %v", err)
+ // FIXME: if the merge failed, we should display an error message on the pull request page.
+ // The resolution is to add an `error_message` column to the automerge table to store the
+ // error and display it on the pull request page. But that should not be done in a bug fix
+ // PR that will be backported to a release branch.
+ return
+ }
+}
diff --git a/services/automerge/notify.go b/services/automerge/notify.go
new file mode 100644
index 0000000..cb07821
--- /dev/null
+++ b/services/automerge/notify.go
@@ -0,0 +1,46 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package automerge
+
+import (
+ "context"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+type automergeNotifier struct {
+ notify_service.NullNotifier
+}
+
+var _ notify_service.Notifier = &automergeNotifier{}
+
+// NewNotifier create a new automergeNotifier notifier
+func NewNotifier() notify_service.Notifier {
+ return &automergeNotifier{}
+}
+
+func (n *automergeNotifier) PullRequestReview(ctx context.Context, pr *issues_model.PullRequest, review *issues_model.Review, comment *issues_model.Comment, mentions []*user_model.User) {
+ // as missing or blocking reviews could have blocked a pending automerge, let's recheck
+ if review.Type == issues_model.ReviewTypeApprove {
+ if err := StartPRCheckAndAutoMergeBySHA(ctx, review.CommitID, pr.BaseRepo); err != nil {
+ log.Error("StartPullRequestAutoMergeCheckBySHA: %v", err)
+ }
+ }
+}
+
+func (n *automergeNotifier) PullReviewDismiss(ctx context.Context, doer *user_model.User, review *issues_model.Review, comment *issues_model.Comment) {
+ if err := review.LoadIssue(ctx); err != nil {
+ log.Error("LoadIssue: %v", err)
+ return
+ }
+ if err := review.Issue.LoadPullRequest(ctx); err != nil {
+ log.Error("LoadPullRequest: %v", err)
+ return
+ }
+ // as reviews could have blocked a pending automerge, let's recheck
+ StartPRCheckAndAutoMerge(ctx, review.Issue.PullRequest)
+}
diff --git a/services/context/access_log.go b/services/context/access_log.go
new file mode 100644
index 0000000..0926748
--- /dev/null
+++ b/services/context/access_log.go
@@ -0,0 +1,101 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "bytes"
+ "fmt"
+ "net"
+ "net/http"
+ "strings"
+ "text/template"
+ "time"
+
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/web/middleware"
+)
+
+type routerLoggerOptions struct {
+ req *http.Request
+ Identity *string
+ Start *time.Time
+ ResponseWriter http.ResponseWriter
+ Ctx map[string]any
+ RequestID *string
+}
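+
+// The access log template (setting.Log.AccessLogTemplate) is executed against
+// routerLoggerOptions, so a template such as
+// "{{.Ctx.RemoteHost}} {{.Identity}} {{.RequestID}}" (illustrative) can use
+// any of the fields above.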
+
+const keyOfRequestIDInTemplate = ".RequestID"
+
+// For reference, common request/trace ID sizes:
+// TraceId: a valid trace identifier is a 16-byte array with at least one non-zero byte.
+// MD5 output is 16 or 32 bytes: md5-bytes is 16, md5-hex is 32.
+// SHA1 is similar: sha1-bytes is 20, sha1-hex is 40.
+// UUID is 128-bit: 32 hex chars, or 36 ASCII chars with 4 dashes.
+// So we accept a Request ID with a maximum length of 40 characters.
+const maxRequestIDByteLength = 40
+
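+// parseRequestIDFromRequestHeader returns the value of the first configured
+// request ID header, truncated to maxRequestIDByteLength characters (with a
+// trailing "..."), or "-" when no such header is present.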
+func parseRequestIDFromRequestHeader(req *http.Request) string {
+ requestID := "-"
+ for _, key := range setting.Log.RequestIDHeaders {
+ if req.Header.Get(key) != "" {
+ requestID = req.Header.Get(key)
+ break
+ }
+ }
+ if len(requestID) > maxRequestIDByteLength {
+ requestID = fmt.Sprintf("%s...", requestID[:maxRequestIDByteLength])
+ }
+ return requestID
+}
+
+// AccessLogger returns a middleware that writes request information to the access log
+func AccessLogger() func(http.Handler) http.Handler {
+ logger := log.GetLogger("access")
+ needRequestID := len(setting.Log.RequestIDHeaders) > 0 && strings.Contains(setting.Log.AccessLogTemplate, keyOfRequestIDInTemplate)
+ logTemplate, _ := template.New("log").Parse(setting.Log.AccessLogTemplate)
+ return func(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
+ start := time.Now()
+
+ var requestID string
+ if needRequestID {
+ requestID = parseRequestIDFromRequestHeader(req)
+ }
+
+ reqHost, _, err := net.SplitHostPort(req.RemoteAddr)
+ if err != nil {
+ reqHost = req.RemoteAddr
+ }
+
+ next.ServeHTTP(w, req)
+ rw := w.(ResponseWriter)
+
+ identity := "-"
+ data := middleware.GetContextData(req.Context())
+ if signedUser, ok := data[middleware.ContextDataKeySignedUser].(*user_model.User); ok {
+ identity = signedUser.Name
+ }
+ buf := bytes.NewBuffer([]byte{})
+ err = logTemplate.Execute(buf, routerLoggerOptions{
+ req: req,
+ Identity: &identity,
+ Start: &start,
+ ResponseWriter: rw,
+ Ctx: map[string]any{
+ "RemoteAddr": req.RemoteAddr,
+ "RemoteHost": reqHost,
+ "Req": req,
+ },
+ RequestID: &requestID,
+ })
+ if err != nil {
+ log.Error("Could not execute access logger template: %v", err.Error())
+ }
+
+ logger.Info("%s", buf.String())
+ })
+ }
+}
diff --git a/services/context/api.go b/services/context/api.go
new file mode 100644
index 0000000..396ceb5
--- /dev/null
+++ b/services/context/api.go
@@ -0,0 +1,459 @@
+// Copyright 2016 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+ "net/url"
+ "strings"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ quota_model "code.gitea.io/gitea/models/quota"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ mc "code.gitea.io/gitea/modules/cache"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/httpcache"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/web"
+ web_types "code.gitea.io/gitea/modules/web/types"
+
+ "code.forgejo.org/go-chi/cache"
+)
+
+// APIContext is a specific context for API service
+type APIContext struct {
+ *Base
+
+ Cache cache.Cache
+
+ Doer *user_model.User // current signed-in user
+ IsSigned bool
+ IsBasicAuth bool
+
+ ContextUser *user_model.User // the user which is being visited, in most cases it differs from Doer
+
+ Repo *Repository
+ Comment *issues_model.Comment
+ Org *APIOrganization
+ Package *Package
+ QuotaGroup *quota_model.Group
+ QuotaRule *quota_model.Rule
+ PublicOnly bool // Whether the request is for a public endpoint
+}
+
+func init() {
+ web.RegisterResponseStatusProvider[*APIContext](func(req *http.Request) web_types.ResponseStatusProvider {
+ return req.Context().Value(apiContextKey).(*APIContext)
+ })
+}
+
+// Currently, we have the following common fields in error response:
+// * message: the message for end users (it shouldn't be used for error type detection)
+// if we need to indicate some errors, we should introduce some new fields like ErrorCode or ErrorType
+// * url: the swagger document URL
+
+type APIError struct {
+ Message string `json:"message"`
+ URL string `json:"url"`
+}
+
+// APIError is error format response
+// swagger:response error
+type swaggerAPIError struct {
+ // in:body
+ Body APIError `json:"body"`
+}
+
+type APIValidationError struct {
+ Message string `json:"message"`
+ URL string `json:"url"`
+}
+
+// APIValidationError is error format response related to input validation
+// swagger:response validationError
+type swaggerAPIValidationError struct {
+ // in:body
+ Body APIValidationError `json:"body"`
+}
+
+type APIInvalidTopicsError struct {
+ Message string `json:"message"`
+ InvalidTopics []string `json:"invalidTopics"`
+}
+
+// APIInvalidTopicsError is error format response to invalid topics
+// swagger:response invalidTopicsError
+type swaggerAPIInvalidTopicsError struct {
+ // in:body
+ Body APIInvalidTopicsError `json:"body"`
+}
+
+// APIEmpty is an empty response
+// swagger:response empty
+type APIEmpty struct{}
+
+type APIUnauthorizedError struct {
+ APIError
+}
+
+// APIUnauthorizedError is an unauthorized error response
+// swagger:response unauthorized
+type swaggerAPIUnauthorizedError struct {
+ // in:body
+ Body APIUnauthorizedError `json:"body"`
+}
+
+type APIForbiddenError struct {
+ APIError
+}
+
+// APIForbiddenError is a forbidden error response
+// swagger:response forbidden
+type swaggerAPIForbiddenError struct {
+ // in:body
+ Body APIForbiddenError `json:"body"`
+}
+
+type APINotFound struct {
+ Message string `json:"message"`
+ URL string `json:"url"`
+ Errors []string `json:"errors"`
+}
+
+// APINotFound is a not found error response
+// swagger:response notFound
+type swaggerAPINotFound struct {
+ // in:body
+ Body APINotFound `json:"body"`
+}
+
+// APIConflict is a conflict empty response
+// swagger:response conflict
+type APIConflict struct{}
+
+// APIRedirect is a redirect response
+// swagger:response redirect
+type APIRedirect struct{}
+
+// APIString is a string response
+// swagger:response string
+type APIString string
+
+type APIRepoArchivedError struct {
+ APIError
+}
+
+// APIRepoArchivedError is an error that is raised when an archived repo should be modified
+// swagger:response repoArchivedError
+type swaggerAPIRepoArchivedError struct {
+ // in:body
+ Body APIRepoArchivedError `json:"body"`
+}
+
+// ServerError responds with error message, status is 500
+func (ctx *APIContext) ServerError(title string, err error) {
+ ctx.Error(http.StatusInternalServerError, title, err)
+}
+
+// Error responds to the client with an error message, using the given obj as the message.
+// If status is 500, the error is also written to the log.
+func (ctx *APIContext) Error(status int, title string, obj any) {
+ var message string
+ if err, ok := obj.(error); ok {
+ message = err.Error()
+ } else {
+ message = fmt.Sprintf("%s", obj)
+ }
+
+ if status == http.StatusInternalServerError {
+ log.ErrorWithSkip(1, "%s: %s", title, message)
+
+ if setting.IsProd && !(ctx.Doer != nil && ctx.Doer.IsAdmin) {
+ message = ""
+ }
+ }
+
+ ctx.JSON(status, APIError{
+ Message: message,
+ URL: setting.API.SwaggerURL,
+ })
+}
+
+// InternalServerError responds with an error message to the client, using the
+// error as the message, and logs the file and line of the caller.
+func (ctx *APIContext) InternalServerError(err error) {
+ log.ErrorWithSkip(1, "InternalServerError: %v", err)
+
+ var message string
+ if !setting.IsProd || (ctx.Doer != nil && ctx.Doer.IsAdmin) {
+ message = err.Error()
+ }
+
+ ctx.JSON(http.StatusInternalServerError, APIError{
+ Message: message,
+ URL: setting.API.SwaggerURL,
+ })
+}
+
+type apiContextKeyType struct{}
+
+var apiContextKey = apiContextKeyType{}
+
+// GetAPIContext returns a context for API routes
+func GetAPIContext(req *http.Request) *APIContext {
+ return req.Context().Value(apiContextKey).(*APIContext)
+}
+
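+// genAPILinks builds the values for a pagination "Link" response header
+// (rel next/last/first/prev). An illustrative entry:
+//
+//   <https://example.com/api/v1/repos/o/r/issues?page=2>; rel="next"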
+func genAPILinks(curURL *url.URL, total, pageSize, curPage int) []string {
+ page := NewPagination(total, pageSize, curPage, 0)
+ paginater := page.Paginater
+ links := make([]string, 0, 4)
+
+ if paginater.HasNext() {
+ u := *curURL
+ queries := u.Query()
+ queries.Set("page", fmt.Sprintf("%d", paginater.Next()))
+ u.RawQuery = queries.Encode()
+
+ links = append(links, fmt.Sprintf("<%s%s>; rel=\"next\"", setting.AppURL, u.RequestURI()[1:]))
+ }
+ if !paginater.IsLast() {
+ u := *curURL
+ queries := u.Query()
+ queries.Set("page", fmt.Sprintf("%d", paginater.TotalPages()))
+ u.RawQuery = queries.Encode()
+
+ links = append(links, fmt.Sprintf("<%s%s>; rel=\"last\"", setting.AppURL, u.RequestURI()[1:]))
+ }
+ if !paginater.IsFirst() {
+ u := *curURL
+ queries := u.Query()
+ queries.Set("page", "1")
+ u.RawQuery = queries.Encode()
+
+ links = append(links, fmt.Sprintf("<%s%s>; rel=\"first\"", setting.AppURL, u.RequestURI()[1:]))
+ }
+ if paginater.HasPrevious() {
+ u := *curURL
+ queries := u.Query()
+ queries.Set("page", fmt.Sprintf("%d", paginater.Previous()))
+ u.RawQuery = queries.Encode()
+
+ links = append(links, fmt.Sprintf("<%s%s>; rel=\"prev\"", setting.AppURL, u.RequestURI()[1:]))
+ }
+ return links
+}
+
+// SetLinkHeader sets the pagination Link header from the given total count and page size.
+func (ctx *APIContext) SetLinkHeader(total, pageSize int) {
+ links := genAPILinks(ctx.Req.URL, total, pageSize, ctx.FormInt("page"))
+
+ if len(links) > 0 {
+ ctx.RespHeader().Set("Link", strings.Join(links, ","))
+ ctx.AppendAccessControlExposeHeaders("Link")
+ }
+}
+
+// APIContexter returns apicontext as middleware
+func APIContexter() func(http.Handler) http.Handler {
+ return func(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
+ base, baseCleanUp := NewBaseContext(w, req)
+ ctx := &APIContext{
+ Base: base,
+ Cache: mc.GetCache(),
+ Repo: &Repository{PullRequest: &PullRequest{}},
+ Org: &APIOrganization{},
+ }
+ defer baseCleanUp()
+
+ ctx.Base.AppendContextValue(apiContextKey, ctx)
+ ctx.Base.AppendContextValueFunc(gitrepo.RepositoryContextKey, func() any { return ctx.Repo.GitRepo })
+
+ // If the request sends files, parse them here; otherwise Query() can't be parsed and the CsrfToken will be invalid.
+ if ctx.Req.Method == "POST" && strings.Contains(ctx.Req.Header.Get("Content-Type"), "multipart/form-data") {
+ if err := ctx.Req.ParseMultipartForm(setting.Attachment.MaxSize << 20); err != nil && !strings.Contains(err.Error(), "EOF") { // Attachment.MaxSize is in MiB
+ ctx.InternalServerError(err)
+ return
+ }
+ }
+
+ httpcache.SetCacheControlInHeader(ctx.Resp.Header(), 0, "no-transform")
+ ctx.Resp.Header().Set(`X-Frame-Options`, setting.CORSConfig.XFrameOptions)
+
+ next.ServeHTTP(ctx.Resp, ctx.Req)
+ })
+ }
+}
+
+// NotFound handles 404s for APIContext
+// A string argument replaces the default message; error arguments are collected into a slice
+func (ctx *APIContext) NotFound(objs ...any) {
+ message := ctx.Locale.TrString("error.not_found")
+ errors := make([]string, 0)
+ for _, obj := range objs {
+ // Ignore nil
+ if obj == nil {
+ continue
+ }
+
+ if err, ok := obj.(error); ok {
+ errors = append(errors, err.Error())
+ } else {
+ message = obj.(string)
+ }
+ }
+
+ ctx.JSON(http.StatusNotFound, APINotFound{
+ Message: message,
+ URL: setting.API.SwaggerURL,
+ Errors: errors,
+ })
+}
+
+// ReferencesGitRepo injects the GitRepo into the Context;
+// the IsEmpty check can optionally be skipped
+func ReferencesGitRepo(allowEmpty ...bool) func(ctx *APIContext) (cancel context.CancelFunc) {
+ return func(ctx *APIContext) (cancel context.CancelFunc) {
+ // Empty repository does not have reference information.
+ if ctx.Repo.Repository.IsEmpty && !(len(allowEmpty) != 0 && allowEmpty[0]) {
+ return nil
+ }
+
+ // For API calls.
+ if ctx.Repo.GitRepo == nil {
+ gitRepo, err := gitrepo.OpenRepository(ctx, ctx.Repo.Repository)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, fmt.Sprintf("Open Repository %v failed", ctx.Repo.Repository.FullName()), err)
+ return cancel
+ }
+ ctx.Repo.GitRepo = gitRepo
+ // We opened it, we should close it
+ return func() {
+ // If it's been set to nil then assume someone else has closed it.
+ if ctx.Repo.GitRepo != nil {
+ _ = ctx.Repo.GitRepo.Close()
+ }
+ }
+ }
+
+ return cancel
+ }
+}
+
+// RepoRefForAPI handles repository reference names when the ref name is not explicitly given
+func RepoRefForAPI(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
+ ctx := GetAPIContext(req)
+
+ if ctx.Repo.GitRepo == nil {
+ ctx.InternalServerError(fmt.Errorf("no open git repo"))
+ return
+ }
+
+ if ref := ctx.FormTrim("ref"); len(ref) > 0 {
+ commit, err := ctx.Repo.GitRepo.GetCommit(ref)
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.NotFound()
+ } else {
+ ctx.Error(http.StatusInternalServerError, "GetCommit", err)
+ }
+ return
+ }
+ ctx.Repo.Commit = commit
+ ctx.Repo.CommitID = ctx.Repo.Commit.ID.String()
+ ctx.Repo.TreePath = ctx.Params("*")
+ next.ServeHTTP(w, req)
+ return
+ }
+
+ refName := getRefName(ctx.Base, ctx.Repo, RepoRefAny)
+ var err error
+
+ if ctx.Repo.GitRepo.IsBranchExist(refName) {
+ ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetBranchCommit(refName)
+ if err != nil {
+ ctx.InternalServerError(err)
+ return
+ }
+ ctx.Repo.CommitID = ctx.Repo.Commit.ID.String()
+ } else if ctx.Repo.GitRepo.IsTagExist(refName) {
+ ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetTagCommit(refName)
+ if err != nil {
+ ctx.InternalServerError(err)
+ return
+ }
+ ctx.Repo.CommitID = ctx.Repo.Commit.ID.String()
+ } else if len(refName) == ctx.Repo.GetObjectFormat().FullLength() {
+ ctx.Repo.CommitID = refName
+ ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetCommit(refName)
+ if err != nil {
+ ctx.NotFound("GetCommit", err)
+ return
+ }
+ } else {
+ ctx.NotFound(fmt.Errorf("not exist: '%s'", ctx.Params("*")))
+ return
+ }
+
+ next.ServeHTTP(w, req)
+ })
+}
+
+// HasAPIError returns true if an error occurred during form validation.
+func (ctx *APIContext) HasAPIError() bool {
+ hasErr, ok := ctx.Data["HasError"]
+ if !ok {
+ return false
+ }
+ return hasErr.(bool)
+}
+
+// GetErrMsg returns error message in form validation.
+func (ctx *APIContext) GetErrMsg() string {
+ msg, _ := ctx.Data["ErrorMsg"].(string)
+ if msg == "" {
+ msg = "invalid form data"
+ }
+ return msg
+}
+
+// NotFoundOrServerError uses the error check function to determine whether the
+// error is a not-found error. It responds with a 404 status code for not-found
+// errors, and logs the error context as a 500 server error otherwise.
+func (ctx *APIContext) NotFoundOrServerError(logMsg string, errCheck func(error) bool, logErr error) {
+ if errCheck(logErr) {
+ ctx.JSON(http.StatusNotFound, nil)
+ return
+ }
+ ctx.Error(http.StatusInternalServerError, "NotFoundOrServerError", logMsg)
+}
+
+// IsUserSiteAdmin returns true if current user is a site admin
+func (ctx *APIContext) IsUserSiteAdmin() bool {
+ return ctx.IsSigned && ctx.Doer.IsAdmin
+}
+
+// IsUserRepoAdmin returns true if current user is admin in current repo
+func (ctx *APIContext) IsUserRepoAdmin() bool {
+ return ctx.Repo.IsAdmin()
+}
+
+// IsUserRepoWriter returns true if current user has write privilege in current repo
+func (ctx *APIContext) IsUserRepoWriter(unitTypes []unit.Type) bool {
+ for _, unitType := range unitTypes {
+ if ctx.Repo.CanWrite(unitType) {
+ return true
+ }
+ }
+
+ return false
+}
diff --git a/services/context/api_org.go b/services/context/api_org.go
new file mode 100644
index 0000000..dad02b1
--- /dev/null
+++ b/services/context/api_org.go
@@ -0,0 +1,12 @@
+// Copyright 2016 The Gogs Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import "code.gitea.io/gitea/models/organization"
+
+// APIOrganization contains organization and team
+type APIOrganization struct {
+ Organization *organization.Organization
+ Team *organization.Team
+}
diff --git a/services/context/api_test.go b/services/context/api_test.go
new file mode 100644
index 0000000..6064fee
--- /dev/null
+++ b/services/context/api_test.go
@@ -0,0 +1,51 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "net/url"
+ "strconv"
+ "testing"
+
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestGenAPILinks(t *testing.T) {
+ setting.AppURL = "http://localhost:3000/"
+ kases := map[string][]string{
+ "api/v1/repos/jerrykan/example-repo/issues?state=all": {
+ `<http://localhost:3000/api/v1/repos/jerrykan/example-repo/issues?page=2&state=all>; rel="next"`,
+ `<http://localhost:3000/api/v1/repos/jerrykan/example-repo/issues?page=5&state=all>; rel="last"`,
+ },
+ "api/v1/repos/jerrykan/example-repo/issues?state=all&page=1": {
+ `<http://localhost:3000/api/v1/repos/jerrykan/example-repo/issues?page=2&state=all>; rel="next"`,
+ `<http://localhost:3000/api/v1/repos/jerrykan/example-repo/issues?page=5&state=all>; rel="last"`,
+ },
+ "api/v1/repos/jerrykan/example-repo/issues?state=all&page=2": {
+ `<http://localhost:3000/api/v1/repos/jerrykan/example-repo/issues?page=3&state=all>; rel="next"`,
+ `<http://localhost:3000/api/v1/repos/jerrykan/example-repo/issues?page=5&state=all>; rel="last"`,
+ `<http://localhost:3000/api/v1/repos/jerrykan/example-repo/issues?page=1&state=all>; rel="first"`,
+ `<http://localhost:3000/api/v1/repos/jerrykan/example-repo/issues?page=1&state=all>; rel="prev"`,
+ },
+ "api/v1/repos/jerrykan/example-repo/issues?state=all&page=5": {
+ `<http://localhost:3000/api/v1/repos/jerrykan/example-repo/issues?page=1&state=all>; rel="first"`,
+ `<http://localhost:3000/api/v1/repos/jerrykan/example-repo/issues?page=4&state=all>; rel="prev"`,
+ },
+ }
+
+ for req, response := range kases {
+ u, err := url.Parse(setting.AppURL + req)
+ require.NoError(t, err)
+
+ p := u.Query().Get("page")
+ curPage, _ := strconv.Atoi(p)
+
+ links := genAPILinks(u, 100, 20, curPage)
+
+ assert.EqualValues(t, response, links)
+ }
+}
diff --git a/services/context/base.go b/services/context/base.go
new file mode 100644
index 0000000..0259e0d
--- /dev/null
+++ b/services/context/base.go
@@ -0,0 +1,315 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "context"
+ "fmt"
+ "html/template"
+ "io"
+ "net/http"
+ "net/url"
+ "strconv"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/modules/httplib"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/translation"
+ "code.gitea.io/gitea/modules/web/middleware"
+
+ "github.com/go-chi/chi/v5"
+)
+
+type contextValuePair struct {
+ key any
+ valueFn func() any
+}
+
+type Base struct {
+ originCtx context.Context
+ contextValues []contextValuePair
+
+ Resp ResponseWriter
+ Req *http.Request
+
+ // Data is prepared by ContextDataStore middleware, this field only refers to the pre-created/prepared ContextData.
+ // Although it's mainly used for MVC templates, sometimes it's also used to pass data between middlewares/handler
+ Data middleware.ContextData
+
+ // Locale is mainly for Web context, although the API context also uses it in some cases: message response, form validation
+ Locale translation.Locale
+}
+
+func (b *Base) Deadline() (deadline time.Time, ok bool) {
+ return b.originCtx.Deadline()
+}
+
+func (b *Base) Done() <-chan struct{} {
+ return b.originCtx.Done()
+}
+
+func (b *Base) Err() error {
+ return b.originCtx.Err()
+}
+
+func (b *Base) Value(key any) any {
+ for _, pair := range b.contextValues {
+ if pair.key == key {
+ return pair.valueFn()
+ }
+ }
+ return b.originCtx.Value(key)
+}
+
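+// AppendContextValueFunc registers a lazily-evaluated context value; Value
+// consults these pairs before falling back to the wrapped context, which lets
+// e.g. a git repository be exposed only once it has actually been opened.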
+func (b *Base) AppendContextValueFunc(key any, valueFn func() any) any {
+ b.contextValues = append(b.contextValues, contextValuePair{key, valueFn})
+ return b
+}
+
+func (b *Base) AppendContextValue(key, value any) any {
+ b.contextValues = append(b.contextValues, contextValuePair{key, func() any { return value }})
+ return b
+}
+
+func (b *Base) GetData() middleware.ContextData {
+ return b.Data
+}
+
+// AppendAccessControlExposeHeaders appends the given header names to the "Access-Control-Expose-Headers" header
+func (b *Base) AppendAccessControlExposeHeaders(names ...string) {
+ val := b.RespHeader().Get("Access-Control-Expose-Headers")
+ if len(val) != 0 {
+ b.RespHeader().Set("Access-Control-Expose-Headers", fmt.Sprintf("%s, %s", val, strings.Join(names, ", ")))
+ } else {
+ b.RespHeader().Set("Access-Control-Expose-Headers", strings.Join(names, ", "))
+ }
+}
+
+// SetTotalCountHeader sets the "X-Total-Count" header
+func (b *Base) SetTotalCountHeader(total int64) {
+ b.RespHeader().Set("X-Total-Count", fmt.Sprint(total))
+ b.AppendAccessControlExposeHeaders("X-Total-Count")
+}
+
+// Written returns true if something has already been sent to the web browser
+func (b *Base) Written() bool {
+ return b.Resp.WrittenStatus() != 0
+}
+
+func (b *Base) WrittenStatus() int {
+ return b.Resp.WrittenStatus()
+}
+
+// Status writes status code
+func (b *Base) Status(status int) {
+ b.Resp.WriteHeader(status)
+}
+
+// Write writes data to web browser
+func (b *Base) Write(bs []byte) (int, error) {
+ return b.Resp.Write(bs)
+}
+
+// RespHeader returns the response header
+func (b *Base) RespHeader() http.Header {
+ return b.Resp.Header()
+}
+
+// Error returns an error message to the web browser
+func (b *Base) Error(status int, contents ...string) {
+ v := http.StatusText(status)
+ if len(contents) > 0 {
+ v = contents[0]
+ }
+ http.Error(b.Resp, v, status)
+}
+
+// JSON render content as JSON
+func (b *Base) JSON(status int, content any) {
+ b.Resp.Header().Set("Content-Type", "application/json;charset=utf-8")
+ b.Resp.WriteHeader(status)
+ if err := json.NewEncoder(b.Resp).Encode(content); err != nil {
+ log.Error("Render JSON failed: %v", err)
+ }
+}
+
+// RemoteAddr returns the client machine's IP address
+func (b *Base) RemoteAddr() string {
+ return b.Req.RemoteAddr
+}
+
+// Params returns the param on route
+func (b *Base) Params(p string) string {
+ s, _ := url.PathUnescape(chi.URLParam(b.Req, strings.TrimPrefix(p, ":")))
+ return s
+}
+
+func (b *Base) PathParamRaw(p string) string {
+ return chi.URLParam(b.Req, strings.TrimPrefix(p, ":"))
+}
+
+// ParamsInt64 returns the param on route as int64
+func (b *Base) ParamsInt64(p string) int64 {
+ v, _ := strconv.ParseInt(b.Params(p), 10, 64)
+ return v
+}
+
+// SetParams sets a param value into the route context
+func (b *Base) SetParams(k, v string) {
+ chiCtx := chi.RouteContext(b)
+ chiCtx.URLParams.Add(strings.TrimPrefix(k, ":"), url.PathEscape(v))
+}
+
+// FormString returns the first value matching the provided key in the form as a string
+func (b *Base) FormString(key string) string {
+ return b.Req.FormValue(key)
+}
+
+// FormStrings returns a string slice for the provided key from the form
+func (b *Base) FormStrings(key string) []string {
+ if b.Req.Form == nil {
+ if err := b.Req.ParseMultipartForm(32 << 20); err != nil {
+ return nil
+ }
+ }
+ if v, ok := b.Req.Form[key]; ok {
+ return v
+ }
+ return nil
+}
+
+// FormTrim returns the first value for the provided key in the form as a space trimmed string
+func (b *Base) FormTrim(key string) string {
+ return strings.TrimSpace(b.Req.FormValue(key))
+}
+
+// FormInt returns the first value for the provided key in the form as an int
+func (b *Base) FormInt(key string) int {
+ v, _ := strconv.Atoi(b.Req.FormValue(key))
+ return v
+}
+
+// FormInt64 returns the first value for the provided key in the form as an int64
+func (b *Base) FormInt64(key string) int64 {
+ v, _ := strconv.ParseInt(b.Req.FormValue(key), 10, 64)
+ return v
+}
+
+// FormBool returns true if the value for the provided key in the form is "1", "true" or "on"
+func (b *Base) FormBool(key string) bool {
+ s := b.Req.FormValue(key)
+ v, _ := strconv.ParseBool(s)
+ v = v || strings.EqualFold(s, "on")
+ return v
+}
+
+// FormOptionalBool returns optional.Some(true) or optional.Some(false) if the
+// value for the provided key exists in the form, else optional.None[bool]()
+func (b *Base) FormOptionalBool(key string) optional.Option[bool] {
+ value := b.Req.FormValue(key)
+ if len(value) == 0 {
+ return optional.None[bool]()
+ }
+ v, _ := strconv.ParseBool(value)
+ v = v || strings.EqualFold(value, "on")
+ return optional.Some(v)
+}
+
+func (b *Base) SetFormString(key, value string) {
+ _ = b.Req.FormValue(key) // force parse form
+ b.Req.Form.Set(key, value)
+}
+
+// plainTextInternal renders bytes as plain text; 4xx/5xx responses are logged at trace level
+func (b *Base) plainTextInternal(skip, status int, bs []byte) {
+ statusPrefix := status / 100
+ if statusPrefix == 4 || statusPrefix == 5 {
+ log.Log(skip, log.TRACE, "plainTextInternal (status=%d): %s", status, string(bs))
+ }
+ b.Resp.Header().Set("Content-Type", "text/plain;charset=utf-8")
+ b.Resp.Header().Set("X-Content-Type-Options", "nosniff")
+ b.Resp.WriteHeader(status)
+ _, _ = b.Resp.Write(bs)
+}
+
+// PlainTextBytes renders bytes as plain text
+func (b *Base) PlainTextBytes(status int, bs []byte) {
+ b.plainTextInternal(2, status, bs)
+}
+
+// PlainText renders content as plain text
+func (b *Base) PlainText(status int, text string) {
+ b.plainTextInternal(2, status, []byte(text))
+}
+
+// Redirect redirects the request
+func (b *Base) Redirect(location string, status ...int) {
+ code := http.StatusSeeOther
+ if len(status) == 1 {
+ code = status[0]
+ }
+
+ if httplib.IsRiskyRedirectURL(location) {
+ // Some browsers (Safari) have buggy behavior for Cookie + Cache + External Redirection, eg: /my-path => https://other/path
+ // 1. the first request to "/my-path" contains cookie
+ // 2. some time later, the request to "/my-path" doesn't contain cookie (caused by Prevent web tracking)
+ // 3. Gitea's Sessioner doesn't see the session cookie, so it generates a new session id, and returns it to browser
+ // 4. then the browser accepts the empty session, then the user is logged out
+ // So in this case, we should remove the session cookie from the response header
+ removeSessionCookieHeader(b.Resp)
+ }
+ // in case the request is made by htmx, have it redirect the browser instead of trying to follow the redirect inside htmx
+ if b.Req.Header.Get("HX-Request") == "true" {
+ b.Resp.Header().Set("HX-Redirect", location)
+ // we have to return a non-redirect status code so XMLHTTPRequest will not immediately follow the redirect
+ // so as to give htmx redirect logic a chance to run
+ b.Status(http.StatusNoContent)
+ return
+ }
+ http.Redirect(b.Resp, b.Req, location, code)
+}
+
+type ServeHeaderOptions httplib.ServeHeaderOptions
+
+func (b *Base) SetServeHeaders(opt *ServeHeaderOptions) {
+ httplib.ServeSetHeaders(b.Resp, (*httplib.ServeHeaderOptions)(opt))
+}
+
+// ServeContent serves content to http request
+func (b *Base) ServeContent(r io.ReadSeeker, opts *ServeHeaderOptions) {
+ httplib.ServeSetHeaders(b.Resp, (*httplib.ServeHeaderOptions)(opts))
+ http.ServeContent(b.Resp, b.Req, opts.Filename, opts.LastModified, r)
+}
+
+// cleanUp frees all resources held by the Context
+func (b *Base) cleanUp() {
+ if b.Req != nil && b.Req.MultipartForm != nil {
+ _ = b.Req.MultipartForm.RemoveAll() // remove the temp files buffered to tmp directory
+ }
+}
+
+func (b *Base) Tr(msg string, args ...any) template.HTML {
+ return b.Locale.Tr(msg, args...)
+}
+
+func (b *Base) TrN(cnt any, key1, keyN string, args ...any) template.HTML {
+ return b.Locale.TrN(cnt, key1, keyN, args...)
+}
+
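+// NewBaseContext wraps the request and response into a *Base which also acts
+// as the request's context.Context; the returned closeFunc must be called to
+// release any temporary files buffered for multipart forms.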
+func NewBaseContext(resp http.ResponseWriter, req *http.Request) (b *Base, closeFunc func()) {
+ b = &Base{
+ originCtx: req.Context(),
+ Req: req,
+ Resp: WrapResponseWriter(resp),
+ Locale: middleware.Locale(resp, req),
+ Data: middleware.GetContextData(req.Context()),
+ }
+ b.AppendContextValue(translation.ContextKey, b.Locale)
+ b.Req = b.Req.WithContext(b)
+ return b, b.cleanUp
+}
diff --git a/services/context/base_test.go b/services/context/base_test.go
new file mode 100644
index 0000000..823f20e
--- /dev/null
+++ b/services/context/base_test.go
@@ -0,0 +1,47 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "net/http"
+ "net/http/httptest"
+ "testing"
+
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestRedirect(t *testing.T) {
+ req, _ := http.NewRequest("GET", "/", nil)
+
+ cases := []struct {
+ url string
+ keep bool
+ }{
+ {"http://test", false},
+ {"https://test", false},
+ {"//test", false},
+ {"/://test", true},
+ {"/test", true},
+ }
+ for _, c := range cases {
+ resp := httptest.NewRecorder()
+ b, cleanup := NewBaseContext(resp, req)
+ resp.Header().Add("Set-Cookie", (&http.Cookie{Name: setting.SessionConfig.CookieName, Value: "dummy"}).String())
+ b.Redirect(c.url)
+ cleanup()
+ has := resp.Header().Get("Set-Cookie") == setting.SessionConfig.CookieName+"=dummy"
+ assert.Equal(t, c.keep, has, "url = %q", c.url)
+ }
+
+ req, _ = http.NewRequest("GET", "/", nil)
+ resp := httptest.NewRecorder()
+ req.Header.Add("HX-Request", "true")
+ b, cleanup := NewBaseContext(resp, req)
+ b.Redirect("/other")
+ cleanup()
+ assert.Equal(t, "/other", resp.Header().Get("HX-Redirect"))
+ assert.Equal(t, http.StatusNoContent, resp.Code)
+}
diff --git a/services/context/captcha.go b/services/context/captcha.go
new file mode 100644
index 0000000..da837ac
--- /dev/null
+++ b/services/context/captcha.go
@@ -0,0 +1,118 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "fmt"
+ "sync"
+
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/cache"
+ "code.gitea.io/gitea/modules/hcaptcha"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/mcaptcha"
+ "code.gitea.io/gitea/modules/recaptcha"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/turnstile"
+
+ mc "code.forgejo.org/go-chi/cache"
+ "code.forgejo.org/go-chi/captcha"
+)
+
+var (
+ imageCaptchaOnce sync.Once
+ imageCachePrefix = "captcha:"
+)
+
+type imageCaptchaStore struct {
+ c mc.Cache
+}
+
+func (c *imageCaptchaStore) Set(id string, digits []byte) {
+ if err := c.c.Put(imageCachePrefix+id, string(digits), int64(captcha.Expiration.Seconds())); err != nil {
+ log.Error("Couldn't store captcha cache for %q: %v", id, err)
+ }
+}
+
+func (c *imageCaptchaStore) Get(id string, clear bool) (digits []byte) {
+ val, ok := c.c.Get(imageCachePrefix + id).(string)
+ if !ok {
+ return digits
+ }
+
+ if clear {
+ if err := c.c.Delete(imageCachePrefix + id); err != nil {
+ log.Error("Couldn't delete captcha cache for %q: %v", id, err)
+ }
+ }
+
+ return []byte(val)
+}
+
+// GetImageCaptcha returns a new image captcha ID.
+func GetImageCaptcha() string {
+ imageCaptchaOnce.Do(func() {
+ captcha.SetCustomStore(&imageCaptchaStore{c: cache.GetCache()})
+ })
+ return captcha.New()
+}
+
+// SetCaptchaData sets common captcha data
+func SetCaptchaData(ctx *Context) {
+ if !setting.Service.EnableCaptcha {
+ return
+ }
+ ctx.Data["EnableCaptcha"] = setting.Service.EnableCaptcha
+ ctx.Data["RecaptchaURL"] = setting.Service.RecaptchaURL
+ ctx.Data["Captcha"] = GetImageCaptcha()
+ ctx.Data["CaptchaType"] = setting.Service.CaptchaType
+ ctx.Data["RecaptchaSitekey"] = setting.Service.RecaptchaSitekey
+ ctx.Data["HcaptchaSitekey"] = setting.Service.HcaptchaSitekey
+ ctx.Data["McaptchaSitekey"] = setting.Service.McaptchaSitekey
+ ctx.Data["McaptchaURL"] = setting.Service.McaptchaURL
+ ctx.Data["CfTurnstileSitekey"] = setting.Service.CfTurnstileSitekey
+}
+
+const (
+ imgCaptchaIDField = "img-captcha-id"
+ imgCaptchaResponseField = "img-captcha-response"
+ gRecaptchaResponseField = "g-recaptcha-response"
+ hCaptchaResponseField = "h-captcha-response"
+ mCaptchaResponseField = "m-captcha-response"
+ cfTurnstileResponseField = "cf-turnstile-response"
+)
+
+// VerifyCaptcha verifies Captcha data
+// No-op if captchas are not enabled
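+// Callers typically check ctx.Written() afterwards, since on failure the form
+// is re-rendered with an error and the response has already been written.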
+func VerifyCaptcha(ctx *Context, tpl base.TplName, form any) {
+ if !setting.Service.EnableCaptcha {
+ return
+ }
+
+ var valid bool
+ var err error
+ switch setting.Service.CaptchaType {
+ case setting.ImageCaptcha:
+ valid = captcha.VerifyString(ctx.Req.Form.Get(imgCaptchaIDField), ctx.Req.Form.Get(imgCaptchaResponseField))
+ case setting.ReCaptcha:
+ valid, err = recaptcha.Verify(ctx, ctx.Req.Form.Get(gRecaptchaResponseField))
+ case setting.HCaptcha:
+ valid, err = hcaptcha.Verify(ctx, ctx.Req.Form.Get(hCaptchaResponseField))
+ case setting.MCaptcha:
+ valid, err = mcaptcha.Verify(ctx, ctx.Req.Form.Get(mCaptchaResponseField))
+ case setting.CfTurnstile:
+ valid, err = turnstile.Verify(ctx, ctx.Req.Form.Get(cfTurnstileResponseField))
+ default:
+ ctx.ServerError("Unknown Captcha Type", fmt.Errorf("unknown Captcha Type: %s", setting.Service.CaptchaType))
+ return
+ }
+ if err != nil {
+ log.Debug("Captcha Verify failed: %v", err)
+ }
+
+ if !valid {
+ ctx.Data["Err_Captcha"] = true
+ ctx.RenderWithErr(ctx.Tr("form.captcha_incorrect"), tpl, form)
+ }
+}
diff --git a/services/context/context.go b/services/context/context.go
new file mode 100644
index 0000000..91e7b18
--- /dev/null
+++ b/services/context/context.go
@@ -0,0 +1,254 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "context"
+ "encoding/hex"
+ "fmt"
+ "html/template"
+ "io"
+ "net/http"
+ "net/url"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ mc "code.gitea.io/gitea/modules/cache"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/httpcache"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/templates"
+ "code.gitea.io/gitea/modules/translation"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/modules/web/middleware"
+ web_types "code.gitea.io/gitea/modules/web/types"
+
+ "code.forgejo.org/go-chi/cache"
+ "code.forgejo.org/go-chi/session"
+)
+
+// Render represents a template render
+type Render interface {
+ TemplateLookup(tmpl string, templateCtx context.Context) (templates.TemplateExecutor, error)
+ HTML(w io.Writer, status int, name string, data any, templateCtx context.Context) error
+}
+
+// Context represents context of a request.
+type Context struct {
+ *Base
+
+ TemplateContext TemplateContext
+
+ Render Render
+ PageData map[string]any // data used by JavaScript modules in one page, it's `window.config.pageData`
+
+ Cache cache.Cache
+ Csrf CSRFProtector
+ Flash *middleware.Flash
+ Session session.Store
+
+ Link string // current request URL (without query string)
+
+ Doer *user_model.User // current signed-in user
+ IsSigned bool
+ IsBasicAuth bool
+
+ ContextUser *user_model.User // the user which is being visited, in most cases it differs from Doer
+
+ Repo *Repository
+ Org *Organization
+ Package *Package
+}
+
+type TemplateContext map[string]any
+
+func init() {
+ web.RegisterResponseStatusProvider[*Context](func(req *http.Request) web_types.ResponseStatusProvider {
+ return req.Context().Value(WebContextKey).(*Context)
+ })
+}
+
+type webContextKeyType struct{}
+
+var WebContextKey = webContextKeyType{}
+
+func GetWebContext(req *http.Request) *Context {
+ ctx, _ := req.Context().Value(WebContextKey).(*Context)
+ return ctx
+}
+
+// ValidateContext is a special context for form validation middleware. It may be different from other contexts.
+type ValidateContext struct {
+ *Base
+}
+
+// GetValidateContext gets a context for middleware form validation
+func GetValidateContext(req *http.Request) (ctx *ValidateContext) {
+ if ctxAPI, ok := req.Context().Value(apiContextKey).(*APIContext); ok {
+ ctx = &ValidateContext{Base: ctxAPI.Base}
+ } else if ctxWeb, ok := req.Context().Value(WebContextKey).(*Context); ok {
+ ctx = &ValidateContext{Base: ctxWeb.Base}
+ } else {
+ panic("invalid context, expect either APIContext or Context")
+ }
+ return ctx
+}
+
+func NewTemplateContextForWeb(ctx *Context) TemplateContext {
+ tmplCtx := NewTemplateContext(ctx)
+ tmplCtx["Locale"] = ctx.Base.Locale
+ tmplCtx["AvatarUtils"] = templates.NewAvatarUtils(ctx)
+ return tmplCtx
+}
+
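+// NewWebContext creates a web Context on top of Base, wiring up the renderer,
+// session store, cache and flash storage; Link is the escaped request path
+// prefixed with AppSubURL.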
+func NewWebContext(base *Base, render Render, session session.Store) *Context {
+ ctx := &Context{
+ Base: base,
+ Render: render,
+ Session: session,
+
+ Cache: mc.GetCache(),
+ Link: setting.AppSubURL + strings.TrimSuffix(base.Req.URL.EscapedPath(), "/"),
+ Repo: &Repository{PullRequest: &PullRequest{}},
+ Org: &Organization{},
+ }
+ ctx.TemplateContext = NewTemplateContextForWeb(ctx)
+ ctx.Flash = &middleware.Flash{DataStore: ctx, Values: url.Values{}}
+ return ctx
+}
+
+// Contexter initializes a classic context for a request.
+func Contexter() func(next http.Handler) http.Handler {
+ rnd := templates.HTMLRenderer()
+ csrfOpts := CsrfOptions{
+ Secret: hex.EncodeToString(setting.GetGeneralTokenSigningSecret()),
+ Cookie: setting.CSRFCookieName,
+ Secure: setting.SessionConfig.Secure,
+ CookieHTTPOnly: setting.CSRFCookieHTTPOnly,
+ CookieDomain: setting.SessionConfig.Domain,
+ CookiePath: setting.SessionConfig.CookiePath,
+ SameSite: setting.SessionConfig.SameSite,
+ }
+ if !setting.IsProd {
+ CsrfTokenRegenerationInterval = 5 * time.Second // in dev, re-generate the tokens more aggressively for debugging purposes
+ }
+ return func(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) {
+ base, baseCleanUp := NewBaseContext(resp, req)
+ defer baseCleanUp()
+ ctx := NewWebContext(base, rnd, session.GetSession(req))
+
+ ctx.Data.MergeFrom(middleware.CommonTemplateContextData())
+ ctx.Data["Context"] = ctx // TODO: use "ctx" in template and remove this
+ ctx.Data["CurrentURL"] = setting.AppSubURL + req.URL.RequestURI()
+ ctx.Data["Link"] = ctx.Link
+
+ // PageData is passed by reference, and it will be rendered to `window.config.pageData` in `head.tmpl` for JavaScript modules
+ ctx.PageData = map[string]any{}
+ ctx.Data["PageData"] = ctx.PageData
+
+ ctx.Base.AppendContextValue(WebContextKey, ctx)
+ ctx.Base.AppendContextValueFunc(gitrepo.RepositoryContextKey, func() any { return ctx.Repo.GitRepo })
+
+ ctx.Csrf = NewCSRFProtector(csrfOpts)
+
+ // Get the last flash message from cookie
+ lastFlashCookie := middleware.GetSiteCookie(ctx.Req, CookieNameFlash)
+ if vals, _ := url.ParseQuery(lastFlashCookie); len(vals) > 0 {
+ // store last Flash message into the template data, to render it
+ ctx.Data["Flash"] = &middleware.Flash{
+ DataStore: ctx,
+ Values: vals,
+ ErrorMsg: vals.Get("error"),
+ SuccessMsg: vals.Get("success"),
+ InfoMsg: vals.Get("info"),
+ WarningMsg: vals.Get("warning"),
+ }
+ }
+
+ // if there are new messages in the ctx.Flash, write them into cookie
+ ctx.Resp.Before(func(resp ResponseWriter) {
+ if val := ctx.Flash.Encode(); val != "" {
+ middleware.SetSiteCookie(ctx.Resp, CookieNameFlash, val, 0)
+ } else if lastFlashCookie != "" {
+ middleware.SetSiteCookie(ctx.Resp, CookieNameFlash, "", -1)
+ }
+ })
+
+ // If the request sends files, parse them here; otherwise Query() can't be parsed and the CsrfToken will be invalid.
+ if ctx.Req.Method == "POST" && strings.Contains(ctx.Req.Header.Get("Content-Type"), "multipart/form-data") {
+ if err := ctx.Req.ParseMultipartForm(setting.Attachment.MaxSize << 20); err != nil && !strings.Contains(err.Error(), "EOF") { // Attachment.MaxSize is in MiB
+ ctx.ServerError("ParseMultipartForm", err)
+ return
+ }
+ }
+
+ httpcache.SetCacheControlInHeader(ctx.Resp.Header(), 0, "no-transform")
+ ctx.Resp.Header().Set(`X-Frame-Options`, setting.CORSConfig.XFrameOptions)
+
+ ctx.Data["SystemConfig"] = setting.Config()
+
+ // FIXME: do we really always need these settings? There should be some way to avoid having to set them on every request
+ ctx.Data["DisableMigrations"] = setting.Repository.DisableMigrations
+ ctx.Data["DisableStars"] = setting.Repository.DisableStars
+ ctx.Data["DisableForks"] = setting.Repository.DisableForks
+ ctx.Data["EnableActions"] = setting.Actions.Enabled
+
+ ctx.Data["ManifestData"] = setting.ManifestData
+
+ ctx.Data["UnitWikiGlobalDisabled"] = unit.TypeWiki.UnitGlobalDisabled()
+ ctx.Data["UnitIssuesGlobalDisabled"] = unit.TypeIssues.UnitGlobalDisabled()
+ ctx.Data["UnitPullsGlobalDisabled"] = unit.TypePullRequests.UnitGlobalDisabled()
+ ctx.Data["UnitProjectsGlobalDisabled"] = unit.TypeProjects.UnitGlobalDisabled()
+ ctx.Data["UnitActionsGlobalDisabled"] = unit.TypeActions.UnitGlobalDisabled()
+
+ ctx.Data["AllLangs"] = translation.AllLangs()
+
+ next.ServeHTTP(ctx.Resp, ctx.Req)
+ })
+ }
+}
+
+// HasError returns true if an error occurred during form validation.
+// Attention: this function changes ctx.Data and ctx.Flash
+func (ctx *Context) HasError() bool {
+ hasErr, ok := ctx.Data["HasError"]
+ if !ok {
+ return false
+ }
+ ctx.Flash.ErrorMsg = ctx.GetErrMsg()
+ ctx.Data["Flash"] = ctx.Flash
+ return hasErr.(bool)
+}
+
+// GetErrMsg returns error message in form validation.
+func (ctx *Context) GetErrMsg() string {
+ msg, _ := ctx.Data["ErrorMsg"].(string)
+ if msg == "" {
+ msg = "invalid form data"
+ }
+ return msg
+}
+
+func (ctx *Context) JSONRedirect(redirect string) {
+ ctx.JSON(http.StatusOK, map[string]any{"redirect": redirect})
+}
+
+func (ctx *Context) JSONOK() {
+ ctx.JSON(http.StatusOK, map[string]any{"ok": true}) // this is only a dummy response, frontend seldom uses it
+}
+
+func (ctx *Context) JSONError(msg any) {
+ switch v := msg.(type) {
+ case string:
+ ctx.JSON(http.StatusBadRequest, map[string]any{"errorMessage": v, "renderFormat": "text"})
+ case template.HTML:
+ ctx.JSON(http.StatusBadRequest, map[string]any{"errorMessage": v, "renderFormat": "html"})
+ default:
+ panic(fmt.Sprintf("unsupported type: %T", msg))
+ }
+}
diff --git a/services/context/context_cookie.go b/services/context/context_cookie.go
new file mode 100644
index 0000000..3699f81
--- /dev/null
+++ b/services/context/context_cookie.go
@@ -0,0 +1,56 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "net/http"
+ "strings"
+
+ auth_model "code.gitea.io/gitea/models/auth"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/web/middleware"
+)
+
+const CookieNameFlash = "gitea_flash"
+
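+// removeSessionCookieHeader drops any pending Set-Cookie header for the
+// session cookie from the response while keeping all other cookies intact.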
+func removeSessionCookieHeader(w http.ResponseWriter) {
+ cookies := w.Header()["Set-Cookie"]
+ w.Header().Del("Set-Cookie")
+ for _, cookie := range cookies {
+ if strings.HasPrefix(cookie, setting.SessionConfig.CookieName+"=") {
+ continue
+ }
+ w.Header().Add("Set-Cookie", cookie)
+ }
+}
+
+// SetSiteCookie is a convenience function to set most cookies consistently
+// CSRF and a few others are the exception here
+func (ctx *Context) SetSiteCookie(name, value string, maxAge int) {
+ middleware.SetSiteCookie(ctx.Resp, name, value, maxAge)
+}
+
+// DeleteSiteCookie is a convenience function to delete most cookies consistently
+// CSRF and a few others are the exception here
+func (ctx *Context) DeleteSiteCookie(name string) {
+ middleware.SetSiteCookie(ctx.Resp, name, "", -1)
+}
+
+// GetSiteCookie returns given cookie value from request header.
+func (ctx *Context) GetSiteCookie(name string) string {
+ return middleware.GetSiteCookie(ctx.Req, name)
+}
+
+// SetLTACookie generates an LTA (long-term authorization) token and sets it as a cookie.
+func (ctx *Context) SetLTACookie(u *user_model.User) error {
+ days := 86400 * setting.LogInRememberDays
+ lookup, validator, err := auth_model.GenerateAuthToken(ctx, u.ID, timeutil.TimeStampNow().Add(int64(days)), auth_model.LongTermAuthorization)
+ if err != nil {
+ return err
+ }
+ ctx.SetSiteCookie(setting.CookieRememberName, lookup+":"+validator, days)
+ return nil
+}
diff --git a/services/context/context_model.go b/services/context/context_model.go
new file mode 100644
index 0000000..4f70aac
--- /dev/null
+++ b/services/context/context_model.go
@@ -0,0 +1,29 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "code.gitea.io/gitea/models/unit"
+)
+
+// IsUserSiteAdmin returns true if current user is a site admin
+func (ctx *Context) IsUserSiteAdmin() bool {
+ return ctx.IsSigned && ctx.Doer.IsAdmin
+}
+
+// IsUserRepoAdmin returns true if current user is admin in current repo
+func (ctx *Context) IsUserRepoAdmin() bool {
+ return ctx.Repo.IsAdmin()
+}
+
+// IsUserRepoWriter returns true if current user has write privilege in current repo
+func (ctx *Context) IsUserRepoWriter(unitTypes []unit.Type) bool {
+ for _, unitType := range unitTypes {
+ if ctx.Repo.CanWrite(unitType) {
+ return true
+ }
+ }
+
+ return false
+}
diff --git a/services/context/context_request.go b/services/context/context_request.go
new file mode 100644
index 0000000..984b9ac
--- /dev/null
+++ b/services/context/context_request.go
@@ -0,0 +1,32 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "io"
+ "net/http"
+ "strings"
+)
+
+// UploadStream returns the request body or the first form file
+// Only form files need to get closed.
+func (ctx *Context) UploadStream() (rd io.ReadCloser, needToClose bool, err error) {
+ contentType := strings.ToLower(ctx.Req.Header.Get("Content-Type"))
+ if strings.HasPrefix(contentType, "application/x-www-form-urlencoded") || strings.HasPrefix(contentType, "multipart/form-data") {
+ if err := ctx.Req.ParseMultipartForm(32 << 20); err != nil {
+ return nil, false, err
+ }
+ if ctx.Req.MultipartForm.File == nil {
+ return nil, false, http.ErrMissingFile
+ }
+ for _, files := range ctx.Req.MultipartForm.File {
+ if len(files) > 0 {
+ r, err := files[0].Open()
+ return r, true, err
+ }
+ }
+ return nil, false, http.ErrMissingFile
+ }
+ return ctx.Req.Body, false, nil
+}
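+
+// A typical (illustrative) use of UploadStream:
+//
+//	rd, needToClose, err := ctx.UploadStream()
+//	if err != nil {
+//		return err
+//	}
+//	if needToClose {
+//		defer rd.Close()
+//	}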
diff --git a/services/context/context_response.go b/services/context/context_response.go
new file mode 100644
index 0000000..f36b834
--- /dev/null
+++ b/services/context/context_response.go
@@ -0,0 +1,194 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "errors"
+ "fmt"
+ "html/template"
+ "net"
+ "net/http"
+ "net/url"
+ "path"
+ "strconv"
+ "strings"
+ "syscall"
+ "time"
+
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/httplib"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/templates"
+ "code.gitea.io/gitea/modules/web/middleware"
+)
+
+// RedirectToUser redirects to a differently-named user
+func RedirectToUser(ctx *Base, userName string, redirectUserID int64) {
+ user, err := user_model.GetUserByID(ctx, redirectUserID)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, "unable to get user")
+ return
+ }
+
+ redirectPath := strings.Replace(
+ ctx.Req.URL.EscapedPath(),
+ url.PathEscape(userName),
+ url.PathEscape(user.Name),
+ 1,
+ )
+ if ctx.Req.URL.RawQuery != "" {
+ redirectPath += "?" + ctx.Req.URL.RawQuery
+ }
+ ctx.Redirect(path.Join(setting.AppSubURL, redirectPath), http.StatusTemporaryRedirect)
+}
+
+// RedirectToFirst redirects to the first non-empty URL that likely belongs to the current site.
+// If no suitable redirection is found, it redirects to the home page.
+// It returns the location it redirected to.
+func (ctx *Context) RedirectToFirst(location ...string) string {
+ for _, loc := range location {
+ if len(loc) == 0 {
+ continue
+ }
+
+ if httplib.IsRiskyRedirectURL(loc) {
+ continue
+ }
+
+ ctx.Redirect(loc)
+ return loc
+ }
+
+ ctx.Redirect(setting.AppSubURL + "/")
+ return setting.AppSubURL + "/"
+}
+
+const tplStatus500 base.TplName = "status/500"
+
+// HTML renders the given template to the HTTP response
+func (ctx *Context) HTML(status int, name base.TplName) {
+ log.Debug("Template: %s", name)
+
+ tmplStartTime := time.Now()
+ if !setting.IsProd {
+ ctx.Data["TemplateName"] = name
+ }
+ ctx.Data["TemplateLoadTimes"] = func() string {
+ return strconv.FormatInt(time.Since(tmplStartTime).Nanoseconds()/1e6, 10) + "ms"
+ }
+
+ err := ctx.Render.HTML(ctx.Resp, status, string(name), ctx.Data, ctx.TemplateContext)
+ if err == nil || errors.Is(err, syscall.EPIPE) {
+ return
+ }
+
+ // if rendering fails, show error page
+ if name != tplStatus500 {
+ err = fmt.Errorf("failed to render template: %s, error: %s", name, templates.HandleTemplateRenderingError(err))
+ ctx.ServerError("Render failed", err) // show the 500 error page
+ } else {
+ ctx.PlainText(http.StatusInternalServerError, "Unable to render status/500 page, the template system is broken, or Gitea can't find your template files.")
+ return
+ }
+}
+
+// JSONTemplate renders the template as a JSON response.
+// Keep in mind that the template is processed in an HTML context, so JSON values should be handled carefully, e.g. escaped via JSEscape.
+func (ctx *Context) JSONTemplate(tmpl base.TplName) {
+ t, err := ctx.Render.TemplateLookup(string(tmpl), nil)
+ if err != nil {
+ ctx.ServerError("unable to find template", err)
+ return
+ }
+ ctx.Resp.Header().Set("Content-Type", "application/json")
+ if err = t.Execute(ctx.Resp, ctx.Data); err != nil {
+ ctx.ServerError("unable to execute template", err)
+ }
+}
+
+// RenderToHTML renders the template content to an HTML string.
+func (ctx *Context) RenderToHTML(name base.TplName, data map[string]any) (template.HTML, error) {
+ var buf strings.Builder
+ err := ctx.Render.HTML(&buf, 0, string(name), data, ctx.TemplateContext)
+ return template.HTML(buf.String()), err
+}
+
+// RenderWithErr is used for pages that have form validation and need to prompt the user with an error message.
+func (ctx *Context) RenderWithErr(msg any, tpl base.TplName, form any) {
+ if form != nil {
+ middleware.AssignForm(form, ctx.Data)
+ }
+ ctx.Flash.Error(msg, true)
+ ctx.HTML(http.StatusOK, tpl)
+}
+
+// NotFound displays a 404 (Not Found) page and prints the given error, if any.
+func (ctx *Context) NotFound(logMsg string, logErr error) {
+ ctx.notFoundInternal(logMsg, logErr)
+}
+
+func (ctx *Context) notFoundInternal(logMsg string, logErr error) {
+ if logErr != nil {
+ log.Log(2, log.DEBUG, "%s: %v", logMsg, logErr)
+ if !setting.IsProd {
+ ctx.Data["ErrorMsg"] = logErr
+ }
+ }
+
+ // respond with a simple message if the Accept header doesn't include text/html
+ showHTML := false
+ for _, part := range ctx.Req.Header["Accept"] {
+ if strings.Contains(part, "text/html") {
+ showHTML = true
+ break
+ }
+ }
+
+ if !showHTML {
+ ctx.plainTextInternal(3, http.StatusNotFound, []byte("Not found.\n"))
+ return
+ }
+
+ ctx.Data["IsRepo"] = ctx.Repo.Repository != nil
+ ctx.Data["Title"] = "Page Not Found"
+ ctx.HTML(http.StatusNotFound, base.TplName("status/404"))
+}
+
+// ServerError displays a 500 (Internal Server Error) page and prints the given error, if any.
+func (ctx *Context) ServerError(logMsg string, logErr error) {
+ ctx.serverErrorInternal(logMsg, logErr)
+}
+
+func (ctx *Context) serverErrorInternal(logMsg string, logErr error) {
+ if logErr != nil {
+ log.ErrorWithSkip(2, "%s: %v", logMsg, logErr)
+ if _, ok := logErr.(*net.OpError); ok || errors.Is(logErr, &net.OpError{}) {
+ // This is an error within the underlying connection
+ // and further rendering will not work so just return
+ return
+ }
+
+ // it's safe to show the internal error to admin users, and it helps with debugging
+ if !setting.IsProd || (ctx.Doer != nil && ctx.Doer.IsAdmin) {
+ ctx.Data["ErrorMsg"] = fmt.Sprintf("%s, %s", logMsg, logErr)
+ }
+ }
+
+ ctx.Data["Title"] = "Internal Server Error"
+ ctx.HTML(http.StatusInternalServerError, tplStatus500)
+}
+
+// NotFoundOrServerError uses the given error-check function to determine whether
+// the error means "not found". It responds with a 404 status code for not-found
+// errors; otherwise it logs the error context description and responds with a
+// 500 server error.
+// TODO: remove the "errCheck" and use util.ErrNotFound to check
+func (ctx *Context) NotFoundOrServerError(logMsg string, errCheck func(error) bool, logErr error) {
+ if errCheck(logErr) {
+ ctx.notFoundInternal(logMsg, logErr)
+ return
+ }
+ ctx.serverErrorInternal(logMsg, logErr)
+}
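A short usage sketch of NotFoundOrServerError, pairing it with GetRepositoryByID and IsErrRepoNotExist from repo_model (both appear later in this diff); repoID is a placeholder:

repo, err := repo_model.GetRepositoryByID(ctx, repoID)
if err != nil {
	// 404 when the repo doesn't exist, logged 500 otherwise
	ctx.NotFoundOrServerError("GetRepositoryByID", repo_model.IsErrRepoNotExist, err)
	return
}
_ = repo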
diff --git a/services/context/context_template.go b/services/context/context_template.go
new file mode 100644
index 0000000..7878d40
--- /dev/null
+++ b/services/context/context_template.go
@@ -0,0 +1,35 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "context"
+ "time"
+)
+
+var _ context.Context = TemplateContext(nil)
+
+func NewTemplateContext(ctx context.Context) TemplateContext {
+ return TemplateContext{"_ctx": ctx}
+}
+
+func (c TemplateContext) parentContext() context.Context {
+ return c["_ctx"].(context.Context)
+}
+
+func (c TemplateContext) Deadline() (deadline time.Time, ok bool) {
+ return c.parentContext().Deadline()
+}
+
+func (c TemplateContext) Done() <-chan struct{} {
+ return c.parentContext().Done()
+}
+
+func (c TemplateContext) Err() error {
+ return c.parentContext().Err()
+}
+
+func (c TemplateContext) Value(key any) any {
+ return c.parentContext().Value(key)
+}
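TemplateContext is a plain map that doubles as a context.Context by delegating to the wrapped request context; a minimal sketch, where req is a hypothetical *http.Request and the "Now" key is illustrative:

tmplCtx := NewTemplateContext(req.Context())
tmplCtx["Now"] = time.Now()     // extra data reachable from templates (hypothetical key)
var _ context.Context = tmplCtx // compiles because of the delegating methods above
select {
case <-tmplCtx.Done(): // delegates to req.Context().Done()
default:
}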
diff --git a/services/context/context_test.go b/services/context/context_test.go
new file mode 100644
index 0000000..033ce2e
--- /dev/null
+++ b/services/context/context_test.go
@@ -0,0 +1,24 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "net/http"
+ "net/http/httptest"
+ "testing"
+
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestRemoveSessionCookieHeader(t *testing.T) {
+ w := httptest.NewRecorder()
+ w.Header().Add("Set-Cookie", (&http.Cookie{Name: setting.SessionConfig.CookieName, Value: "foo"}).String())
+ w.Header().Add("Set-Cookie", (&http.Cookie{Name: "other", Value: "bar"}).String())
+ assert.Len(t, w.Header().Values("Set-Cookie"), 2)
+ removeSessionCookieHeader(w)
+ assert.Len(t, w.Header().Values("Set-Cookie"), 1)
+ assert.Contains(t, "other=bar", w.Header().Get("Set-Cookie"))
+}
diff --git a/services/context/csrf.go b/services/context/csrf.go
new file mode 100644
index 0000000..e0518a4
--- /dev/null
+++ b/services/context/csrf.go
@@ -0,0 +1,171 @@
+// Copyright 2013 Martini Authors
+// Copyright 2014 The Macaron Authors
+// Copyright 2021 The Gitea Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"): you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations
+// under the License.
+// SPDX-License-Identifier: Apache-2.0
+
+// A middleware that generates and validates CSRF tokens.
+
+package context
+
+import (
+ "html/template"
+ "net/http"
+ "strconv"
+ "time"
+
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/util"
+)
+
+const (
+ CsrfHeaderName = "X-Csrf-Token"
+ CsrfFormName = "_csrf"
+ CsrfErrorString = "Invalid CSRF token."
+)
+
+// CSRFProtector represents a CSRF protector and is used to get the current token and validate the token.
+type CSRFProtector interface {
+ // PrepareForSessionUser prepares the csrf protector for the current session user.
+ PrepareForSessionUser(ctx *Context)
+ // Validate validates the csrf token in http context.
+ Validate(ctx *Context)
+ // DeleteCookie deletes the csrf cookie
+ DeleteCookie(ctx *Context)
+}
+
+type csrfProtector struct {
+ opt CsrfOptions
+ // id must be unique per user.
+ id string
+ // token is the valid one which will be used by the end user and passed via header, cookie, or hidden form value.
+ token string
+}
+
+// CsrfOptions maintains options to manage behavior of Generate.
+type CsrfOptions struct {
+ // The global secret value used to generate Tokens.
+ Secret string
+ // Cookie value used to set and get token.
+ Cookie string
+ // Cookie domain.
+ CookieDomain string
+ // Cookie path.
+ CookiePath string
+ CookieHTTPOnly bool
+ // SameSite sets the cookie SameSite type.
+ SameSite http.SameSite
+ // Set the Secure flag to true on the cookie.
+ Secure bool
+ // sessionKey is the key used for getting the unique ID per user.
+ sessionKey string
+ // oldSessionKey saves old value corresponding to sessionKey.
+ oldSessionKey string
+}
+
+func newCsrfCookie(opt *CsrfOptions, value string) *http.Cookie {
+ return &http.Cookie{
+ Name: opt.Cookie,
+ Value: value,
+ Path: opt.CookiePath,
+ Domain: opt.CookieDomain,
+ MaxAge: int(CsrfTokenTimeout.Seconds()),
+ Secure: opt.Secure,
+ HttpOnly: opt.CookieHTTPOnly,
+ SameSite: opt.SameSite,
+ }
+}
+
+func NewCSRFProtector(opt CsrfOptions) CSRFProtector {
+ if opt.Secret == "" {
+ panic("CSRF secret is empty but it must be set") // it shouldn't happen because it is always set in code
+ }
+ opt.Cookie = util.IfZero(opt.Cookie, "_csrf")
+ opt.CookiePath = util.IfZero(opt.CookiePath, "/")
+ opt.sessionKey = "uid"
+ opt.oldSessionKey = "_old_" + opt.sessionKey
+ return &csrfProtector{opt: opt}
+}
+
+func (c *csrfProtector) PrepareForSessionUser(ctx *Context) {
+ c.id = "0"
+ if uidAny := ctx.Session.Get(c.opt.sessionKey); uidAny != nil {
+ switch uidVal := uidAny.(type) {
+ case string:
+ c.id = uidVal
+ case int64:
+ c.id = strconv.FormatInt(uidVal, 10)
+ default:
+ log.Error("invalid uid type in session: %T", uidAny)
+ }
+ }
+
+ oldUID := ctx.Session.Get(c.opt.oldSessionKey)
+ uidChanged := oldUID == nil || oldUID.(string) != c.id
+ cookieToken := ctx.GetSiteCookie(c.opt.Cookie)
+
+ needsNew := true
+ if uidChanged {
+ _ = ctx.Session.Set(c.opt.oldSessionKey, c.id)
+ } else if cookieToken != "" {
+ // If a cookie token is present, reuse the existing unexpired token, else generate a new one.
+ if issueTime, ok := ParseCsrfToken(cookieToken); ok {
+ dur := time.Since(issueTime) // issueTime is not monotonic; the server clock may have been set back to an earlier time.
+ if dur >= -CsrfTokenRegenerationInterval && dur <= CsrfTokenRegenerationInterval {
+ c.token = cookieToken
+ needsNew = false
+ }
+ }
+ }
+
+ if needsNew {
+ // FIXME: actionId.
+ c.token = GenerateCsrfToken(c.opt.Secret, c.id, "POST", time.Now())
+ cookie := newCsrfCookie(&c.opt, c.token)
+ ctx.Resp.Header().Add("Set-Cookie", cookie.String())
+ }
+
+ ctx.Data["CsrfToken"] = c.token
+ ctx.Data["CsrfTokenHtml"] = template.HTML(`<input type="hidden" name="_csrf" value="` + template.HTMLEscapeString(c.token) + `">`)
+}
+
+func (c *csrfProtector) validateToken(ctx *Context, token string) {
+ if !ValidCsrfToken(token, c.opt.Secret, c.id, "POST", time.Now()) {
+ c.DeleteCookie(ctx)
+ // currently, there should be no access to the APIPath with a CSRF token, because templates shouldn't use the `/api/` endpoints.
+ // FIXME: distinguish what the response is for: HTML (web page) or JSON (fetch)
+ http.Error(ctx.Resp, CsrfErrorString, http.StatusBadRequest)
+ }
+}
+
+// Validate should be used as a per route middleware. It attempts to get a token from an "X-Csrf-Token"
+// HTTP header and then a "_csrf" form value. If one of these is found, the token will be validated.
+// If this validation fails, http.StatusBadRequest is sent.
+func (c *csrfProtector) Validate(ctx *Context) {
+ if token := ctx.Req.Header.Get(CsrfHeaderName); token != "" {
+ c.validateToken(ctx, token)
+ return
+ }
+ if token := ctx.Req.FormValue(CsrfFormName); token != "" {
+ c.validateToken(ctx, token)
+ return
+ }
+ c.validateToken(ctx, "") // no csrf token, use an empty token to respond error
+}
+
+func (c *csrfProtector) DeleteCookie(ctx *Context) {
+ cookie := newCsrfCookie(&c.opt, "")
+ cookie.MaxAge = -1
+ ctx.Resp.Header().Add("Set-Cookie", cookie.String())
+}
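A sketch of driving the protector per request; the option values are placeholders, not Gitea's actual wiring. Note that csrfProtector carries per-request state (id, token), so one instance is constructed for each request:

func csrfHandler(ctx *Context) {
	prot := NewCSRFProtector(CsrfOptions{
		Secret:   "example-secret", // placeholder, normally derived from the server secret
		SameSite: http.SameSiteLaxMode,
		Secure:   true,
	})
	prot.PrepareForSessionUser(ctx) // issue or reuse the token, fill CsrfToken/CsrfTokenHtml
	if ctx.Req.Method == http.MethodPost {
		prot.Validate(ctx) // responds with 400 on a missing or invalid token
	}
}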
diff --git a/services/context/org.go b/services/context/org.go
new file mode 100644
index 0000000..018b76d
--- /dev/null
+++ b/services/context/org.go
@@ -0,0 +1,280 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2020 The Gitea Authors.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "strings"
+
+ "code.gitea.io/gitea/models/organization"
+ "code.gitea.io/gitea/models/perm"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/markup/markdown"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+)
+
+// Organization contains organization context
+type Organization struct {
+ IsOwner bool
+ IsMember bool
+ IsTeamMember bool // Is member of team.
+ IsTeamAdmin bool // In owner team or team that has admin permission level.
+ Organization *organization.Organization
+ OrgLink string
+ CanCreateOrgRepo bool
+ PublicMemberOnly bool // Only display public members
+
+ Team *organization.Team
+ Teams []*organization.Team
+}
+
+func (org *Organization) CanWriteUnit(ctx *Context, unitType unit.Type) bool {
+ return org.Organization.UnitPermission(ctx, ctx.Doer, unitType) >= perm.AccessModeWrite
+}
+
+func (org *Organization) CanReadUnit(ctx *Context, unitType unit.Type) bool {
+ return org.Organization.UnitPermission(ctx, ctx.Doer, unitType) >= perm.AccessModeRead
+}
+
+func GetOrganizationByParams(ctx *Context) {
+ orgName := ctx.Params(":org")
+
+ var err error
+
+ ctx.Org.Organization, err = organization.GetOrgByName(ctx, orgName)
+ if err != nil {
+ if organization.IsErrOrgNotExist(err) {
+ redirectUserID, err := user_model.LookupUserRedirect(ctx, orgName)
+ if err == nil {
+ RedirectToUser(ctx.Base, orgName, redirectUserID)
+ } else if user_model.IsErrUserRedirectNotExist(err) {
+ ctx.NotFound("GetUserByName", err)
+ } else {
+ ctx.ServerError("LookupUserRedirect", err)
+ }
+ } else {
+ ctx.ServerError("GetUserByName", err)
+ }
+ return
+ }
+}
+
+// HandleOrgAssignment handles organization assignment
+func HandleOrgAssignment(ctx *Context, args ...bool) {
+ var (
+ requireMember bool
+ requireOwner bool
+ requireTeamMember bool
+ requireTeamAdmin bool
+ )
+ if len(args) >= 1 {
+ requireMember = args[0]
+ }
+ if len(args) >= 2 {
+ requireOwner = args[1]
+ }
+ if len(args) >= 3 {
+ requireTeamMember = args[2]
+ }
+ if len(args) >= 4 {
+ requireTeamAdmin = args[3]
+ }
+
+ var err error
+
+ if ctx.ContextUser == nil {
+ // if Organization is not defined, get it from params
+ if ctx.Org.Organization == nil {
+ GetOrganizationByParams(ctx)
+ if ctx.Written() {
+ return
+ }
+ }
+ } else if ctx.ContextUser.IsOrganization() {
+ if ctx.Org == nil {
+ ctx.Org = &Organization{}
+ }
+ ctx.Org.Organization = (*organization.Organization)(ctx.ContextUser)
+ } else {
+ // ContextUser is an individual User
+ return
+ }
+
+ org := ctx.Org.Organization
+
+ // Handle Visibility
+ if org.Visibility != structs.VisibleTypePublic && !ctx.IsSigned {
+ // We must be signed in to see limited or private organizations
+ ctx.NotFound("OrgAssignment", err)
+ return
+ }
+
+ if org.Visibility == structs.VisibleTypePrivate {
+ requireMember = true
+ } else if ctx.IsSigned && ctx.Doer.IsRestricted {
+ requireMember = true
+ }
+
+ ctx.ContextUser = org.AsUser()
+ ctx.Data["Org"] = org
+
+ // Admin has super access.
+ if ctx.IsSigned && ctx.Doer.IsAdmin {
+ ctx.Org.IsOwner = true
+ ctx.Org.IsMember = true
+ ctx.Org.IsTeamMember = true
+ ctx.Org.IsTeamAdmin = true
+ ctx.Org.CanCreateOrgRepo = true
+ } else if ctx.IsSigned {
+ ctx.Org.IsOwner, err = org.IsOwnedBy(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("IsOwnedBy", err)
+ return
+ }
+
+ if ctx.Org.IsOwner {
+ ctx.Org.IsMember = true
+ ctx.Org.IsTeamMember = true
+ ctx.Org.IsTeamAdmin = true
+ ctx.Org.CanCreateOrgRepo = true
+ } else {
+ ctx.Org.IsMember, err = org.IsOrgMember(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("IsOrgMember", err)
+ return
+ }
+ ctx.Org.CanCreateOrgRepo, err = org.CanCreateOrgRepo(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("CanCreateOrgRepo", err)
+ return
+ }
+ }
+ } else {
+ // Fake data.
+ ctx.Data["SignedUser"] = &user_model.User{}
+ }
+ if (requireMember && !ctx.Org.IsMember) ||
+ (requireOwner && !ctx.Org.IsOwner) {
+ ctx.NotFound("OrgAssignment", err)
+ return
+ }
+ ctx.Data["IsOrganizationOwner"] = ctx.Org.IsOwner
+ ctx.Data["IsOrganizationMember"] = ctx.Org.IsMember
+ ctx.Data["IsPackageEnabled"] = setting.Packages.Enabled
+ ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled
+ ctx.Data["IsPublicMember"] = func(uid int64) bool {
+ is, _ := organization.IsPublicMembership(ctx, ctx.Org.Organization.ID, uid)
+ return is
+ }
+ ctx.Data["CanCreateOrgRepo"] = ctx.Org.CanCreateOrgRepo
+
+ ctx.Org.OrgLink = org.AsUser().OrganisationLink()
+ ctx.Data["OrgLink"] = ctx.Org.OrgLink
+
+ // Member
+ ctx.Org.PublicMemberOnly = ctx.Doer == nil || !ctx.Org.IsMember && !ctx.Doer.IsAdmin
+ opts := &organization.FindOrgMembersOpts{
+ OrgID: org.ID,
+ PublicOnly: ctx.Org.PublicMemberOnly,
+ }
+ ctx.Data["NumMembers"], err = organization.CountOrgMembers(ctx, opts)
+ if err != nil {
+ ctx.ServerError("CountOrgMembers", err)
+ return
+ }
+
+ // Team.
+ if ctx.Org.IsMember {
+ shouldSeeAllTeams := false
+ if ctx.Org.IsOwner {
+ shouldSeeAllTeams = true
+ } else {
+ teams, err := org.GetUserTeams(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("GetUserTeams", err)
+ return
+ }
+ for _, team := range teams {
+ if team.IncludesAllRepositories && team.AccessMode >= perm.AccessModeAdmin {
+ shouldSeeAllTeams = true
+ break
+ }
+ }
+ }
+ if shouldSeeAllTeams {
+ ctx.Org.Teams, err = org.LoadTeams(ctx)
+ if err != nil {
+ ctx.ServerError("LoadTeams", err)
+ return
+ }
+ } else {
+ ctx.Org.Teams, err = org.GetUserTeams(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("GetUserTeams", err)
+ return
+ }
+ }
+ ctx.Data["NumTeams"] = len(ctx.Org.Teams)
+ }
+
+ teamName := ctx.Params(":team")
+ if len(teamName) > 0 {
+ teamExists := false
+ for _, team := range ctx.Org.Teams {
+ if team.LowerName == strings.ToLower(teamName) {
+ teamExists = true
+ ctx.Org.Team = team
+ ctx.Org.IsTeamMember = true
+ ctx.Data["Team"] = ctx.Org.Team
+ break
+ }
+ }
+
+ if !teamExists {
+ ctx.NotFound("OrgAssignment", err)
+ return
+ }
+
+ ctx.Data["IsTeamMember"] = ctx.Org.IsTeamMember
+ if requireTeamMember && !ctx.Org.IsTeamMember {
+ ctx.NotFound("OrgAssignment", err)
+ return
+ }
+
+ ctx.Org.IsTeamAdmin = ctx.Org.Team.IsOwnerTeam() || ctx.Org.Team.AccessMode >= perm.AccessModeAdmin
+ ctx.Data["IsTeamAdmin"] = ctx.Org.IsTeamAdmin
+ if requireTeamAdmin && !ctx.Org.IsTeamAdmin {
+ ctx.NotFound("OrgAssignment", err)
+ return
+ }
+ }
+ ctx.Data["ContextUser"] = ctx.ContextUser
+
+ ctx.Data["CanReadProjects"] = ctx.Org.CanReadUnit(ctx, unit.TypeProjects)
+ ctx.Data["CanReadPackages"] = ctx.Org.CanReadUnit(ctx, unit.TypePackages)
+ ctx.Data["CanReadCode"] = ctx.Org.CanReadUnit(ctx, unit.TypeCode)
+
+ ctx.Data["IsFollowing"] = ctx.Doer != nil && user_model.IsFollowing(ctx, ctx.Doer.ID, ctx.ContextUser.ID)
+ if len(ctx.ContextUser.Description) != 0 {
+ content, err := markdown.RenderString(&markup.RenderContext{
+ Metas: map[string]string{"mode": "document"},
+ Ctx: ctx,
+ }, ctx.ContextUser.Description)
+ if err != nil {
+ ctx.ServerError("RenderString", err)
+ return
+ }
+ ctx.Data["RenderedDescription"] = content
+ }
+}
+
+// OrgAssignment returns a middleware to handle organization assignment
+func OrgAssignment(args ...bool) func(ctx *Context) {
+ return func(ctx *Context) {
+ HandleOrgAssignment(ctx, args...)
+ }
+}
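The positional booleans map to requireMember, requireOwner, requireTeamMember and requireTeamAdmin, in that order; a hypothetical route sketch (the router m and the handlers are placeholders):

m.Group("/org/{org}", func() {
	m.Get("", OrgAssignment(), orgHome)                          // visibility checks only
	m.Get("/members", OrgAssignment(true), orgMembers)           // requireMember
	m.Get("/settings", OrgAssignment(false, true), orgSettings)  // requireOwner
})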
diff --git a/services/context/package.go b/services/context/package.go
new file mode 100644
index 0000000..c452c65
--- /dev/null
+++ b/services/context/package.go
@@ -0,0 +1,165 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "fmt"
+ "net/http"
+
+ "code.gitea.io/gitea/models/organization"
+ packages_model "code.gitea.io/gitea/models/packages"
+ "code.gitea.io/gitea/models/perm"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/templates"
+)
+
+// Package contains the owner, the access mode and optionally the package descriptor.
+type Package struct {
+ Owner *user_model.User
+ AccessMode perm.AccessMode
+ Descriptor *packages_model.PackageDescriptor
+}
+
+type packageAssignmentCtx struct {
+ *Base
+ Doer *user_model.User
+ ContextUser *user_model.User
+}
+
+// PackageAssignment returns a middleware to handle Context.Package assignment
+func PackageAssignment() func(ctx *Context) {
+ return func(ctx *Context) {
+ errorFn := func(status int, title string, obj any) {
+ err, ok := obj.(error)
+ if !ok {
+ err = fmt.Errorf("%s", obj)
+ }
+ if status == http.StatusNotFound {
+ ctx.NotFound(title, err)
+ } else {
+ ctx.ServerError(title, err)
+ }
+ }
+ paCtx := &packageAssignmentCtx{Base: ctx.Base, Doer: ctx.Doer, ContextUser: ctx.ContextUser}
+ ctx.Package = packageAssignment(paCtx, errorFn)
+ }
+}
+
+// PackageAssignmentAPI returns a middleware to handle Context.Package assignment
+func PackageAssignmentAPI() func(ctx *APIContext) {
+ return func(ctx *APIContext) {
+ paCtx := &packageAssignmentCtx{Base: ctx.Base, Doer: ctx.Doer, ContextUser: ctx.ContextUser}
+ ctx.Package = packageAssignment(paCtx, ctx.Error)
+ }
+}
+
+func packageAssignment(ctx *packageAssignmentCtx, errCb func(int, string, any)) *Package {
+ pkg := &Package{
+ Owner: ctx.ContextUser,
+ }
+ var err error
+ pkg.AccessMode, err = determineAccessMode(ctx.Base, pkg, ctx.Doer)
+ if err != nil {
+ errCb(http.StatusInternalServerError, "determineAccessMode", err)
+ return pkg
+ }
+
+ packageType := ctx.Params("type")
+ name := ctx.Params("name")
+ version := ctx.Params("version")
+ if packageType != "" && name != "" && version != "" {
+ pv, err := packages_model.GetVersionByNameAndVersion(ctx, pkg.Owner.ID, packages_model.Type(packageType), name, version)
+ if err != nil {
+ if err == packages_model.ErrPackageNotExist {
+ errCb(http.StatusNotFound, "GetVersionByNameAndVersion", err)
+ } else {
+ errCb(http.StatusInternalServerError, "GetVersionByNameAndVersion", err)
+ }
+ return pkg
+ }
+
+ pkg.Descriptor, err = packages_model.GetPackageDescriptor(ctx, pv)
+ if err != nil {
+ errCb(http.StatusInternalServerError, "GetPackageDescriptor", err)
+ return pkg
+ }
+ }
+
+ return pkg
+}
+
+func determineAccessMode(ctx *Base, pkg *Package, doer *user_model.User) (perm.AccessMode, error) {
+ if setting.Service.RequireSignInView && (doer == nil || doer.IsGhost()) {
+ return perm.AccessModeNone, nil
+ }
+
+ if doer != nil && !doer.IsGhost() && (!doer.IsActive || doer.ProhibitLogin) {
+ return perm.AccessModeNone, nil
+ }
+
+ // TODO: ActionUser permission check
+ accessMode := perm.AccessModeNone
+ if pkg.Owner.IsOrganization() {
+ org := organization.OrgFromUser(pkg.Owner)
+
+ if doer != nil && !doer.IsGhost() {
+ // 1. If user is logged in, check all team packages permissions
+ var err error
+ accessMode, err = org.GetOrgUserMaxAuthorizeLevel(ctx, doer.ID)
+ if err != nil {
+ return accessMode, err
+ }
+ // If the access mode is less than write, check every team for more permissions.
+ // The minimum possible access mode for org members is read.
+ if accessMode < perm.AccessModeWrite {
+ teams, err := organization.GetUserOrgTeams(ctx, org.ID, doer.ID)
+ if err != nil {
+ return accessMode, err
+ }
+ for _, t := range teams {
+ perm := t.UnitAccessMode(ctx, unit.TypePackages)
+ if accessMode < perm {
+ accessMode = perm
+ }
+ }
+ }
+ }
+ if accessMode == perm.AccessModeNone && organization.HasOrgOrUserVisible(ctx, pkg.Owner, doer) {
+ // 2. If user is unauthorized or no org member, check if org is visible
+ accessMode = perm.AccessModeRead
+ }
+ } else {
+ if doer != nil && !doer.IsGhost() {
+ // 1. Check if user is package owner
+ if doer.ID == pkg.Owner.ID {
+ accessMode = perm.AccessModeOwner
+ } else if pkg.Owner.Visibility == structs.VisibleTypePublic || pkg.Owner.Visibility == structs.VisibleTypeLimited { // 2. Check if package owner is public or limited
+ accessMode = perm.AccessModeRead
+ }
+ } else if pkg.Owner.Visibility == structs.VisibleTypePublic { // 3. Check if package owner is public
+ accessMode = perm.AccessModeRead
+ }
+ }
+
+ return accessMode, nil
+}
+
+// PackageContexter initializes a package context for a request.
+func PackageContexter() func(next http.Handler) http.Handler {
+ renderer := templates.HTMLRenderer()
+ return func(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) {
+ base, baseCleanUp := NewBaseContext(resp, req)
+ defer baseCleanUp()
+
+ // it is still needed when rendering the 500 page in a package handler
+ ctx := NewWebContext(base, renderer, nil)
+ ctx.Base.AppendContextValue(WebContextKey, ctx)
+ next.ServeHTTP(ctx.Resp, ctx.Req)
+ })
+ }
+}
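A hypothetical API route sketch (router m and the handler are placeholders): PackageAssignmentAPI resolves the owner and access mode, and additionally loads the descriptor when the type/name/version params are present:

m.Group("/api/packages/{username}", func() {
	m.Get("/{type}/{name}/{version}", PackageAssignmentAPI(), downloadPackageFile)
})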
diff --git a/services/context/pagination.go b/services/context/pagination.go
new file mode 100644
index 0000000..655a278
--- /dev/null
+++ b/services/context/pagination.go
@@ -0,0 +1,57 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "fmt"
+ "html/template"
+ "net/url"
+ "strings"
+
+ "code.gitea.io/gitea/modules/paginator"
+)
+
+// Pagination wraps paginator.Paginator and carries additional configuration for the link params used in rendering.
+type Pagination struct {
+ Paginater *paginator.Paginator
+ urlParams []string
+}
+
+// NewPagination creates a new instance of the Pagination struct.
+// "pagingNum" is "page size" or "limit", "current" is "page"
+func NewPagination(total, pagingNum, current, numPages int) *Pagination {
+ p := &Pagination{}
+ p.Paginater = paginator.New(total, pagingNum, current, numPages)
+ return p
+}
+
+// AddParam adds a value from context identified by ctxKey as link param under a given paramKey
+func (p *Pagination) AddParam(ctx *Context, paramKey, ctxKey string) {
+ _, exists := ctx.Data[ctxKey]
+ if !exists {
+ return
+ }
+ paramData := fmt.Sprintf("%v", ctx.Data[ctxKey]) // convert any value to string
+ urlParam := fmt.Sprintf("%s=%v", url.QueryEscape(paramKey), url.QueryEscape(paramData))
+ p.urlParams = append(p.urlParams, urlParam)
+}
+
+// AddParamString adds a string parameter directly
+func (p *Pagination) AddParamString(key, value string) {
+ urlParam := fmt.Sprintf("%s=%v", url.QueryEscape(key), url.QueryEscape(value))
+ p.urlParams = append(p.urlParams, urlParam)
+}
+
+// GetParams returns the configured URL params
+func (p *Pagination) GetParams() template.URL {
+ return template.URL(strings.Join(p.urlParams, "&"))
+}
+
+// SetDefaultParams sets common pagination params that are often used
+func (p *Pagination) SetDefaultParams(ctx *Context) {
+ p.AddParam(ctx, "sort", "SortType")
+ p.AddParam(ctx, "q", "Keyword")
+ // do not add any more uncommon params here!
+ p.AddParam(ctx, "fuzzy", "IsFuzzy")
+}
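A minimal sketch of a list handler driving the paginator; total, pageSize and curPage are placeholders:

page := NewPagination(total, pageSize, curPage, 5) // 5 = number of page links to render
page.SetDefaultParams(ctx)                         // carry sort/q/fuzzy through the page links
page.AddParamString("state", "open")               // extra fixed param (hypothetical)
ctx.Data["Page"] = page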
diff --git a/services/context/permission.go b/services/context/permission.go
new file mode 100644
index 0000000..14a9801
--- /dev/null
+++ b/services/context/permission.go
@@ -0,0 +1,149 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "net/http"
+
+ auth_model "code.gitea.io/gitea/models/auth"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/log"
+)
+
+// RequireRepoAdmin returns a middleware for requiring repository admin permission
+func RequireRepoAdmin() func(ctx *Context) {
+ return func(ctx *Context) {
+ if !ctx.IsSigned || !ctx.Repo.IsAdmin() {
+ ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
+ return
+ }
+ }
+}
+
+// RequireRepoWriter returns a middleware requiring repository write permission for the specified unitType.
+func RequireRepoWriter(unitType unit.Type) func(ctx *Context) {
+ return func(ctx *Context) {
+ if !ctx.Repo.CanWrite(unitType) {
+ ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
+ return
+ }
+ }
+}
+
+// CanEnableEditor checks if the user is allowed to write to the branch of the repo
+func CanEnableEditor() func(ctx *Context) {
+ return func(ctx *Context) {
+ if !ctx.Repo.CanWriteToBranch(ctx, ctx.Doer, ctx.Repo.BranchName) {
+ ctx.NotFound("CanWriteToBranch denies permission", nil)
+ return
+ }
+ }
+}
+
+// RequireRepoWriterOr returns a middleware requiring repository write permission for at least one of the given unit types.
+func RequireRepoWriterOr(unitTypes ...unit.Type) func(ctx *Context) {
+ return func(ctx *Context) {
+ for _, unitType := range unitTypes {
+ if ctx.Repo.CanWrite(unitType) {
+ return
+ }
+ }
+ ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
+ }
+}
+
+// RequireRepoReader returns a middleware requiring repository read permission for the specified unitType.
+func RequireRepoReader(unitType unit.Type) func(ctx *Context) {
+ return func(ctx *Context) {
+ if !ctx.Repo.CanRead(unitType) {
+ if log.IsTrace() {
+ if ctx.IsSigned {
+ log.Trace("Permission Denied: User %-v cannot read %-v in Repo %-v\n"+
+ "User in Repo has Permissions: %-+v",
+ ctx.Doer,
+ unitType,
+ ctx.Repo.Repository,
+ ctx.Repo.Permission)
+ } else {
+ log.Trace("Permission Denied: Anonymous user cannot read %-v in Repo %-v\n"+
+ "Anonymous user in Repo has Permissions: %-+v",
+ unitType,
+ ctx.Repo.Repository,
+ ctx.Repo.Permission)
+ }
+ }
+ ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
+ return
+ }
+ }
+}
+
+// RequireRepoReaderOr returns a middleware requiring repository read permission for at least one of the given unit types.
+func RequireRepoReaderOr(unitTypes ...unit.Type) func(ctx *Context) {
+ return func(ctx *Context) {
+ for _, unitType := range unitTypes {
+ if ctx.Repo.CanRead(unitType) {
+ return
+ }
+ }
+ if log.IsTrace() {
+ var format string
+ var args []any
+ if ctx.IsSigned {
+ format = "Permission Denied: User %-v cannot read ["
+ args = append(args, ctx.Doer)
+ } else {
+ format = "Permission Denied: Anonymous user cannot read ["
+ }
+ for _, unit := range unitTypes {
+ format += "%-v, "
+ args = append(args, unit)
+ }
+
+ format = format[:len(format)-2] + "] in Repo %-v\n" +
+ "User in Repo has Permissions: %-+v"
+ args = append(args, ctx.Repo.Repository, ctx.Repo.Permission)
+ log.Trace(format, args...)
+ }
+ ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
+ }
+}
+
+// CheckRepoScopedToken checks whether the personal access token has the repo scope.
+func CheckRepoScopedToken(ctx *Context, repo *repo_model.Repository, level auth_model.AccessTokenScopeLevel) {
+ if !ctx.IsBasicAuth || ctx.Data["IsApiToken"] != true {
+ return
+ }
+
+ scope, ok := ctx.Data["ApiTokenScope"].(auth_model.AccessTokenScope)
+ if ok { // it's a personal access token, not an oauth2 token
+ var scopeMatched bool
+
+ requiredScopes := auth_model.GetRequiredScopes(level, auth_model.AccessTokenScopeCategoryRepository)
+
+ // check if scope only applies to public resources
+ publicOnly, err := scope.PublicOnly()
+ if err != nil {
+ ctx.ServerError("HasScope", err)
+ return
+ }
+
+ if publicOnly && repo.IsPrivate {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ scopeMatched, err = scope.HasScope(requiredScopes...)
+ if err != nil {
+ ctx.ServerError("HasScope", err)
+ return
+ }
+
+ if !scopeMatched {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+ }
+}
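A hypothetical wiring of these middlewares on repo routes (m and the handlers are placeholders); note that a request failing the check gets a 404 rather than a 403, hiding the resource's existence:

m.Group("/{username}/{reponame}", func() {
	m.Get("/issues", RequireRepoReaderOr(unit.TypeIssues, unit.TypePullRequests), listIssues)
	m.Post("/issues/new", RequireRepoWriter(unit.TypeIssues), createIssue)
	m.Get("/settings", RequireRepoAdmin(), repoSettings)
})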
diff --git a/services/context/private.go b/services/context/private.go
new file mode 100644
index 0000000..8b41949
--- /dev/null
+++ b/services/context/private.go
@@ -0,0 +1,85 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+ "time"
+
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/process"
+ "code.gitea.io/gitea/modules/web"
+ web_types "code.gitea.io/gitea/modules/web/types"
+)
+
+// PrivateContext represents a context for private routes
+type PrivateContext struct {
+ *Base
+ Override context.Context
+
+ Repo *Repository
+}
+
+func init() {
+ web.RegisterResponseStatusProvider[*PrivateContext](func(req *http.Request) web_types.ResponseStatusProvider {
+ return req.Context().Value(privateContextKey).(*PrivateContext)
+ })
+}
+
+// Deadline is part of the interface for context.Context and we pass this to the request context
+func (ctx *PrivateContext) Deadline() (deadline time.Time, ok bool) {
+ if ctx.Override != nil {
+ return ctx.Override.Deadline()
+ }
+ return ctx.Base.Deadline()
+}
+
+// Done is part of the interface for context.Context and we pass this to the request context
+func (ctx *PrivateContext) Done() <-chan struct{} {
+ if ctx.Override != nil {
+ return ctx.Override.Done()
+ }
+ return ctx.Base.Done()
+}
+
+// Err is part of the interface for context.Context and we pass this to the request context
+func (ctx *PrivateContext) Err() error {
+ if ctx.Override != nil {
+ return ctx.Override.Err()
+ }
+ return ctx.Base.Err()
+}
+
+var privateContextKey any = "default_private_context"
+
+// GetPrivateContext returns a context for Private routes
+func GetPrivateContext(req *http.Request) *PrivateContext {
+ return req.Context().Value(privateContextKey).(*PrivateContext)
+}
+
+// PrivateContexter returns a middleware that wraps the request in a PrivateContext.
+func PrivateContexter() func(http.Handler) http.Handler {
+ return func(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
+ base, baseCleanUp := NewBaseContext(w, req)
+ ctx := &PrivateContext{Base: base}
+ defer baseCleanUp()
+ ctx.Base.AppendContextValue(privateContextKey, ctx)
+
+ next.ServeHTTP(ctx.Resp, ctx.Req)
+ })
+ }
+}
+
+// OverrideContext overrides the underlying request context for Done() etc.
+// This function should be used when work needs to continue even if the request has been cancelled.
+// Primarily this affects hook/post-receive and hook/proc-receive, both of which need to keep working even if
+// the underlying request has timed out from the ssh/http push.
+func OverrideContext(ctx *PrivateContext) (cancel context.CancelFunc) {
+ // We now need to override the request context as the base for our work because even if the request is cancelled we have to continue this work
+ ctx.Override, _, cancel = process.GetManager().AddTypedContext(graceful.GetManager().HammerContext(), fmt.Sprintf("PrivateContext: %s", ctx.Req.RequestURI), process.RequestProcessType, true)
+ return cancel
+}
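A sketch of the intended pattern in a hook handler (handler and worker names are hypothetical): after OverrideContext, the context's Done/Err/Deadline come from the hammer context, so the work survives request cancellation:

func hookPostReceive(ctx *PrivateContext) {
	cancel := OverrideContext(ctx)
	defer cancel()
	// from here on, ctx (as a context.Context) outlives the original request
	doLongRunningWork(ctx) // placeholder for the actual hook processing
}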
diff --git a/services/context/quota.go b/services/context/quota.go
new file mode 100644
index 0000000..94e8847
--- /dev/null
+++ b/services/context/quota.go
@@ -0,0 +1,200 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "context"
+ "net/http"
+ "strings"
+
+ quota_model "code.gitea.io/gitea/models/quota"
+ "code.gitea.io/gitea/modules/base"
+)
+
+type QuotaTargetType int
+
+const (
+ QuotaTargetUser QuotaTargetType = iota
+ QuotaTargetRepo
+ QuotaTargetOrg
+)
+
+// QuotaExceeded
+// swagger:response quotaExceeded
+type APIQuotaExceeded struct {
+ Message string `json:"message"`
+ UserID int64 `json:"user_id"`
+ UserName string `json:"username,omitempty"`
+}
+
+// QuotaGroupAssignmentAPI returns a middleware to handle context-quota-group assignment for api routes
+func QuotaGroupAssignmentAPI() func(ctx *APIContext) {
+ return func(ctx *APIContext) {
+ groupName := ctx.Params("quotagroup")
+ group, err := quota_model.GetGroupByName(ctx, groupName)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, "quota_model.GetGroupByName", err)
+ return
+ }
+ if group == nil {
+ ctx.NotFound()
+ return
+ }
+ ctx.QuotaGroup = group
+ }
+}
+
+// QuotaRuleAssignmentAPI returns a middleware to handle context-quota-rule assignment for api routes
+func QuotaRuleAssignmentAPI() func(ctx *APIContext) {
+ return func(ctx *APIContext) {
+ ruleName := ctx.Params("quotarule")
+ rule, err := quota_model.GetRuleByName(ctx, ruleName)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, "quota_model.GetRuleByName", err)
+ return
+ }
+ if rule == nil {
+ ctx.NotFound()
+ return
+ }
+ ctx.QuotaRule = rule
+ }
+}
+
+// CheckQuota checks whether the user in question is within quota limits (web context).
+func (ctx *Context) CheckQuota(subject quota_model.LimitSubject, userID int64, username string) bool {
+ ok, err := checkQuota(ctx.Base.originCtx, subject, userID, username, func(userID int64, username string) {
+ showHTML := false
+ for _, part := range ctx.Req.Header["Accept"] {
+ if strings.Contains(part, "text/html") {
+ showHTML = true
+ break
+ }
+ }
+ if !showHTML {
+ ctx.plainTextInternal(3, http.StatusRequestEntityTooLarge, []byte("Quota exceeded.\n"))
+ return
+ }
+
+ ctx.Data["IsRepo"] = ctx.Repo.Repository != nil
+ ctx.Data["Title"] = "Quota Exceeded"
+ ctx.HTML(http.StatusRequestEntityTooLarge, base.TplName("status/413"))
+ }, func(err error) {
+ ctx.Error(http.StatusInternalServerError, "quota_model.EvaluateForUser")
+ })
+ if err != nil {
+ return false
+ }
+ return ok
+}
+
+// CheckQuota checks whether the user in question is within quota limits (API context).
+func (ctx *APIContext) CheckQuota(subject quota_model.LimitSubject, userID int64, username string) bool {
+ ok, err := checkQuota(ctx.Base.originCtx, subject, userID, username, func(userID int64, username string) {
+ ctx.JSON(http.StatusRequestEntityTooLarge, APIQuotaExceeded{
+ Message: "quota exceeded",
+ UserID: userID,
+ UserName: username,
+ })
+ }, func(err error) {
+ ctx.InternalServerError(err)
+ })
+ if err != nil {
+ return false
+ }
+ return ok
+}
+
+// EnforceQuotaWeb returns a middleware that enforces quota limits on the given web route.
+func EnforceQuotaWeb(subject quota_model.LimitSubject, target QuotaTargetType) func(ctx *Context) {
+ return func(ctx *Context) {
+ ctx.CheckQuota(subject, target.UserID(ctx), target.UserName(ctx))
+ }
+}
+
+// EnforceQuotaAPI returns a middleware that enforces quota limits on the given API route.
+func EnforceQuotaAPI(subject quota_model.LimitSubject, target QuotaTargetType) func(ctx *APIContext) {
+ return func(ctx *APIContext) {
+ ctx.CheckQuota(subject, target.UserID(ctx), target.UserName(ctx))
+ }
+}
+
+// checkQuota wraps quota checking into a single function
+func checkQuota(ctx context.Context, subject quota_model.LimitSubject, userID int64, username string, quotaExceededHandler func(userID int64, username string), errorHandler func(err error)) (bool, error) {
+ ok, err := quota_model.EvaluateForUser(ctx, userID, subject)
+ if err != nil {
+ errorHandler(err)
+ return false, err
+ }
+ if !ok {
+ quotaExceededHandler(userID, username)
+ return false, nil
+ }
+ return true, nil
+}
+
+type QuotaContext interface {
+ GetQuotaTargetUserID(target QuotaTargetType) int64
+ GetQuotaTargetUserName(target QuotaTargetType) string
+}
+
+func (ctx *Context) GetQuotaTargetUserID(target QuotaTargetType) int64 {
+ switch target {
+ case QuotaTargetUser:
+ return ctx.Doer.ID
+ case QuotaTargetRepo:
+ return ctx.Repo.Repository.OwnerID
+ case QuotaTargetOrg:
+ return ctx.Org.Organization.ID
+ default:
+ return 0
+ }
+}
+
+func (ctx *Context) GetQuotaTargetUserName(target QuotaTargetType) string {
+ switch target {
+ case QuotaTargetUser:
+ return ctx.Doer.Name
+ case QuotaTargetRepo:
+ return ctx.Repo.Repository.Owner.Name
+ case QuotaTargetOrg:
+ return ctx.Org.Organization.Name
+ default:
+ return ""
+ }
+}
+
+func (ctx *APIContext) GetQuotaTargetUserID(target QuotaTargetType) int64 {
+ switch target {
+ case QuotaTargetUser:
+ return ctx.Doer.ID
+ case QuotaTargetRepo:
+ return ctx.Repo.Repository.OwnerID
+ case QuotaTargetOrg:
+ return ctx.Org.Organization.ID
+ default:
+ return 0
+ }
+}
+
+func (ctx *APIContext) GetQuotaTargetUserName(target QuotaTargetType) string {
+ switch target {
+ case QuotaTargetUser:
+ return ctx.Doer.Name
+ case QuotaTargetRepo:
+ return ctx.Repo.Repository.Owner.Name
+ case QuotaTargetOrg:
+ return ctx.Org.Organization.Name
+ default:
+ return ""
+ }
+}
+
+func (target QuotaTargetType) UserID(ctx QuotaContext) int64 {
+ return ctx.GetQuotaTargetUserID(target)
+}
+
+func (target QuotaTargetType) UserName(ctx QuotaContext) string {
+ return ctx.GetQuotaTargetUserName(target)
+}
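A hypothetical wiring sketch (router, handler and limit subject are illustrative): on an exceeded quota, the API middleware answers 413 with the APIQuotaExceeded JSON body above, while the web middleware renders status/413:

m.Post("/repos/{username}/{reponame}/transfer",
	EnforceQuotaAPI(quota_model.LimitSubjectSizeReposAll, QuotaTargetRepo), // subject is illustrative
	transferHandler) // placeholder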
diff --git a/services/context/repo.go b/services/context/repo.go
new file mode 100644
index 0000000..d2cee08
--- /dev/null
+++ b/services/context/repo.go
@@ -0,0 +1,1112 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "html"
+ "net/http"
+ "net/url"
+ "path"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ packages_model "code.gitea.io/gitea/models/packages"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ unit_model "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/cache"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ code_indexer "code.gitea.io/gitea/modules/indexer/code"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ asymkey_service "code.gitea.io/gitea/services/asymkey"
+
+ "github.com/editorconfig/editorconfig-core-go/v2"
+)
+
+// PullRequest contains information to make a pull request
+type PullRequest struct {
+ BaseRepo *repo_model.Repository
+ Allowed bool
+ SameRepo bool
+ HeadInfoSubURL string // [<user>:]<branch> url segment
+}
+
+// Repository contains information to operate a repository
+type Repository struct {
+ access_model.Permission
+ IsWatching bool
+ IsViewBranch bool
+ IsViewTag bool
+ IsViewCommit bool
+ Repository *repo_model.Repository
+ Owner *user_model.User
+ Commit *git.Commit
+ Tag *git.Tag
+ GitRepo *git.Repository
+ RefName string
+ BranchName string
+ TagName string
+ TreePath string
+ CommitID string
+ RepoLink string
+ CloneLink repo_model.CloneLink
+ CommitsCount int64
+
+ PullRequest *PullRequest
+}
+
+// CanWriteToBranch checks if the branch is writable by the user
+func (r *Repository) CanWriteToBranch(ctx context.Context, user *user_model.User, branch string) bool {
+ return issues_model.CanMaintainerWriteToBranch(ctx, r.Permission, branch, user)
+}
+
+// CanEnableEditor returns true if repository is editable and user has proper access level.
+func (r *Repository) CanEnableEditor(ctx context.Context, user *user_model.User) bool {
+ return r.IsViewBranch && r.CanWriteToBranch(ctx, user, r.BranchName) && r.Repository.CanEnableEditor() && !r.Repository.IsArchived
+}
+
+// CanCreateBranch returns true if repository is editable and user has proper access level.
+func (r *Repository) CanCreateBranch() bool {
+ return r.Permission.CanWrite(unit_model.TypeCode) && r.Repository.CanCreateBranch()
+}
+
+func (r *Repository) GetObjectFormat() git.ObjectFormat {
+ return git.ObjectFormatFromName(r.Repository.ObjectFormatName)
+}
+
+// RepoMustNotBeArchived checks if a repo is archived
+func RepoMustNotBeArchived() func(ctx *Context) {
+ return func(ctx *Context) {
+ if ctx.Repo.Repository.IsArchived {
+ ctx.NotFound("IsArchived", errors.New(ctx.Locale.TrString("repo.archive.title")))
+ }
+ }
+}
+
+// CanCommitToBranchResults represents the results of CanCommitToBranch
+type CanCommitToBranchResults struct {
+ CanCommitToBranch bool
+ EditorEnabled bool
+ UserCanPush bool
+ RequireSigned bool
+ WillSign bool
+ SigningKey string
+ WontSignReason string
+}
+
+// CanCommitToBranch returns true if the repository is editable, the user has the
+// proper access level, and the branch is not protected for push.
+func (r *Repository) CanCommitToBranch(ctx context.Context, doer *user_model.User) (CanCommitToBranchResults, error) {
+ protectedBranch, err := git_model.GetFirstMatchProtectedBranchRule(ctx, r.Repository.ID, r.BranchName)
+ if err != nil {
+ return CanCommitToBranchResults{}, err
+ }
+ userCanPush := true
+ requireSigned := false
+ if protectedBranch != nil {
+ protectedBranch.Repo = r.Repository
+ userCanPush = protectedBranch.CanUserPush(ctx, doer)
+ requireSigned = protectedBranch.RequireSignedCommits
+ }
+
+ sign, keyID, _, err := asymkey_service.SignCRUDAction(ctx, r.Repository.RepoPath(), doer, r.Repository.RepoPath(), git.BranchPrefix+r.BranchName)
+
+ canCommit := r.CanEnableEditor(ctx, doer) && userCanPush
+ if requireSigned {
+ canCommit = canCommit && sign
+ }
+ wontSignReason := ""
+ if err != nil {
+ if asymkey_service.IsErrWontSign(err) {
+ wontSignReason = string(err.(*asymkey_service.ErrWontSign).Reason)
+ err = nil
+ } else {
+ wontSignReason = "error"
+ }
+ }
+
+ return CanCommitToBranchResults{
+ CanCommitToBranch: canCommit,
+ EditorEnabled: r.CanEnableEditor(ctx, doer),
+ UserCanPush: userCanPush,
+ RequireSigned: requireSigned,
+ WillSign: sign,
+ SigningKey: keyID,
+ WontSignReason: wontSignReason,
+ }, err
+}
+
+// CanUseTimetracker returns whether or not a user can use the timetracker.
+func (r *Repository) CanUseTimetracker(ctx context.Context, issue *issues_model.Issue, user *user_model.User) bool {
+ // Checking for the following:
+ // 1. Is the timetracker enabled?
+ // 2. Is the user a contributor, admin, poster or assignee, and do the repository policies require this?
+ isAssigned, _ := issues_model.IsUserAssignedToIssue(ctx, issue, user)
+ return r.Repository.IsTimetrackerEnabled(ctx) && (!r.Repository.AllowOnlyContributorsToTrackTime(ctx) ||
+ r.Permission.CanWriteIssuesOrPulls(issue.IsPull) || issue.IsPoster(user.ID) || isAssigned)
+}
+
+// CanCreateIssueDependencies returns whether or not a user can create dependencies.
+func (r *Repository) CanCreateIssueDependencies(ctx context.Context, user *user_model.User, isPull bool) bool {
+ return r.Repository.IsDependenciesEnabled(ctx) && r.Permission.CanWriteIssuesOrPulls(isPull)
+}
+
+// GetCommitsCount returns cached commit count for current view
+func (r *Repository) GetCommitsCount() (int64, error) {
+ if r.Commit == nil {
+ return 0, nil
+ }
+ var contextName string
+ if r.IsViewBranch {
+ contextName = r.BranchName
+ } else if r.IsViewTag {
+ contextName = r.TagName
+ } else {
+ contextName = r.CommitID
+ }
+ return cache.GetInt64(r.Repository.GetCommitsCountCacheKey(contextName, r.IsViewBranch || r.IsViewTag), func() (int64, error) {
+ return r.Commit.CommitsCount()
+ })
+}
+
+// GetCommitGraphsCount returns cached commit count for current view
+func (r *Repository) GetCommitGraphsCount(ctx context.Context, hidePRRefs bool, branches, files []string) (int64, error) {
+ cacheKey := fmt.Sprintf("commits-count-%d-graph-%t-%s-%s", r.Repository.ID, hidePRRefs, branches, files)
+
+ return cache.GetInt64(cacheKey, func() (int64, error) {
+ if len(branches) == 0 {
+ return git.AllCommitsCount(ctx, r.Repository.RepoPath(), hidePRRefs, files...)
+ }
+ return git.CommitsCount(ctx,
+ git.CommitsCountOptions{
+ RepoPath: r.Repository.RepoPath(),
+ Revision: branches,
+ RelPath: files,
+ })
+ })
+}
+
+// BranchNameSubURL sub-URL for the BranchName field
+func (r *Repository) BranchNameSubURL() string {
+ switch {
+ case r.IsViewBranch:
+ return "branch/" + util.PathEscapeSegments(r.BranchName)
+ case r.IsViewTag:
+ return "tag/" + util.PathEscapeSegments(r.TagName)
+ case r.IsViewCommit:
+ return "commit/" + util.PathEscapeSegments(r.CommitID)
+ }
+ log.Error("Unknown view type for repo: %v", r)
+ return ""
+}
+
+// FileExists returns true if a file exists in the given repo branch
+func (r *Repository) FileExists(path, branch string) (bool, error) {
+ if branch == "" {
+ branch = r.Repository.DefaultBranch
+ }
+ commit, err := r.GitRepo.GetBranchCommit(branch)
+ if err != nil {
+ return false, err
+ }
+ if _, err := commit.GetTreeEntryByPath(path); err != nil {
+ return false, err
+ }
+ return true, nil
+}
+
+// GetEditorconfig returns the .editorconfig definition if found in the
+// HEAD of the default repo branch.
+func (r *Repository) GetEditorconfig(optCommit ...*git.Commit) (cfg *editorconfig.Editorconfig, warning, err error) {
+ if r.GitRepo == nil {
+ return nil, nil, nil
+ }
+
+ var commit *git.Commit
+
+ if len(optCommit) != 0 {
+ commit = optCommit[0]
+ } else {
+ commit, err = r.GitRepo.GetBranchCommit(r.Repository.DefaultBranch)
+ if err != nil {
+ return nil, nil, err
+ }
+ }
+ treeEntry, err := commit.GetTreeEntryByPath(".editorconfig")
+ if err != nil {
+ return nil, nil, err
+ }
+ if treeEntry.Blob().Size() >= setting.UI.MaxDisplayFileSize {
+ return nil, nil, git.ErrNotExist{ID: "", RelPath: ".editorconfig"}
+ }
+ reader, err := treeEntry.Blob().DataAsync()
+ if err != nil {
+ return nil, nil, err
+ }
+ defer reader.Close()
+ return editorconfig.ParseGraceful(reader)
+}
+
+// RetrieveBaseRepo retrieves base repository
+func RetrieveBaseRepo(ctx *Context, repo *repo_model.Repository) {
+ // A non-fork repository will not return an error from this method.
+ if err := repo.GetBaseRepo(ctx); err != nil {
+ if repo_model.IsErrRepoNotExist(err) {
+ repo.IsFork = false
+ repo.ForkID = 0
+ return
+ }
+ ctx.ServerError("GetBaseRepo", err)
+ return
+ } else if err = repo.BaseRepo.LoadOwner(ctx); err != nil {
+ ctx.ServerError("BaseRepo.LoadOwner", err)
+ return
+ }
+}
+
+// RetrieveTemplateRepo retrieves template repository used to generate this repository
+func RetrieveTemplateRepo(ctx *Context, repo *repo_model.Repository) {
+ // A non-generated repository will not return an error from this method.
+ templateRepo, err := repo_model.GetTemplateRepo(ctx, repo)
+ if err != nil {
+ if repo_model.IsErrRepoNotExist(err) {
+ repo.TemplateID = 0
+ return
+ }
+ ctx.ServerError("GetTemplateRepo", err)
+ return
+ } else if err = templateRepo.LoadOwner(ctx); err != nil {
+ ctx.ServerError("TemplateRepo.LoadOwner", err)
+ return
+ }
+
+ perm, err := access_model.GetUserRepoPermission(ctx, templateRepo, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return
+ }
+
+ if !perm.CanRead(unit_model.TypeCode) {
+ repo.TemplateID = 0
+ }
+}
+
+// ComposeGoGetImport returns go-get-import meta content.
+func ComposeGoGetImport(owner, repo string) string {
+ // setting.AppURL is guaranteed to be parsable as a URL
+ appURL, _ := url.Parse(setting.AppURL)
+
+ return path.Join(appURL.Host, setting.AppSubURL, url.PathEscape(owner), url.PathEscape(repo))
+}
+
+// EarlyResponseForGoGetMeta responds with the appropriate go-get meta and status 200
+// if the user does not have actual access to the requested repository,
+// or the owner or repository does not exist at all.
+// This is in particular a workaround for the "go get" command, which does not
+// respect the .netrc file.
+func EarlyResponseForGoGetMeta(ctx *Context) {
+ username := ctx.Params(":username")
+ reponame := strings.TrimSuffix(ctx.Params(":reponame"), ".git")
+ if username == "" || reponame == "" {
+ ctx.PlainText(http.StatusBadRequest, "invalid repository path")
+ return
+ }
+
+ var cloneURL string
+ if setting.Repository.GoGetCloneURLProtocol == "ssh" {
+ cloneURL = repo_model.ComposeSSHCloneURL(username, reponame)
+ } else {
+ cloneURL = repo_model.ComposeHTTPSCloneURL(username, reponame)
+ }
+ goImportContent := fmt.Sprintf("%s git %s", ComposeGoGetImport(username, reponame), cloneURL)
+ htmlMeta := fmt.Sprintf(`<meta name="go-import" content="%s">`, html.EscapeString(goImportContent))
+ ctx.PlainText(http.StatusOK, htmlMeta)
+}
+
+// RedirectToRepo redirects to a differently-named repository.
+func RedirectToRepo(ctx *Base, redirectRepoID int64) {
+ ownerName := ctx.Params(":username")
+ previousRepoName := ctx.Params(":reponame")
+
+ repo, err := repo_model.GetRepositoryByID(ctx, redirectRepoID)
+ if err != nil {
+ log.Error("GetRepositoryByID: %v", err)
+ ctx.Error(http.StatusInternalServerError, "GetRepositoryByID")
+ return
+ }
+
+ redirectPath := strings.Replace(
+ ctx.Req.URL.EscapedPath(),
+ url.PathEscape(ownerName)+"/"+url.PathEscape(previousRepoName),
+ url.PathEscape(repo.OwnerName)+"/"+url.PathEscape(repo.Name),
+ 1,
+ )
+ if ctx.Req.URL.RawQuery != "" {
+ redirectPath += "?" + ctx.Req.URL.RawQuery
+ }
+ ctx.Redirect(path.Join(setting.AppSubURL, redirectPath), http.StatusTemporaryRedirect)
+}
+
+func repoAssignment(ctx *Context, repo *repo_model.Repository) {
+ var err error
+ if err = repo.LoadOwner(ctx); err != nil {
+ ctx.ServerError("LoadOwner", err)
+ return
+ }
+
+ ctx.Repo.Permission, err = access_model.GetUserRepoPermission(ctx, repo, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return
+ }
+
+ // Check access.
+ if !ctx.Repo.Permission.HasAccess() {
+ if ctx.FormString("go-get") == "1" {
+ EarlyResponseForGoGetMeta(ctx)
+ return
+ }
+ ctx.NotFound("no access right", nil)
+ return
+ }
+ ctx.Data["HasAccess"] = true
+ ctx.Data["Permission"] = &ctx.Repo.Permission
+
+ followingRepoList, err := repo_model.FindFollowingReposByRepoID(ctx, repo.ID)
+ if err == nil {
+ followingRepoString := ""
+ for idx, followingRepo := range followingRepoList {
+ if idx > 0 {
+ followingRepoString += ";"
+ }
+ followingRepoString += followingRepo.URI
+ }
+ ctx.Data["FollowingRepos"] = followingRepoString
+ } else if err != repo_model.ErrMirrorNotExist {
+ ctx.ServerError("FindFollowingRepoByRepoID", err)
+ return
+ }
+
+ if repo.IsMirror {
+ pullMirror, err := repo_model.GetMirrorByRepoID(ctx, repo.ID)
+ if err == nil {
+ ctx.Data["PullMirror"] = pullMirror
+ } else if err != repo_model.ErrMirrorNotExist {
+ ctx.ServerError("GetMirrorByRepoID", err)
+ return
+ }
+ }
+
+ pushMirrors, _, err := repo_model.GetPushMirrorsByRepoID(ctx, repo.ID, db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("GetPushMirrorsByRepoID", err)
+ return
+ }
+
+ ctx.Repo.Repository = repo
+ ctx.Data["PushMirrors"] = pushMirrors
+ ctx.Data["RepoName"] = ctx.Repo.Repository.Name
+ ctx.Data["IsEmptyRepo"] = ctx.Repo.Repository.IsEmpty
+ ctx.Data["DefaultWikiBranchName"] = setting.Repository.DefaultBranch
+}
+
+// RepoIDAssignment returns a handler which assigns the repo to the context.
+func RepoIDAssignment() func(ctx *Context) {
+ return func(ctx *Context) {
+ repoID := ctx.ParamsInt64(":repoid")
+
+ // Get repository.
+ repo, err := repo_model.GetRepositoryByID(ctx, repoID)
+ if err != nil {
+ if repo_model.IsErrRepoNotExist(err) {
+ ctx.NotFound("GetRepositoryByID", nil)
+ } else {
+ ctx.ServerError("GetRepositoryByID", err)
+ }
+ return
+ }
+
+ repoAssignment(ctx, repo)
+ }
+}
+
+// RepoAssignment returns a middleware to handle repository assignment
+func RepoAssignment(ctx *Context) context.CancelFunc {
+ if _, repoAssignmentOnce := ctx.Data["repoAssignmentExecuted"]; repoAssignmentOnce {
+ log.Trace("RepoAssignment was exec already, skipping second call ...")
+ return nil
+ }
+ ctx.Data["repoAssignmentExecuted"] = true
+
+ var (
+ owner *user_model.User
+ err error
+ )
+
+ userName := ctx.Params(":username")
+ repoName := ctx.Params(":reponame")
+ repoName = strings.TrimSuffix(repoName, ".git")
+ if setting.Other.EnableFeed {
+ repoName = strings.TrimSuffix(repoName, ".rss")
+ repoName = strings.TrimSuffix(repoName, ".atom")
+ }
+
+ // Check if the user is the same as the repository owner
+ if ctx.IsSigned && ctx.Doer.LowerName == strings.ToLower(userName) {
+ owner = ctx.Doer
+ } else {
+ owner, err = user_model.GetUserByName(ctx, userName)
+ if err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ // go-get does not support redirects
+ // https://github.com/golang/go/issues/19760
+ if ctx.FormString("go-get") == "1" {
+ EarlyResponseForGoGetMeta(ctx)
+ return nil
+ }
+
+ if redirectUserID, err := user_model.LookupUserRedirect(ctx, userName); err == nil {
+ RedirectToUser(ctx.Base, userName, redirectUserID)
+ } else if user_model.IsErrUserRedirectNotExist(err) {
+ ctx.NotFound("GetUserByName", nil)
+ } else {
+ ctx.ServerError("LookupUserRedirect", err)
+ }
+ } else {
+ ctx.ServerError("GetUserByName", err)
+ }
+ return nil
+ }
+ }
+ ctx.Repo.Owner = owner
+ ctx.ContextUser = owner
+ ctx.Data["ContextUser"] = ctx.ContextUser
+ ctx.Data["Username"] = ctx.Repo.Owner.Name
+
+ // redirect link to wiki
+ if strings.HasSuffix(repoName, ".wiki") {
+ // ctx.Req.URL.Path does not have the preceding appSubURL - any redirect must have this added
+ // Now we happen to know that all of our paths are: /:username/:reponame/whatever_else
+ originalRepoName := ctx.Params(":reponame")
+ redirectRepoName := strings.TrimSuffix(repoName, ".wiki")
+ redirectRepoName += originalRepoName[len(redirectRepoName)+5:]
+ redirectPath := strings.Replace(
+ ctx.Req.URL.EscapedPath(),
+ url.PathEscape(userName)+"/"+url.PathEscape(originalRepoName),
+ url.PathEscape(userName)+"/"+url.PathEscape(redirectRepoName)+"/wiki",
+ 1,
+ )
+ if ctx.Req.URL.RawQuery != "" {
+ redirectPath += "?" + ctx.Req.URL.RawQuery
+ }
+ ctx.Redirect(path.Join(setting.AppSubURL, redirectPath))
+ return nil
+ }
+
+ // Get repository.
+ repo, err := repo_model.GetRepositoryByName(ctx, owner.ID, repoName)
+ if err != nil {
+ if repo_model.IsErrRepoNotExist(err) {
+ redirectRepoID, err := repo_model.LookupRedirect(ctx, owner.ID, repoName)
+ if err == nil {
+ RedirectToRepo(ctx.Base, redirectRepoID)
+ } else if repo_model.IsErrRedirectNotExist(err) {
+ if ctx.FormString("go-get") == "1" {
+ EarlyResponseForGoGetMeta(ctx)
+ return nil
+ }
+ ctx.NotFound("GetRepositoryByName", nil)
+ } else {
+ ctx.ServerError("LookupRepoRedirect", err)
+ }
+ } else {
+ ctx.ServerError("GetRepositoryByName", err)
+ }
+ return nil
+ }
+ repo.Owner = owner
+
+ repoAssignment(ctx, repo)
+ if ctx.Written() {
+ return nil
+ }
+
+ ctx.Repo.RepoLink = repo.Link()
+ ctx.Data["RepoLink"] = ctx.Repo.RepoLink
+ ctx.Data["RepoRelPath"] = ctx.Repo.Owner.Name + "/" + ctx.Repo.Repository.Name
+
+ if setting.Other.EnableFeed {
+ ctx.Data["EnableFeed"] = true
+ ctx.Data["FeedURL"] = ctx.Repo.RepoLink
+ }
+
+ unit, err := ctx.Repo.Repository.GetUnit(ctx, unit_model.TypeExternalTracker)
+ if err == nil {
+ ctx.Data["RepoExternalIssuesLink"] = unit.ExternalTrackerConfig().ExternalTrackerURL
+ }
+
+ ctx.Data["NumTags"], err = db.Count[repo_model.Release](ctx, repo_model.FindReleasesOptions{
+ IncludeDrafts: true,
+ IncludeTags: true,
+ HasSha1: optional.Some(true), // only draft releases which are created with existing tags
+ RepoID: ctx.Repo.Repository.ID,
+ })
+ if err != nil {
+ ctx.ServerError("GetReleaseCountByRepoID", err)
+ return nil
+ }
+ ctx.Data["NumReleases"], err = db.Count[repo_model.Release](ctx, repo_model.FindReleasesOptions{
+ // only show draft releases for users who can write, read-only users shouldn't see draft releases.
+ IncludeDrafts: ctx.Repo.CanWrite(unit_model.TypeReleases),
+ RepoID: ctx.Repo.Repository.ID,
+ })
+ if err != nil {
+ ctx.ServerError("GetReleaseCountByRepoID", err)
+ return nil
+ }
+ ctx.Data["NumPackages"], err = packages_model.CountRepositoryPackages(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.ServerError("GetPackageCountByRepoID", err)
+ return nil
+ }
+
+ ctx.Data["Title"] = owner.Name + "/" + repo.Name
+ ctx.Data["Repository"] = repo
+ ctx.Data["RepositoryAPActorID"] = repo.APActorID()
+ ctx.Data["Owner"] = ctx.Repo.Repository.Owner
+ ctx.Data["IsRepositoryOwner"] = ctx.Repo.IsOwner()
+ ctx.Data["IsRepositoryAdmin"] = ctx.Repo.IsAdmin()
+ ctx.Data["RepoOwnerIsOrganization"] = repo.Owner.IsOrganization()
+ ctx.Data["CanWriteCode"] = ctx.Repo.CanWrite(unit_model.TypeCode)
+ ctx.Data["CanWriteIssues"] = ctx.Repo.CanWrite(unit_model.TypeIssues)
+ ctx.Data["CanWritePulls"] = ctx.Repo.CanWrite(unit_model.TypePullRequests)
+ ctx.Data["CanWriteActions"] = ctx.Repo.CanWrite(unit_model.TypeActions)
+
+ canSignedUserFork, err := repo_module.CanUserForkRepo(ctx, ctx.Doer, ctx.Repo.Repository)
+ if err != nil {
+ ctx.ServerError("CanUserForkRepo", err)
+ return nil
+ }
+ ctx.Data["CanSignedUserFork"] = canSignedUserFork
+
+ userAndOrgForks, err := repo_model.GetForksByUserAndOrgs(ctx, ctx.Doer, ctx.Repo.Repository)
+ if err != nil {
+ ctx.ServerError("GetForksByUserAndOrgs", err)
+ return nil
+ }
+ ctx.Data["UserAndOrgForks"] = userAndOrgForks
+
+ // canSignedUserFork is true if the current user doesn't have a fork of this repo yet, or
+ // if they own an org that doesn't have a fork of this repo yet.
+ // If multiple forks are available, or if the user can fork to another account but a fork already exists, open the selection dialog.
+ ctx.Data["ShowForkModal"] = len(userAndOrgForks) > 1 || (canSignedUserFork && len(userAndOrgForks) > 0)
+
+ ctx.Data["RepoCloneLink"] = repo.CloneLink()
+
+ cloneButtonShowHTTPS := !setting.Repository.DisableHTTPGit
+ cloneButtonShowSSH := !setting.SSH.Disabled && (ctx.IsSigned || setting.SSH.ExposeAnonymous)
+ if !cloneButtonShowHTTPS && !cloneButtonShowSSH {
+ // We have to show at least one link, so default to showing the HTTPS one
+ cloneButtonShowHTTPS = true
+ }
+ ctx.Data["CloneButtonShowHTTPS"] = cloneButtonShowHTTPS
+ ctx.Data["CloneButtonShowSSH"] = cloneButtonShowSSH
+ ctx.Data["CloneButtonOriginLink"] = ctx.Data["RepoCloneLink"] // it may be rewritten to the WikiCloneLink by the router middleware
+
+ ctx.Data["RepoSearchEnabled"] = setting.Indexer.RepoIndexerEnabled
+ if setting.Indexer.RepoIndexerEnabled {
+ ctx.Data["CodeIndexerUnavailable"] = !code_indexer.IsAvailable(ctx)
+ }
+
+ if ctx.IsSigned {
+ ctx.Data["IsWatchingRepo"] = repo_model.IsWatching(ctx, ctx.Doer.ID, repo.ID)
+ ctx.Data["IsStaringRepo"] = repo_model.IsStaring(ctx, ctx.Doer.ID, repo.ID)
+ }
+
+ if repo.IsFork {
+ RetrieveBaseRepo(ctx, repo)
+ if ctx.Written() {
+ return nil
+ }
+ }
+
+ if repo.IsGenerated() {
+ RetrieveTemplateRepo(ctx, repo)
+ if ctx.Written() {
+ return nil
+ }
+ }
+
+ isHomeOrSettings := ctx.Link == ctx.Repo.RepoLink || ctx.Link == ctx.Repo.RepoLink+"/settings" || strings.HasPrefix(ctx.Link, ctx.Repo.RepoLink+"/settings/")
+
+ // Disable everything when the repo is being created
+ if ctx.Repo.Repository.IsBeingCreated() || ctx.Repo.Repository.IsBroken() {
+ ctx.Data["BranchName"] = ctx.Repo.Repository.DefaultBranch
+ if !isHomeOrSettings {
+ ctx.Redirect(ctx.Repo.RepoLink)
+ }
+ return nil
+ }
+
+ gitRepo, err := gitrepo.OpenRepository(ctx, repo)
+ if err != nil {
+ if strings.Contains(err.Error(), "repository does not exist") || strings.Contains(err.Error(), "no such file or directory") {
+ log.Error("Repository %-v has a broken repository on the file system: %s Error: %v", ctx.Repo.Repository, ctx.Repo.Repository.RepoPath(), err)
+ ctx.Repo.Repository.MarkAsBrokenEmpty()
+ ctx.Data["BranchName"] = ctx.Repo.Repository.DefaultBranch
+ // Only allow access to base of repo or settings
+ if !isHomeOrSettings {
+ ctx.Redirect(ctx.Repo.RepoLink)
+ }
+ return nil
+ }
+ ctx.ServerError("RepoAssignment Invalid repo "+repo.FullName(), err)
+ return nil
+ }
+ if ctx.Repo.GitRepo != nil {
+ ctx.Repo.GitRepo.Close()
+ }
+ ctx.Repo.GitRepo = gitRepo
+
+ // We opened it, we should close it
+ cancel := func() {
+ // If it's been set to nil then assume someone else has closed it.
+ if ctx.Repo.GitRepo != nil {
+ ctx.Repo.GitRepo.Close()
+ }
+ }
+
+ // Stop at this point when the repo is empty.
+ if ctx.Repo.Repository.IsEmpty {
+ ctx.Data["BranchName"] = ctx.Repo.Repository.DefaultBranch
+ return cancel
+ }
+
+ branchOpts := git_model.FindBranchOptions{
+ RepoID: ctx.Repo.Repository.ID,
+ IsDeletedBranch: optional.Some(false),
+ ListOptions: db.ListOptionsAll,
+ }
+ branchesTotal, err := db.Count[git_model.Branch](ctx, branchOpts)
+ if err != nil {
+ ctx.ServerError("CountBranches", err)
+ return cancel
+ }
+
+ // A non-empty repo should have at least one branch; if none are recorded, this repository's branches haven't been synced yet
+ if branchesTotal == 0 { // fallback to do a sync immediately
+ branchesTotal, err = repo_module.SyncRepoBranches(ctx, ctx.Repo.Repository.ID, 0)
+ if err != nil {
+ ctx.ServerError("SyncRepoBranches", err)
+ return cancel
+ }
+ }
+
+ ctx.Data["BranchesCount"] = branchesTotal
+
+ // If no branch is set in the request URL, try to guess a default one.
+ if len(ctx.Repo.BranchName) == 0 {
+ if len(ctx.Repo.Repository.DefaultBranch) > 0 && gitRepo.IsBranchExist(ctx.Repo.Repository.DefaultBranch) {
+ ctx.Repo.BranchName = ctx.Repo.Repository.DefaultBranch
+ } else {
+ ctx.Repo.BranchName, _ = gitrepo.GetDefaultBranch(ctx, ctx.Repo.Repository)
+ if ctx.Repo.BranchName == "" {
+ // If a default branch still can't be found, fall back to the default branch from settings.
+ // Something might be wrong: either the site admin should fix the repo sync, or Gitea should fix a potential bug.
+ ctx.Repo.BranchName = setting.Repository.DefaultBranch
+ }
+ }
+ ctx.Repo.RefName = ctx.Repo.BranchName
+ }
+ ctx.Data["BranchName"] = ctx.Repo.BranchName
+
+ // People who have push access or have forked the repository can propose a new pull request.
+ canPush := ctx.Repo.CanWrite(unit_model.TypeCode) ||
+ (ctx.IsSigned && repo_model.HasForkedRepo(ctx, ctx.Doer.ID, ctx.Repo.Repository.ID))
+ canCompare := false
+
+ // Pull request is allowed if this is a fork repository
+ // and base repository accepts pull requests.
+ if repo.BaseRepo != nil && repo.BaseRepo.AllowsPulls(ctx) {
+ canCompare = true
+ ctx.Data["BaseRepo"] = repo.BaseRepo
+ ctx.Repo.PullRequest.BaseRepo = repo.BaseRepo
+ ctx.Repo.PullRequest.Allowed = canPush
+ ctx.Repo.PullRequest.HeadInfoSubURL = url.PathEscape(ctx.Repo.Owner.Name) + ":" + util.PathEscapeSegments(ctx.Repo.BranchName)
+ } else if repo.AllowsPulls(ctx) {
+ // Otherwise, this repository accepts pull requests between its own branches.
+ canCompare = true
+ ctx.Data["BaseRepo"] = repo
+ ctx.Repo.PullRequest.BaseRepo = repo
+ ctx.Repo.PullRequest.Allowed = canPush
+ ctx.Repo.PullRequest.SameRepo = true
+ ctx.Repo.PullRequest.HeadInfoSubURL = util.PathEscapeSegments(ctx.Repo.BranchName)
+ }
+ ctx.Data["CanCompareOrPull"] = canCompare
+ ctx.Data["PullRequestCtx"] = ctx.Repo.PullRequest
+
+ if ctx.Repo.Repository.Status == repo_model.RepositoryPendingTransfer {
+ repoTransfer, err := models.GetPendingRepositoryTransfer(ctx, ctx.Repo.Repository)
+ if err != nil {
+ ctx.ServerError("GetPendingRepositoryTransfer", err)
+ return cancel
+ }
+
+ if err := repoTransfer.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("LoadRecipient", err)
+ return cancel
+ }
+
+ ctx.Data["RepoTransfer"] = repoTransfer
+ if ctx.Doer != nil {
+ ctx.Data["CanUserAcceptTransfer"] = repoTransfer.CanUserAcceptTransfer(ctx, ctx.Doer)
+ }
+ }
+
+ if ctx.FormString("go-get") == "1" {
+ ctx.Data["GoGetImport"] = ComposeGoGetImport(owner.Name, repo.Name)
+ fullURLPrefix := repo.HTMLURL() + "/src/branch/" + util.PathEscapeSegments(ctx.Repo.BranchName)
+ ctx.Data["GoDocDirectory"] = fullURLPrefix + "{/dir}"
+ ctx.Data["GoDocFile"] = fullURLPrefix + "{/dir}/{file}#L{line}"
+ }
+ return cancel
+}
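
RepoAssignment returns a context.CancelFunc instead of closing the opened git repository itself, so the router can run the rest of the handler chain first and release the handle afterwards. Below is a minimal standalone sketch of that open-then-return-cancel pattern; the fakeRepo type and wiring are illustrative only, not Gitea's actual types.

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

// fakeRepo stands in for an opened git repository handle.
type fakeRepo struct{ closed bool }

func (r *fakeRepo) Close() { r.closed = true }

// assignRepo opens the resource and hands cleanup back to the caller,
// mirroring how RepoAssignment returns its cancel function.
func assignRepo() (repo *fakeRepo, cancel func()) {
	repo = &fakeRepo{}
	return repo, func() {
		if repo != nil { // if set to nil, assume someone else closed it
			repo.Close()
		}
	}
}

func handler(w http.ResponseWriter, req *http.Request) {
	repo, cancel := assignRepo()
	defer cancel() // runs after the response has been written
	fmt.Fprintf(w, "closed during request? %v\n", repo.closed)
}

func main() {
	rec := httptest.NewRecorder()
	handler(rec, httptest.NewRequest(http.MethodGet, "/", nil))
	fmt.Print(rec.Body.String()) // closed during request? false
}
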
+
+// RepoRefType type of repo reference
+type RepoRefType int
+
+const (
+ // RepoRefLegacy is an unknown type: make an educated guess and redirect,
+ // for backward compatibility with the previous URL scheme.
+ RepoRefLegacy RepoRefType = iota
+ // RepoRefAny is for usage where an educated guess is needed
+ // but a redirect cannot be made
+ RepoRefAny
+ // RepoRefBranch branch
+ RepoRefBranch
+ // RepoRefTag tag
+ RepoRefTag
+ // RepoRefCommit commit
+ RepoRefCommit
+ // RepoRefBlob blob
+ RepoRefBlob
+)
+
+const headRefName = "HEAD"
+
+// RepoRef handles repository reference names when the ref name is not
+// explicitly given
+func RepoRef() func(*Context) context.CancelFunc {
+ // since no ref name is explicitly specified, ok to just use branch
+ return RepoRefByType(RepoRefBranch)
+}
+
+// RefTypeIncludesBranches returns true if ref type can be a branch
+func (rt RepoRefType) RefTypeIncludesBranches() bool {
+ return rt == RepoRefLegacy || rt == RepoRefAny || rt == RepoRefBranch
+}
+
+// RefTypeIncludesTags returns true if ref type can be a tag
+func (rt RepoRefType) RefTypeIncludesTags() bool {
+ return rt == RepoRefLegacy || rt == RepoRefAny || rt == RepoRefTag
+}
+
+func getRefNameFromPath(repo *Repository, path string, isExist func(string) bool) string {
+ refName := ""
+ parts := strings.Split(path, "/")
+ for i, part := range parts {
+ refName = strings.TrimPrefix(refName+"/"+part, "/")
+ if isExist(refName) {
+ repo.TreePath = strings.Join(parts[i+1:], "/")
+ return refName
+ }
+ }
+ return ""
+}
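
getRefNameFromPath grows the candidate ref name one path segment at a time and stops at the first match, which is what lets branch names containing slashes resolve correctly while the remaining segments become the tree path. A standalone sketch of the same resolution logic:

package main

import (
	"fmt"
	"strings"
)

// resolveRef mimics getRefNameFromPath: extend the candidate ref by one
// segment per iteration until the existence check matches; everything
// after the matched ref is the in-repository tree path.
func resolveRef(path string, exists func(string) bool) (ref, treePath string) {
	parts := strings.Split(path, "/")
	name := ""
	for i, part := range parts {
		name = strings.TrimPrefix(name+"/"+part, "/")
		if exists(name) {
			return name, strings.Join(parts[i+1:], "/")
		}
	}
	return "", ""
}

func main() {
	branches := map[string]bool{"main": true, "release/v1.21": true}
	ref, tree := resolveRef("release/v1.21/docs/README.md", func(s string) bool { return branches[s] })
	fmt.Println(ref, "->", tree) // release/v1.21 -> docs/README.md
}
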
+
+func getRefName(ctx *Base, repo *Repository, pathType RepoRefType) string {
+ path := ctx.Params("*")
+ switch pathType {
+ case RepoRefLegacy, RepoRefAny:
+ if refName := getRefName(ctx, repo, RepoRefBranch); len(refName) > 0 {
+ return refName
+ }
+ if refName := getRefName(ctx, repo, RepoRefTag); len(refName) > 0 {
+ return refName
+ }
+ // For legacy reasons and API support, only a full commit SHA is accepted here
+ parts := strings.Split(path, "/")
+
+ if len(parts) > 0 && len(parts[0]) == git.ObjectFormatFromName(repo.Repository.ObjectFormatName).FullLength() {
+ repo.TreePath = strings.Join(parts[1:], "/")
+ return parts[0]
+ }
+ if refName := getRefName(ctx, repo, RepoRefBlob); len(refName) > 0 {
+ return refName
+ }
+ repo.TreePath = path
+ return repo.Repository.DefaultBranch
+ case RepoRefBranch:
+ ref := getRefNameFromPath(repo, path, repo.GitRepo.IsBranchExist)
+ if len(ref) == 0 {
+ // check if ref is HEAD
+ parts := strings.Split(path, "/")
+ if parts[0] == headRefName {
+ repo.TreePath = strings.Join(parts[1:], "/")
+ return repo.Repository.DefaultBranch
+ }
+
+ // maybe it's a renamed branch
+ return getRefNameFromPath(repo, path, func(s string) bool {
+ b, exist, err := git_model.FindRenamedBranch(ctx, repo.Repository.ID, s)
+ if err != nil {
+ log.Error("FindRenamedBranch: %v", err)
+ return false
+ }
+
+ if !exist {
+ return false
+ }
+
+ ctx.Data["IsRenamedBranch"] = true
+ ctx.Data["RenamedBranchName"] = b.To
+
+ return true
+ })
+ }
+
+ return ref
+ case RepoRefTag:
+ return getRefNameFromPath(repo, path, repo.GitRepo.IsTagExist)
+ case RepoRefCommit:
+ parts := strings.Split(path, "/")
+
+ if len(parts) > 0 && len(parts[0]) >= 4 && len(parts[0]) <= repo.GetObjectFormat().FullLength() {
+ repo.TreePath = strings.Join(parts[1:], "/")
+ return parts[0]
+ }
+
+ if len(parts) > 0 && parts[0] == headRefName {
+ // the HEAD ref points to the latest commit on the default branch
+ commit, err := repo.GitRepo.GetBranchCommit(repo.Repository.DefaultBranch)
+ if err != nil {
+ return ""
+ }
+ repo.TreePath = strings.Join(parts[1:], "/")
+ return commit.ID.String()
+ }
+ case RepoRefBlob:
+ _, err := repo.GitRepo.GetBlob(path)
+ if err != nil {
+ return ""
+ }
+ return path
+ default:
+ log.Error("Unrecognized path type: %v", path)
+ }
+ return ""
+}
+
+// RepoRefByType handles repository reference name for a specific type
+// of repository reference
+func RepoRefByType(refType RepoRefType, ignoreNotExistErr ...bool) func(*Context) context.CancelFunc {
+ return func(ctx *Context) (cancel context.CancelFunc) {
+ // Empty repository does not have reference information.
+ if ctx.Repo.Repository.IsEmpty {
+ // assume the user is viewing the (non-existent) default branch
+ ctx.Repo.IsViewBranch = true
+ ctx.Repo.BranchName = ctx.Repo.Repository.DefaultBranch
+ ctx.Data["TreePath"] = ""
+ return nil
+ }
+
+ var (
+ refName string
+ err error
+ )
+
+ if ctx.Repo.GitRepo == nil {
+ ctx.Repo.GitRepo, err = gitrepo.OpenRepository(ctx, ctx.Repo.Repository)
+ if err != nil {
+ ctx.ServerError(fmt.Sprintf("Open Repository %v failed", ctx.Repo.Repository.FullName()), err)
+ return nil
+ }
+ // We opened it, we should close it
+ cancel = func() {
+ // If it's been set to nil then assume someone else has closed it.
+ if ctx.Repo.GitRepo != nil {
+ ctx.Repo.GitRepo.Close()
+ }
+ }
+ }
+
+ // Get default branch.
+ if len(ctx.Params("*")) == 0 {
+ refName = ctx.Repo.Repository.DefaultBranch
+ if !ctx.Repo.GitRepo.IsBranchExist(refName) {
+ brs, _, err := ctx.Repo.GitRepo.GetBranches(0, 1)
+ if err == nil && len(brs) != 0 {
+ refName = brs[0].Name
+ } else if len(brs) == 0 {
+ log.Error("No branches in non-empty repository %s", ctx.Repo.GitRepo.Path)
+ ctx.Repo.Repository.MarkAsBrokenEmpty()
+ } else {
+ log.Error("GetBranches error: %v", err)
+ ctx.Repo.Repository.MarkAsBrokenEmpty()
+ }
+ }
+ ctx.Repo.RefName = refName
+ ctx.Repo.BranchName = refName
+ ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetBranchCommit(refName)
+ if err == nil {
+ ctx.Repo.CommitID = ctx.Repo.Commit.ID.String()
+ } else if strings.Contains(err.Error(), "fatal: not a git repository") || strings.Contains(err.Error(), "object does not exist") {
+ // if the repository is broken, we can continue to the handler code, to show "Settings -> Delete Repository" for end users
+ log.Error("GetBranchCommit: %v", err)
+ ctx.Repo.Repository.MarkAsBrokenEmpty()
+ } else {
+ ctx.ServerError("GetBranchCommit", err)
+ return cancel
+ }
+ ctx.Repo.IsViewBranch = true
+ } else {
+ refName = getRefName(ctx.Base, ctx.Repo, refType)
+ ctx.Repo.RefName = refName
+ isRenamedBranch, has := ctx.Data["IsRenamedBranch"].(bool)
+ if has && isRenamedBranch {
+ renamedBranchName := ctx.Data["RenamedBranchName"].(string)
+ ctx.Flash.Info(ctx.Tr("repo.branch.renamed", refName, renamedBranchName))
+ link := setting.AppSubURL + strings.Replace(ctx.Req.URL.EscapedPath(), util.PathEscapeSegments(refName), util.PathEscapeSegments(renamedBranchName), 1)
+ ctx.Redirect(link)
+ return cancel
+ }
+
+ if refType.RefTypeIncludesBranches() && ctx.Repo.GitRepo.IsBranchExist(refName) {
+ ctx.Repo.IsViewBranch = true
+ ctx.Repo.BranchName = refName
+
+ ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetBranchCommit(refName)
+ if err != nil {
+ ctx.ServerError("GetBranchCommit", err)
+ return cancel
+ }
+ ctx.Repo.CommitID = ctx.Repo.Commit.ID.String()
+ } else if refType.RefTypeIncludesTags() && ctx.Repo.GitRepo.IsTagExist(refName) {
+ ctx.Repo.IsViewTag = true
+ ctx.Repo.TagName = refName
+
+ ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetTagCommit(refName)
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.NotFound("GetTagCommit", err)
+ return cancel
+ }
+ ctx.ServerError("GetTagCommit", err)
+ return cancel
+ }
+ ctx.Repo.CommitID = ctx.Repo.Commit.ID.String()
+ } else if len(refName) >= 4 && len(refName) <= ctx.Repo.GetObjectFormat().FullLength() {
+ ctx.Repo.IsViewCommit = true
+ ctx.Repo.CommitID = refName
+
+ ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetCommit(refName)
+ if err != nil {
+ ctx.NotFound("GetCommit", err)
+ return cancel
+ }
+ // If this is a short commit ID, add a canonical Link header
+ if len(refName) < ctx.Repo.GetObjectFormat().FullLength() {
+ ctx.RespHeader().Set("Link", fmt.Sprintf("<%s>; rel=\"canonical\"",
+ util.URLJoin(setting.AppURL, strings.Replace(ctx.Req.URL.RequestURI(), util.PathEscapeSegments(refName), url.PathEscape(ctx.Repo.Commit.ID.String()), 1))))
+ }
+ } else {
+ if len(ignoreNotExistErr) > 0 && ignoreNotExistErr[0] {
+ return cancel
+ }
+ ctx.NotFound("RepoRef invalid repo", fmt.Errorf("branch or tag not exist: %s", refName))
+ return cancel
+ }
+
+ if refType == RepoRefLegacy {
+ // redirect from old URL scheme to new URL scheme
+ prefix := strings.TrimPrefix(setting.AppSubURL+strings.ToLower(strings.TrimSuffix(ctx.Req.URL.Path, ctx.Params("*"))), strings.ToLower(ctx.Repo.RepoLink))
+
+ ctx.Redirect(path.Join(
+ ctx.Repo.RepoLink,
+ util.PathEscapeSegments(prefix),
+ ctx.Repo.BranchNameSubURL(),
+ util.PathEscapeSegments(ctx.Repo.TreePath)))
+ return cancel
+ }
+ }
+
+ ctx.Data["BranchName"] = ctx.Repo.BranchName
+ ctx.Data["RefName"] = ctx.Repo.RefName
+ ctx.Data["BranchNameSubURL"] = ctx.Repo.BranchNameSubURL()
+ ctx.Data["TagName"] = ctx.Repo.TagName
+ ctx.Data["CommitID"] = ctx.Repo.CommitID
+ ctx.Data["TreePath"] = ctx.Repo.TreePath
+ ctx.Data["IsViewBranch"] = ctx.Repo.IsViewBranch
+ ctx.Data["IsViewTag"] = ctx.Repo.IsViewTag
+ ctx.Data["IsViewCommit"] = ctx.Repo.IsViewCommit
+ ctx.Data["CanCreateBranch"] = ctx.Repo.CanCreateBranch()
+
+ ctx.Repo.CommitsCount, err = ctx.Repo.GetCommitsCount()
+ if err != nil {
+ ctx.ServerError("GetCommitsCount", err)
+ return cancel
+ }
+ ctx.Data["CommitsCount"] = ctx.Repo.CommitsCount
+ ctx.Repo.GitRepo.LastCommitCache = git.NewLastCommitCache(ctx.Repo.CommitsCount, ctx.Repo.Repository.FullName(), ctx.Repo.GitRepo, cache.GetCache())
+
+ return cancel
+ }
+}
+
+// GitHookService checks if repository Git hooks service has been enabled.
+func GitHookService() func(ctx *Context) {
+ return func(ctx *Context) {
+ if !ctx.Doer.CanEditGitHook() {
+ ctx.NotFound("GitHookService", nil)
+ return
+ }
+ }
+}
+
+// UnitTypes returns a middleware to set unit types to context variables.
+func UnitTypes() func(ctx *Context) {
+ return func(ctx *Context) {
+ ctx.Data["UnitTypeCode"] = unit_model.TypeCode
+ ctx.Data["UnitTypeIssues"] = unit_model.TypeIssues
+ ctx.Data["UnitTypePullRequests"] = unit_model.TypePullRequests
+ ctx.Data["UnitTypeReleases"] = unit_model.TypeReleases
+ ctx.Data["UnitTypeWiki"] = unit_model.TypeWiki
+ ctx.Data["UnitTypeExternalWiki"] = unit_model.TypeExternalWiki
+ ctx.Data["UnitTypeExternalTracker"] = unit_model.TypeExternalTracker
+ ctx.Data["UnitTypeProjects"] = unit_model.TypeProjects
+ ctx.Data["UnitTypePackages"] = unit_model.TypePackages
+ ctx.Data["UnitTypeActions"] = unit_model.TypeActions
+ }
+}
diff --git a/services/context/repository.go b/services/context/repository.go
new file mode 100644
index 0000000..422ac3f
--- /dev/null
+++ b/services/context/repository.go
@@ -0,0 +1,26 @@
+// Copyright 2023, 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "net/http"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+)
+
+// RepositoryIDAssignmentAPI returns a middleware to handle context-repo assignment for api routes
+func RepositoryIDAssignmentAPI() func(ctx *APIContext) {
+ return func(ctx *APIContext) {
+ repositoryID := ctx.ParamsInt64(":repository-id")
+
+ var err error
+ repository := new(Repository)
+ repository.Repository, err = repo_model.GetRepositoryByID(ctx, repositoryID)
+ if err != nil {
+ ctx.Error(http.StatusNotFound, "GetRepositoryByID", err)
+ return
+ }
+ ctx.Repo = repository
+ }
+}
diff --git a/services/context/response.go b/services/context/response.go
new file mode 100644
index 0000000..2f271f2
--- /dev/null
+++ b/services/context/response.go
@@ -0,0 +1,103 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "net/http"
+
+ web_types "code.gitea.io/gitea/modules/web/types"
+)
+
+// ResponseWriter represents a response writer for HTTP
+type ResponseWriter interface {
+ http.ResponseWriter
+ http.Flusher
+ web_types.ResponseStatusProvider
+
+ Before(func(ResponseWriter))
+
+ Status() int // used by access logger template
+ Size() int // used by access logger template
+}
+
+var _ ResponseWriter = &Response{}
+
+// Response represents a response
+type Response struct {
+ http.ResponseWriter
+ written int
+ status int
+ befores []func(ResponseWriter)
+ beforeExecuted bool
+}
+
+// Write writes bytes to HTTP endpoint
+func (r *Response) Write(bs []byte) (int, error) {
+ if !r.beforeExecuted {
+ for _, before := range r.befores {
+ before(r)
+ }
+ r.beforeExecuted = true
+ }
+ size, err := r.ResponseWriter.Write(bs)
+ r.written += size
+ if err != nil {
+ return size, err
+ }
+ if r.status == 0 {
+ r.status = http.StatusOK
+ }
+ return size, nil
+}
+
+func (r *Response) Status() int {
+ return r.status
+}
+
+func (r *Response) Size() int {
+ return r.written
+}
+
+// WriteHeader writes the status code
+func (r *Response) WriteHeader(statusCode int) {
+ if !r.beforeExecuted {
+ for _, before := range r.befores {
+ before(r)
+ }
+ r.beforeExecuted = true
+ }
+ if r.status == 0 {
+ r.status = statusCode
+ r.ResponseWriter.WriteHeader(statusCode)
+ }
+}
+
+// Flush flushes cached data
+func (r *Response) Flush() {
+ if f, ok := r.ResponseWriter.(http.Flusher); ok {
+ f.Flush()
+ }
+}
+
+// WrittenStatus returns the status code that was written
+func (r *Response) WrittenStatus() int {
+ return r.status
+}
+
+// Before allows a function to be called before the ResponseWriter is written to. This is
+// useful for setting headers, or for any other operation that must happen before the response is written.
+func (r *Response) Before(f func(ResponseWriter)) {
+ r.befores = append(r.befores, f)
+}
+
+func WrapResponseWriter(resp http.ResponseWriter) *Response {
+ if v, ok := resp.(*Response); ok {
+ return v
+ }
+ return &Response{
+ ResponseWriter: resp,
+ status: 0,
+ befores: make([]func(ResponseWriter), 0),
+ }
+}
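
A short usage sketch for the Before hook, assuming this package is imported under its code.gitea.io/gitea/services/context path: callbacks registered this way run exactly once, just before the first Write or WriteHeader.

package main

import (
	"fmt"
	"net/http/httptest"

	gitea_context "code.gitea.io/gitea/services/context"
)

func main() {
	rec := httptest.NewRecorder()
	resp := gitea_context.WrapResponseWriter(rec)

	// Queue a header mutation; it is applied before the first byte goes out.
	resp.Before(func(w gitea_context.ResponseWriter) {
		w.Header().Set("X-Request-Trace", "demo")
	})

	_, _ = resp.Write([]byte("hello"))
	fmt.Println(rec.Header().Get("X-Request-Trace"), resp.Status(), resp.Size()) // demo 200 5
}
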
diff --git a/services/context/upload/upload.go b/services/context/upload/upload.go
new file mode 100644
index 0000000..77a7eb9
--- /dev/null
+++ b/services/context/upload/upload.go
@@ -0,0 +1,105 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package upload
+
+import (
+ "mime"
+ "net/http"
+ "net/url"
+ "path"
+ "regexp"
+ "strings"
+
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+)
+
+// ErrFileTypeForbidden is returned when a file type is not allowed to be uploaded
+type ErrFileTypeForbidden struct {
+ Type string
+}
+
+// IsErrFileTypeForbidden checks if an error is an ErrFileTypeForbidden.
+func IsErrFileTypeForbidden(err error) bool {
+ _, ok := err.(ErrFileTypeForbidden)
+ return ok
+}
+
+func (err ErrFileTypeForbidden) Error() string {
+ return "This file extension or type is not allowed to be uploaded."
+}
+
+var wildcardTypeRe = regexp.MustCompile(`^[a-z]+/\*$`)
+
+// Verify validates whether a file is allowed to be uploaded.
+func Verify(buf []byte, fileName, allowedTypesStr string) error {
+ allowedTypesStr = strings.ReplaceAll(allowedTypesStr, "|", ",") // compat for old config format
+
+ allowedTypes := []string{}
+ for _, entry := range strings.Split(allowedTypesStr, ",") {
+ entry = strings.ToLower(strings.TrimSpace(entry))
+ if entry != "" {
+ allowedTypes = append(allowedTypes, entry)
+ }
+ }
+
+ if len(allowedTypes) == 0 {
+ return nil // everything is allowed
+ }
+
+ fullMimeType := http.DetectContentType(buf)
+ mimeType, _, err := mime.ParseMediaType(fullMimeType)
+ if err != nil {
+ log.Warn("Detected attachment type could not be parsed %s", fullMimeType)
+ return ErrFileTypeForbidden{Type: fullMimeType}
+ }
+ extension := strings.ToLower(path.Ext(fileName))
+
+ // https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input/file#Unique_file_type_specifiers
+ for _, allowEntry := range allowedTypes {
+ if allowEntry == "*/*" {
+ return nil // everything allowed
+ } else if strings.HasPrefix(allowEntry, ".") && allowEntry == extension {
+ return nil // extension is allowed
+ } else if mimeType == allowEntry {
+ return nil // mime type is allowed
+ } else if wildcardTypeRe.MatchString(allowEntry) && strings.HasPrefix(mimeType, allowEntry[:len(allowEntry)-1]) {
+ return nil // wildcard match, e.g. image/*
+ }
+ }
+
+ log.Info("Attachment with type %s blocked from upload", fullMimeType)
+ return ErrFileTypeForbidden{Type: fullMimeType}
+}
+
+// AddUploadContext renders template values for dropzone
+func AddUploadContext(ctx *context.Context, uploadType string) {
+ if uploadType == "release" {
+ ctx.Data["UploadUrl"] = ctx.Repo.RepoLink + "/releases/attachments"
+ ctx.Data["UploadRemoveUrl"] = ctx.Repo.RepoLink + "/releases/attachments/remove"
+ ctx.Data["UploadLinkUrl"] = ctx.Repo.RepoLink + "/releases/attachments"
+ ctx.Data["UploadAccepts"] = strings.ReplaceAll(setting.Repository.Release.AllowedTypes, "|", ",")
+ ctx.Data["UploadMaxFiles"] = setting.Attachment.MaxFiles
+ ctx.Data["UploadMaxSize"] = setting.Attachment.MaxSize
+ } else if uploadType == "comment" {
+ ctx.Data["UploadUrl"] = ctx.Repo.RepoLink + "/issues/attachments"
+ ctx.Data["UploadRemoveUrl"] = ctx.Repo.RepoLink + "/issues/attachments/remove"
+ if len(ctx.Params(":index")) > 0 {
+ ctx.Data["UploadLinkUrl"] = ctx.Repo.RepoLink + "/issues/" + url.PathEscape(ctx.Params(":index")) + "/attachments"
+ } else {
+ ctx.Data["UploadLinkUrl"] = ctx.Repo.RepoLink + "/issues/attachments"
+ }
+ ctx.Data["UploadAccepts"] = strings.ReplaceAll(setting.Attachment.AllowedTypes, "|", ",")
+ ctx.Data["UploadMaxFiles"] = setting.Attachment.MaxFiles
+ ctx.Data["UploadMaxSize"] = setting.Attachment.MaxSize
+ } else if uploadType == "repo" {
+ ctx.Data["UploadUrl"] = ctx.Repo.RepoLink + "/upload-file"
+ ctx.Data["UploadRemoveUrl"] = ctx.Repo.RepoLink + "/upload-remove"
+ ctx.Data["UploadLinkUrl"] = ctx.Repo.RepoLink + "/upload-file"
+ ctx.Data["UploadAccepts"] = strings.ReplaceAll(setting.Repository.Upload.AllowedTypes, "|", ",")
+ ctx.Data["UploadMaxFiles"] = setting.Repository.Upload.MaxFiles
+ ctx.Data["UploadMaxSize"] = setting.Repository.Upload.FileMaxSize
+ }
+}
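
A hedged usage sketch for Verify (import path assumed from this tree): the buffer should hold the leading bytes of the upload, since http.DetectContentType only inspects at most the first 512 bytes.

package main

import (
	"fmt"

	"code.gitea.io/gitea/services/context/upload"
)

func main() {
	buf := []byte("plain text body") // in real handlers: the first bytes of the uploaded file

	// Accept anything matching image/* or carrying a .txt extension.
	if err := upload.Verify(buf, "notes.txt", "image/*,.txt"); err != nil {
		fmt.Println("rejected:", err)
	} else {
		fmt.Println("accepted") // the .txt extension matches
	}

	// Only images allowed: text/plain fails the wildcard check.
	err := upload.Verify(buf, "notes.txt", "image/*")
	fmt.Println(upload.IsErrFileTypeForbidden(err)) // true
}
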
diff --git a/services/context/upload/upload_test.go b/services/context/upload/upload_test.go
new file mode 100644
index 0000000..f2c3242
--- /dev/null
+++ b/services/context/upload/upload_test.go
@@ -0,0 +1,194 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package upload
+
+import (
+ "bytes"
+ "compress/gzip"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestUpload(t *testing.T) {
+ testContent := []byte(`This is a plain text file.`)
+ var b bytes.Buffer
+ w := gzip.NewWriter(&b)
+ w.Write(testContent)
+ w.Close()
+
+ kases := []struct {
+ data []byte
+ fileName string
+ allowedTypes string
+ err error
+ }{
+ {
+ data: testContent,
+ fileName: "test.txt",
+ allowedTypes: "",
+ err: nil,
+ },
+ {
+ data: testContent,
+ fileName: "dir/test.txt",
+ allowedTypes: "",
+ err: nil,
+ },
+ {
+ data: testContent,
+ fileName: "../../../test.txt",
+ allowedTypes: "",
+ err: nil,
+ },
+ {
+ data: testContent,
+ fileName: "test.txt",
+ allowedTypes: "",
+ err: nil,
+ },
+ {
+ data: testContent,
+ fileName: "test.txt",
+ allowedTypes: ",",
+ err: nil,
+ },
+ {
+ data: testContent,
+ fileName: "test.txt",
+ allowedTypes: "|",
+ err: nil,
+ },
+ {
+ data: testContent,
+ fileName: "test.txt",
+ allowedTypes: "*/*",
+ err: nil,
+ },
+ {
+ data: testContent,
+ fileName: "test.txt",
+ allowedTypes: "*/*,",
+ err: nil,
+ },
+ {
+ data: testContent,
+ fileName: "test.txt",
+ allowedTypes: "*/*|",
+ err: nil,
+ },
+ {
+ data: testContent,
+ fileName: "test.txt",
+ allowedTypes: "text/plain",
+ err: nil,
+ },
+ {
+ data: testContent,
+ fileName: "dir/test.txt",
+ allowedTypes: "text/plain",
+ err: nil,
+ },
+ {
+ data: testContent,
+ fileName: "/dir.txt/test.js",
+ allowedTypes: ".js",
+ err: nil,
+ },
+ {
+ data: testContent,
+ fileName: "test.txt",
+ allowedTypes: " text/plain ",
+ err: nil,
+ },
+ {
+ data: testContent,
+ fileName: "test.txt",
+ allowedTypes: ".txt",
+ err: nil,
+ },
+ {
+ data: testContent,
+ fileName: "test.txt",
+ allowedTypes: " .txt,.js",
+ err: nil,
+ },
+ {
+ data: testContent,
+ fileName: "test.txt",
+ allowedTypes: " .txt|.js",
+ err: nil,
+ },
+ {
+ data: testContent,
+ fileName: "../../test.txt",
+ allowedTypes: " .txt|.js",
+ err: nil,
+ },
+ {
+ data: testContent,
+ fileName: "test.txt",
+ allowedTypes: " .txt ,.js ",
+ err: nil,
+ },
+ {
+ data: testContent,
+ fileName: "test.txt",
+ allowedTypes: "text/plain, .txt",
+ err: nil,
+ },
+ {
+ data: testContent,
+ fileName: "test.txt",
+ allowedTypes: "text/*",
+ err: nil,
+ },
+ {
+ data: testContent,
+ fileName: "test.txt",
+ allowedTypes: "text/*,.js",
+ err: nil,
+ },
+ {
+ data: testContent,
+ fileName: "test.txt",
+ allowedTypes: "text/**",
+ err: ErrFileTypeForbidden{"text/plain; charset=utf-8"},
+ },
+ {
+ data: testContent,
+ fileName: "test.txt",
+ allowedTypes: "application/x-gzip",
+ err: ErrFileTypeForbidden{"text/plain; charset=utf-8"},
+ },
+ {
+ data: testContent,
+ fileName: "test.txt",
+ allowedTypes: ".zip",
+ err: ErrFileTypeForbidden{"text/plain; charset=utf-8"},
+ },
+ {
+ data: testContent,
+ fileName: "test.txt",
+ allowedTypes: ".zip,.txtx",
+ err: ErrFileTypeForbidden{"text/plain; charset=utf-8"},
+ },
+ {
+ data: testContent,
+ fileName: "test.txt",
+ allowedTypes: ".zip|.txtx",
+ err: ErrFileTypeForbidden{"text/plain; charset=utf-8"},
+ },
+ {
+ data: b.Bytes(),
+ fileName: "test.txt",
+ allowedTypes: "application/x-gzip",
+ err: nil,
+ },
+ }
+
+ for _, kase := range kases {
+ assert.Equal(t, kase.err, Verify(kase.data, kase.fileName, kase.allowedTypes))
+ }
+}
diff --git a/services/context/user.go b/services/context/user.go
new file mode 100644
index 0000000..4c9cd29
--- /dev/null
+++ b/services/context/user.go
@@ -0,0 +1,84 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "fmt"
+ "net/http"
+ "strings"
+
+ user_model "code.gitea.io/gitea/models/user"
+)
+
+// UserAssignmentWeb returns a middleware to handle context-user assignment for web routes
+func UserAssignmentWeb() func(ctx *Context) {
+ return func(ctx *Context) {
+ errorFn := func(status int, title string, obj any) {
+ err, ok := obj.(error)
+ if !ok {
+ err = fmt.Errorf("%s", obj)
+ }
+ if status == http.StatusNotFound {
+ ctx.NotFound(title, err)
+ } else {
+ ctx.ServerError(title, err)
+ }
+ }
+ ctx.ContextUser = userAssignment(ctx.Base, ctx.Doer, errorFn)
+ ctx.Data["ContextUser"] = ctx.ContextUser
+ }
+}
+
+// UserIDAssignmentAPI returns a middleware to handle context-user assignment for api routes
+func UserIDAssignmentAPI() func(ctx *APIContext) {
+ return func(ctx *APIContext) {
+ userID := ctx.ParamsInt64(":user-id")
+
+ if ctx.IsSigned && ctx.Doer.ID == userID {
+ ctx.ContextUser = ctx.Doer
+ } else {
+ var err error
+ ctx.ContextUser, err = user_model.GetUserByID(ctx, userID)
+ if err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ ctx.Error(http.StatusNotFound, "GetUserByID", err)
+ } else {
+ ctx.Error(http.StatusInternalServerError, "GetUserByID", err)
+ }
+ }
+ }
+ }
+}
+
+// UserAssignmentAPI returns a middleware to handle context-user assignment for api routes
+func UserAssignmentAPI() func(ctx *APIContext) {
+ return func(ctx *APIContext) {
+ ctx.ContextUser = userAssignment(ctx.Base, ctx.Doer, ctx.Error)
+ }
+}
+
+func userAssignment(ctx *Base, doer *user_model.User, errCb func(int, string, any)) (contextUser *user_model.User) {
+ username := ctx.Params(":username")
+
+ if doer != nil && doer.LowerName == strings.ToLower(username) {
+ contextUser = doer
+ } else {
+ var err error
+ contextUser, err = user_model.GetUserByName(ctx, username)
+ if err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ if redirectUserID, err := user_model.LookupUserRedirect(ctx, username); err == nil {
+ RedirectToUser(ctx, username, redirectUserID)
+ } else if user_model.IsErrUserRedirectNotExist(err) {
+ errCb(http.StatusNotFound, "GetUserByName", err)
+ } else {
+ errCb(http.StatusInternalServerError, "LookupUserRedirect", err)
+ }
+ } else {
+ errCb(http.StatusInternalServerError, "GetUserByName", err)
+ }
+ }
+ }
+ return contextUser
+}
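
userAssignment keeps a single lookup path for web and API routes by taking error reporting as a callback. A standalone sketch of that callback-injection shape (all names here are illustrative):

package main

import (
	"errors"
	"fmt"
	"net/http"
)

var errNotFound = errors.New("user does not exist")

// lookupUser mirrors userAssignment's shape: shared lookup logic, with
// failures delegated to a caller-supplied callback so web routes can
// render an error page while API routes emit JSON.
func lookupUser(name string, errCb func(status int, title string, err error)) string {
	if name != "alice" {
		errCb(http.StatusNotFound, "GetUserByName", errNotFound)
		return ""
	}
	return name
}

func main() {
	webErr := func(status int, title string, err error) { fmt.Println("web page:", status, title, err) }
	apiErr := func(status int, title string, err error) { fmt.Println("api json:", status, title, err) }

	lookupUser("bob", webErr) // web page: 404 GetUserByName user does not exist
	lookupUser("bob", apiErr) // api json: 404 GetUserByName user does not exist
}
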
diff --git a/services/context/utils.go b/services/context/utils.go
new file mode 100644
index 0000000..293750f
--- /dev/null
+++ b/services/context/utils.go
@@ -0,0 +1,38 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "strings"
+ "time"
+)
+
+// GetQueryBeforeSince returns parsed times (as unix timestamps) from the URL query parameters "before" and "since"
+func GetQueryBeforeSince(ctx *Base) (before, since int64, err error) {
+ before, err = parseFormTime(ctx, "before")
+ if err != nil {
+ return 0, 0, err
+ }
+
+ since, err = parseFormTime(ctx, "since")
+ if err != nil {
+ return 0, 0, err
+ }
+ return before, since, nil
+}
+
+// parseFormTime parses a form value as an RFC3339 time and returns its unix timestamp
+func parseFormTime(ctx *Base, name string) (int64, error) {
+ value := strings.TrimSpace(ctx.FormString(name))
+ if len(value) != 0 {
+ t, err := time.Parse(time.RFC3339, value)
+ if err != nil {
+ return 0, err
+ }
+ if !t.IsZero() {
+ return t.Unix(), nil
+ }
+ }
+ return 0, nil
+}
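
parseFormTime accepts only RFC3339 values, so a bare date in ?since= is rejected. A quick standard-library check of the accepted format:

package main

import (
	"fmt"
	"time"
)

func main() {
	// The format GetQueryBeforeSince expects for ?before= and ?since=
	t, err := time.Parse(time.RFC3339, "2024-03-01T12:00:00Z")
	if err != nil {
		panic(err)
	}
	fmt.Println(t.Unix()) // 1709294400

	// Plain dates fail, so ?since=2024-03-01 would be a parse error.
	_, err = time.Parse(time.RFC3339, "2024-03-01")
	fmt.Println(err != nil) // true
}
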
diff --git a/services/context/xsrf.go b/services/context/xsrf.go
new file mode 100644
index 0000000..15e36d1
--- /dev/null
+++ b/services/context/xsrf.go
@@ -0,0 +1,99 @@
+// Copyright 2012 Google Inc. All Rights Reserved.
+// Copyright 2014 The Macaron Authors
+// Copyright 2020 The Gitea Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// SPDX-License-Identifier: Apache-2.0
+
+package context
+
+import (
+ "bytes"
+ "crypto/hmac"
+ "crypto/sha1"
+ "crypto/subtle"
+ "encoding/base64"
+ "fmt"
+ "strconv"
+ "strings"
+ "time"
+)
+
+// CsrfTokenTimeout represents the duration that XSRF tokens are valid.
+// It is exported so clients may set cookie timeouts that match generated tokens.
+const CsrfTokenTimeout = 24 * time.Hour
+
+// CsrfTokenRegenerationInterval is the interval between token generations; old tokens remain valid until CsrfTokenTimeout has elapsed
+var CsrfTokenRegenerationInterval = 10 * time.Minute
+
+var csrfTokenSep = []byte(":")
+
+// GenerateCsrfToken returns a URL-safe secure XSRF token that expires after CsrfTokenTimeout.
+// key is a secret key for your application.
+// userID is a unique identifier for the user.
+// actionID is the action the user is taking (e.g. POSTing to a particular path).
+func GenerateCsrfToken(key, userID, actionID string, now time.Time) string {
+ nowUnixNano := now.UnixNano()
+ nowUnixNanoStr := strconv.FormatInt(nowUnixNano, 10)
+ h := hmac.New(sha1.New, []byte(key))
+ h.Write([]byte(strings.ReplaceAll(userID, ":", "_")))
+ h.Write(csrfTokenSep)
+ h.Write([]byte(strings.ReplaceAll(actionID, ":", "_")))
+ h.Write(csrfTokenSep)
+ h.Write([]byte(nowUnixNanoStr))
+ tok := fmt.Sprintf("%s:%s", h.Sum(nil), nowUnixNanoStr)
+ return base64.RawURLEncoding.EncodeToString([]byte(tok))
+}
+
+func ParseCsrfToken(token string) (issueTime time.Time, ok bool) {
+ data, err := base64.RawURLEncoding.DecodeString(token)
+ if err != nil {
+ return time.Time{}, false
+ }
+
+ pos := bytes.LastIndex(data, csrfTokenSep)
+ if pos == -1 {
+ return time.Time{}, false
+ }
+ nanos, err := strconv.ParseInt(string(data[pos+1:]), 10, 64)
+ if err != nil {
+ return time.Time{}, false
+ }
+ return time.Unix(0, nanos), true
+}
+
+// ValidCsrfToken returns true if token is a valid and unexpired token returned by GenerateCsrfToken.
+func ValidCsrfToken(token, key, userID, actionID string, now time.Time) bool {
+ issueTime, ok := ParseCsrfToken(token)
+ if !ok {
+ return false
+ }
+
+ // Check that the token is not expired.
+ if now.Sub(issueTime) >= CsrfTokenTimeout {
+ return false
+ }
+
+ // Check that the token is not from the future.
+ // Allow 1-minute grace period in case the token is being verified on a
+ // machine whose clock is behind the machine that issued the token.
+ if issueTime.After(now.Add(1 * time.Minute)) {
+ return false
+ }
+
+ expected := GenerateCsrfToken(key, userID, actionID, issueTime)
+
+ // Check that the token matches the expected value.
+ // Use constant time comparison to avoid timing attacks.
+ return subtle.ConstantTimeCompare([]byte(token), []byte(expected)) == 1
+}
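
A round-trip sketch, assuming this package's import path: a token validates only for the same key, user, and action, and only inside the CsrfTokenTimeout window.

package main

import (
	"fmt"
	"time"

	gitea_context "code.gitea.io/gitea/services/context"
)

func main() {
	now := time.Now()
	tok := gitea_context.GenerateCsrfToken("secret-key", "user-42", "POST /form", now)

	fmt.Println(gitea_context.ValidCsrfToken(tok, "secret-key", "user-42", "POST /form", now.Add(time.Hour)))    // true
	fmt.Println(gitea_context.ValidCsrfToken(tok, "other-key", "user-42", "POST /form", now.Add(time.Hour)))     // false: wrong key
	fmt.Println(gitea_context.ValidCsrfToken(tok, "secret-key", "user-42", "POST /form", now.Add(25*time.Hour))) // false: expired
}
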
diff --git a/services/context/xsrf_test.go b/services/context/xsrf_test.go
new file mode 100644
index 0000000..21cda5d
--- /dev/null
+++ b/services/context/xsrf_test.go
@@ -0,0 +1,91 @@
+// Copyright 2012 Google Inc. All Rights Reserved.
+// Copyright 2014 The Macaron Authors
+// Copyright 2020 The Gitea Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// SPDX-License-Identifier: Apache-2.0
+
+package context
+
+import (
+ "encoding/base64"
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/assert"
+)
+
+const (
+ key = "quay"
+ userID = "12345678"
+ actionID = "POST /form"
+)
+
+var (
+ now = time.Now()
+ oneMinuteFromNow = now.Add(1 * time.Minute)
+)
+
+func Test_ValidToken(t *testing.T) {
+ t.Run("Validate token", func(t *testing.T) {
+ tok := GenerateCsrfToken(key, userID, actionID, now)
+ assert.True(t, ValidCsrfToken(tok, key, userID, actionID, oneMinuteFromNow))
+ assert.True(t, ValidCsrfToken(tok, key, userID, actionID, now.Add(CsrfTokenTimeout-1*time.Nanosecond)))
+ assert.True(t, ValidCsrfToken(tok, key, userID, actionID, now.Add(-1*time.Minute)))
+ })
+}
+
+// Test_SeparatorReplacement tests that separators are being correctly substituted
+func Test_SeparatorReplacement(t *testing.T) {
+ t.Run("Test two separator replacements", func(t *testing.T) {
+ assert.NotEqual(t, GenerateCsrfToken("foo:bar", "baz", "wah", now),
+ GenerateCsrfToken("foo", "bar:baz", "wah", now))
+ })
+}
+
+func Test_InvalidToken(t *testing.T) {
+ t.Run("Test invalid tokens", func(t *testing.T) {
+ invalidTokenTests := []struct {
+ name, key, userID, actionID string
+ t time.Time
+ }{
+ {"Bad key", "foobar", userID, actionID, oneMinuteFromNow},
+ {"Bad userID", key, "foobar", actionID, oneMinuteFromNow},
+ {"Bad actionID", key, userID, "foobar", oneMinuteFromNow},
+ {"Expired", key, userID, actionID, now.Add(CsrfTokenTimeout)},
+ {"More than 1 minute from the future", key, userID, actionID, now.Add(-1*time.Nanosecond - 1*time.Minute)},
+ }
+
+ tok := GenerateCsrfToken(key, userID, actionID, now)
+ for _, itt := range invalidTokenTests {
+ assert.False(t, ValidCsrfToken(tok, itt.key, itt.userID, itt.actionID, itt.t))
+ }
+ })
+}
+
+// Test_ValidateBadData primarily tests that no unexpected panics are triggered during parsing
+func Test_ValidateBadData(t *testing.T) {
+ t.Run("Validate bad data", func(t *testing.T) {
+ badDataTests := []struct {
+ name, tok string
+ }{
+ {"Invalid Base64", "ASDab24(@)$*=="},
+ {"No delimiter", base64.URLEncoding.EncodeToString([]byte("foobar12345678"))},
+ {"Invalid time", base64.URLEncoding.EncodeToString([]byte("foobar:foobar"))},
+ }
+
+ for _, bdt := range badDataTests {
+ assert.False(t, ValidCsrfToken(bdt.tok, key, userID, actionID, oneMinuteFromNow))
+ }
+ })
+}
diff --git a/services/contexttest/context_tests.go b/services/contexttest/context_tests.go
new file mode 100644
index 0000000..7c829f3
--- /dev/null
+++ b/services/contexttest/context_tests.go
@@ -0,0 +1,208 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+// Package contexttest provides utilities for testing Web/API contexts with models.
+package contexttest
+
+import (
+ gocontext "context"
+ "io"
+ "maps"
+ "net/http"
+ "net/http/httptest"
+ "net/url"
+ "strings"
+ "testing"
+ "time"
+
+ org_model "code.gitea.io/gitea/models/organization"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/templates"
+ "code.gitea.io/gitea/modules/translation"
+ "code.gitea.io/gitea/modules/web/middleware"
+ "code.gitea.io/gitea/services/context"
+
+ "github.com/go-chi/chi/v5"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func mockRequest(t *testing.T, reqPath string) *http.Request {
+ method, path, found := strings.Cut(reqPath, " ")
+ if !found {
+ method = "GET"
+ path = reqPath
+ }
+ requestURL, err := url.Parse(path)
+ require.NoError(t, err)
+ req := &http.Request{Method: method, URL: requestURL, Form: maps.Clone(requestURL.Query()), Header: http.Header{}}
+ req = req.WithContext(middleware.WithContextData(req.Context()))
+ return req
+}
+
+type MockContextOption struct {
+ Render context.Render
+}
+
+// MockContext mock context for unit tests
+func MockContext(t *testing.T, reqPath string, opts ...MockContextOption) (*context.Context, *httptest.ResponseRecorder) {
+ var opt MockContextOption
+ if len(opts) > 0 {
+ opt = opts[0]
+ }
+ if opt.Render == nil {
+ opt.Render = &MockRender{}
+ }
+ resp := httptest.NewRecorder()
+ req := mockRequest(t, reqPath)
+ base, baseCleanUp := context.NewBaseContext(resp, req)
+ _ = baseCleanUp // tests don't need to do the cleanup here. TODO: this can be improved later
+ base.Data = middleware.GetContextData(req.Context())
+ base.Locale = &translation.MockLocale{}
+
+ ctx := context.NewWebContext(base, opt.Render, nil)
+ ctx.PageData = map[string]any{}
+ ctx.Data["PageStartTime"] = time.Now()
+ chiCtx := chi.NewRouteContext()
+ ctx.Base.AppendContextValue(chi.RouteCtxKey, chiCtx)
+ return ctx, resp
+}
+
+// MockAPIContext mock context for unit tests
+func MockAPIContext(t *testing.T, reqPath string) (*context.APIContext, *httptest.ResponseRecorder) {
+ resp := httptest.NewRecorder()
+ req := mockRequest(t, reqPath)
+ base, baseCleanUp := context.NewBaseContext(resp, req)
+ base.Data = middleware.GetContextData(req.Context())
+ base.Locale = &translation.MockLocale{}
+ ctx := &context.APIContext{Base: base}
+ _ = baseCleanUp // tests don't need to do the cleanup here. TODO: this can be improved later
+
+ chiCtx := chi.NewRouteContext()
+ ctx.Base.AppendContextValue(chi.RouteCtxKey, chiCtx)
+ return ctx, resp
+}
+
+func MockPrivateContext(t *testing.T, reqPath string) (*context.PrivateContext, *httptest.ResponseRecorder) {
+ resp := httptest.NewRecorder()
+ req := mockRequest(t, reqPath)
+ base, baseCleanUp := context.NewBaseContext(resp, req)
+ base.Data = middleware.GetContextData(req.Context())
+ base.Locale = &translation.MockLocale{}
+ ctx := &context.PrivateContext{Base: base}
+ _ = baseCleanUp // tests don't need to do the cleanup here. TODO: this can be improved later
+ chiCtx := chi.NewRouteContext()
+ ctx.Base.AppendContextValue(chi.RouteCtxKey, chiCtx)
+ return ctx, resp
+}
+
+// LoadRepo loads a repo into a test context.
+func LoadRepo(t *testing.T, ctx gocontext.Context, repoID int64) {
+ var doer *user_model.User
+ repo := &context.Repository{}
+ switch ctx := ctx.(type) {
+ case *context.Context:
+ ctx.Repo = repo
+ doer = ctx.Doer
+ case *context.APIContext:
+ ctx.Repo = repo
+ doer = ctx.Doer
+ default:
+ assert.FailNow(t, "context is not *context.Context or *context.APIContext")
+ }
+
+ repo.Repository = unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: repoID})
+ var err error
+ repo.Owner, err = user_model.GetUserByID(ctx, repo.Repository.OwnerID)
+ require.NoError(t, err)
+ repo.RepoLink = repo.Repository.Link()
+ repo.Permission, err = access_model.GetUserRepoPermission(ctx, repo.Repository, doer)
+ require.NoError(t, err)
+}
+
+// LoadRepoCommit loads a repo's commit into a test context.
+func LoadRepoCommit(t *testing.T, ctx gocontext.Context) {
+ var repo *context.Repository
+ switch ctx := ctx.(type) {
+ case *context.Context:
+ repo = ctx.Repo
+ case *context.APIContext:
+ repo = ctx.Repo
+ default:
+ assert.FailNow(t, "context is not *context.Context or *context.APIContext")
+ }
+
+ if repo.GitRepo == nil {
+ assert.FailNow(t, "must call LoadGitRepo")
+ }
+
+ branch, err := repo.GitRepo.GetHEADBranch()
+ require.NoError(t, err)
+ assert.NotNil(t, branch)
+ if branch != nil {
+ repo.Commit, err = repo.GitRepo.GetBranchCommit(branch.Name)
+ require.NoError(t, err)
+ }
+}
+
+// LoadUser loads a user into a test context
+func LoadUser(t *testing.T, ctx gocontext.Context, userID int64) {
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: userID})
+ switch ctx := ctx.(type) {
+ case *context.Context:
+ ctx.Doer = doer
+ case *context.APIContext:
+ ctx.Doer = doer
+ default:
+ assert.FailNow(t, "context is not *context.Context or *context.APIContext")
+ }
+}
+
+// LoadOrganization loads an org into a test context
+func LoadOrganization(t *testing.T, ctx gocontext.Context, orgID int64) {
+ org := unittest.AssertExistsAndLoadBean(t, &org_model.Organization{ID: orgID})
+ switch ctx := ctx.(type) {
+ case *context.Context:
+ ctx.Org.Organization = org
+ case *context.APIContext:
+ ctx.Org.Organization = org
+ default:
+ assert.FailNow(t, "context is not *context.Context or *context.APIContext")
+ }
+}
+
+// LoadGitRepo loads a git repo into a test context. Requires that ctx.Repo has
+// already been populated.
+func LoadGitRepo(t *testing.T, ctx gocontext.Context) {
+ var repo *context.Repository
+ switch ctx := ctx.(type) {
+ case *context.Context:
+ repo = ctx.Repo
+ case *context.APIContext:
+ repo = ctx.Repo
+ default:
+ assert.FailNow(t, "context is not *context.Context or *context.APIContext")
+ }
+
+ require.NoError(t, repo.Repository.LoadOwner(ctx))
+ var err error
+ repo.GitRepo, err = gitrepo.OpenRepository(ctx, repo.Repository)
+ require.NoError(t, err)
+}
+
+type MockRender struct{}
+
+func (tr *MockRender) TemplateLookup(tmpl string, _ gocontext.Context) (templates.TemplateExecutor, error) {
+ return nil, nil
+}
+
+func (tr *MockRender) HTML(w io.Writer, status int, _ string, _ any, _ gocontext.Context) error {
+ if resp, ok := w.(http.ResponseWriter); ok {
+ resp.WriteHeader(status)
+ }
+ return nil
+}
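
A sketch of how these helpers combine in a handler test; it assumes the standard Gitea test fixtures are prepared (e.g. via unittest, where user2 owns repo1):

package mypackage_test

import (
	"testing"

	"code.gitea.io/gitea/services/contexttest"

	"github.com/stretchr/testify/assert"
)

func TestRepoHandler(t *testing.T) {
	// Build a mock web context and populate it from fixtures.
	ctx, resp := contexttest.MockContext(t, "GET /user2/repo1")
	contexttest.LoadUser(t, ctx, 2)
	contexttest.LoadRepo(t, ctx, 1)
	contexttest.LoadGitRepo(t, ctx)
	defer ctx.Repo.GitRepo.Close()

	// ... invoke the handler under test with ctx, then inspect resp ...
	assert.Equal(t, "user2/repo1", ctx.Repo.Repository.FullName())
	_ = resp
}
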
diff --git a/services/convert/activity.go b/services/convert/activity.go
new file mode 100644
index 0000000..01fef73
--- /dev/null
+++ b/services/convert/activity.go
@@ -0,0 +1,52 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "context"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ perm_model "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+ api "code.gitea.io/gitea/modules/structs"
+)
+
+func ToActivity(ctx context.Context, ac *activities_model.Action, doer *user_model.User) *api.Activity {
+ p, err := access_model.GetUserRepoPermission(ctx, ac.Repo, doer)
+ if err != nil {
+ log.Error("GetUserRepoPermission[%d]: %v", ac.RepoID, err)
+ p.AccessMode = perm_model.AccessModeNone
+ }
+
+ result := &api.Activity{
+ ID: ac.ID,
+ UserID: ac.UserID,
+ OpType: ac.OpType.String(),
+ ActUserID: ac.ActUserID,
+ ActUser: ToUser(ctx, ac.ActUser, doer),
+ RepoID: ac.RepoID,
+ Repo: ToRepo(ctx, ac.Repo, p),
+ RefName: ac.RefName,
+ IsPrivate: ac.IsPrivate,
+ Content: ac.Content,
+ Created: ac.CreatedUnix.AsTime(),
+ }
+
+ if ac.Comment != nil {
+ result.CommentID = ac.CommentID
+ result.Comment = ToAPIComment(ctx, ac.Repo, ac.Comment)
+ }
+
+ return result
+}
+
+func ToActivities(ctx context.Context, al activities_model.ActionList, doer *user_model.User) []*api.Activity {
+ result := make([]*api.Activity, 0, len(al))
+ for _, ac := range al {
+ result = append(result, ToActivity(ctx, ac, doer))
+ }
+ return result
+}
diff --git a/services/convert/attachment.go b/services/convert/attachment.go
new file mode 100644
index 0000000..d632c94
--- /dev/null
+++ b/services/convert/attachment.go
@@ -0,0 +1,63 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ repo_model "code.gitea.io/gitea/models/repo"
+ api "code.gitea.io/gitea/modules/structs"
+)
+
+func WebAssetDownloadURL(repo *repo_model.Repository, attach *repo_model.Attachment) string {
+ if attach.ExternalURL != "" {
+ return attach.ExternalURL
+ }
+
+ return attach.DownloadURL()
+}
+
+func APIAssetDownloadURL(repo *repo_model.Repository, attach *repo_model.Attachment) string {
+ return attach.DownloadURL()
+}
+
+// ToAttachment converts models.Attachment to api.Attachment for web usage, keeping external URLs
+func ToAttachment(repo *repo_model.Repository, a *repo_model.Attachment) *api.Attachment {
+ return toAttachment(repo, a, WebAssetDownloadURL)
+}
+
+// ToAPIAttachment converts models.Attachment to api.Attachment for API usage
+func ToAPIAttachment(repo *repo_model.Repository, a *repo_model.Attachment) *api.Attachment {
+ return toAttachment(repo, a, APIAssetDownloadURL)
+}
+
+// toAttachment converts models.Attachment to api.Attachment using the given download URL strategy
+func toAttachment(repo *repo_model.Repository, a *repo_model.Attachment, getDownloadURL func(repo *repo_model.Repository, attach *repo_model.Attachment) string) *api.Attachment {
+ var typeName string
+ if a.ExternalURL != "" {
+ typeName = "external"
+ } else {
+ typeName = "attachment"
+ }
+ return &api.Attachment{
+ ID: a.ID,
+ Name: a.Name,
+ Created: a.CreatedUnix.AsTime(),
+ DownloadCount: a.DownloadCount,
+ Size: a.Size,
+ UUID: a.UUID,
+ DownloadURL: getDownloadURL(repo, a), // for web request json and api request json, return different download urls
+ Type: typeName,
+ }
+}
+
+func ToAPIAttachments(repo *repo_model.Repository, attachments []*repo_model.Attachment) []*api.Attachment {
+ return toAttachments(repo, attachments, APIAssetDownloadURL)
+}
+
+func toAttachments(repo *repo_model.Repository, attachments []*repo_model.Attachment, getDownloadURL func(repo *repo_model.Repository, attach *repo_model.Attachment) string) []*api.Attachment {
+ converted := make([]*api.Attachment, 0, len(attachments))
+ for _, attachment := range attachments {
+ converted = append(converted, toAttachment(repo, attachment, getDownloadURL))
+ }
+ return converted
+}
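
A small sketch of the difference between the two URL helpers (the attachment values are made up, the repo argument is unused by the helpers shown above so nil suffices, and APIAssetDownloadURL's result additionally depends on the configured AppURL):

package main

import (
	"fmt"

	repo_model "code.gitea.io/gitea/models/repo"
	"code.gitea.io/gitea/services/convert"
)

func main() {
	external := &repo_model.Attachment{Name: "notes.pdf", ExternalURL: "https://example.com/notes.pdf"}

	// Web JSON keeps the external link; the API always serves Gitea's own URL.
	fmt.Println(convert.WebAssetDownloadURL(nil, external)) // https://example.com/notes.pdf
	fmt.Println(convert.APIAssetDownloadURL(nil, external)) // whatever external.DownloadURL() yields
}
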
diff --git a/services/convert/convert.go b/services/convert/convert.go
new file mode 100644
index 0000000..7a09449
--- /dev/null
+++ b/services/convert/convert.go
@@ -0,0 +1,510 @@
+// Copyright 2015 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "context"
+ "fmt"
+ "strconv"
+ "strings"
+ "time"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/auth"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/organization"
+ "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/gitdiff"
+)
+
+// ToEmail converts models.EmailAddress to api.Email
+func ToEmail(email *user_model.EmailAddress) *api.Email {
+ return &api.Email{
+ Email: email.Email,
+ Verified: email.IsActivated,
+ Primary: email.IsPrimary,
+ }
+}
+
+// ToEmailSearch converts models.SearchEmailResult to api.Email
+func ToEmailSearch(email *user_model.SearchEmailResult) *api.Email {
+ return &api.Email{
+ Email: email.Email,
+ Verified: email.IsActivated,
+ Primary: email.IsPrimary,
+ UserID: email.UID,
+ UserName: email.Name,
+ }
+}
+
+// ToBranch converts a git.Commit and git.Branch to an api.Branch
+func ToBranch(ctx context.Context, repo *repo_model.Repository, branchName string, c *git.Commit, bp *git_model.ProtectedBranch, user *user_model.User, isRepoAdmin bool) (*api.Branch, error) {
+ if bp == nil {
+ var hasPerm bool
+ var canPush bool
+ var err error
+ if user != nil {
+ hasPerm, err = access_model.HasAccessUnit(ctx, user, repo, unit.TypeCode, perm.AccessModeWrite)
+ if err != nil {
+ return nil, err
+ }
+
+ perms, err := access_model.GetUserRepoPermission(ctx, repo, user)
+ if err != nil {
+ return nil, err
+ }
+ canPush = issues_model.CanMaintainerWriteToBranch(ctx, perms, branchName, user)
+ }
+
+ return &api.Branch{
+ Name: branchName,
+ Commit: ToPayloadCommit(ctx, repo, c),
+ Protected: false,
+ RequiredApprovals: 0,
+ EnableStatusCheck: false,
+ StatusCheckContexts: []string{},
+ UserCanPush: canPush,
+ UserCanMerge: hasPerm,
+ }, nil
+ }
+
+ branch := &api.Branch{
+ Name: branchName,
+ Commit: ToPayloadCommit(ctx, repo, c),
+ Protected: true,
+ RequiredApprovals: bp.RequiredApprovals,
+ EnableStatusCheck: bp.EnableStatusCheck,
+ StatusCheckContexts: bp.StatusCheckContexts,
+ }
+
+ if isRepoAdmin {
+ branch.EffectiveBranchProtectionName = bp.RuleName
+ }
+
+ if user != nil {
+ permission, err := access_model.GetUserRepoPermission(ctx, repo, user)
+ if err != nil {
+ return nil, err
+ }
+ bp.Repo = repo
+ branch.UserCanPush = bp.CanUserPush(ctx, user)
+ branch.UserCanMerge = git_model.IsUserMergeWhitelisted(ctx, bp, user.ID, permission)
+ }
+
+ return branch, nil
+}
+
+// getWhitelistEntities returns the names of the entities that are in the whitelist
+func getWhitelistEntities[T *user_model.User | *organization.Team](entities []T, whitelistIDs []int64) []string {
+ whitelistUserIDsSet := container.SetOf(whitelistIDs...)
+ whitelistNames := make([]string, 0)
+ for _, entity := range entities {
+ switch v := any(entity).(type) {
+ case *user_model.User:
+ if whitelistUserIDsSet.Contains(v.ID) {
+ whitelistNames = append(whitelistNames, v.Name)
+ }
+ case *organization.Team:
+ if whitelistUserIDsSet.Contains(v.ID) {
+ whitelistNames = append(whitelistNames, v.Name)
+ }
+ }
+ }
+
+ return whitelistNames
+}
+
+// ToBranchProtection converts a ProtectedBranch to api.BranchProtection
+func ToBranchProtection(ctx context.Context, bp *git_model.ProtectedBranch, repo *repo_model.Repository) *api.BranchProtection {
+ readers, err := access_model.GetRepoReaders(ctx, repo)
+ if err != nil {
+ log.Error("GetRepoReaders: %v", err)
+ }
+
+ pushWhitelistUsernames := getWhitelistEntities(readers, bp.WhitelistUserIDs)
+ mergeWhitelistUsernames := getWhitelistEntities(readers, bp.MergeWhitelistUserIDs)
+ approvalsWhitelistUsernames := getWhitelistEntities(readers, bp.ApprovalsWhitelistUserIDs)
+
+ teamReaders, err := organization.OrgFromUser(repo.Owner).TeamsWithAccessToRepo(ctx, repo.ID, perm.AccessModeRead)
+ if err != nil {
+ log.Error("Repo.Owner.TeamsWithAccessToRepo: %v", err)
+ }
+
+ pushWhitelistTeams := getWhitelistEntities(teamReaders, bp.WhitelistTeamIDs)
+ mergeWhitelistTeams := getWhitelistEntities(teamReaders, bp.MergeWhitelistTeamIDs)
+ approvalsWhitelistTeams := getWhitelistEntities(teamReaders, bp.ApprovalsWhitelistTeamIDs)
+
+ branchName := ""
+ if !git_model.IsRuleNameSpecial(bp.RuleName) {
+ branchName = bp.RuleName
+ }
+
+ return &api.BranchProtection{
+ BranchName: branchName,
+ RuleName: bp.RuleName,
+ EnablePush: bp.CanPush,
+ EnablePushWhitelist: bp.EnableWhitelist,
+ PushWhitelistUsernames: pushWhitelistUsernames,
+ PushWhitelistTeams: pushWhitelistTeams,
+ PushWhitelistDeployKeys: bp.WhitelistDeployKeys,
+ EnableMergeWhitelist: bp.EnableMergeWhitelist,
+ MergeWhitelistUsernames: mergeWhitelistUsernames,
+ MergeWhitelistTeams: mergeWhitelistTeams,
+ EnableStatusCheck: bp.EnableStatusCheck,
+ StatusCheckContexts: bp.StatusCheckContexts,
+ RequiredApprovals: bp.RequiredApprovals,
+ EnableApprovalsWhitelist: bp.EnableApprovalsWhitelist,
+ ApprovalsWhitelistUsernames: approvalsWhitelistUsernames,
+ ApprovalsWhitelistTeams: approvalsWhitelistTeams,
+ BlockOnRejectedReviews: bp.BlockOnRejectedReviews,
+ BlockOnOfficialReviewRequests: bp.BlockOnOfficialReviewRequests,
+ BlockOnOutdatedBranch: bp.BlockOnOutdatedBranch,
+ DismissStaleApprovals: bp.DismissStaleApprovals,
+ IgnoreStaleApprovals: bp.IgnoreStaleApprovals,
+ RequireSignedCommits: bp.RequireSignedCommits,
+ ProtectedFilePatterns: bp.ProtectedFilePatterns,
+ UnprotectedFilePatterns: bp.UnprotectedFilePatterns,
+ ApplyToAdmins: bp.ApplyToAdmins,
+ Created: bp.CreatedUnix.AsTime(),
+ Updated: bp.UpdatedUnix.AsTime(),
+ }
+}
+
+// ToTag convert a git.Tag to an api.Tag
+func ToTag(repo *repo_model.Repository, t *git.Tag) *api.Tag {
+ return &api.Tag{
+ Name: t.Name,
+ Message: strings.TrimSpace(t.Message),
+ ID: t.ID.String(),
+ Commit: ToCommitMeta(repo, t),
+ ZipballURL: util.URLJoin(repo.HTMLURL(), "archive", t.Name+".zip"),
+ TarballURL: util.URLJoin(repo.HTMLURL(), "archive", t.Name+".tar.gz"),
+ ArchiveDownloadCount: t.ArchiveDownloadCount,
+ }
+}
+
+// ToActionTask convert an actions_model.ActionTask to an api.ActionTask
+func ToActionTask(ctx context.Context, t *actions_model.ActionTask) (*api.ActionTask, error) {
+ if err := t.LoadAttributes(ctx); err != nil {
+ return nil, err
+ }
+
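+ // GetRunLink is root-relative, so join it with the trimmed AppURL to build an absolute URL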
+ url := strings.TrimSuffix(setting.AppURL, "/") + t.GetRunLink()
+
+ return &api.ActionTask{
+ ID: t.ID,
+ Name: t.Job.Name,
+ HeadBranch: t.Job.Run.PrettyRef(),
+ HeadSHA: t.Job.CommitSHA,
+ RunNumber: t.Job.Run.Index,
+ Event: t.Job.Run.TriggerEvent,
+ DisplayTitle: t.Job.Run.Title,
+ Status: t.Status.String(),
+ WorkflowID: t.Job.Run.WorkflowID,
+ URL: url,
+ CreatedAt: t.Created.AsLocalTime(),
+ UpdatedAt: t.Updated.AsLocalTime(),
+ RunStartedAt: t.Started.AsLocalTime(),
+ }, nil
+}
+
+// ToVerification convert a git.Commit.Signature to an api.PayloadCommitVerification
+func ToVerification(ctx context.Context, c *git.Commit) *api.PayloadCommitVerification {
+ verif := asymkey_model.ParseCommitWithSignature(ctx, c)
+ commitVerification := &api.PayloadCommitVerification{
+ Verified: verif.Verified,
+ Reason: verif.Reason,
+ }
+ if c.Signature != nil {
+ commitVerification.Signature = c.Signature.Signature
+ commitVerification.Payload = c.Signature.Payload
+ }
+ if verif.SigningUser != nil {
+ commitVerification.Signer = &api.PayloadUser{
+ Name: verif.SigningUser.Name,
+ Email: verif.SigningUser.Email,
+ }
+ }
+ return commitVerification
+}
+
+// ToPublicKey convert asymkey_model.PublicKey to api.PublicKey
+func ToPublicKey(apiLink string, key *asymkey_model.PublicKey) *api.PublicKey {
+ return &api.PublicKey{
+ ID: key.ID,
+ Key: key.Content,
+ URL: fmt.Sprintf("%s%d", apiLink, key.ID),
+ Title: key.Name,
+ Fingerprint: key.Fingerprint,
+ Created: key.CreatedUnix.AsTime(),
+ }
+}
+
+// ToGPGKey converts asymkey_model.GPGKey to api.GPGKey
+func ToGPGKey(key *asymkey_model.GPGKey) *api.GPGKey {
+ subkeys := make([]*api.GPGKey, len(key.SubsKey))
+ for id, k := range key.SubsKey {
+ subkeys[id] = &api.GPGKey{
+ ID: k.ID,
+ PrimaryKeyID: k.PrimaryKeyID,
+ KeyID: k.KeyID,
+ PublicKey: k.Content,
+ Created: k.CreatedUnix.AsTime(),
+ Expires: k.ExpiredUnix.AsTime(),
+ CanSign: k.CanSign,
+ CanEncryptComms: k.CanEncryptComms,
+ CanEncryptStorage: k.CanEncryptStorage,
+ CanCertify: k.CanCertify,
+ Verified: k.Verified,
+ }
+ }
+ emails := make([]*api.GPGKeyEmail, len(key.Emails))
+ for i, e := range key.Emails {
+ emails[i] = ToGPGKeyEmail(e)
+ }
+ return &api.GPGKey{
+ ID: key.ID,
+ PrimaryKeyID: key.PrimaryKeyID,
+ KeyID: key.KeyID,
+ PublicKey: key.Content,
+ Created: key.CreatedUnix.AsTime(),
+ Expires: key.ExpiredUnix.AsTime(),
+ Emails: emails,
+ SubsKey: subkeys,
+ CanSign: key.CanSign,
+ CanEncryptComms: key.CanEncryptComms,
+ CanEncryptStorage: key.CanEncryptStorage,
+ CanCertify: key.CanCertify,
+ Verified: key.Verified,
+ }
+}
+
+// ToGPGKeyEmail convert user_model.EmailAddress to api.GPGKeyEmail
+func ToGPGKeyEmail(email *user_model.EmailAddress) *api.GPGKeyEmail {
+ return &api.GPGKeyEmail{
+ Email: email.Email,
+ Verified: email.IsActivated,
+ }
+}
+
+// ToGitHook convert git.Hook to api.GitHook
+func ToGitHook(h *git.Hook) *api.GitHook {
+ return &api.GitHook{
+ Name: h.Name(),
+ IsActive: h.IsActive,
+ Content: h.Content,
+ }
+}
+
+// ToDeployKey convert asymkey_model.DeployKey to api.DeployKey
+func ToDeployKey(apiLink string, key *asymkey_model.DeployKey) *api.DeployKey {
+ return &api.DeployKey{
+ ID: key.ID,
+ KeyID: key.KeyID,
+ Key: key.Content,
+ Fingerprint: key.Fingerprint,
+ URL: fmt.Sprintf("%s%d", apiLink, key.ID),
+ Title: key.Name,
+ Created: key.CreatedUnix.AsTime(),
+ ReadOnly: key.Mode == perm.AccessModeRead, // a deploy key is read-only unless it was granted write access
+ }
+}
+
+// ToOrganization convert organization.Organization to api.Organization
+func ToOrganization(ctx context.Context, org *organization.Organization) *api.Organization {
+ return &api.Organization{
+ ID: org.ID,
+ AvatarURL: org.AsUser().AvatarLink(ctx),
+ Name: org.Name,
+ UserName: org.Name,
+ FullName: org.FullName,
+ Email: org.Email,
+ Description: org.Description,
+ Website: org.Website,
+ Location: org.Location,
+ Visibility: org.Visibility.String(),
+ RepoAdminChangeTeamAccess: org.RepoAdminChangeTeamAccess,
+ }
+}
+
+// ToTeam convert organization.Team to api.Team
+func ToTeam(ctx context.Context, team *organization.Team, loadOrg ...bool) (*api.Team, error) {
+ teams, err := ToTeams(ctx, []*organization.Team{team}, len(loadOrg) != 0 && loadOrg[0])
+ if err != nil || len(teams) == 0 {
+ return nil, err
+ }
+ return teams[0], nil
+}
+
+// ToTeams convert organization.Team list to api.Team list
+func ToTeams(ctx context.Context, teams []*organization.Team, loadOrgs bool) ([]*api.Team, error) {
+ cache := make(map[int64]*api.Organization)
+ apiTeams := make([]*api.Team, 0, len(teams))
+ for _, t := range teams {
+ if err := t.LoadUnits(ctx); err != nil {
+ return nil, err
+ }
+
+ apiTeam := &api.Team{
+ ID: t.ID,
+ Name: t.Name,
+ Description: t.Description,
+ IncludesAllRepositories: t.IncludesAllRepositories,
+ CanCreateOrgRepo: t.CanCreateOrgRepo,
+ Permission: t.AccessMode.String(),
+ Units: t.GetUnitNames(),
+ UnitsMap: t.GetUnitsMap(),
+ }
+
+ if loadOrgs {
+ apiOrg, ok := cache[t.OrgID]
+ if !ok {
+ org, err := organization.GetOrgByID(ctx, t.OrgID)
+ if err != nil {
+ return nil, err
+ }
+ apiOrg = ToOrganization(ctx, org)
+ cache[t.OrgID] = apiOrg
+ }
+ apiTeam.Organization = apiOrg
+ }
+
+ apiTeams = append(apiTeams, apiTeam)
+ }
+ return apiTeams, nil
+}
+
+// ToAnnotatedTag convert git.Tag to api.AnnotatedTag
+func ToAnnotatedTag(ctx context.Context, repo *repo_model.Repository, t *git.Tag, c *git.Commit) *api.AnnotatedTag {
+ return &api.AnnotatedTag{
+ Tag: t.Name,
+ SHA: t.ID.String(),
+ Object: ToAnnotatedTagObject(repo, c),
+ Message: t.Message,
+ URL: util.URLJoin(repo.APIURL(), "git/tags", t.ID.String()),
+ Tagger: ToCommitUser(t.Tagger),
+ Verification: ToVerification(ctx, c),
+ ArchiveDownloadCount: t.ArchiveDownloadCount,
+ }
+}
+
+// ToAnnotatedTagObject convert a git.Commit to an api.AnnotatedTagObject
+func ToAnnotatedTagObject(repo *repo_model.Repository, commit *git.Commit) *api.AnnotatedTagObject {
+ return &api.AnnotatedTagObject{
+ SHA: commit.ID.String(),
+ Type: string(git.ObjectCommit),
+ URL: util.URLJoin(repo.APIURL(), "git/commits", commit.ID.String()),
+ }
+}
+
+// ToTagProtection convert a git_model.ProtectedTag to an api.TagProtection
+func ToTagProtection(ctx context.Context, pt *git_model.ProtectedTag, repo *repo_model.Repository) *api.TagProtection {
+ readers, err := access_model.GetRepoReaders(ctx, repo)
+ if err != nil {
+ log.Error("GetRepoReaders: %v", err)
+ }
+
+ whitelistUsernames := getWhitelistEntities(readers, pt.AllowlistUserIDs)
+
+ teamReaders, err := organization.OrgFromUser(repo.Owner).TeamsWithAccessToRepo(ctx, repo.ID, perm.AccessModeRead)
+ if err != nil {
+ log.Error("Repo.Owner.TeamsWithAccessToRepo: %v", err)
+ }
+
+ whitelistTeams := getWhitelistEntities(teamReaders, pt.AllowlistTeamIDs)
+
+ return &api.TagProtection{
+ ID: pt.ID,
+ NamePattern: pt.NamePattern,
+ WhitelistUsernames: whitelistUsernames,
+ WhitelistTeams: whitelistTeams,
+ Created: pt.CreatedUnix.AsTime(),
+ Updated: pt.UpdatedUnix.AsTime(),
+ }
+}
+
+// ToTopicResponse convert from repo_model.Topic to api.TopicResponse
+func ToTopicResponse(topic *repo_model.Topic) *api.TopicResponse {
+ return &api.TopicResponse{
+ ID: topic.ID,
+ Name: topic.Name,
+ RepoCount: topic.RepoCount,
+ Created: topic.CreatedUnix.AsTime(),
+ Updated: topic.UpdatedUnix.AsTime(),
+ }
+}
+
+// ToOAuth2Application convert from auth.OAuth2Application to api.OAuth2Application
+func ToOAuth2Application(app *auth.OAuth2Application) *api.OAuth2Application {
+ return &api.OAuth2Application{
+ ID: app.ID,
+ Name: app.Name,
+ ClientID: app.ClientID,
+ ClientSecret: app.ClientSecret,
+ ConfidentialClient: app.ConfidentialClient,
+ RedirectURIs: app.RedirectURIs,
+ Created: app.CreatedUnix.AsTime(),
+ }
+}
+
+// ToLFSLock convert a git_model.LFSLock to an api.LFSLock
+func ToLFSLock(ctx context.Context, l *git_model.LFSLock) *api.LFSLock {
+ u, err := user_model.GetUserByID(ctx, l.OwnerID)
+ if err != nil {
+ return nil
+ }
+ return &api.LFSLock{
+ ID: strconv.FormatInt(l.ID, 10),
+ Path: l.Path,
+ LockedAt: l.Created.Round(time.Second),
+ Owner: &api.LFSLockOwner{
+ Name: u.Name,
+ },
+ }
+}
+
+// ToChangedFile convert a gitdiff.DiffFile to api.ChangedFile
+func ToChangedFile(f *gitdiff.DiffFile, repo *repo_model.Repository, commit string) *api.ChangedFile {
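+ // map the gitdiff flags onto the status strings the API exposes: added, deleted, copied, renamed, changed or unchanged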
+ status := "changed"
+ previousFilename := ""
+ if f.IsDeleted {
+ status = "deleted"
+ } else if f.IsCreated {
+ status = "added"
+ } else if f.IsRenamed && f.Type == gitdiff.DiffFileCopy {
+ status = "copied"
+ } else if f.IsRenamed && f.Type == gitdiff.DiffFileRename {
+ status = "renamed"
+ previousFilename = f.OldName
+ } else if f.Addition == 0 && f.Deletion == 0 {
+ status = "unchanged"
+ }
+
+ file := &api.ChangedFile{
+ Filename: f.GetDiffFileName(),
+ Status: status,
+ Additions: f.Addition,
+ Deletions: f.Deletion,
+ Changes: f.Addition + f.Deletion,
+ PreviousFilename: previousFilename,
+ HTMLURL: fmt.Sprint(repo.HTMLURL(), "/src/commit/", commit, "/", util.PathEscapeSegments(f.GetDiffFileName())),
+ ContentsURL: fmt.Sprint(repo.APIURL(), "/contents/", util.PathEscapeSegments(f.GetDiffFileName()), "?ref=", commit),
+ RawURL: fmt.Sprint(repo.HTMLURL(), "/raw/commit/", commit, "/", util.PathEscapeSegments(f.GetDiffFileName())),
+ }
+
+ return file
+}
diff --git a/services/convert/git_commit.go b/services/convert/git_commit.go
new file mode 100644
index 0000000..e0efcdd
--- /dev/null
+++ b/services/convert/git_commit.go
@@ -0,0 +1,228 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "context"
+ "net/url"
+ "time"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ ctx "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/gitdiff"
+)
+
+// ToCommitUser convert a git.Signature to an api.CommitUser
+func ToCommitUser(sig *git.Signature) *api.CommitUser {
+ return &api.CommitUser{
+ Identity: api.Identity{
+ Name: sig.Name,
+ Email: sig.Email,
+ },
+ Date: sig.When.UTC().Format(time.RFC3339),
+ }
+}
+
+// ToCommitMeta convert a git.Tag to an api.CommitMeta
+func ToCommitMeta(repo *repo_model.Repository, tag *git.Tag) *api.CommitMeta {
+ return &api.CommitMeta{
+ SHA: tag.Object.String(),
+ URL: util.URLJoin(repo.APIURL(), "git/commits", tag.ID.String()),
+ Created: tag.Tagger.When,
+ }
+}
+
+// ToPayloadCommit convert a git.Commit to api.PayloadCommit
+func ToPayloadCommit(ctx context.Context, repo *repo_model.Repository, c *git.Commit) *api.PayloadCommit {
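+ // best-effort username resolution: an email with no matching account simply leaves the username empty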
+ authorUsername := ""
+ if author, err := user_model.GetUserByEmail(ctx, c.Author.Email); err == nil {
+ authorUsername = author.Name
+ } else if !user_model.IsErrUserNotExist(err) {
+ log.Error("GetUserByEmail: %v", err)
+ }
+
+ committerUsername := ""
+ if committer, err := user_model.GetUserByEmail(ctx, c.Committer.Email); err == nil {
+ committerUsername = committer.Name
+ } else if !user_model.IsErrUserNotExist(err) {
+ log.Error("GetUserByEmail: %v", err)
+ }
+
+ return &api.PayloadCommit{
+ ID: c.ID.String(),
+ Message: c.Message(),
+ URL: util.URLJoin(repo.HTMLURL(), "commit", c.ID.String()),
+ Author: &api.PayloadUser{
+ Name: c.Author.Name,
+ Email: c.Author.Email,
+ UserName: authorUsername,
+ },
+ Committer: &api.PayloadUser{
+ Name: c.Committer.Name,
+ Email: c.Committer.Email,
+ UserName: committerUsername,
+ },
+ Timestamp: c.Author.When,
+ Verification: ToVerification(ctx, c),
+ }
+}
+
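+// ToCommitOptions controls which optional parts of a commit (diff stats, signature verification, affected files) are converted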
+type ToCommitOptions struct {
+ Stat bool
+ Verification bool
+ Files bool
+}
+
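+// ParseCommitOptions reads the stat, files and verification query parameters; each one defaults to true when absent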
+func ParseCommitOptions(ctx *ctx.APIContext) ToCommitOptions {
+ return ToCommitOptions{
+ Stat: ctx.FormString("stat") == "" || ctx.FormBool("stat"),
+ Files: ctx.FormString("files") == "" || ctx.FormBool("files"),
+ Verification: ctx.FormString("verification") == "" || ctx.FormBool("verification"),
+ }
+}
+
+// ToCommit convert a git.Commit to api.Commit
+func ToCommit(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, commit *git.Commit, userCache map[string]*user_model.User, opts ToCommitOptions) (*api.Commit, error) {
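+ // userCache may be nil; when supplied it memoizes email-to-user lookups across a batch of commits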
+ var apiAuthor, apiCommitter *api.User
+
+ // Retrieve author and committer information
+
+ var cacheAuthor *user_model.User
+ var ok bool
+ if userCache != nil {
+ cacheAuthor, ok = userCache[commit.Author.Email]
+ }
+
+ if ok {
+ apiAuthor = ToUser(ctx, cacheAuthor, nil)
+ } else {
+ author, err := user_model.GetUserByEmail(ctx, commit.Author.Email)
+ if err != nil && !user_model.IsErrUserNotExist(err) {
+ return nil, err
+ } else if err == nil {
+ apiAuthor = ToUser(ctx, author, nil)
+ if userCache != nil {
+ userCache[commit.Author.Email] = author
+ }
+ }
+ }
+
+ var cacheCommitter *user_model.User
+ ok = false
+ if userCache != nil {
+ cacheCommitter, ok = userCache[commit.Committer.Email]
+ }
+
+ if ok {
+ apiCommitter = ToUser(ctx, cacheCommitter, nil)
+ } else {
+ committer, err := user_model.GetUserByEmail(ctx, commit.Committer.Email)
+ if err != nil && !user_model.IsErrUserNotExist(err) {
+ return nil, err
+ } else if err == nil {
+ apiCommitter = ToUser(ctx, committer, nil)
+ if userCache != nil {
+ userCache[commit.Committer.Email] = committer
+ }
+ }
+ }
+
+ // Retrieve parent(s) of the commit
+ apiParents := make([]*api.CommitMeta, commit.ParentCount())
+ for i := 0; i < commit.ParentCount(); i++ {
+ sha, _ := commit.ParentID(i)
+ apiParents[i] = &api.CommitMeta{
+ URL: repo.APIURL() + "/git/commits/" + url.PathEscape(sha.String()),
+ SHA: sha.String(),
+ }
+ }
+
+ res := &api.Commit{
+ CommitMeta: &api.CommitMeta{
+ URL: repo.APIURL() + "/git/commits/" + url.PathEscape(commit.ID.String()),
+ SHA: commit.ID.String(),
+ Created: commit.Committer.When,
+ },
+ HTMLURL: repo.HTMLURL() + "/commit/" + url.PathEscape(commit.ID.String()),
+ RepoCommit: &api.RepoCommit{
+ URL: repo.APIURL() + "/git/commits/" + url.PathEscape(commit.ID.String()),
+ Author: &api.CommitUser{
+ Identity: api.Identity{
+ Name: commit.Author.Name,
+ Email: commit.Author.Email,
+ },
+ Date: commit.Author.When.Format(time.RFC3339),
+ },
+ Committer: &api.CommitUser{
+ Identity: api.Identity{
+ Name: commit.Committer.Name,
+ Email: commit.Committer.Email,
+ },
+ Date: commit.Committer.When.Format(time.RFC3339),
+ },
+ Message: commit.Message(),
+ Tree: &api.CommitMeta{
+ URL: repo.APIURL() + "/git/trees/" + url.PathEscape(commit.ID.String()),
+ SHA: commit.ID.String(),
+ Created: commit.Committer.When,
+ },
+ },
+ Author: apiAuthor,
+ Committer: apiCommitter,
+ Parents: apiParents,
+ }
+
+ // Retrieve verification for commit
+ if opts.Verification {
+ res.RepoCommit.Verification = ToVerification(ctx, commit)
+ }
+
+ // Retrieve files affected by the commit
+ if opts.Files {
+ fileStatus, err := git.GetCommitFileStatus(gitRepo.Ctx, repo.RepoPath(), commit.ID.String())
+ if err != nil {
+ return nil, err
+ }
+
+ affectedFileList := make([]*api.CommitAffectedFiles, 0, len(fileStatus.Added)+len(fileStatus.Removed)+len(fileStatus.Modified))
+ for filestatus, files := range map[string][]string{"added": fileStatus.Added, "removed": fileStatus.Removed, "modified": fileStatus.Modified} {
+ for _, filename := range files {
+ affectedFileList = append(affectedFileList, &api.CommitAffectedFiles{
+ Filename: filename,
+ Status: filestatus,
+ })
+ }
+ }
+
+ res.Files = affectedFileList
+ }
+
+ // Get diff stats for commit
+ if opts.Stat {
+ diff, err := gitdiff.GetDiff(ctx, gitRepo, &gitdiff.DiffOptions{
+ AfterCommitID: commit.ID.String(),
+ })
+ if err != nil {
+ return nil, err
+ }
+
+ res.Stats = &api.CommitStats{
+ Total: diff.TotalAddition + diff.TotalDeletion,
+ Additions: diff.TotalAddition,
+ Deletions: diff.TotalDeletion,
+ }
+ }
+
+ return res, nil
+}
diff --git a/services/convert/git_commit_test.go b/services/convert/git_commit_test.go
new file mode 100644
index 0000000..68d1b05
--- /dev/null
+++ b/services/convert/git_commit_test.go
@@ -0,0 +1,42 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "testing"
+ "time"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/git"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestToCommitMeta(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ headRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ sha1 := git.Sha1ObjectFormat
+ signature := &git.Signature{Name: "Test Signature", Email: "test@email.com", When: time.Unix(0, 0)}
+ tag := &git.Tag{
+ Name: "Test Tag",
+ ID: sha1.EmptyObjectID(),
+ Object: sha1.EmptyObjectID(),
+ Type: "Test Type",
+ Tagger: signature,
+ Message: "Test Message",
+ }
+
+ commitMeta := ToCommitMeta(headRepo, tag)
+
+ assert.NotNil(t, commitMeta)
+ assert.EqualValues(t, &api.CommitMeta{
+ SHA: sha1.EmptyObjectID().String(),
+ URL: util.URLJoin(headRepo.APIURL(), "git/commits", sha1.EmptyObjectID().String()),
+ Created: time.Unix(0, 0),
+ }, commitMeta)
+}
diff --git a/services/convert/issue.go b/services/convert/issue.go
new file mode 100644
index 0000000..f514dc4
--- /dev/null
+++ b/services/convert/issue.go
@@ -0,0 +1,288 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "context"
+ "fmt"
+ "net/url"
+ "strings"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/label"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+)
+
+func ToIssue(ctx context.Context, doer *user_model.User, issue *issues_model.Issue) *api.Issue {
+ return toIssue(ctx, doer, issue, WebAssetDownloadURL)
+}
+
+// ToAPIIssue converts an Issue to API format
+// it assumes that some fields have been assigned values:
+// Required - Poster, Labels,
+// Optional - Milestone, Assignee, PullRequest
+func ToAPIIssue(ctx context.Context, doer *user_model.User, issue *issues_model.Issue) *api.Issue {
+ return toIssue(ctx, doer, issue, APIAssetDownloadURL)
+}
+
+func toIssue(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, getDownloadURL func(repo *repo_model.Repository, attach *repo_model.Attachment) string) *api.Issue {
+ if err := issue.LoadPoster(ctx); err != nil {
+ return &api.Issue{}
+ }
+ if err := issue.LoadRepo(ctx); err != nil {
+ return &api.Issue{}
+ }
+ if err := issue.LoadAttachments(ctx); err != nil {
+ return &api.Issue{}
+ }
+
+ apiIssue := &api.Issue{
+ ID: issue.ID,
+ Index: issue.Index,
+ Poster: ToUser(ctx, issue.Poster, doer),
+ Title: issue.Title,
+ Body: issue.Content,
+ Attachments: toAttachments(issue.Repo, issue.Attachments, getDownloadURL),
+ Ref: issue.Ref,
+ State: issue.State(),
+ IsLocked: issue.IsLocked,
+ Comments: issue.NumComments,
+ Created: issue.CreatedUnix.AsTime(),
+ Updated: issue.UpdatedUnix.AsTime(),
+ PinOrder: issue.PinOrder,
+ }
+
+ if issue.Repo != nil {
+ if err := issue.Repo.LoadOwner(ctx); err != nil {
+ return &api.Issue{}
+ }
+ apiIssue.URL = issue.APIURL(ctx)
+ apiIssue.HTMLURL = issue.HTMLURL()
+ if err := issue.LoadLabels(ctx); err != nil {
+ return &api.Issue{}
+ }
+ apiIssue.Labels = ToLabelList(issue.Labels, issue.Repo, issue.Repo.Owner)
+ apiIssue.Repo = &api.RepositoryMeta{
+ ID: issue.Repo.ID,
+ Name: issue.Repo.Name,
+ Owner: issue.Repo.OwnerName,
+ FullName: issue.Repo.FullName(),
+ }
+ }
+
+ if issue.ClosedUnix != 0 {
+ apiIssue.Closed = issue.ClosedUnix.AsTimePtr()
+ }
+
+ if err := issue.LoadMilestone(ctx); err != nil {
+ return &api.Issue{}
+ }
+ if issue.Milestone != nil {
+ apiIssue.Milestone = ToAPIMilestone(issue.Milestone)
+ }
+
+ if err := issue.LoadAssignees(ctx); err != nil {
+ return &api.Issue{}
+ }
+ if len(issue.Assignees) > 0 {
+ for _, assignee := range issue.Assignees {
+ apiIssue.Assignees = append(apiIssue.Assignees, ToUser(ctx, assignee, nil))
+ }
+ apiIssue.Assignee = ToUser(ctx, issue.Assignees[0], nil) // For compatibility, we're keeping the first assignee as `apiIssue.Assignee`
+ }
+ if issue.IsPull {
+ if err := issue.LoadPullRequest(ctx); err != nil {
+ return &api.Issue{}
+ }
+ if issue.PullRequest != nil {
+ apiIssue.PullRequest = &api.PullRequestMeta{
+ HasMerged: issue.PullRequest.HasMerged,
+ IsWorkInProgress: issue.PullRequest.IsWorkInProgress(ctx),
+ }
+ if issue.PullRequest.HasMerged {
+ apiIssue.PullRequest.Merged = issue.PullRequest.MergedUnix.AsTimePtr()
+ }
+ // Add pr's html url
+ apiIssue.PullRequest.HTMLURL = issue.HTMLURL()
+ }
+ }
+ if issue.DeadlineUnix != 0 {
+ apiIssue.Deadline = issue.DeadlineUnix.AsTimePtr()
+ }
+
+ return apiIssue
+}
+
+// ToIssueList converts an IssueList to API format
+func ToIssueList(ctx context.Context, doer *user_model.User, il issues_model.IssueList) []*api.Issue {
+ result := make([]*api.Issue, len(il))
+ for i := range il {
+ result[i] = ToIssue(ctx, doer, il[i])
+ }
+ return result
+}
+
+// ToAPIIssueList converts an IssueList to API format
+func ToAPIIssueList(ctx context.Context, doer *user_model.User, il issues_model.IssueList) []*api.Issue {
+ result := make([]*api.Issue, len(il))
+ for i := range il {
+ result[i] = ToAPIIssue(ctx, doer, il[i])
+ }
+ return result
+}
+
+// ToTrackedTime converts TrackedTime to API format
+func ToTrackedTime(ctx context.Context, doer *user_model.User, t *issues_model.TrackedTime) (apiT *api.TrackedTime) {
+ apiT = &api.TrackedTime{
+ ID: t.ID,
+ IssueID: t.IssueID,
+ UserID: t.UserID,
+ Time: t.Time,
+ Created: t.Created,
+ }
+ if t.Issue != nil {
+ apiT.Issue = ToAPIIssue(ctx, doer, t.Issue)
+ }
+ if t.User != nil {
+ apiT.UserName = t.User.Name
+ }
+ return apiT
+}
+
+// ToStopWatches convert Stopwatch list to api.StopWatches
+func ToStopWatches(ctx context.Context, sws []*issues_model.Stopwatch) (api.StopWatches, error) {
+ result := api.StopWatches(make([]api.StopWatch, 0, len(sws)))
+
+ issueCache := make(map[int64]*issues_model.Issue)
+ repoCache := make(map[int64]*repo_model.Repository)
+ var (
+ issue *issues_model.Issue
+ repo *repo_model.Repository
+ ok bool
+ err error
+ )
+
+ for _, sw := range sws {
+ issue, ok = issueCache[sw.IssueID]
+ if !ok {
+ issue, err = issues_model.GetIssueByID(ctx, sw.IssueID)
+ if err != nil {
+ return nil, err
+ }
+ // populate the cache so later stopwatches on the same issue skip the query
+ issueCache[sw.IssueID] = issue
+ }
+ repo, ok = repoCache[issue.RepoID]
+ if !ok {
+ repo, err = repo_model.GetRepositoryByID(ctx, issue.RepoID)
+ if err != nil {
+ return nil, err
+ }
+ repoCache[issue.RepoID] = repo
+ }
+
+ result = append(result, api.StopWatch{
+ Created: sw.CreatedUnix.AsTime(),
+ Seconds: sw.Seconds(),
+ Duration: sw.Duration(),
+ IssueIndex: issue.Index,
+ IssueTitle: issue.Title,
+ RepoOwnerName: repo.OwnerName,
+ RepoName: repo.Name,
+ })
+ }
+ return result, nil
+}
+
+// ToTrackedTimeList converts TrackedTimeList to API format
+func ToTrackedTimeList(ctx context.Context, doer *user_model.User, tl issues_model.TrackedTimeList) api.TrackedTimeList {
+ result := make([]*api.TrackedTime, 0, len(tl))
+ for _, t := range tl {
+ result = append(result, ToTrackedTime(ctx, doer, t))
+ }
+ return result
+}
+
+// ToLabel converts Label to API format
+func ToLabel(label *issues_model.Label, repo *repo_model.Repository, org *user_model.User) *api.Label {
+ result := &api.Label{
+ ID: label.ID,
+ Name: label.Name,
+ Exclusive: label.Exclusive,
+ Color: strings.TrimLeft(label.Color, "#"),
+ Description: label.Description,
+ IsArchived: label.IsArchived(),
+ }
+
+ labelBelongsToRepo := label.BelongsToRepo()
+
+ // calculate URL
+ if labelBelongsToRepo && repo != nil {
+ result.URL = fmt.Sprintf("%s/labels/%d", repo.APIURL(), label.ID)
+ } else { // BelongsToOrg
+ if org != nil {
+ result.URL = fmt.Sprintf("%sapi/v1/orgs/%s/labels/%d", setting.AppURL, url.PathEscape(org.Name), label.ID)
+ } else {
+ log.Error("ToLabel did not get org to calculate url for label with id '%d'", label.ID)
+ }
+ }
+
+ if labelBelongsToRepo && repo == nil {
+ log.Error("ToLabel did not get repo to calculate url for label with id '%d'", label.ID)
+ }
+
+ return result
+}
+
+// ToLabelList converts list of Label to API format
+func ToLabelList(labels []*issues_model.Label, repo *repo_model.Repository, org *user_model.User) []*api.Label {
+ result := make([]*api.Label, len(labels))
+ for i := range labels {
+ result[i] = ToLabel(labels[i], repo, org)
+ }
+ return result
+}
+
+// ToAPIMilestone converts Milestone into API Format
+func ToAPIMilestone(m *issues_model.Milestone) *api.Milestone {
+ apiMilestone := &api.Milestone{
+ ID: m.ID,
+ State: m.State(),
+ Title: m.Name,
+ Description: m.Content,
+ OpenIssues: m.NumOpenIssues,
+ ClosedIssues: m.NumClosedIssues,
+ Created: m.CreatedUnix.AsTime(),
+ Updated: m.UpdatedUnix.AsTimePtr(),
+ }
+ if m.IsClosed {
+ apiMilestone.Closed = m.ClosedDateUnix.AsTimePtr()
+ }
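+ // a deadline stored in year 9999 appears to act as the "no deadline" sentinel, so only expose earlier deadlines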
+ if m.DeadlineUnix.Year() < 9999 {
+ apiMilestone.Deadline = m.DeadlineUnix.AsTimePtr()
+ }
+ return apiMilestone
+}
+
+// ToLabelTemplate converts Label to API format
+func ToLabelTemplate(label *label.Label) *api.LabelTemplate {
+ result := &api.LabelTemplate{
+ Name: label.Name,
+ Exclusive: label.Exclusive,
+ Color: strings.TrimLeft(label.Color, "#"),
+ Description: label.Description,
+ }
+
+ return result
+}
+
+// ToLabelTemplateList converts list of Label to API format
+func ToLabelTemplateList(labels []*label.Label) []*api.LabelTemplate {
+ result := make([]*api.LabelTemplate, len(labels))
+ for i := range labels {
+ result[i] = ToLabelTemplate(labels[i])
+ }
+ return result
+}
diff --git a/services/convert/issue_comment.go b/services/convert/issue_comment.go
new file mode 100644
index 0000000..9ec9ac7
--- /dev/null
+++ b/services/convert/issue_comment.go
@@ -0,0 +1,187 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "context"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+)
+
+// ToAPIComment converts an issues_model.Comment to the api.Comment format for API usage
+func ToAPIComment(ctx context.Context, repo *repo_model.Repository, c *issues_model.Comment) *api.Comment {
+ return &api.Comment{
+ ID: c.ID,
+ Poster: ToUser(ctx, c.Poster, nil),
+ HTMLURL: c.HTMLURL(ctx),
+ IssueURL: c.IssueURL(ctx),
+ PRURL: c.PRURL(ctx),
+ Body: c.Content,
+ Attachments: ToAPIAttachments(repo, c.Attachments),
+ Created: c.CreatedUnix.AsTime(),
+ Updated: c.UpdatedUnix.AsTime(),
+ }
+}
+
+// ToTimelineComment converts an issues_model.Comment to the api.TimelineComment format
+func ToTimelineComment(ctx context.Context, repo *repo_model.Repository, c *issues_model.Comment, doer *user_model.User) *api.TimelineComment {
+ err := c.LoadMilestone(ctx)
+ if err != nil {
+ log.Error("LoadMilestone: %v", err)
+ return nil
+ }
+
+ err = c.LoadAssigneeUserAndTeam(ctx)
+ if err != nil {
+ log.Error("LoadAssigneeUserAndTeam: %v", err)
+ return nil
+ }
+
+ err = c.LoadResolveDoer(ctx)
+ if err != nil {
+ log.Error("LoadResolveDoer: %v", err)
+ return nil
+ }
+
+ err = c.LoadDepIssueDetails(ctx)
+ if err != nil {
+ log.Error("LoadDepIssueDetails: %v", err)
+ return nil
+ }
+
+ err = c.LoadTime(ctx)
+ if err != nil {
+ log.Error("LoadTime: %v", err)
+ return nil
+ }
+
+ err = c.LoadLabel(ctx)
+ if err != nil {
+ log.Error("LoadLabel: %v", err)
+ return nil
+ }
+
+ if c.Content != "" {
+ if (c.Type == issues_model.CommentTypeAddTimeManual ||
+ c.Type == issues_model.CommentTypeStopTracking ||
+ c.Type == issues_model.CommentTypeDeleteTimeManual) &&
+ c.Content[0] == '|' {
+ // time tracking comments from v1.21 onward store seconds instead of a formatted string,
+ // so we check for the "|" delimiter and convert the new format to the legacy one on demand
+ c.Content = util.SecToTime(c.Content[1:])
+ }
+ }
+
+ comment := &api.TimelineComment{
+ ID: c.ID,
+ Type: c.Type.String(),
+ Poster: ToUser(ctx, c.Poster, nil),
+ HTMLURL: c.HTMLURL(ctx),
+ IssueURL: c.IssueURL(ctx),
+ PRURL: c.PRURL(ctx),
+ Body: c.Content,
+ Created: c.CreatedUnix.AsTime(),
+ Updated: c.UpdatedUnix.AsTime(),
+
+ OldProjectID: c.OldProjectID,
+ ProjectID: c.ProjectID,
+
+ OldTitle: c.OldTitle,
+ NewTitle: c.NewTitle,
+
+ OldRef: c.OldRef,
+ NewRef: c.NewRef,
+
+ RefAction: c.RefAction.String(),
+ RefCommitSHA: c.CommitSHA,
+
+ ReviewID: c.ReviewID,
+
+ RemovedAssignee: c.RemovedAssignee,
+ }
+
+ if c.OldMilestone != nil {
+ comment.OldMilestone = ToAPIMilestone(c.OldMilestone)
+ }
+ if c.Milestone != nil {
+ comment.Milestone = ToAPIMilestone(c.Milestone)
+ }
+
+ if c.Time != nil {
+ err = c.Time.LoadAttributes(ctx)
+ if err != nil {
+ log.Error("Time.LoadAttributes: %v", err)
+ return nil
+ }
+
+ comment.TrackedTime = ToTrackedTime(ctx, doer, c.Time)
+ }
+
+ if c.RefIssueID != 0 {
+ issue, err := issues_model.GetIssueByID(ctx, c.RefIssueID)
+ if err != nil {
+ log.Error("GetIssueByID(%d): %v", c.RefIssueID, err)
+ return nil
+ }
+ comment.RefIssue = ToAPIIssue(ctx, doer, issue)
+ }
+
+ if c.RefCommentID != 0 {
+ com, err := issues_model.GetCommentByID(ctx, c.RefCommentID)
+ if err != nil {
+ log.Error("GetCommentByID(%d): %v", c.RefCommentID, err)
+ return nil
+ }
+ err = com.LoadPoster(ctx)
+ if err != nil {
+ log.Error("LoadPoster: %v", err)
+ return nil
+ }
+ comment.RefComment = ToAPIComment(ctx, repo, com)
+ }
+
+ if c.Label != nil {
+ var org *user_model.User
+ var repo *repo_model.Repository
+ if c.Label.BelongsToOrg() {
+ var err error
+ org, err = user_model.GetUserByID(ctx, c.Label.OrgID)
+ if err != nil {
+ log.Error("GetUserByID(%d): %v", c.Label.OrgID, err)
+ return nil
+ }
+ }
+ if c.Label.BelongsToRepo() {
+ var err error
+ repo, err = repo_model.GetRepositoryByID(ctx, c.Label.RepoID)
+ if err != nil {
+ log.Error("GetRepositoryByID(%d): %v", c.Label.RepoID, err)
+ return nil
+ }
+ }
+ comment.Label = ToLabel(c.Label, repo, org)
+ }
+
+ if c.Assignee != nil {
+ comment.Assignee = ToUser(ctx, c.Assignee, nil)
+ }
+ if c.AssigneeTeam != nil {
+ comment.AssigneeTeam, _ = ToTeam(ctx, c.AssigneeTeam)
+ }
+
+ if c.ResolveDoer != nil {
+ comment.ResolveDoer = ToUser(ctx, c.ResolveDoer, nil)
+ }
+
+ if c.DependentIssue != nil {
+ comment.DependentIssue = ToAPIIssue(ctx, doer, c.DependentIssue)
+ }
+
+ return comment
+}
diff --git a/services/convert/issue_test.go b/services/convert/issue_test.go
new file mode 100644
index 0000000..0aeb3e5
--- /dev/null
+++ b/services/convert/issue_test.go
@@ -0,0 +1,58 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "fmt"
+ "testing"
+ "time"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestLabel_ToLabel(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ label := unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 1})
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: label.RepoID})
+ assert.Equal(t, &api.Label{
+ ID: label.ID,
+ Name: label.Name,
+ Color: "abcdef",
+ URL: fmt.Sprintf("%sapi/v1/repos/user2/repo1/labels/%d", setting.AppURL, label.ID),
+ }, ToLabel(label, repo, nil))
+}
+
+func TestMilestone_APIFormat(t *testing.T) {
+ milestone := &issues_model.Milestone{
+ ID: 3,
+ RepoID: 4,
+ Name: "milestoneName",
+ Content: "milestoneContent",
+ IsClosed: false,
+ NumOpenIssues: 5,
+ NumClosedIssues: 6,
+ CreatedUnix: timeutil.TimeStamp(time.Date(1999, time.January, 1, 0, 0, 0, 0, time.UTC).Unix()),
+ UpdatedUnix: timeutil.TimeStamp(time.Date(1999, time.March, 1, 0, 0, 0, 0, time.UTC).Unix()),
+ DeadlineUnix: timeutil.TimeStamp(time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC).Unix()),
+ }
+ assert.Equal(t, api.Milestone{
+ ID: milestone.ID,
+ State: api.StateOpen,
+ Title: milestone.Name,
+ Description: milestone.Content,
+ OpenIssues: milestone.NumOpenIssues,
+ ClosedIssues: milestone.NumClosedIssues,
+ Created: milestone.CreatedUnix.AsTime(),
+ Updated: milestone.UpdatedUnix.AsTimePtr(),
+ Deadline: milestone.DeadlineUnix.AsTimePtr(),
+ }, *ToAPIMilestone(milestone))
+}
diff --git a/services/convert/main_test.go b/services/convert/main_test.go
new file mode 100644
index 0000000..363cc4a
--- /dev/null
+++ b/services/convert/main_test.go
@@ -0,0 +1,16 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+
+ _ "code.gitea.io/gitea/models/actions"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
diff --git a/services/convert/mirror.go b/services/convert/mirror.go
new file mode 100644
index 0000000..85e0d1c
--- /dev/null
+++ b/services/convert/mirror.go
@@ -0,0 +1,27 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "context"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ api "code.gitea.io/gitea/modules/structs"
+)
+
+// ToPushMirror convert a repo_model.PushMirror to an api.PushMirror
+func ToPushMirror(ctx context.Context, pm *repo_model.PushMirror) (*api.PushMirror, error) {
+ repo := pm.GetRepository(ctx)
+ return &api.PushMirror{
+ RepoName: repo.Name,
+ RemoteName: pm.RemoteName,
+ RemoteAddress: pm.RemoteAddress,
+ CreatedUnix: pm.CreatedUnix.AsTime(),
+ LastUpdateUnix: pm.LastUpdateUnix.AsTimePtr(),
+ LastError: pm.LastError,
+ Interval: pm.Interval.String(),
+ SyncOnCommit: pm.SyncOnCommit,
+ PublicKey: pm.GetPublicKey(),
+ }, nil
+}
diff --git a/services/convert/notification.go b/services/convert/notification.go
new file mode 100644
index 0000000..41063cf
--- /dev/null
+++ b/services/convert/notification.go
@@ -0,0 +1,98 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "context"
+ "net/url"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ api "code.gitea.io/gitea/modules/structs"
+)
+
+// ToNotificationThread convert a Notification to api.NotificationThread
+func ToNotificationThread(ctx context.Context, n *activities_model.Notification) *api.NotificationThread {
+ result := &api.NotificationThread{
+ ID: n.ID,
+ Unread: !(n.Status == activities_model.NotificationStatusRead || n.Status == activities_model.NotificationStatusPinned),
+ Pinned: n.Status == activities_model.NotificationStatusPinned,
+ UpdatedAt: n.UpdatedUnix.AsTime(),
+ URL: n.APIURL(),
+ }
+
+ // the user only receives notifications for repositories they can access, so assume minimal read access here
+ if n.Repository != nil {
+ result.Repository = ToRepo(ctx, n.Repository, access_model.Permission{AccessMode: perm.AccessModeRead})
+
+ // This permission is not correct and we should not be reporting it
+ for repository := result.Repository; repository != nil; repository = repository.Parent {
+ repository.Permissions = nil
+ }
+ }
+
+ // handle Subject
+ switch n.Source {
+ case activities_model.NotificationSourceIssue:
+ result.Subject = &api.NotificationSubject{Type: api.NotifySubjectIssue}
+ if n.Issue != nil {
+ result.Subject.Title = n.Issue.Title
+ result.Subject.URL = n.Issue.APIURL(ctx)
+ result.Subject.HTMLURL = n.Issue.HTMLURL()
+ result.Subject.State = n.Issue.State()
+ comment, err := n.Issue.GetLastComment(ctx)
+ if err == nil && comment != nil {
+ result.Subject.LatestCommentURL = comment.APIURL(ctx)
+ result.Subject.LatestCommentHTMLURL = comment.HTMLURL(ctx)
+ }
+ }
+ case activities_model.NotificationSourcePullRequest:
+ result.Subject = &api.NotificationSubject{Type: api.NotifySubjectPull}
+ if n.Issue != nil {
+ result.Subject.Title = n.Issue.Title
+ result.Subject.URL = n.Issue.APIURL(ctx)
+ result.Subject.HTMLURL = n.Issue.HTMLURL()
+ result.Subject.State = n.Issue.State()
+ comment, err := n.Issue.GetLastComment(ctx)
+ if err == nil && comment != nil {
+ result.Subject.LatestCommentURL = comment.APIURL(ctx)
+ result.Subject.LatestCommentHTMLURL = comment.HTMLURL(ctx)
+ }
+
+ if err := n.Issue.LoadPullRequest(ctx); err == nil &&
+ n.Issue.PullRequest != nil &&
+ n.Issue.PullRequest.HasMerged {
+ result.Subject.State = "merged"
+ }
+ }
+ case activities_model.NotificationSourceCommit:
+ url := n.Repository.HTMLURL() + "/commit/" + url.PathEscape(n.CommitID)
+ result.Subject = &api.NotificationSubject{
+ Type: api.NotifySubjectCommit,
+ Title: n.CommitID,
+ URL: url,
+ HTMLURL: url,
+ }
+ case activities_model.NotificationSourceRepository:
+ result.Subject = &api.NotificationSubject{
+ Type: api.NotifySubjectRepository,
+ Title: n.Repository.FullName(),
+ // FIXME: this is a relative URL, rather useless and inconsistent, but keeping for backwards compat
+ URL: n.Repository.Link(),
+ HTMLURL: n.Repository.HTMLURL(),
+ }
+ }
+
+ return result
+}
+
+// ToNotifications convert list of Notification to api.NotificationThread list
+func ToNotifications(ctx context.Context, nl activities_model.NotificationList) []*api.NotificationThread {
+ result := make([]*api.NotificationThread, 0, len(nl))
+ for _, n := range nl {
+ result = append(result, ToNotificationThread(ctx, n))
+ }
+ return result
+}
diff --git a/services/convert/package.go b/services/convert/package.go
new file mode 100644
index 0000000..b5fca21
--- /dev/null
+++ b/services/convert/package.go
@@ -0,0 +1,53 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/packages"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ user_model "code.gitea.io/gitea/models/user"
+ api "code.gitea.io/gitea/modules/structs"
+)
+
+// ToPackage convert a packages.PackageDescriptor to api.Package
+func ToPackage(ctx context.Context, pd *packages.PackageDescriptor, doer *user_model.User) (*api.Package, error) {
+ var repo *api.Repository
+ if pd.Repository != nil {
+ permission, err := access_model.GetUserRepoPermission(ctx, pd.Repository, doer)
+ if err != nil {
+ return nil, err
+ }
+
+ if permission.HasAccess() {
+ repo = ToRepo(ctx, pd.Repository, permission)
+ }
+ }
+
+ return &api.Package{
+ ID: pd.Version.ID,
+ Owner: ToUser(ctx, pd.Owner, doer),
+ Repository: repo,
+ Creator: ToUser(ctx, pd.Creator, doer),
+ Type: string(pd.Package.Type),
+ Name: pd.Package.Name,
+ Version: pd.Version.Version,
+ CreatedAt: pd.Version.CreatedUnix.AsTime(),
+ HTMLURL: pd.VersionHTMLURL(),
+ }, nil
+}
+
+// ToPackageFile converts packages.PackageFileDescriptor to api.PackageFile
+func ToPackageFile(pfd *packages.PackageFileDescriptor) *api.PackageFile {
+ return &api.PackageFile{
+ ID: pfd.File.ID,
+ Size: pfd.Blob.Size,
+ Name: pfd.File.Name,
+ HashMD5: pfd.Blob.HashMD5,
+ HashSHA1: pfd.Blob.HashSHA1,
+ HashSHA256: pfd.Blob.HashSHA256,
+ HashSHA512: pfd.Blob.HashSHA512,
+ }
+}
diff --git a/services/convert/pull.go b/services/convert/pull.go
new file mode 100644
index 0000000..4ec24a8
--- /dev/null
+++ b/services/convert/pull.go
@@ -0,0 +1,261 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "context"
+ "fmt"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/cache"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ api "code.gitea.io/gitea/modules/structs"
+)
+
+// ToAPIPullRequest assumes the following fields have been assigned valid values:
+// Required - Issue
+// Optional - Merger
+func ToAPIPullRequest(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User) *api.PullRequest {
+ var (
+ baseBranch *git.Branch
+ headBranch *git.Branch
+ baseCommit *git.Commit
+ err error
+ )
+
+ if err = pr.Issue.LoadRepo(ctx); err != nil {
+ log.Error("pr.Issue.LoadRepo[%d]: %v", pr.ID, err)
+ return nil
+ }
+
+ apiIssue := ToAPIIssue(ctx, doer, pr.Issue)
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ log.Error("GetRepositoryById[%d]: %v", pr.ID, err)
+ return nil
+ }
+
+ if err := pr.LoadHeadRepo(ctx); err != nil {
+ log.Error("GetRepositoryById[%d]: %v", pr.ID, err)
+ return nil
+ }
+
+ var doerID int64
+ if doer != nil {
+ doerID = doer.ID
+ }
+
+ const repoDoerPermCacheKey = "repo_doer_perm_cache"
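+ // cache the base-repo permission per (repo, doer) pair within the request context to avoid repeated lookups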
+ p, err := cache.GetWithContextCache(ctx, repoDoerPermCacheKey, fmt.Sprintf("%d_%d", pr.BaseRepoID, doerID),
+ func() (access_model.Permission, error) {
+ return access_model.GetUserRepoPermission(ctx, pr.BaseRepo, doer)
+ })
+ if err != nil {
+ log.Error("GetUserRepoPermission[%d]: %v", pr.BaseRepoID, err)
+ p.AccessMode = perm.AccessModeNone
+ }
+
+ apiPullRequest := &api.PullRequest{
+ ID: pr.ID,
+ URL: pr.Issue.HTMLURL(),
+ Index: pr.Index,
+ Poster: apiIssue.Poster,
+ Title: apiIssue.Title,
+ Body: apiIssue.Body,
+ Labels: apiIssue.Labels,
+ Milestone: apiIssue.Milestone,
+ Assignee: apiIssue.Assignee,
+ Assignees: apiIssue.Assignees,
+ State: apiIssue.State,
+ Draft: pr.IsWorkInProgress(ctx),
+ IsLocked: apiIssue.IsLocked,
+ Comments: apiIssue.Comments,
+ ReviewComments: pr.GetReviewCommentsCount(ctx),
+ HTMLURL: pr.Issue.HTMLURL(),
+ DiffURL: pr.Issue.DiffURL(),
+ PatchURL: pr.Issue.PatchURL(),
+ HasMerged: pr.HasMerged,
+ MergeBase: pr.MergeBase,
+ Mergeable: pr.Mergeable(ctx),
+ Deadline: apiIssue.Deadline,
+ Created: pr.Issue.CreatedUnix.AsTimePtr(),
+ Updated: pr.Issue.UpdatedUnix.AsTimePtr(),
+ PinOrder: apiIssue.PinOrder,
+
+ AllowMaintainerEdit: pr.AllowMaintainerEdit,
+
+ Base: &api.PRBranchInfo{
+ Name: pr.BaseBranch,
+ Ref: pr.BaseBranch,
+ RepoID: pr.BaseRepoID,
+ Repository: ToRepo(ctx, pr.BaseRepo, p),
+ },
+ Head: &api.PRBranchInfo{
+ Name: pr.HeadBranch,
+ Ref: fmt.Sprintf("%s%d/head", git.PullPrefix, pr.Index),
+ RepoID: -1,
+ },
+ }
+
+ if err = pr.LoadRequestedReviewers(ctx); err != nil {
+ log.Error("LoadRequestedReviewers[%d]: %v", pr.ID, err)
+ return nil
+ }
+ if err = pr.LoadRequestedReviewersTeams(ctx); err != nil {
+ log.Error("LoadRequestedReviewersTeams[%d]: %v", pr.ID, err)
+ return nil
+ }
+
+ for _, reviewer := range pr.RequestedReviewers {
+ apiPullRequest.RequestedReviewers = append(apiPullRequest.RequestedReviewers, ToUser(ctx, reviewer, nil))
+ }
+
+ for _, reviewerTeam := range pr.RequestedReviewersTeams {
+ convertedTeam, err := ToTeam(ctx, reviewerTeam, true)
+ if err != nil {
+ log.Error("LoadRequestedReviewersTeams[%d]: %v", pr.ID, err)
+ return nil
+ }
+
+ apiPullRequest.RequestedReviewersTeams = append(apiPullRequest.RequestedReviewersTeams, convertedTeam)
+ }
+
+ if pr.Issue.ClosedUnix != 0 {
+ apiPullRequest.Closed = pr.Issue.ClosedUnix.AsTimePtr()
+ }
+
+ gitRepo, err := gitrepo.OpenRepository(ctx, pr.BaseRepo)
+ if err != nil {
+ log.Error("OpenRepository[%s]: %v", pr.BaseRepo.RepoPath(), err)
+ return nil
+ }
+ defer gitRepo.Close()
+
+ baseBranch, err = gitRepo.GetBranch(pr.BaseBranch)
+ if err != nil && !git.IsErrBranchNotExist(err) {
+ log.Error("GetBranch[%s]: %v", pr.BaseBranch, err)
+ return nil
+ }
+
+ if err == nil {
+ baseCommit, err = baseBranch.GetCommit()
+ if err != nil && !git.IsErrNotExist(err) {
+ log.Error("GetCommit[%s]: %v", baseBranch.Name, err)
+ return nil
+ }
+
+ if err == nil {
+ apiPullRequest.Base.Sha = baseCommit.ID.String()
+ }
+ }
+
+ if pr.Flow == issues_model.PullRequestFlowAGit {
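+ // AGit pull requests have no separate head repository; the head commit lives in a ref of the base repository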
+ gitRepo, err := gitrepo.OpenRepository(ctx, pr.BaseRepo)
+ if err != nil {
+ log.Error("OpenRepository[%s]: %v", pr.GetGitRefName(), err)
+ return nil
+ }
+ defer gitRepo.Close()
+
+ apiPullRequest.Head.Sha, err = gitRepo.GetRefCommitID(pr.GetGitRefName())
+ if err != nil {
+ log.Error("GetRefCommitID[%s]: %v", pr.GetGitRefName(), err)
+ return nil
+ }
+ apiPullRequest.Head.RepoID = pr.BaseRepoID
+ apiPullRequest.Head.Repository = apiPullRequest.Base.Repository
+ apiPullRequest.Head.Name = ""
+ }
+
+ if pr.HeadRepo != nil && pr.Flow == issues_model.PullRequestFlowGithub {
+ p, err := access_model.GetUserRepoPermission(ctx, pr.HeadRepo, doer)
+ if err != nil {
+ log.Error("GetUserRepoPermission[%d]: %v", pr.HeadRepoID, err)
+ p.AccessMode = perm.AccessModeNone
+ }
+
+ apiPullRequest.Head.RepoID = pr.HeadRepo.ID
+ apiPullRequest.Head.Repository = ToRepo(ctx, pr.HeadRepo, p)
+
+ headGitRepo, err := gitrepo.OpenRepository(ctx, pr.HeadRepo)
+ if err != nil {
+ log.Error("OpenRepository[%s]: %v", pr.HeadRepo.RepoPath(), err)
+ return nil
+ }
+ defer headGitRepo.Close()
+
+ headBranch, err = headGitRepo.GetBranch(pr.HeadBranch)
+ if err != nil && !git.IsErrBranchNotExist(err) {
+ log.Error("GetBranch[%s]: %v", pr.HeadBranch, err)
+ return nil
+ }
+
+ // Outer scope variables to be used in diff calculation
+ var (
+ startCommitID string
+ endCommitID string
+ )
+
+ if git.IsErrBranchNotExist(err) {
+ headCommitID, err := headGitRepo.GetRefCommitID(apiPullRequest.Head.Ref)
+ if err != nil && !git.IsErrNotExist(err) {
+ log.Error("GetCommit[%s]: %v", pr.HeadBranch, err)
+ return nil
+ }
+ if err == nil {
+ apiPullRequest.Head.Sha = headCommitID
+ endCommitID = headCommitID
+ }
+ } else {
+ commit, err := headBranch.GetCommit()
+ if err != nil && !git.IsErrNotExist(err) {
+ log.Error("GetCommit[%s]: %v", headBranch.Name, err)
+ return nil
+ }
+ if err == nil {
+ apiPullRequest.Head.Ref = pr.HeadBranch
+ apiPullRequest.Head.Sha = commit.ID.String()
+ endCommitID = commit.ID.String()
+ }
+ }
+
+ // Calculate diff
+ startCommitID = pr.MergeBase
+
+ apiPullRequest.ChangedFiles, apiPullRequest.Additions, apiPullRequest.Deletions, err = gitRepo.GetDiffShortStat(startCommitID, endCommitID)
+ if err != nil {
+ log.Error("GetDiffShortStat: %v", err)
+ }
+ }
+
+ if len(apiPullRequest.Head.Sha) == 0 && len(apiPullRequest.Head.Ref) != 0 {
+ baseGitRepo, err := gitrepo.OpenRepository(ctx, pr.BaseRepo)
+ if err != nil {
+ log.Error("OpenRepository[%s]: %v", pr.BaseRepo.RepoPath(), err)
+ return nil
+ }
+ defer baseGitRepo.Close()
+ refs, err := baseGitRepo.GetRefsFiltered(apiPullRequest.Head.Ref)
+ if err != nil {
+ log.Error("GetRefsFiltered[%s]: %v", apiPullRequest.Head.Ref, err)
+ return nil
+ } else if len(refs) == 0 {
+ log.Error("unable to resolve PR head ref")
+ } else {
+ apiPullRequest.Head.Sha = refs[0].Object.String()
+ }
+ }
+
+ if pr.HasMerged {
+ apiPullRequest.Merged = pr.MergedUnix.AsTimePtr()
+ apiPullRequest.MergedCommitID = &pr.MergedCommitID
+ apiPullRequest.MergedBy = ToUser(ctx, pr.Merger, nil)
+ }
+
+ return apiPullRequest
+}
diff --git a/services/convert/pull_review.go b/services/convert/pull_review.go
new file mode 100644
index 0000000..f7990e7
--- /dev/null
+++ b/services/convert/pull_review.go
@@ -0,0 +1,139 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "context"
+ "strings"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ user_model "code.gitea.io/gitea/models/user"
+ api "code.gitea.io/gitea/modules/structs"
+)
+
+// ToPullReview convert a review to api format
+func ToPullReview(ctx context.Context, r *issues_model.Review, doer *user_model.User) (*api.PullReview, error) {
+ if err := r.LoadAttributes(ctx); err != nil {
+ if !user_model.IsErrUserNotExist(err) {
+ return nil, err
+ }
+ r.Reviewer = user_model.NewGhostUser()
+ }
+
+ result := &api.PullReview{
+ ID: r.ID,
+ Reviewer: ToUser(ctx, r.Reviewer, doer),
+ State: api.ReviewStateUnknown,
+ Body: r.Content,
+ CommitID: r.CommitID,
+ Stale: r.Stale,
+ Official: r.Official,
+ Dismissed: r.Dismissed,
+ CodeCommentsCount: r.GetCodeCommentsCount(ctx),
+ Submitted: r.CreatedUnix.AsTime(),
+ Updated: r.UpdatedUnix.AsTime(),
+ HTMLURL: r.HTMLURL(ctx),
+ HTMLPullURL: r.Issue.HTMLURL(),
+ }
+
+ if r.ReviewerTeam != nil {
+ var err error
+ result.ReviewerTeam, err = ToTeam(ctx, r.ReviewerTeam)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ switch r.Type {
+ case issues_model.ReviewTypeApprove:
+ result.State = api.ReviewStateApproved
+ case issues_model.ReviewTypeReject:
+ result.State = api.ReviewStateRequestChanges
+ case issues_model.ReviewTypeComment:
+ result.State = api.ReviewStateComment
+ case issues_model.ReviewTypePending:
+ result.State = api.ReviewStatePending
+ case issues_model.ReviewTypeRequest:
+ result.State = api.ReviewStateRequestReview
+ }
+
+ return result, nil
+}
+
+// ToPullReviewList convert a list of reviews to its api format
+func ToPullReviewList(ctx context.Context, rl []*issues_model.Review, doer *user_model.User) ([]*api.PullReview, error) {
+ result := make([]*api.PullReview, 0, len(rl))
+ for i := range rl {
+ // show pending reviews only for the user who created them
+ if rl[i].Type == issues_model.ReviewTypePending && (doer == nil || !(doer.IsAdmin || doer.ID == rl[i].ReviewerID)) {
+ continue
+ }
+ r, err := ToPullReview(ctx, rl[i], doer)
+ if err != nil {
+ return nil, err
+ }
+ result = append(result, r)
+ }
+ return result, nil
+}
+
+// ToPullReviewComment convert a CodeComment of a review to its api format
+func ToPullReviewComment(ctx context.Context, review *issues_model.Review, comment *issues_model.Comment, doer *user_model.User) (*api.PullReviewComment, error) {
+ apiComment := &api.PullReviewComment{
+ ID: comment.ID,
+ Body: comment.Content,
+ Poster: ToUser(ctx, comment.Poster, doer),
+ Resolver: ToUser(ctx, comment.ResolveDoer, doer),
+ ReviewID: review.ID,
+ Created: comment.CreatedUnix.AsTime(),
+ Updated: comment.UpdatedUnix.AsTime(),
+ Path: comment.TreePath,
+ CommitID: comment.CommitSHA,
+ OrigCommitID: comment.OldRef,
+ DiffHunk: patch2diff(comment.Patch),
+ HTMLURL: comment.HTMLURL(ctx),
+ HTMLPullURL: review.Issue.HTMLURL(),
+ }
+
+ if comment.Line < 0 {
+ apiComment.OldLineNum = comment.UnsignedLine()
+ } else {
+ apiComment.LineNum = comment.UnsignedLine()
+ }
+
+ return apiComment, nil
+}
+
+// ToPullReviewCommentList convert the CodeComments of a review to its api format
+func ToPullReviewCommentList(ctx context.Context, review *issues_model.Review, doer *user_model.User) ([]*api.PullReviewComment, error) {
+ if err := review.LoadAttributes(ctx); err != nil {
+ if !user_model.IsErrUserNotExist(err) {
+ return nil, err
+ }
+ review.Reviewer = user_model.NewGhostUser()
+ }
+
+ apiComments := make([]*api.PullReviewComment, 0, len(review.CodeComments))
+
+ for _, lines := range review.CodeComments {
+ for _, comments := range lines {
+ for _, comment := range comments {
+ apiComment, err := ToPullReviewComment(ctx, review, comment, doer)
+ if err != nil {
+ return nil, err
+ }
+ apiComments = append(apiComments, apiComment)
+ }
+ }
+ }
+ return apiComments, nil
+}
+
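+// patch2diff strips the file header from a patch, keeping only the single hunk starting at "@@"; anything else yields an empty string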
+func patch2diff(patch string) string {
+ split := strings.Split(patch, "\n@@")
+ if len(split) == 2 {
+ return "@@" + split[1]
+ }
+ return ""
+}
diff --git a/services/convert/pull_test.go b/services/convert/pull_test.go
new file mode 100644
index 0000000..1339ed5
--- /dev/null
+++ b/services/convert/pull_test.go
@@ -0,0 +1,78 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/structs"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestPullRequest_APIFormat(t *testing.T) {
+ // with HeadRepo
+ require.NoError(t, unittest.PrepareTestDatabase())
+ headRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1})
+ require.NoError(t, pr.LoadAttributes(db.DefaultContext))
+ require.NoError(t, pr.LoadIssue(db.DefaultContext))
+ apiPullRequest := ToAPIPullRequest(git.DefaultContext, pr, nil)
+ assert.NotNil(t, apiPullRequest)
+ assert.EqualValues(t, &structs.PRBranchInfo{
+ Name: "branch1",
+ Ref: "refs/pull/2/head",
+ Sha: "4a357436d925b5c974181ff12a994538ddc5a269",
+ RepoID: 1,
+ Repository: ToRepo(db.DefaultContext, headRepo, access_model.Permission{AccessMode: perm.AccessModeRead}),
+ }, apiPullRequest.Head)
+
+ // withOut HeadRepo
+ pr = unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1})
+ require.NoError(t, pr.LoadIssue(db.DefaultContext))
+ require.NoError(t, pr.LoadAttributes(db.DefaultContext))
+ // simulate fork deletion
+ pr.HeadRepo = nil
+ pr.HeadRepoID = 100000
+ apiPullRequest = ToAPIPullRequest(git.DefaultContext, pr, nil)
+ assert.NotNil(t, apiPullRequest)
+ assert.Nil(t, apiPullRequest.Head.Repository)
+ assert.EqualValues(t, -1, apiPullRequest.Head.RepoID)
+}
+
+func TestPullReviewList(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ t.Run("Pending review", func(t *testing.T) {
+ reviewer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ review := unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: 6, ReviewerID: reviewer.ID})
+ rl := []*issues_model.Review{review}
+
+ t.Run("Anonymous", func(t *testing.T) {
+ prList, err := ToPullReviewList(db.DefaultContext, rl, nil)
+ require.NoError(t, err)
+ assert.Empty(t, prList)
+ })
+ t.Run("Reviewer", func(t *testing.T) {
+ prList, err := ToPullReviewList(db.DefaultContext, rl, reviewer)
+ require.NoError(t, err)
+ assert.Len(t, prList, 1)
+ })
+ t.Run("Admin", func(t *testing.T) {
+ adminUser := unittest.AssertExistsAndLoadBean(t, &user_model.User{IsAdmin: true}, unittest.Cond("id != ?", reviewer.ID))
+ prList, err := ToPullReviewList(db.DefaultContext, rl, adminUser)
+ require.NoError(t, err)
+ assert.Len(t, prList, 1)
+ })
+ })
+}
diff --git a/services/convert/quota.go b/services/convert/quota.go
new file mode 100644
index 0000000..791cd8e
--- /dev/null
+++ b/services/convert/quota.go
@@ -0,0 +1,185 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "context"
+ "strconv"
+
+ action_model "code.gitea.io/gitea/models/actions"
+ issue_model "code.gitea.io/gitea/models/issues"
+ package_model "code.gitea.io/gitea/models/packages"
+ quota_model "code.gitea.io/gitea/models/quota"
+ repo_model "code.gitea.io/gitea/models/repo"
+ api "code.gitea.io/gitea/modules/structs"
+)
+
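+// ToQuotaRuleInfo converts a quota_model.Rule to an api.QuotaRuleInfo, optionally including the rule name.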
+func ToQuotaRuleInfo(rule quota_model.Rule, withName bool) api.QuotaRuleInfo {
+ info := api.QuotaRuleInfo{
+ Limit: rule.Limit,
+ Subjects: make([]string, len(rule.Subjects)),
+ }
+ for i := range len(rule.Subjects) {
+ info.Subjects[i] = rule.Subjects[i].String()
+ }
+
+ if withName {
+ info.Name = rule.Name
+ }
+
+ return info
+}
+
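+// toQuotaInfoUsed converts quota usage figures to their API representation.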
+func toQuotaInfoUsed(used *quota_model.Used) api.QuotaUsed {
+ info := api.QuotaUsed{
+ Size: api.QuotaUsedSize{
+ Repos: api.QuotaUsedSizeRepos{
+ Public: used.Size.Repos.Public,
+ Private: used.Size.Repos.Private,
+ },
+ Git: api.QuotaUsedSizeGit{
+ LFS: used.Size.Git.LFS,
+ },
+ Assets: api.QuotaUsedSizeAssets{
+ Attachments: api.QuotaUsedSizeAssetsAttachments{
+ Issues: used.Size.Assets.Attachments.Issues,
+ Releases: used.Size.Assets.Attachments.Releases,
+ },
+ Artifacts: used.Size.Assets.Artifacts,
+ Packages: api.QuotaUsedSizeAssetsPackages{
+ All: used.Size.Assets.Packages.All,
+ },
+ },
+ },
+ }
+ return info
+}
+
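+// ToQuotaInfo converts quota usage and quota groups to an api.QuotaInfo.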
+func ToQuotaInfo(used *quota_model.Used, groups quota_model.GroupList, withNames bool) api.QuotaInfo {
+ info := api.QuotaInfo{
+ Used: toQuotaInfoUsed(used),
+ Groups: ToQuotaGroupList(groups, withNames),
+ }
+
+ return info
+}
+
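+// ToQuotaGroup converts a quota_model.Group to an api.QuotaGroup, optionally including rule and group names.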
+func ToQuotaGroup(group quota_model.Group, withNames bool) api.QuotaGroup {
+ info := api.QuotaGroup{
+ Rules: make([]api.QuotaRuleInfo, len(group.Rules)),
+ }
+ if withNames {
+ info.Name = group.Name
+ }
+ for i := range len(group.Rules) {
+ info.Rules[i] = ToQuotaRuleInfo(group.Rules[i], withNames)
+ }
+
+ return info
+}
+
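+// ToQuotaGroupList converts a quota_model.GroupList to an api.QuotaGroupList.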
+func ToQuotaGroupList(groups quota_model.GroupList, withNames bool) api.QuotaGroupList {
+ list := make(api.QuotaGroupList, len(groups))
+
+ for i := range len(groups) {
+ list[i] = ToQuotaGroup(*groups[i], withNames)
+ }
+
+ return list
+}
+
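+// ToQuotaUsedAttachmentList converts a list of attachments to an api.QuotaUsedAttachmentList, resolving the release, comment, or issue each attachment belongs to.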
+func ToQuotaUsedAttachmentList(ctx context.Context, attachments []*repo_model.Attachment) (*api.QuotaUsedAttachmentList, error) {
+ getAttachmentContainer := func(a *repo_model.Attachment) (string, string, error) {
+ if a.ReleaseID != 0 {
+ release, err := repo_model.GetReleaseByID(ctx, a.ReleaseID)
+ if err != nil {
+ return "", "", err
+ }
+ if err = release.LoadAttributes(ctx); err != nil {
+ return "", "", err
+ }
+ return release.APIURL(), release.HTMLURL(), nil
+ }
+ if a.CommentID != 0 {
+ comment, err := issue_model.GetCommentByID(ctx, a.CommentID)
+ if err != nil {
+ return "", "", err
+ }
+ return comment.APIURL(ctx), comment.HTMLURL(ctx), nil
+ }
+ if a.IssueID != 0 {
+ issue, err := issue_model.GetIssueByID(ctx, a.IssueID)
+ if err != nil {
+ return "", "", err
+ }
+ if err = issue.LoadRepo(ctx); err != nil {
+ return "", "", err
+ }
+ return issue.APIURL(ctx), issue.HTMLURL(), nil
+ }
+ return "", "", nil
+ }
+
+ result := make(api.QuotaUsedAttachmentList, len(attachments))
+ for i, a := range attachments {
+ capiURL, chtmlURL, err := getAttachmentContainer(a)
+ if err != nil {
+ return nil, err
+ }
+
+ apiURL := capiURL + "/assets/" + strconv.FormatInt(a.ID, 10)
+ result[i] = &api.QuotaUsedAttachment{
+ Name: a.Name,
+ Size: a.Size,
+ APIURL: apiURL,
+ }
+ result[i].ContainedIn.APIURL = capiURL
+ result[i].ContainedIn.HTMLURL = chtmlURL
+ }
+
+ return &result, nil
+}
+
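+// ToQuotaUsedPackageList converts a list of package versions to an api.QuotaUsedPackageList, summing the size of each version's files.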
+func ToQuotaUsedPackageList(ctx context.Context, packages []*package_model.PackageVersion) (*api.QuotaUsedPackageList, error) {
+ result := make(api.QuotaUsedPackageList, len(packages))
+ for i, pv := range packages {
+ d, err := package_model.GetPackageDescriptor(ctx, pv)
+ if err != nil {
+ return nil, err
+ }
+
+ var size int64
+ for _, file := range d.Files {
+ size += file.Blob.Size
+ }
+
+ result[i] = &api.QuotaUsedPackage{
+ Name: d.Package.Name,
+ Type: d.Package.Type.Name(),
+ Version: d.Version.Version,
+ Size: size,
+ HTMLURL: d.VersionHTMLURL(),
+ }
+ }
+
+ return &result, nil
+}
+
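+// ToQuotaUsedArtifactList converts a list of Actions artifacts to an api.QuotaUsedArtifactList.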
+func ToQuotaUsedArtifactList(ctx context.Context, artifacts []*action_model.ActionArtifact) (*api.QuotaUsedArtifactList, error) {
+ result := make(api.QuotaUsedArtifactList, len(artifacts))
+ for i, a := range artifacts {
+ run, err := action_model.GetRunByID(ctx, a.RunID)
+ if err != nil {
+ return nil, err
+ }
+
+ result[i] = &api.QuotaUsedArtifact{
+ Name: a.ArtifactName,
+ Size: a.FileCompressedSize,
+ HTMLURL: run.HTMLURL(),
+ }
+ }
+
+ return &result, nil
+}
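For illustration, a standalone sketch (local stand-in types, not the real quota_model API) of the rule-to-API mapping ToQuotaRuleInfo performs:

package main

import "fmt"

type subject int

// String is a stand-in for quota_model.LimitSubject.String().
func (s subject) String() string { return fmt.Sprintf("subject:%d", int(s)) }

type rule struct {
	Name     string
	Limit    int64
	Subjects []subject
}

type ruleInfo struct {
	Name     string
	Limit    int64
	Subjects []string
}

// toRuleInfo mirrors ToQuotaRuleInfo: stringify the subjects, copy the
// limit, and include the name only when asked to.
func toRuleInfo(r rule, withName bool) ruleInfo {
	info := ruleInfo{Limit: r.Limit, Subjects: make([]string, len(r.Subjects))}
	for i, s := range r.Subjects {
		info.Subjects[i] = s.String()
	}
	if withName {
		info.Name = r.Name
	}
	return info
}

func main() {
	fmt.Printf("%+v\n", toRuleInfo(rule{Name: "default", Limit: 1 << 30, Subjects: []subject{1}}, true))
}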
diff --git a/services/convert/release.go b/services/convert/release.go
new file mode 100644
index 0000000..8c0f61b
--- /dev/null
+++ b/services/convert/release.go
@@ -0,0 +1,35 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "context"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ api "code.gitea.io/gitea/modules/structs"
+)
+
+// ToAPIRelease converts a repo_model.Release to an api.Release
+func ToAPIRelease(ctx context.Context, repo *repo_model.Repository, r *repo_model.Release) *api.Release {
+ return &api.Release{
+ ID: r.ID,
+ TagName: r.TagName,
+ Target: r.Target,
+ Title: r.Title,
+ Note: r.Note,
+ URL: r.APIURL(),
+ HTMLURL: r.HTMLURL(),
+ TarURL: r.TarURL(),
+ ZipURL: r.ZipURL(),
+ HideArchiveLinks: r.HideArchiveLinks,
+ UploadURL: r.APIUploadURL(),
+ IsDraft: r.IsDraft,
+ IsPrerelease: r.IsPrerelease,
+ CreatedAt: r.CreatedUnix.AsTime(),
+ PublishedAt: r.CreatedUnix.AsTime(),
+ Publisher: ToUser(ctx, r.Publisher, nil),
+ Attachments: ToAPIAttachments(repo, r.Attachments),
+ ArchiveDownloadCount: r.ArchiveDownloadCount,
+ }
+}
diff --git a/services/convert/release_test.go b/services/convert/release_test.go
new file mode 100644
index 0000000..2e40bb9
--- /dev/null
+++ b/services/convert/release_test.go
@@ -0,0 +1,29 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestRelease_ToRelease(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ release1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Release{ID: 1})
+ require.NoError(t, release1.LoadAttributes(db.DefaultContext))
+
+ apiRelease := ToAPIRelease(db.DefaultContext, repo1, release1)
+ assert.NotNil(t, apiRelease)
+ assert.EqualValues(t, 1, apiRelease.ID)
+ assert.EqualValues(t, "https://try.gitea.io/api/v1/repos/user2/repo1/releases/1", apiRelease.URL)
+ assert.EqualValues(t, "https://try.gitea.io/api/v1/repos/user2/repo1/releases/1/assets", apiRelease.UploadURL)
+}
diff --git a/services/convert/repository.go b/services/convert/repository.go
new file mode 100644
index 0000000..2fb6f6d
--- /dev/null
+++ b/services/convert/repository.go
@@ -0,0 +1,254 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "context"
+ "time"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ unit_model "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/log"
+ api "code.gitea.io/gitea/modules/structs"
+)
+
+// ToRepo converts a Repository to api.Repository
+func ToRepo(ctx context.Context, repo *repo_model.Repository, permissionInRepo access_model.Permission) *api.Repository {
+ return innerToRepo(ctx, repo, permissionInRepo, false)
+}
+
+func innerToRepo(ctx context.Context, repo *repo_model.Repository, permissionInRepo access_model.Permission, isParent bool) *api.Repository {
+ var parent *api.Repository
+
+ if permissionInRepo.Units == nil && permissionInRepo.UnitsMode == nil {
+ // If Units and UnitsMode are both nil, it means that it's a hard coded permission,
+ // like access_model.Permission{AccessMode: perm.AccessModeAdmin}.
+ // So we need to load units for the repo, or UnitAccessMode will always return perm.AccessModeNone.
+ _ = repo.LoadUnits(ctx) // the error is not important, so ignore it
+ permissionInRepo.Units = repo.Units
+ }
+
+ cloneLink := repo.CloneLink()
+ permission := &api.Permission{
+ Admin: permissionInRepo.AccessMode >= perm.AccessModeAdmin,
+ Push: permissionInRepo.UnitAccessMode(unit_model.TypeCode) >= perm.AccessModeWrite,
+ Pull: permissionInRepo.UnitAccessMode(unit_model.TypeCode) >= perm.AccessModeRead,
+ }
+ if !isParent {
+ err := repo.GetBaseRepo(ctx)
+ if err != nil {
+ return nil
+ }
+ if repo.BaseRepo != nil {
+ // FIXME: The permission of the parent repo is not correct.
+ // It's the permission of the current repo, so it's probably different from the parent repo.
+ // But there isn't a good way to get the permission of the parent repo, because the doer is not passed in.
+ // Use the permission of the current repo to keep the behavior consistent with the old API.
+ // Maybe the right way is to set the permission of the parent repo to nil; empty is better than wrong.
+ parent = innerToRepo(ctx, repo.BaseRepo, permissionInRepo, true)
+ }
+ }
+
+ // check enabled/disabled units
+ hasIssues := false
+ var externalTracker *api.ExternalTracker
+ var internalTracker *api.InternalTracker
+ if unit, err := repo.GetUnit(ctx, unit_model.TypeIssues); err == nil {
+ config := unit.IssuesConfig()
+ hasIssues = true
+ internalTracker = &api.InternalTracker{
+ EnableTimeTracker: config.EnableTimetracker,
+ AllowOnlyContributorsToTrackTime: config.AllowOnlyContributorsToTrackTime,
+ EnableIssueDependencies: config.EnableDependencies,
+ }
+ } else if unit, err := repo.GetUnit(ctx, unit_model.TypeExternalTracker); err == nil {
+ config := unit.ExternalTrackerConfig()
+ hasIssues = true
+ externalTracker = &api.ExternalTracker{
+ ExternalTrackerURL: config.ExternalTrackerURL,
+ ExternalTrackerFormat: config.ExternalTrackerFormat,
+ ExternalTrackerStyle: config.ExternalTrackerStyle,
+ ExternalTrackerRegexpPattern: config.ExternalTrackerRegexpPattern,
+ }
+ }
+ hasWiki := false
+ globallyEditableWiki := false
+ var externalWiki *api.ExternalWiki
+ if wikiUnit, err := repo.GetUnit(ctx, unit_model.TypeWiki); err == nil {
+ hasWiki = true
+ if wikiUnit.DefaultPermissions == repo_model.UnitAccessModeWrite {
+ globallyEditableWiki = true
+ }
+ } else if unit, err := repo.GetUnit(ctx, unit_model.TypeExternalWiki); err == nil {
+ hasWiki = true
+ config := unit.ExternalWikiConfig()
+ externalWiki = &api.ExternalWiki{
+ ExternalWikiURL: config.ExternalWikiURL,
+ }
+ }
+ hasPullRequests := false
+ ignoreWhitespaceConflicts := false
+ allowMerge := false
+ allowRebase := false
+ allowRebaseMerge := false
+ allowSquash := false
+ allowFastForwardOnly := false
+ allowRebaseUpdate := false
+ defaultDeleteBranchAfterMerge := false
+ defaultMergeStyle := repo_model.MergeStyleMerge
+ defaultAllowMaintainerEdit := false
+ if unit, err := repo.GetUnit(ctx, unit_model.TypePullRequests); err == nil {
+ config := unit.PullRequestsConfig()
+ hasPullRequests = true
+ ignoreWhitespaceConflicts = config.IgnoreWhitespaceConflicts
+ allowMerge = config.AllowMerge
+ allowRebase = config.AllowRebase
+ allowRebaseMerge = config.AllowRebaseMerge
+ allowSquash = config.AllowSquash
+ allowFastForwardOnly = config.AllowFastForwardOnly
+ allowRebaseUpdate = config.AllowRebaseUpdate
+ defaultDeleteBranchAfterMerge = config.DefaultDeleteBranchAfterMerge
+ defaultMergeStyle = config.GetDefaultMergeStyle()
+ defaultAllowMaintainerEdit = config.DefaultAllowMaintainerEdit
+ }
+ hasProjects := false
+ if _, err := repo.GetUnit(ctx, unit_model.TypeProjects); err == nil {
+ hasProjects = true
+ }
+
+ hasReleases := false
+ if _, err := repo.GetUnit(ctx, unit_model.TypeReleases); err == nil {
+ hasReleases = true
+ }
+
+ hasPackages := false
+ if _, err := repo.GetUnit(ctx, unit_model.TypePackages); err == nil {
+ hasPackages = true
+ }
+
+ hasActions := false
+ if _, err := repo.GetUnit(ctx, unit_model.TypeActions); err == nil {
+ hasActions = true
+ }
+
+ if err := repo.LoadOwner(ctx); err != nil {
+ return nil
+ }
+
+ numReleases, _ := db.Count[repo_model.Release](ctx, repo_model.FindReleasesOptions{
+ IncludeDrafts: false,
+ IncludeTags: false,
+ RepoID: repo.ID,
+ })
+
+ mirrorInterval := ""
+ var mirrorUpdated time.Time
+ if repo.IsMirror {
+ pullMirror, err := repo_model.GetMirrorByRepoID(ctx, repo.ID)
+ if err == nil {
+ mirrorInterval = pullMirror.Interval.String()
+ mirrorUpdated = pullMirror.UpdatedUnix.AsTime()
+ }
+ }
+
+ var transfer *api.RepoTransfer
+ if repo.Status == repo_model.RepositoryPendingTransfer {
+ t, err := models.GetPendingRepositoryTransfer(ctx, repo)
+ if err != nil && !models.IsErrNoPendingTransfer(err) {
+ log.Warn("GetPendingRepositoryTransfer: %v", err)
+ } else {
+ if err := t.LoadAttributes(ctx); err != nil {
+ log.Warn("LoadAttributes of RepoTransfer: %v", err)
+ } else {
+ transfer = ToRepoTransfer(ctx, t)
+ }
+ }
+ }
+
+ var language string
+ if repo.PrimaryLanguage != nil {
+ language = repo.PrimaryLanguage.Language
+ }
+
+ repoAPIURL := repo.APIURL()
+
+ return &api.Repository{
+ ID: repo.ID,
+ Owner: ToUserWithAccessMode(ctx, repo.Owner, permissionInRepo.AccessMode),
+ Name: repo.Name,
+ FullName: repo.FullName(),
+ Description: repo.Description,
+ Private: repo.IsPrivate,
+ Template: repo.IsTemplate,
+ Empty: repo.IsEmpty,
+ Archived: repo.IsArchived,
+ Size: int(repo.Size / 1024),
+ Fork: repo.IsFork,
+ Parent: parent,
+ Mirror: repo.IsMirror,
+ HTMLURL: repo.HTMLURL(),
+ URL: repoAPIURL,
+ SSHURL: cloneLink.SSH,
+ CloneURL: cloneLink.HTTPS,
+ OriginalURL: repo.SanitizedOriginalURL(),
+ Website: repo.Website,
+ Language: language,
+ LanguagesURL: repoAPIURL + "/languages",
+ Stars: repo.NumStars,
+ Forks: repo.NumForks,
+ Watchers: repo.NumWatches,
+ OpenIssues: repo.NumOpenIssues,
+ OpenPulls: repo.NumOpenPulls,
+ Releases: int(numReleases),
+ DefaultBranch: repo.DefaultBranch,
+ Created: repo.CreatedUnix.AsTime(),
+ Updated: repo.UpdatedUnix.AsTime(),
+ ArchivedAt: repo.ArchivedUnix.AsTime(),
+ Permissions: permission,
+ HasIssues: hasIssues,
+ ExternalTracker: externalTracker,
+ InternalTracker: internalTracker,
+ HasWiki: hasWiki,
+ WikiBranch: repo.WikiBranch,
+ GloballyEditableWiki: globallyEditableWiki,
+ HasProjects: hasProjects,
+ HasReleases: hasReleases,
+ HasPackages: hasPackages,
+ HasActions: hasActions,
+ ExternalWiki: externalWiki,
+ HasPullRequests: hasPullRequests,
+ IgnoreWhitespaceConflicts: ignoreWhitespaceConflicts,
+ AllowMerge: allowMerge,
+ AllowRebase: allowRebase,
+ AllowRebaseMerge: allowRebaseMerge,
+ AllowSquash: allowSquash,
+ AllowFastForwardOnly: allowFastForwardOnly,
+ AllowRebaseUpdate: allowRebaseUpdate,
+ DefaultDeleteBranchAfterMerge: defaultDeleteBranchAfterMerge,
+ DefaultMergeStyle: string(defaultMergeStyle),
+ DefaultAllowMaintainerEdit: defaultAllowMaintainerEdit,
+ AvatarURL: repo.AvatarLink(ctx),
+ Internal: !repo.IsPrivate && repo.Owner.Visibility == api.VisibleTypePrivate,
+ MirrorInterval: mirrorInterval,
+ MirrorUpdated: mirrorUpdated,
+ RepoTransfer: transfer,
+ Topics: repo.Topics,
+ ObjectFormatName: repo.ObjectFormatName,
+ }
+}
+
+// ToRepoTransfer converts a models.RepoTransfer to an api.RepoTransfer
+func ToRepoTransfer(ctx context.Context, t *models.RepoTransfer) *api.RepoTransfer {
+ teams, _ := ToTeams(ctx, t.Teams, false)
+
+ return &api.RepoTransfer{
+ Doer: ToUser(ctx, t.Doer, nil),
+ Recipient: ToUser(ctx, t.Recipient, nil),
+ Teams: teams,
+ }
+}
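The Units backfill at the top of innerToRepo matters because UnitAccessMode reports no access for units that were never loaded; a standalone sketch of that pattern with stand-in types (not the real access_model API):

package main

import "fmt"

type accessMode int

const (
	accessNone accessMode = iota
	accessRead
	accessWrite
	accessAdmin
)

type permission struct {
	AccessMode accessMode
	Units      []string // nil for hard-coded permissions such as {AccessMode: accessAdmin}
}

// unitAccess mirrors the idea behind Permission.UnitAccessMode: a unit that
// is not in the loaded list yields no access, whatever AccessMode says.
func unitAccess(p permission, unit string) accessMode {
	for _, u := range p.Units {
		if u == unit {
			return p.AccessMode
		}
	}
	return accessNone
}

func main() {
	p := permission{AccessMode: accessAdmin}          // hard coded, units never loaded
	fmt.Println(unitAccess(p, "code") >= accessWrite) // false: looks like no push access

	p.Units = []string{"code"}                        // stand-in for the repo.LoadUnits(ctx) backfill
	fmt.Println(unitAccess(p, "code") >= accessWrite) // true
}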
diff --git a/services/convert/secret.go b/services/convert/secret.go
new file mode 100644
index 0000000..dd7b9f0
--- /dev/null
+++ b/services/convert/secret.go
@@ -0,0 +1,18 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ secret_model "code.gitea.io/gitea/models/secret"
+ api "code.gitea.io/gitea/modules/structs"
+)
+
+// ToSecret converts Secret to API format
+func ToSecret(secret *secret_model.Secret) *api.Secret {
+ result := &api.Secret{
+ Name: secret.Name,
+ }
+
+ return result
+}
diff --git a/services/convert/status.go b/services/convert/status.go
new file mode 100644
index 0000000..6cef63c
--- /dev/null
+++ b/services/convert/status.go
@@ -0,0 +1,65 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "context"
+
+ git_model "code.gitea.io/gitea/models/git"
+ user_model "code.gitea.io/gitea/models/user"
+ api "code.gitea.io/gitea/modules/structs"
+)
+
+// ToCommitStatus converts git_model.CommitStatus to api.CommitStatus
+func ToCommitStatus(ctx context.Context, status *git_model.CommitStatus) *api.CommitStatus {
+ apiStatus := &api.CommitStatus{
+ Created: status.CreatedUnix.AsTime(),
+ Updated: status.CreatedUnix.AsTime(),
+ State: status.State,
+ TargetURL: status.TargetURL,
+ Description: status.Description,
+ ID: status.Index,
+ URL: status.APIURL(ctx),
+ Context: status.Context,
+ }
+
+ if status.CreatorID != 0 {
+ creator, _ := user_model.GetUserByID(ctx, status.CreatorID)
+ apiStatus.Creator = ToUser(ctx, creator, nil)
+ }
+
+ return apiStatus
+}
+
+// ToCombinedStatus converts a list of CommitStatus to a CombinedStatus
+func ToCombinedStatus(ctx context.Context, statuses []*git_model.CommitStatus, repo *api.Repository) *api.CombinedStatus {
+ if len(statuses) == 0 {
+ return nil
+ }
+
+ retStatus := &api.CombinedStatus{
+ SHA: statuses[0].SHA,
+ TotalCount: len(statuses),
+ Repository: repo,
+ URL: "",
+ }
+
+ retStatus.Statuses = make([]*api.CommitStatus, 0, len(statuses))
+ for _, status := range statuses {
+ retStatus.Statuses = append(retStatus.Statuses, ToCommitStatus(ctx, status))
+ if retStatus.State == "" || status.State.NoBetterThan(retStatus.State) {
+ retStatus.State = status.State
+ }
+ }
+ // According to https://docs.github.com/en/rest/commits/statuses?apiVersion=2022-11-28#get-the-combined-status-for-a-specific-reference
+ // > Additionally, a combined state is returned. The state is one of:
+ // > failure if any of the contexts report as error or failure
+ // > pending if there are no statuses or a context is pending
+ // > success if the latest status for all contexts is success
+ if retStatus.State.IsError() {
+ retStatus.State = api.CommitStatusFailure
+ }
+
+ return retStatus
+}
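A standalone sketch of the combined-state rule above, with an assumed severity ordering for NoBetterThan (success < pending < failure < error):

package main

import "fmt"

var severity = map[string]int{"success": 0, "pending": 1, "failure": 2, "error": 3}

// noBetterThan is a stand-in for CommitStatusState.NoBetterThan: true when
// s is as bad as or worse than other.
func noBetterThan(s, other string) bool { return severity[s] >= severity[other] }

func combined(states []string) string {
	ret := ""
	for _, s := range states {
		if ret == "" || noBetterThan(s, ret) {
			ret = s
		}
	}
	if ret == "error" { // the GitHub rule cited above folds error into failure
		ret = "failure"
	}
	return ret
}

func main() {
	fmt.Println(combined([]string{"success", "pending", "error"})) // failure
	fmt.Println(combined([]string{"success", "success"}))          // success
}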
diff --git a/services/convert/user.go b/services/convert/user.go
new file mode 100644
index 0000000..94a400d
--- /dev/null
+++ b/services/convert/user.go
@@ -0,0 +1,113 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/perm"
+ user_model "code.gitea.io/gitea/models/user"
+ api "code.gitea.io/gitea/modules/structs"
+)
+
+// ToUser converts user_model.User to api.User
+// If doer is set, private information is added if the doer has permission to see it
+func ToUser(ctx context.Context, user, doer *user_model.User) *api.User {
+ if user == nil {
+ return nil
+ }
+ authed := false
+ signed := false
+ if doer != nil {
+ signed = true
+ authed = doer.ID == user.ID || doer.IsAdmin
+ }
+ return toUser(ctx, user, signed, authed)
+}
+
+// ToUsers converts a list of user_model.User to a list of api.User
+func ToUsers(ctx context.Context, doer *user_model.User, users []*user_model.User) []*api.User {
+ result := make([]*api.User, len(users))
+ for i := range users {
+ result[i] = ToUser(ctx, users[i], doer)
+ }
+ return result
+}
+
+// ToUserWithAccessMode converts user_model.User to api.User
+// If accessMode is not "none", some additional information is included
+func ToUserWithAccessMode(ctx context.Context, user *user_model.User, accessMode perm.AccessMode) *api.User {
+ if user == nil {
+ return nil
+ }
+ return toUser(ctx, user, accessMode != perm.AccessModeNone, false)
+}
+
+// toUser converts user_model.User to api.User
+// signed should only be set if the requester is logged in; authed should only be set if the doer is a site admin or the user themself
+func toUser(ctx context.Context, user *user_model.User, signed, authed bool) *api.User {
+ result := &api.User{
+ ID: user.ID,
+ UserName: user.Name,
+ FullName: user.FullName,
+ Email: user.GetPlaceholderEmail(),
+ AvatarURL: user.AvatarLink(ctx),
+ HTMLURL: user.HTMLURL(),
+ Created: user.CreatedUnix.AsTime(),
+ Restricted: user.IsRestricted,
+ Location: user.Location,
+ Pronouns: user.Pronouns,
+ Website: user.Website,
+ Description: user.Description,
+ // counters
+ Followers: user.NumFollowers,
+ Following: user.NumFollowing,
+ StarredRepos: user.NumStars,
+ }
+
+ result.Visibility = user.Visibility.String()
+
+ // hide the primary email if the API caller is anonymous or the user keeps their email private
+ if signed && (!user.KeepEmailPrivate || authed) {
+ result.Email = user.Email
+ }
+
+ // only site admins, and possibly the user themself, get this information
+ if authed {
+ result.IsAdmin = user.IsAdmin
+ result.LoginName = user.LoginName
+ result.SourceID = user.LoginSource
+ result.LastLogin = user.LastLoginUnix.AsTime()
+ result.Language = user.Language
+ result.IsActive = user.IsActive
+ result.ProhibitLogin = user.ProhibitLogin
+ }
+ return result
+}
+
+// User2UserSettings returns UserSettings based on a user
+func User2UserSettings(user *user_model.User) api.UserSettings {
+ return api.UserSettings{
+ FullName: user.FullName,
+ Website: user.Website,
+ Location: user.Location,
+ Pronouns: user.Pronouns,
+ Language: user.Language,
+ Description: user.Description,
+ Theme: user.Theme,
+ HideEmail: user.KeepEmailPrivate,
+ HideActivity: user.KeepActivityPrivate,
+ DiffViewStyle: user.DiffViewStyle,
+ EnableRepoUnitHints: user.EnableRepoUnitHints,
+ }
+}
+
+// ToUserAndPermission returns a user and their collaboration permission for a repository
+func ToUserAndPermission(ctx context.Context, user, doer *user_model.User, accessMode perm.AccessMode) api.RepoCollaboratorPermission {
+ return api.RepoCollaboratorPermission{
+ User: ToUser(ctx, user, doer),
+ Permission: accessMode.String(),
+ RoleName: accessMode.String(),
+ }
+}
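The signed/authed gate above reduces to a single predicate; a standalone sketch (function and sample values invented) that reproduces the email-visibility rule:

package main

import "fmt"

// visibleEmail mirrors the rule in toUser: anonymous callers get the
// placeholder email; signed-in callers see the real address only if the
// user doesn't keep it private, or the caller is authed (the user themself
// or an admin).
func visibleEmail(signed, authed, keepPrivate bool, real, placeholder string) string {
	if signed && (!keepPrivate || authed) {
		return real
	}
	return placeholder
}

func main() {
	fmt.Println(visibleEmail(false, false, true, "a@b.c", "u@noreply")) // u@noreply
	fmt.Println(visibleEmail(true, false, true, "a@b.c", "u@noreply"))  // u@noreply
	fmt.Println(visibleEmail(true, true, true, "a@b.c", "u@noreply"))   // a@b.c
	fmt.Println(visibleEmail(true, false, false, "a@b.c", "u@noreply")) // a@b.c
}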
diff --git a/services/convert/user_test.go b/services/convert/user_test.go
new file mode 100644
index 0000000..0f0b520
--- /dev/null
+++ b/services/convert/user_test.go
@@ -0,0 +1,41 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ api "code.gitea.io/gitea/modules/structs"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestUser_ToUser(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ user1 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1, IsAdmin: true})
+
+ apiUser := toUser(db.DefaultContext, user1, true, true)
+ assert.True(t, apiUser.IsAdmin)
+ assert.Contains(t, apiUser.AvatarURL, "://")
+
+ user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2, IsAdmin: false})
+
+ apiUser = toUser(db.DefaultContext, user2, true, true)
+ assert.False(t, apiUser.IsAdmin)
+
+ apiUser = toUser(db.DefaultContext, user1, false, false)
+ assert.False(t, apiUser.IsAdmin)
+ assert.EqualValues(t, api.VisibleTypePublic.String(), apiUser.Visibility)
+
+ user31 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 31, IsAdmin: false, Visibility: api.VisibleTypePrivate})
+
+ apiUser = toUser(db.DefaultContext, user31, true, true)
+ assert.False(t, apiUser.IsAdmin)
+ assert.EqualValues(t, api.VisibleTypePrivate.String(), apiUser.Visibility)
+}
diff --git a/services/convert/utils.go b/services/convert/utils.go
new file mode 100644
index 0000000..fe35fd2
--- /dev/null
+++ b/services/convert/utils.go
@@ -0,0 +1,44 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// Copyright 2016 The Gogs Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "strings"
+
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+)
+
+// ToCorrectPageSize makes sure the page size is within the allowed range.
+func ToCorrectPageSize(size int) int {
+ if size <= 0 {
+ size = setting.API.DefaultPagingNum
+ } else if size > setting.API.MaxResponseItems {
+ size = setting.API.MaxResponseItems
+ }
+ return size
+}
+
+// ToGitServiceType returns the GitServiceType matching the given string
+func ToGitServiceType(value string) structs.GitServiceType {
+ switch strings.ToLower(value) {
+ case "github":
+ return structs.GithubService
+ case "gitea":
+ return structs.GiteaService
+ case "gitlab":
+ return structs.GitlabService
+ case "gogs":
+ return structs.GogsService
+ case "onedev":
+ return structs.OneDevService
+ case "gitbucket":
+ return structs.GitBucketService
+ case "forgejo":
+ return structs.ForgejoService
+ default:
+ return structs.PlainGitService
+ }
+}
diff --git a/services/convert/utils_test.go b/services/convert/utils_test.go
new file mode 100644
index 0000000..b464d8b
--- /dev/null
+++ b/services/convert/utils_test.go
@@ -0,0 +1,39 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestToCorrectPageSize(t *testing.T) {
+ assert.EqualValues(t, 30, ToCorrectPageSize(0))
+ assert.EqualValues(t, 30, ToCorrectPageSize(-10))
+ assert.EqualValues(t, 20, ToCorrectPageSize(20))
+ assert.EqualValues(t, 50, ToCorrectPageSize(100))
+}
+
+func TestToGitServiceType(t *testing.T) {
+ tc := []struct {
+ typ string
+ enum int
+ }{{
+ typ: "github", enum: 2,
+ }, {
+ typ: "gitea", enum: 3,
+ }, {
+ typ: "gitlab", enum: 4,
+ }, {
+ typ: "gogs", enum: 5,
+ }, {
+ typ: "forgejo", enum: 9,
+ }, {
+ typ: "trash", enum: 1,
+ }}
+ for _, test := range tc {
+ assert.EqualValues(t, test.enum, ToGitServiceType(test.typ))
+ }
+}
diff --git a/services/convert/wiki.go b/services/convert/wiki.go
new file mode 100644
index 0000000..767bfdb
--- /dev/null
+++ b/services/convert/wiki.go
@@ -0,0 +1,45 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "time"
+
+ "code.gitea.io/gitea/modules/git"
+ api "code.gitea.io/gitea/modules/structs"
+)
+
+// ToWikiCommit converts a git commit into a WikiCommit
+func ToWikiCommit(commit *git.Commit) *api.WikiCommit {
+ return &api.WikiCommit{
+ ID: commit.ID.String(),
+ Author: &api.CommitUser{
+ Identity: api.Identity{
+ Name: commit.Author.Name,
+ Email: commit.Author.Email,
+ },
+ Date: commit.Author.When.UTC().Format(time.RFC3339),
+ },
+ Committer: &api.CommitUser{
+ Identity: api.Identity{
+ Name: commit.Committer.Name,
+ Email: commit.Committer.Email,
+ },
+ Date: commit.Committer.When.UTC().Format(time.RFC3339),
+ },
+ Message: commit.CommitMessage,
+ }
+}
+
+// ToWikiCommitList converts a list of git commits into a WikiCommitList
+func ToWikiCommitList(commits []*git.Commit, total int64) *api.WikiCommitList {
+ result := make([]*api.WikiCommit, len(commits))
+ for i := range commits {
+ result[i] = ToWikiCommit(commits[i])
+ }
+ return &api.WikiCommitList{
+ WikiCommits: result,
+ Count: total,
+ }
+}
diff --git a/services/cron/cron.go b/services/cron/cron.go
new file mode 100644
index 0000000..3c5737e
--- /dev/null
+++ b/services/cron/cron.go
@@ -0,0 +1,130 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cron
+
+import (
+ "context"
+ "runtime/pprof"
+ "time"
+
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/process"
+ "code.gitea.io/gitea/modules/sync"
+ "code.gitea.io/gitea/modules/translation"
+
+ "github.com/go-co-op/gocron"
+)
+
+var scheduler = gocron.NewScheduler(time.Local)
+
+// Prevent duplicate running tasks.
+var taskStatusTable = sync.NewStatusTable()
+
+// NewContext begins the cron tasks.
+// Each cron task is run within the shutdown context as a running server;
+// at shutdown the cron scheduler is stopped.
+func NewContext(original context.Context) {
+ defer pprof.SetGoroutineLabels(original)
+ _, _, finished := process.GetManager().AddTypedContext(graceful.GetManager().ShutdownContext(), "Service: Cron", process.SystemProcessType, true)
+ initBasicTasks()
+ initExtendedTasks()
+ initActionsTasks()
+
+ lock.Lock()
+ for _, task := range tasks {
+ if task.IsEnabled() && task.DoRunAtStart() {
+ go task.Run()
+ }
+ }
+
+ scheduler.StartAsync()
+ started = true
+ lock.Unlock()
+ graceful.GetManager().RunAtShutdown(context.Background(), func() {
+ scheduler.Stop()
+ lock.Lock()
+ started = false
+ lock.Unlock()
+ finished()
+ })
+}
+
+// TaskTableRow represents a task row in the tasks table
+type TaskTableRow struct {
+ Name string
+ Spec string
+ Next time.Time
+ Prev time.Time
+ Status string
+ LastMessage string
+ LastDoer string
+ ExecTimes int64
+ task *Task
+}
+
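+// FormatLastMessage formats the task's last status message for the given locale.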
+func (t *TaskTableRow) FormatLastMessage(locale translation.Locale) string {
+ if t.Status == "finished" {
+ return t.task.GetConfig().FormatMessage(locale, t.Name, t.Status, t.LastDoer)
+ }
+
+ return t.task.GetConfig().FormatMessage(locale, t.Name, t.Status, t.LastDoer, t.LastMessage)
+}
+
+// TaskTable represents a table of tasks
+type TaskTable []*TaskTableRow
+
+// ListTasks returns all registered cron tasks.
+func ListTasks() TaskTable {
+ jobs := scheduler.Jobs()
+ jobMap := map[string]*gocron.Job{}
+ for _, job := range jobs {
+ // the first tag is the task name
+ tags := job.Tags()
+ if len(tags) == 0 { // should never happen
+ continue
+ }
+ jobMap[job.Tags()[0]] = job
+ }
+
+ lock.Lock()
+ defer lock.Unlock()
+
+ tTable := make([]*TaskTableRow, 0, len(tasks))
+ for _, task := range tasks {
+ spec := "-"
+ var (
+ next time.Time
+ prev time.Time
+ )
+ if e, ok := jobMap[task.Name]; ok {
+ tags := e.Tags()
+ if len(tags) > 1 {
+ spec = tags[1] // the second tag is the task spec
+ }
+ next = e.NextRun()
+ prev = e.PreviousRun()
+ }
+
+ task.lock.Lock()
+ // If the manual run is after the cron run, use that instead.
+ if prev.Before(task.LastRun) {
+ prev = task.LastRun
+ }
+ tTable = append(tTable, &TaskTableRow{
+ Name: task.Name,
+ Spec: spec,
+ Next: next,
+ Prev: prev,
+ ExecTimes: task.ExecTimes,
+ LastMessage: task.LastMessage,
+ Status: task.Status,
+ LastDoer: task.LastDoer,
+ task: task,
+ })
+ task.lock.Unlock()
+ }
+
+ return tTable
+}
diff --git a/services/cron/setting.go b/services/cron/setting.go
new file mode 100644
index 0000000..6dad888
--- /dev/null
+++ b/services/cron/setting.go
@@ -0,0 +1,86 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cron
+
+import (
+ "time"
+
+ "code.gitea.io/gitea/modules/translation"
+)
+
+// Config represents the basic configuration interface for a cron task
+type Config interface {
+ IsEnabled() bool
+ DoRunAtStart() bool
+ GetSchedule() string
+ FormatMessage(locale translation.Locale, name, status, doer string, args ...any) string
+ DoNoticeOnSuccess() bool
+}
+
+// BaseConfig represents the basic config for a Cron task
+type BaseConfig struct {
+ Enabled bool
+ RunAtStart bool
+ Schedule string
+ NoticeOnSuccess bool
+}
+
+// OlderThanConfig represents a cron task with OlderThan setting
+type OlderThanConfig struct {
+ BaseConfig
+ OlderThan time.Duration
+}
+
+// UpdateExistingConfig represents a cron task with UpdateExisting setting
+type UpdateExistingConfig struct {
+ BaseConfig
+ UpdateExisting bool
+}
+
+// CleanupHookTaskConfig represents a cron task with settings to cleanup hook_task
+type CleanupHookTaskConfig struct {
+ BaseConfig
+ CleanupType string
+ OlderThan time.Duration
+ NumberToKeep int
+}
+
+// GetSchedule returns the schedule for the base config
+func (b *BaseConfig) GetSchedule() string {
+ return b.Schedule
+}
+
+// IsEnabled returns the enabled status for the config
+func (b *BaseConfig) IsEnabled() bool {
+ return b.Enabled
+}
+
+// DoRunAtStart returns whether the task should be run at the start
+func (b *BaseConfig) DoRunAtStart() bool {
+ return b.RunAtStart
+}
+
+// DoNoticeOnSuccess returns whether a success notice should be posted
+func (b *BaseConfig) DoNoticeOnSuccess() bool {
+ return b.NoticeOnSuccess
+}
+
+// FormatMessage returns a message for the task
+// Note that the `status` string is concatenated with `admin.dashboard.cron.` or `admin.dashboard.task.` to build the locale message key.
+// Similarly, `name` is prefixed with `admin.dashboard.` to resolve the locale name of the task.
+func (b *BaseConfig) FormatMessage(locale translation.Locale, name, status, doer string, args ...any) string {
+ realArgs := make([]any, 0, len(args)+2)
+ realArgs = append(realArgs, locale.TrString("admin.dashboard."+name))
+ if doer == "" {
+ realArgs = append(realArgs, "(Cron)")
+ } else {
+ realArgs = append(realArgs, doer)
+ }
+ if len(args) > 0 {
+ realArgs = append(realArgs, args...)
+ }
+ if doer == "" {
+ return locale.TrString("admin.dashboard.cron."+status, realArgs...)
+ }
+ return locale.TrString("admin.dashboard.task."+status, realArgs...)
+}
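For illustration, a standalone stand-in for the key composition FormatMessage performs (the real code resolves each key through locale.TrString):

package main

import "fmt"

// formatMessage shows which locale keys FormatMessage composes; doer-less
// runs use the "cron" wording, user-triggered runs use the "task" wording.
func formatMessage(name, status, doer string) (key string, args []any) {
	args = append(args, "admin.dashboard."+name) // resolved task display name
	if doer == "" {
		args = append(args, "(Cron)")
		return "admin.dashboard.cron." + status, args
	}
	args = append(args, doer)
	return "admin.dashboard.task." + status, args
}

func main() {
	key, args := formatMessage("update_mirrors", "finished", "")
	fmt.Println(key, args) // admin.dashboard.cron.finished [admin.dashboard.update_mirrors (Cron)]
	key, args = formatMessage("update_mirrors", "finished", "someadmin")
	fmt.Println(key, args) // admin.dashboard.task.finished [admin.dashboard.update_mirrors someadmin]
}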
diff --git a/services/cron/tasks.go b/services/cron/tasks.go
new file mode 100644
index 0000000..f8a7444
--- /dev/null
+++ b/services/cron/tasks.go
@@ -0,0 +1,230 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cron
+
+import (
+ "context"
+ "fmt"
+ "reflect"
+ "strings"
+ "sync"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ system_model "code.gitea.io/gitea/models/system"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/process"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/translation"
+)
+
+var (
+ lock = sync.Mutex{}
+ started = false
+ tasks = []*Task{}
+ tasksMap = map[string]*Task{}
+)
+
+// Task represents a Cron task
+type Task struct {
+ lock sync.Mutex
+ Name string
+ config Config
+ fun func(context.Context, *user_model.User, Config) error
+ Status string
+ LastMessage string
+ LastDoer string
+ ExecTimes int64
+ // This stores the time of the last manual run of this task.
+ LastRun time.Time
+}
+
+// DoRunAtStart returns whether this task should run at the start
+func (t *Task) DoRunAtStart() bool {
+ return t.config.DoRunAtStart()
+}
+
+// IsEnabled returns whether this task is enabled as a cron task
+func (t *Task) IsEnabled() bool {
+ return t.config.IsEnabled()
+}
+
+// GetConfig will return a copy of the task's config
+func (t *Task) GetConfig() Config {
+ if reflect.TypeOf(t.config).Kind() == reflect.Ptr {
+ // Pointer:
+ return reflect.New(reflect.ValueOf(t.config).Elem().Type()).Interface().(Config)
+ }
+ // Not pointer:
+ return reflect.New(reflect.TypeOf(t.config)).Elem().Interface().(Config)
+}
+
+// Run runs the task, incrementing the execution counter, with no user defined
+func (t *Task) Run() {
+ t.RunWithUser(&user_model.User{
+ ID: -1,
+ Name: "(Cron)",
+ LowerName: "(cron)",
+ }, t.config)
+}
+
+// RunWithUser runs the task as the given user, incrementing the execution counter
+func (t *Task) RunWithUser(doer *user_model.User, config Config) {
+ if !taskStatusTable.StartIfNotRunning(t.Name) {
+ return
+ }
+ t.lock.Lock()
+ if config == nil {
+ config = t.config
+ }
+ t.ExecTimes++
+ t.lock.Unlock()
+ defer func() {
+ taskStatusTable.Stop(t.Name)
+ }()
+ graceful.GetManager().RunWithShutdownContext(func(baseCtx context.Context) {
+ defer func() {
+ if err := recover(); err != nil {
+ // Recover a panic within the execution of the task.
+ combinedErr := fmt.Errorf("%s\n%s", err, log.Stack(2))
+ log.Error("PANIC whilst running task: %s Value: %v", t.Name, combinedErr)
+ }
+ }()
+ // Store the time of this run, before the function is executed, so it
+ // matches the behavior of what the cron library does.
+ t.lock.Lock()
+ t.LastRun = time.Now()
+ t.lock.Unlock()
+
+ pm := process.GetManager()
+ doerName := ""
+ if doer != nil && doer.ID != -1 {
+ doerName = doer.Name
+ }
+
+ ctx, _, finished := pm.AddContext(baseCtx, config.FormatMessage(translation.NewLocale("en-US"), t.Name, "process", doerName))
+ defer finished()
+
+ if err := t.fun(ctx, doer, config); err != nil {
+ var message string
+ var status string
+ if db.IsErrCancelled(err) {
+ status = "cancelled"
+ message = err.(db.ErrCancelled).Message
+ } else {
+ status = "error"
+ message = err.Error()
+ }
+
+ t.lock.Lock()
+ t.LastMessage = message
+ t.Status = status
+ t.LastDoer = doerName
+ t.lock.Unlock()
+
+ if err := system_model.CreateNotice(ctx, system_model.NoticeTask, config.FormatMessage(translation.NewLocale("en-US"), t.Name, "cancelled", doerName, message)); err != nil {
+ log.Error("CreateNotice: %v", err)
+ }
+ return
+ }
+
+ t.lock.Lock()
+ t.Status = "finished"
+ t.LastMessage = ""
+ t.LastDoer = doerName
+ t.lock.Unlock()
+
+ if config.DoNoticeOnSuccess() {
+ if err := system_model.CreateNotice(ctx, system_model.NoticeTask, config.FormatMessage(translation.NewLocale("en-US"), t.Name, "finished", doerName)); err != nil {
+ log.Error("CreateNotice: %v", err)
+ }
+ }
+ })
+}
+
+// GetTask gets the named task
+func GetTask(name string) *Task {
+ lock.Lock()
+ defer lock.Unlock()
+ log.Info("Getting %s in %v", name, tasksMap[name])
+
+ return tasksMap[name]
+}
+
+// RegisterTask allows a task to be registered with the cron service
+func RegisterTask(name string, config Config, fun func(context.Context, *user_model.User, Config) error) error {
+ log.Debug("Registering task: %s", name)
+
+ i18nKey := "admin.dashboard." + name
+ if value := translation.NewLocale("en-US").TrString(i18nKey); value == i18nKey {
+ return fmt.Errorf("translation is missing for task %q, please add translation for %q", name, i18nKey)
+ }
+
+ _, err := setting.GetCronSettings(name, config)
+ if err != nil {
+ log.Error("Unable to register cron task with name: %s Error: %v", name, err)
+ return err
+ }
+
+ task := &Task{
+ Name: name,
+ config: config,
+ fun: fun,
+ }
+ lock.Lock()
+ locked := true
+ defer func() {
+ if locked {
+ lock.Unlock()
+ }
+ }()
+ if _, has := tasksMap[task.Name]; has {
+ log.Error("A task with this name: %s has already been registered", name)
+ return fmt.Errorf("duplicate task with name: %s", task.Name)
+ }
+
+ if config.IsEnabled() {
+ // We cannot use the entry return as there is no way to lock it
+ if err := addTaskToScheduler(task); err != nil {
+ return err
+ }
+ }
+
+ tasks = append(tasks, task)
+ tasksMap[task.Name] = task
+ if started && config.IsEnabled() && config.DoRunAtStart() {
+ lock.Unlock()
+ locked = false
+ task.Run()
+ }
+
+ return nil
+}
+
+// RegisterTaskFatal will register a task but if there is an error log.Fatal
+func RegisterTaskFatal(name string, config Config, fun func(context.Context, *user_model.User, Config) error) {
+ if err := RegisterTask(name, config, fun); err != nil {
+ log.Fatal("Unable to register cron task %s Error: %v", name, err)
+ }
+}
+
+func addTaskToScheduler(task *Task) error {
+ tags := []string{task.Name, task.config.GetSchedule()} // the name and schedule can't be read back from the job, so we add them as tags
+ if scheduleHasSeconds(task.config.GetSchedule()) {
+ scheduler = scheduler.CronWithSeconds(task.config.GetSchedule())
+ } else {
+ scheduler = scheduler.Cron(task.config.GetSchedule())
+ }
+ if _, err := scheduler.Tag(tags...).Do(task.Run); err != nil {
+ log.Error("Unable to register cron task with name: %s Error: %v", task.Name, err)
+ return err
+ }
+ return nil
+}
+
+func scheduleHasSeconds(schedule string) bool {
+ return len(strings.Fields(schedule)) >= 6
+}
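The seconds detection used by addTaskToScheduler is purely a field count, matching the two gocron parsers used above (Cron vs. CronWithSeconds); a standalone demonstration:

package main

import (
	"fmt"
	"strings"
)

// scheduleHasSeconds: a standard cron spec has 5 fields; the with-seconds
// variant has 6 (a leading seconds field).
func scheduleHasSeconds(schedule string) bool {
	return len(strings.Fields(schedule)) >= 6
}

func main() {
	fmt.Println(scheduleHasSeconds("5 4 * * *"))    // false: minute-resolution spec
	fmt.Println(scheduleHasSeconds("30 5 4 * * *")) // true: leading seconds field
	fmt.Println(scheduleHasSeconds("@midnight"))    // false: descriptors have one field
}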
diff --git a/services/cron/tasks_actions.go b/services/cron/tasks_actions.go
new file mode 100644
index 0000000..59cfe36
--- /dev/null
+++ b/services/cron/tasks_actions.go
@@ -0,0 +1,76 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cron
+
+import (
+ "context"
+
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+ actions_service "code.gitea.io/gitea/services/actions"
+)
+
+func initActionsTasks() {
+ if !setting.Actions.Enabled {
+ return
+ }
+ registerStopZombieTasks()
+ registerStopEndlessTasks()
+ registerCancelAbandonedJobs()
+ registerScheduleTasks()
+ registerActionsCleanup()
+}
+
+func registerStopZombieTasks() {
+ RegisterTaskFatal("stop_zombie_tasks", &BaseConfig{
+ Enabled: true,
+ RunAtStart: true,
+ Schedule: "@every 5m",
+ }, func(ctx context.Context, _ *user_model.User, cfg Config) error {
+ return actions_service.StopZombieTasks(ctx)
+ })
+}
+
+func registerStopEndlessTasks() {
+ RegisterTaskFatal("stop_endless_tasks", &BaseConfig{
+ Enabled: true,
+ RunAtStart: true,
+ Schedule: "@every 30m",
+ }, func(ctx context.Context, _ *user_model.User, cfg Config) error {
+ return actions_service.StopEndlessTasks(ctx)
+ })
+}
+
+func registerCancelAbandonedJobs() {
+ RegisterTaskFatal("cancel_abandoned_jobs", &BaseConfig{
+ Enabled: true,
+ RunAtStart: true,
+ Schedule: "@every 6h",
+ }, func(ctx context.Context, _ *user_model.User, cfg Config) error {
+ return actions_service.CancelAbandonedJobs(ctx)
+ })
+}
+
+// registerScheduleTasks registers a scheduled task that runs every minute to start any due schedule tasks.
+func registerScheduleTasks() {
+ // Register the task with a unique name, enabled status, and schedule for every minute.
+ RegisterTaskFatal("start_schedule_tasks", &BaseConfig{
+ Enabled: true,
+ RunAtStart: false,
+ Schedule: "@every 1m",
+ }, func(ctx context.Context, _ *user_model.User, cfg Config) error {
+ // Call the function to start schedule tasks and pass the context.
+ return actions_service.StartScheduleTasks(ctx)
+ })
+}
+
+func registerActionsCleanup() {
+ RegisterTaskFatal("cleanup_actions", &BaseConfig{
+ Enabled: true,
+ RunAtStart: false,
+ Schedule: "@midnight",
+ }, func(ctx context.Context, _ *user_model.User, _ Config) error {
+ return actions_service.Cleanup(ctx)
+ })
+}
diff --git a/services/cron/tasks_basic.go b/services/cron/tasks_basic.go
new file mode 100644
index 0000000..2a213ae
--- /dev/null
+++ b/services/cron/tasks_basic.go
@@ -0,0 +1,175 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cron
+
+import (
+ "context"
+ "time"
+
+ "code.gitea.io/gitea/models"
+ git_model "code.gitea.io/gitea/models/git"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/auth"
+ "code.gitea.io/gitea/services/migrations"
+ mirror_service "code.gitea.io/gitea/services/mirror"
+ packages_cleanup_service "code.gitea.io/gitea/services/packages/cleanup"
+ repo_service "code.gitea.io/gitea/services/repository"
+ archiver_service "code.gitea.io/gitea/services/repository/archiver"
+)
+
+func registerUpdateMirrorTask() {
+ type UpdateMirrorTaskConfig struct {
+ BaseConfig
+ PullLimit int
+ PushLimit int
+ }
+
+ RegisterTaskFatal("update_mirrors", &UpdateMirrorTaskConfig{
+ BaseConfig: BaseConfig{
+ Enabled: true,
+ RunAtStart: false,
+ Schedule: "@every 10m",
+ },
+ PullLimit: 50,
+ PushLimit: 50,
+ }, func(ctx context.Context, _ *user_model.User, cfg Config) error {
+ umtc := cfg.(*UpdateMirrorTaskConfig)
+ return mirror_service.Update(ctx, umtc.PullLimit, umtc.PushLimit)
+ })
+}
+
+func registerRepoHealthCheck() {
+ type RepoHealthCheckConfig struct {
+ BaseConfig
+ Timeout time.Duration
+ Args []string `delim:" "`
+ }
+ RegisterTaskFatal("repo_health_check", &RepoHealthCheckConfig{
+ BaseConfig: BaseConfig{
+ Enabled: true,
+ RunAtStart: false,
+ Schedule: "@midnight",
+ },
+ Timeout: 60 * time.Second,
+ Args: []string{},
+ }, func(ctx context.Context, _ *user_model.User, config Config) error {
+ rhcConfig := config.(*RepoHealthCheckConfig)
+ // the git args are set by the config, so they can be trusted
+ return repo_service.GitFsckRepos(ctx, rhcConfig.Timeout, git.ToTrustedCmdArgs(rhcConfig.Args))
+ })
+}
+
+func registerCheckRepoStats() {
+ RegisterTaskFatal("check_repo_stats", &BaseConfig{
+ Enabled: true,
+ RunAtStart: true,
+ Schedule: "@midnight",
+ }, func(ctx context.Context, _ *user_model.User, _ Config) error {
+ return models.CheckRepoStats(ctx)
+ })
+}
+
+func registerArchiveCleanup() {
+ RegisterTaskFatal("archive_cleanup", &OlderThanConfig{
+ BaseConfig: BaseConfig{
+ Enabled: true,
+ RunAtStart: true,
+ Schedule: "@midnight",
+ },
+ OlderThan: 24 * time.Hour,
+ }, func(ctx context.Context, _ *user_model.User, config Config) error {
+ acConfig := config.(*OlderThanConfig)
+ return archiver_service.DeleteOldRepositoryArchives(ctx, acConfig.OlderThan)
+ })
+}
+
+func registerSyncExternalUsers() {
+ RegisterTaskFatal("sync_external_users", &UpdateExistingConfig{
+ BaseConfig: BaseConfig{
+ Enabled: true,
+ RunAtStart: false,
+ Schedule: "@midnight",
+ },
+ UpdateExisting: true,
+ }, func(ctx context.Context, _ *user_model.User, config Config) error {
+ realConfig := config.(*UpdateExistingConfig)
+ return auth.SyncExternalUsers(ctx, realConfig.UpdateExisting)
+ })
+}
+
+func registerDeletedBranchesCleanup() {
+ RegisterTaskFatal("deleted_branches_cleanup", &OlderThanConfig{
+ BaseConfig: BaseConfig{
+ Enabled: true,
+ RunAtStart: true,
+ Schedule: "@midnight",
+ },
+ OlderThan: 24 * time.Hour,
+ }, func(ctx context.Context, _ *user_model.User, config Config) error {
+ realConfig := config.(*OlderThanConfig)
+ git_model.RemoveOldDeletedBranches(ctx, realConfig.OlderThan)
+ return nil
+ })
+}
+
+func registerUpdateMigrationPosterID() {
+ RegisterTaskFatal("update_migration_poster_id", &BaseConfig{
+ Enabled: true,
+ RunAtStart: true,
+ Schedule: "@midnight",
+ }, func(ctx context.Context, _ *user_model.User, _ Config) error {
+ return migrations.UpdateMigrationPosterID(ctx)
+ })
+}
+
+func registerCleanupHookTaskTable() {
+ RegisterTaskFatal("cleanup_hook_task_table", &CleanupHookTaskConfig{
+ BaseConfig: BaseConfig{
+ Enabled: true,
+ RunAtStart: false,
+ Schedule: "@midnight",
+ },
+ CleanupType: "OlderThan",
+ OlderThan: 168 * time.Hour,
+ NumberToKeep: 10,
+ }, func(ctx context.Context, _ *user_model.User, config Config) error {
+ realConfig := config.(*CleanupHookTaskConfig)
+ return webhook.CleanupHookTaskTable(ctx, webhook.ToHookTaskCleanupType(realConfig.CleanupType), realConfig.OlderThan, realConfig.NumberToKeep)
+ })
+}
+
+func registerCleanupPackages() {
+ RegisterTaskFatal("cleanup_packages", &OlderThanConfig{
+ BaseConfig: BaseConfig{
+ Enabled: true,
+ RunAtStart: true,
+ Schedule: "@midnight",
+ },
+ OlderThan: 24 * time.Hour,
+ }, func(ctx context.Context, _ *user_model.User, config Config) error {
+ realConfig := config.(*OlderThanConfig)
+ return packages_cleanup_service.CleanupTask(ctx, realConfig.OlderThan)
+ })
+}
+
+func initBasicTasks() {
+ if setting.Mirror.Enabled {
+ registerUpdateMirrorTask()
+ }
+ registerRepoHealthCheck()
+ registerCheckRepoStats()
+ registerArchiveCleanup()
+ registerSyncExternalUsers()
+ registerDeletedBranchesCleanup()
+ if !setting.Repository.DisableMigrations {
+ registerUpdateMigrationPosterID()
+ }
+ registerCleanupHookTaskTable()
+ if setting.Packages.Enabled {
+ registerCleanupPackages()
+ }
+}
diff --git a/services/cron/tasks_extended.go b/services/cron/tasks_extended.go
new file mode 100644
index 0000000..e1ba527
--- /dev/null
+++ b/services/cron/tasks_extended.go
@@ -0,0 +1,243 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cron
+
+import (
+ "context"
+ "time"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/system"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ issue_indexer "code.gitea.io/gitea/modules/indexer/issues"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/updatechecker"
+ repo_service "code.gitea.io/gitea/services/repository"
+ archiver_service "code.gitea.io/gitea/services/repository/archiver"
+ user_service "code.gitea.io/gitea/services/user"
+)
+
+func registerDeleteInactiveUsers() {
+ RegisterTaskFatal("delete_inactive_accounts", &OlderThanConfig{
+ BaseConfig: BaseConfig{
+ Enabled: false,
+ RunAtStart: false,
+ Schedule: "@annually",
+ },
+ OlderThan: time.Minute * time.Duration(setting.Service.ActiveCodeLives),
+ }, func(ctx context.Context, _ *user_model.User, config Config) error {
+ olderThanConfig := config.(*OlderThanConfig)
+ return user_service.DeleteInactiveUsers(ctx, olderThanConfig.OlderThan)
+ })
+}
+
+func registerDeleteRepositoryArchives() {
+ RegisterTaskFatal("delete_repo_archives", &BaseConfig{
+ Enabled: false,
+ RunAtStart: false,
+ Schedule: "@annually",
+ }, func(ctx context.Context, _ *user_model.User, _ Config) error {
+ return archiver_service.DeleteRepositoryArchives(ctx)
+ })
+}
+
+func registerGarbageCollectRepositories() {
+ type RepoHealthCheckConfig struct {
+ BaseConfig
+ Timeout time.Duration
+ Args []string `delim:" "`
+ }
+ RegisterTaskFatal("git_gc_repos", &RepoHealthCheckConfig{
+ BaseConfig: BaseConfig{
+ Enabled: false,
+ RunAtStart: false,
+ Schedule: "@every 72h",
+ },
+ Timeout: time.Duration(setting.Git.Timeout.GC) * time.Second,
+ Args: setting.Git.GCArgs,
+ }, func(ctx context.Context, _ *user_model.User, config Config) error {
+ rhcConfig := config.(*RepoHealthCheckConfig)
+ // the git args are set by the config, so they can be trusted
+ return repo_service.GitGcRepos(ctx, rhcConfig.Timeout, git.ToTrustedCmdArgs(rhcConfig.Args))
+ })
+}
+
+func registerRewriteAllPublicKeys() {
+ RegisterTaskFatal("resync_all_sshkeys", &BaseConfig{
+ Enabled: false,
+ RunAtStart: false,
+ Schedule: "@every 72h",
+ }, func(ctx context.Context, _ *user_model.User, _ Config) error {
+ return asymkey_model.RewriteAllPublicKeys(ctx)
+ })
+}
+
+func registerRewriteAllPrincipalKeys() {
+ RegisterTaskFatal("resync_all_sshprincipals", &BaseConfig{
+ Enabled: false,
+ RunAtStart: false,
+ Schedule: "@every 72h",
+ }, func(ctx context.Context, _ *user_model.User, _ Config) error {
+ return asymkey_model.RewriteAllPrincipalKeys(ctx)
+ })
+}
+
+func registerRepositoryUpdateHook() {
+ RegisterTaskFatal("resync_all_hooks", &BaseConfig{
+ Enabled: false,
+ RunAtStart: false,
+ Schedule: "@every 72h",
+ }, func(ctx context.Context, _ *user_model.User, _ Config) error {
+ return repo_service.SyncRepositoryHooks(ctx)
+ })
+}
+
+func registerReinitMissingRepositories() {
+ RegisterTaskFatal("reinit_missing_repos", &BaseConfig{
+ Enabled: false,
+ RunAtStart: false,
+ Schedule: "@every 72h",
+ }, func(ctx context.Context, _ *user_model.User, _ Config) error {
+ return repo_service.ReinitMissingRepositories(ctx)
+ })
+}
+
+func registerDeleteMissingRepositories() {
+ RegisterTaskFatal("delete_missing_repos", &BaseConfig{
+ Enabled: false,
+ RunAtStart: false,
+ Schedule: "@every 72h",
+ }, func(ctx context.Context, user *user_model.User, _ Config) error {
+ return repo_service.DeleteMissingRepositories(ctx, user)
+ })
+}
+
+func registerRemoveRandomAvatars() {
+ RegisterTaskFatal("delete_generated_repository_avatars", &BaseConfig{
+ Enabled: false,
+ RunAtStart: false,
+ Schedule: "@every 72h",
+ }, func(ctx context.Context, _ *user_model.User, _ Config) error {
+ return repo_service.RemoveRandomAvatars(ctx)
+ })
+}
+
+func registerDeleteOldActions() {
+ RegisterTaskFatal("delete_old_actions", &OlderThanConfig{
+ BaseConfig: BaseConfig{
+ Enabled: false,
+ RunAtStart: false,
+ Schedule: "@every 168h",
+ },
+ OlderThan: 365 * 24 * time.Hour,
+ }, func(ctx context.Context, _ *user_model.User, config Config) error {
+ olderThanConfig := config.(*OlderThanConfig)
+ return activities_model.DeleteOldActions(ctx, olderThanConfig.OlderThan)
+ })
+}
+
+func registerUpdateGiteaChecker() {
+ type UpdateCheckerConfig struct {
+ BaseConfig
+ HTTPEndpoint string
+ DomainEndpoint string
+ }
+ RegisterTaskFatal("update_checker", &UpdateCheckerConfig{
+ BaseConfig: BaseConfig{
+ Enabled: true,
+ RunAtStart: false,
+ Schedule: "@every 168h",
+ },
+ HTTPEndpoint: "https://dl.gitea.com/gitea/version.json",
+ DomainEndpoint: "release.forgejo.org",
+ }, func(ctx context.Context, _ *user_model.User, config Config) error {
+ updateCheckerConfig := config.(*UpdateCheckerConfig)
+ return updatechecker.GiteaUpdateChecker(updateCheckerConfig.HTTPEndpoint, updateCheckerConfig.DomainEndpoint)
+ })
+}
+
+func registerDeleteOldSystemNotices() {
+ RegisterTaskFatal("delete_old_system_notices", &OlderThanConfig{
+ BaseConfig: BaseConfig{
+ Enabled: false,
+ RunAtStart: false,
+ Schedule: "@every 168h",
+ },
+ OlderThan: 365 * 24 * time.Hour,
+ }, func(ctx context.Context, _ *user_model.User, config Config) error {
+ olderThanConfig := config.(*OlderThanConfig)
+ return system.DeleteOldSystemNotices(ctx, olderThanConfig.OlderThan)
+ })
+}
+
+func registerGCLFS() {
+ if !setting.LFS.StartServer {
+ return
+ }
+ type GCLFSConfig struct {
+ OlderThanConfig
+ LastUpdatedMoreThanAgo time.Duration
+ NumberToCheckPerRepo int64
+ ProportionToCheckPerRepo float64
+ }
+
+ RegisterTaskFatal("gc_lfs", &GCLFSConfig{
+ OlderThanConfig: OlderThanConfig{
+ BaseConfig: BaseConfig{
+ Enabled: false,
+ RunAtStart: false,
+ Schedule: "@every 24h",
+ },
+ // Only attempt to garbage collect lfs meta objects older than a week as the order of git lfs upload
+ // and git object upload is not necessarily guaranteed. It's possible to imagine a situation whereby
+ // an LFS object is uploaded but the git branch is not uploaded immediately, or there are some rapid
+ // changes in new branches that might lead to lfs objects becoming temporarily unassociated with git
+ // objects.
+ //
+ // A week is likely excessive, but it should definitely be enough that any
+ // unassociated LFS object is genuinely unassociated.
+ OlderThan: 24 * time.Hour * 7,
+ },
+ // Only GC things that haven't been looked at in the past 3 days
+ LastUpdatedMoreThanAgo: 24 * time.Hour * 3,
+ NumberToCheckPerRepo: 100,
+ ProportionToCheckPerRepo: 0.6,
+ }, func(ctx context.Context, _ *user_model.User, config Config) error {
+ gcLFSConfig := config.(*GCLFSConfig)
+ return repo_service.GarbageCollectLFSMetaObjects(ctx, repo_service.GarbageCollectLFSMetaObjectsOptions{
+ AutoFix: true,
+ OlderThan: time.Now().Add(-gcLFSConfig.OlderThan),
+ UpdatedLessRecentlyThan: time.Now().Add(-gcLFSConfig.LastUpdatedMoreThanAgo),
+ })
+ })
+}
+
+func registerRebuildIssueIndexer() {
+ RegisterTaskFatal("rebuild_issue_indexer", &BaseConfig{
+ Enabled: false,
+ RunAtStart: false,
+ Schedule: "@annually",
+ }, func(ctx context.Context, _ *user_model.User, config Config) error {
+ return issue_indexer.PopulateIssueIndexer(ctx)
+ })
+}
+
+func initExtendedTasks() {
+ registerDeleteInactiveUsers()
+ registerDeleteRepositoryArchives()
+ registerGarbageCollectRepositories()
+ registerRewriteAllPublicKeys()
+ registerRewriteAllPrincipalKeys()
+ registerRepositoryUpdateHook()
+ registerReinitMissingRepositories()
+ registerDeleteMissingRepositories()
+ registerRemoveRandomAvatars()
+ registerDeleteOldActions()
+ registerUpdateGiteaChecker()
+ registerDeleteOldSystemNotices()
+ registerGCLFS()
+ registerRebuildIssueIndexer()
+}
diff --git a/services/cron/tasks_test.go b/services/cron/tasks_test.go
new file mode 100644
index 0000000..9b969a6
--- /dev/null
+++ b/services/cron/tasks_test.go
@@ -0,0 +1,68 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cron
+
+import (
+ "sort"
+ "strconv"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestAddTaskToScheduler(t *testing.T) {
+ assert.Empty(t, scheduler.Jobs())
+ defer scheduler.Clear()
+
+ // no seconds
+ err := addTaskToScheduler(&Task{
+ Name: "task 1",
+ config: &BaseConfig{
+ Schedule: "5 4 * * *",
+ },
+ })
+ require.NoError(t, err)
+ jobs := scheduler.Jobs()
+ assert.Len(t, jobs, 1)
+ assert.Equal(t, "task 1", jobs[0].Tags()[0])
+ assert.Equal(t, "5 4 * * *", jobs[0].Tags()[1])
+
+ // with seconds
+ err = addTaskToScheduler(&Task{
+ Name: "task 2",
+ config: &BaseConfig{
+ Schedule: "30 5 4 * * *",
+ },
+ })
+ require.NoError(t, err)
+	jobs = scheduler.Jobs() // the job order is not guaranteed, so sort before asserting
+ sort.Slice(jobs, func(i, j int) bool {
+ return jobs[i].Tags()[0] < jobs[j].Tags()[0]
+ })
+ assert.Len(t, jobs, 2)
+ assert.Equal(t, "task 2", jobs[1].Tags()[0])
+ assert.Equal(t, "30 5 4 * * *", jobs[1].Tags()[1])
+}
+
+func TestScheduleHasSeconds(t *testing.T) {
+ tests := []struct {
+ schedule string
+ hasSecond bool
+ }{
+		{"* * * * * *", true},
+		{"* * * * *", false},
+		{"5 4 * * *", false},
+		{"5,8 4 * * *", false},
+	}
+
+ for i, test := range tests {
+ t.Run(strconv.Itoa(i), func(t *testing.T) {
+ assert.Equal(t, test.hasSecond, scheduleHasSeconds(test.schedule))
+ })
+ }
+}
diff --git a/services/doctor/authorizedkeys.go b/services/doctor/authorizedkeys.go
new file mode 100644
index 0000000..2920cf5
--- /dev/null
+++ b/services/doctor/authorizedkeys.go
@@ -0,0 +1,100 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package doctor
+
+import (
+ "bufio"
+ "bytes"
+ "context"
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+)
+
+const tplCommentPrefix = `# gitea public key`
+
+func checkAuthorizedKeys(ctx context.Context, logger log.Logger, autofix bool) error {
+ if setting.SSH.StartBuiltinServer || !setting.SSH.CreateAuthorizedKeysFile {
+ return nil
+ }
+
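+	// Strategy: collect the non-comment lines currently in authorized_keys into
+	// a set, regenerate the expected contents in memory, and report (or, with
+	// autofix, rewrite) any regenerated line that is missing from the file.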
+ fPath := filepath.Join(setting.SSH.RootPath, "authorized_keys")
+ f, err := os.Open(fPath)
+ if err != nil {
+ if !autofix {
+ logger.Critical("Unable to open authorized_keys file. ERROR: %v", err)
+ return fmt.Errorf("Unable to open authorized_keys file. ERROR: %w", err)
+ }
+ logger.Warn("Unable to open authorized_keys. (ERROR: %v). Attempting to rewrite...", err)
+		if err = asymkey_model.RewriteAllPublicKeys(ctx); err != nil {
+			logger.Critical("Unable to rewrite authorized_keys file. ERROR: %v", err)
+			return fmt.Errorf("Unable to rewrite authorized_keys file. ERROR: %w", err)
+		}
+		// reopen the freshly rewritten file so the comparison below checks it
+		if f, err = os.Open(fPath); err != nil {
+			logger.Critical("Unable to open rewritten authorized_keys file. ERROR: %v", err)
+			return fmt.Errorf("Unable to open rewritten authorized_keys file. ERROR: %w", err)
+		}
+	}
+	defer f.Close()
+
+ linesInAuthorizedKeys := make(container.Set[string])
+
+ scanner := bufio.NewScanner(f)
+ for scanner.Scan() {
+ line := scanner.Text()
+ if strings.HasPrefix(line, tplCommentPrefix) {
+ continue
+ }
+ linesInAuthorizedKeys.Add(line)
+ }
+ if err = scanner.Err(); err != nil {
+ return fmt.Errorf("scan: %w", err)
+ }
+	// although there is a "defer f.Close()" above, close the file explicitly here
+	// before regenerating, because the regeneration reopens the file for writing
+ _ = f.Close()
+
+ // now we regenerate and check if there are any lines missing
+ regenerated := &bytes.Buffer{}
+ if err := asymkey_model.RegeneratePublicKeys(ctx, regenerated); err != nil {
+ logger.Critical("Unable to regenerate authorized_keys file. ERROR: %v", err)
+ return fmt.Errorf("Unable to regenerate authorized_keys file. ERROR: %w", err)
+ }
+ scanner = bufio.NewScanner(regenerated)
+ for scanner.Scan() {
+ line := scanner.Text()
+ if strings.HasPrefix(line, tplCommentPrefix) {
+ continue
+ }
+ if linesInAuthorizedKeys.Contains(line) {
+ continue
+ }
+ if !autofix {
+ logger.Critical(
+ "authorized_keys file %q is out of date.\nRegenerate it with:\n\t\"%s\"\nor\n\t\"%s\"",
+ fPath,
+ "forgejo admin regenerate keys",
+ "forgejo doctor check --run authorized-keys --fix")
+ return fmt.Errorf(`authorized_keys is out of date and should be regenerated with "forgejo admin regenerate keys" or "forgejo doctor check --run authorized-keys --fix"`)
+ }
+ logger.Warn("authorized_keys is out of date. Attempting rewrite...")
+ err = asymkey_model.RewriteAllPublicKeys(ctx)
+ if err != nil {
+ logger.Critical("Unable to rewrite authorized_keys file. ERROR: %v", err)
+ return fmt.Errorf("Unable to rewrite authorized_keys file. ERROR: %w", err)
+ }
+ }
+ return nil
+}
+
+func init() {
+ Register(&Check{
+ Title: "Check if OpenSSH authorized_keys file is up-to-date",
+ Name: "authorized-keys",
+ IsDefault: true,
+ Run: checkAuthorizedKeys,
+ Priority: 4,
+ })
+}
diff --git a/services/doctor/breaking.go b/services/doctor/breaking.go
new file mode 100644
index 0000000..77e3d4e
--- /dev/null
+++ b/services/doctor/breaking.go
@@ -0,0 +1,97 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package doctor
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+
+ "xorm.io/builder"
+)
+
+func iterateUserAccounts(ctx context.Context, each func(*user.User) error) error {
+ err := db.Iterate(
+ ctx,
+ builder.Gt{"id": 0},
+ func(ctx context.Context, bean *user.User) error {
+ return each(bean)
+ },
+ )
+ return err
+}
+
+// Since 1.16.4, new restrictions have been placed on email addresses. Users with
+// email addresses that are now invalid would currently be facing an error.
+// Ref: https://github.com/go-gitea/gitea/pull/19085 & https://github.com/go-gitea/gitea/pull/17688
+func checkUserEmail(ctx context.Context, logger log.Logger, _ bool) error {
+ // We could use quirky SQL to get all users that start without a [a-zA-Z0-9], but that would mean
+ // DB provider-specific SQL and only works _now_. So instead we iterate through all user accounts
+ // and use the user.ValidateEmail function to be future-proof.
+ var invalidUserCount int64
+ if err := iterateUserAccounts(ctx, func(u *user.User) error {
+ // Only check for users, skip
+ if u.Type != user.UserTypeIndividual {
+ return nil
+ }
+
+ if err := user.ValidateEmail(u.Email); err != nil {
+ invalidUserCount++
+			logger.Warn("User[id=%d name=%q] does not have a valid e-mail: %v", u.ID, u.Name, err)
+ }
+ return nil
+ }); err != nil {
+ return fmt.Errorf("iterateUserAccounts: %w", err)
+ }
+
+ if invalidUserCount == 0 {
+ logger.Info("All users have a valid e-mail.")
+ } else {
+		logger.Warn("%d user(s) have an invalid e-mail.", invalidUserCount)
+ }
+ return nil
+}
+
+// From time to time Gitea makes changes to the reserved usernames and which symbols
+// are allowed for various reasons. This check helps with detecting users that, according
+// to our reserved names, don't have a valid username.
+func checkUserName(ctx context.Context, logger log.Logger, _ bool) error {
+ var invalidUserCount int64
+ if err := iterateUserAccounts(ctx, func(u *user.User) error {
+ if err := user.IsUsableUsername(u.Name); err != nil {
+ invalidUserCount++
+ logger.Warn("User[id=%d] does not have a valid username: %v", u.ID, err)
+ }
+ return nil
+ }); err != nil {
+ return fmt.Errorf("iterateUserAccounts: %w", err)
+ }
+
+ if invalidUserCount == 0 {
+ logger.Info("All users have a valid username.")
+ } else {
+		logger.Warn("%d user(s) have an invalid username.", invalidUserCount)
+ }
+ return nil
+}
+
+func init() {
+ Register(&Check{
+		Title:     "Check if users have a valid email address",
+ Name: "check-user-email",
+ IsDefault: false,
+ Run: checkUserEmail,
+ Priority: 9,
+ })
+ Register(&Check{
+ Title: "Check if users have a valid username",
+ Name: "check-user-names",
+ IsDefault: false,
+ Run: checkUserName,
+ Priority: 9,
+ })
+}
diff --git a/services/doctor/checkOldArchives.go b/services/doctor/checkOldArchives.go
new file mode 100644
index 0000000..390dfb4
--- /dev/null
+++ b/services/doctor/checkOldArchives.go
@@ -0,0 +1,59 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package doctor
+
+import (
+ "context"
+ "os"
+ "path/filepath"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/util"
+)
+
+func checkOldArchives(ctx context.Context, logger log.Logger, autofix bool) error {
+ numRepos := 0
+ numReposUpdated := 0
+ err := iterateRepositories(ctx, func(repo *repo_model.Repository) error {
+ if repo.IsEmpty {
+ return nil
+ }
+
+ p := filepath.Join(repo.RepoPath(), "archives")
+ isDir, err := util.IsDir(p)
+ if err != nil {
+ log.Warn("check if %s is directory failed: %v", p, err)
+ }
+ if isDir {
+ numRepos++
+ if autofix {
+ if err := os.RemoveAll(p); err == nil {
+ numReposUpdated++
+ } else {
+ log.Warn("remove %s failed: %v", p, err)
+ }
+ }
+ }
+ return nil
+ })
+
+ if autofix {
+		logger.Info("Deleted old archives in %d of %d repositories", numReposUpdated, numRepos)
+	} else {
+		logger.Info("Old archives in %d repositories need to be deleted", numRepos)
+ }
+
+ return err
+}
+
+func init() {
+ Register(&Check{
+ Title: "Check old archives",
+ Name: "check-old-archives",
+ IsDefault: false,
+ Run: checkOldArchives,
+ Priority: 7,
+ })
+}
diff --git a/services/doctor/dbconsistency.go b/services/doctor/dbconsistency.go
new file mode 100644
index 0000000..80f538d
--- /dev/null
+++ b/services/doctor/dbconsistency.go
@@ -0,0 +1,268 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package doctor
+
+import (
+ "context"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ activities_model "code.gitea.io/gitea/models/activities"
+ auth_model "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/migrations"
+ org_model "code.gitea.io/gitea/models/organization"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+)
+
+type consistencyCheck struct {
+ Name string
+ Counter func(context.Context) (int64, error)
+ Fixer func(context.Context) (int64, error)
+ FixedMessage string
+}
+
+func (c *consistencyCheck) Run(ctx context.Context, logger log.Logger, autofix bool) error {
+ count, err := c.Counter(ctx)
+ if err != nil {
+ logger.Critical("Error: %v whilst counting %s", err, c.Name)
+ return err
+ }
+ if count > 0 {
+ if autofix {
+ var fixed int64
+ if fixed, err = c.Fixer(ctx); err != nil {
+ logger.Critical("Error: %v whilst fixing %s", err, c.Name)
+ return err
+ }
+
+ prompt := "Deleted"
+ if c.FixedMessage != "" {
+ prompt = c.FixedMessage
+ }
+
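+			// a negative count signals a fixer that cannot report how many
+			// rows it touched (see asFixer), so only the total is printed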
+ if fixed < 0 {
+ logger.Info(prompt+" %d %s", count, c.Name)
+ } else {
+ logger.Info(prompt+" %d/%d %s", fixed, count, c.Name)
+ }
+ } else {
+ logger.Warn("Found %d %s", count, c.Name)
+ }
+ }
+ return nil
+}
+
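+// asFixer adapts a count-less fixer to the Fixer signature; the -1 return
+// value tells consistencyCheck.Run that no per-row fixed count is available.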
+func asFixer(fn func(ctx context.Context) error) func(ctx context.Context) (int64, error) {
+ return func(ctx context.Context) (int64, error) {
+ err := fn(ctx)
+ return -1, err
+ }
+}
+
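+// genericOrphanCheck builds a consistencyCheck that counts, and on autofix
+// deletes, rows of `subject` that have no matching row in `refobject`
+// according to joincond.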
+func genericOrphanCheck(name, subject, refobject, joincond string) consistencyCheck {
+ return consistencyCheck{
+ Name: name,
+ Counter: func(ctx context.Context) (int64, error) {
+ return db.CountOrphanedObjects(ctx, subject, refobject, joincond)
+ },
+ Fixer: func(ctx context.Context) (int64, error) {
+ err := db.DeleteOrphanedObjects(ctx, subject, refobject, joincond)
+ return -1, err
+ },
+ }
+}
+
+func checkDBConsistency(ctx context.Context, logger log.Logger, autofix bool) error {
+ // make sure DB version is up-to-date
+ if err := db.InitEngineWithMigration(ctx, migrations.EnsureUpToDate); err != nil {
+ logger.Critical("Model version on the database does not match the current Gitea version. Model consistency will not be checked until the database is upgraded")
+ return err
+ }
+
+ consistencyChecks := []consistencyCheck{
+ {
+ // find labels without existing repo or org
+ Name: "Orphaned Labels without existing repository or organisation",
+ Counter: issues_model.CountOrphanedLabels,
+ Fixer: asFixer(issues_model.DeleteOrphanedLabels),
+ },
+ {
+ // find IssueLabels without existing label
+ Name: "Orphaned Issue Labels without existing label",
+ Counter: issues_model.CountOrphanedIssueLabels,
+ Fixer: asFixer(issues_model.DeleteOrphanedIssueLabels),
+ },
+ {
+ // find issues without existing repository
+ Name: "Orphaned Issues without existing repository",
+ Counter: issues_model.CountOrphanedIssues,
+ Fixer: asFixer(issues_model.DeleteOrphanedIssues),
+ },
+ // find releases without existing repository
+ genericOrphanCheck("Orphaned Releases without existing repository",
+ "release", "repository", "`release`.repo_id=repository.id"),
+ // find pulls without existing issues
+ genericOrphanCheck("Orphaned PullRequests without existing issue",
+ "pull_request", "issue", "pull_request.issue_id=issue.id"),
+ // find pull requests without base repository
+ genericOrphanCheck("Pull request entries without existing base repository",
+ "pull_request", "repository", "pull_request.base_repo_id=repository.id"),
+ // find tracked times without existing issues/pulls
+ genericOrphanCheck("Orphaned TrackedTimes without existing issue",
+ "tracked_time", "issue", "tracked_time.issue_id=issue.id"),
+ // find attachments without existing issues or releases
+ {
+ Name: "Orphaned Attachments without existing issues or releases",
+ Counter: repo_model.CountOrphanedAttachments,
+ Fixer: asFixer(repo_model.DeleteOrphanedAttachments),
+ },
+ // find null archived repositories
+ {
+ Name: "Repositories with is_archived IS NULL",
+ Counter: repo_model.CountNullArchivedRepository,
+ Fixer: repo_model.FixNullArchivedRepository,
+ FixedMessage: "Fixed",
+ },
+ // find label comments with empty labels
+ {
+ Name: "Label comments with empty labels",
+ Counter: issues_model.CountCommentTypeLabelWithEmptyLabel,
+ Fixer: issues_model.FixCommentTypeLabelWithEmptyLabel,
+ FixedMessage: "Fixed",
+ },
+ // find label comments with labels from outside the repository
+ {
+ Name: "Label comments with labels from outside the repository",
+ Counter: issues_model.CountCommentTypeLabelWithOutsideLabels,
+ Fixer: issues_model.FixCommentTypeLabelWithOutsideLabels,
+ FixedMessage: "Removed",
+ },
+ // find issue_label with labels from outside the repository
+ {
+ Name: "IssueLabels with Labels from outside the repository",
+ Counter: issues_model.CountIssueLabelWithOutsideLabels,
+ Fixer: issues_model.FixIssueLabelWithOutsideLabels,
+ FixedMessage: "Removed",
+ },
+ {
+ Name: "Action with created_unix set as an empty string",
+ Counter: activities_model.CountActionCreatedUnixString,
+ Fixer: activities_model.FixActionCreatedUnixString,
+ FixedMessage: "Set to zero",
+ },
+ {
+ Name: "Action Runners without existing owner",
+ Counter: actions_model.CountRunnersWithoutBelongingOwner,
+ Fixer: actions_model.FixRunnersWithoutBelongingOwner,
+ FixedMessage: "Removed",
+ },
+ {
+ Name: "Action Runners without existing repository",
+ Counter: actions_model.CountRunnersWithoutBelongingRepo,
+ Fixer: actions_model.FixRunnersWithoutBelongingRepo,
+ FixedMessage: "Removed",
+ },
+ {
+ Name: "Topics with empty repository count",
+ Counter: repo_model.CountOrphanedTopics,
+ Fixer: repo_model.DeleteOrphanedTopics,
+ FixedMessage: "Removed",
+ },
+ {
+ Name: "Orphaned OAuth2Application without existing User",
+ Counter: auth_model.CountOrphanedOAuth2Applications,
+ Fixer: auth_model.DeleteOrphanedOAuth2Applications,
+ FixedMessage: "Removed",
+ },
+ {
+ Name: "Owner teams with no admin access",
+ Counter: org_model.CountInconsistentOwnerTeams,
+ Fixer: org_model.FixInconsistentOwnerTeams,
+ FixedMessage: "Fixed",
+ },
+ }
+
+ // TODO: function to recalc all counters
+
+ if setting.Database.Type.IsPostgreSQL() {
+ consistencyChecks = append(consistencyChecks, consistencyCheck{
+ Name: "Sequence values",
+ Counter: db.CountBadSequences,
+ Fixer: asFixer(db.FixBadSequences),
+ FixedMessage: "Updated",
+ })
+ }
+
+ consistencyChecks = append(consistencyChecks,
+ // find protected branches without existing repository
+ genericOrphanCheck("Protected Branches without existing repository",
+ "protected_branch", "repository", "protected_branch.repo_id=repository.id"),
+ // find branches without existing repository
+ genericOrphanCheck("Branches without existing repository",
+ "branch", "repository", "branch.repo_id=repository.id"),
+ // find LFS locks without existing repository
+ genericOrphanCheck("LFS locks without existing repository",
+ "lfs_lock", "repository", "lfs_lock.repo_id=repository.id"),
+ // find collaborations without users
+ genericOrphanCheck("Collaborations without existing user",
+ "collaboration", "user", "collaboration.user_id=`user`.id"),
+ // find collaborations without repository
+ genericOrphanCheck("Collaborations without existing repository",
+ "collaboration", "repository", "collaboration.repo_id=repository.id"),
+ // find access without users
+ genericOrphanCheck("Access entries without existing user",
+ "access", "user", "access.user_id=`user`.id"),
+ // find access without repository
+ genericOrphanCheck("Access entries without existing repository",
+ "access", "repository", "access.repo_id=repository.id"),
+ // find action without repository
+ genericOrphanCheck("Action entries without existing repository",
+ "action", "repository", "action.repo_id=repository.id"),
+ // find action without user
+ genericOrphanCheck("Action entries without existing user",
+ "action", "user", "action.act_user_id=`user`.id"),
+ // find OAuth2Grant without existing user
+ genericOrphanCheck("Orphaned OAuth2Grant without existing User",
+ "oauth2_grant", "user", "oauth2_grant.user_id=`user`.id"),
+ // find OAuth2AuthorizationCode without existing OAuth2Grant
+ genericOrphanCheck("Orphaned OAuth2AuthorizationCode without existing OAuth2Grant",
+ "oauth2_authorization_code", "oauth2_grant", "oauth2_authorization_code.grant_id=oauth2_grant.id"),
+ // find stopwatches without existing user
+ genericOrphanCheck("Orphaned Stopwatches without existing User",
+ "stopwatch", "user", "stopwatch.user_id=`user`.id"),
+ // find stopwatches without existing issue
+ genericOrphanCheck("Orphaned Stopwatches without existing Issue",
+ "stopwatch", "issue", "stopwatch.issue_id=`issue`.id"),
+ // find redirects without existing user.
+ genericOrphanCheck("Orphaned Redirects without existing redirect user",
+ "user_redirect", "user", "user_redirect.redirect_user_id=`user`.id"),
+ // find archive download count without existing release
+ genericOrphanCheck("Archive download count without existing Release",
+ "repo_archive_download_count", "release", "repo_archive_download_count.release_id=release.id"),
+ // find authorization tokens without existing user
+ genericOrphanCheck("Authorization token without existing User",
+ "forgejo_auth_token", "user", "forgejo_auth_token.uid=`user`.id"),
+ )
+
+ for _, c := range consistencyChecks {
+ if err := c.Run(ctx, logger, autofix); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func init() {
+ Register(&Check{
+ Title: "Check consistency of database",
+ Name: "check-db-consistency",
+ IsDefault: false,
+ Run: checkDBConsistency,
+ Priority: 3,
+ })
+}
diff --git a/services/doctor/dbversion.go b/services/doctor/dbversion.go
new file mode 100644
index 0000000..2b20cb2
--- /dev/null
+++ b/services/doctor/dbversion.go
@@ -0,0 +1,42 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package doctor
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/migrations"
+ "code.gitea.io/gitea/modules/log"
+)
+
+func checkDBVersion(ctx context.Context, logger log.Logger, autofix bool) error {
+ logger.Info("Expected database version: %d", migrations.ExpectedVersion())
+ if err := db.InitEngineWithMigration(ctx, migrations.EnsureUpToDate); err != nil {
+ if !autofix {
+ logger.Critical("Error: %v during ensure up to date", err)
+ return err
+ }
+ logger.Warn("Got Error: %v during ensure up to date", err)
+ logger.Warn("Attempting to migrate to the latest DB version to fix this.")
+
+ err = db.InitEngineWithMigration(ctx, migrations.Migrate)
+ if err != nil {
+ logger.Critical("Error: %v during migration", err)
+ }
+ return err
+ }
+ return nil
+}
+
+func init() {
+ Register(&Check{
+ Title: "Check Database Version",
+ Name: "check-db-version",
+ IsDefault: true,
+ Run: checkDBVersion,
+ AbortIfFailed: false,
+ Priority: 2,
+ })
+}
diff --git a/services/doctor/doctor.go b/services/doctor/doctor.go
new file mode 100644
index 0000000..a4eb5e1
--- /dev/null
+++ b/services/doctor/doctor.go
@@ -0,0 +1,138 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package doctor
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "sort"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+)
+
+// Check represents a Doctor check
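+// Name selects the check on the command line, Priority orders it (see
+// SortChecks), AbortIfFailed stops the whole run when the check errors, and
+// the SkipDatabaseInitialization/InitStorage flags control the lazy database
+// and storage setup performed by RunChecks.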
+type Check struct {
+ Title string
+ Name string
+ IsDefault bool
+ Run func(ctx context.Context, logger log.Logger, autofix bool) error
+ AbortIfFailed bool
+ SkipDatabaseInitialization bool
+ Priority int
+ InitStorage bool
+}
+
+func initDBSkipLogger(ctx context.Context) error {
+ setting.MustInstalled()
+ setting.LoadDBSetting()
+ if err := db.InitEngine(ctx); err != nil {
+ return fmt.Errorf("db.InitEngine: %w", err)
+ }
+ // some doctor sub-commands need to use git command
+ if err := git.InitFull(ctx); err != nil {
+ return fmt.Errorf("git.InitFull: %w", err)
+ }
+ return nil
+}
+
+type doctorCheckLogger struct {
+ colorize bool
+}
+
+var _ log.BaseLogger = (*doctorCheckLogger)(nil)
+
+func (d *doctorCheckLogger) Log(skip int, level log.Level, format string, v ...any) {
+ _, _ = fmt.Fprintf(os.Stdout, format+"\n", v...)
+}
+
+func (d *doctorCheckLogger) GetLevel() log.Level {
+ return log.TRACE
+}
+
+type doctorCheckStepLogger struct {
+ colorize bool
+}
+
+var _ log.BaseLogger = (*doctorCheckStepLogger)(nil)
+
+func (d *doctorCheckStepLogger) Log(skip int, level log.Level, format string, v ...any) {
+ levelChar := fmt.Sprintf("[%s]", strings.ToUpper(level.String()[0:1]))
+ var levelArg any = levelChar
+ if d.colorize {
+ levelArg = log.NewColoredValue(levelChar, level.ColorAttributes()...)
+ }
+ args := append([]any{levelArg}, v...)
+ _, _ = fmt.Fprintf(os.Stdout, " - %s "+format+"\n", args...)
+}
+
+func (d *doctorCheckStepLogger) GetLevel() log.Level {
+ return log.TRACE
+}
+
+// Checks is the list of available commands
+var Checks []*Check
+
+// RunChecks runs the doctor checks for the provided list
+func RunChecks(ctx context.Context, colorize, autofix bool, checks []*Check) error {
+ SortChecks(checks)
+ // the checks output logs by a special logger, they do not use the default logger
+ logger := log.BaseLoggerToGeneralLogger(&doctorCheckLogger{colorize: colorize})
+ loggerStep := log.BaseLoggerToGeneralLogger(&doctorCheckStepLogger{colorize: colorize})
+ dbIsInit := false
+ storageIsInit := false
+ for i, check := range checks {
+ if !dbIsInit && !check.SkipDatabaseInitialization {
+ // Only open database after the most basic configuration check
+ if err := initDBSkipLogger(ctx); err != nil {
+ logger.Error("Error whilst initializing the database: %v", err)
+ logger.Error("Check if you are using the right config file. You can use a --config directive to specify one.")
+ return nil
+ }
+ dbIsInit = true
+ }
+ if !storageIsInit && check.InitStorage {
+ if err := storage.Init(); err != nil {
+ logger.Error("Error whilst initializing the storage: %v", err)
+ logger.Error("Check if you are using the right config file. You can use a --config directive to specify one.")
+ return nil
+ }
+ storageIsInit = true
+ }
+ logger.Info("\n[%d] %s", i+1, check.Title)
+ if err := check.Run(ctx, loggerStep, autofix); err != nil {
+ if check.AbortIfFailed {
+ logger.Critical("FAIL")
+ return err
+ }
+ logger.Error("ERROR")
+ } else {
+ logger.Info("OK")
+ }
+ }
+ logger.Info("\nAll done (checks: %d).", len(checks))
+ return nil
+}
+
+// Register registers a command with the list
+func Register(command *Check) {
+ Checks = append(Checks, command)
+}
+
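+// SortChecks orders checks by ascending priority, breaking ties by name;
+// checks with a zero (unset) priority are never moved ahead of others.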
+func SortChecks(checks []*Check) {
+ sort.SliceStable(checks, func(i, j int) bool {
+ if checks[i].Priority == checks[j].Priority {
+ return checks[i].Name < checks[j].Name
+ }
+ if checks[i].Priority == 0 {
+ return false
+ }
+ return checks[i].Priority < checks[j].Priority
+ })
+}
diff --git a/services/doctor/fix16961.go b/services/doctor/fix16961.go
new file mode 100644
index 0000000..50d9ac6
--- /dev/null
+++ b/services/doctor/fix16961.go
@@ -0,0 +1,328 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package doctor
+
+import (
+ "bytes"
+ "context"
+ "errors"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "xorm.io/builder"
+)
+
+// #16831 revealed that the dump command was broken in 1.14.3-1.14.6 and 1.15.0 (#15885).
+// This led to the repo_unit and login_source cfg columns not being converted to JSON in
+// the dump. Unfortunately, although it was hoped that only a few users were affected, it
+// appears that many users are affected.
+//
+// We therefore need to provide a doctor command to fix this repeated issue (#16961).
+
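+// parseBool16961 decodes booleans as they appear in the broken dumps, where
+// fmt rendered them through %s as literals like "%!s(bool=true)".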
+func parseBool16961(bs []byte) (bool, error) {
+ if bytes.EqualFold(bs, []byte("%!s(bool=false)")) {
+ return false, nil
+ }
+
+ if bytes.EqualFold(bs, []byte("%!s(bool=true)")) {
+ return true, nil
+ }
+
+ return false, fmt.Errorf("unexpected bool format: %s", string(bs))
+}
+
+func fixUnitConfig16961(bs []byte, cfg *repo_model.UnitConfig) (fixed bool, err error) {
+ err = json.UnmarshalHandleDoubleEncode(bs, &cfg)
+ if err == nil {
+ return false, nil
+ }
+
+ // Handle #16961
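+	// the broken dump rendered an empty UnitConfig as "&{}"; treat that (or an
+	// empty value) as the default configuration and report it as fixed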
+ if string(bs) != "&{}" && len(bs) != 0 {
+ return false, err
+ }
+
+ return true, nil
+}
+
+func fixExternalWikiConfig16961(bs []byte, cfg *repo_model.ExternalWikiConfig) (fixed bool, err error) {
+ err = json.UnmarshalHandleDoubleEncode(bs, &cfg)
+ if err == nil {
+ return false, nil
+ }
+
+ if len(bs) < 3 {
+ return false, err
+ }
+ if bs[0] != '&' || bs[1] != '{' || bs[len(bs)-1] != '}' {
+ return false, err
+ }
+ cfg.ExternalWikiURL = string(bs[2 : len(bs)-1])
+ return true, nil
+}
+
+func fixExternalTrackerConfig16961(bs []byte, cfg *repo_model.ExternalTrackerConfig) (fixed bool, err error) {
+ err = json.UnmarshalHandleDoubleEncode(bs, &cfg)
+ if err == nil {
+ return false, nil
+ }
+ // Handle #16961
+ if len(bs) < 3 {
+ return false, err
+ }
+
+ if bs[0] != '&' || bs[1] != '{' || bs[len(bs)-1] != '}' {
+ return false, err
+ }
+
+ parts := bytes.Split(bs[2:len(bs)-1], []byte{' '})
+ if len(parts) != 3 {
+ return false, err
+ }
+
+ cfg.ExternalTrackerURL = string(bytes.Join(parts[:len(parts)-2], []byte{' '}))
+ cfg.ExternalTrackerFormat = string(parts[len(parts)-2])
+ cfg.ExternalTrackerStyle = string(parts[len(parts)-1])
+ return true, nil
+}
+
+func fixPullRequestsConfig16961(bs []byte, cfg *repo_model.PullRequestsConfig) (fixed bool, err error) {
+ err = json.UnmarshalHandleDoubleEncode(bs, &cfg)
+ if err == nil {
+ return false, nil
+ }
+
+ // Handle #16961
+ if len(bs) < 3 {
+ return false, err
+ }
+
+ if bs[0] != '&' || bs[1] != '{' || bs[len(bs)-1] != '}' {
+ return false, err
+ }
+
+ // PullRequestsConfig was the following in 1.14
+ // type PullRequestsConfig struct {
+ // IgnoreWhitespaceConflicts bool
+ // AllowMerge bool
+ // AllowRebase bool
+ // AllowRebaseMerge bool
+ // AllowSquash bool
+ // AllowManualMerge bool
+ // AutodetectManualMerge bool
+ // }
+ //
+ // 1.15 added in addition:
+ // DefaultDeleteBranchAfterMerge bool
+ // DefaultMergeStyle MergeStyle
+ parts := bytes.Split(bs[2:len(bs)-1], []byte{' '})
+ if len(parts) < 7 {
+ return false, err
+ }
+
+ var parseErr error
+ cfg.IgnoreWhitespaceConflicts, parseErr = parseBool16961(parts[0])
+ if parseErr != nil {
+ return false, errors.Join(err, parseErr)
+ }
+ cfg.AllowMerge, parseErr = parseBool16961(parts[1])
+ if parseErr != nil {
+ return false, errors.Join(err, parseErr)
+ }
+ cfg.AllowRebase, parseErr = parseBool16961(parts[2])
+ if parseErr != nil {
+ return false, errors.Join(err, parseErr)
+ }
+ cfg.AllowRebaseMerge, parseErr = parseBool16961(parts[3])
+ if parseErr != nil {
+ return false, errors.Join(err, parseErr)
+ }
+ cfg.AllowSquash, parseErr = parseBool16961(parts[4])
+ if parseErr != nil {
+ return false, errors.Join(err, parseErr)
+ }
+ cfg.AllowManualMerge, parseErr = parseBool16961(parts[5])
+ if parseErr != nil {
+ return false, errors.Join(err, parseErr)
+ }
+ cfg.AutodetectManualMerge, parseErr = parseBool16961(parts[6])
+ if parseErr != nil {
+ return false, errors.Join(err, parseErr)
+ }
+
+ // 1.14 unit
+ if len(parts) == 7 {
+ return true, nil
+ }
+
+ if len(parts) < 9 {
+ return false, err
+ }
+
+ cfg.DefaultDeleteBranchAfterMerge, parseErr = parseBool16961(parts[7])
+ if parseErr != nil {
+ return false, errors.Join(err, parseErr)
+ }
+
+ cfg.DefaultMergeStyle = repo_model.MergeStyle(string(bytes.Join(parts[8:], []byte{' '})))
+ return true, nil
+}
+
+func fixIssuesConfig16961(bs []byte, cfg *repo_model.IssuesConfig) (fixed bool, err error) {
+ err = json.UnmarshalHandleDoubleEncode(bs, &cfg)
+ if err == nil {
+ return false, nil
+ }
+
+ // Handle #16961
+ if len(bs) < 3 {
+ return false, err
+ }
+
+ if bs[0] != '&' || bs[1] != '{' || bs[len(bs)-1] != '}' {
+ return false, err
+ }
+
+ parts := bytes.Split(bs[2:len(bs)-1], []byte{' '})
+ if len(parts) != 3 {
+ return false, err
+ }
+ var parseErr error
+ cfg.EnableTimetracker, parseErr = parseBool16961(parts[0])
+ if parseErr != nil {
+ return false, errors.Join(err, parseErr)
+ }
+ cfg.AllowOnlyContributorsToTrackTime, parseErr = parseBool16961(parts[1])
+ if parseErr != nil {
+ return false, errors.Join(err, parseErr)
+ }
+ cfg.EnableDependencies, parseErr = parseBool16961(parts[2])
+ if parseErr != nil {
+ return false, errors.Join(err, parseErr)
+ }
+ return true, nil
+}
+
+func fixBrokenRepoUnit16961(repoUnit *repo_model.RepoUnit, bs []byte) (fixed bool, err error) {
+ // Shortcut empty or null values
+ if len(bs) == 0 {
+ return false, nil
+ }
+
+ var cfg any
+ err = json.UnmarshalHandleDoubleEncode(bs, &cfg)
+ if err == nil {
+ return false, nil
+ }
+
+ switch repoUnit.Type {
+ case unit.TypeCode, unit.TypeReleases, unit.TypeWiki, unit.TypeProjects:
+ cfg := &repo_model.UnitConfig{}
+ repoUnit.Config = cfg
+ if fixed, err := fixUnitConfig16961(bs, cfg); !fixed {
+ return false, err
+ }
+ case unit.TypeExternalWiki:
+ cfg := &repo_model.ExternalWikiConfig{}
+ repoUnit.Config = cfg
+
+ if fixed, err := fixExternalWikiConfig16961(bs, cfg); !fixed {
+ return false, err
+ }
+ case unit.TypeExternalTracker:
+ cfg := &repo_model.ExternalTrackerConfig{}
+ repoUnit.Config = cfg
+ if fixed, err := fixExternalTrackerConfig16961(bs, cfg); !fixed {
+ return false, err
+ }
+ case unit.TypePullRequests:
+ cfg := &repo_model.PullRequestsConfig{}
+ repoUnit.Config = cfg
+
+ if fixed, err := fixPullRequestsConfig16961(bs, cfg); !fixed {
+ return false, err
+ }
+ case unit.TypeIssues:
+ cfg := &repo_model.IssuesConfig{}
+ repoUnit.Config = cfg
+ if fixed, err := fixIssuesConfig16961(bs, cfg); !fixed {
+ return false, err
+ }
+ default:
+ panic(fmt.Sprintf("unrecognized repo unit type: %v", repoUnit.Type))
+ }
+ return true, nil
+}
+
+func fixBrokenRepoUnits16961(ctx context.Context, logger log.Logger, autofix bool) error {
+ // RepoUnit describes all units of a repository
+ type RepoUnit struct {
+ ID int64
+ RepoID int64
+ Type unit.Type
+ Config []byte
+ CreatedUnix timeutil.TimeStamp `xorm:"INDEX CREATED"`
+ }
+
+ count := 0
+
+ err := db.Iterate(
+ ctx,
+ builder.Gt{
+ "id": 0,
+ },
+ func(ctx context.Context, unit *RepoUnit) error {
+ bs := unit.Config
+ repoUnit := &repo_model.RepoUnit{
+ ID: unit.ID,
+ RepoID: unit.RepoID,
+ Type: unit.Type,
+ CreatedUnix: unit.CreatedUnix,
+ }
+
+ if fixed, err := fixBrokenRepoUnit16961(repoUnit, bs); !fixed {
+ return err
+ }
+
+ count++
+ if !autofix {
+ return nil
+ }
+
+ return repo_model.UpdateRepoUnit(ctx, repoUnit)
+ },
+ )
+ if err != nil {
+		logger.Critical("Unable to iterate across repo units to fix the broken units: Error %v", err)
+ return err
+ }
+
+ if !autofix {
+ if count == 0 {
+ logger.Info("Found no broken repo_units")
+ } else {
+ logger.Warn("Found %d broken repo_units", count)
+ }
+ return nil
+ }
+ logger.Info("Fixed %d broken repo_units", count)
+
+ return nil
+}
+
+func init() {
+ Register(&Check{
+ Title: "Check for incorrectly dumped repo_units (See #16961)",
+ Name: "fix-broken-repo-units",
+ IsDefault: false,
+ Run: fixBrokenRepoUnits16961,
+ Priority: 7,
+ })
+}
diff --git a/services/doctor/fix16961_test.go b/services/doctor/fix16961_test.go
new file mode 100644
index 0000000..498ed9c
--- /dev/null
+++ b/services/doctor/fix16961_test.go
@@ -0,0 +1,271 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package doctor
+
+import (
+ "testing"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func Test_fixUnitConfig_16961(t *testing.T) {
+ tests := []struct {
+ name string
+ bs string
+ wantFixed bool
+ wantErr bool
+ }{
+ {
+ name: "empty",
+ bs: "",
+ wantFixed: true,
+ wantErr: false,
+ },
+ {
+ name: "normal: {}",
+ bs: "{}",
+ wantFixed: false,
+ wantErr: false,
+ },
+ {
+ name: "broken but fixable: &{}",
+ bs: "&{}",
+ wantFixed: true,
+ wantErr: false,
+ },
+ {
+ name: "broken but unfixable: &{asdasd}",
+ bs: "&{asdasd}",
+ wantFixed: false,
+ wantErr: true,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ gotFixed, err := fixUnitConfig16961([]byte(tt.bs), &repo_model.UnitConfig{})
+ if (err != nil) != tt.wantErr {
+ t.Errorf("fixUnitConfig_16961() error = %v, wantErr %v", err, tt.wantErr)
+ return
+ }
+ if gotFixed != tt.wantFixed {
+ t.Errorf("fixUnitConfig_16961() = %v, want %v", gotFixed, tt.wantFixed)
+ }
+ })
+ }
+}
+
+func Test_fixExternalWikiConfig_16961(t *testing.T) {
+ tests := []struct {
+ name string
+ bs string
+ expected string
+ wantFixed bool
+ wantErr bool
+ }{
+ {
+ name: "normal: {\"ExternalWikiURL\":\"http://someurl\"}",
+ bs: "{\"ExternalWikiURL\":\"http://someurl\"}",
+ expected: "http://someurl",
+ wantFixed: false,
+ wantErr: false,
+ },
+ {
+ name: "broken: &{http://someurl}",
+ bs: "&{http://someurl}",
+ expected: "http://someurl",
+ wantFixed: true,
+ wantErr: false,
+ },
+ {
+ name: "broken but unfixable: http://someurl",
+ bs: "http://someurl",
+ wantFixed: false,
+ wantErr: true,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ cfg := &repo_model.ExternalWikiConfig{}
+ gotFixed, err := fixExternalWikiConfig16961([]byte(tt.bs), cfg)
+ if (err != nil) != tt.wantErr {
+ t.Errorf("fixExternalWikiConfig_16961() error = %v, wantErr %v", err, tt.wantErr)
+ return
+ }
+ if gotFixed != tt.wantFixed {
+ t.Errorf("fixExternalWikiConfig_16961() = %v, want %v", gotFixed, tt.wantFixed)
+ }
+ if cfg.ExternalWikiURL != tt.expected {
+ t.Errorf("fixExternalWikiConfig_16961().ExternalWikiURL = %v, want %v", cfg.ExternalWikiURL, tt.expected)
+ }
+ })
+ }
+}
+
+func Test_fixExternalTrackerConfig_16961(t *testing.T) {
+ tests := []struct {
+ name string
+ bs string
+ expected repo_model.ExternalTrackerConfig
+ wantFixed bool
+ wantErr bool
+ }{
+ {
+ name: "normal",
+ bs: `{"ExternalTrackerURL":"a","ExternalTrackerFormat":"b","ExternalTrackerStyle":"c"}`,
+ expected: repo_model.ExternalTrackerConfig{
+ ExternalTrackerURL: "a",
+ ExternalTrackerFormat: "b",
+ ExternalTrackerStyle: "c",
+ },
+ wantFixed: false,
+ wantErr: false,
+ },
+ {
+ name: "broken",
+ bs: "&{a b c}",
+ expected: repo_model.ExternalTrackerConfig{
+ ExternalTrackerURL: "a",
+ ExternalTrackerFormat: "b",
+ ExternalTrackerStyle: "c",
+ },
+ wantFixed: true,
+ wantErr: false,
+ },
+ {
+ name: "broken - too many fields",
+ bs: "&{a b c d}",
+ wantFixed: false,
+ wantErr: true,
+ },
+ {
+ name: "broken - wrong format",
+ bs: "a b c d}",
+ wantFixed: false,
+ wantErr: true,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ cfg := &repo_model.ExternalTrackerConfig{}
+ gotFixed, err := fixExternalTrackerConfig16961([]byte(tt.bs), cfg)
+ if (err != nil) != tt.wantErr {
+ t.Errorf("fixExternalTrackerConfig_16961() error = %v, wantErr %v", err, tt.wantErr)
+ return
+ }
+ if gotFixed != tt.wantFixed {
+ t.Errorf("fixExternalTrackerConfig_16961() = %v, want %v", gotFixed, tt.wantFixed)
+ }
+			if cfg.ExternalTrackerFormat != tt.expected.ExternalTrackerFormat {
+				t.Errorf("fixExternalTrackerConfig_16961().ExternalTrackerFormat = %v, want %v", cfg.ExternalTrackerFormat, tt.expected.ExternalTrackerFormat)
+			}
+			if cfg.ExternalTrackerStyle != tt.expected.ExternalTrackerStyle {
+				t.Errorf("fixExternalTrackerConfig_16961().ExternalTrackerStyle = %v, want %v", cfg.ExternalTrackerStyle, tt.expected.ExternalTrackerStyle)
+			}
+			if cfg.ExternalTrackerURL != tt.expected.ExternalTrackerURL {
+				t.Errorf("fixExternalTrackerConfig_16961().ExternalTrackerURL = %v, want %v", cfg.ExternalTrackerURL, tt.expected.ExternalTrackerURL)
+			}
+ })
+ }
+}
+
+func Test_fixPullRequestsConfig_16961(t *testing.T) {
+ tests := []struct {
+ name string
+ bs string
+ expected repo_model.PullRequestsConfig
+ wantFixed bool
+ wantErr bool
+ }{
+ {
+ name: "normal",
+ bs: `{"IgnoreWhitespaceConflicts":false,"AllowMerge":false,"AllowRebase":false,"AllowRebaseMerge":false,"AllowSquash":false,"AllowManualMerge":false,"AutodetectManualMerge":false,"DefaultDeleteBranchAfterMerge":false,"DefaultMergeStyle":""}`,
+ },
+ {
+ name: "broken - 1.14",
+ bs: `&{%!s(bool=false) %!s(bool=true) %!s(bool=true) %!s(bool=true) %!s(bool=true) %!s(bool=false) %!s(bool=false)}`,
+ expected: repo_model.PullRequestsConfig{
+ IgnoreWhitespaceConflicts: false,
+ AllowMerge: true,
+ AllowRebase: true,
+ AllowRebaseMerge: true,
+ AllowSquash: true,
+ AllowManualMerge: false,
+ AutodetectManualMerge: false,
+ },
+ wantFixed: true,
+ },
+ {
+ name: "broken - 1.15",
+ bs: `&{%!s(bool=false) %!s(bool=true) %!s(bool=true) %!s(bool=true) %!s(bool=true) %!s(bool=false) %!s(bool=false) %!s(bool=false) merge}`,
+ expected: repo_model.PullRequestsConfig{
+ AllowMerge: true,
+ AllowRebase: true,
+ AllowRebaseMerge: true,
+ AllowSquash: true,
+ DefaultMergeStyle: repo_model.MergeStyleMerge,
+ },
+ wantFixed: true,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ cfg := &repo_model.PullRequestsConfig{}
+ gotFixed, err := fixPullRequestsConfig16961([]byte(tt.bs), cfg)
+ if (err != nil) != tt.wantErr {
+ t.Errorf("fixPullRequestsConfig_16961() error = %v, wantErr %v", err, tt.wantErr)
+ return
+ }
+ if gotFixed != tt.wantFixed {
+ t.Errorf("fixPullRequestsConfig_16961() = %v, want %v", gotFixed, tt.wantFixed)
+ }
+ assert.EqualValues(t, &tt.expected, cfg)
+ })
+ }
+}
+
+func Test_fixIssuesConfig_16961(t *testing.T) {
+ tests := []struct {
+ name string
+ bs string
+ expected repo_model.IssuesConfig
+ wantFixed bool
+ wantErr bool
+ }{
+ {
+ name: "normal",
+ bs: `{"EnableTimetracker":true,"AllowOnlyContributorsToTrackTime":true,"EnableDependencies":true}`,
+ expected: repo_model.IssuesConfig{
+ EnableTimetracker: true,
+ AllowOnlyContributorsToTrackTime: true,
+ EnableDependencies: true,
+ },
+ },
+ {
+ name: "broken",
+ bs: `&{%!s(bool=true) %!s(bool=true) %!s(bool=true)}`,
+ expected: repo_model.IssuesConfig{
+ EnableTimetracker: true,
+ AllowOnlyContributorsToTrackTime: true,
+ EnableDependencies: true,
+ },
+ wantFixed: true,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ cfg := &repo_model.IssuesConfig{}
+ gotFixed, err := fixIssuesConfig16961([]byte(tt.bs), cfg)
+ if (err != nil) != tt.wantErr {
+ t.Errorf("fixIssuesConfig_16961() error = %v, wantErr %v", err, tt.wantErr)
+ return
+ }
+ if gotFixed != tt.wantFixed {
+ t.Errorf("fixIssuesConfig_16961() = %v, want %v", gotFixed, tt.wantFixed)
+ }
+ assert.EqualValues(t, &tt.expected, cfg)
+ })
+ }
+}
diff --git a/services/doctor/fix8312.go b/services/doctor/fix8312.go
new file mode 100644
index 0000000..4fc0498
--- /dev/null
+++ b/services/doctor/fix8312.go
@@ -0,0 +1,61 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package doctor
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/db"
+ org_model "code.gitea.io/gitea/models/organization"
+ "code.gitea.io/gitea/models/perm"
+ "code.gitea.io/gitea/modules/log"
+
+ "xorm.io/builder"
+)
+
+func fixOwnerTeamCreateOrgRepo(ctx context.Context, logger log.Logger, autofix bool) error {
+ count := 0
+
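+	// Owner teams are expected to be able to create repositories in their
+	// organization, so flip can_create_org_repo on any owner team missing it.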
+ err := db.Iterate(
+ ctx,
+ builder.Eq{"authorize": perm.AccessModeOwner, "can_create_org_repo": false},
+ func(ctx context.Context, team *org_model.Team) error {
+ team.CanCreateOrgRepo = true
+ count++
+
+ if !autofix {
+ return nil
+ }
+
+ return models.UpdateTeam(ctx, team, false, false)
+ },
+ )
+ if err != nil {
+		logger.Critical("Unable to iterate across teams to fix incorrect can_create_org_repo: Error %v", err)
+ return err
+ }
+
+ if !autofix {
+ if count == 0 {
+ logger.Info("Found no team with incorrect can_create_org_repo")
+ } else {
+ logger.Warn("Found %d teams with incorrect can_create_org_repo", count)
+ }
+ return nil
+ }
+ logger.Info("Fixed %d teams with incorrect can_create_org_repo", count)
+
+ return nil
+}
+
+func init() {
+ Register(&Check{
+ Title: "Check for incorrect can_create_org_repo for org owner teams",
+ Name: "fix-owner-team-create-org-repo",
+ IsDefault: false,
+ Run: fixOwnerTeamCreateOrgRepo,
+ Priority: 7,
+ })
+}
diff --git a/services/doctor/heads.go b/services/doctor/heads.go
new file mode 100644
index 0000000..41fca01
--- /dev/null
+++ b/services/doctor/heads.go
@@ -0,0 +1,88 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package doctor
+
+import (
+	"context"
+	"strings"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+)
+
+func synchronizeRepoHeads(ctx context.Context, logger log.Logger, autofix bool) error {
+ numRepos := 0
+ numHeadsBroken := 0
+ numDefaultBranchesBroken := 0
+ numReposUpdated := 0
+ err := iterateRepositories(ctx, func(repo *repo_model.Repository) error {
+ numRepos++
+ _, _, defaultBranchErr := git.NewCommand(ctx, "rev-parse").AddDashesAndList(repo.DefaultBranch).RunStdString(&git.RunOpts{Dir: repo.RepoPath()})
+
+		head, _, headErr := git.NewCommand(ctx, "symbolic-ref", "--short", "HEAD").RunStdString(&git.RunOpts{Dir: repo.RepoPath()})
+		// RunStdString keeps the trailing newline, so trim before comparing
+		head = strings.TrimSpace(head)
+
+ // what we expect: default branch is valid, and HEAD points to it
+ if headErr == nil && defaultBranchErr == nil && head == repo.DefaultBranch {
+ return nil
+ }
+
+ if headErr != nil {
+ numHeadsBroken++
+ }
+ if defaultBranchErr != nil {
+ numDefaultBranchesBroken++
+ }
+
+ // if default branch is broken, let the user fix that in the UI
+ if defaultBranchErr != nil {
+ logger.Warn("Default branch for %s/%s doesn't point to a valid commit", repo.OwnerName, repo.Name)
+ return nil
+ }
+
+ // if we're not autofixing, that's all we can do
+ if !autofix {
+ return nil
+ }
+
+ // otherwise, let's try fixing HEAD
+ err := git.NewCommand(ctx, "symbolic-ref").AddDashesAndList("HEAD", git.BranchPrefix+repo.DefaultBranch).Run(&git.RunOpts{Dir: repo.RepoPath()})
+ if err != nil {
+ logger.Warn("Failed to fix HEAD for %s/%s: %v", repo.OwnerName, repo.Name, err)
+ return nil
+ }
+ numReposUpdated++
+ return nil
+ })
+ if err != nil {
+ logger.Critical("Error when fixing repo HEADs: %v", err)
+ }
+
+ if autofix {
+		logger.Info("Out of %d repos, HEADs for %d are now fixed and HEADs for %d are still broken", numRepos, numReposUpdated, numDefaultBranchesBroken+numHeadsBroken-numReposUpdated)
+ } else {
+ if numHeadsBroken == 0 && numDefaultBranchesBroken == 0 {
+ logger.Info("All %d repos have their HEADs in the correct state", numRepos)
+ } else {
+ if numHeadsBroken == 0 && numDefaultBranchesBroken != 0 {
+ logger.Critical("Default branches are broken for %d/%d repos", numDefaultBranchesBroken, numRepos)
+ } else if numHeadsBroken != 0 && numDefaultBranchesBroken == 0 {
+ logger.Warn("HEADs are broken for %d/%d repos", numHeadsBroken, numRepos)
+ } else {
+ logger.Critical("Out of %d repos, HEADS are broken for %d and default branches are broken for %d", numRepos, numHeadsBroken, numDefaultBranchesBroken)
+ }
+ }
+ }
+
+ return err
+}
+
+func init() {
+ Register(&Check{
+ Title: "Synchronize repo HEADs",
+ Name: "synchronize-repo-heads",
+ IsDefault: true,
+ Run: synchronizeRepoHeads,
+ Priority: 7,
+ })
+}
diff --git a/services/doctor/lfs.go b/services/doctor/lfs.go
new file mode 100644
index 0000000..8531b7b
--- /dev/null
+++ b/services/doctor/lfs.go
@@ -0,0 +1,52 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package doctor
+
+import (
+ "context"
+ "fmt"
+ "time"
+
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/repository"
+)
+
+func init() {
+ Register(&Check{
+ Title: "Garbage collect LFS",
+ Name: "gc-lfs",
+ IsDefault: false,
+ Run: garbageCollectLFSCheck,
+ AbortIfFailed: false,
+ SkipDatabaseInitialization: false,
+ Priority: 1,
+ })
+}
+
+func garbageCollectLFSCheck(ctx context.Context, logger log.Logger, autofix bool) error {
+ if !setting.LFS.StartServer {
+ return fmt.Errorf("LFS support is disabled")
+ }
+
+ if err := repository.GarbageCollectLFSMetaObjects(ctx, repository.GarbageCollectLFSMetaObjectsOptions{
+ LogDetail: logger.Info,
+ AutoFix: autofix,
+ // Only attempt to garbage collect lfs meta objects older than a week as the order of git lfs upload
+ // and git object upload is not necessarily guaranteed. It's possible to imagine a situation whereby
+ // an LFS object is uploaded but the git branch is not uploaded immediately, or there are some rapid
+ // changes in new branches that might lead to lfs objects becoming temporarily unassociated with git
+ // objects.
+ //
+		// A week is likely excessive, but it should definitely be enough that any
+		// unassociated LFS object is genuinely unassociated.
+ OlderThan: time.Now().Add(-24 * time.Hour * 7),
+ // We don't set the UpdatedLessRecentlyThan because we want to do a full GC
+ }); err != nil {
+		logger.Error("Couldn't garbage collect LFS objects: %v", err)
+ return err
+ }
+
+ return checkStorage(&checkStorageOptions{LFS: true})(ctx, logger, autofix)
+}
diff --git a/services/doctor/mergebase.go b/services/doctor/mergebase.go
new file mode 100644
index 0000000..de460c4
--- /dev/null
+++ b/services/doctor/mergebase.go
@@ -0,0 +1,114 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package doctor
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+
+ "xorm.io/builder"
+)
+
+func iteratePRs(ctx context.Context, repo *repo_model.Repository, each func(*repo_model.Repository, *issues_model.PullRequest) error) error {
+ return db.Iterate(
+ ctx,
+ builder.Eq{"base_repo_id": repo.ID},
+ func(ctx context.Context, bean *issues_model.PullRequest) error {
+ return each(repo, bean)
+ },
+ )
+}
+
+func checkPRMergeBase(ctx context.Context, logger log.Logger, autofix bool) error {
+ numRepos := 0
+ numPRs := 0
+ numPRsUpdated := 0
+ err := iterateRepositories(ctx, func(repo *repo_model.Repository) error {
+ numRepos++
+ return iteratePRs(ctx, repo, func(repo *repo_model.Repository, pr *issues_model.PullRequest) error {
+ numPRs++
+ pr.BaseRepo = repo
+ repoPath := repo.RepoPath()
+
+ oldMergeBase := pr.MergeBase
+
+ if !pr.HasMerged {
+ var err error
+ pr.MergeBase, _, err = git.NewCommand(ctx, "merge-base").AddDashesAndList(pr.BaseBranch, pr.GetGitRefName()).RunStdString(&git.RunOpts{Dir: repoPath})
+ if err != nil {
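+					// fall back to the tip of the base branch when the merge
+					// base cannot be computed (e.g. because the PR ref is gone)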
+ var err2 error
+ pr.MergeBase, _, err2 = git.NewCommand(ctx, "rev-parse").AddDynamicArguments(git.BranchPrefix + pr.BaseBranch).RunStdString(&git.RunOpts{Dir: repoPath})
+ if err2 != nil {
+ logger.Warn("Unable to get merge base for PR ID %d, #%d onto %s in %s/%s. Error: %v & %v", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err, err2)
+ return nil
+ }
+ }
+ } else {
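+				// for a merged PR, recompute the merge base from the merge
+				// commit's parents: "rev-list --parents -n 1" prints the commit
+				// hash followed by each of its parent hashes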
+ parentsString, _, err := git.NewCommand(ctx, "rev-list", "--parents", "-n", "1").AddDynamicArguments(pr.MergedCommitID).RunStdString(&git.RunOpts{Dir: repoPath})
+ if err != nil {
+ logger.Warn("Unable to get parents for merged PR ID %d, #%d onto %s in %s/%s. Error: %v", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err)
+ return nil
+ }
+ parents := strings.Split(strings.TrimSpace(parentsString), " ")
+ if len(parents) < 2 {
+ return nil
+ }
+
+ refs := append([]string{}, parents[1:]...)
+ refs = append(refs, pr.GetGitRefName())
+ cmd := git.NewCommand(ctx, "merge-base").AddDashesAndList(refs...)
+ pr.MergeBase, _, err = cmd.RunStdString(&git.RunOpts{Dir: repoPath})
+ if err != nil {
+ logger.Warn("Unable to get merge base for merged PR ID %d, #%d onto %s in %s/%s. Error: %v", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err)
+ return nil
+ }
+ }
+ pr.MergeBase = strings.TrimSpace(pr.MergeBase)
+ if pr.MergeBase != oldMergeBase {
+ if autofix {
+ if err := pr.UpdateCols(ctx, "merge_base"); err != nil {
+ logger.Critical("Failed to update merge_base. ERROR: %v", err)
+ return fmt.Errorf("Failed to update merge_base. ERROR: %w", err)
+ }
+ } else {
+ logger.Info("#%d onto %s in %s/%s: MergeBase should be %s but is %s", pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, oldMergeBase, pr.MergeBase)
+ }
+ numPRsUpdated++
+ }
+ return nil
+ })
+ })
+
+ if autofix {
+ logger.Info("%d PR mergebases updated of %d PRs total in %d repos", numPRsUpdated, numPRs, numRepos)
+ } else {
+ if numPRsUpdated == 0 {
+ logger.Info("All %d PRs in %d repos have a correct mergebase", numPRs, numRepos)
+ } else if err == nil {
+ logger.Critical("%d PRs with incorrect mergebases of %d PRs total in %d repos", numPRsUpdated, numPRs, numRepos)
+ return fmt.Errorf("%d PRs with incorrect mergebases of %d PRs total in %d repos", numPRsUpdated, numPRs, numRepos)
+ } else {
+ logger.Warn("%d PRs with incorrect mergebases of %d PRs total in %d repos", numPRsUpdated, numPRs, numRepos)
+ }
+ }
+
+ return err
+}
+
+func init() {
+ Register(&Check{
+ Title: "Recalculate merge bases",
+ Name: "recalculate-merge-bases",
+ IsDefault: false,
+ Run: checkPRMergeBase,
+ Priority: 7,
+ })
+}
diff --git a/services/doctor/misc.go b/services/doctor/misc.go
new file mode 100644
index 0000000..9300c3a
--- /dev/null
+++ b/services/doctor/misc.go
@@ -0,0 +1,299 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package doctor
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "os/exec"
+ "path"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+
+ lru "github.com/hashicorp/golang-lru/v2"
+ "xorm.io/builder"
+)
+
+func iterateRepositories(ctx context.Context, each func(*repo_model.Repository) error) error {
+ err := db.Iterate(
+ ctx,
+ builder.Gt{"id": 0},
+ func(ctx context.Context, bean *repo_model.Repository) error {
+ return each(bean)
+ },
+ )
+ return err
+}
+
+func checkScriptType(ctx context.Context, logger log.Logger, autofix bool) error {
+ path, err := exec.LookPath(setting.ScriptType)
+ if err != nil {
+		logger.Critical("ScriptType %q is not on the current PATH. Error: %v", setting.ScriptType, err)
+		return fmt.Errorf("ScriptType %q is not on the current PATH. Error: %w", setting.ScriptType, err)
+ }
+ logger.Info("ScriptType %s is on the current PATH at %s", setting.ScriptType, path)
+ return nil
+}
+
+func checkHooks(ctx context.Context, logger log.Logger, autofix bool) error {
+ if err := iterateRepositories(ctx, func(repo *repo_model.Repository) error {
+ results, err := repository.CheckDelegateHooks(repo.RepoPath())
+ if err != nil {
+ logger.Critical("Unable to check delegate hooks for repo %-v. ERROR: %v", repo, err)
+ return fmt.Errorf("Unable to check delegate hooks for repo %-v. ERROR: %w", repo, err)
+ }
+ if len(results) > 0 && autofix {
+ logger.Warn("Regenerated hooks for %s", repo.FullName())
+ if err := repository.CreateDelegateHooks(repo.RepoPath()); err != nil {
+ logger.Critical("Unable to recreate delegate hooks for %-v. ERROR: %v", repo, err)
+ return fmt.Errorf("Unable to recreate delegate hooks for %-v. ERROR: %w", repo, err)
+ }
+ }
+ for _, result := range results {
+ logger.Warn(result)
+ }
+ return nil
+ }); err != nil {
+ logger.Critical("Errors noted whilst checking delegate hooks.")
+ return err
+ }
+ return nil
+}
+
+func checkUserStarNum(ctx context.Context, logger log.Logger, autofix bool) error {
+ if autofix {
+ if err := models.DoctorUserStarNum(ctx); err != nil {
+			logger.Critical("Unable to update User Stars numbers")
+ return err
+ }
+ logger.Info("Updated User Stars numbers.")
+ } else {
+ logger.Info("No check available for User Stars numbers (skipped)")
+ }
+ return nil
+}
+
+func checkEnablePushOptions(ctx context.Context, logger log.Logger, autofix bool) error {
+ numRepos := 0
+ numNeedUpdate := 0
+
+ if err := iterateRepositories(ctx, func(repo *repo_model.Repository) error {
+ numRepos++
+ r, err := gitrepo.OpenRepository(ctx, repo)
+ if err != nil {
+ return err
+ }
+ defer r.Close()
+
+ if autofix {
+ _, _, err := git.NewCommand(ctx, "config", "receive.advertisePushOptions", "true").RunStdString(&git.RunOpts{Dir: r.Path})
+ return err
+ }
+
+ value, _, err := git.NewCommand(ctx, "config", "receive.advertisePushOptions").RunStdString(&git.RunOpts{Dir: r.Path})
+ if err != nil {
+ return err
+ }
+
+ result, valid := git.ParseBool(strings.TrimSpace(value))
+ if !result || !valid {
+ numNeedUpdate++
+ logger.Info("%s: does not have receive.advertisePushOptions set correctly: %q", repo.FullName(), value)
+ }
+ return nil
+ }); err != nil {
+ logger.Critical("Unable to EnablePushOptions: %v", err)
+ return err
+ }
+
+ if autofix {
+ logger.Info("Enabled push options for %d repositories.", numRepos)
+ } else {
+ logger.Info("Checked %d repositories, %d need updates.", numRepos, numNeedUpdate)
+ }
+
+ return nil
+}
+
+func checkDaemonExport(ctx context.Context, logger log.Logger, autofix bool) error {
+ numRepos := 0
+ numNeedUpdate := 0
+ cache, err := lru.New[int64, any](512)
+ if err != nil {
+ logger.Critical("Unable to create cache: %v", err)
+ return err
+ }
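+	// cache repository owners so repositories sharing an owner need only one lookup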
+ if err := iterateRepositories(ctx, func(repo *repo_model.Repository) error {
+ numRepos++
+
+ if owner, has := cache.Get(repo.OwnerID); has {
+ repo.Owner = owner.(*user_model.User)
+ } else {
+ if err := repo.LoadOwner(ctx); err != nil {
+ return err
+ }
+ cache.Add(repo.OwnerID, repo.Owner)
+ }
+
+ // Create/Remove git-daemon-export-ok for git-daemon...
+ daemonExportFile := path.Join(repo.RepoPath(), `git-daemon-export-ok`)
+ isExist, err := util.IsExist(daemonExportFile)
+ if err != nil {
+ log.Error("Unable to check if %s exists. Error: %v", daemonExportFile, err)
+ return err
+ }
+ isPublic := !repo.IsPrivate && repo.Owner.Visibility == structs.VisibleTypePublic
+
+ if isPublic != isExist {
+ numNeedUpdate++
+ if autofix {
+ if !isPublic && isExist {
+ if err = util.Remove(daemonExportFile); err != nil {
+ log.Error("Failed to remove %s: %v", daemonExportFile, err)
+ }
+ } else if isPublic && !isExist {
+ if f, err := os.Create(daemonExportFile); err != nil {
+ log.Error("Failed to create %s: %v", daemonExportFile, err)
+ } else {
+ f.Close()
+ }
+ }
+ }
+ }
+ return nil
+ }); err != nil {
+ logger.Critical("Unable to checkDaemonExport: %v", err)
+ return err
+ }
+
+ if autofix {
+ logger.Info("Updated git-daemon-export-ok files for %d of %d repositories.", numNeedUpdate, numRepos)
+ } else {
+ logger.Info("Checked %d repositories, %d need updates.", numRepos, numNeedUpdate)
+ }
+
+ return nil
+}
+
+func checkCommitGraph(ctx context.Context, logger log.Logger, autofix bool) error {
+ numRepos := 0
+ numNeedUpdate := 0
+ numWritten := 0
+ if err := iterateRepositories(ctx, func(repo *repo_model.Repository) error {
+ numRepos++
+
+ commitGraphExists := func() (bool, error) {
+ // Check commit-graph exists
+ commitGraphFile := path.Join(repo.RepoPath(), `objects/info/commit-graph`)
+ isExist, err := util.IsExist(commitGraphFile)
+ if err != nil {
+ logger.Error("Unable to check if %s exists. Error: %v", commitGraphFile, err)
+ return false, err
+ }
+
+ if !isExist {
+ commitGraphsDir := path.Join(repo.RepoPath(), `objects/info/commit-graphs`)
+ isExist, err = util.IsExist(commitGraphsDir)
+ if err != nil {
+ logger.Error("Unable to check if %s exists. Error: %v", commitGraphsDir, err)
+ return false, err
+ }
+ }
+ return isExist, nil
+ }
+
+ isExist, err := commitGraphExists()
+ if err != nil {
+ return err
+ }
+ if !isExist {
+ numNeedUpdate++
+ if autofix {
+ if err := git.WriteCommitGraph(ctx, repo.RepoPath()); err != nil {
+ logger.Error("Unable to write commit-graph in %s. Error: %v", repo.FullName(), err)
+ return err
+ }
+ isExist, err := commitGraphExists()
+ if err != nil {
+ return err
+ }
+ if isExist {
+ numWritten++
+ logger.Info("Commit-graph written: %s", repo.FullName())
+ } else {
+ logger.Warn("No commit-graph written: %s", repo.FullName())
+ }
+ }
+ }
+ return nil
+ }); err != nil {
+ logger.Critical("Unable to checkCommitGraph: %v", err)
+ return err
+ }
+
+ if autofix {
+ logger.Info("Wrote commit-graph files for %d of %d repositories.", numWritten, numRepos)
+ } else {
+ logger.Info("Checked %d repositories, %d without commit-graphs.", numRepos, numNeedUpdate)
+ }
+
+ return nil
+}
+
+func init() {
+ Register(&Check{
+ Title: "Check if SCRIPT_TYPE is available",
+ Name: "script-type",
+ IsDefault: false,
+ Run: checkScriptType,
+ Priority: 5,
+ })
+ Register(&Check{
+ Title: "Check if hook files are up-to-date and executable",
+ Name: "hooks",
+ IsDefault: false,
+ Run: checkHooks,
+ Priority: 6,
+ })
+ Register(&Check{
+ Title: "Recalculate Stars number for all user",
+ Name: "recalculate-stars-number",
+ IsDefault: false,
+ Run: checkUserStarNum,
+ Priority: 6,
+ })
+ Register(&Check{
+ Title: "Check that all git repositories have receive.advertisePushOptions set to true",
+ Name: "enable-push-options",
+ IsDefault: false,
+ Run: checkEnablePushOptions,
+ Priority: 7,
+ })
+ Register(&Check{
+ Title: "Check git-daemon-export-ok files",
+ Name: "check-git-daemon-export-ok",
+ IsDefault: false,
+ Run: checkDaemonExport,
+ Priority: 8,
+ })
+ Register(&Check{
+ Title: "Check commit-graphs",
+ Name: "check-commit-graphs",
+ IsDefault: false,
+ Run: checkCommitGraph,
+ Priority: 9,
+ })
+}
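
The init() block above shows the registration pattern used throughout the doctor package: each file contributes one or more Check values at package load time, and the runner orders them by Priority. A minimal sketch of a custom check, using only the Check fields and the Register helper that appear in this diff; the check itself is invented for illustration:

    // Sketch of a custom doctor check (illustrative only).
    func checkSomething(ctx context.Context, logger log.Logger, autofix bool) error {
        // Inspect state here; only mutate it when autofix is true.
        if autofix {
            logger.Info("Fixed the thing.")
            return nil
        }
        logger.Info("The thing looks fine.")
        return nil
    }

    func init() {
        Register(&Check{
            Title:     "Check something (illustrative)",
            Name:      "check-something",
            IsDefault: false,
            Run:       checkSomething,
            Priority:  10,
        })
    }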
diff --git a/services/doctor/packages_nuget.go b/services/doctor/packages_nuget.go
new file mode 100644
index 0000000..47fdb3a
--- /dev/null
+++ b/services/doctor/packages_nuget.go
@@ -0,0 +1,160 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package doctor
+
+import (
+ "context"
+ "fmt"
+ "slices"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/packages"
+ "code.gitea.io/gitea/modules/log"
+ packages_module "code.gitea.io/gitea/modules/packages"
+ nuget_module "code.gitea.io/gitea/modules/packages/nuget"
+ packages_service "code.gitea.io/gitea/services/packages"
+
+ "xorm.io/builder"
+)
+
+func init() {
+ Register(&Check{
+ Title: "Extract Nuget Nuspec Files to content store",
+ Name: "packages-nuget-nuspec",
+ IsDefault: false,
+ Run: PackagesNugetNuspecCheck,
+ Priority: 15,
+ InitStorage: true,
+ })
+}
+
+func PackagesNugetNuspecCheck(ctx context.Context, logger log.Logger, autofix bool) error {
+ found := 0
+ fixed := 0
+ errors := 0
+
+ err := db.Iterate(ctx, builder.Eq{"package.type": packages.TypeNuGet, "package.is_internal": false}, func(ctx context.Context, pkg *packages.Package) error {
+ logger.Info("Processing package %s", pkg.Name)
+
+ pvs, _, err := packages.SearchVersions(ctx, &packages.PackageSearchOptions{
+ Type: packages.TypeNuGet,
+ PackageID: pkg.ID,
+ })
+ if err != nil {
+ // Should never happen
+ logger.Error("Failed to search for versions for package %s: %v", pkg.Name, err)
+ return err
+ }
+
+ logger.Info("Found %d versions for package %s", len(pvs), pkg.Name)
+
+ for _, pv := range pvs {
+ pfs, err := packages.GetFilesByVersionID(ctx, pv.ID)
+ if err != nil {
+ logger.Error("Failed to get files for package version %s %s: %v", pkg.Name, pv.Version, err)
+ errors++
+ continue
+ }
+
+ if slices.ContainsFunc(pfs, func(pf *packages.PackageFile) bool { return strings.HasSuffix(pf.LowerName, ".nuspec") }) {
+ logger.Debug("Nuspec file already exists for %s %s", pkg.Name, pv.Version)
+ continue
+ }
+
+ nupkgIdx := slices.IndexFunc(pfs, func(pf *packages.PackageFile) bool { return pf.IsLead })
+
+ if nupkgIdx < 0 {
+ logger.Error("Missing nupkg file for %s %s", pkg.Name, pv.Version)
+ errors++
+ continue
+ }
+
+ pf := pfs[nupkgIdx]
+
+ logger.Warn("Missing nuspec file found for %s %s", pkg.Name, pv.Version)
+ found++
+
+ if !autofix {
+ continue
+ }
+
+ s, _, _, err := packages_service.GetPackageFileStream(ctx, pf)
+ if err != nil {
+ logger.Error("Failed to get nupkg file stream for %s %s: %v", pkg.Name, pv.Version, err)
+ errors++
+ continue
+ }
+ defer s.Close()
+
+ buf, err := packages_module.CreateHashedBufferFromReader(s)
+ if err != nil {
+ logger.Error("Failed to create hashed buffer for nupkg from reader for %s %s: %v", pkg.Name, pv.Version, err)
+ errors++
+ continue
+ }
+ defer buf.Close()
+
+ np, err := nuget_module.ParsePackageMetaData(buf, buf.Size())
+ if err != nil {
+ logger.Error("Failed to parse package metadata for %s %s: %v", pkg.Name, pv.Version, err)
+ errors++
+ continue
+ }
+
+ nuspecBuf, err := packages_module.CreateHashedBufferFromReaderWithSize(np.NuspecContent, np.NuspecContent.Len())
+ if err != nil {
+ logger.Error("Failed to create hashed buffer for nuspec from reader for %s %s: %v", pkg.Name, pv.Version, err)
+ errors++
+ continue
+ }
+ defer nuspecBuf.Close()
+
+ _, err = packages_service.AddFileToPackageVersionInternal(
+ ctx,
+ pv,
+ &packages_service.PackageFileCreationInfo{
+ PackageFileInfo: packages_service.PackageFileInfo{
+ Filename: fmt.Sprintf("%s.nuspec", pkg.LowerName),
+ },
+ Data: nuspecBuf,
+ IsLead: false,
+ },
+ )
+ if err != nil {
+ logger.Error("Failed to add nuspec file for %s %s: %v", pkg.Name, pv.Version, err)
+ errors++
+ continue
+ }
+
+ fixed++
+ }
+
+ return nil
+ })
+ if err != nil {
+ logger.Error("Failed to iterate over users: %v", err)
+ return err
+ }
+
+ if autofix {
+ if fixed > 0 {
+ logger.Info("Fixed %d package versions by extracting nuspec files", fixed)
+ } else {
+ logger.Info("No package versions with missing nuspec files found")
+ }
+ } else {
+ if found > 0 {
+ logger.Info("Found %d package versions with missing nuspec files", found)
+ } else {
+ logger.Info("No package versions with missing nuspec files found")
+ }
+ }
+
+ if errors > 0 {
+ return fmt.Errorf("failed to fix %d nuspec files", errors)
+ }
+
+ return nil
+}
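
Because the check is registered with IsDefault: false, it only runs when requested explicitly; with the standard doctor runner that would be an invocation along the lines of `forgejo doctor check --run packages-nuget-nuspec --fix` (command shape assumed, not part of this diff). Without `--fix` the check only reports missing nuspec files.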
diff --git a/services/doctor/paths.go b/services/doctor/paths.go
new file mode 100644
index 0000000..8e37f01
--- /dev/null
+++ b/services/doctor/paths.go
@@ -0,0 +1,124 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package doctor
+
+import (
+ "context"
+ "fmt"
+ "os"
+
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+)
+
+type configurationFile struct {
+ Name string
+ Path string
+ IsDirectory bool
+ Required bool
+ Writable bool
+}
+
+func checkConfigurationFile(logger log.Logger, autofix bool, fileOpts configurationFile) error {
+ logger.Info(`%-26s %q`, log.NewColoredValue(fileOpts.Name+":", log.Reset), fileOpts.Path)
+ fi, err := os.Stat(fileOpts.Path)
+ if err != nil {
+ if os.IsNotExist(err) && autofix && fileOpts.IsDirectory {
+ if err := os.MkdirAll(fileOpts.Path, 0o777); err != nil {
+ logger.Error(" Directory does not exist and could not be created. ERROR: %v", err)
+ return fmt.Errorf("Configuration directory: \"%q\" does not exist and could not be created. ERROR: %w", fileOpts.Path, err)
+ }
+ fi, err = os.Stat(fileOpts.Path)
+ }
+ }
+ if err != nil {
+ if fileOpts.Required {
+ logger.Error(" Is REQUIRED but is not accessible. ERROR: %v", err)
+ return fmt.Errorf("Configuration file \"%q\" is not accessible but is required. Error: %w", fileOpts.Path, err)
+ }
+ logger.Warn(" NOTICE: is not accessible (Error: %v)", err)
+ // this is a non-critical error
+ return nil
+ }
+
+ if fileOpts.IsDirectory && !fi.IsDir() {
+ logger.Error(" ERROR: not a directory")
+ return fmt.Errorf("Configuration directory \"%q\" is not a directory. Error: %w", fileOpts.Path, err)
+ } else if !fileOpts.IsDirectory && !fi.Mode().IsRegular() {
+ logger.Error(" ERROR: not a regular file")
+ return fmt.Errorf("Configuration file \"%q\" is not a regular file. Error: %w", fileOpts.Path, err)
+ } else if fileOpts.Writable {
+ if err := isWritableDir(fileOpts.Path); err != nil {
+ logger.Error(" ERROR: is required to be writable but is not writable: %v", err)
+ return fmt.Errorf("Configuration file \"%q\" is required to be writable but is not. Error: %w", fileOpts.Path, err)
+ }
+ }
+ return nil
+}
+
+func checkConfigurationFiles(ctx context.Context, logger log.Logger, autofix bool) error {
+ if fi, err := os.Stat(setting.CustomConf); err != nil || !fi.Mode().IsRegular() {
+ logger.Error("Failed to find configuration file at '%s'.", setting.CustomConf)
+ logger.Error("If you've never ran Forgejo yet, this is normal and '%s' will be created for you on first run.", setting.CustomConf)
+ logger.Error("Otherwise check that you are running this command from the correct path and/or provide a `--config` parameter.")
+ logger.Critical("Cannot proceed without a configuration file")
+ return err
+ }
+
+ setting.MustInstalled()
+
+ configurationFiles := []configurationFile{
+ {"Configuration File Path", setting.CustomConf, false, true, false},
+ {"Repository Root Path", setting.RepoRootPath, true, true, true},
+ {"Data Root Path", setting.AppDataPath, true, true, true},
+ {"Custom File Root Path", setting.CustomPath, true, false, false},
+ {"Work directory", setting.AppWorkPath, true, true, false},
+ {"Log Root Path", setting.Log.RootPath, true, true, true},
+ }
+
+ if !setting.HasBuiltinBindata {
+ configurationFiles = append(configurationFiles, configurationFile{"Static File Root Path", setting.StaticRootPath, true, true, false})
+ }
+
+ numberOfErrors := 0
+ for _, configurationFile := range configurationFiles {
+ if err := checkConfigurationFile(logger, autofix, configurationFile); err != nil {
+ numberOfErrors++
+ }
+ }
+
+ if numberOfErrors > 0 {
+ logger.Critical("Please check your configuration files and try again.")
+ return fmt.Errorf("%d configuration files with errors", numberOfErrors)
+ }
+
+ return nil
+}
+
+func isWritableDir(path string) error {
+ // There's no platform-independent way of checking if a directory is writable
+ // https://stackoverflow.com/questions/20026320/how-to-tell-if-folder-exists-and-is-writable
+
+ tmpFile, err := os.CreateTemp(path, "doctors-order")
+ if err != nil {
+ return err
+ }
+ if err := os.Remove(tmpFile.Name()); err != nil {
+ fmt.Printf("Warning: can't remove temporary file: '%s'\n", tmpFile.Name()) //nolint:forbidigo
+ }
+ tmpFile.Close()
+ return nil
+}
+
+func init() {
+ Register(&Check{
+ Title: "Check paths and basic configuration",
+ Name: "paths",
+ IsDefault: true,
+ Run: checkConfigurationFiles,
+ AbortIfFailed: true,
+ SkipDatabaseInitialization: true,
+ Priority: 1,
+ })
+}
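
The configurationFiles table above uses positional struct literals, which are easy to misread against the five fields of configurationFile. For reference, the second entry rewritten with named fields (illustration only; behavior is identical to the positional literal):

    configurationFile{
        Name:        "Repository Root Path",
        Path:        setting.RepoRootPath,
        IsDirectory: true,
        Required:    true,
        Writable:    true,
    }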
diff --git a/services/doctor/push_mirror_consistency.go b/services/doctor/push_mirror_consistency.go
new file mode 100644
index 0000000..68b96d6
--- /dev/null
+++ b/services/doctor/push_mirror_consistency.go
@@ -0,0 +1,91 @@
+// Copyright 2023 The Forgejo Authors c/o Codeberg e.V.. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package doctor
+
+import (
+ "context"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/log"
+
+ "xorm.io/builder"
+)
+
+func FixPushMirrorsWithoutGitRemote(ctx context.Context, logger log.Logger, autofix bool) error {
+ var missingMirrors []*repo_model.PushMirror
+
+ err := db.Iterate(ctx, builder.Gt{"id": 0}, func(ctx context.Context, repo *repo_model.Repository) error {
+ pushMirrors, _, err := repo_model.GetPushMirrorsByRepoID(ctx, repo.ID, db.ListOptions{})
+ if err != nil {
+ return err
+ }
+
+ for i := 0; i < len(pushMirrors); i++ {
+ _, err = repo_model.GetPushMirrorRemoteAddress(repo.OwnerName, repo.Name, pushMirrors[i].RemoteName)
+ if err != nil {
+ if strings.Contains(err.Error(), "No such remote") {
+ missingMirrors = append(missingMirrors, pushMirrors[i])
+ } else if logger != nil {
+ logger.Warn("Unable to retrieve the remote address of a mirror: %s", err)
+ }
+ }
+ }
+
+ return nil
+ })
+ if err != nil {
+ if logger != nil {
+ logger.Critical("Unable to iterate across repounits to fix push mirrors without a git remote: Error %v", err)
+ }
+ return err
+ }
+
+ count := len(missingMirrors)
+ if !autofix {
+ if logger != nil {
+ if count == 0 {
+ logger.Info("Found no push mirrors with missing git remotes")
+ } else {
+ logger.Warn("Found %d push mirrors with missing git remotes", count)
+ }
+ }
+ return nil
+ }
+
+ for i := 0; i < len(missingMirrors); i++ {
+ if logger != nil {
+ logger.Info("Removing push mirror #%d (remote: %s), for repo: %s/%s",
+ missingMirrors[i].ID,
+ missingMirrors[i].RemoteName,
+ missingMirrors[i].GetRepository(ctx).OwnerName,
+ missingMirrors[i].GetRepository(ctx).Name)
+ }
+
+ err = repo_model.DeletePushMirrors(ctx, repo_model.PushMirrorOptions{
+ ID: missingMirrors[i].ID,
+ RepoID: missingMirrors[i].RepoID,
+ RemoteName: missingMirrors[i].RemoteName,
+ })
+ if err != nil {
+ if logger != nil {
+ logger.Critical("Error removing a push mirror (repo_id: %d, push_mirror: %d): %s", missingMirrors[i].Repo.ID, missingMirrors[i].ID, err)
+ }
+ return err
+ }
+ }
+
+ return nil
+}
+
+func init() {
+ Register(&Check{
+ Title: "Check for push mirrors without a git remote configured",
+ Name: "fix-push-mirrors-without-git-remote",
+ IsDefault: false,
+ Run: FixPushMirrorsWithoutGitRemote,
+ Priority: 7,
+ })
+}
diff --git a/services/doctor/repository.go b/services/doctor/repository.go
new file mode 100644
index 0000000..6c33426
--- /dev/null
+++ b/services/doctor/repository.go
@@ -0,0 +1,80 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package doctor
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/storage"
+ repo_service "code.gitea.io/gitea/services/repository"
+
+ "xorm.io/builder"
+)
+
+func handleDeleteOrphanedRepos(ctx context.Context, logger log.Logger, autofix bool) error {
+ test := &consistencyCheck{
+ Name: "Repos with no existing owner",
+ Counter: countOrphanedRepos,
+ Fixer: deleteOrphanedRepos,
+ FixedMessage: "Deleted all content related to orphaned repos",
+ }
+ return test.Run(ctx, logger, autofix)
+}
+
+// countOrphanedRepos counts repositories whose owner_id does not reference an existing user
+func countOrphanedRepos(ctx context.Context) (int64, error) {
+ return db.CountOrphanedObjects(ctx, "repository", "user", "repository.owner_id=`user`.id")
+}
+
+// deleteOrphanedRepos deletes repositories whose owner_id does not reference an existing user
+func deleteOrphanedRepos(ctx context.Context) (int64, error) {
+ if err := storage.Init(); err != nil {
+ return 0, err
+ }
+
+ batchSize := db.MaxBatchInsertSize("repository")
+ e := db.GetEngine(ctx)
+ var deleted int64
+ adminUser := &user_model.User{IsAdmin: true}
+
+ for {
+ select {
+ case <-ctx.Done():
+ return deleted, ctx.Err()
+ default:
+ var ids []int64
+ if err := e.Table("`repository`").
+ Join("LEFT", "`user`", "repository.owner_id=`user`.id").
+ Where(builder.IsNull{"`user`.id"}).
+ Select("`repository`.id").Limit(batchSize).Find(&ids); err != nil {
+ return deleted, err
+ }
+
+ // if we don't get ids we have deleted them all
+ if len(ids) == 0 {
+ return deleted, nil
+ }
+
+ for _, id := range ids {
+ if err := repo_service.DeleteRepositoryDirectly(ctx, adminUser, id, true); err != nil {
+ return deleted, err
+ }
+ deleted++
+ }
+ }
+ }
+}
+
+func init() {
+ Register(&Check{
+ Title: "Deleted all content related to orphaned repos",
+ Name: "delete-orphaned-repos",
+ IsDefault: false,
+ Run: handleDeleteOrphanedRepos,
+ Priority: 4,
+ })
+}
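
countOrphanedRepos delegates the anti-join to db.CountOrphanedObjects, while the batch loop in deleteOrphanedRepos builds the same shape by hand with xorm. A rough sketch of the SQL both paths reduce to (assumed; the exact statement is generated by xorm from the builder calls):

    // Rough SQL equivalent of the batched orphan query above:
    //
    //   SELECT `repository`.id
    //   FROM `repository`
    //   LEFT JOIN `user` ON repository.owner_id = `user`.id
    //   WHERE `user`.id IS NULL
    //   LIMIT <batchSize>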
diff --git a/services/doctor/storage.go b/services/doctor/storage.go
new file mode 100644
index 0000000..3f3b562
--- /dev/null
+++ b/services/doctor/storage.go
@@ -0,0 +1,270 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package doctor
+
+import (
+ "context"
+ "errors"
+ "io/fs"
+ "strings"
+
+ "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/models/packages"
+ "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ packages_module "code.gitea.io/gitea/modules/packages"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/util"
+)
+
+type commonStorageCheckOptions struct {
+ storer storage.ObjectStorage
+ isOrphaned func(path string, obj storage.Object, stat fs.FileInfo) (bool, error)
+ name string
+}
+
+func commonCheckStorage(logger log.Logger, autofix bool, opts *commonStorageCheckOptions) error {
+ totalCount, orphanedCount := 0, 0
+ totalSize, orphanedSize := int64(0), int64(0)
+
+ var pathsToDelete []string
+ if err := opts.storer.IterateObjects("", func(p string, obj storage.Object) error {
+ defer obj.Close()
+
+ totalCount++
+ stat, err := obj.Stat()
+ if err != nil {
+ return err
+ }
+ totalSize += stat.Size()
+
+ orphaned, err := opts.isOrphaned(p, obj, stat)
+ if err != nil {
+ return err
+ }
+ if orphaned {
+ orphanedCount++
+ orphanedSize += stat.Size()
+ if autofix {
+ pathsToDelete = append(pathsToDelete, p)
+ }
+ }
+ return nil
+ }); err != nil {
+ logger.Error("Error whilst iterating %s storage: %v", opts.name, err)
+ return err
+ }
+
+ if orphanedCount > 0 {
+ if autofix {
+ var deletedNum int
+ for _, p := range pathsToDelete {
+ if err := opts.storer.Delete(p); err != nil {
+ log.Error("Error whilst deleting %s from %s storage: %v", p, opts.name, err)
+ } else {
+ deletedNum++
+ }
+ }
+ logger.Info("Deleted %d/%d orphaned %s(s)", deletedNum, orphanedCount, opts.name)
+ } else {
+ logger.Warn("Found %d/%d (%s/%s) orphaned %s(s)", orphanedCount, totalCount, base.FileSize(orphanedSize), base.FileSize(totalSize), opts.name)
+ }
+ } else {
+ logger.Info("Found %d (%s) %s(s)", totalCount, base.FileSize(totalSize), opts.name)
+ }
+ return nil
+}
+
+type checkStorageOptions struct {
+ All bool
+ Attachments bool
+ LFS bool
+ Avatars bool
+ RepoAvatars bool
+ RepoArchives bool
+ Packages bool
+}
+
+// checkStorage will return a doctor check function to check the requested storage types for "orphaned" stored object/files and optionally delete them
+func checkStorage(opts *checkStorageOptions) func(ctx context.Context, logger log.Logger, autofix bool) error {
+ return func(ctx context.Context, logger log.Logger, autofix bool) error {
+ if err := storage.Init(); err != nil {
+ logger.Error("storage.Init failed: %v", err)
+ return err
+ }
+
+ if opts.Attachments || opts.All {
+ if err := commonCheckStorage(logger, autofix,
+ &commonStorageCheckOptions{
+ storer: storage.Attachments,
+ isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) {
+ exists, err := repo.ExistAttachmentsByUUID(ctx, stat.Name())
+ return !exists, err
+ },
+ name: "attachment",
+ }); err != nil {
+ return err
+ }
+ }
+
+ if opts.LFS || opts.All {
+ if !setting.LFS.StartServer {
+ logger.Info("LFS isn't enabled (skipped)")
+ } else if err := commonCheckStorage(logger, autofix,
+ &commonStorageCheckOptions{
+ storer: storage.LFS,
+ isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) {
+ // The oid of an LFS stored object is the name but with all the path.Separators removed
+ oid := strings.ReplaceAll(path, "/", "")
+ exists, err := git.ExistsLFSObject(ctx, oid)
+ return !exists, err
+ },
+ name: "LFS file",
+ }); err != nil {
+ return err
+ }
+ }
+
+ if opts.Avatars || opts.All {
+ if err := commonCheckStorage(logger, autofix,
+ &commonStorageCheckOptions{
+ storer: storage.Avatars,
+ isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) {
+ exists, err := user.ExistsWithAvatarAtStoragePath(ctx, path)
+ return !exists, err
+ },
+ name: "avatar",
+ }); err != nil {
+ return err
+ }
+ }
+
+ if opts.RepoAvatars || opts.All {
+ if err := commonCheckStorage(logger, autofix,
+ &commonStorageCheckOptions{
+ storer: storage.RepoAvatars,
+ isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) {
+ exists, err := repo.ExistsWithAvatarAtStoragePath(ctx, path)
+ return !exists, err
+ },
+ name: "repo avatar",
+ }); err != nil {
+ return err
+ }
+ }
+
+ if opts.RepoArchives || opts.All {
+ if err := commonCheckStorage(logger, autofix,
+ &commonStorageCheckOptions{
+ storer: storage.RepoArchives,
+ isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) {
+ exists, err := repo.ExistsRepoArchiverWithStoragePath(ctx, path)
+ if err == nil || errors.Is(err, util.ErrInvalidArgument) {
+ // invalid arguments mean that the object is not a valid repo archiver and it should be removed
+ return !exists, nil
+ }
+ return !exists, err
+ },
+ name: "repo archive",
+ }); err != nil {
+ return err
+ }
+ }
+
+ if opts.Packages || opts.All {
+ if !setting.Packages.Enabled {
+ logger.Info("Packages aren't enabled (skipped)")
+ } else if err := commonCheckStorage(logger, autofix,
+ &commonStorageCheckOptions{
+ storer: storage.Packages,
+ isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) {
+ key, err := packages_module.RelativePathToKey(path)
+ if err != nil {
+ // If there is an error here then the relative path does not match a valid package
+ // Therefore it is orphaned by default
+ return true, nil
+ }
+
+ exists, err := packages.ExistPackageBlobWithSHA(ctx, string(key))
+
+ return !exists, err
+ },
+ name: "package blob",
+ }); err != nil {
+ return err
+ }
+ }
+
+ return nil
+ }
+}
+
+func init() {
+ Register(&Check{
+ Title: "Check if there are orphaned storage files",
+ Name: "storages",
+ IsDefault: false,
+ Run: checkStorage(&checkStorageOptions{All: true}),
+ AbortIfFailed: false,
+ SkipDatabaseInitialization: false,
+ Priority: 1,
+ })
+
+ Register(&Check{
+ Title: "Check if there are orphaned attachments in storage",
+ Name: "storage-attachments",
+ IsDefault: false,
+ Run: checkStorage(&checkStorageOptions{Attachments: true}),
+ AbortIfFailed: false,
+ SkipDatabaseInitialization: false,
+ Priority: 1,
+ })
+
+ Register(&Check{
+ Title: "Check if there are orphaned lfs files in storage",
+ Name: "storage-lfs",
+ IsDefault: false,
+ Run: checkStorage(&checkStorageOptions{LFS: true}),
+ AbortIfFailed: false,
+ SkipDatabaseInitialization: false,
+ Priority: 1,
+ })
+
+ Register(&Check{
+ Title: "Check if there are orphaned avatars in storage",
+ Name: "storage-avatars",
+ IsDefault: false,
+ Run: checkStorage(&checkStorageOptions{Avatars: true, RepoAvatars: true}),
+ AbortIfFailed: false,
+ SkipDatabaseInitialization: false,
+ Priority: 1,
+ })
+
+ Register(&Check{
+ Title: "Check if there are orphaned archives in storage",
+ Name: "storage-archives",
+ IsDefault: false,
+ Run: checkStorage(&checkStorageOptions{RepoArchives: true}),
+ AbortIfFailed: false,
+ SkipDatabaseInitialization: false,
+ Priority: 1,
+ })
+
+ Register(&Check{
+ Title: "Check if there are orphaned package blobs in storage",
+ Name: "storage-packages",
+ IsDefault: false,
+ Run: checkStorage(&checkStorageOptions{Packages: true}),
+ AbortIfFailed: false,
+ SkipDatabaseInitialization: false,
+ Priority: 1,
+ })
+}
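
commonStorageCheckOptions keeps each storage check down to one moving part: the isOrphaned predicate. A minimal sketch of how an additional storage would slot into the function returned by checkStorage (where ctx is in scope); the Foo option, storage.Foo and foo.ExistsWithStoragePath are hypothetical names invented for illustration, not part of this diff:

    // Hypothetical example; storage.Foo / foo.ExistsWithStoragePath are invented.
    if opts.Foo || opts.All {
        if err := commonCheckStorage(logger, autofix,
            &commonStorageCheckOptions{
                storer: storage.Foo,
                isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) {
                    exists, err := foo.ExistsWithStoragePath(ctx, path)
                    return !exists, err
                },
                name: "foo blob",
            }); err != nil {
            return err
        }
    }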
diff --git a/services/doctor/usertype.go b/services/doctor/usertype.go
new file mode 100644
index 0000000..ab32b78
--- /dev/null
+++ b/services/doctor/usertype.go
@@ -0,0 +1,41 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package doctor
+
+import (
+ "context"
+
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+)
+
+func checkUserType(ctx context.Context, logger log.Logger, autofix bool) error {
+ count, err := user_model.CountWrongUserType(ctx)
+ if err != nil {
+ logger.Critical("Error: %v whilst counting wrong user types")
+ return err
+ }
+ if count > 0 {
+ if autofix {
+ if count, err = user_model.FixWrongUserType(ctx); err != nil {
+ logger.Critical("Error: %v whilst fixing wrong user types")
+ return err
+ }
+ logger.Info("%d users with wrong type fixed", count)
+ } else {
+ logger.Warn("%d users with wrong type exist", count)
+ }
+ }
+ return nil
+}
+
+func init() {
+ Register(&Check{
+ Title: "Check if user with wrong type exist",
+ Name: "check-user-type",
+ IsDefault: true,
+ Run: checkUserType,
+ Priority: 3,
+ })
+}
diff --git a/services/externalaccount/link.go b/services/externalaccount/link.go
new file mode 100644
index 0000000..d6e2ea7
--- /dev/null
+++ b/services/externalaccount/link.go
@@ -0,0 +1,30 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package externalaccount
+
+import (
+ "context"
+ "fmt"
+
+ user_model "code.gitea.io/gitea/models/user"
+
+ "github.com/markbates/goth"
+)
+
+// Store represents a session store holding the external user captured during the link-account flow
+type Store interface {
+ Get(any) any
+ Set(any, any) error
+ Release() error
+}
+
+// LinkAccountFromStore links the provided user with a stored external user
+func LinkAccountFromStore(ctx context.Context, store Store, user *user_model.User) error {
+ gothUser := store.Get("linkAccountGothUser")
+ if gothUser == nil {
+ return fmt.Errorf("not in LinkAccount session")
+ }
+
+ return LinkAccountToUser(ctx, user, gothUser.(goth.User))
+}
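
In practice Store is satisfied by the web session wrapper; for tests or illustration a map-backed implementation is enough. A minimal sketch (mapStore is invented here, not part of this diff), given a goth.User and a *user_model.User:

    // mapStore is a hypothetical in-memory Store, for illustration only.
    type mapStore map[any]any

    func (s mapStore) Get(k any) any      { return s[k] }
    func (s mapStore) Set(k, v any) error { s[k] = v; return nil }
    func (s mapStore) Release() error     { return nil }

    // Usage: the OAuth callback stores the goth.User under the key
    // "linkAccountGothUser"; LinkAccountFromStore later consumes it.
    store := mapStore{"linkAccountGothUser": gothUser}
    err := LinkAccountFromStore(ctx, store, user)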
diff --git a/services/externalaccount/user.go b/services/externalaccount/user.go
new file mode 100644
index 0000000..3cfd8c8
--- /dev/null
+++ b/services/externalaccount/user.go
@@ -0,0 +1,107 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package externalaccount
+
+import (
+ "context"
+ "strconv"
+ "strings"
+
+ "code.gitea.io/gitea/models/auth"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/structs"
+
+ "github.com/markbates/goth"
+)
+
+func toExternalLoginUser(ctx context.Context, user *user_model.User, gothUser goth.User) (*user_model.ExternalLoginUser, error) {
+ authSource, err := auth.GetActiveOAuth2SourceByName(ctx, gothUser.Provider)
+ if err != nil {
+ return nil, err
+ }
+ return &user_model.ExternalLoginUser{
+ ExternalID: gothUser.UserID,
+ UserID: user.ID,
+ LoginSourceID: authSource.ID,
+ RawData: gothUser.RawData,
+ Provider: gothUser.Provider,
+ Email: gothUser.Email,
+ Name: gothUser.Name,
+ FirstName: gothUser.FirstName,
+ LastName: gothUser.LastName,
+ NickName: gothUser.NickName,
+ Description: gothUser.Description,
+ AvatarURL: gothUser.AvatarURL,
+ Location: gothUser.Location,
+ AccessToken: gothUser.AccessToken,
+ AccessTokenSecret: gothUser.AccessTokenSecret,
+ RefreshToken: gothUser.RefreshToken,
+ ExpiresAt: gothUser.ExpiresAt,
+ }, nil
+}
+
+// LinkAccountToUser link the gothUser to the user
+func LinkAccountToUser(ctx context.Context, user *user_model.User, gothUser goth.User) error {
+ externalLoginUser, err := toExternalLoginUser(ctx, user, gothUser)
+ if err != nil {
+ return err
+ }
+
+ if err := user_model.LinkExternalToUser(ctx, user, externalLoginUser); err != nil {
+ return err
+ }
+
+ externalID := externalLoginUser.ExternalID
+
+ var tp structs.GitServiceType
+ for _, s := range structs.SupportedFullGitService {
+ if strings.EqualFold(s.Name(), gothUser.Provider) {
+ tp = s
+ break
+ }
+ }
+
+ if tp.Name() != "" {
+ return UpdateMigrationsByType(ctx, tp, externalID, user.ID)
+ }
+
+ return nil
+}
+
+// UpdateExternalUser updates external user's information
+func UpdateExternalUser(ctx context.Context, user *user_model.User, gothUser goth.User) error {
+ externalLoginUser, err := toExternalLoginUser(ctx, user, gothUser)
+ if err != nil {
+ return err
+ }
+
+ return user_model.UpdateExternalUserByExternalID(ctx, externalLoginUser)
+}
+
+// UpdateMigrationsByType updates all migrated issues, comments, releases, reactions and reviews of the given GitServiceType, replacing the original (external) author ID with the local user ID
+func UpdateMigrationsByType(ctx context.Context, tp structs.GitServiceType, externalUserID string, userID int64) error {
+ // Skip update if externalUserID is not a valid numeric ID or exceeds int64
+ if _, err := strconv.ParseInt(externalUserID, 10, 64); err != nil {
+ return nil
+ }
+
+ if err := issues_model.UpdateIssuesMigrationsByType(ctx, tp, externalUserID, userID); err != nil {
+ return err
+ }
+
+ if err := issues_model.UpdateCommentsMigrationsByType(ctx, tp, externalUserID, userID); err != nil {
+ return err
+ }
+
+ if err := repo_model.UpdateReleasesMigrationsByType(ctx, tp, externalUserID, userID); err != nil {
+ return err
+ }
+
+ if err := issues_model.UpdateReactionsMigrationsByType(ctx, tp, externalUserID, userID); err != nil {
+ return err
+ }
+ return issues_model.UpdateReviewsMigrationsByType(ctx, tp, externalUserID, userID)
+}
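
The strconv.ParseInt guard at the top of UpdateMigrationsByType makes the whole operation a no-op for providers whose external IDs are not numeric, for example:

    // "12345"        -> parses as int64; migrated issues, comments, releases,
    //                   reactions and reviews are re-attributed to userID
    // "MDQ6VXNlcjE=" -> not numeric (illustrative GitHub node ID); the
    //                   function returns nil without touching anything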
diff --git a/services/f3/driver/asset.go b/services/f3/driver/asset.go
new file mode 100644
index 0000000..6759cc6
--- /dev/null
+++ b/services/f3/driver/asset.go
@@ -0,0 +1,171 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "crypto/sha256"
+ "encoding/hex"
+ "fmt"
+ "io"
+ "os"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/services/attachment"
+
+ "code.forgejo.org/f3/gof3/v3/f3"
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+ f3_util "code.forgejo.org/f3/gof3/v3/util"
+ "github.com/google/uuid"
+)
+
+var _ f3_tree.ForgeDriverInterface = &issue{}
+
+type asset struct {
+ common
+
+ forgejoAsset *repo_model.Attachment
+ sha string
+ contentType string
+ downloadFunc f3.DownloadFuncType
+}
+
+func (o *asset) SetNative(asset any) {
+ o.forgejoAsset = asset.(*repo_model.Attachment)
+}
+
+func (o *asset) GetNativeID() string {
+ return fmt.Sprintf("%d", o.forgejoAsset.ID)
+}
+
+func (o *asset) NewFormat() f3.Interface {
+ node := o.GetNode()
+ return node.GetTree().(f3_tree.TreeInterface).NewFormat(node.GetKind())
+}
+
+func (o *asset) ToFormat() f3.Interface {
+ if o.forgejoAsset == nil {
+ return o.NewFormat()
+ }
+
+ return &f3.ReleaseAsset{
+ Common: f3.NewCommon(o.GetNativeID()),
+ Name: o.forgejoAsset.Name,
+ ContentType: o.contentType,
+ Size: o.forgejoAsset.Size,
+ DownloadCount: o.forgejoAsset.DownloadCount,
+ Created: o.forgejoAsset.CreatedUnix.AsTime(),
+ SHA256: o.sha,
+ DownloadURL: o.forgejoAsset.DownloadURL(),
+ DownloadFunc: o.downloadFunc,
+ }
+}
+
+func (o *asset) FromFormat(content f3.Interface) {
+ asset := content.(*f3.ReleaseAsset)
+ o.forgejoAsset = &repo_model.Attachment{
+ ID: f3_util.ParseInt(asset.GetID()),
+ Name: asset.Name,
+ Size: asset.Size,
+ DownloadCount: asset.DownloadCount,
+ CreatedUnix: timeutil.TimeStamp(asset.Created.Unix()),
+ CustomDownloadURL: asset.DownloadURL,
+ }
+ o.contentType = asset.ContentType
+ o.sha = asset.SHA256
+ o.downloadFunc = asset.DownloadFunc
+}
+
+func (o *asset) Get(ctx context.Context) bool {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ id := node.GetID().Int64()
+
+ asset, err := repo_model.GetAttachmentByID(ctx, id)
+ if repo_model.IsErrAttachmentNotExist(err) {
+ return false
+ }
+ if err != nil {
+ panic(fmt.Errorf("asset %v %w", id, err))
+ }
+
+ o.forgejoAsset = asset
+
+ path := o.forgejoAsset.RelativePath()
+
+ {
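+ // hash the stored attachment so ToFormat can expose its SHA256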
+ f, err := storage.Attachments.Open(path)
+ if err != nil {
+ panic(err)
+ }
+ hasher := sha256.New()
+ if _, err := io.Copy(hasher, f); err != nil {
+ panic(fmt.Errorf("io.Copy to hasher: %v", err))
+ }
+ o.sha = hex.EncodeToString(hasher.Sum(nil))
+ }
+
+ o.downloadFunc = func() io.ReadCloser {
+ o.Trace("download %s from copy stored in temporary file %s", o.forgejoAsset.DownloadURL, path)
+ f, err := os.Open(path)
+ if err != nil {
+ panic(err)
+ }
+ return f
+ }
+ return true
+}
+
+func (o *asset) Patch(ctx context.Context) {
+ o.Trace("%d", o.forgejoAsset.ID)
+ if _, err := db.GetEngine(ctx).ID(o.forgejoAsset.ID).Cols("name").Update(o.forgejoAsset); err != nil {
+ panic(fmt.Errorf("UpdateAssetCols: %v %v", o.forgejoAsset, err))
+ }
+}
+
+func (o *asset) Put(ctx context.Context) generic.NodeID {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ uploader, err := user_model.GetAdminUser(ctx)
+ if err != nil {
+ panic(fmt.Errorf("GetAdminUser %w", err))
+ }
+
+ o.forgejoAsset.UploaderID = uploader.ID
+ o.forgejoAsset.RepoID = f3_tree.GetProjectID(o.GetNode())
+ o.forgejoAsset.ReleaseID = f3_tree.GetReleaseID(o.GetNode())
+ o.forgejoAsset.UUID = uuid.New().String()
+
+ download := o.downloadFunc()
+ defer download.Close()
+
+ _, err = attachment.NewAttachment(ctx, o.forgejoAsset, download, o.forgejoAsset.Size)
+ if err != nil {
+ panic(err)
+ }
+
+ o.Trace("asset created %d", o.forgejoAsset.ID)
+ return generic.NewNodeID(o.forgejoAsset.ID)
+}
+
+func (o *asset) Delete(ctx context.Context) {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ if err := repo_model.DeleteAttachment(ctx, o.forgejoAsset, true); err != nil {
+ panic(err)
+ }
+}
+
+func newAsset() generic.NodeDriverInterface {
+ return &asset{}
+}
diff --git a/services/f3/driver/assets.go b/services/f3/driver/assets.go
new file mode 100644
index 0000000..88a3979
--- /dev/null
+++ b/services/f3/driver/assets.go
@@ -0,0 +1,42 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+)
+
+type assets struct {
+ container
+}
+
+func (o *assets) ListPage(ctx context.Context, page int) generic.ChildrenSlice {
+ if page > 1 {
+ return generic.NewChildrenSlice(0)
+ }
+
+ releaseID := f3_tree.GetReleaseID(o.GetNode())
+
+ release, err := repo_model.GetReleaseByID(ctx, releaseID)
+ if err != nil {
+ panic(fmt.Errorf("GetReleaseByID %v %w", releaseID, err))
+ }
+
+ if err := release.LoadAttributes(ctx); err != nil {
+ panic(fmt.Errorf("error while listing assets: %v", err))
+ }
+
+ return f3_tree.ConvertListed(ctx, o.GetNode(), f3_tree.ConvertToAny(release.Attachments...)...)
+}
+
+func newAssets() generic.NodeDriverInterface {
+ return &assets{}
+}
diff --git a/services/f3/driver/comment.go b/services/f3/driver/comment.go
new file mode 100644
index 0000000..0c10fd7
--- /dev/null
+++ b/services/f3/driver/comment.go
@@ -0,0 +1,122 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "code.forgejo.org/f3/gof3/v3/f3"
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+ f3_util "code.forgejo.org/f3/gof3/v3/util"
+)
+
+var _ f3_tree.ForgeDriverInterface = &comment{}
+
+type comment struct {
+ common
+
+ forgejoComment *issues_model.Comment
+}
+
+func (o *comment) SetNative(comment any) {
+ o.forgejoComment = comment.(*issues_model.Comment)
+}
+
+func (o *comment) GetNativeID() string {
+ return fmt.Sprintf("%d", o.forgejoComment.ID)
+}
+
+func (o *comment) NewFormat() f3.Interface {
+ node := o.GetNode()
+ return node.GetTree().(f3_tree.TreeInterface).NewFormat(node.GetKind())
+}
+
+func (o *comment) ToFormat() f3.Interface {
+ if o.forgejoComment == nil {
+ return o.NewFormat()
+ }
+ return &f3.Comment{
+ Common: f3.NewCommon(fmt.Sprintf("%d", o.forgejoComment.ID)),
+ PosterID: f3_tree.NewUserReference(o.forgejoComment.Poster.ID),
+ Content: o.forgejoComment.Content,
+ Created: o.forgejoComment.CreatedUnix.AsTime(),
+ Updated: o.forgejoComment.UpdatedUnix.AsTime(),
+ }
+}
+
+func (o *comment) FromFormat(content f3.Interface) {
+ comment := content.(*f3.Comment)
+
+ o.forgejoComment = &issues_model.Comment{
+ ID: f3_util.ParseInt(comment.GetID()),
+ PosterID: comment.PosterID.GetIDAsInt(),
+ Poster: &user_model.User{
+ ID: comment.PosterID.GetIDAsInt(),
+ },
+ Content: comment.Content,
+ CreatedUnix: timeutil.TimeStamp(comment.Created.Unix()),
+ UpdatedUnix: timeutil.TimeStamp(comment.Updated.Unix()),
+ }
+}
+
+func (o *comment) Get(ctx context.Context) bool {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ id := node.GetID().Int64()
+
+ comment, err := issues_model.GetCommentByID(ctx, id)
+ if issues_model.IsErrCommentNotExist(err) {
+ return false
+ }
+ if err != nil {
+ panic(fmt.Errorf("comment %v %w", id, err))
+ }
+ if err := comment.LoadPoster(ctx); err != nil {
+ panic(fmt.Errorf("LoadPoster %v %w", *comment, err))
+ }
+ o.forgejoComment = comment
+ return true
+}
+
+func (o *comment) Patch(ctx context.Context) {
+ o.Trace("%d", o.forgejoComment.ID)
+ if _, err := db.GetEngine(ctx).ID(o.forgejoComment.ID).Cols("content").Update(o.forgejoComment); err != nil {
+ panic(fmt.Errorf("UpdateCommentCols: %v %v", o.forgejoComment, err))
+ }
+}
+
+func (o *comment) Put(ctx context.Context) generic.NodeID {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ sess := db.GetEngine(ctx)
+
+ if _, err := sess.NoAutoTime().Insert(o.forgejoComment); err != nil {
+ panic(err)
+ }
+ o.Trace("comment created %d", o.forgejoComment.ID)
+ return generic.NewNodeID(o.forgejoComment.ID)
+}
+
+func (o *comment) Delete(ctx context.Context) {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ if err := issues_model.DeleteComment(ctx, o.forgejoComment); err != nil {
+ panic(err)
+ }
+}
+
+func newComment() generic.NodeDriverInterface {
+ return &comment{}
+}
diff --git a/services/f3/driver/comments.go b/services/f3/driver/comments.go
new file mode 100644
index 0000000..eb79b74
--- /dev/null
+++ b/services/f3/driver/comments.go
@@ -0,0 +1,49 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+)
+
+type comments struct {
+ container
+}
+
+func (o *comments) ListPage(ctx context.Context, page int) generic.ChildrenSlice {
+ pageSize := o.getPageSize()
+
+ project := f3_tree.GetProjectID(o.GetNode())
+ commentable := f3_tree.GetCommentableID(o.GetNode())
+
+ issue, err := issues_model.GetIssueByIndex(ctx, project, commentable)
+ if err != nil {
+ panic(fmt.Errorf("GetIssueByIndex %v %w", commentable, err))
+ }
+
+ sess := db.GetEngine(ctx).
+ Table("comment").
+ Where("`issue_id` = ? AND `type` = ?", issue.ID, issues_model.CommentTypeComment)
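+ // page == 0 requests the full, unpaginated list of comments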
+ if page != 0 {
+ sess = db.SetSessionPagination(sess, &db.ListOptions{Page: page, PageSize: pageSize})
+ }
+ forgejoComments := make([]*issues_model.Comment, 0, pageSize)
+ if err := sess.Find(&forgejoComments); err != nil {
+ panic(fmt.Errorf("error while listing comments: %v", err))
+ }
+
+ return f3_tree.ConvertListed(ctx, o.GetNode(), f3_tree.ConvertToAny(forgejoComments...)...)
+}
+
+func newComments() generic.NodeDriverInterface {
+ return &comments{}
+}
diff --git a/services/f3/driver/common.go b/services/f3/driver/common.go
new file mode 100644
index 0000000..104f91c
--- /dev/null
+++ b/services/f3/driver/common.go
@@ -0,0 +1,48 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+)
+
+type common struct {
+ generic.NullDriver
+}
+
+func (o *common) GetHelper() any {
+ panic("not implemented")
+}
+
+func (o *common) ListPage(ctx context.Context, page int) generic.ChildrenSlice {
+ return generic.NewChildrenSlice(0)
+}
+
+func (o *common) GetNativeID() string {
+ return ""
+}
+
+func (o *common) SetNative(native any) {
+}
+
+func (o *common) getTree() generic.TreeInterface {
+ return o.GetNode().GetTree()
+}
+
+func (o *common) getPageSize() int {
+ return o.getTreeDriver().GetPageSize()
+}
+
+func (o *common) getKind() generic.Kind {
+ return o.GetNode().GetKind()
+}
+
+func (o *common) getTreeDriver() *treeDriver {
+ return o.GetTreeDriver().(*treeDriver)
+}
+
+func (o *common) IsNull() bool { return false }
diff --git a/services/f3/driver/container.go b/services/f3/driver/container.go
new file mode 100644
index 0000000..1530444
--- /dev/null
+++ b/services/f3/driver/container.go
@@ -0,0 +1,43 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+
+ "code.forgejo.org/f3/gof3/v3/f3"
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+)
+
+type container struct {
+ common
+}
+
+func (o *container) NewFormat() f3.Interface {
+ node := o.GetNode()
+ return node.GetTree().(f3_tree.TreeInterface).NewFormat(node.GetKind())
+}
+
+func (o *container) ToFormat() f3.Interface {
+ return o.NewFormat()
+}
+
+func (o *container) FromFormat(content f3.Interface) {
+}
+
+func (o *container) Get(context.Context) bool { return true }
+
+func (o *container) Put(ctx context.Context) generic.NodeID {
+ return o.upsert(ctx)
+}
+
+func (o *container) Patch(ctx context.Context) {
+ o.upsert(ctx)
+}
+
+func (o *container) upsert(context.Context) generic.NodeID {
+ return generic.NewNodeID(o.getKind())
+}
diff --git a/services/f3/driver/forge.go b/services/f3/driver/forge.go
new file mode 100644
index 0000000..a4bcf61
--- /dev/null
+++ b/services/f3/driver/forge.go
@@ -0,0 +1,64 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+
+ user_model "code.gitea.io/gitea/models/user"
+
+ "code.forgejo.org/f3/gof3/v3/f3"
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+ "code.forgejo.org/f3/gof3/v3/util"
+)
+
+type forge struct {
+ generic.NullDriver
+
+ ownersKind map[string]generic.Kind
+}
+
+func newForge() generic.NodeDriverInterface {
+ return &forge{
+ ownersKind: make(map[string]generic.Kind),
+ }
+}
+
+func (o *forge) getOwnersKind(ctx context.Context, id string) generic.Kind {
+ kind, ok := o.ownersKind[id]
+ if !ok {
+ user, err := user_model.GetUserByID(ctx, util.ParseInt(id))
+ if err != nil {
+ panic(fmt.Errorf("user_repo.GetUserByID: %w", err))
+ }
+ kind = f3_tree.KindUsers
+ if user.IsOrganization() {
+ kind = f3_tree.KindOrganization
+ }
+ o.ownersKind[id] = kind
+ }
+ return kind
+}
+
+func (o *forge) getOwnersPath(ctx context.Context, id string) f3_tree.Path {
+ return f3_tree.NewPathFromString("/").SetForge().SetOwners(o.getOwnersKind(ctx, id))
+}
+
+func (o *forge) Equals(context.Context, generic.NodeInterface) bool { return true }
+func (o *forge) Get(context.Context) bool { return true }
+func (o *forge) Put(context.Context) generic.NodeID { return generic.NewNodeID("forge") }
+func (o *forge) Patch(context.Context) {}
+func (o *forge) Delete(context.Context) {}
+func (o *forge) NewFormat() f3.Interface { return &f3.Forge{} }
+func (o *forge) FromFormat(f3.Interface) {}
+
+func (o *forge) ToFormat() f3.Interface {
+ return &f3.Forge{
+ Common: f3.NewCommon("forge"),
+ URL: o.String(),
+ }
+}
diff --git a/services/f3/driver/issue.go b/services/f3/driver/issue.go
new file mode 100644
index 0000000..7f1614d
--- /dev/null
+++ b/services/f3/driver/issue.go
@@ -0,0 +1,238 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/timeutil"
+ issue_service "code.gitea.io/gitea/services/issue"
+
+ "code.forgejo.org/f3/gof3/v3/f3"
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+ f3_util "code.forgejo.org/f3/gof3/v3/util"
+)
+
+var _ f3_tree.ForgeDriverInterface = &issue{}
+
+type issue struct {
+ common
+
+ forgejoIssue *issues_model.Issue
+}
+
+func (o *issue) SetNative(issue any) {
+ o.forgejoIssue = issue.(*issues_model.Issue)
+}
+
+func (o *issue) GetNativeID() string {
+ return fmt.Sprintf("%d", o.forgejoIssue.Index)
+}
+
+func (o *issue) NewFormat() f3.Interface {
+ node := o.GetNode()
+ return node.GetTree().(f3_tree.TreeInterface).NewFormat(node.GetKind())
+}
+
+func (o *issue) ToFormat() f3.Interface {
+ if o.forgejoIssue == nil {
+ return o.NewFormat()
+ }
+
+ var milestone *f3.Reference
+ if o.forgejoIssue.Milestone != nil {
+ milestone = f3_tree.NewIssueMilestoneReference(o.forgejoIssue.Milestone.ID)
+ }
+
+ assignees := make([]*f3.Reference, 0, len(o.forgejoIssue.Assignees))
+ for _, assignee := range o.forgejoIssue.Assignees {
+ assignees = append(assignees, f3_tree.NewUserReference(assignee.ID))
+ }
+
+ labels := make([]*f3.Reference, 0, len(o.forgejoIssue.Labels))
+ for _, label := range o.forgejoIssue.Labels {
+ labels = append(labels, f3_tree.NewIssueLabelReference(label.ID))
+ }
+
+ return &f3.Issue{
+ Title: o.forgejoIssue.Title,
+ Common: f3.NewCommon(o.GetNativeID()),
+ PosterID: f3_tree.NewUserReference(o.forgejoIssue.Poster.ID),
+ Assignees: assignees,
+ Labels: labels,
+ Content: o.forgejoIssue.Content,
+ Milestone: milestone,
+ State: string(o.forgejoIssue.State()),
+ Created: o.forgejoIssue.CreatedUnix.AsTime(),
+ Updated: o.forgejoIssue.UpdatedUnix.AsTime(),
+ Closed: o.forgejoIssue.ClosedUnix.AsTimePtr(),
+ IsLocked: o.forgejoIssue.IsLocked,
+ }
+}
+
+func (o *issue) FromFormat(content f3.Interface) {
+ issue := content.(*f3.Issue)
+ var milestone *issues_model.Milestone
+ if issue.Milestone != nil {
+ milestone = &issues_model.Milestone{
+ ID: issue.Milestone.GetIDAsInt(),
+ }
+ }
+ o.forgejoIssue = &issues_model.Issue{
+ Title: issue.Title,
+ Index: f3_util.ParseInt(issue.GetID()),
+ PosterID: issue.PosterID.GetIDAsInt(),
+ Poster: &user_model.User{
+ ID: issue.PosterID.GetIDAsInt(),
+ },
+ Content: issue.Content,
+ Milestone: milestone,
+ IsClosed: issue.State == f3.IssueStateClosed,
+ CreatedUnix: timeutil.TimeStamp(issue.Created.Unix()),
+ UpdatedUnix: timeutil.TimeStamp(issue.Updated.Unix()),
+ IsLocked: issue.IsLocked,
+ }
+
+ assignees := make([]*user_model.User, 0, len(issue.Assignees))
+ for _, assignee := range issue.Assignees {
+ assignees = append(assignees, &user_model.User{ID: assignee.GetIDAsInt()})
+ }
+ o.forgejoIssue.Assignees = assignees
+
+ labels := make([]*issues_model.Label, 0, len(issue.Labels))
+ for _, label := range issue.Labels {
+ labels = append(labels, &issues_model.Label{ID: label.GetIDAsInt()})
+ }
+ o.forgejoIssue.Labels = labels
+
+ if issue.Closed != nil {
+ o.forgejoIssue.ClosedUnix = timeutil.TimeStamp(issue.Closed.Unix())
+ }
+}
+
+func (o *issue) Get(ctx context.Context) bool {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ project := f3_tree.GetProjectID(o.GetNode())
+ id := node.GetID().Int64()
+
+ issue, err := issues_model.GetIssueByIndex(ctx, project, id)
+ if issues_model.IsErrIssueNotExist(err) {
+ return false
+ }
+ if err != nil {
+ panic(fmt.Errorf("issue %v %w", id, err))
+ }
+ if err := issue.LoadAttributes(ctx); err != nil {
+ panic(err)
+ }
+
+ o.forgejoIssue = issue
+ return true
+}
+
+func (o *issue) Patch(ctx context.Context) {
+ node := o.GetNode()
+ project := f3_tree.GetProjectID(o.GetNode())
+ id := node.GetID().Int64()
+ o.Trace("repo_id = %d, index = %d", project, id)
+ if _, err := db.GetEngine(ctx).Where("`repo_id` = ? AND `index` = ?", project, id).Cols("name", "content", "is_closed").Update(o.forgejoIssue); err != nil {
+ panic(fmt.Errorf("%v %v", o.forgejoIssue, err))
+ }
+}
+
+func (o *issue) Put(ctx context.Context) generic.NodeID {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ o.forgejoIssue.RepoID = f3_tree.GetProjectID(o.GetNode())
+ makeLabels := func(issueID int64) []issues_model.IssueLabel {
+ labels := make([]issues_model.IssueLabel, 0, len(o.forgejoIssue.Labels))
+ for _, label := range o.forgejoIssue.Labels {
+ o.Trace("%d with label %d", issueID, label.ID)
+ labels = append(labels, issues_model.IssueLabel{
+ IssueID: issueID,
+ LabelID: label.ID,
+ })
+ }
+ return labels
+ }
+
+ idx, err := db.GetNextResourceIndex(ctx, "issue_index", o.forgejoIssue.RepoID)
+ if err != nil {
+ panic(fmt.Errorf("generate issue index failed: %w", err))
+ }
+ o.forgejoIssue.Index = idx
+
+ sess := db.GetEngine(ctx)
+
+ if _, err = sess.NoAutoTime().Insert(o.forgejoIssue); err != nil {
+ panic(err)
+ }
+
+ labels := makeLabels(o.forgejoIssue.ID)
+ if len(labels) > 0 {
+ if _, err := sess.Insert(labels); err != nil {
+ panic(err)
+ }
+ }
+
+ makeAssignees := func(issueID int64) []issues_model.IssueAssignees {
+ assignees := make([]issues_model.IssueAssignees, 0, len(o.forgejoIssue.Assignees))
+ for _, assignee := range o.forgejoIssue.Assignees {
+ o.Trace("%d with assignee %d", issueID, assignee.ID)
+ assignees = append(assignees, issues_model.IssueAssignees{
+ IssueID: issueID,
+ AssigneeID: assignee.ID,
+ })
+ }
+ return assignees
+ }
+
+ assignees := makeAssignees(o.forgejoIssue.ID)
+ if len(assignees) > 0 {
+ if _, err := sess.Insert(assignees); err != nil {
+ panic(err)
+ }
+ }
+
+ o.Trace("issue created %d/%d", o.forgejoIssue.ID, o.forgejoIssue.Index)
+ return generic.NewNodeID(o.forgejoIssue.Index)
+}
+
+func (o *issue) Delete(ctx context.Context) {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ owner := f3_tree.GetOwnerName(o.GetNode())
+ project := f3_tree.GetProjectName(o.GetNode())
+ repoPath := repo_model.RepoPath(owner, project)
+ gitRepo, err := git.OpenRepository(ctx, repoPath)
+ if err != nil {
+ panic(err)
+ }
+ defer gitRepo.Close()
+
+ doer, err := user_model.GetAdminUser(ctx)
+ if err != nil {
+ panic(fmt.Errorf("GetAdminUser %w", err))
+ }
+
+ if err := issue_service.DeleteIssue(ctx, doer, gitRepo, o.forgejoIssue); err != nil {
+ panic(err)
+ }
+}
+
+func newIssue() generic.NodeDriverInterface {
+ return &issue{}
+}
diff --git a/services/f3/driver/issues.go b/services/f3/driver/issues.go
new file mode 100644
index 0000000..3a5a64e
--- /dev/null
+++ b/services/f3/driver/issues.go
@@ -0,0 +1,40 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+)
+
+type issues struct {
+ container
+}
+
+func (o *issues) ListPage(ctx context.Context, page int) generic.ChildrenSlice {
+ pageSize := o.getPageSize()
+
+ project := f3_tree.GetProjectID(o.GetNode())
+
+ forgejoIssues, err := issues_model.Issues(ctx, &issues_model.IssuesOptions{
+ Paginator: &db.ListOptions{Page: page, PageSize: pageSize},
+ RepoIDs: []int64{project},
+ })
+ if err != nil {
+ panic(fmt.Errorf("error while listing issues: %v", err))
+ }
+
+ return f3_tree.ConvertListed(ctx, o.GetNode(), f3_tree.ConvertToAny(forgejoIssues...)...)
+}
+
+func newIssues() generic.NodeDriverInterface {
+ return &issues{}
+}
diff --git a/services/f3/driver/label.go b/services/f3/driver/label.go
new file mode 100644
index 0000000..6d1fcaa
--- /dev/null
+++ b/services/f3/driver/label.go
@@ -0,0 +1,113 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+
+ "code.forgejo.org/f3/gof3/v3/f3"
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+ f3_util "code.forgejo.org/f3/gof3/v3/util"
+)
+
+var _ f3_tree.ForgeDriverInterface = &label{}
+
+type label struct {
+ common
+
+ forgejoLabel *issues_model.Label
+}
+
+func (o *label) SetNative(label any) {
+ o.forgejoLabel = label.(*issues_model.Label)
+}
+
+func (o *label) GetNativeID() string {
+ return fmt.Sprintf("%d", o.forgejoLabel.ID)
+}
+
+func (o *label) NewFormat() f3.Interface {
+ node := o.GetNode()
+ return node.GetTree().(f3_tree.TreeInterface).NewFormat(node.GetKind())
+}
+
+func (o *label) ToFormat() f3.Interface {
+ if o.forgejoLabel == nil {
+ return o.NewFormat()
+ }
+ return &f3.Label{
+ Common: f3.NewCommon(fmt.Sprintf("%d", o.forgejoLabel.ID)),
+ Name: o.forgejoLabel.Name,
+ Color: o.forgejoLabel.Color,
+ Description: o.forgejoLabel.Description,
+ }
+}
+
+func (o *label) FromFormat(content f3.Interface) {
+ label := content.(*f3.Label)
+ o.forgejoLabel = &issues_model.Label{
+ ID: f3_util.ParseInt(label.GetID()),
+ Name: label.Name,
+ Description: label.Description,
+ Color: label.Color,
+ }
+}
+
+func (o *label) Get(ctx context.Context) bool {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ project := f3_tree.GetProjectID(o.GetNode())
+ id := node.GetID().Int64()
+
+ label, err := issues_model.GetLabelInRepoByID(ctx, project, id)
+ if issues_model.IsErrRepoLabelNotExist(err) {
+ return false
+ }
+ if err != nil {
+ panic(fmt.Errorf("label %v %w", id, err))
+ }
+ o.forgejoLabel = label
+ return true
+}
+
+func (o *label) Patch(ctx context.Context) {
+ o.Trace("%d", o.forgejoLabel.ID)
+ if _, err := db.GetEngine(ctx).ID(o.forgejoLabel.ID).Cols("name", "description", "color").Update(o.forgejoLabel); err != nil {
+ panic(fmt.Errorf("UpdateLabelCols: %v %v", o.forgejoLabel, err))
+ }
+}
+
+func (o *label) Put(ctx context.Context) generic.NodeID {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ o.forgejoLabel.RepoID = f3_tree.GetProjectID(o.GetNode())
+ if err := issues_model.NewLabel(ctx, o.forgejoLabel); err != nil {
+ panic(err)
+ }
+ o.Trace("label created %d", o.forgejoLabel.ID)
+ return generic.NewNodeID(o.forgejoLabel.ID)
+}
+
+func (o *label) Delete(ctx context.Context) {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ project := f3_tree.GetProjectID(o.GetNode())
+
+ if err := issues_model.DeleteLabel(ctx, project, o.forgejoLabel.ID); err != nil {
+ panic(err)
+ }
+}
+
+func newLabel() generic.NodeDriverInterface {
+ return &label{}
+}
diff --git a/services/f3/driver/labels.go b/services/f3/driver/labels.go
new file mode 100644
index 0000000..03f986b
--- /dev/null
+++ b/services/f3/driver/labels.go
@@ -0,0 +1,37 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+)
+
+type labels struct {
+ container
+}
+
+func (o *labels) ListPage(ctx context.Context, page int) generic.ChildrenSlice {
+ pageSize := o.getPageSize()
+
+ project := f3_tree.GetProjectID(o.GetNode())
+
+ forgejoLabels, err := issues_model.GetLabelsByRepoID(ctx, project, "", db.ListOptions{Page: page, PageSize: pageSize})
+ if err != nil {
+ panic(fmt.Errorf("error while listing labels: %v", err))
+ }
+
+ return f3_tree.ConvertListed(ctx, o.GetNode(), f3_tree.ConvertToAny(forgejoLabels...)...)
+}
+
+func newLabels() generic.NodeDriverInterface {
+ return &labels{}
+}
diff --git a/services/f3/driver/main.go b/services/f3/driver/main.go
new file mode 100644
index 0000000..825d456
--- /dev/null
+++ b/services/f3/driver/main.go
@@ -0,0 +1,17 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ driver_options "code.gitea.io/gitea/services/f3/driver/options"
+
+ "code.forgejo.org/f3/gof3/v3/options"
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+)
+
+func init() {
+ f3_tree.RegisterForgeFactory(driver_options.Name, newTreeDriver)
+ options.RegisterFactory(driver_options.Name, newOptions)
+}
diff --git a/services/f3/driver/main_test.go b/services/f3/driver/main_test.go
new file mode 100644
index 0000000..8505b69
--- /dev/null
+++ b/services/f3/driver/main_test.go
@@ -0,0 +1,30 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+ driver_options "code.gitea.io/gitea/services/f3/driver/options"
+
+ _ "code.gitea.io/gitea/models"
+ _ "code.gitea.io/gitea/models/actions"
+ _ "code.gitea.io/gitea/models/activities"
+ _ "code.gitea.io/gitea/models/perm/access"
+ _ "code.gitea.io/gitea/services/f3/driver/tests"
+
+ tests_f3 "code.forgejo.org/f3/gof3/v3/tree/tests/f3"
+ "github.com/stretchr/testify/require"
+)
+
+func TestF3(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ tests_f3.ForgeCompliance(t, driver_options.Name)
+}
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
diff --git a/services/f3/driver/milestone.go b/services/f3/driver/milestone.go
new file mode 100644
index 0000000..222407f
--- /dev/null
+++ b/services/f3/driver/milestone.go
@@ -0,0 +1,152 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "code.forgejo.org/f3/gof3/v3/f3"
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+ f3_util "code.forgejo.org/f3/gof3/v3/util"
+)
+
+var _ f3_tree.ForgeDriverInterface = &milestone{}
+
+type milestone struct {
+ common
+
+ forgejoMilestone *issues_model.Milestone
+}
+
+func (o *milestone) SetNative(milestone any) {
+ o.forgejoMilestone = milestone.(*issues_model.Milestone)
+}
+
+func (o *milestone) GetNativeID() string {
+ return fmt.Sprintf("%d", o.forgejoMilestone.ID)
+}
+
+func (o *milestone) NewFormat() f3.Interface {
+ node := o.GetNode()
+ return node.GetTree().(f3_tree.TreeInterface).NewFormat(node.GetKind())
+}
+
+func (o *milestone) ToFormat() f3.Interface {
+ if o.forgejoMilestone == nil {
+ return o.NewFormat()
+ }
+ return &f3.Milestone{
+ Common: f3.NewCommon(fmt.Sprintf("%d", o.forgejoMilestone.ID)),
+ Title: o.forgejoMilestone.Name,
+ Description: o.forgejoMilestone.Content,
+ Created: o.forgejoMilestone.CreatedUnix.AsTime(),
+ Updated: o.forgejoMilestone.UpdatedUnix.AsTimePtr(),
+ Deadline: o.forgejoMilestone.DeadlineUnix.AsTimePtr(),
+ State: string(o.forgejoMilestone.State()),
+ }
+}
+
+func (o *milestone) FromFormat(content f3.Interface) {
+ milestone := content.(*f3.Milestone)
+
+ var deadline timeutil.TimeStamp
+ if milestone.Deadline != nil {
+ deadline = timeutil.TimeStamp(milestone.Deadline.Unix())
+ }
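+	// Fall back to a far-future deadline (year 9999) when none is provided.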
+ if deadline == 0 {
+ deadline = timeutil.TimeStamp(time.Date(9999, 1, 1, 0, 0, 0, 0, setting.DefaultUILocation).Unix())
+ }
+
+ var closed timeutil.TimeStamp
+ if milestone.Closed != nil {
+ closed = timeutil.TimeStamp(milestone.Closed.Unix())
+ }
+
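+	// Backfill missing timestamps: prefer Updated, then Deadline, then the current time.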
+ if milestone.Created.IsZero() {
+ if milestone.Updated != nil {
+ milestone.Created = *milestone.Updated
+ } else if milestone.Deadline != nil {
+ milestone.Created = *milestone.Deadline
+ } else {
+ milestone.Created = time.Now()
+ }
+ }
+ if milestone.Updated == nil || milestone.Updated.IsZero() {
+ milestone.Updated = &milestone.Created
+ }
+
+ o.forgejoMilestone = &issues_model.Milestone{
+ ID: f3_util.ParseInt(milestone.GetID()),
+ Name: milestone.Title,
+ Content: milestone.Description,
+ IsClosed: milestone.State == f3.MilestoneStateClosed,
+ CreatedUnix: timeutil.TimeStamp(milestone.Created.Unix()),
+ UpdatedUnix: timeutil.TimeStamp(milestone.Updated.Unix()),
+ ClosedDateUnix: closed,
+ DeadlineUnix: deadline,
+ }
+}
+
+func (o *milestone) Get(ctx context.Context) bool {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ project := f3_tree.GetProjectID(o.GetNode())
+ id := node.GetID().Int64()
+
+ milestone, err := issues_model.GetMilestoneByRepoID(ctx, project, id)
+ if issues_model.IsErrMilestoneNotExist(err) {
+ return false
+ }
+ if err != nil {
+ panic(fmt.Errorf("milestone %v %w", id, err))
+ }
+ o.forgejoMilestone = milestone
+ return true
+}
+
+func (o *milestone) Patch(ctx context.Context) {
+ o.Trace("%d", o.forgejoMilestone.ID)
+	if _, err := db.GetEngine(ctx).ID(o.forgejoMilestone.ID).Cols("name", "content").Update(o.forgejoMilestone); err != nil {
+ panic(fmt.Errorf("UpdateMilestoneCols: %v %v", o.forgejoMilestone, err))
+ }
+}
+
+func (o *milestone) Put(ctx context.Context) generic.NodeID {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ o.forgejoMilestone.RepoID = f3_tree.GetProjectID(o.GetNode())
+ if err := issues_model.NewMilestone(ctx, o.forgejoMilestone); err != nil {
+ panic(err)
+ }
+ o.Trace("milestone created %d", o.forgejoMilestone.ID)
+ return generic.NewNodeID(o.forgejoMilestone.ID)
+}
+
+func (o *milestone) Delete(ctx context.Context) {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ project := f3_tree.GetProjectID(o.GetNode())
+
+ if err := issues_model.DeleteMilestoneByRepoID(ctx, project, o.forgejoMilestone.ID); err != nil {
+ panic(err)
+ }
+}
+
+func newMilestone() generic.NodeDriverInterface {
+ return &milestone{}
+}
diff --git a/services/f3/driver/milestones.go b/services/f3/driver/milestones.go
new file mode 100644
index 0000000..c816903
--- /dev/null
+++ b/services/f3/driver/milestones.go
@@ -0,0 +1,40 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+)
+
+type milestones struct {
+ container
+}
+
+func (o *milestones) ListPage(ctx context.Context, page int) generic.ChildrenSlice {
+ pageSize := o.getPageSize()
+
+ project := f3_tree.GetProjectID(o.GetNode())
+
+ forgejoMilestones, err := db.Find[issues_model.Milestone](ctx, issues_model.FindMilestoneOptions{
+ ListOptions: db.ListOptions{Page: page, PageSize: pageSize},
+ RepoID: project,
+ })
+ if err != nil {
+ panic(fmt.Errorf("error while listing milestones: %v", err))
+ }
+
+ return f3_tree.ConvertListed(ctx, o.GetNode(), f3_tree.ConvertToAny(forgejoMilestones...)...)
+}
+
+func newMilestones() generic.NodeDriverInterface {
+ return &milestones{}
+}
diff --git a/services/f3/driver/options.go b/services/f3/driver/options.go
new file mode 100644
index 0000000..abc5015
--- /dev/null
+++ b/services/f3/driver/options.go
@@ -0,0 +1,20 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "net/http"
+
+ driver_options "code.gitea.io/gitea/services/f3/driver/options"
+
+ "code.forgejo.org/f3/gof3/v3/options"
+)
+
+func newOptions() options.Interface {
+ o := &driver_options.Options{}
+ o.SetName(driver_options.Name)
+ o.SetNewMigrationHTTPClient(func() *http.Client { return &http.Client{} })
+ return o
+}
diff --git a/services/f3/driver/options/name.go b/services/f3/driver/options/name.go
new file mode 100644
index 0000000..9922d11
--- /dev/null
+++ b/services/f3/driver/options/name.go
@@ -0,0 +1,7 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package options
+
+const Name = "internal_forgejo"
diff --git a/services/f3/driver/options/options.go b/services/f3/driver/options/options.go
new file mode 100644
index 0000000..ee9fdd6
--- /dev/null
+++ b/services/f3/driver/options/options.go
@@ -0,0 +1,31 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package options
+
+import (
+ "net/http"
+
+ "code.forgejo.org/f3/gof3/v3/options"
+ "code.forgejo.org/f3/gof3/v3/options/cli"
+ "code.forgejo.org/f3/gof3/v3/options/logger"
+)
+
+type NewMigrationHTTPClientFun func() *http.Client
+
+type Options struct {
+ options.Options
+ logger.OptionsLogger
+ cli.OptionsCLI
+
+ NewMigrationHTTPClient NewMigrationHTTPClientFun
+}
+
+func (o *Options) GetNewMigrationHTTPClient() NewMigrationHTTPClientFun {
+ return o.NewMigrationHTTPClient
+}
+
+func (o *Options) SetNewMigrationHTTPClient(fun NewMigrationHTTPClientFun) {
+ o.NewMigrationHTTPClient = fun
+}
diff --git a/services/f3/driver/organization.go b/services/f3/driver/organization.go
new file mode 100644
index 0000000..76b2400
--- /dev/null
+++ b/services/f3/driver/organization.go
@@ -0,0 +1,112 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ org_model "code.gitea.io/gitea/models/organization"
+ user_model "code.gitea.io/gitea/models/user"
+
+ "code.forgejo.org/f3/gof3/v3/f3"
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+ f3_util "code.forgejo.org/f3/gof3/v3/util"
+)
+
+var _ f3_tree.ForgeDriverInterface = &organization{}
+
+type organization struct {
+ common
+
+ forgejoOrganization *org_model.Organization
+}
+
+func (o *organization) SetNative(organization any) {
+ o.forgejoOrganization = organization.(*org_model.Organization)
+}
+
+func (o *organization) GetNativeID() string {
+ return fmt.Sprintf("%d", o.forgejoOrganization.ID)
+}
+
+func (o *organization) NewFormat() f3.Interface {
+ node := o.GetNode()
+ return node.GetTree().(f3_tree.TreeInterface).NewFormat(node.GetKind())
+}
+
+func (o *organization) ToFormat() f3.Interface {
+ if o.forgejoOrganization == nil {
+ return o.NewFormat()
+ }
+ return &f3.Organization{
+ Common: f3.NewCommon(fmt.Sprintf("%d", o.forgejoOrganization.ID)),
+ Name: o.forgejoOrganization.Name,
+ FullName: o.forgejoOrganization.FullName,
+ }
+}
+
+func (o *organization) FromFormat(content f3.Interface) {
+ organization := content.(*f3.Organization)
+ o.forgejoOrganization = &org_model.Organization{
+ ID: f3_util.ParseInt(organization.GetID()),
+ Name: organization.Name,
+ FullName: organization.FullName,
+ }
+}
+
+func (o *organization) Get(ctx context.Context) bool {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+ id := node.GetID().Int64()
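+	// Organizations are a kind of user, hence the user-not-exist check below.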
+ organization, err := org_model.GetOrgByID(ctx, id)
+ if user_model.IsErrUserNotExist(err) {
+ return false
+ }
+ if err != nil {
+ panic(fmt.Errorf("organization %v %w", id, err))
+ }
+ o.forgejoOrganization = organization
+ return true
+}
+
+func (o *organization) Patch(ctx context.Context) {
+ o.Trace("%d", o.forgejoOrganization.ID)
+ if _, err := db.GetEngine(ctx).ID(o.forgejoOrganization.ID).Cols("full_name").Update(o.forgejoOrganization); err != nil {
+ panic(fmt.Errorf("UpdateOrganizationCols: %v %v", o.forgejoOrganization, err))
+ }
+}
+
+func (o *organization) Put(ctx context.Context) generic.NodeID {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ doer, err := user_model.GetAdminUser(ctx)
+ if err != nil {
+ panic(fmt.Errorf("GetAdminUser %w", err))
+ }
+ err = org_model.CreateOrganization(ctx, o.forgejoOrganization, doer)
+ if err != nil {
+ panic(err)
+ }
+
+ return generic.NewNodeID(o.forgejoOrganization.ID)
+}
+
+func (o *organization) Delete(ctx context.Context) {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ if err := org_model.DeleteOrganization(ctx, o.forgejoOrganization); err != nil {
+ panic(err)
+ }
+}
+
+func newOrganization() generic.NodeDriverInterface {
+ return &organization{}
+}
diff --git a/services/f3/driver/organizations.go b/services/f3/driver/organizations.go
new file mode 100644
index 0000000..98c4c14
--- /dev/null
+++ b/services/f3/driver/organizations.go
@@ -0,0 +1,51 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ org_model "code.gitea.io/gitea/models/organization"
+ user_model "code.gitea.io/gitea/models/user"
+
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+)
+
+type organizations struct {
+ container
+}
+
+func (o *organizations) ListPage(ctx context.Context, page int) generic.ChildrenSlice {
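+	// Organizations are stored in the user table and filtered by type.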
+ sess := db.GetEngine(ctx)
+ if page != 0 {
+ sess = db.SetSessionPagination(sess, &db.ListOptions{Page: page, PageSize: o.getPageSize()})
+ }
+ sess = sess.Select("`user`.*").
+ Where("`type`=?", user_model.UserTypeOrganization)
+ organizations := make([]*org_model.Organization, 0, o.getPageSize())
+
+ if err := sess.Find(&organizations); err != nil {
+ panic(fmt.Errorf("error while listing organizations: %v", err))
+ }
+
+ return f3_tree.ConvertListed(ctx, o.GetNode(), f3_tree.ConvertToAny(organizations...)...)
+}
+
+func (o *organizations) GetIDFromName(ctx context.Context, name string) generic.NodeID {
+ organization, err := org_model.GetOrgByName(ctx, name)
+ if err != nil {
+		panic(fmt.Errorf("GetOrgByName: %v", err))
+ }
+
+ return generic.NewNodeID(organization.ID)
+}
+
+func newOrganizations() generic.NodeDriverInterface {
+ return &organizations{}
+}
diff --git a/services/f3/driver/project.go b/services/f3/driver/project.go
new file mode 100644
index 0000000..c2a2df3
--- /dev/null
+++ b/services/f3/driver/project.go
@@ -0,0 +1,189 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ repo_service "code.gitea.io/gitea/services/repository"
+
+ "code.forgejo.org/f3/gof3/v3/f3"
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+ f3_util "code.forgejo.org/f3/gof3/v3/util"
+)
+
+var _ f3_tree.ForgeDriverInterface = &project{}
+
+type project struct {
+ common
+
+ forgejoProject *repo_model.Repository
+ forked *f3.Reference
+}
+
+func (o *project) SetNative(project any) {
+ o.forgejoProject = project.(*repo_model.Repository)
+}
+
+func (o *project) GetNativeID() string {
+ return fmt.Sprintf("%d", o.forgejoProject.ID)
+}
+
+func (o *project) NewFormat() f3.Interface {
+ node := o.GetNode()
+ return node.GetTree().(f3_tree.TreeInterface).NewFormat(node.GetKind())
+}
+
+func (o *project) setForkedReference(ctx context.Context) {
+ if !o.forgejoProject.IsFork {
+ return
+ }
+
+ if err := o.forgejoProject.GetBaseRepo(ctx); err != nil {
+ panic(fmt.Errorf("GetBaseRepo %v %w", o.forgejoProject, err))
+ }
+ forkParent := o.forgejoProject.BaseRepo
+ if err := forkParent.LoadOwner(ctx); err != nil {
+ panic(fmt.Errorf("LoadOwner %v %w", forkParent, err))
+ }
+ owners := "users"
+ if forkParent.Owner.IsOrganization() {
+ owners = "organizations"
+ }
+
+ o.forked = f3_tree.NewProjectReference(owners, fmt.Sprintf("%d", forkParent.Owner.ID), fmt.Sprintf("%d", forkParent.ID))
+}
+
+func (o *project) ToFormat() f3.Interface {
+ if o.forgejoProject == nil {
+ return o.NewFormat()
+ }
+ return &f3.Project{
+ Common: f3.NewCommon(fmt.Sprintf("%d", o.forgejoProject.ID)),
+ Name: o.forgejoProject.Name,
+ IsPrivate: o.forgejoProject.IsPrivate,
+ IsMirror: o.forgejoProject.IsMirror,
+ Description: o.forgejoProject.Description,
+ DefaultBranch: o.forgejoProject.DefaultBranch,
+ Forked: o.forked,
+ }
+}
+
+func (o *project) FromFormat(content f3.Interface) {
+ project := content.(*f3.Project)
+ o.forgejoProject = &repo_model.Repository{
+ ID: f3_util.ParseInt(project.GetID()),
+ Name: project.Name,
+ IsPrivate: project.IsPrivate,
+ IsMirror: project.IsMirror,
+ Description: project.Description,
+ DefaultBranch: project.DefaultBranch,
+ }
+ if project.Forked != nil {
+ o.forgejoProject.IsFork = true
+ o.forgejoProject.ForkID = project.Forked.GetIDAsInt()
+ }
+ o.forked = project.Forked
+}
+
+func (o *project) Get(ctx context.Context) bool {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+ id := node.GetID().Int64()
+	repo, err := repo_model.GetRepositoryByID(ctx, id)
+ if repo_model.IsErrRepoNotExist(err) {
+ return false
+ }
+ if err != nil {
+ panic(fmt.Errorf("project %v %w", id, err))
+ }
+	o.forgejoProject = repo
+ o.setForkedReference(ctx)
+ return true
+}
+
+func (o *project) Patch(ctx context.Context) {
+ o.Trace("%d", o.forgejoProject.ID)
+ o.forgejoProject.LowerName = strings.ToLower(o.forgejoProject.Name)
+ if err := repo_model.UpdateRepositoryCols(ctx, o.forgejoProject,
+ "description",
+ "name",
+ "lower_name",
+ ); err != nil {
+ panic(fmt.Errorf("UpdateRepositoryCols: %v %v", o.forgejoProject, err))
+ }
+}
+
+func (o *project) Put(ctx context.Context) generic.NodeID {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ ownerID := f3_tree.GetOwnerID(o.GetNode())
+ owner, err := user_model.GetUserByID(ctx, ownerID)
+ if err != nil {
+ panic(fmt.Errorf("GetUserByID %v %w", ownerID, err))
+ }
+ doer, err := user_model.GetAdminUser(ctx)
+ if err != nil {
+ panic(fmt.Errorf("GetAdminUser %w", err))
+ }
+
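+	// A project carrying a fork reference is recreated as a fork of its base repository.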
+ if o.forked == nil {
+ repo, err := repo_service.CreateRepositoryDirectly(ctx, doer, owner, repo_service.CreateRepoOptions{
+ Name: o.forgejoProject.Name,
+ Description: o.forgejoProject.Description,
+ IsPrivate: o.forgejoProject.IsPrivate,
+ DefaultBranch: o.forgejoProject.DefaultBranch,
+ })
+ if err != nil {
+ panic(err)
+ }
+ o.forgejoProject = repo
+ o.Trace("project created %d", o.forgejoProject.ID)
+ } else {
+ if err = o.forgejoProject.GetBaseRepo(ctx); err != nil {
+ panic(fmt.Errorf("GetBaseRepo %v %w", o.forgejoProject, err))
+ }
+ if err = o.forgejoProject.BaseRepo.LoadOwner(ctx); err != nil {
+ panic(fmt.Errorf("LoadOwner %v %w", o.forgejoProject.BaseRepo, err))
+ }
+
+ repo, err := repo_service.ForkRepositoryIfNotExists(ctx, doer, owner, repo_service.ForkRepoOptions{
+ BaseRepo: o.forgejoProject.BaseRepo,
+ Name: o.forgejoProject.Name,
+ Description: o.forgejoProject.Description,
+ })
+ if err != nil {
+ panic(err)
+ }
+ o.forgejoProject = repo
+ o.Trace("project created %d", o.forgejoProject.ID)
+ }
+ return generic.NewNodeID(o.forgejoProject.ID)
+}
+
+func (o *project) Delete(ctx context.Context) {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ doer, err := user_model.GetAdminUser(ctx)
+ if err != nil {
+ panic(fmt.Errorf("GetAdminUser %w", err))
+ }
+
+ if err := repo_service.DeleteRepository(ctx, doer, o.forgejoProject, true); err != nil {
+ panic(err)
+ }
+}
+
+func newProject() generic.NodeDriverInterface {
+ return &project{}
+}
diff --git a/services/f3/driver/projects.go b/services/f3/driver/projects.go
new file mode 100644
index 0000000..a2dabc3
--- /dev/null
+++ b/services/f3/driver/projects.go
@@ -0,0 +1,55 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+)
+
+type projects struct {
+ container
+}
+
+func (o *projects) GetIDFromName(ctx context.Context, name string) generic.NodeID {
+ owner := f3_tree.GetOwnerName(o.GetNode())
+ forgejoProject, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner, name)
+ if repo_model.IsErrRepoNotExist(err) {
+ return generic.NilID
+ }
+
+ if err != nil {
+ panic(fmt.Errorf("error GetRepositoryByOwnerAndName(%s, %s): %v", owner, name, err))
+ }
+
+ return generic.NewNodeID(forgejoProject.ID)
+}
+
+func (o *projects) ListPage(ctx context.Context, page int) generic.ChildrenSlice {
+ pageSize := o.getPageSize()
+
+ owner := f3_tree.GetOwner(o.GetNode())
+
+ forgejoProjects, _, err := repo_model.SearchRepository(ctx, &repo_model.SearchRepoOptions{
+ ListOptions: db.ListOptions{Page: page, PageSize: pageSize},
+ OwnerID: owner.GetID().Int64(),
+ Private: true,
+ })
+ if err != nil {
+ panic(fmt.Errorf("error while listing projects: %v", err))
+ }
+
+ return f3_tree.ConvertListed(ctx, o.GetNode(), f3_tree.ConvertToAny(forgejoProjects...)...)
+}
+
+func newProjects() generic.NodeDriverInterface {
+ return &projects{}
+}
diff --git a/services/f3/driver/pullrequest.go b/services/f3/driver/pullrequest.go
new file mode 100644
index 0000000..466b4bd
--- /dev/null
+++ b/services/f3/driver/pullrequest.go
@@ -0,0 +1,323 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/timeutil"
+ issue_service "code.gitea.io/gitea/services/issue"
+
+ "code.forgejo.org/f3/gof3/v3/f3"
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+ f3_util "code.forgejo.org/f3/gof3/v3/util"
+)
+
+var _ f3_tree.ForgeDriverInterface = &pullRequest{}
+
+type pullRequest struct {
+ common
+
+ forgejoPullRequest *issues_model.Issue
+ headRepository *f3.Reference
+ baseRepository *f3.Reference
+ fetchFunc f3.PullRequestFetchFunc
+}
+
+func (o *pullRequest) SetNative(pullRequest any) {
+ o.forgejoPullRequest = pullRequest.(*issues_model.Issue)
+}
+
+func (o *pullRequest) GetNativeID() string {
+ return fmt.Sprintf("%d", o.forgejoPullRequest.Index)
+}
+
+func (o *pullRequest) NewFormat() f3.Interface {
+ node := o.GetNode()
+ return node.GetTree().(f3_tree.TreeInterface).NewFormat(node.GetKind())
+}
+
+func (o *pullRequest) repositoryToReference(ctx context.Context, repository *repo_model.Repository) *f3.Reference {
+ if repository == nil {
+ panic("unexpected nil repository")
+ }
+ forge := o.getTree().GetRoot().GetChild(generic.NewNodeID(f3_tree.KindForge)).GetDriver().(*forge)
+ owners := forge.getOwnersPath(ctx, fmt.Sprintf("%d", repository.OwnerID))
+ return f3_tree.NewRepositoryReference(owners.String(), repository.OwnerID, repository.ID)
+}
+
+func (o *pullRequest) referenceToRepository(reference *f3.Reference) int64 {
+ var project int64
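+	// The relative reference "../../repository/vcs" denotes the repository of the enclosing project.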
+ if reference.Get() == "../../repository/vcs" {
+ project = f3_tree.GetProjectID(o.GetNode())
+ } else {
+ p := f3_tree.ToPath(generic.PathAbsolute(o.GetNode().GetCurrentPath().String(), reference.Get()))
+ o.Trace("%v %v", o.GetNode().GetCurrentPath().String(), p)
+ _, project = p.OwnerAndProjectID()
+ }
+ return project
+}
+
+func (o *pullRequest) ToFormat() f3.Interface {
+ if o.forgejoPullRequest == nil {
+ return o.NewFormat()
+ }
+
+ var milestone *f3.Reference
+ if o.forgejoPullRequest.Milestone != nil {
+ milestone = f3_tree.NewIssueMilestoneReference(o.forgejoPullRequest.Milestone.ID)
+ }
+
+ var mergedTime *time.Time
+ if o.forgejoPullRequest.PullRequest.HasMerged {
+ mergedTime = o.forgejoPullRequest.PullRequest.MergedUnix.AsTimePtr()
+ }
+
+ var closedTime *time.Time
+ if o.forgejoPullRequest.IsClosed {
+ closedTime = o.forgejoPullRequest.ClosedUnix.AsTimePtr()
+ }
+
+ makePullRequestBranch := func(repo *repo_model.Repository, branch string) f3.PullRequestBranch {
+ r, err := git.OpenRepository(context.Background(), repo.RepoPath())
+ if err != nil {
+ panic(err)
+ }
+ defer r.Close()
+
+ b, err := r.GetBranch(branch)
+ if err != nil {
+ panic(err)
+ }
+
+ c, err := b.GetCommit()
+ if err != nil {
+ panic(err)
+ }
+
+ return f3.PullRequestBranch{
+ Ref: branch,
+ SHA: c.ID.String(),
+ }
+ }
+ if err := o.forgejoPullRequest.PullRequest.LoadHeadRepo(db.DefaultContext); err != nil {
+ panic(err)
+ }
+ head := makePullRequestBranch(o.forgejoPullRequest.PullRequest.HeadRepo, o.forgejoPullRequest.PullRequest.HeadBranch)
+ head.Repository = o.headRepository
+ if err := o.forgejoPullRequest.PullRequest.LoadBaseRepo(db.DefaultContext); err != nil {
+ panic(err)
+ }
+ base := makePullRequestBranch(o.forgejoPullRequest.PullRequest.BaseRepo, o.forgejoPullRequest.PullRequest.BaseBranch)
+ base.Repository = o.baseRepository
+
+ return &f3.PullRequest{
+ Common: f3.NewCommon(o.GetNativeID()),
+ PosterID: f3_tree.NewUserReference(o.forgejoPullRequest.Poster.ID),
+ Title: o.forgejoPullRequest.Title,
+ Content: o.forgejoPullRequest.Content,
+ Milestone: milestone,
+ State: string(o.forgejoPullRequest.State()),
+ IsLocked: o.forgejoPullRequest.IsLocked,
+ Created: o.forgejoPullRequest.CreatedUnix.AsTime(),
+ Updated: o.forgejoPullRequest.UpdatedUnix.AsTime(),
+ Closed: closedTime,
+ Merged: o.forgejoPullRequest.PullRequest.HasMerged,
+ MergedTime: mergedTime,
+ MergeCommitSHA: o.forgejoPullRequest.PullRequest.MergedCommitID,
+ Head: head,
+ Base: base,
+ FetchFunc: o.fetchFunc,
+ }
+}
+
+func (o *pullRequest) FromFormat(content f3.Interface) {
+ pullRequest := content.(*f3.PullRequest)
+ var milestone *issues_model.Milestone
+ if pullRequest.Milestone != nil {
+ milestone = &issues_model.Milestone{
+ ID: pullRequest.Milestone.GetIDAsInt(),
+ }
+ }
+
+ o.headRepository = pullRequest.Head.Repository
+ o.baseRepository = pullRequest.Base.Repository
+ pr := issues_model.PullRequest{
+ HeadBranch: pullRequest.Head.Ref,
+ HeadRepoID: o.referenceToRepository(o.headRepository),
+ BaseBranch: pullRequest.Base.Ref,
+ BaseRepoID: o.referenceToRepository(o.baseRepository),
+
+ MergeBase: pullRequest.Base.SHA,
+ Index: f3_util.ParseInt(pullRequest.GetID()),
+ HasMerged: pullRequest.Merged,
+ }
+
+ o.forgejoPullRequest = &issues_model.Issue{
+ Index: f3_util.ParseInt(pullRequest.GetID()),
+ PosterID: pullRequest.PosterID.GetIDAsInt(),
+ Poster: &user_model.User{
+ ID: pullRequest.PosterID.GetIDAsInt(),
+ },
+ Title: pullRequest.Title,
+ Content: pullRequest.Content,
+ Milestone: milestone,
+ IsClosed: pullRequest.State == f3.PullRequestStateClosed,
+ CreatedUnix: timeutil.TimeStamp(pullRequest.Created.Unix()),
+ UpdatedUnix: timeutil.TimeStamp(pullRequest.Updated.Unix()),
+ IsLocked: pullRequest.IsLocked,
+ PullRequest: &pr,
+ IsPull: true,
+ }
+
+ if pullRequest.Closed != nil {
+ o.forgejoPullRequest.ClosedUnix = timeutil.TimeStamp(pullRequest.Closed.Unix())
+ }
+}
+
+func (o *pullRequest) Get(ctx context.Context) bool {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ project := f3_tree.GetProjectID(o.GetNode())
+ id := node.GetID().Int64()
+
+ issue, err := issues_model.GetIssueByIndex(ctx, project, id)
+ if issues_model.IsErrIssueNotExist(err) {
+ return false
+ }
+ if err != nil {
+ panic(fmt.Errorf("issue %v %w", id, err))
+ }
+ if err := issue.LoadAttributes(ctx); err != nil {
+ panic(err)
+ }
+ if err := issue.PullRequest.LoadHeadRepo(ctx); err != nil {
+ panic(err)
+ }
+ o.headRepository = o.repositoryToReference(ctx, issue.PullRequest.HeadRepo)
+ if err := issue.PullRequest.LoadBaseRepo(ctx); err != nil {
+ panic(err)
+ }
+ o.baseRepository = o.repositoryToReference(ctx, issue.PullRequest.BaseRepo)
+
+ o.forgejoPullRequest = issue
+	o.Trace("ID = %d", o.forgejoPullRequest.ID)
+ return true
+}
+
+func (o *pullRequest) Patch(ctx context.Context) {
+ node := o.GetNode()
+ project := f3_tree.GetProjectID(o.GetNode())
+ id := node.GetID().Int64()
+ o.Trace("repo_id = %d, index = %d", project, id)
+ if _, err := db.GetEngine(ctx).Where("`repo_id` = ? AND `index` = ?", project, id).Cols("name", "content").Update(o.forgejoPullRequest); err != nil {
+ panic(fmt.Errorf("%v %v", o.forgejoPullRequest, err))
+ }
+}
+
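+// GetPullRequestPushRefs returns the refs under which the pull request head is pushed: the F3-specific ref and the standard pull ref.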
+func (o *pullRequest) GetPullRequestPushRefs() []string {
+ return []string{
+ fmt.Sprintf("refs/f3/%s/head", o.GetNativeID()),
+ fmt.Sprintf("refs/pull/%s/head", o.GetNativeID()),
+ }
+}
+
+func (o *pullRequest) GetPullRequestRef() string {
+ return fmt.Sprintf("refs/pull/%s/head", o.GetNativeID())
+}
+
+func (o *pullRequest) Put(ctx context.Context) generic.NodeID {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ o.forgejoPullRequest.RepoID = f3_tree.GetProjectID(o.GetNode())
+
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ panic(err)
+ }
+ defer committer.Close()
+
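+	// Allocate the next per-repository issue index inside the transaction.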
+ idx, err := db.GetNextResourceIndex(ctx, "issue_index", o.forgejoPullRequest.RepoID)
+ if err != nil {
+ panic(fmt.Errorf("generate issue index failed: %w", err))
+ }
+ o.forgejoPullRequest.Index = idx
+
+ sess := db.GetEngine(ctx)
+
+ if _, err = sess.NoAutoTime().Insert(o.forgejoPullRequest); err != nil {
+ panic(err)
+ }
+
+ pr := o.forgejoPullRequest.PullRequest
+ pr.Index = o.forgejoPullRequest.Index
+ pr.IssueID = o.forgejoPullRequest.ID
+ pr.HeadRepoID = o.referenceToRepository(o.headRepository)
+ if pr.HeadRepoID == 0 {
+ panic(fmt.Errorf("HeadRepoID == 0 in %v", pr))
+ }
+ pr.BaseRepoID = o.referenceToRepository(o.baseRepository)
+ if pr.BaseRepoID == 0 {
+ panic(fmt.Errorf("BaseRepoID == 0 in %v", pr))
+ }
+
+ if _, err = sess.NoAutoTime().Insert(pr); err != nil {
+ panic(err)
+ }
+
+ if err = committer.Commit(); err != nil {
+ panic(fmt.Errorf("Commit: %w", err))
+ }
+
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ panic(err)
+ }
+ if err := pr.LoadHeadRepo(ctx); err != nil {
+ panic(err)
+ }
+
+ o.Trace("pullRequest created %d/%d", o.forgejoPullRequest.ID, o.forgejoPullRequest.Index)
+ return generic.NewNodeID(o.forgejoPullRequest.Index)
+}
+
+func (o *pullRequest) Delete(ctx context.Context) {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ owner := f3_tree.GetOwnerName(o.GetNode())
+ project := f3_tree.GetProjectName(o.GetNode())
+ repoPath := repo_model.RepoPath(owner, project)
+ gitRepo, err := git.OpenRepository(ctx, repoPath)
+ if err != nil {
+ panic(err)
+ }
+ defer gitRepo.Close()
+
+ doer, err := user_model.GetAdminUser(ctx)
+ if err != nil {
+ panic(fmt.Errorf("GetAdminUser %w", err))
+ }
+
+ if err := issue_service.DeleteIssue(ctx, doer, gitRepo, o.forgejoPullRequest); err != nil {
+ panic(err)
+ }
+}
+
+func newPullRequest() generic.NodeDriverInterface {
+ return &pullRequest{}
+}
diff --git a/services/f3/driver/pullrequests.go b/services/f3/driver/pullrequests.go
new file mode 100644
index 0000000..e7f2910
--- /dev/null
+++ b/services/f3/driver/pullrequests.go
@@ -0,0 +1,42 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/optional"
+
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+)
+
+type pullRequests struct {
+ container
+}
+
+func (o *pullRequests) ListPage(ctx context.Context, page int) generic.ChildrenSlice {
+ pageSize := o.getPageSize()
+
+ project := f3_tree.GetProjectID(o.GetNode())
+
+ forgejoPullRequests, err := issues_model.Issues(ctx, &issues_model.IssuesOptions{
+ Paginator: &db.ListOptions{Page: page, PageSize: pageSize},
+ RepoIDs: []int64{project},
+ IsPull: optional.Some(true),
+ })
+ if err != nil {
+ panic(fmt.Errorf("error while listing pullRequests: %v", err))
+ }
+
+ return f3_tree.ConvertListed(ctx, o.GetNode(), f3_tree.ConvertToAny(forgejoPullRequests...)...)
+}
+
+func newPullRequests() generic.NodeDriverInterface {
+ return &pullRequests{}
+}
diff --git a/services/f3/driver/reaction.go b/services/f3/driver/reaction.go
new file mode 100644
index 0000000..0dc486c
--- /dev/null
+++ b/services/f3/driver/reaction.go
@@ -0,0 +1,134 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ user_model "code.gitea.io/gitea/models/user"
+
+ "code.forgejo.org/f3/gof3/v3/f3"
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+ f3_util "code.forgejo.org/f3/gof3/v3/util"
+)
+
+var _ f3_tree.ForgeDriverInterface = &reaction{}
+
+type reaction struct {
+ common
+
+ forgejoReaction *issues_model.Reaction
+}
+
+func (o *reaction) SetNative(reaction any) {
+ o.forgejoReaction = reaction.(*issues_model.Reaction)
+}
+
+func (o *reaction) GetNativeID() string {
+ return fmt.Sprintf("%d", o.forgejoReaction.ID)
+}
+
+func (o *reaction) NewFormat() f3.Interface {
+ node := o.GetNode()
+ return node.GetTree().(f3_tree.TreeInterface).NewFormat(node.GetKind())
+}
+
+func (o *reaction) ToFormat() f3.Interface {
+ if o.forgejoReaction == nil {
+ return o.NewFormat()
+ }
+ return &f3.Reaction{
+ Common: f3.NewCommon(fmt.Sprintf("%d", o.forgejoReaction.ID)),
+ UserID: f3_tree.NewUserReference(o.forgejoReaction.User.ID),
+ Content: o.forgejoReaction.Type,
+ }
+}
+
+func (o *reaction) FromFormat(content f3.Interface) {
+ reaction := content.(*f3.Reaction)
+
+ o.forgejoReaction = &issues_model.Reaction{
+ ID: f3_util.ParseInt(reaction.GetID()),
+ UserID: reaction.UserID.GetIDAsInt(),
+ User: &user_model.User{
+ ID: reaction.UserID.GetIDAsInt(),
+ },
+ Type: reaction.Content,
+ }
+}
+
+func (o *reaction) Get(ctx context.Context) bool {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ id := node.GetID().Int64()
+
+ if has, err := db.GetEngine(ctx).Where("ID = ?", id).Get(o.forgejoReaction); err != nil {
+ panic(fmt.Errorf("reaction %v %w", id, err))
+ } else if !has {
+ return false
+ }
+ if _, err := o.forgejoReaction.LoadUser(ctx); err != nil {
+ panic(fmt.Errorf("LoadUser %v %w", *o.forgejoReaction, err))
+ }
+ return true
+}
+
+func (o *reaction) Patch(ctx context.Context) {
+ o.Trace("%d", o.forgejoReaction.ID)
+ if _, err := db.GetEngine(ctx).ID(o.forgejoReaction.ID).Cols("type").Update(o.forgejoReaction); err != nil {
+ panic(fmt.Errorf("UpdateReactionCols: %v %v", o.forgejoReaction, err))
+ }
+}
+
+func (o *reaction) Put(ctx context.Context) generic.NodeID {
+	o.Trace("%v", o.forgejoReaction.User)
+
+ sess := db.GetEngine(ctx)
+
+ reactionable := f3_tree.GetReactionable(o.GetNode())
+ reactionableID := f3_tree.GetReactionableID(o.GetNode())
+
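+	// A reaction is attached either to an issue or pull request (via the issue ID) or to a comment.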
+ switch reactionable.GetKind() {
+ case f3_tree.KindIssue, f3_tree.KindPullRequest:
+ project := f3_tree.GetProjectID(o.GetNode())
+ issue, err := issues_model.GetIssueByIndex(ctx, project, reactionableID)
+ if err != nil {
+ panic(fmt.Errorf("GetIssueByIndex %v %w", reactionableID, err))
+ }
+ o.forgejoReaction.IssueID = issue.ID
+ case f3_tree.KindComment:
+ o.forgejoReaction.CommentID = reactionableID
+ default:
+ panic(fmt.Errorf("unexpected type %v", reactionable.GetKind()))
+ }
+
+	o.Trace("%v", o.forgejoReaction)
+
+ if _, err := sess.Insert(o.forgejoReaction); err != nil {
+ panic(err)
+ }
+ o.Trace("reaction created %d", o.forgejoReaction.ID)
+ return generic.NewNodeID(o.forgejoReaction.ID)
+}
+
+func (o *reaction) Delete(ctx context.Context) {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ sess := db.GetEngine(ctx)
+ if _, err := sess.Delete(o.forgejoReaction); err != nil {
+ panic(err)
+ }
+}
+
+func newReaction() generic.NodeDriverInterface {
+ return &reaction{}
+}
diff --git a/services/f3/driver/reactions.go b/services/f3/driver/reactions.go
new file mode 100644
index 0000000..b7fd5e8
--- /dev/null
+++ b/services/f3/driver/reactions.go
@@ -0,0 +1,59 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+ "xorm.io/builder"
+)
+
+type reactions struct {
+ container
+}
+
+func (o *reactions) ListPage(ctx context.Context, page int) generic.ChildrenSlice {
+ pageSize := o.getPageSize()
+
+ reactionable := f3_tree.GetReactionable(o.GetNode())
+ reactionableID := f3_tree.GetReactionableID(o.GetNode())
+
+ sess := db.GetEngine(ctx)
+ cond := builder.NewCond()
+ switch reactionable.GetKind() {
+ case f3_tree.KindIssue, f3_tree.KindPullRequest:
+ project := f3_tree.GetProjectID(o.GetNode())
+ issue, err := issues_model.GetIssueByIndex(ctx, project, reactionableID)
+ if err != nil {
+ panic(fmt.Errorf("GetIssueByIndex %v %w", reactionableID, err))
+ }
+ cond = cond.And(builder.Eq{"reaction.issue_id": issue.ID})
+ case f3_tree.KindComment:
+ cond = cond.And(builder.Eq{"reaction.comment_id": reactionableID})
+ default:
+ panic(fmt.Errorf("unexpected type %v", reactionable.GetKind()))
+ }
+
+ sess = sess.Where(cond)
+ if page > 0 {
+ sess = db.SetSessionPagination(sess, &db.ListOptions{Page: page, PageSize: pageSize})
+ }
+	reactions := make([]*issues_model.Reaction, 0, pageSize)
+ if err := sess.Find(&reactions); err != nil {
+ panic(fmt.Errorf("error while listing reactions: %v", err))
+ }
+
+ return f3_tree.ConvertListed(ctx, o.GetNode(), f3_tree.ConvertToAny(reactions...)...)
+}
+
+func newReactions() generic.NodeDriverInterface {
+ return &reactions{}
+}
diff --git a/services/f3/driver/release.go b/services/f3/driver/release.go
new file mode 100644
index 0000000..e937f84
--- /dev/null
+++ b/services/f3/driver/release.go
@@ -0,0 +1,161 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/timeutil"
+ release_service "code.gitea.io/gitea/services/release"
+
+ "code.forgejo.org/f3/gof3/v3/f3"
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+ f3_util "code.forgejo.org/f3/gof3/v3/util"
+)
+
+var _ f3_tree.ForgeDriverInterface = &release{}
+
+type release struct {
+ common
+
+ forgejoRelease *repo_model.Release
+}
+
+func (o *release) SetNative(release any) {
+ o.forgejoRelease = release.(*repo_model.Release)
+}
+
+func (o *release) GetNativeID() string {
+ return fmt.Sprintf("%d", o.forgejoRelease.ID)
+}
+
+func (o *release) NewFormat() f3.Interface {
+ node := o.GetNode()
+ return node.GetTree().(f3_tree.TreeInterface).NewFormat(node.GetKind())
+}
+
+func (o *release) ToFormat() f3.Interface {
+ if o.forgejoRelease == nil {
+ return o.NewFormat()
+ }
+ return &f3.Release{
+ Common: f3.NewCommon(fmt.Sprintf("%d", o.forgejoRelease.ID)),
+ TagName: o.forgejoRelease.TagName,
+ TargetCommitish: o.forgejoRelease.Target,
+ Name: o.forgejoRelease.Title,
+ Body: o.forgejoRelease.Note,
+ Draft: o.forgejoRelease.IsDraft,
+ Prerelease: o.forgejoRelease.IsPrerelease,
+ PublisherID: f3_tree.NewUserReference(o.forgejoRelease.Publisher.ID),
+ Created: o.forgejoRelease.CreatedUnix.AsTime(),
+ }
+}
+
+func (o *release) FromFormat(content f3.Interface) {
+ release := content.(*f3.Release)
+
+ o.forgejoRelease = &repo_model.Release{
+ ID: f3_util.ParseInt(release.GetID()),
+ PublisherID: release.PublisherID.GetIDAsInt(),
+ Publisher: &user_model.User{
+ ID: release.PublisherID.GetIDAsInt(),
+ },
+ TagName: release.TagName,
+ LowerTagName: strings.ToLower(release.TagName),
+ Target: release.TargetCommitish,
+ Title: release.Name,
+ Note: release.Body,
+ IsDraft: release.Draft,
+ IsPrerelease: release.Prerelease,
+ IsTag: false,
+ CreatedUnix: timeutil.TimeStamp(release.Created.Unix()),
+ }
+}
+
+func (o *release) Get(ctx context.Context) bool {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ id := node.GetID().Int64()
+
+ release, err := repo_model.GetReleaseByID(ctx, id)
+ if repo_model.IsErrReleaseNotExist(err) {
+ return false
+ }
+ if err != nil {
+ panic(fmt.Errorf("release %v %w", id, err))
+ }
+
+ release.Publisher, err = user_model.GetUserByID(ctx, release.PublisherID)
+ if err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ release.Publisher = user_model.NewGhostUser()
+ } else {
+ panic(err)
+ }
+ }
+
+ o.forgejoRelease = release
+ return true
+}
+
+func (o *release) Patch(ctx context.Context) {
+ o.Trace("%d", o.forgejoRelease.ID)
+ if _, err := db.GetEngine(ctx).ID(o.forgejoRelease.ID).Cols("title", "note").Update(o.forgejoRelease); err != nil {
+ panic(fmt.Errorf("UpdateReleaseCols: %v %v", o.forgejoRelease, err))
+ }
+}
+
+func (o *release) Put(ctx context.Context) generic.NodeID {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ o.forgejoRelease.RepoID = f3_tree.GetProjectID(o.GetNode())
+
+ owner := f3_tree.GetOwnerName(o.GetNode())
+ project := f3_tree.GetProjectName(o.GetNode())
+ repoPath := repo_model.RepoPath(owner, project)
+ gitRepo, err := git.OpenRepository(ctx, repoPath)
+ if err != nil {
+ panic(err)
+ }
+ defer gitRepo.Close()
+ if err := release_service.CreateRelease(gitRepo, o.forgejoRelease, "", nil); err != nil {
+ panic(err)
+ }
+ o.Trace("release created %d", o.forgejoRelease.ID)
+ return generic.NewNodeID(o.forgejoRelease.ID)
+}
+
+func (o *release) Delete(ctx context.Context) {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ project := f3_tree.GetProjectID(o.GetNode())
+ repo, err := repo_model.GetRepositoryByID(ctx, project)
+ if err != nil {
+ panic(err)
+ }
+
+ doer, err := user_model.GetAdminUser(ctx)
+ if err != nil {
+ panic(fmt.Errorf("GetAdminUser %w", err))
+ }
+
+ if err := release_service.DeleteReleaseByID(ctx, repo, o.forgejoRelease, doer, true); err != nil {
+ panic(err)
+ }
+}
+
+func newRelease() generic.NodeDriverInterface {
+ return &release{}
+}
diff --git a/services/f3/driver/releases.go b/services/f3/driver/releases.go
new file mode 100644
index 0000000..3b46bc7
--- /dev/null
+++ b/services/f3/driver/releases.go
@@ -0,0 +1,42 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+)
+
+type releases struct {
+ container
+}
+
+func (o *releases) ListPage(ctx context.Context, page int) generic.ChildrenSlice {
+ pageSize := o.getPageSize()
+
+ project := f3_tree.GetProjectID(o.GetNode())
+
+ forgejoReleases, err := db.Find[repo_model.Release](ctx, repo_model.FindReleasesOptions{
+ ListOptions: db.ListOptions{Page: page, PageSize: pageSize},
+ IncludeDrafts: true,
+ IncludeTags: false,
+ RepoID: project,
+ })
+ if err != nil {
+ panic(fmt.Errorf("error while listing releases: %v", err))
+ }
+
+ return f3_tree.ConvertListed(ctx, o.GetNode(), f3_tree.ConvertToAny(forgejoReleases...)...)
+}
+
+func newReleases() generic.NodeDriverInterface {
+ return &releases{}
+}
diff --git a/services/f3/driver/repositories.go b/services/f3/driver/repositories.go
new file mode 100644
index 0000000..03daf35
--- /dev/null
+++ b/services/f3/driver/repositories.go
@@ -0,0 +1,37 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+
+ "code.forgejo.org/f3/gof3/v3/f3"
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+)
+
+type repositories struct {
+ container
+}
+
+func (o *repositories) ListPage(ctx context.Context, page int) generic.ChildrenSlice {
+ children := generic.NewChildrenSlice(0)
+ if page > 1 {
+ return children
+ }
+
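+	// Every project has a default VCS repository; the wiki repository is listed only when the project has one.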
+ names := []string{f3.RepositoryNameDefault}
+ project := f3_tree.GetProject(o.GetNode()).ToFormat().(*f3.Project)
+ if project.HasWiki {
+ names = append(names, f3.RepositoryNameWiki)
+ }
+
+ return f3_tree.ConvertListed(ctx, o.GetNode(), f3_tree.ConvertToAny(names...)...)
+}
+
+func newRepositories() generic.NodeDriverInterface {
+ return &repositories{}
+}
diff --git a/services/f3/driver/repository.go b/services/f3/driver/repository.go
new file mode 100644
index 0000000..da968b4
--- /dev/null
+++ b/services/f3/driver/repository.go
@@ -0,0 +1,102 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+
+ "code.forgejo.org/f3/gof3/v3/f3"
+ helpers_repository "code.forgejo.org/f3/gof3/v3/forges/helpers/repository"
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+)
+
+var _ f3_tree.ForgeDriverInterface = &repository{}
+
+type repository struct {
+ common
+
+ name string
+ h helpers_repository.Interface
+
+ f *f3.Repository
+}
+
+func (o *repository) SetNative(repository any) {
+ o.name = repository.(string)
+}
+
+func (o *repository) GetNativeID() string {
+ return o.name
+}
+
+func (o *repository) NewFormat() f3.Interface {
+ return &f3.Repository{}
+}
+
+func (o *repository) ToFormat() f3.Interface {
+ return &f3.Repository{
+ Common: f3.NewCommon(o.GetNativeID()),
+ Name: o.GetNativeID(),
+ FetchFunc: o.f.FetchFunc,
+ }
+}
+
+func (o *repository) FromFormat(content f3.Interface) {
+ f := content.Clone().(*f3.Repository)
+ o.f = f
+ o.f.SetID(f.Name)
+ o.name = f.Name
+}
+
+func (o *repository) Get(ctx context.Context) bool {
+ return o.h.Get(ctx)
+}
+
+func (o *repository) Put(ctx context.Context) generic.NodeID {
+ return o.upsert(ctx)
+}
+
+func (o *repository) Patch(ctx context.Context) {
+ o.upsert(ctx)
+}
+
+func (o *repository) upsert(ctx context.Context) generic.NodeID {
+ o.Trace("%s", o.GetNativeID())
+ o.h.Upsert(ctx, o.f)
+ return generic.NewNodeID(o.f.Name)
+}
+
+func (o *repository) SetFetchFunc(fetchFunc func(ctx context.Context, destination string)) {
+ o.f.FetchFunc = fetchFunc
+}
+
+func (o *repository) getURL() string {
+ owner := f3_tree.GetOwnerName(o.GetNode())
+ repoName := f3_tree.GetProjectName(o.GetNode())
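+	// The wiki repository shares the project name, with a ".wiki" suffix.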
+ if o.f.GetID() == f3.RepositoryNameWiki {
+ repoName += ".wiki"
+ }
+ return repo_model.RepoPath(owner, repoName)
+}
+
+func (o *repository) GetRepositoryURL() string {
+ return o.getURL()
+}
+
+func (o *repository) GetRepositoryPushURL() string {
+ return o.getURL()
+}
+
+func newRepository(_ context.Context) generic.NodeDriverInterface {
+ r := &repository{
+ f: &f3.Repository{},
+ }
+ r.h = helpers_repository.NewHelper(r)
+ return r
+}
diff --git a/services/f3/driver/review.go b/services/f3/driver/review.go
new file mode 100644
index 0000000..a3c074b
--- /dev/null
+++ b/services/f3/driver/review.go
@@ -0,0 +1,179 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "code.forgejo.org/f3/gof3/v3/f3"
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+ f3_util "code.forgejo.org/f3/gof3/v3/util"
+)
+
+var _ f3_tree.ForgeDriverInterface = &review{}
+
+type review struct {
+ common
+
+ forgejoReview *issues_model.Review
+}
+
+func (o *review) SetNative(review any) {
+ o.forgejoReview = review.(*issues_model.Review)
+}
+
+func (o *review) GetNativeID() string {
+ return fmt.Sprintf("%d", o.forgejoReview.ID)
+}
+
+func (o *review) NewFormat() f3.Interface {
+ node := o.GetNode()
+ return node.GetTree().(f3_tree.TreeInterface).NewFormat(node.GetKind())
+}
+
+func (o *review) ToFormat() f3.Interface {
+ if o.forgejoReview == nil {
+ return o.NewFormat()
+ }
+
+ review := &f3.Review{
+ Common: f3.NewCommon(o.GetNativeID()),
+ ReviewerID: f3_tree.NewUserReference(o.forgejoReview.ReviewerID),
+ Official: o.forgejoReview.Official,
+ CommitID: o.forgejoReview.CommitID,
+ Content: o.forgejoReview.Content,
+ CreatedAt: o.forgejoReview.CreatedUnix.AsTime(),
+ }
+
+ switch o.forgejoReview.Type {
+ case issues_model.ReviewTypeApprove:
+ review.State = f3.ReviewStateApproved
+ case issues_model.ReviewTypeReject:
+ review.State = f3.ReviewStateChangesRequested
+ case issues_model.ReviewTypeComment:
+ review.State = f3.ReviewStateCommented
+ case issues_model.ReviewTypePending:
+ review.State = f3.ReviewStatePending
+ case issues_model.ReviewTypeRequest:
+ review.State = f3.ReviewStateRequestReview
+ default:
+ review.State = f3.ReviewStateUnknown
+ }
+
+ if o.forgejoReview.Reviewer != nil {
+ review.ReviewerID = f3_tree.NewUserReference(o.forgejoReview.Reviewer.ID)
+ }
+
+ return review
+}
+
+func (o *review) FromFormat(content f3.Interface) {
+ review := content.(*f3.Review)
+
+ o.forgejoReview = &issues_model.Review{
+ ID: f3_util.ParseInt(review.GetID()),
+ ReviewerID: review.ReviewerID.GetIDAsInt(),
+ Reviewer: &user_model.User{
+ ID: review.ReviewerID.GetIDAsInt(),
+ },
+ Official: review.Official,
+ CommitID: review.CommitID,
+ Content: review.Content,
+ CreatedUnix: timeutil.TimeStamp(review.CreatedAt.Unix()),
+ }
+
+ switch review.State {
+ case f3.ReviewStateApproved:
+ o.forgejoReview.Type = issues_model.ReviewTypeApprove
+ case f3.ReviewStateChangesRequested:
+ o.forgejoReview.Type = issues_model.ReviewTypeReject
+ case f3.ReviewStateCommented:
+ o.forgejoReview.Type = issues_model.ReviewTypeComment
+ case f3.ReviewStatePending:
+ o.forgejoReview.Type = issues_model.ReviewTypePending
+ case f3.ReviewStateRequestReview:
+ o.forgejoReview.Type = issues_model.ReviewTypeRequest
+ default:
+ o.forgejoReview.Type = issues_model.ReviewTypeUnknown
+ }
+}
+
+func (o *review) Get(ctx context.Context) bool {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ id := node.GetID().Int64()
+
+ review, err := issues_model.GetReviewByID(ctx, id)
+ if issues_model.IsErrReviewNotExist(err) {
+ return false
+ }
+ if err != nil {
+ panic(fmt.Errorf("review %v %w", id, err))
+ }
+ if err := review.LoadReviewer(ctx); err != nil {
+ panic(fmt.Errorf("LoadReviewer %v %w", *review, err))
+ }
+ o.forgejoReview = review
+ return true
+}
+
+func (o *review) Patch(ctx context.Context) {
+ o.Trace("%d", o.forgejoReview.ID)
+ if _, err := db.GetEngine(ctx).ID(o.forgejoReview.ID).Cols("content").Update(o.forgejoReview); err != nil {
+ panic(fmt.Errorf("UpdateReviewCols: %v %v", o.forgejoReview, err))
+ }
+}
+
+func (o *review) Put(ctx context.Context) generic.NodeID {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ project := f3_tree.GetProjectID(o.GetNode())
+ pullRequest := f3_tree.GetPullRequestID(o.GetNode())
+
+ issue, err := issues_model.GetIssueByIndex(ctx, project, pullRequest)
+ if err != nil {
+ panic(fmt.Errorf("GetIssueByIndex %v", err))
+ }
+ o.forgejoReview.IssueID = issue.ID
+
+ sess := db.GetEngine(ctx)
+
+ if _, err := sess.NoAutoTime().Insert(o.forgejoReview); err != nil {
+ panic(err)
+ }
+ o.Trace("review created %d", o.forgejoReview.ID)
+ return generic.NewNodeID(o.forgejoReview.ID)
+}
+
+func (o *review) Delete(ctx context.Context) {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ project := f3_tree.GetProjectID(o.GetNode())
+ pullRequest := f3_tree.GetPullRequestID(o.GetNode())
+
+ issue, err := issues_model.GetIssueByIndex(ctx, project, pullRequest)
+ if err != nil {
+ panic(fmt.Errorf("GetIssueByIndex %v", err))
+ }
+ o.forgejoReview.IssueID = issue.ID
+
+ if err := issues_model.DeleteReview(ctx, o.forgejoReview); err != nil {
+ panic(err)
+ }
+}
+
+func newReview() generic.NodeDriverInterface {
+ return &review{}
+}
diff --git a/services/f3/driver/reviewcomment.go b/services/f3/driver/reviewcomment.go
new file mode 100644
index 0000000..8e13d86
--- /dev/null
+++ b/services/f3/driver/reviewcomment.go
@@ -0,0 +1,143 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "code.forgejo.org/f3/gof3/v3/f3"
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+ f3_util "code.forgejo.org/f3/gof3/v3/util"
+)
+
+var _ f3_tree.ForgeDriverInterface = &reviewComment{}
+
+type reviewComment struct {
+ common
+
+ forgejoReviewComment *issues_model.Comment
+}
+
+func (o *reviewComment) SetNative(reviewComment any) {
+ o.forgejoReviewComment = reviewComment.(*issues_model.Comment)
+}
+
+func (o *reviewComment) GetNativeID() string {
+ return fmt.Sprintf("%d", o.forgejoReviewComment.ID)
+}
+
+func (o *reviewComment) NewFormat() f3.Interface {
+ node := o.GetNode()
+ return node.GetTree().(f3_tree.TreeInterface).NewFormat(node.GetKind())
+}
+
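+// patch2diff strips the leading patch header from a single-hunk patch, keeping only the hunk that starts at "@@".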
+func patch2diff(patch string) string {
+ split := strings.Split(patch, "\n@@")
+ if len(split) == 2 {
+ return "@@" + split[1]
+ }
+ return patch
+}
+
+func (o *reviewComment) ToFormat() f3.Interface {
+ if o.forgejoReviewComment == nil {
+ return o.NewFormat()
+ }
+
+ return &f3.ReviewComment{
+ Common: f3.NewCommon(o.GetNativeID()),
+ PosterID: f3_tree.NewUserReference(o.forgejoReviewComment.Poster.ID),
+ Content: o.forgejoReviewComment.Content,
+ TreePath: o.forgejoReviewComment.TreePath,
+ DiffHunk: patch2diff(o.forgejoReviewComment.PatchQuoted),
+ Line: int(o.forgejoReviewComment.Line),
+ CommitID: o.forgejoReviewComment.CommitSHA,
+ CreatedAt: o.forgejoReviewComment.CreatedUnix.AsTime(),
+ UpdatedAt: o.forgejoReviewComment.UpdatedUnix.AsTime(),
+ }
+}
+
+func (o *reviewComment) FromFormat(content f3.Interface) {
+ reviewComment := content.(*f3.ReviewComment)
+ o.forgejoReviewComment = &issues_model.Comment{
+ ID: f3_util.ParseInt(reviewComment.GetID()),
+ PosterID: reviewComment.PosterID.GetIDAsInt(),
+ Poster: &user_model.User{
+ ID: reviewComment.PosterID.GetIDAsInt(),
+ },
+ TreePath: reviewComment.TreePath,
+ Content: reviewComment.Content,
+		// A hunk is missing the patch header, but the header is never used,
+		// so do not bother reconstructing it.
+ Patch: reviewComment.DiffHunk,
+ PatchQuoted: reviewComment.DiffHunk,
+ Line: int64(reviewComment.Line),
+ CommitSHA: reviewComment.CommitID,
+ CreatedUnix: timeutil.TimeStamp(reviewComment.CreatedAt.Unix()),
+ UpdatedUnix: timeutil.TimeStamp(reviewComment.UpdatedAt.Unix()),
+ }
+}
+
+func (o *reviewComment) Get(ctx context.Context) bool {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ id := node.GetID().Int64()
+
+ reviewComment, err := issues_model.GetCommentByID(ctx, id)
+ if issues_model.IsErrCommentNotExist(err) {
+ return false
+ }
+ if err != nil {
+ panic(fmt.Errorf("reviewComment %v %w", id, err))
+ }
+ if err := reviewComment.LoadPoster(ctx); err != nil {
+ panic(fmt.Errorf("LoadPoster %v %w", *reviewComment, err))
+ }
+ o.forgejoReviewComment = reviewComment
+ return true
+}
+
+func (o *reviewComment) Patch(ctx context.Context) {
+ o.Trace("%d", o.forgejoReviewComment.ID)
+ if _, err := db.GetEngine(ctx).ID(o.forgejoReviewComment.ID).Cols("content").Update(o.forgejoReviewComment); err != nil {
+ panic(fmt.Errorf("UpdateReviewCommentCols: %v %v", o.forgejoReviewComment, err))
+ }
+}
+
+func (o *reviewComment) Put(ctx context.Context) generic.NodeID {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ sess := db.GetEngine(ctx)
+
+ if _, err := sess.NoAutoTime().Insert(o.forgejoReviewComment); err != nil {
+ panic(err)
+ }
+ o.Trace("reviewComment created %d", o.forgejoReviewComment.ID)
+ return generic.NewNodeID(o.forgejoReviewComment.ID)
+}
+
+func (o *reviewComment) Delete(ctx context.Context) {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ if err := issues_model.DeleteComment(ctx, o.forgejoReviewComment); err != nil {
+ panic(err)
+ }
+}
+
+func newReviewComment() generic.NodeDriverInterface {
+ return &reviewComment{}
+}
diff --git a/services/f3/driver/reviewcomments.go b/services/f3/driver/reviewcomments.go
new file mode 100644
index 0000000..e11aaa4
--- /dev/null
+++ b/services/f3/driver/reviewcomments.go
@@ -0,0 +1,44 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+)
+
+type reviewComments struct {
+ container
+}
+
+func (o *reviewComments) ListPage(ctx context.Context, page int) generic.ChildrenSlice {
+ pageSize := o.getPageSize()
+
+ id := f3_tree.GetReviewID(o.GetNode())
+
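+	// Review comments are code comments (CommentTypeCode) attached to the review.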
+ sess := db.GetEngine(ctx).
+ Table("comment").
+ Where("`review_id` = ? AND `type` = ?", id, issues_model.CommentTypeCode)
+ if page != 0 {
+ sess = db.SetSessionPagination(sess, &db.ListOptions{Page: page, PageSize: pageSize})
+ }
+ forgejoReviewComments := make([]*issues_model.Comment, 0, pageSize)
+ if err := sess.Find(&forgejoReviewComments); err != nil {
+ panic(fmt.Errorf("error while listing reviewComments: %v", err))
+ }
+
+ return f3_tree.ConvertListed(ctx, o.GetNode(), f3_tree.ConvertToAny(forgejoReviewComments...)...)
+}
+
+func newReviewComments() generic.NodeDriverInterface {
+ return &reviewComments{}
+}
diff --git a/services/f3/driver/reviews.go b/services/f3/driver/reviews.go
new file mode 100644
index 0000000..a20d574
--- /dev/null
+++ b/services/f3/driver/reviews.go
@@ -0,0 +1,49 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+)
+
+type reviews struct {
+ container
+}
+
+func (o *reviews) ListPage(ctx context.Context, page int) generic.ChildrenSlice {
+ pageSize := o.getPageSize()
+
+ project := f3_tree.GetProjectID(o.GetNode())
+ pullRequest := f3_tree.GetPullRequestID(o.GetNode())
+
+ issue, err := issues_model.GetIssueByIndex(ctx, project, pullRequest)
+ if err != nil {
+ panic(fmt.Errorf("GetIssueByIndex %v %w", pullRequest, err))
+ }
+
+ sess := db.GetEngine(ctx).
+ Table("review").
+ Where("`issue_id` = ?", issue.ID)
+ if page != 0 {
+ sess = db.SetSessionPagination(sess, &db.ListOptions{Page: page, PageSize: pageSize})
+ }
+ forgejoReviews := make([]*issues_model.Review, 0, pageSize)
+ if err := sess.Find(&forgejoReviews); err != nil {
+ panic(fmt.Errorf("error while listing reviews: %v", err))
+ }
+
+ return f3_tree.ConvertListed(ctx, o.GetNode(), f3_tree.ConvertToAny(forgejoReviews...)...)
+}
+
+func newReviews() generic.NodeDriverInterface {
+ return &reviews{}
+}
diff --git a/services/f3/driver/root.go b/services/f3/driver/root.go
new file mode 100644
index 0000000..0e8a67f
--- /dev/null
+++ b/services/f3/driver/root.go
@@ -0,0 +1,41 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+
+ "code.forgejo.org/f3/gof3/v3/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+)
+
+type root struct {
+ generic.NullDriver
+
+ content f3.Interface
+}
+
+func newRoot(content f3.Interface) generic.NodeDriverInterface {
+ return &root{
+ content: content,
+ }
+}
+
+func (o *root) FromFormat(content f3.Interface) {
+ o.content = content
+}
+
+func (o *root) ToFormat() f3.Interface {
+ return o.content
+}
+
+func (o *root) Get(context.Context) bool { return true }
+
+func (o *root) Put(context.Context) generic.NodeID {
+ return generic.NilID
+}
+
+func (o *root) Patch(context.Context) {
+}
diff --git a/services/f3/driver/tests/init.go b/services/f3/driver/tests/init.go
new file mode 100644
index 0000000..d7bf23a
--- /dev/null
+++ b/services/f3/driver/tests/init.go
@@ -0,0 +1,15 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package tests
+
+import (
+ driver_options "code.gitea.io/gitea/services/f3/driver/options"
+
+ tests_forge "code.forgejo.org/f3/gof3/v3/tree/tests/f3/forge"
+)
+
+func init() {
+ tests_forge.RegisterFactory(driver_options.Name, newForgeTest)
+}
diff --git a/services/f3/driver/tests/new.go b/services/f3/driver/tests/new.go
new file mode 100644
index 0000000..2e3dfc3
--- /dev/null
+++ b/services/f3/driver/tests/new.go
@@ -0,0 +1,39 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package tests
+
+import (
+ "testing"
+
+ driver_options "code.gitea.io/gitea/services/f3/driver/options"
+
+ "code.forgejo.org/f3/gof3/v3/options"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+ forge_test "code.forgejo.org/f3/gof3/v3/tree/tests/f3/forge"
+)
+
+type forgeTest struct {
+ forge_test.Base
+}
+
+func (o *forgeTest) NewOptions(t *testing.T) options.Interface {
+ return newTestOptions(t)
+}
+
+func (o *forgeTest) GetExceptions() []generic.Kind {
+ return []generic.Kind{}
+}
+
+func (o *forgeTest) GetNonTestUsers() []string {
+ return []string{
+ "user1",
+ }
+}
+
+func newForgeTest() forge_test.Interface {
+ t := &forgeTest{}
+ t.SetName(driver_options.Name)
+ return t
+}
diff --git a/services/f3/driver/tests/options.go b/services/f3/driver/tests/options.go
new file mode 100644
index 0000000..adaa1da
--- /dev/null
+++ b/services/f3/driver/tests/options.go
@@ -0,0 +1,21 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package tests
+
+import (
+ "testing"
+
+ forgejo_log "code.gitea.io/gitea/modules/log"
+ driver_options "code.gitea.io/gitea/services/f3/driver/options"
+ "code.gitea.io/gitea/services/f3/util"
+
+ "code.forgejo.org/f3/gof3/v3/options"
+)
+
+func newTestOptions(_ *testing.T) options.Interface {
+ o := options.GetFactory(driver_options.Name)().(*driver_options.Options)
+ o.SetLogger(util.NewF3Logger(nil, forgejo_log.GetLogger(forgejo_log.DEFAULT)))
+ return o
+}
diff --git a/services/f3/driver/topic.go b/services/f3/driver/topic.go
new file mode 100644
index 0000000..16b2eb3
--- /dev/null
+++ b/services/f3/driver/topic.go
@@ -0,0 +1,111 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+
+ "code.forgejo.org/f3/gof3/v3/f3"
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+ f3_util "code.forgejo.org/f3/gof3/v3/util"
+)
+
+var _ f3_tree.ForgeDriverInterface = &topic{}
+
+type topic struct {
+ common
+
+ forgejoTopic *repo_model.Topic
+}
+
+func (o *topic) SetNative(topic any) {
+ o.forgejoTopic = topic.(*repo_model.Topic)
+}
+
+func (o *topic) GetNativeID() string {
+ return fmt.Sprintf("%d", o.forgejoTopic.ID)
+}
+
+func (o *topic) NewFormat() f3.Interface {
+ node := o.GetNode()
+ return node.GetTree().(f3_tree.TreeInterface).NewFormat(node.GetKind())
+}
+
+func (o *topic) ToFormat() f3.Interface {
+ if o.forgejoTopic == nil {
+ return o.NewFormat()
+ }
+
+ return &f3.Topic{
+ Common: f3.NewCommon(o.GetNativeID()),
+ Name: o.forgejoTopic.Name,
+ }
+}
+
+func (o *topic) FromFormat(content f3.Interface) {
+ topic := content.(*f3.Topic)
+ o.forgejoTopic = &repo_model.Topic{
+ ID: f3_util.ParseInt(topic.GetID()),
+ Name: topic.Name,
+ }
+}
+
+func (o *topic) Get(ctx context.Context) bool {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ id := node.GetID().Int64()
+
+ if has, err := db.GetEngine(ctx).Where("ID = ?", id).Get(o.forgejoTopic); err != nil {
+ panic(fmt.Errorf("topic %v %w", id, err))
+ } else if !has {
+ return false
+ }
+
+ return true
+}
+
+func (o *topic) Patch(ctx context.Context) {
+ o.Trace("%d", o.forgejoTopic.ID)
+ if _, err := db.GetEngine(ctx).ID(o.forgejoTopic.ID).Cols("name").Update(o.forgejoTopic); err != nil {
+ panic(fmt.Errorf("UpdateTopicCols: %v %v", o.forgejoTopic, err))
+ }
+}
+
+func (o *topic) Put(ctx context.Context) generic.NodeID {
+ sess := db.GetEngine(ctx)
+
+ if _, err := sess.Insert(o.forgejoTopic); err != nil {
+ panic(err)
+ }
+ o.Trace("topic created %d", o.forgejoTopic.ID)
+ return generic.NewNodeID(o.forgejoTopic.ID)
+}
+
+func (o *topic) Delete(ctx context.Context) {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ sess := db.GetEngine(ctx)
+
+ if _, err := sess.Delete(&repo_model.RepoTopic{
+ TopicID: o.forgejoTopic.ID,
+ }); err != nil {
+ panic(fmt.Errorf("Delete RepoTopic for %v %v", o.forgejoTopic, err))
+ }
+
+ if _, err := sess.Delete(o.forgejoTopic); err != nil {
+ panic(fmt.Errorf("Delete Topic %v %v", o.forgejoTopic, err))
+ }
+}
+
+func newTopic() generic.NodeDriverInterface {
+ return &topic{}
+}
diff --git a/services/f3/driver/topics.go b/services/f3/driver/topics.go
new file mode 100644
index 0000000..2685a47
--- /dev/null
+++ b/services/f3/driver/topics.go
@@ -0,0 +1,41 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+)
+
+type topics struct {
+ container
+}
+
+func (o *topics) ListPage(ctx context.Context, page int) generic.ChildrenSlice {
+ pageSize := o.getPageSize()
+
+ sess := db.GetEngine(ctx)
+ if page != 0 {
+ sess = db.SetSessionPagination(sess, &db.ListOptions{Page: page, PageSize: pageSize})
+ }
+ sess = sess.Select("`topic`.*")
+ topics := make([]*repo_model.Topic, 0, pageSize)
+
+ if err := sess.Find(&topics); err != nil {
+ panic(fmt.Errorf("error while listing topics: %v", err))
+ }
+
+ return f3_tree.ConvertListed(ctx, o.GetNode(), f3_tree.ConvertToAny(topics...)...)
+}
+
+func newTopics() generic.NodeDriverInterface {
+ return &topics{}
+}
diff --git a/services/f3/driver/tree.go b/services/f3/driver/tree.go
new file mode 100644
index 0000000..0302ed7
--- /dev/null
+++ b/services/f3/driver/tree.go
@@ -0,0 +1,104 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+
+ forgejo_options "code.gitea.io/gitea/services/f3/driver/options"
+
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+)
+
+type treeDriver struct {
+ generic.NullTreeDriver
+
+ options *forgejo_options.Options
+}
+
+func (o *treeDriver) Init() {
+ o.NullTreeDriver.Init()
+}
+
+func (o *treeDriver) Factory(ctx context.Context, kind generic.Kind) generic.NodeDriverInterface {
+ switch kind {
+ case f3_tree.KindForge:
+ return newForge()
+ case f3_tree.KindOrganizations:
+ return newOrganizations()
+ case f3_tree.KindOrganization:
+ return newOrganization()
+ case f3_tree.KindUsers:
+ return newUsers()
+ case f3_tree.KindUser:
+ return newUser()
+ case f3_tree.KindProjects:
+ return newProjects()
+ case f3_tree.KindProject:
+ return newProject()
+ case f3_tree.KindIssues:
+ return newIssues()
+ case f3_tree.KindIssue:
+ return newIssue()
+ case f3_tree.KindComments:
+ return newComments()
+ case f3_tree.KindComment:
+ return newComment()
+ case f3_tree.KindAssets:
+ return newAssets()
+ case f3_tree.KindAsset:
+ return newAsset()
+ case f3_tree.KindLabels:
+ return newLabels()
+ case f3_tree.KindLabel:
+ return newLabel()
+ case f3_tree.KindReactions:
+ return newReactions()
+ case f3_tree.KindReaction:
+ return newReaction()
+ case f3_tree.KindReviews:
+ return newReviews()
+ case f3_tree.KindReview:
+ return newReview()
+ case f3_tree.KindReviewComments:
+ return newReviewComments()
+ case f3_tree.KindReviewComment:
+ return newReviewComment()
+ case f3_tree.KindMilestones:
+ return newMilestones()
+ case f3_tree.KindMilestone:
+ return newMilestone()
+ case f3_tree.KindPullRequests:
+ return newPullRequests()
+ case f3_tree.KindPullRequest:
+ return newPullRequest()
+ case f3_tree.KindReleases:
+ return newReleases()
+ case f3_tree.KindRelease:
+ return newRelease()
+ case f3_tree.KindTopics:
+ return newTopics()
+ case f3_tree.KindTopic:
+ return newTopic()
+ case f3_tree.KindRepositories:
+ return newRepositories()
+ case f3_tree.KindRepository:
+ return newRepository(ctx)
+ case generic.KindRoot:
+ return newRoot(o.GetTree().(f3_tree.TreeInterface).NewFormat(kind))
+ default:
+ panic(fmt.Errorf("unexpected kind %s", kind))
+ }
+}
+
+func newTreeDriver(tree generic.TreeInterface, anyOptions any) generic.TreeDriverInterface {
+ driver := &treeDriver{
+ options: anyOptions.(*forgejo_options.Options),
+ }
+ driver.Init()
+ return driver
+}
diff --git a/services/f3/driver/user.go b/services/f3/driver/user.go
new file mode 100644
index 0000000..221b06e
--- /dev/null
+++ b/services/f3/driver/user.go
@@ -0,0 +1,128 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/optional"
+ user_service "code.gitea.io/gitea/services/user"
+
+ "code.forgejo.org/f3/gof3/v3/f3"
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+ f3_util "code.forgejo.org/f3/gof3/v3/util"
+)
+
+var _ f3_tree.ForgeDriverInterface = &user{}
+
+type user struct {
+ common
+
+ forgejoUser *user_model.User
+}
+
+func getSystemUserByName(name string) *user_model.User {
+ switch name {
+ case user_model.GhostUserName:
+ return user_model.NewGhostUser()
+ case user_model.ActionsUserName:
+ return user_model.NewActionsUser()
+ default:
+ return nil
+ }
+}
+
+func (o *user) SetNative(user any) {
+ o.forgejoUser = user.(*user_model.User)
+}
+
+func (o *user) GetNativeID() string {
+ return fmt.Sprintf("%d", o.forgejoUser.ID)
+}
+
+func (o *user) NewFormat() f3.Interface {
+ node := o.GetNode()
+ return node.GetTree().(f3_tree.TreeInterface).NewFormat(node.GetKind())
+}
+
+func (o *user) ToFormat() f3.Interface {
+ if o.forgejoUser == nil {
+ return o.NewFormat()
+ }
+ return &f3.User{
+ Common: f3.NewCommon(fmt.Sprintf("%d", o.forgejoUser.ID)),
+ UserName: o.forgejoUser.Name,
+ Name: o.forgejoUser.FullName,
+ Email: o.forgejoUser.Email,
+ IsAdmin: o.forgejoUser.IsAdmin,
+ Password: o.forgejoUser.Passwd,
+ }
+}
+
+func (o *user) FromFormat(content f3.Interface) {
+ user := content.(*f3.User)
+ o.forgejoUser = &user_model.User{
+ Type: user_model.UserTypeRemoteUser,
+ ID: f3_util.ParseInt(user.GetID()),
+ Name: user.UserName,
+ FullName: user.Name,
+ Email: user.Email,
+ IsAdmin: user.IsAdmin,
+ Passwd: user.Password,
+ }
+}
+
+func (o *user) Get(ctx context.Context) bool {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+ id := node.GetID().Int64()
+ u, err := user_model.GetPossibleUserByID(ctx, id)
+ if user_model.IsErrUserNotExist(err) {
+ return false
+ }
+ if err != nil {
+ panic(fmt.Errorf("user %v %w", id, err))
+ }
+ o.forgejoUser = u
+ return true
+}
+
+func (o *user) Patch(context.Context) {
+}
+
+func (o *user) Put(ctx context.Context) generic.NodeID {
+ if user := getSystemUserByName(o.forgejoUser.Name); user != nil {
+ return generic.NewNodeID(user.ID)
+ }
+
+ o.forgejoUser.LowerName = strings.ToLower(o.forgejoUser.Name)
+ o.Trace("%v", *o.forgejoUser)
+ overwriteDefault := &user_model.CreateUserOverwriteOptions{
+ IsActive: optional.Some(true),
+ }
+ err := user_model.CreateUser(ctx, o.forgejoUser, overwriteDefault)
+ if err != nil {
+ panic(err)
+ }
+
+ return generic.NewNodeID(o.forgejoUser.ID)
+}
+
+func (o *user) Delete(ctx context.Context) {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ if err := user_service.DeleteUser(ctx, o.forgejoUser, true); err != nil {
+ panic(err)
+ }
+}
+
+func newUser() generic.NodeDriverInterface {
+ return &user{}
+}
diff --git a/services/f3/driver/users.go b/services/f3/driver/users.go
new file mode 100644
index 0000000..92ed0bc
--- /dev/null
+++ b/services/f3/driver/users.go
@@ -0,0 +1,48 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+)
+
+type users struct {
+ container
+}
+
+func (o *users) ListPage(ctx context.Context, page int) generic.ChildrenSlice {
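+ // only individual and remote users are listed here; organizations are
+ // traversed through their own tree node (see the Factory in tree.go)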
+ sess := db.GetEngine(ctx).In("type", user_model.UserTypeIndividual, user_model.UserTypeRemoteUser)
+ if page != 0 {
+ sess = db.SetSessionPagination(sess, &db.ListOptions{Page: page, PageSize: o.getPageSize()})
+ }
+ sess = sess.Select("`user`.*")
+ users := make([]*user_model.User, 0, o.getPageSize())
+
+ if err := sess.Find(&users); err != nil {
+ panic(fmt.Errorf("error while listing users: %v", err))
+ }
+
+ return f3_tree.ConvertListed(ctx, o.GetNode(), f3_tree.ConvertToAny(users...)...)
+}
+
+func (o *users) GetIDFromName(ctx context.Context, name string) generic.NodeID {
+ user, err := user_model.GetUserByName(ctx, name)
+ if err != nil {
+ panic(fmt.Errorf("GetUserByName: %v", err))
+ }
+
+ return generic.NewNodeID(user.ID)
+}
+
+func newUsers() generic.NodeDriverInterface {
+ return &users{}
+}
diff --git a/services/f3/util/logger.go b/services/f3/util/logger.go
new file mode 100644
index 0000000..21d8d6b
--- /dev/null
+++ b/services/f3/util/logger.go
@@ -0,0 +1,97 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// SPDX-License-Identifier: MIT
+
+package util
+
+import (
+ "fmt"
+
+ forgejo_log "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/migration"
+
+ "code.forgejo.org/f3/gof3/v3/logger"
+)
+
+type f3Logger struct {
+ m migration.Messenger
+ l forgejo_log.Logger
+}
+
+func (o *f3Logger) Message(message string, args ...any) {
+ if o.m != nil {
+ o.m(message, args...)
+ }
+}
+
+func (o *f3Logger) SetLevel(level logger.Level) {
+}
+
+func forgejoLevelToF3Level(level forgejo_log.Level) logger.Level {
+ switch level {
+ case forgejo_log.TRACE:
+ return logger.Trace
+ case forgejo_log.DEBUG:
+ return logger.Debug
+ case forgejo_log.INFO:
+ return logger.Info
+ case forgejo_log.WARN:
+ return logger.Warn
+ case forgejo_log.ERROR:
+ return logger.Error
+ case forgejo_log.FATAL:
+ return logger.Fatal
+ default:
+ panic(fmt.Errorf("unexpected level %d", level))
+ }
+}
+
+func f3LevelToForgejoLevel(level logger.Level) forgejo_log.Level {
+ switch level {
+ case logger.Trace:
+ return forgejo_log.TRACE
+ case logger.Debug:
+ return forgejo_log.DEBUG
+ case logger.Info:
+ return forgejo_log.INFO
+ case logger.Warn:
+ return forgejo_log.WARN
+ case logger.Error:
+ return forgejo_log.ERROR
+ case logger.Fatal:
+ return forgejo_log.FATAL
+ default:
+ panic(fmt.Errorf("unexpected level %d", level))
+ }
+}
+
+func (o *f3Logger) GetLevel() logger.Level {
+ return forgejoLevelToF3Level(o.l.GetLevel())
+}
+
+func (o *f3Logger) Log(skip int, level logger.Level, format string, args ...any) {
+ o.l.Log(skip+1, f3LevelToForgejoLevel(level), format, args...)
+}
+
+func (o *f3Logger) Trace(message string, args ...any) {
+ o.l.Log(1, forgejo_log.TRACE, message, args...)
+}
+
+func (o *f3Logger) Debug(message string, args ...any) {
+ o.l.Log(1, forgejo_log.DEBUG, message, args...)
+}
+func (o *f3Logger) Info(message string, args ...any) { o.l.Log(1, forgejo_log.INFO, message, args...) }
+func (o *f3Logger) Warn(message string, args ...any) { o.l.Log(1, forgejo_log.WARN, message, args...) }
+func (o *f3Logger) Error(message string, args ...any) {
+ o.l.Log(1, forgejo_log.ERROR, message, args...)
+}
+
+func (o *f3Logger) Fatal(message string, args ...any) {
+ o.l.Log(1, forgejo_log.FATAL, message, args...)
+}
+
+func NewF3Logger(messenger migration.Messenger, logger forgejo_log.Logger) logger.Interface {
+ return &f3Logger{
+ m: messenger,
+ l: logger,
+ }
+}
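+
+// A minimal wiring sketch (illustrative only; a nil messenger simply
+// disables progress messages):
+//
+//	l := NewF3Logger(nil, forgejo_log.GetLogger(forgejo_log.DEFAULT))
+//	l.Info("driver ready")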
diff --git a/services/f3/util/logger_test.go b/services/f3/util/logger_test.go
new file mode 100644
index 0000000..db880aa
--- /dev/null
+++ b/services/f3/util/logger_test.go
@@ -0,0 +1,89 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// SPDX-License-Identifier: MIT
+
+package util
+
+import (
+ "fmt"
+ "testing"
+ "time"
+
+ forgejo_log "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/test"
+
+ "code.forgejo.org/f3/gof3/v3/logger"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestF3UtilMessage(t *testing.T) {
+ expected := "EXPECTED MESSAGE"
+ var actual string
+ logger := NewF3Logger(func(message string, args ...any) {
+ actual = fmt.Sprintf(message, args...)
+ }, nil)
+ logger.Message("EXPECTED %s", "MESSAGE")
+ assert.EqualValues(t, expected, actual)
+}
+
+func TestF3UtilLogger(t *testing.T) {
+ for _, testCase := range []struct {
+ level logger.Level
+ call func(logger.MessageInterface, string, ...any)
+ }{
+ {level: logger.Trace, call: func(logger logger.MessageInterface, message string, args ...any) { logger.Trace(message, args...) }},
+ {level: logger.Debug, call: func(logger logger.MessageInterface, message string, args ...any) { logger.Debug(message, args...) }},
+ {level: logger.Info, call: func(logger logger.MessageInterface, message string, args ...any) { logger.Info(message, args...) }},
+ {level: logger.Warn, call: func(logger logger.MessageInterface, message string, args ...any) { logger.Warn(message, args...) }},
+ {level: logger.Error, call: func(logger logger.MessageInterface, message string, args ...any) { logger.Error(message, args...) }},
+ {level: logger.Fatal, call: func(logger logger.MessageInterface, message string, args ...any) { logger.Fatal(message, args...) }},
+ } {
+ t.Run(testCase.level.String(), func(t *testing.T) {
+ testLoggerCase(t, testCase.level, testCase.call)
+ })
+ }
+}
+
+func testLoggerCase(t *testing.T, level logger.Level, loggerFunc func(logger.MessageInterface, string, ...any)) {
+ lc, cleanup := test.NewLogChecker(forgejo_log.DEFAULT, f3LevelToForgejoLevel(level))
+ defer cleanup()
+ stopMark := "STOP"
+ lc.StopMark(stopMark)
+ filtered := []string{
+ "MESSAGE HERE",
+ }
+ moreVerbose := logger.MoreVerbose(level)
+ if moreVerbose != nil {
+ filtered = append(filtered, "MESSAGE MORE VERBOSE")
+ }
+ lessVerbose := logger.LessVerbose(level)
+ if lessVerbose != nil {
+ filtered = append(filtered, "MESSAGE LESS VERBOSE")
+ }
+ lc.Filter(filtered...)
+
+ logger := NewF3Logger(nil, forgejo_log.GetLogger(forgejo_log.DEFAULT))
+ loggerFunc(logger, "MESSAGE %s", "HERE")
+ if moreVerbose != nil {
+ logger.Log(1, *moreVerbose, "MESSAGE %s", "MORE VERBOSE")
+ }
+ if lessVerbose != nil {
+ logger.Log(1, *lessVerbose, "MESSAGE %s", "LESS VERBOSE")
+ }
+ logger.Fatal(stopMark)
+
+ logFiltered, logStopped := lc.Check(5 * time.Second)
+ assert.True(t, logStopped)
+ i := 0
+ assert.True(t, logFiltered[i], filtered[i])
+ if moreVerbose != nil {
+ i++
+ require.Greater(t, len(logFiltered), i)
+ assert.False(t, logFiltered[i], filtered[i])
+ }
+ if lessVerbose != nil {
+ i++
+ require.Greater(t, len(logFiltered), i)
+ assert.True(t, logFiltered[i], filtered[i])
+ }
+}
diff --git a/services/federation/federation_service.go b/services/federation/federation_service.go
new file mode 100644
index 0000000..4c6f5ca
--- /dev/null
+++ b/services/federation/federation_service.go
@@ -0,0 +1,295 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package federation
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+ "net/url"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models/forgefed"
+ "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/activitypub"
+ "code.gitea.io/gitea/modules/auth/password"
+ fm "code.gitea.io/gitea/modules/forgefed"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/validation"
+
+ "github.com/google/uuid"
+)
+
+// ProcessLikeActivity receives a ForgeLike activity and does the following:
+// Validation of the activity
+// Creation of a (remote) federationHost if not existing
+// Creation of a forgefed Person if not existing
+// Validation of the incoming RepositoryID against the local RepositoryID
+// Star the repo if it wasn't already starred
+// Do some mitigation against out-of-order attacks
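+//
+// For illustration only, a minimal ForgeLike payload of the expected shape
+// (an ActivityPub "Like" carrying actor, object and startTime; the URIs are
+// hypothetical):
+//
+//	{
+//	  "type": "Like",
+//	  "actor": "https://federated.example/api/v1/activitypub/user-id/42",
+//	  "object": "https://local.example/api/v1/activitypub/repository-id/1",
+//	  "startTime": "2024-03-07T00:00:00Z"
+//	}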
+func ProcessLikeActivity(ctx context.Context, form any, repositoryID int64) (int, string, error) {
+ activity := form.(*fm.ForgeLike)
+ if res, err := validation.IsValid(activity); !res {
+ return http.StatusNotAcceptable, "Invalid activity", err
+ }
+ log.Info("Activity validated:%v", activity)
+
+ // parse actorID (person)
+ actorURI := activity.Actor.GetID().String()
+ log.Info("actorURI was: %v", actorURI)
+ federationHost, err := GetFederationHostForURI(ctx, actorURI)
+ if err != nil {
+ return http.StatusInternalServerError, "Wrong FederationHost", err
+ }
+ if !activity.IsNewer(federationHost.LatestActivity) {
+ return http.StatusNotAcceptable, "Activity out of order.", fmt.Errorf("Activity already processed")
+ }
+ actorID, err := fm.NewPersonID(actorURI, string(federationHost.NodeInfo.SoftwareName))
+ if err != nil {
+ return http.StatusNotAcceptable, "Invalid PersonID", err
+ }
+ log.Info("Actor accepted:%v", actorID)
+
+ // parse objectID (repository)
+ objectID, err := fm.NewRepositoryID(activity.Object.GetID().String(), string(forgefed.ForgejoSourceType))
+ if err != nil {
+ return http.StatusNotAcceptable, "Invalid objectId", err
+ }
+ if objectID.ID != fmt.Sprint(repositoryID) {
+ return http.StatusNotAcceptable, "Invalid objectId", err
+ }
+ log.Info("Object accepted:%v", objectID)
+
+ // Check if user already exists
+ user, _, err := user.FindFederatedUser(ctx, actorID.ID, federationHost.ID)
+ if err != nil {
+ return http.StatusInternalServerError, "Searching for user failed", err
+ }
+ if user != nil {
+ log.Info("Found local federatedUser: %v", user)
+ } else {
+ user, _, err = CreateUserFromAP(ctx, actorID, federationHost.ID)
+ if err != nil {
+ return http.StatusInternalServerError, "Error creating federatedUser", err
+ }
+ log.Info("Created federatedUser from ap: %v", user)
+ }
+ log.Info("Got user:%v", user.Name)
+
+ // execute the activity if the repo was not starred already
+ alreadyStarred := repo.IsStaring(ctx, user.ID, repositoryID)
+ if !alreadyStarred {
+ err = repo.StarRepo(ctx, user.ID, repositoryID, true)
+ if err != nil {
+ return http.StatusNotAcceptable, "Error staring", err
+ }
+ }
+ federationHost.LatestActivity = activity.StartTime
+ err = forgefed.UpdateFederationHost(ctx, federationHost)
+ if err != nil {
+ return http.StatusNotAcceptable, "Error updating federatedHost", err
+ }
+
+ return 0, "", nil
+}
+
+func CreateFederationHostFromAP(ctx context.Context, actorID fm.ActorID) (*forgefed.FederationHost, error) {
+ actionsUser := user.NewActionsUser()
+ clientFactory, err := activitypub.GetClientFactory(ctx)
+ if err != nil {
+ return nil, err
+ }
+ client, err := clientFactory.WithKeys(ctx, actionsUser, "no idea where to get key material.")
+ if err != nil {
+ return nil, err
+ }
+ body, err := client.GetBody(actorID.AsWellKnownNodeInfoURI())
+ if err != nil {
+ return nil, err
+ }
+ nodeInfoWellKnown, err := forgefed.NewNodeInfoWellKnown(body)
+ if err != nil {
+ return nil, err
+ }
+ body, err = client.GetBody(nodeInfoWellKnown.Href)
+ if err != nil {
+ return nil, err
+ }
+ nodeInfo, err := forgefed.NewNodeInfo(body)
+ if err != nil {
+ return nil, err
+ }
+ result, err := forgefed.NewFederationHost(nodeInfo, actorID.Host)
+ if err != nil {
+ return nil, err
+ }
+ err = forgefed.CreateFederationHost(ctx, &result)
+ if err != nil {
+ return nil, err
+ }
+ return &result, nil
+}
+
+func GetFederationHostForURI(ctx context.Context, actorURI string) (*forgefed.FederationHost, error) {
+ log.Info("Input was: %v", actorURI)
+ rawActorID, err := fm.NewActorID(actorURI)
+ if err != nil {
+ return nil, err
+ }
+ federationHost, err := forgefed.FindFederationHostByFqdn(ctx, rawActorID.Host)
+ if err != nil {
+ return nil, err
+ }
+ if federationHost == nil {
+ result, err := CreateFederationHostFromAP(ctx, rawActorID)
+ if err != nil {
+ return nil, err
+ }
+ federationHost = result
+ }
+ return federationHost, nil
+}
+
+func CreateUserFromAP(ctx context.Context, personID fm.PersonID, federationHostID int64) (*user.User, *user.FederatedUser, error) {
+ // ToDo: Do we get a publicKeyId from server, repo or owner or repo?
+ actionsUser := user.NewActionsUser()
+ clientFactory, err := activitypub.GetClientFactory(ctx)
+ if err != nil {
+ return nil, nil, err
+ }
+ client, err := clientFactory.WithKeys(ctx, actionsUser, "no idea where to get key material.")
+ if err != nil {
+ return nil, nil, err
+ }
+
+ body, err := client.GetBody(personID.AsURI())
+ if err != nil {
+ return nil, nil, err
+ }
+
+ person := fm.ForgePerson{}
+ err = person.UnmarshalJSON(body)
+ if err != nil {
+ return nil, nil, err
+ }
+ if res, err := validation.IsValid(person); !res {
+ return nil, nil, err
+ }
+ log.Info("Fetched valid person:%q", person)
+
+ localFqdn, err := url.ParseRequestURI(setting.AppURL)
+ if err != nil {
+ return nil, nil, err
+ }
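+ // synthesize a unique placeholder address at the local hostname;
+ // federated users have no local mailbox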
+ email := fmt.Sprintf("f%v@%v", uuid.New().String(), localFqdn.Hostname())
+ loginName := personID.AsLoginName()
+ name := fmt.Sprintf("%v%v", person.PreferredUsername.String(), personID.HostSuffix())
+ fullName := person.Name.String()
+ if len(person.Name) == 0 {
+ fullName = name
+ }
+ password, err := password.Generate(32)
+ if err != nil {
+ return nil, nil, err
+ }
+ newUser := user.User{
+ LowerName: strings.ToLower(name),
+ Name: name,
+ FullName: fullName,
+ Email: email,
+ EmailNotificationsPreference: "disabled",
+ Passwd: password,
+ MustChangePassword: false,
+ LoginName: loginName,
+ Type: user.UserTypeRemoteUser,
+ IsAdmin: false,
+ NormalizedFederatedURI: personID.AsURI(),
+ }
+ federatedUser := user.FederatedUser{
+ ExternalID: personID.ID,
+ FederationHostID: federationHostID,
+ }
+ err = user.CreateFederatedUser(ctx, &newUser, &federatedUser)
+ if err != nil {
+ return nil, nil, err
+ }
+ log.Info("Created federatedUser:%q", federatedUser)
+
+ return &newUser, &federatedUser, nil
+}
+
+// Create or update a list of FollowingRepo structs
+func StoreFollowingRepoList(ctx context.Context, localRepoID int64, followingRepoList []string) (int, string, error) {
+ followingRepos := make([]*repo.FollowingRepo, 0, len(followingRepoList))
+ for _, uri := range followingRepoList {
+ federationHost, err := GetFederationHostForURI(ctx, uri)
+ if err != nil {
+ return http.StatusInternalServerError, "Wrong FederationHost", err
+ }
+ followingRepoID, err := fm.NewRepositoryID(uri, string(federationHost.NodeInfo.SoftwareName))
+ if err != nil {
+ return http.StatusNotAcceptable, "Invalid federated repo", err
+ }
+ followingRepo, err := repo.NewFollowingRepo(localRepoID, followingRepoID.ID, federationHost.ID, uri)
+ if err != nil {
+ return http.StatusNotAcceptable, "Invalid federated repo", err
+ }
+ followingRepos = append(followingRepos, &followingRepo)
+ }
+
+ if err := repo.StoreFollowingRepos(ctx, localRepoID, followingRepos); err != nil {
+ return 0, "", err
+ }
+
+ return 0, "", nil
+}
+
+func DeleteFollowingRepos(ctx context.Context, localRepoID int64) error {
+ return repo.StoreFollowingRepos(ctx, localRepoID, []*repo.FollowingRepo{})
+}
+
+func SendLikeActivities(ctx context.Context, doer user.User, repoID int64) error {
+ followingRepos, err := repo.FindFollowingReposByRepoID(ctx, repoID)
+ log.Info("Federated Repos is: %v", followingRepos)
+ if err != nil {
+ return err
+ }
+
+ likeActivityList := make([]fm.ForgeLike, 0)
+ for _, followingRepo := range followingRepos {
+ log.Info("Found following repo: %v", followingRepo)
+ target := followingRepo.URI
+ likeActivity, err := fm.NewForgeLike(doer.APActorID(), target, time.Now())
+ if err != nil {
+ return err
+ }
+ likeActivityList = append(likeActivityList, likeActivity)
+ }
+
+ apclientFactory, err := activitypub.GetClientFactory(ctx)
+ if err != nil {
+ return err
+ }
+ apclient, err := apclientFactory.WithKeys(ctx, &doer, doer.APActorID())
+ if err != nil {
+ return err
+ }
+ for i, activity := range likeActivityList {
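+ // stagger StartTime by one second per activity so receivers that enforce
+ // ordering (see the IsNewer check in ProcessLikeActivity) treat each Like
+ // as newer than the previous one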
+ activity.StartTime = activity.StartTime.Add(time.Duration(i) * time.Second)
+ json, err := activity.MarshalJSON()
+ if err != nil {
+ return err
+ }
+
+ _, err = apclient.Post(json, fmt.Sprintf("%v/inbox/", activity.Object))
+ if err != nil {
+ log.Error("error %v while sending activity: %q", err, activity)
+ }
+ }
+
+ return nil
+}
diff --git a/services/feed/action.go b/services/feed/action.go
new file mode 100644
index 0000000..83daaa1
--- /dev/null
+++ b/services/feed/action.go
@@ -0,0 +1,458 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package feed
+
+import (
+ "context"
+ "fmt"
+ "path"
+ "strings"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/util"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+type actionNotifier struct {
+ notify_service.NullNotifier
+}
+
+var _ notify_service.Notifier = &actionNotifier{}
+
+func Init() error {
+ notify_service.RegisterNotifier(NewNotifier())
+
+ return nil
+}
+
+// NewNotifier creates a new actionNotifier
+func NewNotifier() notify_service.Notifier {
+ return &actionNotifier{}
+}
+
+func (a *actionNotifier) NewIssue(ctx context.Context, issue *issues_model.Issue, mentions []*user_model.User) {
+ if err := issue.LoadPoster(ctx); err != nil {
+ log.Error("issue.LoadPoster: %v", err)
+ return
+ }
+ if err := issue.LoadRepo(ctx); err != nil {
+ log.Error("issue.LoadRepo: %v", err)
+ return
+ }
+ repo := issue.Repo
+
+ if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ ActUserID: issue.Poster.ID,
+ ActUser: issue.Poster,
+ OpType: activities_model.ActionCreateIssue,
+ Content: fmt.Sprintf("%d|%s", issue.Index, issue.Title),
+ RepoID: repo.ID,
+ Repo: repo,
+ IsPrivate: repo.IsPrivate,
+ }); err != nil {
+ log.Error("NotifyWatchers: %v", err)
+ }
+}
+
+// IssueChangeStatus notifies the closing or reopening of an issue to notifiers
+func (a *actionNotifier) IssueChangeStatus(ctx context.Context, doer *user_model.User, commitID string, issue *issues_model.Issue, actionComment *issues_model.Comment, closeOrReopen bool) {
+ // Compose the comment action; it could be a plain comment, or close or reopen an issue/pull request.
+ // This object will be used to notify watchers at the end of the function.
+ act := &activities_model.Action{
+ ActUserID: doer.ID,
+ ActUser: doer,
+ Content: fmt.Sprintf("%d|%s", issue.Index, ""),
+ RepoID: issue.Repo.ID,
+ Repo: issue.Repo,
+ Comment: actionComment,
+ CommentID: actionComment.ID,
+ IsPrivate: issue.Repo.IsPrivate,
+ }
+ // Check comment type.
+ if closeOrReopen {
+ act.OpType = activities_model.ActionCloseIssue
+ if issue.IsPull {
+ act.OpType = activities_model.ActionClosePullRequest
+ }
+ } else {
+ act.OpType = activities_model.ActionReopenIssue
+ if issue.IsPull {
+ act.OpType = activities_model.ActionReopenPullRequest
+ }
+ }
+
+ // Notify watchers for whatever action comes in; actions without a type are ignored.
+ if err := activities_model.NotifyWatchers(ctx, act); err != nil {
+ log.Error("NotifyWatchers: %v", err)
+ }
+}
+
+// CreateIssueComment notifies a new comment on an issue to notifiers
+func (a *actionNotifier) CreateIssueComment(ctx context.Context, doer *user_model.User, repo *repo_model.Repository,
+ issue *issues_model.Issue, comment *issues_model.Comment, mentions []*user_model.User,
+) {
+ act := &activities_model.Action{
+ ActUserID: doer.ID,
+ ActUser: doer,
+ RepoID: issue.Repo.ID,
+ Repo: issue.Repo,
+ Comment: comment,
+ CommentID: comment.ID,
+ IsPrivate: issue.Repo.IsPrivate,
+ }
+
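+ // keep at most the first 200 bytes of the comment content for the feed entry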
+ truncatedContent, truncatedRight := util.SplitStringAtByteN(comment.Content, 200)
+ if truncatedRight != "" {
+ // if the content is in a Latin-family language, remove the last, possibly broken, word.
+ lastSpaceIdx := strings.LastIndex(truncatedContent, " ")
+ if lastSpaceIdx != -1 && (len(truncatedContent)-lastSpaceIdx < 15) {
+ truncatedContent = truncatedContent[:lastSpaceIdx] + "…"
+ }
+ }
+ act.Content = fmt.Sprintf("%d|%s", issue.Index, truncatedContent)
+
+ if issue.IsPull {
+ act.OpType = activities_model.ActionCommentPull
+ } else {
+ act.OpType = activities_model.ActionCommentIssue
+ }
+
+ // Notify watchers for whatever action comes in; actions without a type are ignored.
+ if err := activities_model.NotifyWatchers(ctx, act); err != nil {
+ log.Error("NotifyWatchers: %v", err)
+ }
+}
+
+func (a *actionNotifier) NewPullRequest(ctx context.Context, pull *issues_model.PullRequest, mentions []*user_model.User) {
+ if err := pull.LoadIssue(ctx); err != nil {
+ log.Error("pull.LoadIssue: %v", err)
+ return
+ }
+ if err := pull.Issue.LoadRepo(ctx); err != nil {
+ log.Error("pull.Issue.LoadRepo: %v", err)
+ return
+ }
+ if err := pull.Issue.LoadPoster(ctx); err != nil {
+ log.Error("pull.Issue.LoadPoster: %v", err)
+ return
+ }
+
+ if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ ActUserID: pull.Issue.Poster.ID,
+ ActUser: pull.Issue.Poster,
+ OpType: activities_model.ActionCreatePullRequest,
+ Content: fmt.Sprintf("%d|%s", pull.Issue.Index, pull.Issue.Title),
+ RepoID: pull.Issue.Repo.ID,
+ Repo: pull.Issue.Repo,
+ IsPrivate: pull.Issue.Repo.IsPrivate,
+ }); err != nil {
+ log.Error("NotifyWatchers: %v", err)
+ }
+}
+
+func (a *actionNotifier) RenameRepository(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, oldRepoName string) {
+ if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ ActUserID: doer.ID,
+ ActUser: doer,
+ OpType: activities_model.ActionRenameRepo,
+ RepoID: repo.ID,
+ Repo: repo,
+ IsPrivate: repo.IsPrivate,
+ Content: oldRepoName,
+ }); err != nil {
+ log.Error("NotifyWatchers: %v", err)
+ }
+}
+
+func (a *actionNotifier) TransferRepository(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, oldOwnerName string) {
+ if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ ActUserID: doer.ID,
+ ActUser: doer,
+ OpType: activities_model.ActionTransferRepo,
+ RepoID: repo.ID,
+ Repo: repo,
+ IsPrivate: repo.IsPrivate,
+ Content: path.Join(oldOwnerName, repo.Name),
+ }); err != nil {
+ log.Error("NotifyWatchers: %v", err)
+ }
+}
+
+func (a *actionNotifier) CreateRepository(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository) {
+ if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ ActUserID: doer.ID,
+ ActUser: doer,
+ OpType: activities_model.ActionCreateRepo,
+ RepoID: repo.ID,
+ Repo: repo,
+ IsPrivate: repo.IsPrivate,
+ }); err != nil {
+ log.Error("notify watchers '%d/%d': %v", doer.ID, repo.ID, err)
+ }
+}
+
+func (a *actionNotifier) ForkRepository(ctx context.Context, doer *user_model.User, oldRepo, repo *repo_model.Repository) {
+ if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ ActUserID: doer.ID,
+ ActUser: doer,
+ OpType: activities_model.ActionCreateRepo,
+ RepoID: repo.ID,
+ Repo: repo,
+ IsPrivate: repo.IsPrivate,
+ }); err != nil {
+ log.Error("notify watchers '%d/%d': %v", doer.ID, repo.ID, err)
+ }
+}
+
+func (a *actionNotifier) PullRequestReview(ctx context.Context, pr *issues_model.PullRequest, review *issues_model.Review, comment *issues_model.Comment, mentions []*user_model.User) {
+ if err := review.LoadReviewer(ctx); err != nil {
+ log.Error("LoadReviewer '%d/%d': %v", review.ID, review.ReviewerID, err)
+ return
+ }
+ if err := review.LoadCodeComments(ctx); err != nil {
+ log.Error("LoadCodeComments '%d/%d': %v", review.Reviewer.ID, review.ID, err)
+ return
+ }
+
+ actions := make([]*activities_model.Action, 0, 10)
+ for _, lines := range review.CodeComments {
+ for _, comments := range lines {
+ for _, comm := range comments {
+ actions = append(actions, &activities_model.Action{
+ ActUserID: review.Reviewer.ID,
+ ActUser: review.Reviewer,
+ Content: fmt.Sprintf("%d|%s", review.Issue.Index, strings.Split(comm.Content, "\n")[0]),
+ OpType: activities_model.ActionCommentPull,
+ RepoID: review.Issue.RepoID,
+ Repo: review.Issue.Repo,
+ IsPrivate: review.Issue.Repo.IsPrivate,
+ Comment: comm,
+ CommentID: comm.ID,
+ })
+ }
+ }
+ }
+
+ if review.Type != issues_model.ReviewTypeComment || strings.TrimSpace(comment.Content) != "" {
+ action := &activities_model.Action{
+ ActUserID: review.Reviewer.ID,
+ ActUser: review.Reviewer,
+ Content: fmt.Sprintf("%d|%s", review.Issue.Index, strings.Split(comment.Content, "\n")[0]),
+ RepoID: review.Issue.RepoID,
+ Repo: review.Issue.Repo,
+ IsPrivate: review.Issue.Repo.IsPrivate,
+ Comment: comment,
+ CommentID: comment.ID,
+ }
+
+ switch review.Type {
+ case issues_model.ReviewTypeApprove:
+ action.OpType = activities_model.ActionApprovePullRequest
+ case issues_model.ReviewTypeReject:
+ action.OpType = activities_model.ActionRejectPullRequest
+ default:
+ action.OpType = activities_model.ActionCommentPull
+ }
+
+ actions = append(actions, action)
+ }
+
+ if err := activities_model.NotifyWatchersActions(ctx, actions); err != nil {
+ log.Error("notify watchers '%d/%d': %v", review.Reviewer.ID, review.Issue.RepoID, err)
+ }
+}
+
+func (*actionNotifier) MergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
+ if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ ActUserID: doer.ID,
+ ActUser: doer,
+ OpType: activities_model.ActionMergePullRequest,
+ Content: fmt.Sprintf("%d|%s", pr.Issue.Index, pr.Issue.Title),
+ RepoID: pr.Issue.Repo.ID,
+ Repo: pr.Issue.Repo,
+ IsPrivate: pr.Issue.Repo.IsPrivate,
+ }); err != nil {
+ log.Error("NotifyWatchers [%d]: %v", pr.ID, err)
+ }
+}
+
+func (*actionNotifier) AutoMergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
+ if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ ActUserID: doer.ID,
+ ActUser: doer,
+ OpType: activities_model.ActionAutoMergePullRequest,
+ Content: fmt.Sprintf("%d|%s", pr.Issue.Index, pr.Issue.Title),
+ RepoID: pr.Issue.Repo.ID,
+ Repo: pr.Issue.Repo,
+ IsPrivate: pr.Issue.Repo.IsPrivate,
+ }); err != nil {
+ log.Error("NotifyWatchers [%d]: %v", pr.ID, err)
+ }
+}
+
+func (*actionNotifier) PullReviewDismiss(ctx context.Context, doer *user_model.User, review *issues_model.Review, comment *issues_model.Comment) {
+ reviewerName := review.Reviewer.Name
+ if len(review.OriginalAuthor) > 0 {
+ reviewerName = review.OriginalAuthor
+ }
+ if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ ActUserID: doer.ID,
+ ActUser: doer,
+ OpType: activities_model.ActionPullReviewDismissed,
+ Content: fmt.Sprintf("%d|%s|%s", review.Issue.Index, reviewerName, comment.Content),
+ RepoID: review.Issue.Repo.ID,
+ Repo: review.Issue.Repo,
+ IsPrivate: review.Issue.Repo.IsPrivate,
+ CommentID: comment.ID,
+ Comment: comment,
+ }); err != nil {
+ log.Error("NotifyWatchers [%d]: %v", review.Issue.ID, err)
+ }
+}
+
+func (a *actionNotifier) PushCommits(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, opts *repository.PushUpdateOptions, commits *repository.PushCommits) {
+ data, err := json.Marshal(commits)
+ if err != nil {
+ log.Error("Marshal: %v", err)
+ return
+ }
+
+ opType := activities_model.ActionCommitRepo
+
+ // Check whether it is a tag or branch push.
+ if opts.RefFullName.IsTag() {
+ opType = activities_model.ActionPushTag
+ if opts.IsDelRef() {
+ opType = activities_model.ActionDeleteTag
+ }
+ } else if opts.IsDelRef() {
+ opType = activities_model.ActionDeleteBranch
+ }
+
+ if err = activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ ActUserID: pusher.ID,
+ ActUser: pusher,
+ OpType: opType,
+ Content: string(data),
+ RepoID: repo.ID,
+ Repo: repo,
+ RefName: opts.RefFullName.String(),
+ IsPrivate: repo.IsPrivate,
+ }); err != nil {
+ log.Error("NotifyWatchers: %v", err)
+ }
+}
+
+func (a *actionNotifier) CreateRef(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, refFullName git.RefName, refID string) {
+ opType := activities_model.ActionCommitRepo
+ if refFullName.IsTag() {
+ // the same action was already sent in `PushCommits`, so skip it.
+ return
+ }
+ if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ ActUserID: doer.ID,
+ ActUser: doer,
+ OpType: opType,
+ RepoID: repo.ID,
+ Repo: repo,
+ IsPrivate: repo.IsPrivate,
+ RefName: refFullName.String(),
+ }); err != nil {
+ log.Error("NotifyWatchers: %v", err)
+ }
+}
+
+func (a *actionNotifier) DeleteRef(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, refFullName git.RefName) {
+ opType := activities_model.ActionDeleteBranch
+ if refFullName.IsTag() {
+ // the same action was already sent in `PushCommits`, so skip it.
+ return
+ }
+ if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ ActUserID: doer.ID,
+ ActUser: doer,
+ OpType: opType,
+ RepoID: repo.ID,
+ Repo: repo,
+ IsPrivate: repo.IsPrivate,
+ RefName: refFullName.String(),
+ }); err != nil {
+ log.Error("NotifyWatchers: %v", err)
+ }
+}
+
+func (a *actionNotifier) SyncPushCommits(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, opts *repository.PushUpdateOptions, commits *repository.PushCommits) {
+ data, err := json.Marshal(commits)
+ if err != nil {
+ log.Error("json.Marshal: %v", err)
+ return
+ }
+
+ if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ ActUserID: repo.OwnerID,
+ ActUser: repo.MustOwner(ctx),
+ OpType: activities_model.ActionMirrorSyncPush,
+ RepoID: repo.ID,
+ Repo: repo,
+ IsPrivate: repo.IsPrivate,
+ RefName: opts.RefFullName.String(),
+ Content: string(data),
+ }); err != nil {
+ log.Error("NotifyWatchers: %v", err)
+ }
+}
+
+func (a *actionNotifier) SyncCreateRef(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, refFullName git.RefName, refID string) {
+ if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ ActUserID: repo.OwnerID,
+ ActUser: repo.MustOwner(ctx),
+ OpType: activities_model.ActionMirrorSyncCreate,
+ RepoID: repo.ID,
+ Repo: repo,
+ IsPrivate: repo.IsPrivate,
+ RefName: refFullName.String(),
+ }); err != nil {
+ log.Error("NotifyWatchers: %v", err)
+ }
+}
+
+func (a *actionNotifier) SyncDeleteRef(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, refFullName git.RefName) {
+ if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ ActUserID: repo.OwnerID,
+ ActUser: repo.MustOwner(ctx),
+ OpType: activities_model.ActionMirrorSyncDelete,
+ RepoID: repo.ID,
+ Repo: repo,
+ IsPrivate: repo.IsPrivate,
+ RefName: refFullName.String(),
+ }); err != nil {
+ log.Error("NotifyWatchers: %v", err)
+ }
+}
+
+func (a *actionNotifier) NewRelease(ctx context.Context, rel *repo_model.Release) {
+ if err := rel.LoadAttributes(ctx); err != nil {
+ log.Error("LoadAttributes: %v", err)
+ return
+ }
+ if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ ActUserID: rel.PublisherID,
+ ActUser: rel.Publisher,
+ OpType: activities_model.ActionPublishRelease,
+ RepoID: rel.RepoID,
+ Repo: rel.Repo,
+ IsPrivate: rel.Repo.IsPrivate,
+ Content: rel.Title,
+ RefName: rel.TagName, // FIXME: use a full ref name?
+ }); err != nil {
+ log.Error("NotifyWatchers: %v", err)
+ }
+}
diff --git a/services/feed/action_test.go b/services/feed/action_test.go
new file mode 100644
index 0000000..404d89c
--- /dev/null
+++ b/services/feed/action_test.go
@@ -0,0 +1,52 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package feed
+
+import (
+ "strings"
+ "testing"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+
+ _ "code.gitea.io/gitea/models/actions"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
+
+func TestRenameRepoAction(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerID: user.ID})
+ repo.Owner = user
+
+ oldRepoName := repo.Name
+ const newRepoName = "newRepoName"
+ repo.Name = newRepoName
+ repo.LowerName = strings.ToLower(newRepoName)
+
+ actionBean := &activities_model.Action{
+ OpType: activities_model.ActionRenameRepo,
+ ActUserID: user.ID,
+ ActUser: user,
+ RepoID: repo.ID,
+ Repo: repo,
+ IsPrivate: repo.IsPrivate,
+ Content: oldRepoName,
+ }
+ unittest.AssertNotExistsBean(t, actionBean)
+
+ NewNotifier().RenameRepository(db.DefaultContext, user, repo, oldRepoName)
+
+ unittest.AssertExistsAndLoadBean(t, actionBean)
+ unittest.CheckConsistencyFor(t, &activities_model.Action{})
+}
diff --git a/services/forgejo/main_test.go b/services/forgejo/main_test.go
new file mode 100644
index 0000000..e88b7d0
--- /dev/null
+++ b/services/forgejo/main_test.go
@@ -0,0 +1,17 @@
+// SPDX-License-Identifier: MIT
+
+package forgejo
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+
+ _ "code.gitea.io/gitea/models"
+ _ "code.gitea.io/gitea/models/actions"
+ _ "code.gitea.io/gitea/models/activities"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
diff --git a/services/forgejo/sanity.go b/services/forgejo/sanity.go
new file mode 100644
index 0000000..5e817d6
--- /dev/null
+++ b/services/forgejo/sanity.go
@@ -0,0 +1,26 @@
+// SPDX-License-Identifier: MIT
+
+package forgejo
+
+import (
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+)
+
+var (
+ ForgejoV6DatabaseVersion = int64(261) // must be updated once v6 / Gitea v1.21 is out
+ ForgejoV5DatabaseVersion = int64(260)
+ ForgejoV4DatabaseVersion = int64(244)
+)
+
+var logFatal = log.Fatal
+
+func fatal(err error) error {
+ logFatal("%v", err)
+ return err
+}
+
+func PreMigrationSanityChecks(e db.Engine, dbVersion int64, cfg setting.ConfigProvider) error {
+ return v1TOv5_0_1Included(e, dbVersion, cfg)
+}
diff --git a/services/forgejo/sanity_test.go b/services/forgejo/sanity_test.go
new file mode 100644
index 0000000..657f7e2
--- /dev/null
+++ b/services/forgejo/sanity_test.go
@@ -0,0 +1,31 @@
+// SPDX-License-Identifier: MIT
+
+package forgejo
+
+import (
+ "os"
+ "path/filepath"
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestForgejo_PreMigrationSanityChecks(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ ctx := db.DefaultContext
+ e := db.GetEngine(ctx)
+
+ require.NoError(t, PreMigrationSanityChecks(e, ForgejoV4DatabaseVersion, configFixture(t, "")))
+}
+
+func configFixture(t *testing.T, content string) setting.ConfigProvider {
+ config := filepath.Join(t.TempDir(), "app.ini")
+ require.NoError(t, os.WriteFile(config, []byte(content), 0o777))
+ cfg, err := setting.NewConfigProviderFromFile(config)
+ require.NoError(t, err)
+ return cfg
+}
diff --git a/services/forgejo/sanity_v1TOv5_0_1Included.go b/services/forgejo/sanity_v1TOv5_0_1Included.go
new file mode 100644
index 0000000..49de636
--- /dev/null
+++ b/services/forgejo/sanity_v1TOv5_0_1Included.go
@@ -0,0 +1,91 @@
+// SPDX-License-Identifier: MIT
+
+package forgejo
+
+import (
+ "fmt"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/forgejo/semver"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/hashicorp/go-version"
+)
+
+var v1TOv5_0_1IncludedStorageSections = []struct {
+ section string
+ storageSection string
+}{
+ {"attachment", "storage.attachments"},
+ {"lfs", "storage.lfs"},
+ {"avatar", "storage.avatars"},
+ {"repo-avatar", "storage.repo-avatars"},
+ {"repo-archive", "storage.repo-archive"},
+ {"packages", "storage.packages"},
+ // the actions sections are not included here because they were experimental at the time
+}
+
+func v1TOv5_0_1Included(e db.Engine, dbVersion int64, cfg setting.ConfigProvider) error {
+ //
+ // When upgrading from Forgejo > v5 or Gitea > v1.20, no sanity check is necessary
+ //
+ if dbVersion > ForgejoV5DatabaseVersion {
+ return nil
+ }
+
+ //
+ // When upgrading from a Forgejo point version >= v5.0.1, no sanity
+ // check is necessary
+ //
+ // When upgrading from a Gitea >= v1.20, the sanity checks will
+ // always be done. They are necessary for Gitea [v1.20.0..v1.20.2]
+ // but not for [v1.20.3..], and there is no way to know which point
+ // release was running prior to the upgrade. This may require the
+ // Gitea admin to update their app.ini even when it is not strictly
+ // necessary, but doing so has no other consequence.
+ //
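+ // For instance, a configuration that would trip the checks below
+ // (hypothetical paths):
+ //
+ //   [attachment]
+ //   PATH = /data/attachments
+ //   [storage.attachments]
+ //   PATH = /other/attachments
+ //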
+ previousServerVersion, err := semver.GetVersionWithEngine(e)
+ if err != nil {
+ return err
+ }
+ upper, err := version.NewVersion("v5.0.1")
+ if err != nil {
+ return err
+ }
+
+ if previousServerVersion.GreaterThan(upper) {
+ return nil
+ }
+
+ //
+ // Sanity checks
+ //
+
+ originalCfg, err := cfg.PrepareSaving()
+ if err != nil {
+ return err
+ }
+
+ messages := make([]string, 0, 10)
+ for _, c := range v1TOv5_0_1IncludedStorageSections {
+ section, _ := originalCfg.GetSection(c.section)
+ if section == nil {
+ continue
+ }
+ storageSection, _ := originalCfg.GetSection(c.storageSection)
+ if storageSection == nil {
+ continue
+ }
+ messages = append(messages, fmt.Sprintf("[%s] and [%s] may conflict with each other", c.section, c.storageSection))
+ }
+
+ if originalCfg.Section("storage").HasKey("PATH") {
+ messages = append(messages, "[storage].PATH is set and may create storage issues")
+ }
+
+ if len(messages) > 0 {
+ return fatal(fmt.Errorf("%s\nThese issues need to be manually fixed in the app.ini file at %s. Please read https://forgejo.org/2023-08-release-v1-20-3-0/ for instructions", strings.Join(messages, "\n"), cfg.GetFile()))
+ }
+ return nil
+}
diff --git a/services/forgejo/sanity_v1TOv5_0_1Included_test.go b/services/forgejo/sanity_v1TOv5_0_1Included_test.go
new file mode 100644
index 0000000..56618eb
--- /dev/null
+++ b/services/forgejo/sanity_v1TOv5_0_1Included_test.go
@@ -0,0 +1,115 @@
+// SPDX-License-Identifier: MIT
+
+package forgejo
+
+import (
+ "fmt"
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/forgejo/semver"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/log"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestForgejo_v1TOv5_0_1Included(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ logFatal = func(string, ...any) {}
+ defer func() {
+ logFatal = log.Fatal
+ }()
+
+ configWithStoragePath := `
+[storage]
+PATH = /something
+`
+ verifyForgejoV1TOv5_0_1Included(t, configWithStoragePath, "[storage].PATH is set")
+
+ for _, c := range v1TOv5_0_1IncludedStorageSections {
+ config := fmt.Sprintf("[%s]\n[%s]\n", c.section, c.storageSection)
+ verifyForgejoV1TOv5_0_1Included(t, config, fmt.Sprintf("[%s] and [%s]", c.section, c.storageSection))
+ }
+}
+
+func verifyForgejoV1TOv5_0_1Included(t *testing.T, config, message string) {
+ ctx := db.DefaultContext
+ e := db.GetEngine(ctx)
+
+ for _, testCase := range []struct {
+ name string
+ dbVersion int64
+ semver string
+ config string
+ }{
+ {
+ name: "5.0.0 with no " + message,
+ dbVersion: ForgejoV5DatabaseVersion,
+ semver: "5.0.0+0-gitea-1.20.1",
+ config: "",
+ },
+ {
+ name: "5.0.1 with no " + message,
+ dbVersion: ForgejoV5DatabaseVersion,
+ semver: "5.0.1+0-gitea-1.20.2",
+ config: "",
+ },
+ {
+ name: "5.0.2 with " + message,
+ dbVersion: ForgejoV5DatabaseVersion,
+ semver: "5.0.2+0-gitea-1.20.3",
+ config: config,
+ },
+ {
+ name: "6.0.0 with " + message,
+ dbVersion: ForgejoV6DatabaseVersion,
+ semver: "6.0.0+0-gitea-1.21.0",
+ config: config,
+ },
+ } {
+ cfg := configFixture(t, testCase.config)
+ semver.SetVersionString(ctx, testCase.semver)
+ require.NoError(t, v1TOv5_0_1Included(e, testCase.dbVersion, cfg))
+ }
+
+ for _, testCase := range []struct {
+ name string
+ dbVersion int64
+ semver string
+ config string
+ }{
+ {
+ name: "5.0.0 with " + message,
+ dbVersion: ForgejoV5DatabaseVersion,
+ semver: "5.0.0+0-gitea-1.20.1",
+ config: config,
+ },
+ {
+ name: "5.0.1 with " + message,
+ dbVersion: ForgejoV5DatabaseVersion,
+ semver: "5.0.1+0-gitea-1.20.2",
+ config: config,
+ },
+ {
+ //
+ // When upgrading from
+ //
+ // Forgejo >= 5.0.1+0-gitea-1.20.2
+ // Gitea > v1.21
+ //
+ // The version that the server was running prior to the upgrade
+ // is not available.
+ //
+ name: semver.DefaultVersionString + " with " + message,
+ dbVersion: ForgejoV4DatabaseVersion,
+ semver: semver.DefaultVersionString,
+ config: config,
+ },
+ } {
+ cfg := configFixture(t, testCase.config)
+ semver.SetVersionString(ctx, testCase.semver)
+ require.ErrorContains(t, v1TOv5_0_1Included(e, testCase.dbVersion, cfg), message)
+ }
+}
diff --git a/services/forms/admin.go b/services/forms/admin.go
new file mode 100644
index 0000000..7d46904
--- /dev/null
+++ b/services/forms/admin.go
@@ -0,0 +1,74 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package forms
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/web/middleware"
+ "code.gitea.io/gitea/services/context"
+
+ "gitea.com/go-chi/binding"
+)
+
+// AdminCreateUserForm form for admin to create user
+type AdminCreateUserForm struct {
+ LoginType string `binding:"Required"`
+ LoginName string
+ UserName string `binding:"Required;Username;MaxSize(40)"`
+ Email string `binding:"Required;Email;MaxSize(254)"`
+ Password string `binding:"MaxSize(255)"`
+ SendNotify bool
+ MustChangePassword bool
+ Visibility structs.VisibleType
+}
+
+// Validate validates form fields
+func (f *AdminCreateUserForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// AdminEditUserForm form for admin to edit user
+type AdminEditUserForm struct {
+ LoginType string `binding:"Required"`
+ UserName string `binding:"Username;MaxSize(40)"`
+ LoginName string
+ FullName string `binding:"MaxSize(100)"`
+ Email string `binding:"Required;Email;MaxSize(254)"`
+ Password string `binding:"MaxSize(255)"`
+ Website string `binding:"ValidUrl;MaxSize(255)"`
+ Location string `binding:"MaxSize(50)"`
+ Language string `binding:"MaxSize(5)"`
+ Pronouns string `binding:"MaxSize(50)"`
+ MaxRepoCreation int
+ Active bool
+ Admin bool
+ Restricted bool
+ AllowGitHook bool
+ AllowImportLocal bool
+ AllowCreateOrganization bool
+ ProhibitLogin bool
+ Reset2FA bool `form:"reset_2fa"`
+ Visibility structs.VisibleType
+}
+
+// Validate validates form fields
+func (f *AdminEditUserForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// AdminDashboardForm form for admin dashboard operations
+type AdminDashboardForm struct {
+	Op   string `binding:"Required"`
+ From string
+}
+
+// Validate validates form fields
+func (f *AdminDashboardForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
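
All of these forms follow the same contract: the router binds the incoming request onto the struct, the binding tags produce binding.Errors, and the form's Validate hook post-processes them for rendering. A minimal sketch of that flow, assuming gitea.com/go-chi/binding exposes Bind(req, obj) the way Gitea's modules/web router consumes it; the struct here is a trimmed, hypothetical stand-in for AdminCreateUserForm, with explicit form tags so the sketch does not rely on any name-mapping defaults:

package main

import (
	"fmt"
	"net/http"
	"net/url"
	"strings"

	"gitea.com/go-chi/binding"
)

// A trimmed stand-in for AdminCreateUserForm, just enough to drive the tags.
type createUserForm struct {
	UserName string `form:"user_name" binding:"Required;MaxSize(40)"`
	Email    string `form:"email" binding:"Required;Email;MaxSize(254)"`
}

func main() {
	body := url.Values{"user_name": {"jane"}, "email": {"not-an-email"}}
	req, _ := http.NewRequest(http.MethodPost, "/admin/users/new",
		strings.NewReader(body.Encode()))
	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")

	var form createUserForm
	errs := binding.Bind(req, &form) // binding.Errors; API as used by Gitea's router
	for _, e := range errs {
		// Each error carries the offending field(s) and a classification
		// such as "RequiredError"; the real forms feed these into
		// middleware.Validate for localized rendering.
		fmt.Println(e.FieldNames, e.Classification)
	}
}
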
diff --git a/services/forms/auth_form.go b/services/forms/auth_form.go
new file mode 100644
index 0000000..a3eca94
--- /dev/null
+++ b/services/forms/auth_form.go
@@ -0,0 +1,92 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package forms
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/modules/web/middleware"
+ "code.gitea.io/gitea/services/context"
+
+ "gitea.com/go-chi/binding"
+)
+
+// AuthenticationForm form for authentication
+type AuthenticationForm struct {
+ ID int64
+ Type int `binding:"Range(2,7)"`
+ Name string `binding:"Required;MaxSize(30)"`
+ Host string
+ Port int
+ BindDN string
+ BindPassword string
+ UserBase string
+ UserDN string
+ AttributeUsername string
+ AttributeName string
+ AttributeSurname string
+ DefaultDomainName string
+ AttributeMail string
+ AttributeSSHPublicKey string
+ AttributeAvatar string
+ AttributesInBind bool
+ UsePagedSearch bool
+ SearchPageSize int
+ Filter string
+ AdminFilter string
+ GroupsEnabled bool
+ GroupDN string
+ GroupFilter string
+ GroupMemberUID string
+ UserUID string
+ RestrictedFilter string
+ AllowDeactivateAll bool
+ IsActive bool
+ IsSyncEnabled bool
+ SMTPAuth string
+ SMTPHost string
+ SMTPPort int
+ AllowedDomains string
+ SecurityProtocol int `binding:"Range(0,2)"`
+ TLS bool
+ SkipVerify bool
+ HeloHostname string
+ DisableHelo bool
+ ForceSMTPS bool
+ PAMServiceName string
+ PAMEmailDomain string
+ Oauth2Provider string
+ Oauth2Key string
+ Oauth2Secret string
+ OpenIDConnectAutoDiscoveryURL string
+ Oauth2UseCustomURL bool
+ Oauth2TokenURL string
+ Oauth2AuthURL string
+ Oauth2ProfileURL string
+ Oauth2EmailURL string
+ Oauth2IconURL string
+ Oauth2Tenant string
+ Oauth2Scopes string
+ Oauth2RequiredClaimName string
+ Oauth2RequiredClaimValue string
+ Oauth2GroupClaimName string
+ Oauth2AdminGroup string
+ Oauth2RestrictedGroup string
+ Oauth2GroupTeamMap string `binding:"ValidGroupTeamMap"`
+ Oauth2GroupTeamMapRemoval bool
+ SkipLocalTwoFA bool
+ SSPIAutoCreateUsers bool
+ SSPIAutoActivateUsers bool
+ SSPIStripDomainNames bool
+ SSPISeparatorReplacement string `binding:"AlphaDashDot;MaxSize(5)"`
+ SSPIDefaultLanguage string
+ GroupTeamMap string `binding:"ValidGroupTeamMap"`
+ GroupTeamMapRemoval bool
+}
+
+// Validate validates fields
+func (f *AuthenticationForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
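
The Range(2,7) rule on Type pins AuthenticationForm to the configurable login source kinds. For reference, a sketch of the enum it presumably mirrors; the values below are assumed from the auth source type constants in models/auth and should be verified against the tree before being relied on:

package forms_doc

// Assumed mirror of the auth source type enum in models/auth; only values
// 2 through 7 are creatable through AuthenticationForm, hence Range(2,7).
const (
	NoType = iota // 0 - none
	Plain         // 1 - local accounts, not managed here
	LDAP          // 2 - LDAP via BindDN
	SMTP          // 3
	PAM           // 4
	DLDAP         // 5 - LDAP via simple auth
	OAuth2        // 6
	SSPI          // 7
)
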
diff --git a/services/forms/org.go b/services/forms/org.go
new file mode 100644
index 0000000..db182f7
--- /dev/null
+++ b/services/forms/org.go
@@ -0,0 +1,76 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package forms
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/web/middleware"
+ "code.gitea.io/gitea/services/context"
+
+ "gitea.com/go-chi/binding"
+)
+
+// ________ .__ __ .__
+// \_____ \_______ _________ ____ |__|____________ _/ |_|__| ____ ____
+// / | \_ __ \/ ___\__ \ / \| \___ /\__ \\ __\ |/ _ \ / \
+// / | \ | \/ /_/ > __ \| | \ |/ / / __ \| | | ( <_> ) | \
+// \_______ /__| \___ (____ /___| /__/_____ \(____ /__| |__|\____/|___| /
+// \/ /_____/ \/ \/ \/ \/ \/
+
+// CreateOrgForm form for creating organization
+type CreateOrgForm struct {
+ OrgName string `binding:"Required;Username;MaxSize(40)" locale:"org.org_name_holder"`
+ Visibility structs.VisibleType
+ RepoAdminChangeTeamAccess bool
+}
+
+// Validate validates the fields
+func (f *CreateOrgForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// UpdateOrgSettingForm form for updating organization settings
+type UpdateOrgSettingForm struct {
+ Name string `binding:"Required;Username;MaxSize(40)" locale:"org.org_name_holder"`
+ FullName string `binding:"MaxSize(100)"`
+ Email string `binding:"MaxSize(255)"`
+ Description string `binding:"MaxSize(255)"`
+ Website string `binding:"ValidUrl;MaxSize(255)"`
+ Location string `binding:"MaxSize(50)"`
+ Visibility structs.VisibleType
+ MaxRepoCreation int
+ RepoAdminChangeTeamAccess bool
+}
+
+// Validate validates the fields
+func (f *UpdateOrgSettingForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// ___________
+// \__ ___/___ _____ _____
+// | |_/ __ \\__ \ / \
+// | |\ ___/ / __ \| Y Y \
+// |____| \___ >____ /__|_| /
+// \/ \/ \/
+
+// CreateTeamForm form for creating team
+type CreateTeamForm struct {
+ TeamName string `binding:"Required;AlphaDashDot;MaxSize(255)"`
+ Description string `binding:"MaxSize(255)"`
+ Permission string
+ RepoAccess string
+ CanCreateOrgRepo bool
+}
+
+// Validate validates the fields
+func (f *CreateTeamForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
diff --git a/services/forms/package_form.go b/services/forms/package_form.go
new file mode 100644
index 0000000..9b6f907
--- /dev/null
+++ b/services/forms/package_form.go
@@ -0,0 +1,30 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package forms
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/modules/web/middleware"
+ "code.gitea.io/gitea/services/context"
+
+ "gitea.com/go-chi/binding"
+)
+
+type PackageCleanupRuleForm struct {
+ ID int64
+ Enabled bool
+ Type string `binding:"Required;In(alpine,arch,cargo,chef,composer,conan,conda,container,cran,debian,generic,go,helm,maven,npm,nuget,pub,pypi,rpm,rubygems,swift,vagrant)"`
+ KeepCount int `binding:"In(0,1,5,10,25,50,100)"`
+ KeepPattern string `binding:"RegexPattern"`
+ RemoveDays int `binding:"In(0,7,14,30,60,90,180)"`
+ RemovePattern string `binding:"RegexPattern"`
+ MatchFullName bool
+ Action string `binding:"Required;In(save,remove)"`
+}
+
+func (f *PackageCleanupRuleForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
diff --git a/services/forms/repo_branch_form.go b/services/forms/repo_branch_form.go
new file mode 100644
index 0000000..42e6c85
--- /dev/null
+++ b/services/forms/repo_branch_form.go
@@ -0,0 +1,38 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package forms
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/modules/web/middleware"
+ "code.gitea.io/gitea/services/context"
+
+ "gitea.com/go-chi/binding"
+)
+
+// NewBranchForm form for creating a new branch
+type NewBranchForm struct {
+ NewBranchName string `binding:"Required;MaxSize(100);GitRefName"`
+ CurrentPath string
+ CreateTag bool
+}
+
+// Validate validates the fields
+func (f *NewBranchForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// RenameBranchForm form for renaming a branch
+type RenameBranchForm struct {
+ From string `binding:"Required;MaxSize(100);GitRefName"`
+ To string `binding:"Required;MaxSize(100);GitRefName"`
+}
+
+// Validate validates the fields
+func (f *RenameBranchForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
diff --git a/services/forms/repo_form.go b/services/forms/repo_form.go
new file mode 100644
index 0000000..c3d9c3e
--- /dev/null
+++ b/services/forms/repo_form.go
@@ -0,0 +1,751 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package forms
+
+import (
+ "fmt"
+ "net/http"
+ "net/url"
+ "regexp"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ issues_model "code.gitea.io/gitea/models/issues"
+ project_model "code.gitea.io/gitea/models/project"
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/web/middleware"
+ "code.gitea.io/gitea/services/context"
+
+ "gitea.com/go-chi/binding"
+)
+
+// CreateRepoForm form for creating repository
+type CreateRepoForm struct {
+ UID int64 `binding:"Required"`
+ RepoName string `binding:"Required;AlphaDashDot;MaxSize(100)"`
+ Private bool
+ Description string `binding:"MaxSize(2048)"`
+ DefaultBranch string `binding:"GitRefName;MaxSize(100)"`
+ AutoInit bool
+ Gitignores string
+ IssueLabels string
+ License string
+ Readme string
+ Template bool
+
+ RepoTemplate int64
+ GitContent bool
+ Topics bool
+ GitHooks bool
+ Webhooks bool
+ Avatar bool
+ Labels bool
+ ProtectedBranch bool
+
+ ForkSingleBranch string
+ ObjectFormatName string
+}
+
+// Validate validates the fields
+func (f *CreateRepoForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// MigrateRepoForm form for migrating repository
+// this is used to interact with the web UI
+type MigrateRepoForm struct {
+ // required: true
+ CloneAddr string `json:"clone_addr" binding:"Required"`
+ Service structs.GitServiceType `json:"service"`
+ AuthUsername string `json:"auth_username"`
+ AuthPassword string `json:"auth_password"`
+ AuthToken string `json:"auth_token"`
+ // required: true
+ UID int64 `json:"uid" binding:"Required"`
+ // required: true
+ RepoName string `json:"repo_name" binding:"Required;AlphaDashDot;MaxSize(100)"`
+ Mirror bool `json:"mirror"`
+ LFS bool `json:"lfs"`
+ LFSEndpoint string `json:"lfs_endpoint"`
+ Private bool `json:"private"`
+ Description string `json:"description" binding:"MaxSize(2048)"`
+ Wiki bool `json:"wiki"`
+ Milestones bool `json:"milestones"`
+ Labels bool `json:"labels"`
+ Issues bool `json:"issues"`
+ PullRequests bool `json:"pull_requests"`
+ Releases bool `json:"releases"`
+ MirrorInterval string `json:"mirror_interval"`
+}
+
+// Validate validates the fields
+func (f *MigrateRepoForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// scpRegex matches the SCP-like addresses used by Git to access repositories over SSH.
+var scpRegex = regexp.MustCompile(`^([a-zA-Z0-9_]+)@([a-zA-Z0-9._-]+):(.*)$`)
+
+// ParseRemoteAddr checks if the given remote address is valid
+// and returns a composed URL with the supplied username and password embedded.
+func ParseRemoteAddr(remoteAddr, authUsername, authPassword string) (string, error) {
+ remoteAddr = strings.TrimSpace(remoteAddr)
+ // Remote address can be HTTP/HTTPS/Git URL or local path.
+ if strings.HasPrefix(remoteAddr, "http://") ||
+ strings.HasPrefix(remoteAddr, "https://") ||
+ strings.HasPrefix(remoteAddr, "git://") {
+ u, err := url.Parse(remoteAddr)
+ if err != nil {
+ return "", &models.ErrInvalidCloneAddr{IsURLError: true, Host: remoteAddr}
+ }
+ if len(authUsername)+len(authPassword) > 0 {
+ u.User = url.UserPassword(authUsername, authPassword)
+ }
+ return u.String(), nil
+ }
+
+	// Detect SCP-like remote addresses and convert them to ssh:// URLs.
+ if m := scpRegex.FindStringSubmatch(remoteAddr); m != nil {
+ // Match SCP-like syntax and convert it to a URL.
+ // Eg, "git@forgejo.org:user/repo" becomes
+ // "ssh://git@forgejo.org/user/repo".
+ return fmt.Sprintf("ssh://%s@%s/%s", url.User(m[1]), m[2], m[3]), nil
+ }
+
+ return remoteAddr, nil
+}
+
+// RepoSettingForm form for changing repository settings
+type RepoSettingForm struct {
+ RepoName string `binding:"Required;AlphaDashDot;MaxSize(100)"`
+ Description string `binding:"MaxSize(2048)"`
+ Website string `binding:"ValidUrl;MaxSize(1024)"`
+ FollowingRepos string
+ Interval string
+ MirrorAddress string
+ MirrorUsername string
+ MirrorPassword string
+ LFS bool `form:"mirror_lfs"`
+ LFSEndpoint string `form:"mirror_lfs_endpoint"`
+ PushMirrorID string
+ PushMirrorAddress string
+ PushMirrorUsername string
+ PushMirrorPassword string
+ PushMirrorSyncOnCommit bool
+ PushMirrorInterval string
+ PushMirrorUseSSH bool
+ Private bool
+ Template bool
+ EnablePrune bool
+
+ // Advanced settings
+ IsArchived bool
+
+ // Signing Settings
+ TrustModel string
+
+ // Admin settings
+ EnableHealthCheck bool
+ RequestReindexType string
+}
+
+// Validate validates the fields
+func (f *RepoSettingForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// RepoUnitSettingForm form for changing repository unit settings
+type RepoUnitSettingForm struct {
+ EnableCode bool
+ EnableWiki bool
+ GloballyWriteableWiki bool
+ EnableExternalWiki bool
+ ExternalWikiURL string
+ EnableIssues bool
+ EnableExternalTracker bool
+ ExternalTrackerURL string
+ TrackerURLFormat string
+ TrackerIssueStyle string
+ ExternalTrackerRegexpPattern string
+ EnableCloseIssuesViaCommitInAnyBranch bool
+ EnableProjects bool
+ EnableReleases bool
+ EnablePackages bool
+ EnablePulls bool
+ EnableActions bool
+ PullsIgnoreWhitespace bool
+ PullsAllowMerge bool
+ PullsAllowRebase bool
+ PullsAllowRebaseMerge bool
+ PullsAllowSquash bool
+ PullsAllowFastForwardOnly bool
+ PullsAllowManualMerge bool
+ PullsDefaultMergeStyle string
+ EnableAutodetectManualMerge bool
+ PullsAllowRebaseUpdate bool
+ DefaultDeleteBranchAfterMerge bool
+ DefaultAllowMaintainerEdit bool
+ EnableTimetracker bool
+ AllowOnlyContributorsToTrackTime bool
+ EnableIssueDependencies bool
+}
+
+// Validate validates the fields
+func (f *RepoUnitSettingForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// __________ .__
+// \______ \____________ ____ ____ | |__
+// | | _/\_ __ \__ \ / \_/ ___\| | \
+// | | \ | | \// __ \| | \ \___| Y \
+// |______ / |__| (____ /___| /\___ >___| /
+// \/ \/ \/ \/ \/
+
+// ProtectBranchForm form for changing protected branch settings
+type ProtectBranchForm struct {
+ RuleName string `binding:"Required"`
+ RuleID int64
+ EnablePush string
+ WhitelistUsers string
+ WhitelistTeams string
+ WhitelistDeployKeys bool
+ EnableMergeWhitelist bool
+ MergeWhitelistUsers string
+ MergeWhitelistTeams string
+ EnableStatusCheck bool
+ StatusCheckContexts string
+ RequiredApprovals int64
+ EnableApprovalsWhitelist bool
+ ApprovalsWhitelistUsers string
+ ApprovalsWhitelistTeams string
+ BlockOnRejectedReviews bool
+ BlockOnOfficialReviewRequests bool
+ BlockOnOutdatedBranch bool
+ DismissStaleApprovals bool
+ IgnoreStaleApprovals bool
+ RequireSignedCommits bool
+ ProtectedFilePatterns string
+ UnprotectedFilePatterns string
+ ApplyToAdmins bool
+}
+
+// Validate validates the fields
+func (f *ProtectBranchForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// __ __ ___. .__ __
+// / \ / \ ____\_ |__ | |__ ____ ____ | | __
+// \ \/\/ // __ \| __ \| | \ / _ \ / _ \| |/ /
+// \ /\ ___/| \_\ \ Y ( <_> | <_> ) <
+// \__/\ / \___ >___ /___| /\____/ \____/|__|_ \
+// \/ \/ \/ \/ \/
+
+// WebhookCoreForm form for changing web hook (common to all webhook types)
+type WebhookCoreForm struct {
+ Events string
+ Create bool
+ Delete bool
+ Fork bool
+ Issues bool
+ IssueAssign bool
+ IssueLabel bool
+ IssueMilestone bool
+ IssueComment bool
+ Release bool
+ Push bool
+ PullRequest bool
+ PullRequestAssign bool
+ PullRequestLabel bool
+ PullRequestMilestone bool
+ PullRequestComment bool
+ PullRequestReview bool
+ PullRequestSync bool
+ PullRequestReviewRequest bool
+ Wiki bool
+ Repository bool
+ Package bool
+ Active bool
+ BranchFilter string `binding:"GlobPattern"`
+ AuthorizationHeader string
+}
+
+// PushOnly reports whether the hook is triggered by push events only
+func (f WebhookCoreForm) PushOnly() bool {
+ return f.Events == "push_only"
+}
+
+// SendEverything reports whether the hook is triggered by every event
+func (f WebhookCoreForm) SendEverything() bool {
+ return f.Events == "send_everything"
+}
+
+// ChooseEvents reports whether the hook is triggered only by chosen events
+func (f WebhookCoreForm) ChooseEvents() bool {
+ return f.Events == "choose_events"
+}
+
+// WebhookForm form for changing web hook (specific handling depending on the webhook type)
+type WebhookForm struct {
+ WebhookCoreForm
+ URL string
+ ContentType webhook_model.HookContentType
+ Secret string
+ HTTPMethod string
+ Metadata any
+}
+
+// .___
+// | | ______ ________ __ ____
+// | |/ ___// ___/ | \_/ __ \
+// | |\___ \ \___ \| | /\ ___/
+// |___/____ >____ >____/ \___ >
+// \/ \/ \/
+
+// CreateIssueForm form for creating issue
+type CreateIssueForm struct {
+ Title string `binding:"Required;MaxSize(255)"`
+ LabelIDs string `form:"label_ids"`
+ AssigneeIDs string `form:"assignee_ids"`
+ Ref string `form:"ref"`
+ MilestoneID int64
+ ProjectID int64
+ AssigneeID int64
+ Content string
+ Files []string
+ AllowMaintainerEdit bool
+}
+
+// Validate validates the fields
+func (f *CreateIssueForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// CreateCommentForm form for creating comment
+type CreateCommentForm struct {
+ Content string
+ Status string `binding:"OmitEmpty;In(reopen,close)"`
+ Files []string
+}
+
+// Validate validates the fields
+func (f *CreateCommentForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// ReactionForm form for adding and removing reaction
+type ReactionForm struct {
+ Content string `binding:"Required"`
+}
+
+// Validate validates the fields
+func (f *ReactionForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// IssueLockForm form for locking an issue
+type IssueLockForm struct {
+ Reason string `binding:"Required"`
+}
+
+// Validate validates the fields
+func (i *IssueLockForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, i, ctx.Locale)
+}
+
+// HasValidReason checks that the reason submitted in the form
+// matches one of the values in the config; an empty reason is
+// always accepted
+func (i IssueLockForm) HasValidReason() bool {
+ if strings.TrimSpace(i.Reason) == "" {
+ return true
+ }
+
+ for _, v := range setting.Repository.Issue.LockReasons {
+ if v == i.Reason {
+ return true
+ }
+ }
+
+ return false
+}
+
+// CreateProjectForm form for creating a project
+type CreateProjectForm struct {
+ Title string `binding:"Required;MaxSize(100)"`
+ Content string
+ TemplateType project_model.TemplateType
+ CardType project_model.CardType
+}
+
+// EditProjectColumnForm is a form for editing a project column
+type EditProjectColumnForm struct {
+ Title string `binding:"Required;MaxSize(100)"`
+ Sorting int8
+ Color string `binding:"MaxSize(7)"`
+}
+
+// CreateMilestoneForm form for creating milestone
+type CreateMilestoneForm struct {
+ Title string `binding:"Required;MaxSize(50)"`
+ Content string
+ Deadline string
+}
+
+// Validate validates the fields
+func (f *CreateMilestoneForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// CreateLabelForm form for creating label
+type CreateLabelForm struct {
+ ID int64
+ Title string `binding:"Required;MaxSize(50)" locale:"repo.issues.label_title"`
+ Exclusive bool `form:"exclusive"`
+ IsArchived bool `form:"is_archived"`
+ Description string `binding:"MaxSize(200)" locale:"repo.issues.label_description"`
+ Color string `binding:"Required;MaxSize(7)" locale:"repo.issues.label_color"`
+}
+
+// Validate validates the fields
+func (f *CreateLabelForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// InitializeLabelsForm form for initializing labels
+type InitializeLabelsForm struct {
+ TemplateName string `binding:"Required"`
+}
+
+// Validate validates the fields
+func (f *InitializeLabelsForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// MergePullRequestForm form for merging Pull Request
+// swagger:model MergePullRequestOption
+type MergePullRequestForm struct {
+ // required: true
+ // enum: ["merge", "rebase", "rebase-merge", "squash", "fast-forward-only", "manually-merged"]
+ Do string `binding:"Required;In(merge,rebase,rebase-merge,squash,fast-forward-only,manually-merged)"`
+ MergeTitleField string
+ MergeMessageField string
+ MergeCommitID string // only used for manually-merged
+ HeadCommitID string `json:"head_commit_id,omitempty"`
+ ForceMerge bool `json:"force_merge,omitempty"`
+ MergeWhenChecksSucceed bool `json:"merge_when_checks_succeed,omitempty"`
+ DeleteBranchAfterMerge bool `json:"delete_branch_after_merge,omitempty"`
+}
+
+// Validate validates the fields
+func (f *MergePullRequestForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// CodeCommentForm form for adding code comments for PRs
+type CodeCommentForm struct {
+ Origin string `binding:"Required;In(timeline,diff)"`
+ Content string `binding:"Required"`
+ Side string `binding:"Required;In(previous,proposed)"`
+ Line int64
+ TreePath string `form:"path" binding:"Required"`
+ SingleReview bool `form:"single_review"`
+ Reply int64 `form:"reply"`
+ LatestCommitID string
+ Files []string
+}
+
+// Validate validates the fields
+func (f *CodeCommentForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// SubmitReviewForm for submitting a finished code review
+type SubmitReviewForm struct {
+ Content string
+ Type string
+ CommitID string
+ Files []string
+}
+
+// Validate validates the fields
+func (f *SubmitReviewForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// ReviewType returns the issues_model.ReviewType corresponding to Type
+func (f SubmitReviewForm) ReviewType() issues_model.ReviewType {
+ switch f.Type {
+ case "approve":
+ return issues_model.ReviewTypeApprove
+ case "comment":
+ return issues_model.ReviewTypeComment
+ case "reject":
+ return issues_model.ReviewTypeReject
+ case "":
+ return issues_model.ReviewTypeComment // default to comment when doing quick-submit (Ctrl+Enter) on the review form
+ default:
+ return issues_model.ReviewTypeUnknown
+ }
+}
+
+// HasEmptyContent checks if the content of the review form is empty.
+func (f SubmitReviewForm) HasEmptyContent() bool {
+ reviewType := f.ReviewType()
+
+ return (reviewType == issues_model.ReviewTypeComment || reviewType == issues_model.ReviewTypeReject) &&
+ len(strings.TrimSpace(f.Content)) == 0
+}
+
+// DismissReviewForm for dismissing stale review by repo admin
+type DismissReviewForm struct {
+ ReviewID int64 `binding:"Required"`
+ Message string
+}
+
+// UpdateAllowEditsForm form for changing if PR allows edits from maintainers
+type UpdateAllowEditsForm struct {
+ AllowMaintainerEdit bool
+}
+
+// __________ .__
+// \______ \ ____ | | ____ _____ ______ ____
+// | _// __ \| | _/ __ \\__ \ / ___// __ \
+// | | \ ___/| |_\ ___/ / __ \_\___ \\ ___/
+// |____|_ /\___ >____/\___ >____ /____ >\___ >
+// \/ \/ \/ \/ \/ \/
+
+// NewReleaseForm form for creating release
+type NewReleaseForm struct {
+ TagName string `binding:"Required;GitRefName;MaxSize(255)"`
+ Target string `form:"tag_target" binding:"Required;MaxSize(255)"`
+ Title string `binding:"MaxSize(255)"`
+ Content string
+ Draft string
+ TagOnly string
+ Prerelease bool
+ AddTagMsg bool
+ HideArchiveLinks bool
+ Files []string
+}
+
+// Validate validates the fields
+func (f *NewReleaseForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// EditReleaseForm form for changing release
+type EditReleaseForm struct {
+ Title string `form:"title" binding:"Required;MaxSize(255)"`
+ Content string `form:"content"`
+ Draft string `form:"draft"`
+ Prerelease bool `form:"prerelease"`
+ HideArchiveLinks bool
+ Files []string
+}
+
+// Validate validates the fields
+func (f *EditReleaseForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// __ __.__ __ .__
+// / \ / \__| | _|__|
+// \ \/\/ / | |/ / |
+// \ /| | <| |
+// \__/\ / |__|__|_ \__|
+// \/ \/
+
+// NewWikiForm form for creating wiki
+type NewWikiForm struct {
+ Title string `binding:"Required"`
+ Content string `binding:"Required"`
+ Message string
+}
+
+// Validate validates the fields
+// FIXME: use code generation to generate this method.
+func (f *NewWikiForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// ___________ .___.__ __
+// \_ _____/ __| _/|__|/ |_
+// | __)_ / __ | | \ __\
+// | \/ /_/ | | || |
+// /_______ /\____ | |__||__|
+// \/ \/
+
+// EditRepoFileForm form for changing repository file
+type EditRepoFileForm struct {
+ TreePath string `binding:"Required;MaxSize(500)"`
+ Content string
+ CommitSummary string `binding:"MaxSize(100)"`
+ CommitMessage string
+ CommitChoice string `binding:"Required;MaxSize(50)"`
+ NewBranchName string `binding:"GitRefName;MaxSize(100)"`
+ LastCommit string
+ CommitMailID int64 `binding:"Required"`
+ Signoff bool
+}
+
+// Validate validates the fields
+func (f *EditRepoFileForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// EditPreviewDiffForm form for changing preview diff
+type EditPreviewDiffForm struct {
+ Content string
+}
+
+// Validate validates the fields
+func (f *EditPreviewDiffForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// _________ .__ __________.__ __
+// \_ ___ \| |__ __________________ ___.__. \______ \__| ____ | | __
+// / \ \/| | \_/ __ \_ __ \_ __ < | | | ___/ |/ ___\| |/ /
+// \ \___| Y \ ___/| | \/| | \/\___ | | | | \ \___| <
+// \______ /___| /\___ >__| |__| / ____| |____| |__|\___ >__|_ \
+// \/ \/ \/ \/ \/ \/
+
+// CherryPickForm form for cherry-picking or reverting a commit
+type CherryPickForm struct {
+ CommitSummary string `binding:"MaxSize(100)"`
+ CommitMessage string
+ CommitChoice string `binding:"Required;MaxSize(50)"`
+ NewBranchName string `binding:"GitRefName;MaxSize(100)"`
+ LastCommit string
+ CommitMailID int64 `binding:"Required"`
+ Revert bool
+ Signoff bool
+}
+
+// Validate validates the fields
+func (f *CherryPickForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// ____ ___ .__ .___
+// | | \______ | | _________ __| _/
+// | | /\____ \| | / _ \__ \ / __ |
+// | | / | |_> > |_( <_> ) __ \_/ /_/ |
+// |______/ | __/|____/\____(____ /\____ |
+// |__| \/ \/
+//
+
+// UploadRepoFileForm form for uploading repository file
+type UploadRepoFileForm struct {
+ TreePath string `binding:"MaxSize(500)"`
+ CommitSummary string `binding:"MaxSize(100)"`
+ CommitMessage string
+ CommitChoice string `binding:"Required;MaxSize(50)"`
+ NewBranchName string `binding:"GitRefName;MaxSize(100)"`
+ Files []string
+ CommitMailID int64 `binding:"Required"`
+ Signoff bool
+}
+
+// Validate validates the fields
+func (f *UploadRepoFileForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// RemoveUploadFileForm form for removing uploaded file
+type RemoveUploadFileForm struct {
+ File string `binding:"Required;MaxSize(50)"`
+}
+
+// Validate validates the fields
+func (f *RemoveUploadFileForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// ________ .__ __
+// \______ \ ____ | | _____/ |_ ____
+// | | \_/ __ \| | _/ __ \ __\/ __ \
+// | ` \ ___/| |_\ ___/| | \ ___/
+// /_______ /\___ >____/\___ >__| \___ >
+// \/ \/ \/ \/
+
+// DeleteRepoFileForm form for deleting repository file
+type DeleteRepoFileForm struct {
+ CommitSummary string `binding:"MaxSize(100)"`
+ CommitMessage string
+ CommitChoice string `binding:"Required;MaxSize(50)"`
+ NewBranchName string `binding:"GitRefName;MaxSize(100)"`
+ LastCommit string
+ CommitMailID int64 `binding:"Required"`
+ Signoff bool
+}
+
+// Validate validates the fields
+func (f *DeleteRepoFileForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// ___________.__ ___________ __
+// \__ ___/|__| _____ ____ \__ ___/___________ ____ | | __ ___________
+// | | | |/ \_/ __ \ | | \_ __ \__ \ _/ ___\| |/ // __ \_ __ \
+// | | | | Y Y \ ___/ | | | | \// __ \\ \___| <\ ___/| | \/
+// |____| |__|__|_| /\___ > |____| |__| (____ /\___ >__|_ \\___ >__|
+// \/ \/ \/ \/ \/ \/
+
+// AddTimeManuallyForm form that adds spent time manually.
+type AddTimeManuallyForm struct {
+ Hours int `binding:"Range(0,1000)"`
+ Minutes int `binding:"Range(0,1000)"`
+}
+
+// Validate validates the fields
+func (f *AddTimeManuallyForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// SaveTopicForm form for saving topics for a repository
+type SaveTopicForm struct {
+ Topics []string `binding:"topics;Required;"`
+}
+
+// DeadlineForm holds the validation rules for deadlines
+type DeadlineForm struct {
+ DateString string `form:"date" binding:"Required;Size(10)"`
+}
+
+// Validate validates the fields
+func (f *DeadlineForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
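
Of everything in repo_form.go, ParseRemoteAddr is the one piece of pure logic, so its contract is easy to demonstrate. The driver below assumes only the function as defined above and would need to run from within the source tree, since it imports the forms package:

package main

import (
	"fmt"

	"code.gitea.io/gitea/services/forms"
)

func main() {
	// HTTP(S) URLs gain embedded credentials when either field is set.
	addr, _ := forms.ParseRemoteAddr("https://example.com/user/repo", "u", "p")
	fmt.Println(addr) // https://u:p@example.com/user/repo

	// SCP-like syntax is rewritten to an ssh:// URL; credentials are ignored.
	addr, _ = forms.ParseRemoteAddr("git@forgejo.org:user/repo", "u", "p")
	fmt.Println(addr) // ssh://git@forgejo.org/user/repo

	// Anything else (e.g. a local path) passes through untouched.
	addr, _ = forms.ParseRemoteAddr("/srv/git/repo.git", "", "")
	fmt.Println(addr) // /srv/git/repo.git
}
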
diff --git a/services/forms/repo_form_test.go b/services/forms/repo_form_test.go
new file mode 100644
index 0000000..2c5a8e2
--- /dev/null
+++ b/services/forms/repo_form_test.go
@@ -0,0 +1,64 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package forms
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestSubmitReviewForm_IsEmpty(t *testing.T) {
+ cases := []struct {
+ form SubmitReviewForm
+ expected bool
+ }{
+ // Approved PR with a comment shouldn't count as empty
+ {SubmitReviewForm{Type: "approve", Content: "Awesome"}, false},
+
+ // Approved PR without a comment shouldn't count as empty
+ {SubmitReviewForm{Type: "approve", Content: ""}, false},
+
+ // Rejected PR without a comment should count as empty
+ {SubmitReviewForm{Type: "reject", Content: ""}, true},
+
+ // Rejected PR with a comment shouldn't count as empty
+ {SubmitReviewForm{Type: "reject", Content: "Awesome"}, false},
+
+ // Comment review on a PR with a comment shouldn't count as empty
+ {SubmitReviewForm{Type: "comment", Content: "Awesome"}, false},
+
+ // Comment review on a PR without a comment should count as empty
+ {SubmitReviewForm{Type: "comment", Content: ""}, true},
+ }
+
+ for _, v := range cases {
+ assert.Equal(t, v.expected, v.form.HasEmptyContent())
+ }
+}
+
+func TestIssueLock_HasValidReason(t *testing.T) {
+ // Init settings
+ _ = setting.Repository
+
+ cases := []struct {
+ form IssueLockForm
+ expected bool
+ }{
+ {IssueLockForm{""}, true}, // an empty reason is accepted
+ {IssueLockForm{"Off-topic"}, true},
+ {IssueLockForm{"Too heated"}, true},
+ {IssueLockForm{"Spam"}, true},
+ {IssueLockForm{"Resolved"}, true},
+
+ {IssueLockForm{"ZZZZ"}, false},
+ {IssueLockForm{"I want to lock this issue"}, false},
+ }
+
+ for _, v := range cases {
+ assert.Equal(t, v.expected, v.form.HasValidReason())
+ }
+}
diff --git a/services/forms/repo_tag_form.go b/services/forms/repo_tag_form.go
new file mode 100644
index 0000000..0135684
--- /dev/null
+++ b/services/forms/repo_tag_form.go
@@ -0,0 +1,26 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package forms
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/modules/web/middleware"
+ "code.gitea.io/gitea/services/context"
+
+ "gitea.com/go-chi/binding"
+)
+
+// ProtectTagForm form for changing protected tag settings
+type ProtectTagForm struct {
+ NamePattern string `binding:"Required;GlobOrRegexPattern"`
+ AllowlistUsers string
+ AllowlistTeams string
+}
+
+// Validate validates the fields
+func (f *ProtectTagForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
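
The GlobOrRegexPattern rule on NamePattern is a custom validator defined elsewhere in the tree. The convention documented upstream for protected tags, as far as we can tell, is that a pattern wrapped in slashes is a regular expression and anything else is a glob. A hedged sketch of that dual interpretation, using the same gobwas/glob library the form tests already import; matchTag is an illustrative helper, not the real matcher:

package main

import (
	"fmt"
	"regexp"
	"strings"

	"github.com/gobwas/glob"
)

// matchTag mirrors the documented protected-tag semantics: "/…/" means
// regexp, otherwise glob. The real matcher lives in the models layer.
func matchTag(pattern, tag string) (bool, error) {
	if len(pattern) > 2 && strings.HasPrefix(pattern, "/") && strings.HasSuffix(pattern, "/") {
		re, err := regexp.Compile(pattern[1 : len(pattern)-1])
		if err != nil {
			return false, err
		}
		return re.MatchString(tag), nil
	}
	g, err := glob.Compile(pattern)
	if err != nil {
		return false, err
	}
	return g.Match(tag), nil
}

func main() {
	ok, _ := matchTag("v*", "v1.2.3")
	fmt.Println(ok) // true
	ok, _ = matchTag(`/^v\d+\.\d+\.\d+$/`, "v1.2.3")
	fmt.Println(ok) // true
}
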
diff --git a/services/forms/runner.go b/services/forms/runner.go
new file mode 100644
index 0000000..6abfc66
--- /dev/null
+++ b/services/forms/runner.go
@@ -0,0 +1,24 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package forms
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/modules/web/middleware"
+ "code.gitea.io/gitea/services/context"
+
+ "gitea.com/go-chi/binding"
+)
+
+// EditRunnerForm form for editing a runner
+type EditRunnerForm struct {
+ Description string
+}
+
+// Validate validates form fields
+func (f *EditRunnerForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
diff --git a/services/forms/user_form.go b/services/forms/user_form.go
new file mode 100644
index 0000000..cc93b27
--- /dev/null
+++ b/services/forms/user_form.go
@@ -0,0 +1,455 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package forms
+
+import (
+ "mime/multipart"
+ "net/http"
+ "strings"
+
+ auth_model "code.gitea.io/gitea/models/auth"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/web/middleware"
+ "code.gitea.io/gitea/services/context"
+
+ "gitea.com/go-chi/binding"
+)
+
+// InstallForm form for installation page
+type InstallForm struct {
+ DbType string `binding:"Required"`
+ DbHost string
+ DbUser string
+ DbPasswd string
+ DbName string
+ SSLMode string
+ DbPath string
+ DbSchema string
+
+ AppName string `binding:"Required" locale:"install.app_name"`
+ AppSlogan string
+ RepoRootPath string `binding:"Required"`
+ LFSRootPath string
+ RunUser string `binding:"Required"`
+ Domain string `binding:"Required"`
+ SSHPort int
+ HTTPPort string `binding:"Required"`
+ AppURL string `binding:"Required"`
+ LogRootPath string `binding:"Required"`
+
+ SMTPAddr string
+ SMTPPort string
+ SMTPFrom string
+ SMTPUser string `binding:"OmitEmpty;MaxSize(254)" locale:"install.mailer_user"`
+ SMTPPasswd string
+ RegisterConfirm bool
+ MailNotify bool
+
+ OfflineMode bool
+ DisableGravatar bool
+ EnableFederatedAvatar bool
+ EnableOpenIDSignIn bool
+ EnableOpenIDSignUp bool
+ DisableRegistration bool
+ AllowOnlyExternalRegistration bool
+ EnableCaptcha bool
+ RequireSignInView bool
+ DefaultKeepEmailPrivate bool
+ DefaultAllowCreateOrganization bool
+ DefaultEnableTimetracking bool
+ EnableUpdateChecker bool
+ NoReplyAddress string
+
+ PasswordAlgorithm string
+
+ AdminName string `binding:"OmitEmpty;Username;MaxSize(30)" locale:"install.admin_name"`
+ AdminPasswd string `binding:"OmitEmpty;MaxSize(255)" locale:"install.admin_password"`
+ AdminConfirmPasswd string
+ AdminEmail string `binding:"OmitEmpty;MinSize(3);MaxSize(254);Include(@)" locale:"install.admin_email"`
+
+	// ReinstallConfirmFirst: we cannot use 1/2/3 or A/B/C here; a framework bug prevents parsing "reinstall_confirm_1" or "reinstall_confirm_a"
+ ReinstallConfirmFirst bool
+ ReinstallConfirmSecond bool
+ ReinstallConfirmThird bool
+}
+
+// Validate validates the fields
+func (f *InstallForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// _____ ____ _________________ ___
+// / _ \ | | \__ ___/ | \
+// / /_\ \| | / | | / ~ \
+// / | \ | / | | \ Y /
+// \____|__ /______/ |____| \___|_ /
+// \/ \/
+
+// RegisterForm form for registering
+type RegisterForm struct {
+ UserName string `binding:"Required;Username;MaxSize(40)"`
+ Email string `binding:"Required;MaxSize(254)"`
+ Password string `binding:"MaxSize(255)"`
+ Retype string
+}
+
+// Validate validates the fields
+func (f *RegisterForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// IsEmailDomainAllowed validates that the email address
+// provided by the user matches what has been configured.
+// When the allowlist is non-empty the email must match one of
+// its domains; otherwise it is allowed as long as it matches
+// none of the domains in the blocklist.
+func (f *RegisterForm) IsEmailDomainAllowed() bool {
+ return user_model.IsEmailDomainAllowed(f.Email)
+}
+
+// MustChangePasswordForm form for updating your password after account creation
+// by an admin
+type MustChangePasswordForm struct {
+ Password string `binding:"Required;MaxSize(255)"`
+ Retype string
+}
+
+// Validate validates the fields
+func (f *MustChangePasswordForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// SignInForm form for signing in with user/password
+type SignInForm struct {
+ UserName string `binding:"Required;MaxSize(254)"`
+ // TODO remove required from password for SecondFactorAuthentication
+ Password string `binding:"Required;MaxSize(255)"`
+ Remember bool
+}
+
+// Validate validates the fields
+func (f *SignInForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// AuthorizationForm form for authorizing oauth2 clients
+type AuthorizationForm struct {
+ ResponseType string `binding:"Required;In(code)"`
+ ClientID string `binding:"Required"`
+ RedirectURI string
+ State string
+ Scope string
+ Nonce string
+
+ // PKCE support
+ CodeChallengeMethod string // S256, plain
+ CodeChallenge string
+}
+
+// Validate validates the fields
+func (f *AuthorizationForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// GrantApplicationForm form for granting access to an oauth2 application
+type GrantApplicationForm struct {
+ ClientID string `binding:"Required"`
+ Granted bool
+ RedirectURI string
+ State string
+ Scope string
+ Nonce string
+}
+
+// Validate validates the fields
+func (f *GrantApplicationForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// AccessTokenForm for issuing access tokens from authorization codes or refresh tokens
+type AccessTokenForm struct {
+ GrantType string `json:"grant_type"`
+ ClientID string `json:"client_id"`
+ ClientSecret string `json:"client_secret"`
+ RedirectURI string `json:"redirect_uri"`
+ Code string `json:"code"`
+ RefreshToken string `json:"refresh_token"`
+
+ // PKCE support
+ CodeVerifier string `json:"code_verifier"`
+}
+
+// Validate validates the fields
+func (f *AccessTokenForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// IntrospectTokenForm for introspecting tokens
+type IntrospectTokenForm struct {
+ Token string `json:"token"`
+}
+
+// Validate validates the fields
+func (f *IntrospectTokenForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// __________________________________________.___ _______ ________ _________
+// / _____/\_ _____/\__ ___/\__ ___/| |\ \ / _____/ / _____/
+// \_____ \ | __)_ | | | | | |/ | \/ \ ___ \_____ \
+// / \ | \ | | | | | / | \ \_\ \/ \
+// /_______ //_______ / |____| |____| |___\____|__ /\______ /_______ /
+// \/ \/ \/ \/ \/
+
+// UpdateProfileForm form for updating profile
+type UpdateProfileForm struct {
+ Name string `binding:"Username;MaxSize(40)"`
+ FullName string `binding:"MaxSize(100)"`
+ KeepEmailPrivate bool
+ Website string `binding:"ValidSiteUrl;MaxSize(255)"`
+ Location string `binding:"MaxSize(50)"`
+ Pronouns string `binding:"MaxSize(50)"`
+ Biography string `binding:"MaxSize(255)"`
+ Visibility structs.VisibleType
+ KeepActivityPrivate bool
+}
+
+// Validate validates the fields
+func (f *UpdateProfileForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// UpdateLanguageForm form for updating the user's language
+type UpdateLanguageForm struct {
+ Language string
+}
+
+// UpdateHintsForm form for updating user hint settings
+type UpdateHintsForm struct {
+ EnableRepoUnitHints bool
+}
+
+// Validate validates the fields
+func (f *UpdateLanguageForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// Avatar types
+const (
+ AvatarLocal string = "local"
+ AvatarByMail string = "bymail"
+)
+
+// AvatarForm form for changing avatar
+type AvatarForm struct {
+ Source string
+ Avatar *multipart.FileHeader
+ Gravatar string `binding:"OmitEmpty;Email;MaxSize(254)"`
+ Federavatar bool
+}
+
+// Validate validates the fields
+func (f *AvatarForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// AddEmailForm form for adding new email
+type AddEmailForm struct {
+ Email string `binding:"Required;Email;MaxSize(254)"`
+}
+
+// Validate validates the fields
+func (f *AddEmailForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// UpdateThemeForm form for updating a user's theme
+type UpdateThemeForm struct {
+ Theme string `binding:"Required;MaxSize(64)"`
+}
+
+// Validate validates the field
+func (f *UpdateThemeForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// IsThemeExists checks whether the theme is one of the themes available in the config.
+func (f UpdateThemeForm) IsThemeExists() bool {
+ var exists bool
+
+ for _, v := range setting.UI.Themes {
+ if strings.EqualFold(v, f.Theme) {
+ exists = true
+ break
+ }
+ }
+
+ return exists
+}
+
+// ChangePasswordForm form for changing password
+type ChangePasswordForm struct {
+ OldPassword string `form:"old_password" binding:"MaxSize(255)"`
+ Password string `form:"password" binding:"Required;MaxSize(255)"`
+ Retype string `form:"retype"`
+}
+
+// Validate validates the fields
+func (f *ChangePasswordForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// AddOpenIDForm is for adding an OpenID URI
+type AddOpenIDForm struct {
+ Openid string `binding:"Required;MaxSize(256)"`
+}
+
+// Validate validates the fields
+func (f *AddOpenIDForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// AddKeyForm form for adding SSH/GPG key
+type AddKeyForm struct {
+ Type string `binding:"OmitEmpty"`
+ Title string `binding:"Required;MaxSize(50)"`
+ Content string `binding:"Required"`
+ Signature string `binding:"OmitEmpty"`
+ KeyID string `binding:"OmitEmpty"`
+ Fingerprint string `binding:"OmitEmpty"`
+ IsWritable bool
+}
+
+// Validate validates the fields
+func (f *AddKeyForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// AddSecretForm for adding secrets
+type AddSecretForm struct {
+ Name string `binding:"Required;MaxSize(255)"`
+ Data string `binding:"Required;MaxSize(65535)"`
+}
+
+// Validate validates the fields
+func (f *AddSecretForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+type EditVariableForm struct {
+ Name string `binding:"Required;MaxSize(255)"`
+ Data string `binding:"Required;MaxSize(65535)"`
+}
+
+func (f *EditVariableForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// NewAccessTokenForm form for creating access token
+type NewAccessTokenForm struct {
+ Name string `binding:"Required;MaxSize(255)" locale:"settings.token_name"`
+ Scope []string
+}
+
+// Validate validates the fields
+func (f *NewAccessTokenForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+func (f *NewAccessTokenForm) GetScope() (auth_model.AccessTokenScope, error) {
+ scope := strings.Join(f.Scope, ",")
+ s, err := auth_model.AccessTokenScope(scope).Normalize()
+ return s, err
+}
+
+// EditOAuth2ApplicationForm form for editing oauth2 applications
+type EditOAuth2ApplicationForm struct {
+ Name string `binding:"Required;MaxSize(255)" form:"application_name"`
+ RedirectURIs string `binding:"Required" form:"redirect_uris"`
+ ConfidentialClient bool `form:"confidential_client"`
+}
+
+// Validate validates the fields
+func (f *EditOAuth2ApplicationForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// TwoFactorAuthForm for logging in with 2FA token.
+type TwoFactorAuthForm struct {
+ Passcode string `binding:"Required"`
+}
+
+// Validate validates the fields
+func (f *TwoFactorAuthForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// TwoFactorScratchAuthForm for logging in with 2FA scratch token.
+type TwoFactorScratchAuthForm struct {
+ Token string `binding:"Required"`
+}
+
+// Validate validates the fields
+func (f *TwoFactorScratchAuthForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// WebauthnRegistrationForm for reserving a WebAuthn name
+type WebauthnRegistrationForm struct {
+ Name string `binding:"Required"`
+}
+
+// Validate validates the fields
+func (f *WebauthnRegistrationForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// WebauthnDeleteForm for deleting WebAuthn keys
+type WebauthnDeleteForm struct {
+ ID int64 `binding:"Required"`
+}
+
+// Validate validates the fields
+func (f *WebauthnDeleteForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// PackageSettingForm form for package settings
+type PackageSettingForm struct {
+ Action string
+ RepoID int64 `form:"repo_id"`
+}
+
+// Validate validates the fields
+func (f *PackageSettingForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
diff --git a/services/forms/user_form_auth_openid.go b/services/forms/user_form_auth_openid.go
new file mode 100644
index 0000000..ca1c77e
--- /dev/null
+++ b/services/forms/user_form_auth_openid.go
@@ -0,0 +1,49 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package forms
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/modules/web/middleware"
+ "code.gitea.io/gitea/services/context"
+
+ "gitea.com/go-chi/binding"
+)
+
+// SignInOpenIDForm form for signing in with OpenID
+type SignInOpenIDForm struct {
+ Openid string `binding:"Required;MaxSize(256)"`
+ Remember bool
+}
+
+// Validate validates the fields
+func (f *SignInOpenIDForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// SignUpOpenIDForm form for signing up with OpenID
+type SignUpOpenIDForm struct {
+ UserName string `binding:"Required;Username;MaxSize(40)"`
+ Email string `binding:"Required;Email;MaxSize(254)"`
+}
+
+// Validate validates the fields
+func (f *SignUpOpenIDForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+// ConnectOpenIDForm form for connecting an existing account to an OpenID URI
+type ConnectOpenIDForm struct {
+ UserName string `binding:"Required;MaxSize(254)"`
+ Password string `binding:"Required;MaxSize(255)"`
+}
+
+// Validate validates the fields
+func (f *ConnectOpenIDForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
diff --git a/services/forms/user_form_hidden_comments.go b/services/forms/user_form_hidden_comments.go
new file mode 100644
index 0000000..b9677c1
--- /dev/null
+++ b/services/forms/user_form_hidden_comments.go
@@ -0,0 +1,104 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package forms
+
+import (
+ "math/big"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/services/context"
+)
+
+type hiddenCommentTypeGroupsType map[string][]issues_model.CommentType
+
+// hiddenCommentTypeGroups maps the group names to comment types; these group names come from the Web UI (appearance.tmpl)
+var hiddenCommentTypeGroups = hiddenCommentTypeGroupsType{
+ "reference": {
+ /*3*/ issues_model.CommentTypeIssueRef,
+ /*4*/ issues_model.CommentTypeCommitRef,
+ /*5*/ issues_model.CommentTypeCommentRef,
+ /*6*/ issues_model.CommentTypePullRef,
+ },
+ "label": {
+ /*7*/ issues_model.CommentTypeLabel,
+ },
+ "milestone": {
+ /*8*/ issues_model.CommentTypeMilestone,
+ },
+ "assignee": {
+ /*9*/ issues_model.CommentTypeAssignees,
+ },
+ "title": {
+ /*10*/ issues_model.CommentTypeChangeTitle,
+ },
+ "branch": {
+ /*11*/ issues_model.CommentTypeDeleteBranch,
+ /*25*/ issues_model.CommentTypeChangeTargetBranch,
+ },
+ "time_tracking": {
+ /*12*/ issues_model.CommentTypeStartTracking,
+ /*13*/ issues_model.CommentTypeStopTracking,
+ /*14*/ issues_model.CommentTypeAddTimeManual,
+ /*15*/ issues_model.CommentTypeCancelTracking,
+ /*26*/ issues_model.CommentTypeDeleteTimeManual,
+ },
+ "deadline": {
+ /*16*/ issues_model.CommentTypeAddedDeadline,
+ /*17*/ issues_model.CommentTypeModifiedDeadline,
+ /*18*/ issues_model.CommentTypeRemovedDeadline,
+ },
+ "dependency": {
+ /*19*/ issues_model.CommentTypeAddDependency,
+ /*20*/ issues_model.CommentTypeRemoveDependency,
+ },
+ "lock": {
+ /*23*/ issues_model.CommentTypeLock,
+ /*24*/ issues_model.CommentTypeUnlock,
+ },
+ "review_request": {
+ /*27*/ issues_model.CommentTypeReviewRequest,
+ },
+ "pull_request_push": {
+ /*29*/ issues_model.CommentTypePullRequestPush,
+ },
+ "project": {
+ /*30*/ issues_model.CommentTypeProject,
+ /*31*/ issues_model.CommentTypeProjectColumn,
+ },
+ "issue_ref": {
+ /*33*/ issues_model.CommentTypeChangeIssueRef,
+ },
+}
+
+// UserHiddenCommentTypesFromRequest parses the form into a hidden comment types bitset
+func UserHiddenCommentTypesFromRequest(ctx *context.Context) *big.Int {
+ bitset := new(big.Int)
+ for group, commentTypes := range hiddenCommentTypeGroups {
+ if ctx.FormBool(group) {
+ for _, commentType := range commentTypes {
+ bitset = bitset.SetBit(bitset, int(commentType), 1)
+ }
+ }
+ }
+ return bitset
+}
+
+// IsUserHiddenCommentTypeGroupChecked checks whether a hidden comment type group is "enabled" (checked in the UI)
+func IsUserHiddenCommentTypeGroupChecked(group string, hiddenCommentTypes *big.Int) (ret bool) {
+ commentTypes, ok := hiddenCommentTypeGroups[group]
+ if !ok {
+ log.Critical("the group map for hidden comment types is out of sync, unknown group: %v", group)
+ return false
+ }
+ if hiddenCommentTypes == nil {
+ return false
+ }
+ for _, commentType := range commentTypes {
+ if hiddenCommentTypes.Bit(int(commentType)) == 1 {
+ return true
+ }
+ }
+ return false
+}
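
A minimal sketch of how the bitset round-trips, using only identifiers defined above (same package); bit positions follow the numeric comment-type values noted in the comments:

func exampleHiddenCommentBitset() {
	// Hide the "label" group: set the bit for each comment type it contains.
	bitset := new(big.Int)
	for _, t := range hiddenCommentTypeGroups["label"] {
		bitset.SetBit(bitset, int(t), 1) // sets bit 7 (CommentTypeLabel)
	}
	// The appearance settings page can now ask whether the group is checked.
	checked := IsUserHiddenCommentTypeGroupChecked("label", bitset) // true
	_ = checked
}
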
diff --git a/services/forms/user_form_test.go b/services/forms/user_form_test.go
new file mode 100644
index 0000000..6605018
--- /dev/null
+++ b/services/forms/user_form_test.go
@@ -0,0 +1,131 @@
+// Copyright 2018 The Gogs Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package forms
+
+import (
+ "strconv"
+ "testing"
+
+ auth_model "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/gobwas/glob"
+ "github.com/stretchr/testify/assert"
+)
+
+func TestRegisterForm_IsDomainAllowed_Empty(t *testing.T) {
+ oldService := setting.Service
+ defer func() {
+ setting.Service = oldService
+ }()
+
+ setting.Service.EmailDomainAllowList = nil
+
+ form := RegisterForm{}
+
+ assert.True(t, form.IsEmailDomainAllowed())
+}
+
+func TestRegisterForm_IsDomainAllowed_InvalidEmail(t *testing.T) {
+ oldService := setting.Service
+ defer func() {
+ setting.Service = oldService
+ }()
+
+ setting.Service.EmailDomainAllowList = []glob.Glob{glob.MustCompile("gitea.io")}
+
+ tt := []struct {
+ email string
+ }{
+ {"invalid-email"},
+ {"gitea.io"},
+ }
+
+ for _, v := range tt {
+ form := RegisterForm{Email: v.email}
+
+ assert.False(t, form.IsEmailDomainAllowed())
+ }
+}
+
+func TestRegisterForm_IsDomainAllowed_AllowedEmail(t *testing.T) {
+ oldService := setting.Service
+ defer func() {
+ setting.Service = oldService
+ }()
+
+ setting.Service.EmailDomainAllowList = []glob.Glob{glob.MustCompile("gitea.io"), glob.MustCompile("*.allow")}
+
+ tt := []struct {
+ email string
+ valid bool
+ }{
+ {"security@gitea.io", true},
+ {"security@gITea.io", true},
+ {"invalid", false},
+ {"seee@example.com", false},
+
+ {"user@my.allow", true},
+ {"user@my.allow1", false},
+ }
+
+ for _, v := range tt {
+ form := RegisterForm{Email: v.email}
+
+ assert.Equal(t, v.valid, form.IsEmailDomainAllowed())
+ }
+}
+
+func TestRegisterForm_IsDomainAllowed_BlockedEmail(t *testing.T) {
+ oldService := setting.Service
+ defer func() {
+ setting.Service = oldService
+ }()
+
+ setting.Service.EmailDomainAllowList = nil
+ setting.Service.EmailDomainBlockList = []glob.Glob{glob.MustCompile("gitea.io"), glob.MustCompile("*.block")}
+
+ tt := []struct {
+ email string
+ valid bool
+ }{
+ {"security@gitea.io", false},
+ {"security@gitea.example", true},
+ {"invalid", true},
+
+ {"user@my.block", false},
+ {"user@my.block1", true},
+ }
+
+ for _, v := range tt {
+ form := RegisterForm{Email: v.email}
+
+ assert.Equal(t, v.valid, form.IsEmailDomainAllowed())
+ }
+}
+
+func TestNewAccessTokenForm_GetScope(t *testing.T) {
+ tests := []struct {
+ form NewAccessTokenForm
+ scope auth_model.AccessTokenScope
+ expectedErr error
+ }{
+ {
+ form: NewAccessTokenForm{Name: "test", Scope: []string{"read:repository"}},
+ scope: "read:repository",
+ },
+ {
+ form: NewAccessTokenForm{Name: "test", Scope: []string{"read:repository", "write:user"}},
+ scope: "read:repository,write:user",
+ },
+ }
+
+ for i, test := range tests {
+ t.Run(strconv.Itoa(i), func(t *testing.T) {
+ scope, err := test.form.GetScope()
+ assert.Equal(t, test.expectedErr, err)
+ assert.Equal(t, test.scope, scope)
+ })
+ }
+}
diff --git a/services/gitdiff/csv.go b/services/gitdiff/csv.go
new file mode 100644
index 0000000..8db73c5
--- /dev/null
+++ b/services/gitdiff/csv.go
@@ -0,0 +1,469 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package gitdiff
+
+import (
+ "encoding/csv"
+ "errors"
+ "io"
+)
+
+const (
+ unmappedColumn = -1
+ maxRowsToInspect int = 10
+ minRatioToMatch float32 = 0.8
+)
+
+// TableDiffCellType represents the type of a TableDiffCell.
+type TableDiffCellType uint8
+
+// TableDiffCellType possible values.
+const (
+ TableDiffCellUnchanged TableDiffCellType = iota + 1
+ TableDiffCellChanged
+ TableDiffCellAdd
+ TableDiffCellDel
+ TableDiffCellMovedUnchanged
+ TableDiffCellMovedChanged
+)
+
+// TableDiffCell represents a cell of a TableDiffRow
+type TableDiffCell struct {
+ LeftCell string
+ RightCell string
+ Type TableDiffCellType
+}
+
+// TableDiffRow represents a row of a TableDiffSection.
+type TableDiffRow struct {
+ RowIdx int
+ Cells []*TableDiffCell
+}
+
+// TableDiffSection represents a section of a DiffFile.
+type TableDiffSection struct {
+ Rows []*TableDiffRow
+}
+
+// csvReader wraps a csv.Reader which buffers the first rows.
+type csvReader struct {
+ reader *csv.Reader
+ buffer [][]string
+ line int
+ eof bool
+}
+
+// ErrorUndefinedCell is returned when the requested row/column coordinates do not exist in the CSV
+var ErrorUndefinedCell = errors.New("undefined cell")
+
+// createCsvReader creates a csvReader and fills the buffer
+func createCsvReader(reader *csv.Reader, bufferRowCount int) (*csvReader, error) {
+ csv := &csvReader{reader: reader}
+ csv.buffer = make([][]string, bufferRowCount)
+ for i := 0; i < bufferRowCount && !csv.eof; i++ {
+ row, err := csv.readNextRow()
+ if err != nil {
+ return nil, err
+ }
+ csv.buffer[i] = row
+ }
+ csv.line = bufferRowCount
+ return csv, nil
+}
+
+// GetRow gets a row from the buffer if present, or advances the reader to the requested row. At the end of the file, nil is returned.
+func (csv *csvReader) GetRow(row int) ([]string, error) {
+ if row < len(csv.buffer) && row >= 0 {
+ return csv.buffer[row], nil
+ }
+ if csv.eof {
+ return nil, nil
+ }
+ for {
+ fields, err := csv.readNextRow()
+ if err != nil {
+ return nil, err
+ }
+ if csv.eof {
+ return nil, nil
+ }
+ csv.line++
+ if csv.line-1 == row {
+ return fields, nil
+ }
+ }
+}
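
A short usage sketch of the buffering behavior (same package; assumes "strings" is also imported):

r, _ := createCsvReader(csv.NewReader(strings.NewReader("h1,h2\na,b\n")), 1)
row0, _ := r.GetRow(0) // served from the buffer: ["h1" "h2"]
row1, _ := r.GetRow(1) // read on demand from the underlying reader: ["a" "b"]
row9, _ := r.GetRow(9) // nil: the reader is exhausted
_, _, _ = row0, row1, row9
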
+
+func (csv *csvReader) readNextRow() ([]string, error) {
+ if csv.eof {
+ return nil, nil
+ }
+ row, err := csv.reader.Read()
+ if err != nil {
+ if err != io.EOF {
+ return nil, err
+ }
+ csv.eof = true
+ }
+ return row, nil
+}
+
+// CreateCsvDiff creates a tabular diff based on two CSV readers.
+func CreateCsvDiff(diffFile *DiffFile, baseReader, headReader *csv.Reader) ([]*TableDiffSection, error) {
+ if baseReader != nil && headReader != nil {
+ return createCsvDiff(diffFile, baseReader, headReader)
+ }
+
+ if baseReader != nil {
+ return createCsvDiffSingle(baseReader, TableDiffCellDel)
+ }
+ return createCsvDiffSingle(headReader, TableDiffCellAdd)
+}
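
A usage sketch, building both readers from strings; in real code the *DiffFile comes from ParsePatch and the readers from the repository blobs (assumes "strings" is imported, and diffFile is such a parsed *DiffFile):

base := csv.NewReader(strings.NewReader("col1,col2\na,a\n"))
head := csv.NewReader(strings.NewReader("col1,col2\na,a\nb,b\n"))
// With both readers non-nil, a row-by-row tabular diff is produced;
// passing nil for one side marks every cell as added or deleted.
sections, err := CreateCsvDiff(diffFile, base, head)
_, _ = sections, err
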
+
+// createCsvDiffSingle creates a tabular diff based on a single CSV reader. All cells are added or deleted.
+func createCsvDiffSingle(reader *csv.Reader, celltype TableDiffCellType) ([]*TableDiffSection, error) {
+ var rows []*TableDiffRow
+ i := 1
+ for {
+ row, err := reader.Read()
+ if err != nil {
+ if err == io.EOF {
+ break
+ }
+ return nil, err
+ }
+ cells := make([]*TableDiffCell, len(row))
+ for j := 0; j < len(row); j++ {
+ if celltype == TableDiffCellDel {
+ cells[j] = &TableDiffCell{LeftCell: row[j], Type: celltype}
+ } else {
+ cells[j] = &TableDiffCell{RightCell: row[j], Type: celltype}
+ }
+ }
+ rows = append(rows, &TableDiffRow{RowIdx: i, Cells: cells})
+ i++
+ }
+
+ return []*TableDiffSection{{Rows: rows}}, nil
+}
+
+func createCsvDiff(diffFile *DiffFile, baseReader, headReader *csv.Reader) ([]*TableDiffSection, error) {
+	// Given the baseReader and headReader, we create a CSV reader for each: baseCSVReader and headCSVReader respectively
+ baseCSVReader, err := createCsvReader(baseReader, maxRowsToInspect)
+ if err != nil {
+ return nil, err
+ }
+ headCSVReader, err := createCsvReader(headReader, maxRowsToInspect)
+ if err != nil {
+ return nil, err
+ }
+
+ // Initializing the mappings of base to head (a2bColMap) and head to base (b2aColMap) columns
+ a2bColMap, b2aColMap := getColumnMapping(baseCSVReader, headCSVReader)
+
+	// Determine how many columns there will be in the diff table, including columns deleted from base and columns added in head
+ numDiffTableCols := len(a2bColMap) + countUnmappedColumns(b2aColMap)
+ if len(a2bColMap) < len(b2aColMap) {
+ numDiffTableCols = len(b2aColMap) + countUnmappedColumns(a2bColMap)
+ }
+
+	// createDiffTableRow takes the row number of the `a` line and `b` line of a diff (starting from 1), or 0 if the line doesn't exist
+	// in the base or head respectively.
+	// It returns a TableDiffRow carrying the row index and its cells.
+ createDiffTableRow := func(aLineNum, bLineNum int) (*TableDiffRow, error) {
+		// diffTableCells is a row of the diff table. It will have cells for added, deleted, changed, and unchanged content, thus either
+		// the same size as the head table or bigger
+ diffTableCells := make([]*TableDiffCell, numDiffTableCols)
+ var bRow *[]string
+ if bLineNum > 0 {
+ row, err := headCSVReader.GetRow(bLineNum - 1)
+ if err != nil {
+ return nil, err
+ }
+ bRow = &row
+ }
+ var aRow *[]string
+ if aLineNum > 0 {
+ row, err := baseCSVReader.GetRow(aLineNum - 1)
+ if err != nil {
+ return nil, err
+ }
+ aRow = &row
+ }
+ if aRow == nil && bRow == nil {
+ // No content
+ return nil, nil
+ }
+
+ aIndex := 0 // tracks where we are in the a2bColMap
+ bIndex := 0 // tracks where we are in the b2aColMap
+		colsAdded := 0   // incremented whenever a column was added
+		colsDeleted := 0 // incremented whenever a column was deleted
+
+		// We loop until both aIndex and bIndex have passed the end of their col maps, at which point we are done
+ for aIndex < len(a2bColMap) || bIndex < len(b2aColMap) {
+			// Starting from where aIndex is currently pointing, we see if the map is -1 (deleted) and if it is, create a cell to note that, increment, and look at the next aIndex
+ for aIndex < len(a2bColMap) && a2bColMap[aIndex] == -1 && (bIndex >= len(b2aColMap) || aIndex <= bIndex) {
+ var aCell string
+ if aRow != nil {
+ if cell, err := getCell(*aRow, aIndex); err != nil {
+ if err != ErrorUndefinedCell {
+ return nil, err
+ }
+ } else {
+ aCell = cell
+ }
+ }
+ diffTableCells[bIndex+colsDeleted] = &TableDiffCell{LeftCell: aCell, Type: TableDiffCellDel}
+ aIndex++
+ colsDeleted++
+ }
+
+			// aIndex is now pointing to a column that also exists in b, or is at the end of a2bColMap. If the former,
+			// we increment aIndex past every mapped (non -1) column until it reaches a -1 column or the end of the map
+ for aIndex < len(a2bColMap) && a2bColMap[aIndex] != -1 {
+ aIndex++
+ }
+
+			// Starting from where bIndex is currently pointing, we see if the map is -1 (added) and if it is, create a cell to note that, increment, and look at the next bIndex
+ for bIndex < len(b2aColMap) && b2aColMap[bIndex] == -1 && (aIndex >= len(a2bColMap) || bIndex < aIndex) {
+ var bCell string
+ cellType := TableDiffCellAdd
+ if bRow != nil {
+ if cell, err := getCell(*bRow, bIndex); err != nil {
+ if err != ErrorUndefinedCell {
+ return nil, err
+ }
+ } else {
+ bCell = cell
+ }
+ } else {
+ cellType = TableDiffCellDel
+ }
+ diffTableCells[bIndex+colsDeleted] = &TableDiffCell{RightCell: bCell, Type: cellType}
+ bIndex++
+ colsAdded++
+ }
+
+			// bIndex is now pointing to a column that also exists in a, or is at the end of b2aColMap. If the former,
+			// we get the a col and b col values (if they exist), figure out if they are the same and whether the column moved, and add the cell to the diff table
+ for bIndex < len(b2aColMap) && b2aColMap[bIndex] != -1 && (aIndex >= len(a2bColMap) || bIndex < aIndex) {
+ var diffTableCell TableDiffCell
+
+ var aCell *string
+ // get the aCell value if the aRow exists
+ if aRow != nil {
+ if cell, err := getCell(*aRow, b2aColMap[bIndex]); err != nil {
+ if err != ErrorUndefinedCell {
+ return nil, err
+ }
+ } else {
+ aCell = &cell
+ diffTableCell.LeftCell = cell
+ }
+ } else {
+ diffTableCell.Type = TableDiffCellAdd
+ }
+
+ var bCell *string
+ // get the bCell value if the bRow exists
+ if bRow != nil {
+ if cell, err := getCell(*bRow, bIndex); err != nil {
+ if err != ErrorUndefinedCell {
+ return nil, err
+ }
+ } else {
+ bCell = &cell
+ diffTableCell.RightCell = cell
+ }
+ } else {
+ diffTableCell.Type = TableDiffCellDel
+ }
+
+ // if both a and b have a row that exists, compare the value and determine if the row has moved
+ if aCell != nil && bCell != nil {
+ moved := ((bIndex + colsDeleted) != (b2aColMap[bIndex] + colsAdded))
+ if *aCell != *bCell {
+ if moved {
+ diffTableCell.Type = TableDiffCellMovedChanged
+ } else {
+ diffTableCell.Type = TableDiffCellChanged
+ }
+ } else {
+ if moved {
+ diffTableCell.Type = TableDiffCellMovedUnchanged
+ } else {
+ diffTableCell.Type = TableDiffCellUnchanged
+ }
+ diffTableCell.LeftCell = ""
+ }
+ }
+
+ // Add the diff column to the diff row
+ diffTableCells[bIndex+colsDeleted] = &diffTableCell
+ bIndex++
+ }
+ }
+
+ return &TableDiffRow{RowIdx: bLineNum, Cells: diffTableCells}, nil
+ }
+
+	// diffTableSections are the TableDiffSections produced by the diff; each will be rendered as its own table in the view
+ var diffTableSections []*TableDiffSection
+
+ for i, section := range diffFile.Sections {
+ // Each section has multiple diffTableRows
+ var diffTableRows []*TableDiffRow
+ lines := tryMergeLines(section.Lines)
+ // Loop through the merged lines to get each row of the CSV diff table for this section
+ for j, line := range lines {
+ if i == 0 && j == 0 && (line[0] != 1 || line[1] != 1) {
+ diffTableRow, err := createDiffTableRow(1, 1)
+ if err != nil {
+ return nil, err
+ }
+ if diffTableRow != nil {
+ diffTableRows = append(diffTableRows, diffTableRow)
+ }
+ }
+ diffTableRow, err := createDiffTableRow(line[0], line[1])
+ if err != nil {
+ return nil, err
+ }
+ if diffTableRow != nil {
+ diffTableRows = append(diffTableRows, diffTableRow)
+ }
+ }
+
+ if len(diffTableRows) > 0 {
+ diffTableSections = append(diffTableSections, &TableDiffSection{Rows: diffTableRows})
+ }
+ }
+
+ return diffTableSections, nil
+}
+
+// getColumnMapping creates a mapping of columns between a and b
+func getColumnMapping(baseCSVReader, headCSVReader *csvReader) ([]int, []int) {
+ baseRow, _ := baseCSVReader.GetRow(0)
+ headRow, _ := headCSVReader.GetRow(0)
+
+ base2HeadColMap := []int{}
+ head2BaseColMap := []int{}
+
+ if baseRow != nil {
+ base2HeadColMap = make([]int, len(baseRow))
+ }
+ if headRow != nil {
+ head2BaseColMap = make([]int, len(headRow))
+ }
+
+ // Initializes all head2base mappings to be unmappedColumn (-1)
+ for i := 0; i < len(head2BaseColMap); i++ {
+ head2BaseColMap[i] = unmappedColumn
+ }
+
+	// Loops through the baseRow and sees if there is a match in the head row
+ for i := 0; i < len(baseRow); i++ {
+ base2HeadColMap[i] = unmappedColumn
+ baseCell, err := getCell(baseRow, i)
+ if err == nil {
+ for j := 0; j < len(headRow); j++ {
+ if head2BaseColMap[j] == -1 {
+ headCell, err := getCell(headRow, j)
+ if err == nil && baseCell == headCell {
+ base2HeadColMap[i] = j
+ head2BaseColMap[j] = i
+ break
+ }
+ }
+ }
+ }
+ }
+
+ tryMapColumnsByContent(baseCSVReader, base2HeadColMap, headCSVReader, head2BaseColMap)
+ tryMapColumnsByContent(headCSVReader, head2BaseColMap, baseCSVReader, base2HeadColMap)
+
+ return base2HeadColMap, head2BaseColMap
+}
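
For intuition, the mappings for the "renamed first column" case (compare test case 5 in csv_test.go below) come out as follows; a sketch using only identifiers defined in this file, plus an assumed "strings" import:

base, _ := createCsvReader(csv.NewReader(strings.NewReader("col1,col2,col3\na,b,c\n")), maxRowsToInspect)
head, _ := createCsvReader(csv.NewReader(strings.NewReader("cola,col2,col3\na,b,c\n")), maxRowsToInspect)
a2b, b2a := getColumnMapping(base, head)
// a2b == []int{unmappedColumn, 1, 2} and b2a == []int{unmappedColumn, 1, 2}:
// col2 and col3 pair up by name, while the renamed first column stays
// unmapped on both sides, so the diff table shows a deleted col1 next to
// an added cola.
_, _ = a2b, b2a
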
+
+// tryMapColumnsByContent tries to map missing columns by the content of the first lines.
+func tryMapColumnsByContent(baseCSVReader *csvReader, base2HeadColMap []int, headCSVReader *csvReader, head2BaseColMap []int) {
+ for i := 0; i < len(base2HeadColMap); i++ {
+ headStart := 0
+ for base2HeadColMap[i] == unmappedColumn && headStart < len(head2BaseColMap) {
+ if head2BaseColMap[headStart] == unmappedColumn {
+ rows := min(maxRowsToInspect, max(0, min(len(baseCSVReader.buffer), len(headCSVReader.buffer))-1))
+ same := 0
+ for j := 1; j <= rows; j++ {
+ baseCell, baseErr := getCell(baseCSVReader.buffer[j], i)
+ headCell, headErr := getCell(headCSVReader.buffer[j], headStart)
+ if baseErr == nil && headErr == nil && baseCell == headCell {
+ same++
+ }
+ }
+ if (float32(same) / float32(rows)) > minRatioToMatch {
+ base2HeadColMap[i] = headStart
+ head2BaseColMap[headStart] = i
+ }
+ }
+ headStart++
+ }
+ }
+}
+
+// getCell returns the specific cell, or ErrorUndefinedCell if it is not present.
+func getCell(row []string, column int) (string, error) {
+ if column < len(row) {
+ return row[column], nil
+ }
+ return "", ErrorUndefinedCell
+}
+
+// countUnmappedColumns returns the count of unmapped columns.
+func countUnmappedColumns(mapping []int) int {
+ count := 0
+ for i := 0; i < len(mapping); i++ {
+ if mapping[i] == unmappedColumn {
+ count++
+ }
+ }
+ return count
+}
+
+// tryMergeLines maps the separated line numbers of a git diff. The result is assumed to be ordered.
+func tryMergeLines(lines []*DiffLine) [][2]int {
+ ids := make([][2]int, len(lines))
+
+ i := 0
+ for _, line := range lines {
+ if line.Type != DiffLineSection {
+ ids[i][0] = line.LeftIdx
+ ids[i][1] = line.RightIdx
+ i++
+ }
+ }
+
+ ids = ids[:i]
+
+ result := make([][2]int, len(ids))
+
+ j := 0
+ for i = 0; i < len(ids); i++ {
+ if ids[i][0] == 0 {
+ if j > 0 && result[j-1][1] == 0 {
+ temp := j
+ for temp > 0 && result[temp-1][1] == 0 {
+ temp--
+ }
+ result[temp][1] = ids[i][1]
+ continue
+ }
+ }
+ result[j] = ids[i]
+ j++
+ }
+
+ return result[:j]
+}
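
A sketch of the merge behavior, assuming the DiffLine types declared in gitdiff.go (same package):

lines := []*DiffLine{
	{Type: DiffLineDel, LeftIdx: 2},                // {2, 0}: deletion, no right line
	{Type: DiffLineAdd, RightIdx: 2},               // {0, 2}: addition, no left line
	{Type: DiffLinePlain, LeftIdx: 3, RightIdx: 3}, // {3, 3}: unchanged
}
merged := tryMergeLines(lines)
// merged == [][2]int{{2, 2}, {3, 3}}: the deletion and the addition that
// follows it are folded into a single row of the CSV diff table.
_ = merged
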
diff --git a/services/gitdiff/csv_test.go b/services/gitdiff/csv_test.go
new file mode 100644
index 0000000..1dbe616
--- /dev/null
+++ b/services/gitdiff/csv_test.go
@@ -0,0 +1,229 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package gitdiff
+
+import (
+ "encoding/csv"
+ "strings"
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ csv_module "code.gitea.io/gitea/modules/csv"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestCSVDiff(t *testing.T) {
+ cases := []struct {
+ diff string
+ base string
+ head string
+ cells [][]TableDiffCellType
+ }{
+ // case 0 - initial commit of a csv
+ {
+ diff: `diff --git a/unittest.csv b/unittest.csv
+--- a/unittest.csv
++++ b/unittest.csv
+@@ -0,0 +1,2 @@
++col1,col2
++a,a`,
+ base: "",
+ head: `col1,col2
+a,a`,
+ cells: [][]TableDiffCellType{
+ {TableDiffCellAdd, TableDiffCellAdd},
+ {TableDiffCellAdd, TableDiffCellAdd},
+ },
+ },
+ // case 1 - adding 1 row at end
+ {
+ diff: `diff --git a/unittest.csv b/unittest.csv
+--- a/unittest.csv
++++ b/unittest.csv
+@@ -1,2 +1,3 @@
+ col1,col2
+-a,a
++a,a
++b,b`,
+ base: `col1,col2
+a,a`,
+ head: `col1,col2
+a,a
+b,b`,
+ cells: [][]TableDiffCellType{
+ {TableDiffCellUnchanged, TableDiffCellUnchanged},
+ {TableDiffCellUnchanged, TableDiffCellUnchanged},
+ {TableDiffCellAdd, TableDiffCellAdd},
+ },
+ },
+ // case 2 - row deleted
+ {
+ diff: `diff --git a/unittest.csv b/unittest.csv
+--- a/unittest.csv
++++ b/unittest.csv
+@@ -1,3 +1,2 @@
+ col1,col2
+-a,a
+ b,b`,
+ base: `col1,col2
+a,a
+b,b`,
+ head: `col1,col2
+b,b`,
+ cells: [][]TableDiffCellType{
+ {TableDiffCellUnchanged, TableDiffCellUnchanged},
+ {TableDiffCellDel, TableDiffCellDel},
+ {TableDiffCellUnchanged, TableDiffCellUnchanged},
+ },
+ },
+ // case 3 - row changed
+ {
+ diff: `diff --git a/unittest.csv b/unittest.csv
+--- a/unittest.csv
++++ b/unittest.csv
+@@ -1,2 +1,2 @@
+ col1,col2
+-b,b
++b,c`,
+ base: `col1,col2
+b,b`,
+ head: `col1,col2
+b,c`,
+ cells: [][]TableDiffCellType{
+ {TableDiffCellUnchanged, TableDiffCellUnchanged},
+ {TableDiffCellUnchanged, TableDiffCellChanged},
+ },
+ },
+ // case 4 - all deleted
+ {
+ diff: `diff --git a/unittest.csv b/unittest.csv
+--- a/unittest.csv
++++ b/unittest.csv
+@@ -1,2 +0,0 @@
+-col1,col2
+-b,c`,
+ base: `col1,col2
+b,c`,
+ head: "",
+ cells: [][]TableDiffCellType{
+ {TableDiffCellDel, TableDiffCellDel},
+ {TableDiffCellDel, TableDiffCellDel},
+ },
+ },
+ // case 5 - renames first column
+ {
+ diff: `diff --git a/unittest.csv b/unittest.csv
+--- a/unittest.csv
++++ b/unittest.csv
+@@ -1,3 +1,3 @@
+-col1,col2,col3
++cola,col2,col3
+ a,b,c`,
+ base: `col1,col2,col3
+a,b,c`,
+ head: `cola,col2,col3
+a,b,c`,
+ cells: [][]TableDiffCellType{
+ {TableDiffCellDel, TableDiffCellAdd, TableDiffCellUnchanged, TableDiffCellUnchanged},
+ {TableDiffCellDel, TableDiffCellAdd, TableDiffCellUnchanged, TableDiffCellUnchanged},
+ },
+ },
+ // case 6 - inserts a column after first, deletes last column
+ {
+ diff: `diff --git a/unittest.csv b/unittest.csv
+--- a/unittest.csv
++++ b/unittest.csv
+@@ -1,2 +1,2 @@
+-col1,col2,col3
+-a,b,c
++col1,col1a,col2
++a,d,b`,
+ base: `col1,col2,col3
+a,b,c`,
+ head: `col1,col1a,col2
+a,d,b`,
+ cells: [][]TableDiffCellType{
+ {TableDiffCellUnchanged, TableDiffCellAdd, TableDiffCellDel, TableDiffCellMovedUnchanged},
+ {TableDiffCellUnchanged, TableDiffCellAdd, TableDiffCellDel, TableDiffCellMovedUnchanged},
+ },
+ },
+ // case 7 - deletes first column, inserts column after last
+ {
+ diff: `diff --git a/unittest.csv b/unittest.csv
+--- a/unittest.csv
++++ b/unittest.csv
+@@ -1,2 +1,2 @@
+-col1,col2,col3
+-a,b,c
++col2,col3,col4
++b,c,d`,
+ base: `col1,col2,col3
+a,b,c`,
+ head: `col2,col3,col4
+b,c,d`,
+ cells: [][]TableDiffCellType{
+ {TableDiffCellDel, TableDiffCellUnchanged, TableDiffCellUnchanged, TableDiffCellAdd},
+ {TableDiffCellDel, TableDiffCellUnchanged, TableDiffCellUnchanged, TableDiffCellAdd},
+ },
+ },
+		// case 8 - two columns deleted, two added
+ {
+ diff: `diff --git a/unittest.csv b/unittest.csv
+--- a/unittest.csv
++++ b/unittest.csv
+@@ -1,2 +1,2 @@
+-col1,col2,col3
+-a,b,c
++col3,col4,col5
++c,d,e`,
+ base: `col1,col2,col3
+a,b,c`,
+ head: `col3,col4,col5
+c,d,e`,
+ cells: [][]TableDiffCellType{
+ {TableDiffCellDel, TableDiffCellMovedUnchanged, TableDiffCellDel, TableDiffCellAdd, TableDiffCellAdd},
+ {TableDiffCellDel, TableDiffCellMovedUnchanged, TableDiffCellDel, TableDiffCellAdd, TableDiffCellAdd},
+ },
+ },
+ }
+
+ for n, c := range cases {
+ diff, err := ParsePatch(db.DefaultContext, setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, strings.NewReader(c.diff), "")
+ if err != nil {
+ t.Errorf("ParsePatch failed: %s", err)
+ }
+
+ var baseReader *csv.Reader
+ if len(c.base) > 0 {
+ baseReader, err = csv_module.CreateReaderAndDetermineDelimiter(nil, strings.NewReader(c.base))
+ if err != nil {
+ t.Errorf("CreateReaderAndDetermineDelimiter failed: %s", err)
+ }
+ }
+ var headReader *csv.Reader
+ if len(c.head) > 0 {
+ headReader, err = csv_module.CreateReaderAndDetermineDelimiter(nil, strings.NewReader(c.head))
+ if err != nil {
+ t.Errorf("CreateReaderAndDetermineDelimiter failed: %s", err)
+ }
+ }
+
+ result, err := CreateCsvDiff(diff.Files[0], baseReader, headReader)
+ require.NoError(t, err)
+ assert.Len(t, result, 1, "case %d: should be one section", n)
+
+ section := result[0]
+ assert.Len(t, section.Rows, len(c.cells), "case %d: should be %d rows", n, len(c.cells))
+
+ for i, row := range section.Rows {
+ assert.Len(t, row.Cells, len(c.cells[i]), "case %d: row %d should have %d cells", n, i, len(c.cells[i]))
+ for j, cell := range row.Cells {
+ assert.Equal(t, c.cells[i][j], cell.Type, "case %d: row %d cell %d should be equal", n, i, j)
+ }
+ }
+ }
+}
diff --git a/services/gitdiff/gitdiff.go b/services/gitdiff/gitdiff.go
new file mode 100644
index 0000000..8f376a1
--- /dev/null
+++ b/services/gitdiff/gitdiff.go
@@ -0,0 +1,1396 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package gitdiff
+
+import (
+ "bufio"
+ "bytes"
+ "cmp"
+ "context"
+ "fmt"
+ "html"
+ "html/template"
+ "io"
+ "net/url"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ pull_model "code.gitea.io/gitea/models/pull"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/analyze"
+ "code.gitea.io/gitea/modules/charset"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/highlight"
+ "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/translation"
+
+ "github.com/sergi/go-diff/diffmatchpatch"
+ stdcharset "golang.org/x/net/html/charset"
+ "golang.org/x/text/encoding"
+ "golang.org/x/text/transform"
+)
+
+// DiffLineType represents the type of DiffLine.
+type DiffLineType uint8
+
+// DiffLineType possible values.
+const (
+ DiffLinePlain DiffLineType = iota + 1
+ DiffLineAdd
+ DiffLineDel
+ DiffLineSection
+)
+
+// DiffFileType represents the type of DiffFile.
+type DiffFileType uint8
+
+// DiffFileType possible values.
+const (
+ DiffFileAdd DiffFileType = iota + 1
+ DiffFileChange
+ DiffFileDel
+ DiffFileRename
+ DiffFileCopy
+)
+
+// DiffLineExpandDirection represents the DiffLineSection expand direction
+type DiffLineExpandDirection uint8
+
+// DiffLineExpandDirection possible values.
+const (
+ DiffLineExpandNone DiffLineExpandDirection = iota + 1
+ DiffLineExpandSingle
+ DiffLineExpandUpDown
+ DiffLineExpandUp
+ DiffLineExpandDown
+)
+
+// DiffLine represents a line difference in a DiffSection.
+type DiffLine struct {
+ LeftIdx int
+ RightIdx int
+ Match int
+ Type DiffLineType
+ Content string
+ Conversations []issues_model.CodeConversation
+ SectionInfo *DiffLineSectionInfo
+}
+
+// DiffLineSectionInfo represents diff line section meta data
+type DiffLineSectionInfo struct {
+ Path string
+ LastLeftIdx int
+ LastRightIdx int
+ LeftIdx int
+ RightIdx int
+ LeftHunkSize int
+ RightHunkSize int
+}
+
+// BlobExcerptChunkSize represents the maximum number of lines of an excerpt
+const BlobExcerptChunkSize = 20
+
+// GetType returns the type of DiffLine.
+func (d *DiffLine) GetType() int {
+ return int(d.Type)
+}
+
+// GetHTMLDiffLineType returns the diff line type name for HTML
+func (d *DiffLine) GetHTMLDiffLineType() string {
+ switch d.Type {
+ case DiffLineAdd:
+ return "add"
+ case DiffLineDel:
+ return "del"
+ case DiffLineSection:
+ return "tag"
+ }
+ return "same"
+}
+
+// CanComment returns whether a line can be commented on
+func (d *DiffLine) CanComment() bool {
+ return len(d.Conversations) == 0 && d.Type != DiffLineSection
+}
+
+// GetCommentSide returns the comment side of the first comment; if not set, it returns an empty string
+func (d *DiffLine) GetCommentSide() string {
+ if len(d.Conversations) == 0 || len(d.Conversations[0]) == 0 {
+ return ""
+ }
+ return d.Conversations[0][0].DiffSide()
+}
+
+// GetLineTypeMarker returns the line type marker
+func (d *DiffLine) GetLineTypeMarker() string {
+ if strings.IndexByte(" +-", d.Content[0]) > -1 {
+ return d.Content[0:1]
+ }
+ return ""
+}
+
+// GetBlobExcerptQuery builds query string to get blob excerpt
+func (d *DiffLine) GetBlobExcerptQuery() string {
+ query := fmt.Sprintf(
+ "last_left=%d&last_right=%d&"+
+ "left=%d&right=%d&"+
+ "left_hunk_size=%d&right_hunk_size=%d&"+
+ "path=%s",
+ d.SectionInfo.LastLeftIdx, d.SectionInfo.LastRightIdx,
+ d.SectionInfo.LeftIdx, d.SectionInfo.RightIdx,
+ d.SectionInfo.LeftHunkSize, d.SectionInfo.RightHunkSize,
+ url.QueryEscape(d.SectionInfo.Path))
+ return query
+}
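
For example (a sketch), a section line that can expand lines 11-30 on both sides of a/b.txt produces the following query:

d := &DiffLine{SectionInfo: &DiffLineSectionInfo{
	Path:        "a/b.txt",
	LastLeftIdx: 10, LastRightIdx: 10,
	LeftIdx: 31, RightIdx: 31,
	LeftHunkSize: 5, RightHunkSize: 5,
}}
_ = d.GetBlobExcerptQuery()
// "last_left=10&last_right=10&left=31&right=31&left_hunk_size=5&right_hunk_size=5&path=a%2Fb.txt"
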
+
+// GetExpandDirection gets DiffLineExpandDirection
+func (d *DiffLine) GetExpandDirection() DiffLineExpandDirection {
+ if d.Type != DiffLineSection || d.SectionInfo == nil || d.SectionInfo.LeftIdx-d.SectionInfo.LastLeftIdx <= 1 || d.SectionInfo.RightIdx-d.SectionInfo.LastRightIdx <= 1 {
+ return DiffLineExpandNone
+ }
+ if d.SectionInfo.LastLeftIdx <= 0 && d.SectionInfo.LastRightIdx <= 0 {
+ return DiffLineExpandUp
+ } else if d.SectionInfo.RightIdx-d.SectionInfo.LastRightIdx > BlobExcerptChunkSize && d.SectionInfo.RightHunkSize > 0 {
+ return DiffLineExpandUpDown
+ } else if d.SectionInfo.LeftHunkSize <= 0 && d.SectionInfo.RightHunkSize <= 0 {
+ return DiffLineExpandDown
+ }
+ return DiffLineExpandSingle
+}
+
+func getDiffLineSectionInfo(treePath, line string, lastLeftIdx, lastRightIdx int) *DiffLineSectionInfo {
+	leftLine, leftHunk, rightLine, rightHunk := git.ParseDiffHunkString(line)
+
+ return &DiffLineSectionInfo{
+ Path: treePath,
+ LastLeftIdx: lastLeftIdx,
+ LastRightIdx: lastRightIdx,
+ LeftIdx: leftLine,
+ RightIdx: rightLine,
+ LeftHunkSize: leftHunk,
+		RightHunkSize: rightHunk,
+ }
+}
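
A sketch of the bookkeeping, assuming git.ParseDiffHunkString extracts the standard "-start,len +start,len" hunk header fields:

// Hunk header seen after 9 left lines and 11 right lines were already emitted:
info := getDiffLineSectionInfo("b.txt", "@@ -10,6 +12,8 @@", 9, 11)
// info.LeftIdx == 10, info.LeftHunkSize == 6,
// info.RightIdx == 12, info.RightHunkSize == 8,
// info.LastLeftIdx == 9, info.LastRightIdx == 11
_ = info
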
+
+// escape a line's content, or return <br> as needed for copy/paste purposes
+func getLineContent(content string, locale translation.Locale) DiffInline {
+ if len(content) > 0 {
+ return DiffInlineWithUnicodeEscape(template.HTML(html.EscapeString(content)), locale)
+ }
+ return DiffInline{EscapeStatus: &charset.EscapeStatus{}, Content: "<br>"}
+}
+
+// DiffSection represents a section of a DiffFile.
+type DiffSection struct {
+ file *DiffFile
+ FileName string
+ Name string
+ Lines []*DiffLine
+}
+
+var (
+ addedCodePrefix = []byte(`<span class="added-code">`)
+ removedCodePrefix = []byte(`<span class="removed-code">`)
+ codeTagSuffix = []byte(`</span>`)
+)
+
+func diffToHTML(lineWrapperTags []string, diffs []diffmatchpatch.Diff, lineType DiffLineType) string {
+ buf := bytes.NewBuffer(nil)
+ // restore the line wrapper tags <span class="line"> and <span class="cl">, if necessary
+ for _, tag := range lineWrapperTags {
+ buf.WriteString(tag)
+ }
+ for _, diff := range diffs {
+ switch {
+ case diff.Type == diffmatchpatch.DiffEqual:
+ buf.WriteString(diff.Text)
+ case diff.Type == diffmatchpatch.DiffInsert && lineType == DiffLineAdd:
+ buf.Write(addedCodePrefix)
+ buf.WriteString(diff.Text)
+ buf.Write(codeTagSuffix)
+ case diff.Type == diffmatchpatch.DiffDelete && lineType == DiffLineDel:
+ buf.Write(removedCodePrefix)
+ buf.WriteString(diff.Text)
+ buf.Write(codeTagSuffix)
+ }
+ }
+ for range lineWrapperTags {
+ buf.WriteString("</span>")
+ }
+ return buf.String()
+}
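
A sketch of the wrapping for an added line, using the package-level diffMatchPatch configured below; only runs matching the line type get a span:

diffs := diffMatchPatch.DiffMain("println(x)", "println(y)", true)
out := diffToHTML(nil, diffs, DiffLineAdd)
// On an add line only DiffInsert runs are wrapped, so out is:
// `println(<span class="added-code">y</span>)`
_ = out
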
+
+// GetLine gets a specific line by type (add or del) and file line number
+func (diffSection *DiffSection) GetLine(lineType DiffLineType, idx int) *DiffLine {
+ var (
+ difference = 0
+ addCount = 0
+ delCount = 0
+ matchDiffLine *DiffLine
+ )
+
+LOOP:
+ for _, diffLine := range diffSection.Lines {
+ switch diffLine.Type {
+ case DiffLineAdd:
+ addCount++
+ case DiffLineDel:
+ delCount++
+ default:
+ if matchDiffLine != nil {
+ break LOOP
+ }
+ difference = diffLine.RightIdx - diffLine.LeftIdx
+ addCount = 0
+ delCount = 0
+ }
+
+ switch lineType {
+ case DiffLineDel:
+ if diffLine.RightIdx == 0 && diffLine.LeftIdx == idx-difference {
+ matchDiffLine = diffLine
+ }
+ case DiffLineAdd:
+ if diffLine.LeftIdx == 0 && diffLine.RightIdx == idx+difference {
+ matchDiffLine = diffLine
+ }
+ }
+ }
+
+ if addCount == delCount {
+ return matchDiffLine
+ }
+ return nil
+}
+
+var diffMatchPatch = diffmatchpatch.New()
+
+func init() {
+ diffMatchPatch.DiffEditCost = 100
+}
+
+// DiffInline is a struct that has a content and escape status
+type DiffInline struct {
+ EscapeStatus *charset.EscapeStatus
+ Content template.HTML
+}
+
+// DiffInlineWithUnicodeEscape makes a DiffInline with hidden unicode characters escaped
+func DiffInlineWithUnicodeEscape(s template.HTML, locale translation.Locale) DiffInline {
+ status, content := charset.EscapeControlHTML(s, locale, charset.DiffContext)
+ return DiffInline{EscapeStatus: status, Content: content}
+}
+
+// DiffInlineWithHighlightCode makes a DiffInline with code highlight and hidden unicode characters escaped
+func DiffInlineWithHighlightCode(fileName, language, code string, locale translation.Locale) DiffInline {
+ highlighted, _ := highlight.Code(fileName, language, code)
+ status, content := charset.EscapeControlHTML(highlighted, locale, charset.DiffContext)
+ return DiffInline{EscapeStatus: status, Content: content}
+}
+
+// GetComputedInlineDiffFor computes inline diff for the given line.
+func (diffSection *DiffSection) GetComputedInlineDiffFor(diffLine *DiffLine, locale translation.Locale) DiffInline {
+ if setting.Git.DisableDiffHighlight {
+ return getLineContent(diffLine.Content[1:], locale)
+ }
+
+ var (
+ compareDiffLine *DiffLine
+ diff1 string
+ diff2 string
+ )
+
+ language := ""
+ if diffSection.file != nil {
+ language = diffSection.file.Language
+ }
+
+	// try to find the equivalent diff line; otherwise, ignore it
+ switch diffLine.Type {
+ case DiffLineSection:
+ return getLineContent(diffLine.Content[1:], locale)
+ case DiffLineAdd:
+ compareDiffLine = diffSection.GetLine(DiffLineDel, diffLine.RightIdx)
+ if compareDiffLine == nil {
+ return DiffInlineWithHighlightCode(diffSection.FileName, language, diffLine.Content[1:], locale)
+ }
+ diff1 = compareDiffLine.Content
+ diff2 = diffLine.Content
+ case DiffLineDel:
+ compareDiffLine = diffSection.GetLine(DiffLineAdd, diffLine.LeftIdx)
+ if compareDiffLine == nil {
+ return DiffInlineWithHighlightCode(diffSection.FileName, language, diffLine.Content[1:], locale)
+ }
+ diff1 = diffLine.Content
+ diff2 = compareDiffLine.Content
+ default:
+ if strings.IndexByte(" +-", diffLine.Content[0]) > -1 {
+ return DiffInlineWithHighlightCode(diffSection.FileName, language, diffLine.Content[1:], locale)
+ }
+ return DiffInlineWithHighlightCode(diffSection.FileName, language, diffLine.Content, locale)
+ }
+
+ hcd := NewHighlightCodeDiff()
+ diffRecord := hcd.diffWithHighlight(diffSection.FileName, language, diff1[1:], diff2[1:])
+	// it seems that Gitea doesn't need the line wrapper of Chroma, so do not add them back
+	// if the line wrappers are still needed in the future, they can be added back by "diffToHTML(hcd.lineWrapperTags, ...)"
+ diffHTML := diffToHTML(nil, diffRecord, diffLine.Type)
+ return DiffInlineWithUnicodeEscape(template.HTML(diffHTML), locale)
+}
+
+// DiffFile represents a file diff.
+type DiffFile struct {
+ Name string
+ NameHash string
+ OldName string
+ Index int
+ Addition, Deletion int
+ Type DiffFileType
+ IsCreated bool
+ IsDeleted bool
+ IsBin bool
+ IsLFSFile bool
+ IsRenamed bool
+ IsAmbiguous bool
+ IsSubmodule bool
+ Sections []*DiffSection
+ IsIncomplete bool
+ IsIncompleteLineTooLong bool
+ IsProtected bool
+ IsGenerated bool
+ IsVendored bool
+ IsViewed bool // User specific
+ HasChangedSinceLastReview bool // User specific
+ Language string
+ Mode string
+ OldMode string
+}
+
+// GetType returns type of diff file.
+func (diffFile *DiffFile) GetType() int {
+ return int(diffFile.Type)
+}
+
+// GetTailSection creates a fake DiffLineSection if the last section is not the end of the file
+func (diffFile *DiffFile) GetTailSection(gitRepo *git.Repository, leftCommitID, rightCommitID string) *DiffSection {
+ if len(diffFile.Sections) == 0 || diffFile.Type != DiffFileChange || diffFile.IsBin || diffFile.IsLFSFile {
+ return nil
+ }
+ leftCommit, err := gitRepo.GetCommit(leftCommitID)
+ if err != nil {
+ return nil
+ }
+ rightCommit, err := gitRepo.GetCommit(rightCommitID)
+ if err != nil {
+ return nil
+ }
+ lastSection := diffFile.Sections[len(diffFile.Sections)-1]
+ lastLine := lastSection.Lines[len(lastSection.Lines)-1]
+ leftLineCount := getCommitFileLineCount(leftCommit, diffFile.Name)
+ rightLineCount := getCommitFileLineCount(rightCommit, diffFile.Name)
+ if leftLineCount <= lastLine.LeftIdx || rightLineCount <= lastLine.RightIdx {
+ return nil
+ }
+ tailDiffLine := &DiffLine{
+ Type: DiffLineSection,
+ Content: " ",
+ SectionInfo: &DiffLineSectionInfo{
+ Path: diffFile.Name,
+ LastLeftIdx: lastLine.LeftIdx,
+ LastRightIdx: lastLine.RightIdx,
+ LeftIdx: leftLineCount,
+ RightIdx: rightLineCount,
+ },
+ }
+ tailSection := &DiffSection{FileName: diffFile.Name, Lines: []*DiffLine{tailDiffLine}}
+ return tailSection
+}
+
+// GetDiffFileName returns the name of the diff file, or its old name in case it was deleted
+func (diffFile *DiffFile) GetDiffFileName() string {
+ if diffFile.Name == "" {
+ return diffFile.OldName
+ }
+ return diffFile.Name
+}
+
+func (diffFile *DiffFile) ShouldBeHidden() bool {
+ return diffFile.IsGenerated || diffFile.IsViewed
+}
+
+func (diffFile *DiffFile) ModeTranslationKey(mode string) string {
+ switch mode {
+ case "040000":
+ return "git.filemode.directory"
+ case "100644":
+ return "git.filemode.normal_file"
+ case "100755":
+ return "git.filemode.executable_file"
+ case "120000":
+ return "git.filemode.symbolic_link"
+ case "160000":
+ return "git.filemode.submodule"
+ default:
+ return mode
+ }
+}
+
+func getCommitFileLineCount(commit *git.Commit, filePath string) int {
+ blob, err := commit.GetBlobByPath(filePath)
+ if err != nil {
+ return 0
+ }
+ lineCount, err := blob.GetBlobLineCount()
+ if err != nil {
+ return 0
+ }
+ return lineCount
+}
+
+// Diff represents a difference between two git trees.
+type Diff struct {
+ Start, End string
+ NumFiles int
+ TotalAddition, TotalDeletion int
+ Files []*DiffFile
+ IsIncomplete bool
+ NumViewedFiles int // user-specific
+}
+
+// LoadComments loads comments into each line
+func (diff *Diff) LoadComments(ctx context.Context, issue *issues_model.Issue, currentUser *user_model.User, showOutdatedComments bool) error {
+ allConversations, err := issues_model.FetchCodeConversations(ctx, issue, currentUser, showOutdatedComments)
+ if err != nil {
+ return err
+ }
+ for _, file := range diff.Files {
+ if lineCommits, ok := allConversations[file.Name]; ok {
+ for _, section := range file.Sections {
+ for _, line := range section.Lines {
+ if conversations, ok := lineCommits[int64(line.LeftIdx*-1)]; ok {
+ line.Conversations = append(line.Conversations, conversations...)
+ }
+ if comments, ok := lineCommits[int64(line.RightIdx)]; ok {
+ line.Conversations = append(line.Conversations, comments...)
+ }
+ }
+ }
+ }
+ }
+ return nil
+}
+
+const cmdDiffHead = "diff --git "
+
+// ParsePatch builds a Diff object from an io.Reader and some parameters.
+func ParsePatch(ctx context.Context, maxLines, maxLineCharacters, maxFiles int, reader io.Reader, skipToFile string) (*Diff, error) {
+ log.Debug("ParsePatch(%d, %d, %d, ..., %s)", maxLines, maxLineCharacters, maxFiles, skipToFile)
+ var curFile *DiffFile
+
+ skipping := skipToFile != ""
+
+ diff := &Diff{Files: make([]*DiffFile, 0)}
+
+ sb := strings.Builder{}
+
+ // OK let's set a reasonable buffer size.
+	// This should be at least the size of maxLineCharacters or 4096, whichever is larger.
+ readerSize := maxLineCharacters
+ if readerSize < 4096 {
+ readerSize = 4096
+ }
+
+ input := bufio.NewReaderSize(reader, readerSize)
+ line, err := input.ReadString('\n')
+ if err != nil {
+ if err == io.EOF {
+ return diff, nil
+ }
+ return diff, err
+ }
+
+ prepareValue := func(s, p string) string {
+ return strings.TrimSpace(strings.TrimPrefix(s, p))
+ }
+
+parsingLoop:
+ for {
+		// 1. A patch file always begins with `diff --git ` + `a/path b/path` (possibly quoted)
+		// if it does not, we have bad input!
+ if !strings.HasPrefix(line, cmdDiffHead) {
+ return diff, fmt.Errorf("invalid first file line: %s", line)
+ }
+
+ if maxFiles > -1 && len(diff.Files) >= maxFiles {
+ lastFile := createDiffFile(diff, line)
+ diff.End = lastFile.Name
+ diff.IsIncomplete = true
+ _, err := io.Copy(io.Discard, reader)
+ if err != nil {
+ // By the definition of io.Copy this never returns io.EOF
+ return diff, fmt.Errorf("error during io.Copy: %w", err)
+ }
+ break parsingLoop
+ }
+
+ curFile = createDiffFile(diff, line)
+ if skipping {
+ if curFile.Name != skipToFile {
+ line, err = skipToNextDiffHead(input)
+ if err != nil {
+ if err == io.EOF {
+ return diff, nil
+ }
+ return diff, err
+ }
+ continue
+ }
+ skipping = false
+ }
+
+ diff.Files = append(diff.Files, curFile)
+
+ // 2. It is followed by one or more extended header lines:
+ //
+ // old mode <mode>
+ // new mode <mode>
+ // deleted file mode <mode>
+ // new file mode <mode>
+ // copy from <path>
+ // copy to <path>
+ // rename from <path>
+ // rename to <path>
+ // similarity index <number>
+ // dissimilarity index <number>
+ // index <hash>..<hash> <mode>
+ //
+ // * <mode> 6-digit octal numbers including the file type and file permission bits.
+ // * <path> does not include the a/ and b/ prefixes
+		// * <number> percentage of unchanged lines for similarity, or percentage of changed
+		//   lines for dissimilarity, as an integer rounded down, with a terminal %. 100% => equal files.
+ // * The index line includes the blob object names before and after the change.
+ // The <mode> is included if the file mode does not change; otherwise, separate
+ // lines indicate the old and the new mode.
+ // 3. Following this header the "standard unified" diff format header may be encountered: (but not for every case...)
+ //
+ // --- a/<path>
+ // +++ b/<path>
+ //
+ // With multiple hunks
+ //
+ // @@ <hunk descriptor> @@
+ // +added line
+ // -removed line
+ // unchanged line
+ //
+ // 4. Binary files get:
+ //
+ // Binary files a/<path> and b/<path> differ
+ //
+ // but one of a/<path> and b/<path> could be /dev/null.
+ curFileLoop:
+ for {
+ line, err = input.ReadString('\n')
+ if err != nil {
+ if err != io.EOF {
+ return diff, err
+ }
+ break parsingLoop
+ }
+
+ switch {
+ case strings.HasPrefix(line, cmdDiffHead):
+ break curFileLoop
+ case strings.HasPrefix(line, "old mode ") ||
+ strings.HasPrefix(line, "new mode "):
+
+ if strings.HasPrefix(line, "old mode ") {
+ curFile.OldMode = prepareValue(line, "old mode ")
+ }
+ if strings.HasPrefix(line, "new mode ") {
+ curFile.Mode = prepareValue(line, "new mode ")
+ }
+
+ if strings.HasSuffix(line, " 160000\n") {
+ curFile.IsSubmodule = true
+ }
+ case strings.HasPrefix(line, "rename from "):
+ curFile.IsRenamed = true
+ curFile.Type = DiffFileRename
+ if curFile.IsAmbiguous {
+ curFile.OldName = prepareValue(line, "rename from ")
+ }
+ case strings.HasPrefix(line, "rename to "):
+ curFile.IsRenamed = true
+ curFile.Type = DiffFileRename
+ if curFile.IsAmbiguous {
+ curFile.Name = prepareValue(line, "rename to ")
+ curFile.IsAmbiguous = false
+ }
+ case strings.HasPrefix(line, "copy from "):
+ curFile.IsRenamed = true
+ curFile.Type = DiffFileCopy
+ if curFile.IsAmbiguous {
+ curFile.OldName = prepareValue(line, "copy from ")
+ }
+ case strings.HasPrefix(line, "copy to "):
+ curFile.IsRenamed = true
+ curFile.Type = DiffFileCopy
+ if curFile.IsAmbiguous {
+ curFile.Name = prepareValue(line, "copy to ")
+ curFile.IsAmbiguous = false
+ }
+ case strings.HasPrefix(line, "new file"):
+ curFile.Type = DiffFileAdd
+ curFile.IsCreated = true
+ if strings.HasPrefix(line, "new file mode ") {
+ curFile.Mode = prepareValue(line, "new file mode ")
+ }
+ if strings.HasSuffix(line, " 160000\n") {
+ curFile.IsSubmodule = true
+ }
+ case strings.HasPrefix(line, "deleted"):
+ curFile.Type = DiffFileDel
+ curFile.IsDeleted = true
+ if strings.HasSuffix(line, " 160000\n") {
+ curFile.IsSubmodule = true
+ }
+ case strings.HasPrefix(line, "index"):
+ if strings.HasSuffix(line, " 160000\n") {
+ curFile.IsSubmodule = true
+ }
+ case strings.HasPrefix(line, "similarity index 100%"):
+ curFile.Type = DiffFileRename
+ case strings.HasPrefix(line, "Binary"):
+ curFile.IsBin = true
+ case strings.HasPrefix(line, "--- "):
+ // Handle ambiguous filenames
+ if curFile.IsAmbiguous {
+ // The shortest string that can end up here is:
+ // "--- a\t\n" without the quotes.
+					// This line has a len() of 7 but doesn't contain an oldName.
+					// So the line needs a length of at least 8 to contain one.
+					// The code would otherwise panic with an out-of-bounds error.
+ if len(line) > 7 && line[4] == 'a' {
+ curFile.OldName = line[6 : len(line)-1]
+ if line[len(line)-2] == '\t' {
+ curFile.OldName = curFile.OldName[:len(curFile.OldName)-1]
+ }
+ } else {
+ curFile.OldName = ""
+ }
+ }
+ // Otherwise do nothing with this line
+ case strings.HasPrefix(line, "+++ "):
+ // Handle ambiguous filenames
+ if curFile.IsAmbiguous {
+ if len(line) > 6 && line[4] == 'b' {
+ curFile.Name = line[6 : len(line)-1]
+ if line[len(line)-2] == '\t' {
+ curFile.Name = curFile.Name[:len(curFile.Name)-1]
+ }
+ if curFile.OldName == "" {
+ curFile.OldName = curFile.Name
+ }
+ } else {
+ curFile.Name = curFile.OldName
+ }
+ curFile.IsAmbiguous = false
+ }
+ // Otherwise do nothing with this line, but now switch to parsing hunks
+ lineBytes, isFragment, err := parseHunks(ctx, curFile, maxLines, maxLineCharacters, input)
+ diff.TotalAddition += curFile.Addition
+ diff.TotalDeletion += curFile.Deletion
+ if err != nil {
+ if err != io.EOF {
+ return diff, err
+ }
+ break parsingLoop
+ }
+ sb.Reset()
+ _, _ = sb.Write(lineBytes)
+ for isFragment {
+ lineBytes, isFragment, err = input.ReadLine()
+ if err != nil {
+ // Now by the definition of ReadLine this cannot be io.EOF
+ return diff, fmt.Errorf("unable to ReadLine: %w", err)
+ }
+ _, _ = sb.Write(lineBytes)
+ }
+ line = sb.String()
+ sb.Reset()
+
+ break curFileLoop
+ }
+ }
+ }
+
+ // TODO: There are numerous issues with this:
+ // - we might want to consider detecting encoding while parsing but...
+ // - we're likely to fail to get the correct encoding here anyway as we won't have enough information
+ diffLineTypeBuffers := make(map[DiffLineType]*bytes.Buffer, 3)
+ diffLineTypeDecoders := make(map[DiffLineType]*encoding.Decoder, 3)
+ diffLineTypeBuffers[DiffLinePlain] = new(bytes.Buffer)
+ diffLineTypeBuffers[DiffLineAdd] = new(bytes.Buffer)
+ diffLineTypeBuffers[DiffLineDel] = new(bytes.Buffer)
+ for _, f := range diff.Files {
+ f.NameHash = git.HashFilePathForWebUI(f.Name)
+
+ for _, buffer := range diffLineTypeBuffers {
+ buffer.Reset()
+ }
+ for _, sec := range f.Sections {
+ for _, l := range sec.Lines {
+ if l.Type == DiffLineSection {
+ continue
+ }
+ diffLineTypeBuffers[l.Type].WriteString(l.Content[1:])
+ diffLineTypeBuffers[l.Type].WriteString("\n")
+ }
+ }
+ for lineType, buffer := range diffLineTypeBuffers {
+ diffLineTypeDecoders[lineType] = nil
+ if buffer.Len() == 0 {
+ continue
+ }
+ charsetLabel, err := charset.DetectEncoding(buffer.Bytes())
+ if charsetLabel != "UTF-8" && err == nil {
+ encoding, _ := stdcharset.Lookup(charsetLabel)
+ if encoding != nil {
+ diffLineTypeDecoders[lineType] = encoding.NewDecoder()
+ }
+ }
+ }
+ for _, sec := range f.Sections {
+ for _, l := range sec.Lines {
+ decoder := diffLineTypeDecoders[l.Type]
+ if decoder != nil {
+ if c, _, err := transform.String(decoder, l.Content[1:]); err == nil {
+ l.Content = l.Content[0:1] + c
+ }
+ }
+ }
+ }
+ }
+
+ diff.NumFiles = len(diff.Files)
+ return diff, nil
+}
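
A usage sketch; the tests below drive it the same way with db.DefaultContext and the setting.Git limits, while -1 disables each cap (ctx here is assumed to be any context.Context):

patch := "diff --git a/hello.txt b/hello.txt\n" +
	"--- a/hello.txt\n" +
	"+++ b/hello.txt\n" +
	"@@ -1 +1 @@\n" +
	"-hello\n" +
	"+hello, world\n"
diff, err := ParsePatch(ctx, -1, -1, -1, strings.NewReader(patch), "")
if err == nil {
	// diff.NumFiles == 1, diff.TotalAddition == 1, diff.TotalDeletion == 1
}
_ = diff
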
+
+func skipToNextDiffHead(input *bufio.Reader) (line string, err error) {
+ // need to skip until the next cmdDiffHead
+ var isFragment, wasFragment bool
+ var lineBytes []byte
+ for {
+ lineBytes, isFragment, err = input.ReadLine()
+ if err != nil {
+ return "", err
+ }
+ if wasFragment {
+ wasFragment = isFragment
+ continue
+ }
+ if bytes.HasPrefix(lineBytes, []byte(cmdDiffHead)) {
+ break
+ }
+ wasFragment = isFragment
+ }
+ line = string(lineBytes)
+ if isFragment {
+ var tail string
+ tail, err = input.ReadString('\n')
+ if err != nil {
+ return "", err
+ }
+ line += tail
+ }
+ return line, err
+}
+
+func parseHunks(ctx context.Context, curFile *DiffFile, maxLines, maxLineCharacters int, input *bufio.Reader) (lineBytes []byte, isFragment bool, err error) {
+ sb := strings.Builder{}
+
+ var (
+ curSection *DiffSection
+ curFileLinesCount int
+ curFileLFSPrefix bool
+ )
+
+ lastLeftIdx := -1
+ leftLine, rightLine := 1, 1
+
+ for {
+ for isFragment {
+ curFile.IsIncomplete = true
+ curFile.IsIncompleteLineTooLong = true
+ _, isFragment, err = input.ReadLine()
+ if err != nil {
+ // Now by the definition of ReadLine this cannot be io.EOF
+ return nil, false, fmt.Errorf("unable to ReadLine: %w", err)
+ }
+ }
+ sb.Reset()
+ lineBytes, isFragment, err = input.ReadLine()
+ if err != nil {
+ if err == io.EOF {
+ return lineBytes, isFragment, err
+ }
+ err = fmt.Errorf("unable to ReadLine: %w", err)
+ return nil, false, err
+ }
+ if lineBytes[0] == 'd' {
+ // End of hunks
+ return lineBytes, isFragment, err
+ }
+
+ switch lineBytes[0] {
+ case '@':
+ if maxLines > -1 && curFileLinesCount >= maxLines {
+ curFile.IsIncomplete = true
+ continue
+ }
+
+ _, _ = sb.Write(lineBytes)
+ for isFragment {
+ // This is very odd indeed - we're in a section header and the line is too long
+ // This really shouldn't happen...
+ lineBytes, isFragment, err = input.ReadLine()
+ if err != nil {
+ // Now by the definition of ReadLine this cannot be io.EOF
+ return nil, false, fmt.Errorf("unable to ReadLine: %w", err)
+ }
+ _, _ = sb.Write(lineBytes)
+ }
+ line := sb.String()
+
+ // Create a new section to represent this hunk
+ curSection = &DiffSection{file: curFile}
+ lastLeftIdx = -1
+ curFile.Sections = append(curFile.Sections, curSection)
+
+ lineSectionInfo := getDiffLineSectionInfo(curFile.Name, line, leftLine-1, rightLine-1)
+ diffLine := &DiffLine{
+ Type: DiffLineSection,
+ Content: line,
+ SectionInfo: lineSectionInfo,
+ }
+ curSection.Lines = append(curSection.Lines, diffLine)
+ curSection.FileName = curFile.Name
+ // update line number.
+ leftLine = lineSectionInfo.LeftIdx
+ rightLine = lineSectionInfo.RightIdx
+ continue
+ case '\\':
+ if maxLines > -1 && curFileLinesCount >= maxLines {
+ curFile.IsIncomplete = true
+ continue
+ }
+ // This is used only to indicate that the current file does not have a terminal newline
+ if !bytes.Equal(lineBytes, []byte("\\ No newline at end of file")) {
+ return nil, false, fmt.Errorf("unexpected line in hunk: %s", string(lineBytes))
+ }
+			// Technically this should be the end of the file!
+ // FIXME: we should be putting a marker at the end of the file if there is no terminal new line
+ continue
+ case '+':
+ curFileLinesCount++
+ curFile.Addition++
+ if maxLines > -1 && curFileLinesCount >= maxLines {
+ curFile.IsIncomplete = true
+ continue
+ }
+ diffLine := &DiffLine{Type: DiffLineAdd, RightIdx: rightLine, Match: -1}
+ rightLine++
+ if curSection == nil {
+ // Create a new section to represent this hunk
+ curSection = &DiffSection{file: curFile}
+ curFile.Sections = append(curFile.Sections, curSection)
+ lastLeftIdx = -1
+ }
+ if lastLeftIdx > -1 {
+ diffLine.Match = lastLeftIdx
+ curSection.Lines[lastLeftIdx].Match = len(curSection.Lines)
+ lastLeftIdx++
+ if lastLeftIdx >= len(curSection.Lines) || curSection.Lines[lastLeftIdx].Type != DiffLineDel {
+ lastLeftIdx = -1
+ }
+ }
+ curSection.Lines = append(curSection.Lines, diffLine)
+ case '-':
+ curFileLinesCount++
+ curFile.Deletion++
+ if maxLines > -1 && curFileLinesCount >= maxLines {
+ curFile.IsIncomplete = true
+ continue
+ }
+ diffLine := &DiffLine{Type: DiffLineDel, LeftIdx: leftLine, Match: -1}
+ if leftLine > 0 {
+ leftLine++
+ }
+ if curSection == nil {
+ // Create a new section to represent this hunk
+ curSection = &DiffSection{file: curFile}
+ curFile.Sections = append(curFile.Sections, curSection)
+ lastLeftIdx = -1
+ }
+ if len(curSection.Lines) == 0 || curSection.Lines[len(curSection.Lines)-1].Type != DiffLineDel {
+ lastLeftIdx = len(curSection.Lines)
+ }
+ curSection.Lines = append(curSection.Lines, diffLine)
+ case ' ':
+ curFileLinesCount++
+ if maxLines > -1 && curFileLinesCount >= maxLines {
+ curFile.IsIncomplete = true
+ continue
+ }
+ diffLine := &DiffLine{Type: DiffLinePlain, LeftIdx: leftLine, RightIdx: rightLine}
+ leftLine++
+ rightLine++
+ lastLeftIdx = -1
+ if curSection == nil {
+ // Create a new section to represent this hunk
+ curSection = &DiffSection{file: curFile}
+ curFile.Sections = append(curFile.Sections, curSection)
+ }
+ curSection.Lines = append(curSection.Lines, diffLine)
+ default:
+ // This is unexpected
+ return nil, false, fmt.Errorf("unexpected line in hunk: %s", string(lineBytes))
+ }
+
+ line := string(lineBytes)
+ if isFragment {
+ curFile.IsIncomplete = true
+ curFile.IsIncompleteLineTooLong = true
+ for isFragment {
+ lineBytes, isFragment, err = input.ReadLine()
+ if err != nil {
+ // Now by the definition of ReadLine this cannot be io.EOF
+ return lineBytes, isFragment, fmt.Errorf("unable to ReadLine: %w", err)
+ }
+ }
+ }
+ if len(line) > maxLineCharacters {
+ curFile.IsIncomplete = true
+ curFile.IsIncompleteLineTooLong = true
+ line = line[:maxLineCharacters]
+ }
+ curSection.Lines[len(curSection.Lines)-1].Content = line
+
+ // handle LFS
+ if line[1:] == lfs.MetaFileIdentifier {
+ curFileLFSPrefix = true
+ } else if curFileLFSPrefix && strings.HasPrefix(line[1:], lfs.MetaFileOidPrefix) {
+ oid := strings.TrimPrefix(line[1:], lfs.MetaFileOidPrefix)
+ if len(oid) == 64 {
+ m := &git_model.LFSMetaObject{Pointer: lfs.Pointer{Oid: oid}}
+ count, err := db.CountByBean(ctx, m)
+
+ if err == nil && count > 0 {
+ curFile.IsBin = true
+ curFile.IsLFSFile = true
+ curSection.Lines = nil
+ lastLeftIdx = -1
+ }
+ }
+ }
+ }
+}
+
+func createDiffFile(diff *Diff, line string) *DiffFile {
+ // The a/ and b/ filenames are the same unless rename/copy is involved.
+ // Especially, even for a creation or a deletion, /dev/null is not used
+ // in place of the a/ or b/ filenames.
+ //
+ // When rename/copy is involved, file1 and file2 show the name of the
+ // source file of the rename/copy and the name of the file that rename/copy
+ // produces, respectively.
+ //
+ // Path names are quoted if necessary.
+ //
+	// This means that you should always be able to determine the file name even when there
+	// is potential ambiguity...
+ //
+ // but we can be simpler with our heuristics by just forcing git to prefix things nicely
+ curFile := &DiffFile{
+ Index: len(diff.Files) + 1,
+ Type: DiffFileChange,
+ Sections: make([]*DiffSection, 0, 10),
+ }
+
+ rd := strings.NewReader(line[len(cmdDiffHead):] + " ")
+ curFile.Type = DiffFileChange
+ var oldNameAmbiguity, newNameAmbiguity bool
+
+ curFile.OldName, oldNameAmbiguity = readFileName(rd)
+ curFile.Name, newNameAmbiguity = readFileName(rd)
+ if oldNameAmbiguity && newNameAmbiguity {
+ curFile.IsAmbiguous = true
+		// OK we should bet that the oldName and the newName are the same if they can be made to be the same
+ // So we need to start again ...
+ if (len(line)-len(cmdDiffHead)-1)%2 == 0 {
+ // diff --git a/b b/b b/b b/b b/b b/b
+ //
+ midpoint := (len(line) + len(cmdDiffHead) - 1) / 2
+ newl, old := line[len(cmdDiffHead):midpoint], line[midpoint+1:]
+ if len(newl) > 2 && len(old) > 2 && newl[2:] == old[2:] {
+ curFile.OldName = old[2:]
+ curFile.Name = old[2:]
+ }
+ }
+ }
+
+ curFile.IsRenamed = curFile.Name != curFile.OldName
+ return curFile
+}
+
+func readFileName(rd *strings.Reader) (string, bool) {
+ ambiguity := false
+ var name string
+ char, _ := rd.ReadByte()
+ _ = rd.UnreadByte()
+ if char == '"' {
+ _, _ = fmt.Fscanf(rd, "%q ", &name)
+ if len(name) == 0 {
+ log.Error("Reader has no file name: reader=%+v", rd)
+ return "", true
+ }
+
+ if name[0] == '\\' {
+ name = name[1:]
+ }
+ } else {
+		// This technique is potentially ambiguous; it may not be possible to uniquely identify the filenames from the diff line alone
+ ambiguity = true
+ _, _ = fmt.Fscanf(rd, "%s ", &name)
+ char, _ := rd.ReadByte()
+ _ = rd.UnreadByte()
+ for !(char == 0 || char == '"' || char == 'b') {
+ var suffix string
+ _, _ = fmt.Fscanf(rd, "%s ", &suffix)
+ name += " " + suffix
+ char, _ = rd.ReadByte()
+ _ = rd.UnreadByte()
+ }
+ }
+ if len(name) < 2 {
+ log.Error("Unable to determine name from reader: reader=%+v", rd)
+ return "", true
+ }
+ return name[2:], ambiguity
+}
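
A sketch of the quoted path case, which is the unambiguous one (assumes "strings" is imported):

rd := strings.NewReader(`"a/with space.txt" b/other.txt `)
name, ambiguous := readFileName(rd)
// name == "with space.txt", ambiguous == false: quoted names parse exactly,
// while unquoted names containing spaces force the caller to guess.
_, _ = name, ambiguous
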
+
+// DiffOptions represents the options for a DiffRange
+type DiffOptions struct {
+ BeforeCommitID string
+ AfterCommitID string
+ SkipTo string
+ MaxLines int
+ MaxLineCharacters int
+ MaxFiles int
+ WhitespaceBehavior git.TrustedCmdArgs
+ DirectComparison bool
+}
+
+// GetDiff builds a Diff between two commits of a repository.
+// Passing the empty string as beforeCommitID returns a diff from the parent commit.
+// The whitespaceBehavior is either empty or a set of git whitespace flags
+func GetDiff(ctx context.Context, gitRepo *git.Repository, opts *DiffOptions, files ...string) (*Diff, error) {
+ repoPath := gitRepo.Path
+
+ commit, err := gitRepo.GetCommit(opts.AfterCommitID)
+ if err != nil {
+ return nil, err
+ }
+
+ cmdDiff := git.NewCommand(gitRepo.Ctx)
+ objectFormat, err := gitRepo.GetObjectFormat()
+ if err != nil {
+ return nil, err
+ }
+
+ if (len(opts.BeforeCommitID) == 0 || opts.BeforeCommitID == objectFormat.EmptyObjectID().String()) && commit.ParentCount() == 0 {
+ cmdDiff.AddArguments("diff", "--src-prefix=\\a/", "--dst-prefix=\\b/", "-M").
+ AddArguments(opts.WhitespaceBehavior...).
+ AddDynamicArguments(objectFormat.EmptyTree().String()).
+ AddDynamicArguments(opts.AfterCommitID)
+ } else {
+ actualBeforeCommitID := opts.BeforeCommitID
+ if len(actualBeforeCommitID) == 0 {
+ parentCommit, _ := commit.Parent(0)
+ actualBeforeCommitID = parentCommit.ID.String()
+ }
+
+ cmdDiff.AddArguments("diff", "--src-prefix=\\a/", "--dst-prefix=\\b/", "-M").
+ AddArguments(opts.WhitespaceBehavior...).
+ AddDynamicArguments(actualBeforeCommitID, opts.AfterCommitID)
+ opts.BeforeCommitID = actualBeforeCommitID
+ }
+
+	// In git 2.31, git diff learned --skip-to which we can use to skip directly to the wanted file
+ // so if we are using at least this version of git we don't have to tell ParsePatch to do
+ // the skipping for us
+ parsePatchSkipToFile := opts.SkipTo
+ if opts.SkipTo != "" && git.CheckGitVersionAtLeast("2.31") == nil {
+ cmdDiff.AddOptionFormat("--skip-to=%s", opts.SkipTo)
+ parsePatchSkipToFile = ""
+ }
+
+ cmdDiff.AddDashesAndList(files...)
+
+ reader, writer := io.Pipe()
+ defer func() {
+ _ = reader.Close()
+ _ = writer.Close()
+ }()
+
+ go func() {
+ stderr := &bytes.Buffer{}
+ cmdDiff.SetDescription(fmt.Sprintf("GetDiffRange [repo_path: %s]", repoPath))
+ if err := cmdDiff.Run(&git.RunOpts{
+ Timeout: time.Duration(setting.Git.Timeout.Default) * time.Second,
+ Dir: repoPath,
+ Stdout: writer,
+ Stderr: stderr,
+ }); err != nil {
+ log.Error("error during GetDiff(git diff dir: %s): %v, stderr: %s", repoPath, err, stderr.String())
+ }
+
+ _ = writer.Close()
+ }()
+
+ diff, err := ParsePatch(ctx, opts.MaxLines, opts.MaxLineCharacters, opts.MaxFiles, reader, parsePatchSkipToFile)
+ if err != nil {
+ return nil, fmt.Errorf("unable to ParsePatch: %w", err)
+ }
+ diff.Start = opts.SkipTo
+
+ checker, err := gitRepo.GitAttributeChecker(opts.AfterCommitID, git.LinguistAttributes...)
+ if err != nil {
+ return nil, fmt.Errorf("unable to GitAttributeChecker: %w", err)
+ }
+ defer checker.Close()
+
+ for _, diffFile := range diff.Files {
+ gotVendor := false
+ gotGenerated := false
+
+ attrs, err := checker.CheckPath(diffFile.Name)
+ if err != nil {
+ log.Error("checker.CheckPath(%s) failed: %v", diffFile.Name, err)
+ } else {
+ vendored := attrs["linguist-vendored"].Bool()
+ diffFile.IsVendored = vendored.Value()
+ gotVendor = vendored.Has()
+
+ generated := attrs["linguist-generated"].Bool()
+ diffFile.IsGenerated = generated.Value()
+ gotGenerated = generated.Has()
+
+ diffFile.Language = cmp.Or(
+ attrs["linguist-language"].String(),
+ attrs["gitlab-language"].Prefix(),
+ )
+ }
+
+ if !gotVendor {
+ diffFile.IsVendored = analyze.IsVendor(diffFile.Name)
+ }
+ if !gotGenerated {
+ diffFile.IsGenerated = analyze.IsGenerated(diffFile.Name)
+ }
+
+ tailSection := diffFile.GetTailSection(gitRepo, opts.BeforeCommitID, opts.AfterCommitID)
+ if tailSection != nil {
+ diffFile.Sections = append(diffFile.Sections, tailSection)
+ }
+ }
+
+ separator := "..."
+ if opts.DirectComparison {
+ separator = ".."
+ }
+
+ diffPaths := []string{opts.BeforeCommitID + separator + opts.AfterCommitID}
+ if len(opts.BeforeCommitID) == 0 || opts.BeforeCommitID == objectFormat.EmptyObjectID().String() {
+ diffPaths = []string{objectFormat.EmptyTree().String(), opts.AfterCommitID}
+ }
+ diff.NumFiles, diff.TotalAddition, diff.TotalDeletion, err = git.GetDiffShortStat(gitRepo.Ctx, repoPath, nil, diffPaths...)
+ if err != nil && strings.Contains(err.Error(), "no merge base") {
+ // git >= 2.28 now returns an error if base and head have become unrelated.
+ // Previously it would return the results of `git diff --shortstat base head`, so let's try that...
+ diffPaths = []string{opts.BeforeCommitID, opts.AfterCommitID}
+ diff.NumFiles, diff.TotalAddition, diff.TotalDeletion, err = git.GetDiffShortStat(gitRepo.Ctx, repoPath, nil, diffPaths...)
+ }
+ if err != nil {
+ return nil, err
+ }
+
+ return diff, nil
+}
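+
+// A minimal usage sketch for GetDiff (values assumed for illustration; the
+// commit ID is the one used by the test repository below):
+//
+//	diff, err := GetDiff(ctx, gitRepo, &DiffOptions{
+//		AfterCommitID:      "bd7063cc7c04689c4d082183d32a604ed27a24f9",
+//		MaxLines:           setting.Git.MaxGitDiffLines,
+//		MaxLineCharacters:  setting.Git.MaxGitDiffLineCharacters,
+//		MaxFiles:           setting.Git.MaxGitDiffFiles,
+//		WhitespaceBehavior: GetWhitespaceFlag("show-all"),
+//	})
+//
+// Leaving BeforeCommitID empty diffs against the parent commit (or the empty
+// tree for a root commit), as described above.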
+
+type PullDiffStats struct {
+ TotalAddition, TotalDeletion int
+}
+
+// GetPullDiffStats returns the total additions and deletions of a pull request diff
+func GetPullDiffStats(gitRepo *git.Repository, opts *DiffOptions) (*PullDiffStats, error) {
+ repoPath := gitRepo.Path
+
+ diff := &PullDiffStats{}
+
+ separator := "..."
+ if opts.DirectComparison {
+ separator = ".."
+ }
+
+ objectFormat, err := gitRepo.GetObjectFormat()
+ if err != nil {
+ return nil, err
+ }
+
+ diffPaths := []string{opts.BeforeCommitID + separator + opts.AfterCommitID}
+ if len(opts.BeforeCommitID) == 0 || opts.BeforeCommitID == objectFormat.EmptyObjectID().String() {
+ diffPaths = []string{objectFormat.EmptyTree().String(), opts.AfterCommitID}
+ }
+
+ _, diff.TotalAddition, diff.TotalDeletion, err = git.GetDiffShortStat(gitRepo.Ctx, repoPath, nil, diffPaths...)
+ if err != nil && strings.Contains(err.Error(), "no merge base") {
+ // git >= 2.28 now returns an error if base and head have become unrelated.
+ // Previously it would return the results of `git diff --shortstat base head`, so let's try that...
+ diffPaths = []string{opts.BeforeCommitID, opts.AfterCommitID}
+ _, diff.TotalAddition, diff.TotalDeletion, err = git.GetDiffShortStat(gitRepo.Ctx, repoPath, nil, diffPaths...)
+ }
+ if err != nil {
+ return nil, err
+ }
+
+ return diff, nil
+}
+
+// SyncAndGetUserSpecificDiff is like GetDiff, except that user-specific data, such as which files the given user has already viewed on the given PR, will also be set.
+// Additionally, the database is updated asynchronously if files have changed since the last review.
+func SyncAndGetUserSpecificDiff(ctx context.Context, userID int64, pull *issues_model.PullRequest, gitRepo *git.Repository, opts *DiffOptions, files ...string) (*Diff, error) {
+ diff, err := GetDiff(ctx, gitRepo, opts, files...)
+ if err != nil {
+ return nil, err
+ }
+ review, err := pull_model.GetNewestReviewState(ctx, userID, pull.ID)
+ if err != nil || review == nil || review.UpdatedFiles == nil {
+ return diff, err
+ }
+
+ latestCommit := opts.AfterCommitID
+ if latestCommit == "" {
+ latestCommit = pull.HeadBranch // opts.AfterCommitID is preferred because it handles PRs from forks correctly and the branch name doesn't
+ }
+
+ changedFiles, err := gitRepo.GetFilesChangedBetween(review.CommitSHA, latestCommit)
+ // There are way too many possible errors.
+ // Examples are various git errors, such as the commit the review was based on having been gc'ed and hence no longer existing, as well as unrecoverable errors where we should serve a 500 response.
+ // Due to the current architecture and the practical limitation of needing to compare explicit error messages, we can only choose one approach without the code getting ugly.
+ // For SOME of the errors, such as the gc'ed commit, it would be best to mark all files as changed.
+ // But as that does not work for all potential errors, we simply mark all files as unchanged and drop the error, which always works, even if not as well as possible.
+ if err != nil {
+ log.Error("Could not get changed files between %s and %s for pull request %d in repo with path %s. Assuming no changes. Error: %w", review.CommitSHA, latestCommit, pull.Index, gitRepo.Path, err)
+ }
+
+ filesChangedSinceLastDiff := make(map[string]pull_model.ViewedState)
+outer:
+ for _, diffFile := range diff.Files {
+ fileViewedState := review.UpdatedFiles[diffFile.GetDiffFileName()]
+
+ // Check whether it was previously detected that the file has changed since the last review
+ if fileViewedState == pull_model.HasChanged {
+ diffFile.HasChangedSinceLastReview = true
+ continue
+ }
+
+ filename := diffFile.GetDiffFileName()
+
+ // Check explicitly whether the file has changed since the last review
+ for _, changedFile := range changedFiles {
+ diffFile.HasChangedSinceLastReview = filename == changedFile
+ if diffFile.HasChangedSinceLastReview {
+ filesChangedSinceLastDiff[filename] = pull_model.HasChanged
+ continue outer // We don't want to check if the file is viewed here as that would fold the file, which is in this case unwanted
+ }
+ }
+ // Check whether the file has already been viewed
+ if fileViewedState == pull_model.Viewed {
+ diffFile.IsViewed = true
+ diff.NumViewedFiles++
+ }
+ }
+
+ // Explicitly store files that have changed in the database, if any are present at all.
+ // This has the benefit that the "Has Changed" attribute will be present as long as the user does not explicitly mark this file as viewed, so it will even survive a page reload after marking another file as viewed.
+ // On the other hand, this means that even if a commit reverting an unseen change is committed, the file will still be seen as changed.
+ if len(filesChangedSinceLastDiff) > 0 {
+ err := pull_model.UpdateReviewState(ctx, review.UserID, review.PullID, review.CommitSHA, filesChangedSinceLastDiff)
+ if err != nil {
+ log.Warn("Could not update review for user %d, pull %d, commit %s and the changed files %v: %v", review.UserID, review.PullID, review.CommitSHA, filesChangedSinceLastDiff, err)
+ return nil, err
+ }
+ }
+
+ return diff, nil
+}
+
+// CommentAsDiff returns c.Patch as *Diff
+func CommentAsDiff(ctx context.Context, c *issues_model.Comment) (*Diff, error) {
+ diff, err := ParsePatch(ctx, setting.Git.MaxGitDiffLines,
+ setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, strings.NewReader(c.Patch), "")
+ if err != nil {
+ log.Error("Unable to parse patch: %v", err)
+ return nil, err
+ }
+ if len(diff.Files) == 0 {
+ return nil, fmt.Errorf("no file found for comment ID: %d", c.ID)
+ }
+ secs := diff.Files[0].Sections
+ if len(secs) == 0 {
+ return nil, fmt.Errorf("no sections found for comment ID: %d", c.ID)
+ }
+ return diff, nil
+}
+
+// CommentMustAsDiff executes CommentAsDiff and logs the error instead of returning it
+func CommentMustAsDiff(ctx context.Context, c *issues_model.Comment) *Diff {
+ if c == nil {
+ return nil
+ }
+ defer func() {
+ if err := recover(); err != nil {
+ log.Error("PANIC whilst retrieving diff for comment[%d] Error: %v\nStack: %s", c.ID, err, log.Stack(2))
+ }
+ }()
+ diff, err := CommentAsDiff(ctx, c)
+ if err != nil {
+ log.Warn("CommentMustAsDiff: %v", err)
+ }
+ return diff
+}
+
+// GetWhitespaceFlag returns the git diff flag for the given whitespace behavior
+func GetWhitespaceFlag(whitespaceBehavior string) git.TrustedCmdArgs {
+ whitespaceFlags := map[string]git.TrustedCmdArgs{
+ "ignore-all": {"-w"},
+ "ignore-change": {"-b"},
+ "ignore-eol": {"--ignore-space-at-eol"},
+ "show-all": nil,
+ }
+
+ if flag, ok := whitespaceFlags[whitespaceBehavior]; ok {
+ return flag
+ }
+ log.Warn("unknown whitespace behavior: %q, default to 'show-all'", whitespaceBehavior)
+ return nil
+}
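+
+// For example, GetWhitespaceFlag("ignore-all") yields {"-w"}, which GetDiff
+// passes through DiffOptions.WhitespaceBehavior into the underlying `git diff`
+// invocation; any unrecognized value logs a warning and falls back to
+// "show-all" (no extra flag).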
diff --git a/services/gitdiff/gitdiff_test.go b/services/gitdiff/gitdiff_test.go
new file mode 100644
index 0000000..f2c099d
--- /dev/null
+++ b/services/gitdiff/gitdiff_test.go
@@ -0,0 +1,671 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package gitdiff
+
+import (
+ "strconv"
+ "strings"
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/setting"
+
+ dmp "github.com/sergi/go-diff/diffmatchpatch"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestDiffToHTML(t *testing.T) {
+ assert.Equal(t, "foo <span class=\"added-code\">bar</span> biz", diffToHTML(nil, []dmp.Diff{
+ {Type: dmp.DiffEqual, Text: "foo "},
+ {Type: dmp.DiffInsert, Text: "bar"},
+ {Type: dmp.DiffDelete, Text: " baz"},
+ {Type: dmp.DiffEqual, Text: " biz"},
+ }, DiffLineAdd))
+
+ assert.Equal(t, "foo <span class=\"removed-code\">bar</span> biz", diffToHTML(nil, []dmp.Diff{
+ {Type: dmp.DiffEqual, Text: "foo "},
+ {Type: dmp.DiffDelete, Text: "bar"},
+ {Type: dmp.DiffInsert, Text: " baz"},
+ {Type: dmp.DiffEqual, Text: " biz"},
+ }, DiffLineDel))
+}
+
+func TestParsePatch_skipTo(t *testing.T) {
+ type testcase struct {
+ name string
+ gitdiff string
+ wantErr bool
+ addition int
+ deletion int
+ oldFilename string
+ filename string
+ skipTo string
+ }
+ tests := []testcase{
+ {
+ name: "readme.md2readme.md",
+ gitdiff: `diff --git "a/A \\ B" "b/A \\ B"
+--- "a/A \\ B"
++++ "b/A \\ B"
+@@ -1,3 +1,6 @@
+ # gitea-github-migrator
++
++ Build Status
+- Latest Release
+ Docker Pulls
++ cut off
++ cut off
+diff --git "\\a/README.md" "\\b/README.md"
+--- "\\a/README.md"
++++ "\\b/README.md"
+@@ -1,3 +1,6 @@
+ # gitea-github-migrator
++
++ Build Status
+- Latest Release
+ Docker Pulls
++ cut off
++ cut off
+`,
+ addition: 4,
+ deletion: 1,
+ filename: "README.md",
+ oldFilename: "README.md",
+ skipTo: "README.md",
+ },
+ {
+ name: "A \\ B",
+ gitdiff: `diff --git "a/A \\ B" "b/A \\ B"
+--- "a/A \\ B"
++++ "b/A \\ B"
+@@ -1,3 +1,6 @@
+ # gitea-github-migrator
++
++ Build Status
+- Latest Release
+ Docker Pulls
++ cut off
++ cut off`,
+ addition: 4,
+ deletion: 1,
+ filename: "A \\ B",
+ oldFilename: "A \\ B",
+ skipTo: "A \\ B",
+ },
+ {
+ name: "A \\ B",
+ gitdiff: `diff --git "\\a/README.md" "\\b/README.md"
+--- "\\a/README.md"
++++ "\\b/README.md"
+@@ -1,3 +1,6 @@
+ # gitea-github-migrator
++
++ Build Status
+- Latest Release
+ Docker Pulls
++ cut off
++ cut off
+diff --git "a/A \\ B" "b/A \\ B"
+--- "a/A \\ B"
++++ "b/A \\ B"
+@@ -1,3 +1,6 @@
+ # gitea-github-migrator
++
++ Build Status
+- Latest Release
+ Docker Pulls
++ cut off
++ cut off`,
+ addition: 4,
+ deletion: 1,
+ filename: "A \\ B",
+ oldFilename: "A \\ B",
+ skipTo: "A \\ B",
+ },
+ {
+ name: "readme.md2readme.md",
+ gitdiff: `diff --git "a/A \\ B" "b/A \\ B"
+--- "a/A \\ B"
++++ "b/A \\ B"
+@@ -1,3 +1,6 @@
+ # gitea-github-migrator
++
++ Build Status
+- Latest Release
+ Docker Pulls
++ cut off
++ cut off
+diff --git "a/A \\ B" "b/A \\ B"
+--- "a/A \\ B"
++++ "b/A \\ B"
+@@ -1,3 +1,6 @@
+ # gitea-github-migrator
++
++ Build Status
+- Latest Release
+ Docker Pulls
++ cut off
++ cut off
+diff --git "\\a/README.md" "\\b/README.md"
+--- "\\a/README.md"
++++ "\\b/README.md"
+@@ -1,3 +1,6 @@
+ # gitea-github-migrator
++
++ Build Status
+- Latest Release
+ Docker Pulls
++ cut off
++ cut off
+`,
+ addition: 4,
+ deletion: 1,
+ filename: "README.md",
+ oldFilename: "README.md",
+ skipTo: "README.md",
+ },
+ }
+ for _, testcase := range tests {
+ t.Run(testcase.name, func(t *testing.T) {
+ got, err := ParsePatch(db.DefaultContext, setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, strings.NewReader(testcase.gitdiff), testcase.skipTo)
+ if (err != nil) != testcase.wantErr {
+ t.Errorf("ParsePatch(%q) error = %v, wantErr %v", testcase.name, err, testcase.wantErr)
+ return
+ }
+
+ gotMarshaled, _ := json.MarshalIndent(got, "", " ")
+ if got.NumFiles != 1 {
+ t.Errorf("ParsePatch(%q) did not receive 1 file:\n%s", testcase.name, string(gotMarshaled))
+ return
+ }
+ if got.TotalAddition != testcase.addition {
+ t.Errorf("ParsePatch(%q) does not have correct totalAddition %d, wanted %d", testcase.name, got.TotalAddition, testcase.addition)
+ }
+ if got.TotalDeletion != testcase.deletion {
+ t.Errorf("ParsePatch(%q) did not have correct totalDeletion %d, wanted %d", testcase.name, got.TotalDeletion, testcase.deletion)
+ }
+ file := got.Files[0]
+ if file.Addition != testcase.addition {
+ t.Errorf("ParsePatch(%q) does not have correct file addition %d, wanted %d", testcase.name, file.Addition, testcase.addition)
+ }
+ if file.Deletion != testcase.deletion {
+ t.Errorf("ParsePatch(%q) did not have correct file deletion %d, wanted %d", testcase.name, file.Deletion, testcase.deletion)
+ }
+ if file.OldName != testcase.oldFilename {
+ t.Errorf("ParsePatch(%q) did not have correct OldName %q, wanted %q", testcase.name, file.OldName, testcase.oldFilename)
+ }
+ if file.Name != testcase.filename {
+ t.Errorf("ParsePatch(%q) did not have correct Name %q, wanted %q", testcase.name, file.Name, testcase.filename)
+ }
+ })
+ }
+}
+
+func TestParsePatch_singlefile(t *testing.T) {
+ type testcase struct {
+ name string
+ gitdiff string
+ wantErr bool
+ addition int
+ deletion int
+ oldFilename string
+ filename string
+ }
+
+ tests := []testcase{
+ {
+ name: "readme.md2readme.md",
+ gitdiff: `diff --git "\\a/README.md" "\\b/README.md"
+--- "\\a/README.md"
++++ "\\b/README.md"
+@@ -1,3 +1,6 @@
+ # gitea-github-migrator
++
++ Build Status
+- Latest Release
+ Docker Pulls
++ cut off
++ cut off
+`,
+ addition: 4,
+ deletion: 1,
+ filename: "README.md",
+ oldFilename: "README.md",
+ },
+ {
+ name: "A \\ B",
+ gitdiff: `diff --git "a/A \\ B" "b/A \\ B"
+--- "a/A \\ B"
++++ "b/A \\ B"
+@@ -1,3 +1,6 @@
+ # gitea-github-migrator
++
++ Build Status
+- Latest Release
+ Docker Pulls
++ cut off
++ cut off`,
+ addition: 4,
+ deletion: 1,
+ filename: "A \\ B",
+ oldFilename: "A \\ B",
+ },
+ {
+ name: "really weird filename",
+ gitdiff: `diff --git "\\a/a b/file b/a a/file" "\\b/a b/file b/a a/file"
+index d2186f1..f5c8ed2 100644
+--- "\\a/a b/file b/a a/file" ` + `
++++ "\\b/a b/file b/a a/file" ` + `
+@@ -1,3 +1,2 @@
+ Create a weird file.
+ ` + `
+-and what does diff do here?
+\ No newline at end of file`,
+ addition: 0,
+ deletion: 1,
+ filename: "a b/file b/a a/file",
+ oldFilename: "a b/file b/a a/file",
+ },
+ {
+ name: "delete file with blanks",
+ gitdiff: `diff --git "\\a/file with blanks" "\\b/file with blanks"
+deleted file mode 100644
+index 898651a..0000000
+--- "\\a/file with blanks" ` + `
++++ /dev/null
+@@ -1,5 +0,0 @@
+-a blank file
+-
+-has a couple o line
+-
+-the 5th line is the last
+`,
+ addition: 0,
+ deletion: 5,
+ filename: "file with blanks",
+ oldFilename: "file with blanks",
+ },
+ {
+ name: "rename a—as",
+ gitdiff: `diff --git "a/\360\243\220\265b\342\200\240vs" "b/a\342\200\224as"
+similarity index 100%
+rename from "\360\243\220\265b\342\200\240vs"
+rename to "a\342\200\224as"
+`,
+ addition: 0,
+ deletion: 0,
+ oldFilename: "ð£µb†vs",
+ filename: "a—as",
+ },
+ {
+ name: "rename with spaces",
+ gitdiff: `diff --git "\\a/a b/file b/a a/file" "\\b/a b/a a/file b/b file"
+similarity index 100%
+rename from a b/file b/a a/file
+rename to a b/a a/file b/b file
+`,
+ oldFilename: "a b/file b/a a/file",
+ filename: "a b/a a/file b/b file",
+ },
+ {
+ name: "ambiguous deleted",
+ gitdiff: `diff --git a/b b/b b/b b/b
+deleted file mode 100644
+index 92e798b..0000000
+--- a/b b/b` + "\t" + `
++++ /dev/null
+@@ -1 +0,0 @@
+-b b/b
+`,
+ oldFilename: "b b/b",
+ filename: "b b/b",
+ addition: 0,
+ deletion: 1,
+ },
+ {
+ name: "ambiguous addition",
+ gitdiff: `diff --git a/b b/b b/b b/b
+new file mode 100644
+index 0000000..92e798b
+--- /dev/null
++++ b/b b/b` + "\t" + `
+@@ -0,0 +1 @@
++b b/b
+`,
+ oldFilename: "b b/b",
+ filename: "b b/b",
+ addition: 1,
+ deletion: 0,
+ },
+ {
+ name: "rename",
+ gitdiff: `diff --git a/b b/b b/b b/b b/b b/b
+similarity index 100%
+rename from b b/b b/b b/b b/b
+rename to b
+`,
+ oldFilename: "b b/b b/b b/b b/b",
+ filename: "b",
+ },
+ {
+ name: "ambiguous 1",
+ gitdiff: `diff --git a/b b/b b/b b/b b/b b/b
+similarity index 100%
+rename from b b/b b/b b/b b/b
+rename to b
+`,
+ oldFilename: "b b/b b/b b/b b/b",
+ filename: "b",
+ },
+ {
+ name: "ambiguous 2",
+ gitdiff: `diff --git a/b b/b b/b b/b b/b b/b
+similarity index 100%
+rename from b b/b b/b b/b
+rename to b b/b
+`,
+ oldFilename: "b b/b b/b b/b",
+ filename: "b b/b",
+ },
+ {
+ name: "minuses-and-pluses",
+ gitdiff: `diff --git a/minuses-and-pluses b/minuses-and-pluses
+index 6961180..9ba1a00 100644
+--- a/minuses-and-pluses
++++ b/minuses-and-pluses
+@@ -1,4 +1,4 @@
+--- 1st line
+-++ 2nd line
+--- 3rd line
+-++ 4th line
++++ 1st line
++-- 2nd line
++++ 3rd line
++-- 4th line
+`,
+ oldFilename: "minuses-and-pluses",
+ filename: "minuses-and-pluses",
+ addition: 4,
+ deletion: 4,
+ },
+ }
+
+ for _, testcase := range tests {
+ t.Run(testcase.name, func(t *testing.T) {
+ got, err := ParsePatch(db.DefaultContext, setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, strings.NewReader(testcase.gitdiff), "")
+ if (err != nil) != testcase.wantErr {
+ t.Errorf("ParsePatch(%q) error = %v, wantErr %v", testcase.name, err, testcase.wantErr)
+ return
+ }
+
+ gotMarshaled, _ := json.MarshalIndent(got, "", " ")
+ if got.NumFiles != 1 {
+ t.Errorf("ParsePatch(%q) did not receive 1 file:\n%s", testcase.name, string(gotMarshaled))
+ return
+ }
+ if got.TotalAddition != testcase.addition {
+ t.Errorf("ParsePatch(%q) does not have correct totalAddition %d, wanted %d", testcase.name, got.TotalAddition, testcase.addition)
+ }
+ if got.TotalDeletion != testcase.deletion {
+ t.Errorf("ParsePatch(%q) did not have correct totalDeletion %d, wanted %d", testcase.name, got.TotalDeletion, testcase.deletion)
+ }
+ file := got.Files[0]
+ if file.Addition != testcase.addition {
+ t.Errorf("ParsePatch(%q) does not have correct file addition %d, wanted %d", testcase.name, file.Addition, testcase.addition)
+ }
+ if file.Deletion != testcase.deletion {
+ t.Errorf("ParsePatch(%q) did not have correct file deletion %d, wanted %d", testcase.name, file.Deletion, testcase.deletion)
+ }
+ if file.OldName != testcase.oldFilename {
+ t.Errorf("ParsePatch(%q) did not have correct OldName %q, wanted %q", testcase.name, file.OldName, testcase.oldFilename)
+ }
+ if file.Name != testcase.filename {
+ t.Errorf("ParsePatch(%q) did not have correct Name %q, wanted %q", testcase.name, file.Name, testcase.filename)
+ }
+ })
+ }
+
+ // Test max lines
+ diffBuilder := &strings.Builder{}
+
+ diff := `diff --git a/newfile2 b/newfile2
+new file mode 100644
+index 0000000..6bb8f39
+--- /dev/null
++++ b/newfile2
+@@ -0,0 +1,35 @@
+`
+ diffBuilder.WriteString(diff)
+
+ for i := 0; i < 35; i++ {
+ diffBuilder.WriteString("+line" + strconv.Itoa(i) + "\n")
+ }
+ diff = diffBuilder.String()
+ result, err := ParsePatch(db.DefaultContext, 20, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, strings.NewReader(diff), "")
+ if err != nil {
+ t.Errorf("There should not be an error: %v", err)
+ }
+ if !result.Files[0].IsIncomplete {
+ t.Errorf("Files should be incomplete! %v", result.Files[0])
+ }
+ result, err = ParsePatch(db.DefaultContext, 40, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, strings.NewReader(diff), "")
+ if err != nil {
+ t.Errorf("There should not be an error: %v", err)
+ }
+ if result.Files[0].IsIncomplete {
+ t.Errorf("Files should not be incomplete! %v", result.Files[0])
+ }
+ result, err = ParsePatch(db.DefaultContext, 40, 5, setting.Git.MaxGitDiffFiles, strings.NewReader(diff), "")
+ if err != nil {
+ t.Errorf("There should not be an error: %v", err)
+ }
+ if !result.Files[0].IsIncomplete {
+ t.Errorf("Files should be incomplete! %v", result.Files[0])
+ }
+
+ // Test max characters
+ diff = `diff --git a/newfile2 b/newfile2
+new file mode 100644
+index 0000000..6bb8f39
+--- /dev/null
++++ b/newfile2
+@@ -0,0 +1,35 @@
+`
+ diffBuilder.Reset()
+ diffBuilder.WriteString(diff)
+
+ for i := 0; i < 33; i++ {
+ diffBuilder.WriteString("+line" + strconv.Itoa(i) + "\n")
+ }
+ diffBuilder.WriteString("+line33")
+ for i := 0; i < 512; i++ {
+ diffBuilder.WriteString("0123456789ABCDEF")
+ }
+ diffBuilder.WriteByte('\n')
+ diffBuilder.WriteString("+line" + strconv.Itoa(34) + "\n")
+ diffBuilder.WriteString("+line" + strconv.Itoa(35) + "\n")
+ diff = diffBuilder.String()
+
+ result, err = ParsePatch(db.DefaultContext, 20, 4096, setting.Git.MaxGitDiffFiles, strings.NewReader(diff), "")
+ if err != nil {
+ t.Errorf("There should not be an error: %v", err)
+ }
+ if !result.Files[0].IsIncomplete {
+ t.Errorf("Files should be incomplete! %v", result.Files[0])
+ }
+ result, err = ParsePatch(db.DefaultContext, 40, 4096, setting.Git.MaxGitDiffFiles, strings.NewReader(diff), "")
+ if err != nil {
+ t.Errorf("There should not be an error: %v", err)
+ }
+ if !result.Files[0].IsIncomplete {
+ t.Errorf("Files should be incomplete! %v", result.Files[0])
+ }
+
+ diff = `diff --git "a/README.md" "b/README.md"
+--- a/README.md
++++ b/README.md
+@@ -1,3 +1,6 @@
+ # gitea-github-migrator
++
++ Build Status
+- Latest Release
+ Docker Pulls
++ cut off
++ cut off`
+ _, err = ParsePatch(db.DefaultContext, setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, strings.NewReader(diff), "")
+ if err != nil {
+ t.Errorf("ParsePatch failed: %s", err)
+ }
+
+ diff2 := `diff --git "a/A \\ B" "b/A \\ B"
+--- "a/A \\ B"
++++ "b/A \\ B"
+@@ -1,3 +1,6 @@
+ # gitea-github-migrator
++
++ Build Status
+- Latest Release
+ Docker Pulls
++ cut off
++ cut off`
+ _, err = ParsePatch(db.DefaultContext, setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, strings.NewReader(diff2), "")
+ if err != nil {
+ t.Errorf("ParsePatch failed: %s", err)
+ }
+
+ diff2a := `diff --git "a/A \\ B" b/A/B
+--- "a/A \\ B"
++++ b/A/B
+@@ -1,3 +1,6 @@
+ # gitea-github-migrator
++
++ Build Status
+- Latest Release
+ Docker Pulls
++ cut off
++ cut off`
+ _, err = ParsePatch(db.DefaultContext, setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, strings.NewReader(diff2a), "")
+ if err != nil {
+ t.Errorf("ParsePatch failed: %s", err)
+ }
+
+ diff3 := `diff --git a/README.md b/README.md
+--- a/README.md
++++ b/README.md
+@@ -1,3 +1,6 @@
+ # gitea-github-migrator
++
++ Build Status
+- Latest Release
+ Docker Pulls
++ cut off
++ cut off`
+ _, err = ParsePatch(db.DefaultContext, setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, strings.NewReader(diff3), "")
+ if err != nil {
+ t.Errorf("ParsePatch failed: %s", err)
+ }
+}
+
+func setupDefaultDiff() *Diff {
+ return &Diff{
+ Files: []*DiffFile{
+ {
+ Name: "README.md",
+ Sections: []*DiffSection{
+ {
+ Lines: []*DiffLine{
+ {
+ LeftIdx: 4,
+ RightIdx: 4,
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+}
+
+func TestDiff_LoadCommentsNoOutdated(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 2})
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+ diff := setupDefaultDiff()
+ require.NoError(t, diff.LoadComments(db.DefaultContext, issue, user, false))
+ assert.Len(t, diff.Files[0].Sections[0].Lines[0].Conversations, 2)
+}
+
+func TestDiff_LoadCommentsWithOutdated(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 2})
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+ diff := setupDefaultDiff()
+ require.NoError(t, diff.LoadComments(db.DefaultContext, issue, user, true))
+ assert.Len(t, diff.Files[0].Sections[0].Lines[0].Conversations, 2)
+ assert.Len(t, diff.Files[0].Sections[0].Lines[0].Conversations[0], 2)
+ assert.Len(t, diff.Files[0].Sections[0].Lines[0].Conversations[1], 1)
+}
+
+func TestDiffLine_CanComment(t *testing.T) {
+ assert.False(t, (&DiffLine{Type: DiffLineSection}).CanComment())
+ assert.False(t, (&DiffLine{Type: DiffLineAdd, Conversations: []issues_model.CodeConversation{{{Content: "bla"}}}}).CanComment())
+ assert.True(t, (&DiffLine{Type: DiffLineAdd}).CanComment())
+ assert.True(t, (&DiffLine{Type: DiffLineDel}).CanComment())
+ assert.True(t, (&DiffLine{Type: DiffLinePlain}).CanComment())
+}
+
+func TestDiffLine_GetCommentSide(t *testing.T) {
+ assert.Equal(t, "previous", (&DiffLine{Conversations: []issues_model.CodeConversation{{{Line: -3}}}}).GetCommentSide())
+ assert.Equal(t, "proposed", (&DiffLine{Conversations: []issues_model.CodeConversation{{{Line: 3}}}}).GetCommentSide())
+}
+
+func TestGetDiffRangeWithWhitespaceBehavior(t *testing.T) {
+ gitRepo, err := git.OpenRepository(git.DefaultContext, "./testdata/academic-module")
+ require.NoError(t, err)
+
+ defer gitRepo.Close()
+ for _, behavior := range []git.TrustedCmdArgs{{"-w"}, {"--ignore-space-at-eol"}, {"-b"}, nil} {
+ diffs, err := GetDiff(db.DefaultContext, gitRepo,
+ &DiffOptions{
+ AfterCommitID: "bd7063cc7c04689c4d082183d32a604ed27a24f9",
+ BeforeCommitID: "559c156f8e0178b71cb44355428f24001b08fc68",
+ MaxLines: setting.Git.MaxGitDiffLines,
+ MaxLineCharacters: setting.Git.MaxGitDiffLineCharacters,
+ MaxFiles: setting.Git.MaxGitDiffFiles,
+ WhitespaceBehavior: behavior,
+ })
+ require.NoError(t, err, "Error when diff with %s", behavior)
+ for _, f := range diffs.Files {
+ assert.NotEmpty(t, f.Sections, "%s should have sections", f.Name)
+ }
+ }
+}
+
+func TestNoCrashes(t *testing.T) {
+ type testcase struct {
+ gitdiff string
+ }
+
+ tests := []testcase{
+ {
+ gitdiff: "diff --git \n--- a\t\n",
+ },
+ {
+ gitdiff: "diff --git \"0\n",
+ },
+ }
+ for _, testcase := range tests {
+ // It shouldn't crash, so don't care about the output.
+ ParsePatch(db.DefaultContext, setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, strings.NewReader(testcase.gitdiff), "")
+ }
+}
diff --git a/services/gitdiff/highlightdiff.go b/services/gitdiff/highlightdiff.go
new file mode 100644
index 0000000..c72959e
--- /dev/null
+++ b/services/gitdiff/highlightdiff.go
@@ -0,0 +1,227 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package gitdiff
+
+import (
+ "strings"
+
+ "code.gitea.io/gitea/modules/highlight"
+
+ "github.com/sergi/go-diff/diffmatchpatch"
+)
+
+// token is an HTML tag or entity, e.g. "<span ...>", "</span>", "&lt;"
+func extractHTMLToken(s string) (before, token, after string, valid bool) {
+ for pos1 := 0; pos1 < len(s); pos1++ {
+ if s[pos1] == '<' {
+ pos2 := strings.IndexByte(s[pos1:], '>')
+ if pos2 == -1 {
+ return "", "", s, false
+ }
+ return s[:pos1], s[pos1 : pos1+pos2+1], s[pos1+pos2+1:], true
+ } else if s[pos1] == '&' {
+ pos2 := strings.IndexByte(s[pos1:], ';')
+ if pos2 == -1 {
+ return "", "", s, false
+ }
+ return s[:pos1], s[pos1 : pos1+pos2+1], s[pos1+pos2+1:], true
+ }
+ }
+ return "", "", s, true
+}
+
+// HighlightCodeDiff is used to do diff with highlighted HTML code.
+// It totally depends on Chroma's valid HTML output and its structure; do not use these functions for other purposes.
+// The HTML tags and entities will be replaced by Unicode placeholders: "<span>{TEXT}</span>" => "\uE000{TEXT}\uE001"
+// These Unicode placeholders are friendly to the diff.
+// Then, after the diff, the placeholders in the diff result will be recovered to the HTML tags and entities.
+// It's guaranteed that the tags in the final diff result are paired correctly.
+type HighlightCodeDiff struct {
+ placeholderBegin rune
+ placeholderMaxCount int
+ placeholderIndex int
+ PlaceholderTokenMap map[rune]string
+ tokenPlaceholderMap map[string]rune
+
+ placeholderOverflowCount int
+
+ lineWrapperTags []string
+}
+
+func NewHighlightCodeDiff() *HighlightCodeDiff {
+ return &HighlightCodeDiff{
+ placeholderBegin: rune(0x100000), // Plane 16: Supplementary Private Use Area B (U+100000..U+10FFFD)
+ placeholderMaxCount: 64000,
+ PlaceholderTokenMap: map[rune]string{},
+ tokenPlaceholderMap: map[string]rune{},
+ }
+}
+
+// NextPlaceholder returns 0 if no more placeholders can be used.
+// The diff is done line by line; usually there are only a few (no more than 10) placeholders in one line,
+// so placeholderMaxCount is practically impossible to exhaust in real cases.
+func (hcd *HighlightCodeDiff) NextPlaceholder() rune {
+ for hcd.placeholderIndex < hcd.placeholderMaxCount {
+ r := hcd.placeholderBegin + rune(hcd.placeholderIndex)
+ hcd.placeholderIndex++
+ // only use non-existing (not used by code) rune as placeholders
+ if _, ok := hcd.PlaceholderTokenMap[r]; !ok {
+ return r
+ }
+ }
+ return 0 // no more available placeholder
+}
+
+func (hcd *HighlightCodeDiff) isInPlaceholderRange(r rune) bool {
+ return hcd.placeholderBegin <= r && r < hcd.placeholderBegin+rune(hcd.placeholderMaxCount)
+}
+
+func (hcd *HighlightCodeDiff) CollectUsedRunes(code string) {
+ for _, r := range code {
+ if hcd.isInPlaceholderRange(r) {
+ // put the existing rune (used by code) in the map, so this rune won't be used as a placeholder anymore.
+ hcd.PlaceholderTokenMap[r] = ""
+ }
+ }
+}
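+
+// A sketch of the placeholder round-trip (rune values assumed for
+// illustration): with placeholderBegin at U+100000 and no conflicting runes
+// collected, ConvertToPlaceholders turns
+//	`<span class="n">run</span>`
+// into "\U00100000run\U00100001", so DiffMain compares single runes instead
+// of whole HTML tags, and Recover later restores the original tags.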
+
+func (hcd *HighlightCodeDiff) diffWithHighlight(filename, language, codeA, codeB string) []diffmatchpatch.Diff {
+ hcd.CollectUsedRunes(codeA)
+ hcd.CollectUsedRunes(codeB)
+
+ highlightCodeA, _ := highlight.Code(filename, language, codeA)
+ highlightCodeB, _ := highlight.Code(filename, language, codeB)
+
+ convertedCodeA := hcd.ConvertToPlaceholders(string(highlightCodeA))
+ convertedCodeB := hcd.ConvertToPlaceholders(string(highlightCodeB))
+
+ diffs := diffMatchPatch.DiffMain(convertedCodeA, convertedCodeB, true)
+ diffs = diffMatchPatch.DiffCleanupSemantic(diffs)
+ diffs = diffMatchPatch.DiffCleanupEfficiency(diffs)
+
+ for i := range diffs {
+ hcd.recoverOneDiff(&diffs[i])
+ }
+ return diffs
+}
+
+// ConvertToPlaceholders totally depends on Chroma's valid HTML output and its structure; do not use this function for other purposes.
+func (hcd *HighlightCodeDiff) ConvertToPlaceholders(htmlCode string) string {
+ var tagStack []string
+ res := strings.Builder{}
+
+ firstRunForLineTags := hcd.lineWrapperTags == nil
+
+ var beforeToken, token string
+ var valid bool
+
+ // the standard chroma highlight HTML is "<span class="line [hl]"><span class="cl"> ... </span></span>"
+ for {
+ beforeToken, token, htmlCode, valid = extractHTMLToken(htmlCode)
+ if !valid || token == "" {
+ break
+ }
+ // write the content before the token into result string, and consume the token in the string
+ res.WriteString(beforeToken)
+
+ // the line wrapper tags should be removed before diff
+ if strings.HasPrefix(token, `<span class="line`) || strings.HasPrefix(token, `<span class="cl"`) {
+ if firstRunForLineTags {
+ // if this is the first run for converting, save the line wrapper tags for later use, they should be added back
+ hcd.lineWrapperTags = append(hcd.lineWrapperTags, token)
+ }
+ htmlCode = strings.TrimSuffix(htmlCode, "</span>")
+ continue
+ }
+
+ var tokenInMap string
+ if strings.HasPrefix(token, "</") { // for closing tag
+ if len(tagStack) == 0 {
+ break // invalid diff result, no opening tag but see closing tag
+ }
+ // make sure the closing tag in the map is related to the opening tag, so that the diff algorithm can match the opening/closing tags
+ // the closing tag will be recorded in the map by the key "</span><!-- <span the-opening> -->" for "<span the-opening>"
+ tokenInMap = token + "<!-- " + tagStack[len(tagStack)-1] + "-->"
+ tagStack = tagStack[:len(tagStack)-1]
+ } else if token[0] == '<' { // for opening tag
+ tokenInMap = token
+ tagStack = append(tagStack, token)
+ } else if token[0] == '&' { // for html entity
+ tokenInMap = token
+ } // else: impossible
+
+ // remember the placeholder and token in the map
+ placeholder, ok := hcd.tokenPlaceholderMap[tokenInMap]
+ if !ok {
+ placeholder = hcd.NextPlaceholder()
+ if placeholder != 0 {
+ hcd.tokenPlaceholderMap[tokenInMap] = placeholder
+ hcd.PlaceholderTokenMap[placeholder] = tokenInMap
+ }
+ }
+
+ if placeholder != 0 {
+ res.WriteRune(placeholder) // use the placeholder to replace the token
+ } else {
+ // unfortunately, all private use runes have been exhausted, so no more placeholders can be used and no more converting is done
+ // usually this exhaustion won't occur in real cases, as the number of used placeholders is no larger than the number of CSS classes output by chroma.
+ hcd.placeholderOverflowCount++
+ if strings.HasPrefix(token, "&") {
+ // when the token is an HTML entity, something must be output even if there is no placeholder.
+ res.WriteRune(0xFFFD) // replacement character TODO: how to handle this case more gracefully?
+ res.WriteString(token[1:]) // still output the entity code part, otherwise there will be no diff result.
+ }
+ }
+ }
+
+ // write the remaining string
+ res.WriteString(htmlCode)
+ return res.String()
+}
+
+func (hcd *HighlightCodeDiff) recoverOneDiff(diff *diffmatchpatch.Diff) {
+ diff.Text = hcd.Recover(diff.Text)
+}
+
+func (hcd *HighlightCodeDiff) Recover(src string) string {
+ sb := strings.Builder{}
+ var tagStack []string
+
+ for _, r := range src {
+ token, ok := hcd.PlaceholderTokenMap[r]
+ if !ok || token == "" {
+ sb.WriteRune(r) // if the rune is not a placeholder, write it as it is
+ continue
+ }
+ var tokenToRecover string
+ if strings.HasPrefix(token, "</") { // for closing tag
+ // only get the tag itself, ignore the trailing comment (for how the comment is generated, see the code in ConvertToPlaceholders)
+ tokenToRecover = token[:strings.IndexByte(token, '>')+1]
+ if len(tagStack) == 0 {
+ continue // if no opening tag in stack yet, skip the closing tag
+ }
+ tagStack = tagStack[:len(tagStack)-1]
+ } else if token[0] == '<' { // for opening tag
+ tokenToRecover = token
+ tagStack = append(tagStack, token)
+ } else if token[0] == '&' { // for html entity
+ tokenToRecover = token
+ } // else: impossible
+ sb.WriteString(tokenToRecover)
+ }
+
+ if len(tagStack) > 0 {
+ // close all opening tags
+ for i := len(tagStack) - 1; i >= 0; i-- {
+ tagToClose := tagStack[i]
+ // get the closing tag "</span>" from "<span class=...>" or "<span>"
+ pos := strings.IndexAny(tagToClose, " >")
+ if pos != -1 {
+ sb.WriteString("</" + tagToClose[1:pos] + ">")
+ } // else: impossible. every tag was pushed into the stack by the code above and is valid HTML opening tag
+ }
+ }
+
+ return sb.String()
+}
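+
+// For instance (continuing the assumed mapping above), recovering the
+// fragment "\U00100000text" yields `<span class="n">text</span>`: the
+// opening tag left on the stack at the end is closed by the loop above.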
diff --git a/services/gitdiff/highlightdiff_test.go b/services/gitdiff/highlightdiff_test.go
new file mode 100644
index 0000000..2ff4472
--- /dev/null
+++ b/services/gitdiff/highlightdiff_test.go
@@ -0,0 +1,125 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package gitdiff
+
+import (
+ "fmt"
+ "strings"
+ "testing"
+
+ "github.com/sergi/go-diff/diffmatchpatch"
+ "github.com/stretchr/testify/assert"
+)
+
+func TestDiffWithHighlight(t *testing.T) {
+ hcd := NewHighlightCodeDiff()
+ diffs := hcd.diffWithHighlight(
+ "main.v", "",
+ " run('<>')\n",
+ " run(db)\n",
+ )
+
+ expected := ` <span class="n">run</span><span class="o">(</span><span class="removed-code"><span class="k">&#39;</span><span class="o">&lt;</span><span class="o">&gt;</span><span class="k">&#39;</span></span><span class="o">)</span>`
+ output := diffToHTML(nil, diffs, DiffLineDel)
+ assert.Equal(t, expected, output)
+
+ expected = ` <span class="n">run</span><span class="o">(</span><span class="added-code"><span class="n">db</span></span><span class="o">)</span>`
+ output = diffToHTML(nil, diffs, DiffLineAdd)
+ assert.Equal(t, expected, output)
+
+ hcd = NewHighlightCodeDiff()
+ hcd.PlaceholderTokenMap['O'] = "<span>"
+ hcd.PlaceholderTokenMap['C'] = "</span>"
+ diff := diffmatchpatch.Diff{}
+
+ diff.Text = "OC"
+ hcd.recoverOneDiff(&diff)
+ assert.Equal(t, "<span></span>", diff.Text)
+
+ diff.Text = "O"
+ hcd.recoverOneDiff(&diff)
+ assert.Equal(t, "<span></span>", diff.Text)
+
+ diff.Text = "C"
+ hcd.recoverOneDiff(&diff)
+ assert.Equal(t, "", diff.Text)
+}
+
+func TestDiffWithHighlightPlaceholder(t *testing.T) {
+ hcd := NewHighlightCodeDiff()
+ diffs := hcd.diffWithHighlight(
+ "main.js", "",
+ "a='\U00100000'",
+ "a='\U0010FFFD''",
+ )
+ assert.Equal(t, "", hcd.PlaceholderTokenMap[0x00100000])
+ assert.Equal(t, "", hcd.PlaceholderTokenMap[0x0010FFFD])
+
+ expected := fmt.Sprintf(`<span class="nx">a</span><span class="o">=</span><span class="s1">&#39;</span><span class="removed-code">%s</span>&#39;`, "\U00100000")
+ output := diffToHTML(hcd.lineWrapperTags, diffs, DiffLineDel)
+ assert.Equal(t, expected, output)
+
+ hcd = NewHighlightCodeDiff()
+ diffs = hcd.diffWithHighlight(
+ "main.js", "",
+ "a='\U00100000'",
+ "a='\U0010FFFD'",
+ )
+ expected = fmt.Sprintf(`<span class="nx">a</span><span class="o">=</span><span class="s1">&#39;</span><span class="added-code">%s</span>&#39;`, "\U0010FFFD")
+ output = diffToHTML(nil, diffs, DiffLineAdd)
+ assert.Equal(t, expected, output)
+}
+
+func TestDiffWithHighlightPlaceholderExhausted(t *testing.T) {
+ hcd := NewHighlightCodeDiff()
+ hcd.placeholderMaxCount = 0
+ diffs := hcd.diffWithHighlight(
+ "main.js", "",
+ "'",
+ ``,
+ )
+ output := diffToHTML(nil, diffs, DiffLineDel)
+ expected := fmt.Sprintf(`<span class="removed-code">%s#39;</span>`, "\uFFFD")
+ assert.Equal(t, expected, output)
+
+ hcd = NewHighlightCodeDiff()
+ hcd.placeholderMaxCount = 0
+ diffs = hcd.diffWithHighlight(
+ "main.js", "",
+ "a < b",
+ "a > b",
+ )
+ output = diffToHTML(nil, diffs, DiffLineDel)
+ expected = fmt.Sprintf(`a %s<span class="removed-code">l</span>t; b`, "\uFFFD")
+ assert.Equal(t, expected, output)
+
+ output = diffToHTML(nil, diffs, DiffLineAdd)
+ expected = fmt.Sprintf(`a %s<span class="added-code">g</span>t; b`, "\uFFFD")
+ assert.Equal(t, expected, output)
+}
+
+func TestDiffWithHighlightTagMatch(t *testing.T) {
+ totalOverflow := 0
+ for i := 0; i < 100; i++ {
+ hcd := NewHighlightCodeDiff()
+ hcd.placeholderMaxCount = i
+ diffs := hcd.diffWithHighlight(
+ "main.js", "",
+ "a='1'",
+ "b='2'",
+ )
+ totalOverflow += hcd.placeholderOverflowCount
+
+ output := diffToHTML(nil, diffs, DiffLineDel)
+ c1 := strings.Count(output, "<span")
+ c2 := strings.Count(output, "</span")
+ assert.Equal(t, c1, c2)
+
+ output = diffToHTML(nil, diffs, DiffLineAdd)
+ c1 = strings.Count(output, "<span")
+ c2 = strings.Count(output, "</span")
+ assert.Equal(t, c1, c2)
+ }
+ assert.NotZero(t, totalOverflow)
+}
diff --git a/services/gitdiff/main_test.go b/services/gitdiff/main_test.go
new file mode 100644
index 0000000..cd9dcd8
--- /dev/null
+++ b/services/gitdiff/main_test.go
@@ -0,0 +1,18 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package gitdiff
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+
+ _ "code.gitea.io/gitea/models"
+ _ "code.gitea.io/gitea/models/actions"
+ _ "code.gitea.io/gitea/models/activities"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
diff --git a/services/gitdiff/testdata/academic-module/HEAD b/services/gitdiff/testdata/academic-module/HEAD
new file mode 100644
index 0000000..cb089cd
--- /dev/null
+++ b/services/gitdiff/testdata/academic-module/HEAD
@@ -0,0 +1 @@
+ref: refs/heads/master
diff --git a/services/gitdiff/testdata/academic-module/config b/services/gitdiff/testdata/academic-module/config
new file mode 100644
index 0000000..1bc26be
--- /dev/null
+++ b/services/gitdiff/testdata/academic-module/config
@@ -0,0 +1,10 @@
+[core]
+ repositoryformatversion = 0
+ filemode = true
+ bare = false
+ logallrefupdates = true
+ ignorecase = true
+ precomposeunicode = true
+[branch "master"]
+ remote = origin
+ merge = refs/heads/master
diff --git a/services/gitdiff/testdata/academic-module/description b/services/gitdiff/testdata/academic-module/description
new file mode 100644
index 0000000..498b267
--- /dev/null
+++ b/services/gitdiff/testdata/academic-module/description
@@ -0,0 +1 @@
+Unnamed repository; edit this file 'description' to name the repository.
diff --git a/services/gitdiff/testdata/academic-module/index b/services/gitdiff/testdata/academic-module/index
new file mode 100644
index 0000000..e712c90
--- /dev/null
+++ b/services/gitdiff/testdata/academic-module/index
Binary files differ
diff --git a/services/gitdiff/testdata/academic-module/info/exclude b/services/gitdiff/testdata/academic-module/info/exclude
new file mode 100644
index 0000000..a5196d1
--- /dev/null
+++ b/services/gitdiff/testdata/academic-module/info/exclude
@@ -0,0 +1,6 @@
+# git ls-files --others --exclude-from=.git/info/exclude
+# Lines that start with '#' are comments.
+# For a project mostly in C, the following would be a good set of
+# exclude patterns (uncomment them if you want to use them):
+# *.[oa]
+# *~
diff --git a/services/gitdiff/testdata/academic-module/logs/HEAD b/services/gitdiff/testdata/academic-module/logs/HEAD
new file mode 100644
index 0000000..16b2e1c
--- /dev/null
+++ b/services/gitdiff/testdata/academic-module/logs/HEAD
@@ -0,0 +1 @@
+0000000000000000000000000000000000000000 bd7063cc7c04689c4d082183d32a604ed27a24f9 Lunny Xiao <xiaolunwen@gmail.com> 1574829684 +0800 clone: from https://try.gitea.io/shemgp-aiias/academic-module
diff --git a/services/gitdiff/testdata/academic-module/logs/refs/heads/master b/services/gitdiff/testdata/academic-module/logs/refs/heads/master
new file mode 100644
index 0000000..16b2e1c
--- /dev/null
+++ b/services/gitdiff/testdata/academic-module/logs/refs/heads/master
@@ -0,0 +1 @@
+0000000000000000000000000000000000000000 bd7063cc7c04689c4d082183d32a604ed27a24f9 Lunny Xiao <xiaolunwen@gmail.com> 1574829684 +0800 clone: from https://try.gitea.io/shemgp-aiias/academic-module
diff --git a/services/gitdiff/testdata/academic-module/logs/refs/remotes/origin/HEAD b/services/gitdiff/testdata/academic-module/logs/refs/remotes/origin/HEAD
new file mode 100644
index 0000000..16b2e1c
--- /dev/null
+++ b/services/gitdiff/testdata/academic-module/logs/refs/remotes/origin/HEAD
@@ -0,0 +1 @@
+0000000000000000000000000000000000000000 bd7063cc7c04689c4d082183d32a604ed27a24f9 Lunny Xiao <xiaolunwen@gmail.com> 1574829684 +0800 clone: from https://try.gitea.io/shemgp-aiias/academic-module
diff --git a/services/gitdiff/testdata/academic-module/objects/pack/pack-597efbc3613c7ba790e33b178fd9fc1fe17b4245.idx b/services/gitdiff/testdata/academic-module/objects/pack/pack-597efbc3613c7ba790e33b178fd9fc1fe17b4245.idx
new file mode 100644
index 0000000..4d759aa
--- /dev/null
+++ b/services/gitdiff/testdata/academic-module/objects/pack/pack-597efbc3613c7ba790e33b178fd9fc1fe17b4245.idx
Binary files differ
diff --git a/services/gitdiff/testdata/academic-module/objects/pack/pack-597efbc3613c7ba790e33b178fd9fc1fe17b4245.pack b/services/gitdiff/testdata/academic-module/objects/pack/pack-597efbc3613c7ba790e33b178fd9fc1fe17b4245.pack
new file mode 100644
index 0000000..2dc49cf
--- /dev/null
+++ b/services/gitdiff/testdata/academic-module/objects/pack/pack-597efbc3613c7ba790e33b178fd9fc1fe17b4245.pack
Binary files differ
diff --git a/services/gitdiff/testdata/academic-module/packed-refs b/services/gitdiff/testdata/academic-module/packed-refs
new file mode 100644
index 0000000..13b5611
--- /dev/null
+++ b/services/gitdiff/testdata/academic-module/packed-refs
@@ -0,0 +1,2 @@
+# pack-refs with: peeled fully-peeled sorted
+bd7063cc7c04689c4d082183d32a604ed27a24f9 refs/remotes/origin/master
diff --git a/services/gitdiff/testdata/academic-module/refs/heads/master b/services/gitdiff/testdata/academic-module/refs/heads/master
new file mode 100644
index 0000000..bd2b56e
--- /dev/null
+++ b/services/gitdiff/testdata/academic-module/refs/heads/master
@@ -0,0 +1 @@
+bd7063cc7c04689c4d082183d32a604ed27a24f9
diff --git a/services/gitdiff/testdata/academic-module/refs/remotes/origin/HEAD b/services/gitdiff/testdata/academic-module/refs/remotes/origin/HEAD
new file mode 100644
index 0000000..6efe28f
--- /dev/null
+++ b/services/gitdiff/testdata/academic-module/refs/remotes/origin/HEAD
@@ -0,0 +1 @@
+ref: refs/remotes/origin/master
diff --git a/services/indexer/indexer.go b/services/indexer/indexer.go
new file mode 100644
index 0000000..38dd012
--- /dev/null
+++ b/services/indexer/indexer.go
@@ -0,0 +1,20 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package indexer
+
+import (
+ code_indexer "code.gitea.io/gitea/modules/indexer/code"
+ issue_indexer "code.gitea.io/gitea/modules/indexer/issues"
+ stats_indexer "code.gitea.io/gitea/modules/indexer/stats"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+// Init initializes the issue, code and stats indexers
+func Init() error {
+ notify_service.RegisterNotifier(NewNotifier())
+
+ issue_indexer.InitIssueIndexer(false)
+ code_indexer.Init()
+ return stats_indexer.Init()
+}
diff --git a/services/indexer/notify.go b/services/indexer/notify.go
new file mode 100644
index 0000000..e2cfe47
--- /dev/null
+++ b/services/indexer/notify.go
@@ -0,0 +1,170 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package indexer
+
+import (
+ "context"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ code_indexer "code.gitea.io/gitea/modules/indexer/code"
+ issue_indexer "code.gitea.io/gitea/modules/indexer/issues"
+ stats_indexer "code.gitea.io/gitea/modules/indexer/stats"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+type indexerNotifier struct {
+ notify_service.NullNotifier
+}
+
+var _ notify_service.Notifier = &indexerNotifier{}
+
+// NewNotifier creates a new indexerNotifier
+func NewNotifier() notify_service.Notifier {
+ return &indexerNotifier{}
+}
+
+func (r *indexerNotifier) AdoptRepository(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository) {
+ r.MigrateRepository(ctx, doer, u, repo)
+}
+
+func (r *indexerNotifier) CreateIssueComment(ctx context.Context, doer *user_model.User, repo *repo_model.Repository,
+ issue *issues_model.Issue, comment *issues_model.Comment, mentions []*user_model.User,
+) {
+ issue_indexer.UpdateIssueIndexer(ctx, issue.ID)
+}
+
+func (r *indexerNotifier) NewIssue(ctx context.Context, issue *issues_model.Issue, mentions []*user_model.User) {
+ issue_indexer.UpdateIssueIndexer(ctx, issue.ID)
+}
+
+func (r *indexerNotifier) NewPullRequest(ctx context.Context, pr *issues_model.PullRequest, mentions []*user_model.User) {
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("LoadIssue: %v", err)
+ return
+ }
+ issue_indexer.UpdateIssueIndexer(ctx, pr.Issue.ID)
+}
+
+func (r *indexerNotifier) UpdateComment(ctx context.Context, doer *user_model.User, c *issues_model.Comment, oldContent string) {
+ if err := c.LoadIssue(ctx); err != nil {
+ log.Error("LoadIssue: %v", err)
+ return
+ }
+ issue_indexer.UpdateIssueIndexer(ctx, c.Issue.ID)
+}
+
+func (r *indexerNotifier) DeleteComment(ctx context.Context, doer *user_model.User, comment *issues_model.Comment) {
+ if err := comment.LoadIssue(ctx); err != nil {
+ log.Error("LoadIssue: %v", err)
+ return
+ }
+ issue_indexer.UpdateIssueIndexer(ctx, comment.Issue.ID)
+}
+
+func (r *indexerNotifier) DeleteRepository(ctx context.Context, doer *user_model.User, repo *repo_model.Repository) {
+ issue_indexer.DeleteRepoIssueIndexer(ctx, repo.ID)
+ if setting.Indexer.RepoIndexerEnabled {
+ code_indexer.UpdateRepoIndexer(repo)
+ }
+}
+
+func (r *indexerNotifier) MigrateRepository(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository) {
+ issue_indexer.UpdateRepoIndexer(ctx, repo.ID)
+ if setting.Indexer.RepoIndexerEnabled && !repo.IsEmpty {
+ code_indexer.UpdateRepoIndexer(repo)
+ }
+ if err := stats_indexer.UpdateRepoIndexer(repo); err != nil {
+ log.Error("stats_indexer.UpdateRepoIndexer(%d) failed: %v", repo.ID, err)
+ }
+}
+
+func (r *indexerNotifier) PushCommits(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, opts *repository.PushUpdateOptions, commits *repository.PushCommits) {
+ if !opts.RefFullName.IsBranch() {
+ return
+ }
+
+ if setting.Indexer.RepoIndexerEnabled && opts.RefFullName.BranchName() == repo.DefaultBranch {
+ code_indexer.UpdateRepoIndexer(repo)
+ }
+ if err := stats_indexer.UpdateRepoIndexer(repo); err != nil {
+ log.Error("stats_indexer.UpdateRepoIndexer(%d) failed: %v", repo.ID, err)
+ }
+}
+
+func (r *indexerNotifier) SyncPushCommits(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, opts *repository.PushUpdateOptions, commits *repository.PushCommits) {
+ if !opts.RefFullName.IsBranch() {
+ return
+ }
+
+ if setting.Indexer.RepoIndexerEnabled && opts.RefFullName.BranchName() == repo.DefaultBranch {
+ code_indexer.UpdateRepoIndexer(repo)
+ }
+ if err := stats_indexer.UpdateRepoIndexer(repo); err != nil {
+ log.Error("stats_indexer.UpdateRepoIndexer(%d) failed: %v", repo.ID, err)
+ }
+}
+
+func (r *indexerNotifier) ChangeDefaultBranch(ctx context.Context, repo *repo_model.Repository) {
+ if setting.Indexer.RepoIndexerEnabled && !repo.IsEmpty {
+ code_indexer.UpdateRepoIndexer(repo)
+ }
+ if err := stats_indexer.UpdateRepoIndexer(repo); err != nil {
+ log.Error("stats_indexer.UpdateRepoIndexer(%d) failed: %v", repo.ID, err)
+ }
+}
+
+func (r *indexerNotifier) IssueChangeContent(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldContent string) {
+ issue_indexer.UpdateIssueIndexer(ctx, issue.ID)
+}
+
+func (r *indexerNotifier) IssueChangeTitle(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldTitle string) {
+ issue_indexer.UpdateIssueIndexer(ctx, issue.ID)
+}
+
+func (r *indexerNotifier) IssueChangeRef(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldRef string) {
+ issue_indexer.UpdateIssueIndexer(ctx, issue.ID)
+}
+
+func (r *indexerNotifier) IssueChangeStatus(ctx context.Context, doer *user_model.User, commitID string, issue *issues_model.Issue, actionComment *issues_model.Comment, closeOrReopen bool) {
+ issue_indexer.UpdateIssueIndexer(ctx, issue.ID)
+}
+
+func (r *indexerNotifier) IssueChangeAssignee(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, assignee *user_model.User, removed bool, comment *issues_model.Comment) {
+ issue_indexer.UpdateIssueIndexer(ctx, issue.ID)
+}
+
+func (r *indexerNotifier) IssueChangeMilestone(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldMilestoneID int64) {
+ issue_indexer.UpdateIssueIndexer(ctx, issue.ID)
+}
+
+func (r *indexerNotifier) IssueChangeLabels(ctx context.Context, doer *user_model.User, issue *issues_model.Issue,
+ addedLabels, removedLabels []*issues_model.Label,
+) {
+ issue_indexer.UpdateIssueIndexer(ctx, issue.ID)
+}
+
+func (r *indexerNotifier) IssueClearLabels(ctx context.Context, doer *user_model.User, issue *issues_model.Issue) {
+ issue_indexer.UpdateIssueIndexer(ctx, issue.ID)
+}
+
+func (r *indexerNotifier) MergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("LoadIssue: %v", err)
+ return
+ }
+ issue_indexer.UpdateIssueIndexer(ctx, pr.Issue.ID)
+}
+
+func (r *indexerNotifier) AutoMergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("LoadIssue: %v", err)
+ return
+ }
+ issue_indexer.UpdateIssueIndexer(ctx, pr.Issue.ID)
+}
diff --git a/services/issue/assignee.go b/services/issue/assignee.go
new file mode 100644
index 0000000..9c2ef74
--- /dev/null
+++ b/services/issue/assignee.go
@@ -0,0 +1,314 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issue
+
+import (
+ "context"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/organization"
+ "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+// DeleteNotPassedAssignee deletes all assignees who aren't passed via the "assignees" array
+func DeleteNotPassedAssignee(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, assignees []*user_model.User) (err error) {
+ var found bool
+ oriAssignees := make([]*user_model.User, len(issue.Assignees))
+ _ = copy(oriAssignees, issue.Assignees)
+
+ for _, assignee := range oriAssignees {
+ found = false
+ for _, alreadyAssignee := range assignees {
+ if assignee.ID == alreadyAssignee.ID {
+ found = true
+ break
+ }
+ }
+
+ if !found {
+ // This function also creates comments and triggers hooks, which is why we call it separately instead of removing the assignees directly here
+ if _, _, err := ToggleAssigneeWithNotify(ctx, issue, doer, assignee.ID); err != nil {
+ return err
+ }
+ }
+ }
+
+ return nil
+}
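+
+// An illustrative call (a sketch; `keep` is an assumed variable): passing
+// only the assignees that should remain removes every other assignee and
+// records a comment for each removal:
+//
+//	err := DeleteNotPassedAssignee(ctx, issue, doer, []*user_model.User{keep})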
+
+// ToggleAssigneeWithNotify changes a user between assigned and not assigned for this issue, and makes an issue comment for it.
+func ToggleAssigneeWithNotify(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, assigneeID int64) (removed bool, comment *issues_model.Comment, err error) {
+ removed, comment, err = issues_model.ToggleIssueAssignee(ctx, issue, doer, assigneeID)
+ if err != nil {
+ return false, nil, err
+ }
+
+ assignee, err := user_model.GetUserByID(ctx, assigneeID)
+ if err != nil {
+ return false, nil, err
+ }
+
+ notify_service.IssueChangeAssignee(ctx, doer, issue, assignee, removed, comment)
+
+ return removed, comment, err
+}
+
+// ReviewRequest adds or removes a review request from a user for this PR, and makes a comment for it.
+func ReviewRequest(ctx context.Context, issue *issues_model.Issue, doer, reviewer *user_model.User, isAdd bool) (comment *issues_model.Comment, err error) {
+ if isAdd {
+ comment, err = issues_model.AddReviewRequest(ctx, issue, reviewer, doer)
+ } else {
+ comment, err = issues_model.RemoveReviewRequest(ctx, issue, reviewer, doer)
+ }
+
+ if err != nil {
+ return nil, err
+ }
+
+ if comment != nil {
+ notify_service.PullRequestReviewRequest(ctx, doer, issue, reviewer, isAdd, comment)
+ }
+
+ return comment, err
+}
+
+// IsValidReviewRequest checks permission for ReviewRequest
+func IsValidReviewRequest(ctx context.Context, reviewer, doer *user_model.User, isAdd bool, issue *issues_model.Issue, permDoer *access_model.Permission) error {
+ if reviewer.IsOrganization() {
+ return issues_model.ErrNotValidReviewRequest{
+ Reason: "Organization can't be added as reviewer",
+ UserID: doer.ID,
+ RepoID: issue.Repo.ID,
+ }
+ }
+ if doer.IsOrganization() {
+ return issues_model.ErrNotValidReviewRequest{
+ Reason: "Organization can't be doer to add reviewer",
+ UserID: doer.ID,
+ RepoID: issue.Repo.ID,
+ }
+ }
+
+ permReviewer, err := access_model.GetUserRepoPermission(ctx, issue.Repo, reviewer)
+ if err != nil {
+ return err
+ }
+
+ if permDoer == nil {
+ permDoer = new(access_model.Permission)
+ *permDoer, err = access_model.GetUserRepoPermission(ctx, issue.Repo, doer)
+ if err != nil {
+ return err
+ }
+ }
+
+ lastReview, err := issues_model.GetReviewByIssueIDAndUserID(ctx, issue.ID, reviewer.ID)
+ if err != nil && !issues_model.IsErrReviewNotExist(err) {
+ return err
+ }
+
+ canDoerChangeReviewRequests := CanDoerChangeReviewRequests(ctx, doer, issue.Repo, issue)
+
+ if isAdd {
+ if !permReviewer.CanAccessAny(perm.AccessModeRead, unit.TypePullRequests) {
+ return issues_model.ErrNotValidReviewRequest{
+ Reason: "Reviewer can't read",
+ UserID: doer.ID,
+ RepoID: issue.Repo.ID,
+ }
+ }
+
+ if reviewer.ID == issue.PosterID && issue.OriginalAuthorID == 0 {
+ return issues_model.ErrNotValidReviewRequest{
+ Reason: "poster of pr can't be reviewer",
+ UserID: doer.ID,
+ RepoID: issue.Repo.ID,
+ }
+ }
+
+ if canDoerChangeReviewRequests {
+ return nil
+ }
+
+ if doer.ID == issue.PosterID && issue.OriginalAuthorID == 0 && lastReview != nil && lastReview.Type != issues_model.ReviewTypeRequest {
+ return nil
+ }
+
+ return issues_model.ErrNotValidReviewRequest{
+ Reason: "Doer can't choose reviewer",
+ UserID: doer.ID,
+ RepoID: issue.Repo.ID,
+ }
+ }
+
+ if canDoerChangeReviewRequests {
+ return nil
+ }
+
+ if lastReview != nil && lastReview.Type == issues_model.ReviewTypeRequest && lastReview.ReviewerID == doer.ID {
+ return nil
+ }
+
+ return issues_model.ErrNotValidReviewRequest{
+ Reason: "Doer can't remove reviewer",
+ UserID: doer.ID,
+ RepoID: issue.Repo.ID,
+ }
+}
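+
+// exampleGuardedReviewRequest is a hypothetical sketch of the intended
+// validate-then-mutate flow: IsValidReviewRequest runs first, so permission
+// failures surface as ErrNotValidReviewRequest before anything is changed.
+// Passing a nil permission makes the function resolve the doer's permission itself.
+func exampleGuardedReviewRequest(ctx context.Context, issue *issues_model.Issue, doer, reviewer *user_model.User) (*issues_model.Comment, error) {
+ if err := IsValidReviewRequest(ctx, reviewer, doer, true, issue, nil); err != nil {
+ return nil, err
+ }
+ return ReviewRequest(ctx, issue, doer, reviewer, true)
+}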
+
+// IsValidTeamReviewRequest checks whether the doer has permission to add or remove the given team review request.
+func IsValidTeamReviewRequest(ctx context.Context, reviewer *organization.Team, doer *user_model.User, isAdd bool, issue *issues_model.Issue) error {
+ if doer.IsOrganization() {
+ return issues_model.ErrNotValidReviewRequest{
+ Reason: "Organization can't be doer to add reviewer",
+ UserID: doer.ID,
+ RepoID: issue.Repo.ID,
+ }
+ }
+
+ canDoerChangeReviewRequests := CanDoerChangeReviewRequests(ctx, doer, issue.Repo, issue)
+
+ if isAdd {
+ if issue.Repo.IsPrivate {
+ hasTeam := organization.HasTeamRepo(ctx, reviewer.OrgID, reviewer.ID, issue.RepoID)
+
+ if !hasTeam {
+ return issues_model.ErrNotValidReviewRequest{
+ Reason: "Reviewing team can't read repo",
+ UserID: doer.ID,
+ RepoID: issue.Repo.ID,
+ }
+ }
+ }
+
+ if canDoerChangeReviewRequests {
+ return nil
+ }
+
+ return issues_model.ErrNotValidReviewRequest{
+ Reason: "Doer can't choose reviewer",
+ UserID: doer.ID,
+ RepoID: issue.Repo.ID,
+ }
+ }
+
+ if canDoerChangeReviewRequests {
+ return nil
+ }
+
+ return issues_model.ErrNotValidReviewRequest{
+ Reason: "Doer can't remove reviewer",
+ UserID: doer.ID,
+ RepoID: issue.Repo.ID,
+ }
+}
+
+// TeamReviewRequest adds or removes a review request from a team for this PR, and creates a comment for it.
+func TeamReviewRequest(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, reviewer *organization.Team, isAdd bool) (comment *issues_model.Comment, err error) {
+ if isAdd {
+ comment, err = issues_model.AddTeamReviewRequest(ctx, issue, reviewer, doer)
+ } else {
+ comment, err = issues_model.RemoveTeamReviewRequest(ctx, issue, reviewer, doer)
+ }
+
+ if err != nil {
+ return nil, err
+ }
+
+ if comment == nil || !isAdd {
+ return nil, nil
+ }
+
+ return comment, teamReviewRequestNotify(ctx, issue, doer, reviewer, isAdd, comment)
+}
+
+// ReviewRequestNotify sends out notifications for a batch of review request changes.
+func ReviewRequestNotify(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, reviewNotifiers []*ReviewRequestNotifier) {
+ for _, reviewNotifier := range reviewNotifiers {
+ if reviewNotifier.Reviewer != nil {
+ notify_service.PullRequestReviewRequest(ctx, issue.Poster, issue, reviewNotifier.Reviewer, reviewNotifier.IsAdd, reviewNotifier.Comment)
+ } else if reviewNotifier.ReviewTeam != nil {
+ if err := teamReviewRequestNotify(ctx, issue, issue.Poster, reviewNotifier.ReviewTeam, reviewNotifier.IsAdd, reviewNotifier.Comment); err != nil {
+ log.Error("teamReviewRequestNotify: %v", err)
+ }
+ }
+ }
+}
+
+// teamReviewRequestNotify notifies all users in the given team
+func teamReviewRequestNotify(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, reviewer *organization.Team, isAdd bool, comment *issues_model.Comment) error {
+ if err := comment.LoadIssue(ctx); err != nil {
+ return err
+ }
+
+ members, err := organization.GetTeamMembers(ctx, &organization.SearchMembersOptions{
+ TeamID: reviewer.ID,
+ })
+ if err != nil {
+ return err
+ }
+
+ for _, member := range members {
+ if member.ID == comment.Issue.PosterID {
+ continue
+ }
+ comment.AssigneeID = member.ID
+ notify_service.PullRequestReviewRequest(ctx, doer, issue, member, isAdd, comment)
+ }
+
+ return nil
+}
+
+// CanDoerChangeReviewRequests returns whether the doer can add or remove review requests of a PR
+func CanDoerChangeReviewRequests(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, issue *issues_model.Issue) bool {
+ // The poster of the PR can change the reviewers
+ if doer.ID == issue.PosterID {
+ return true
+ }
+
+ // The owner of the repo can change the reviewers
+ if doer.ID == repo.OwnerID {
+ return true
+ }
+
+ // Collaborators of the repo can change the reviewers
+ isCollaborator, err := repo_model.IsCollaborator(ctx, repo.ID, doer.ID)
+ if err != nil {
+ log.Error("IsCollaborator: %v", err)
+ return false
+ }
+ if isCollaborator {
+ return true
+ }
+
+ // If the repo's owner is an organization, members of teams with read permission on pull requests can change reviewers
+ if repo.Owner.IsOrganization() {
+ teams, err := organization.GetTeamsWithAccessToRepo(ctx, repo.OwnerID, repo.ID, perm.AccessModeRead)
+ if err != nil {
+ log.Error("GetTeamsWithAccessToRepo: %v", err)
+ return false
+ }
+ for _, team := range teams {
+ if !team.UnitEnabled(ctx, unit.TypePullRequests) {
+ continue
+ }
+ isMember, err := organization.IsTeamMember(ctx, repo.OwnerID, team.ID, doer.ID)
+ if err != nil {
+ log.Error("IsTeamMember: %v", err)
+ continue
+ }
+ if isMember {
+ return true
+ }
+ }
+ }
+
+ return false
+}
diff --git a/services/issue/assignee_test.go b/services/issue/assignee_test.go
new file mode 100644
index 0000000..2b70b8c
--- /dev/null
+++ b/services/issue/assignee_test.go
@@ -0,0 +1,48 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issue
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestDeleteNotPassedAssignee(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ // Fake issue with assignees
+ issue, err := issues_model.GetIssueByID(db.DefaultContext, 1)
+ require.NoError(t, err)
+
+ err = issue.LoadAttributes(db.DefaultContext)
+ require.NoError(t, err)
+
+ assert.Len(t, issue.Assignees, 1)
+
+ user1, err := user_model.GetUserByID(db.DefaultContext, 1) // This user is already assigned (see the definition in fixtures)
+ require.NoError(t, err)
+
+ // Confirm the user is currently assigned, before removing everyone
+ isAssigned, err := issues_model.IsUserAssignedToIssue(db.DefaultContext, issue, user1)
+ require.NoError(t, err)
+ assert.True(t, isAssigned)
+
+ // Remove all assignees by passing an empty list
+ err = DeleteNotPassedAssignee(db.DefaultContext, issue, user1, []*user_model.User{})
+ require.NoError(t, err)
+ assert.Empty(t, issue.Assignees)
+
+ // Reload to check they're gone
+ issue.ResetAttributesLoaded()
+ require.NoError(t, issue.LoadAssignees(db.DefaultContext))
+ assert.Empty(t, issue.Assignees)
+ assert.Empty(t, issue.Assignee)
+}
diff --git a/services/issue/comments.go b/services/issue/comments.go
new file mode 100644
index 0000000..3ab577b
--- /dev/null
+++ b/services/issue/comments.go
@@ -0,0 +1,136 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issue
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/timeutil"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+// CreateRefComment creates a commit reference comment for the issue.
+func CreateRefComment(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, issue *issues_model.Issue, content, commitSHA string) error {
+ if len(commitSHA) == 0 {
+ return fmt.Errorf("cannot create reference with empty commit SHA")
+ }
+
+ // Check if the same reference from the same commit already exists.
+ has, err := db.GetEngine(ctx).Get(&issues_model.Comment{
+ Type: issues_model.CommentTypeCommitRef,
+ IssueID: issue.ID,
+ CommitSHA: commitSHA,
+ })
+ if err != nil {
+ return fmt.Errorf("check reference comment: %w", err)
+ } else if has {
+ return nil
+ }
+
+ _, err = issues_model.CreateComment(ctx, &issues_model.CreateCommentOptions{
+ Type: issues_model.CommentTypeCommitRef,
+ Doer: doer,
+ Repo: repo,
+ Issue: issue,
+ CommitSHA: commitSHA,
+ Content: content,
+ })
+ return err
+}
+
+// CreateIssueComment creates a plain issue comment.
+func CreateIssueComment(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, issue *issues_model.Issue, content string, attachments []string) (*issues_model.Comment, error) {
+ // Check if doer is blocked by the poster of the issue or by the owner of the repository.
+ if user_model.IsBlockedMultiple(ctx, []int64{issue.PosterID, repo.OwnerID}, doer.ID) {
+ return nil, user_model.ErrBlockedByUser
+ }
+
+ comment, err := issues_model.CreateComment(ctx, &issues_model.CreateCommentOptions{
+ Type: issues_model.CommentTypeComment,
+ Doer: doer,
+ Repo: repo,
+ Issue: issue,
+ Content: content,
+ Attachments: attachments,
+ })
+ if err != nil {
+ return nil, err
+ }
+
+ mentions, err := issues_model.FindAndUpdateIssueMentions(ctx, issue, doer, comment.Content)
+ if err != nil {
+ return nil, err
+ }
+
+ notify_service.CreateIssueComment(ctx, doer, repo, issue, comment, mentions)
+
+ return comment, nil
+}
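+
+// exampleQuickComment is a hypothetical sketch of the simplest caller: a plain
+// comment with no attachments. Block checks, mention scanning and
+// notifications all happen inside CreateIssueComment.
+func exampleQuickComment(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, issue *issues_model.Issue) (*issues_model.Comment, error) {
+ return CreateIssueComment(ctx, doer, repo, issue, "Looks good to me.", nil)
+}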
+
+// UpdateComment updates the given comment, recording content history when the content changed.
+func UpdateComment(ctx context.Context, c *issues_model.Comment, contentVersion int, doer *user_model.User, oldContent string) error {
+ if err := c.LoadReview(ctx); err != nil {
+ return err
+ }
+ isPartOfPendingReview := c.Review != nil && c.Review.Type == issues_model.ReviewTypePending
+
+ needsContentHistory := c.Content != oldContent && c.Type.HasContentSupport() && !isPartOfPendingReview
+ if needsContentHistory {
+ hasContentHistory, err := issues_model.HasIssueContentHistory(ctx, c.IssueID, c.ID)
+ if err != nil {
+ return err
+ }
+ if !hasContentHistory {
+ if err = issues_model.SaveIssueContentHistory(ctx, c.PosterID, c.IssueID, c.ID,
+ c.CreatedUnix, oldContent, true); err != nil {
+ return err
+ }
+ }
+ }
+
+ if err := issues_model.UpdateComment(ctx, c, contentVersion, doer); err != nil {
+ return err
+ }
+
+ if needsContentHistory {
+ historyDate := timeutil.TimeStampNow()
+ if c.Issue.NoAutoTime {
+ historyDate = c.Issue.UpdatedUnix
+ }
+ err := issues_model.SaveIssueContentHistory(ctx, doer.ID, c.IssueID, c.ID, historyDate, c.Content, false)
+ if err != nil {
+ return err
+ }
+ }
+
+ if !isPartOfPendingReview {
+ notify_service.UpdateComment(ctx, doer, c, oldContent)
+ }
+
+ return nil
+}
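+
+// exampleEditComment is a hypothetical sketch of an edit flow: the caller
+// keeps the content it originally loaded so UpdateComment can record content
+// history, and passes the content version it saw to detect concurrent edits.
+func exampleEditComment(ctx context.Context, doer *user_model.User, c *issues_model.Comment, newContent string) error {
+ oldContent := c.Content
+ c.Content = newContent
+ return UpdateComment(ctx, c, c.ContentVersion, doer, oldContent)
+}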
+
+// DeleteComment deletes the comment
+func DeleteComment(ctx context.Context, doer *user_model.User, comment *issues_model.Comment) error {
+ err := db.WithTx(ctx, func(ctx context.Context) error {
+ return issues_model.DeleteComment(ctx, comment)
+ })
+ if err != nil {
+ return err
+ }
+
+ if err := comment.LoadReview(ctx); err != nil {
+ return err
+ }
+ if comment.Review == nil || comment.Review.Type != issues_model.ReviewTypePending {
+ notify_service.DeleteComment(ctx, doer, comment)
+ }
+
+ return nil
+}
diff --git a/services/issue/comments_test.go b/services/issue/comments_test.go
new file mode 100644
index 0000000..62547a5
--- /dev/null
+++ b/services/issue/comments_test.go
@@ -0,0 +1,147 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issue_test
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/test"
+ issue_service "code.gitea.io/gitea/services/issue"
+ "code.gitea.io/gitea/tests"
+
+ _ "code.gitea.io/gitea/services/webhook"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestDeleteComment(t *testing.T) {
+ // Use the webhook notification to check if a notification is fired for an action.
+ defer test.MockVariableValue(&setting.DisableWebhooks, false)()
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ t.Run("Normal comment", func(t *testing.T) {
+ defer tests.PrintCurrentTest(t)()
+
+ comment := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{ID: 2})
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: comment.IssueID})
+ unittest.AssertCount(t, &issues_model.Reaction{CommentID: comment.ID}, 2)
+
+ require.NoError(t, webhook_model.CreateWebhook(db.DefaultContext, &webhook_model.Webhook{
+ RepoID: issue.RepoID,
+ IsActive: true,
+ Events: `{"choose_events":true,"events":{"issue_comment": true}}`,
+ }))
+ hookTaskCount := unittest.GetCount(t, &webhook_model.HookTask{})
+
+ require.NoError(t, issue_service.DeleteComment(db.DefaultContext, nil, comment))
+
+ // The comment doesn't exist anymore.
+ unittest.AssertNotExistsBean(t, &issues_model.Comment{ID: comment.ID})
+ // Reactions don't exist anymore for this comment.
+ unittest.AssertNotExistsBean(t, &issues_model.Reaction{CommentID: comment.ID})
+ // Number of comments was decreased.
+ assert.EqualValues(t, issue.NumComments-1, unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: comment.IssueID}).NumComments)
+ // A notification was fired for the deletion of this comment.
+ assert.EqualValues(t, hookTaskCount+1, unittest.GetCount(t, &webhook_model.HookTask{}))
+ })
+
+ t.Run("Comment of pending review", func(t *testing.T) {
+ defer tests.PrintCurrentTest(t)()
+
+ // We have to ensure that this comment's linked review is pending.
+ comment := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{ID: 4}, "review_id != 0")
+ review := unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: comment.ReviewID})
+ assert.EqualValues(t, issues_model.ReviewTypePending, review.Type)
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: comment.IssueID})
+
+ require.NoError(t, webhook_model.CreateWebhook(db.DefaultContext, &webhook_model.Webhook{
+ RepoID: issue.RepoID,
+ IsActive: true,
+ Events: `{"choose_events":true,"events":{"issue_comment": true}}`,
+ }))
+ hookTaskCount := unittest.GetCount(t, &webhook_model.HookTask{})
+
+ require.NoError(t, issue_service.DeleteComment(db.DefaultContext, nil, comment))
+
+ // The comment doesn't exist anymore.
+ unittest.AssertNotExistsBean(t, &issues_model.Comment{ID: comment.ID})
+ // Ensure that the number of comments wasn't decreased.
+ assert.EqualValues(t, issue.NumComments, unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: comment.IssueID}).NumComments)
+ // No notification was fired for the deletion of this comment.
+ assert.EqualValues(t, hookTaskCount, unittest.GetCount(t, &webhook_model.HookTask{}))
+ })
+}
+
+func TestUpdateComment(t *testing.T) {
+ // Use the webhook notification to check if a notification is fired for an action.
+ defer test.MockVariableValue(&setting.DisableWebhooks, false)()
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ admin := unittest.AssertExistsAndLoadBean(t, &user_model.User{IsAdmin: true})
+ t.Run("Normal comment", func(t *testing.T) {
+ defer tests.PrintCurrentTest(t)()
+
+ comment := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{ID: 2})
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: comment.IssueID})
+ unittest.AssertNotExistsBean(t, &issues_model.ContentHistory{CommentID: comment.ID})
+ require.NoError(t, webhook_model.CreateWebhook(db.DefaultContext, &webhook_model.Webhook{
+ RepoID: issue.RepoID,
+ IsActive: true,
+ Events: `{"choose_events":true,"events":{"issue_comment": true}}`,
+ }))
+ hookTaskCount := unittest.GetCount(t, &webhook_model.HookTask{})
+ oldContent := comment.Content
+ comment.Content = "Hello!"
+
+ require.NoError(t, issue_service.UpdateComment(db.DefaultContext, comment, 1, admin, oldContent))
+
+ newComment := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{ID: 2})
+ // Content was updated.
+ assert.EqualValues(t, comment.Content, newComment.Content)
+ // Content version was updated.
+ assert.EqualValues(t, 2, newComment.ContentVersion)
+ // A notification was fired for the update of this comment.
+ assert.EqualValues(t, hookTaskCount+1, unittest.GetCount(t, &webhook_model.HookTask{}))
+ // Issue history was saved for this comment.
+ unittest.AssertExistsAndLoadBean(t, &issues_model.ContentHistory{CommentID: comment.ID, IsFirstCreated: true, ContentText: oldContent})
+ unittest.AssertExistsAndLoadBean(t, &issues_model.ContentHistory{CommentID: comment.ID, ContentText: comment.Content}, "is_first_created = false")
+ })
+
+ t.Run("Comment of pending review", func(t *testing.T) {
+ defer tests.PrintCurrentTest(t)()
+
+ comment := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{ID: 4}, "review_id != 0")
+ review := unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: comment.ReviewID})
+ assert.EqualValues(t, issues_model.ReviewTypePending, review.Type)
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: comment.IssueID})
+ unittest.AssertNotExistsBean(t, &issues_model.ContentHistory{CommentID: comment.ID})
+ require.NoError(t, webhook_model.CreateWebhook(db.DefaultContext, &webhook_model.Webhook{
+ RepoID: issue.RepoID,
+ IsActive: true,
+ Events: `{"choose_events":true,"events":{"issue_comment": true}}`,
+ }))
+ hookTaskCount := unittest.GetCount(t, &webhook_model.HookTask{})
+ oldContent := comment.Content
+ comment.Content = "Hello!"
+
+ require.NoError(t, issue_service.UpdateComment(db.DefaultContext, comment, 1, admin, oldContent))
+
+ newComment := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{ID: 4})
+ // Content was updated.
+ assert.EqualValues(t, comment.Content, newComment.Content)
+ // Content version was updated.
+ assert.EqualValues(t, 2, newComment.ContentVersion)
+ // No notification was fired for the update of this comment.
+ assert.EqualValues(t, hookTaskCount, unittest.GetCount(t, &webhook_model.HookTask{}))
+ // Issue history was not saved for this comment.
+ unittest.AssertNotExistsBean(t, &issues_model.ContentHistory{CommentID: comment.ID})
+ })
+}
diff --git a/services/issue/commit.go b/services/issue/commit.go
new file mode 100644
index 0000000..8b927d5
--- /dev/null
+++ b/services/issue/commit.go
@@ -0,0 +1,202 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issue
+
+import (
+ "context"
+ "fmt"
+ "html"
+ "net/url"
+ "regexp"
+ "strconv"
+ "strings"
+ "time"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/references"
+ "code.gitea.io/gitea/modules/repository"
+)
+
+const (
+ secondsByMinute = float64(time.Minute / time.Second) // seconds in a minute
+ secondsByHour = 60 * secondsByMinute // seconds in an hour
+ secondsByDay = 8 * secondsByHour // seconds in a working day (8 hours)
+ secondsByWeek = 5 * secondsByDay // seconds in a working week (5 days)
+ secondsByMonth = 4 * secondsByWeek // seconds in a month (4 working weeks)
+)
+
+var reDuration = regexp.MustCompile(`(?i)^(?:(\d+([\.,]\d+)?)(?:mo))?(?:(\d+([\.,]\d+)?)(?:w))?(?:(\d+([\.,]\d+)?)(?:d))?(?:(\d+([\.,]\d+)?)(?:h))?(?:(\d+([\.,]\d+)?)(?:m))?$`)
+
+// timeLogToAmount parses a time log string (months, weeks, days, hours, minutes) and returns the amount in seconds
+func timeLogToAmount(str string) int64 {
+ matches := reDuration.FindAllStringSubmatch(str, -1)
+ if len(matches) == 0 {
+ return 0
+ }
+
+ match := matches[0]
+
+ var a int64
+
+ // months
+ if len(match[1]) > 0 {
+ mo, _ := strconv.ParseFloat(strings.Replace(match[1], ",", ".", 1), 64)
+ a += int64(mo * secondsByMonth)
+ }
+
+ // weeks
+ if len(match[3]) > 0 {
+ w, _ := strconv.ParseFloat(strings.Replace(match[3], ",", ".", 1), 64)
+ a += int64(w * secondsByWeek)
+ }
+
+ // days
+ if len(match[5]) > 0 {
+ d, _ := strconv.ParseFloat(strings.Replace(match[5], ",", ".", 1), 64)
+ a += int64(d * secondsByDay)
+ }
+
+ // hours
+ if len(match[7]) > 0 {
+ h, _ := strconv.ParseFloat(strings.Replace(match[7], ",", ".", 1), 64)
+ a += int64(h * secondsByHour)
+ }
+
+ // minutes
+ if len(match[9]) > 0 {
+ m, _ := strconv.ParseFloat(strings.Replace(match[9], ",", ".", 1), 64)
+ a += int64(m * secondsByMinute)
+ }
+
+ return a
+}
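+
+// A worked example of the parsing above, on a hypothetical input: with the
+// working-time constants, "2d4h30m" yields 2*28800 + 4*3600 + 30*60 =
+// 57600 + 14400 + 1800 = 73800 seconds.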
+
+func issueAddTime(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, time time.Time, timeLog string) error {
+ amount := timeLogToAmount(timeLog)
+ if amount == 0 {
+ return nil
+ }
+
+ _, err := issues_model.AddTime(ctx, doer, issue, amount, time)
+ return err
+}
+
+// getIssueFromRef returns the issue referenced by a ref. Returns a nil *Issue
+// if the provided ref references a non-existent issue.
+func getIssueFromRef(ctx context.Context, repo *repo_model.Repository, index int64) (*issues_model.Issue, error) {
+ issue, err := issues_model.GetIssueByIndex(ctx, repo.ID, index)
+ if err != nil {
+ if issues_model.IsErrIssueNotExist(err) {
+ return nil, nil
+ }
+ return nil, err
+ }
+ return issue, nil
+}
+
+// UpdateIssuesCommit processes issue references in commit messages: it creates reference comments and closes or reopens issues when the keywords and permissions allow it.
+func UpdateIssuesCommit(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, commits []*repository.PushCommit, branchName string) error {
+ // Commits are appended in reverse order, so iterate backwards to handle the oldest commit first.
+ for i := len(commits) - 1; i >= 0; i-- {
+ c := commits[i]
+
+ type markKey struct {
+ ID int64
+ Action references.XRefAction
+ }
+
+ refMarked := make(container.Set[markKey])
+ var refRepo *repo_model.Repository
+ var refIssue *issues_model.Issue
+ var err error
+ for _, ref := range references.FindAllIssueReferences(c.Message) {
+ // issue is from another repo
+ if len(ref.Owner) > 0 && len(ref.Name) > 0 {
+ refRepo, err = repo_model.GetRepositoryByOwnerAndName(ctx, ref.Owner, ref.Name)
+ if err != nil {
+ if repo_model.IsErrRepoNotExist(err) {
+ log.Warn("Repository referenced in commit but does not exist: %v", err)
+ } else {
+ log.Error("repo_model.GetRepositoryByOwnerAndName: %v", err)
+ }
+ continue
+ }
+ } else {
+ refRepo = repo
+ }
+ if refIssue, err = getIssueFromRef(ctx, refRepo, ref.Index); err != nil {
+ return err
+ }
+ if refIssue == nil {
+ continue
+ }
+
+ perm, err := access_model.GetUserRepoPermission(ctx, refRepo, doer)
+ if err != nil {
+ return err
+ }
+
+ key := markKey{ID: refIssue.ID, Action: ref.Action}
+ if !refMarked.Add(key) {
+ continue
+ }
+
+ // FIXME: this kind of condition is all over the code, it should be consolidated in a single place
+ canClose := perm.IsAdmin() || perm.IsOwner() || perm.CanWriteIssuesOrPulls(refIssue.IsPull) || refIssue.PosterID == doer.ID
+ canComment := canClose || perm.CanReadIssuesOrPulls(refIssue.IsPull)
+
+ // Don't proceed if the user can't comment
+ if !canComment {
+ continue
+ }
+
+ message := fmt.Sprintf(`<a href="%s/commit/%s">%s</a>`, html.EscapeString(repo.Link()), html.EscapeString(url.PathEscape(c.Sha1)), html.EscapeString(strings.SplitN(c.Message, "\n", 2)[0]))
+ if err = CreateRefComment(ctx, doer, refRepo, refIssue, message, c.Sha1); err != nil {
+ return err
+ }
+
+ // Only issues can be closed/reopened this way, and user needs the correct permissions
+ if refIssue.IsPull || !canClose {
+ continue
+ }
+
+ // Only process closing/reopening keywords
+ if ref.Action != references.XRefActionCloses && ref.Action != references.XRefActionReopens {
+ continue
+ }
+
+ if !repo.CloseIssuesViaCommitInAnyBranch {
+ // If the issue was specified to be in a particular branch, don't allow commits in other branches to close it
+ if refIssue.Ref != "" {
+ issueBranchName := strings.TrimPrefix(refIssue.Ref, git.BranchPrefix)
+ if branchName != issueBranchName {
+ continue
+ }
+ // Otherwise, only process commits to the default branch
+ } else if branchName != repo.DefaultBranch {
+ continue
+ }
+ }
+ isClosed := ref.Action == references.XRefActionCloses
+ if isClosed && len(ref.TimeLog) > 0 {
+ if err := issueAddTime(ctx, refIssue, doer, c.Timestamp, ref.TimeLog); err != nil {
+ return err
+ }
+ }
+ if isClosed != refIssue.IsClosed {
+ refIssue.Repo = refRepo
+ if err := ChangeStatus(ctx, refIssue, doer, c.Sha1, isClosed); err != nil {
+ return err
+ }
+ }
+ }
+ }
+ return nil
+}
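+
+// For illustration (mirroring the tests in commit_test.go below): a pushed
+// commit whose message reads "close #2" closes issue 2 of this repo when
+// pushed to the default branch, "close user2/repo1#1" can close an issue in
+// another repo the pusher can write to, and a bare "#1" only leaves a
+// reference comment.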
diff --git a/services/issue/commit_test.go b/services/issue/commit_test.go
new file mode 100644
index 0000000..c3c3e4c
--- /dev/null
+++ b/services/issue/commit_test.go
@@ -0,0 +1,301 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issue
+
+import (
+ "testing"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestUpdateIssuesCommit(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ pushCommits := []*repository.PushCommit{
+ {
+ Sha1: "abcdef1",
+ CommitterEmail: "user2@example.com",
+ CommitterName: "User Two",
+ AuthorEmail: "user4@example.com",
+ AuthorName: "User Four",
+ Message: "start working on #FST-1, #1",
+ },
+ {
+ Sha1: "abcdef2",
+ CommitterEmail: "user2@example.com",
+ CommitterName: "User Two",
+ AuthorEmail: "user2@example.com",
+ AuthorName: "User Two",
+ Message: "a plain message",
+ },
+ {
+ Sha1: "abcdef2",
+ CommitterEmail: "user2@example.com",
+ CommitterName: "User Two",
+ AuthorEmail: "user2@example.com",
+ AuthorName: "User Two",
+ Message: "close #2",
+ },
+ }
+
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ repo.Owner = user
+
+ commentBean := &issues_model.Comment{
+ Type: issues_model.CommentTypeCommitRef,
+ CommitSHA: "abcdef1",
+ PosterID: user.ID,
+ IssueID: 1,
+ }
+ issueBean := &issues_model.Issue{RepoID: repo.ID, Index: 4}
+
+ unittest.AssertNotExistsBean(t, commentBean)
+ unittest.AssertNotExistsBean(t, &issues_model.Issue{RepoID: repo.ID, Index: 2}, "is_closed=1")
+ require.NoError(t, UpdateIssuesCommit(db.DefaultContext, user, repo, pushCommits, repo.DefaultBranch))
+ unittest.AssertExistsAndLoadBean(t, commentBean)
+ unittest.AssertExistsAndLoadBean(t, issueBean, "is_closed=1")
+ unittest.CheckConsistencyFor(t, &activities_model.Action{})
+
+ // Test that push to a non-default branch closes no issue.
+ pushCommits = []*repository.PushCommit{
+ {
+ Sha1: "abcdef1",
+ CommitterEmail: "user2@example.com",
+ CommitterName: "User Two",
+ AuthorEmail: "user4@example.com",
+ AuthorName: "User Four",
+ Message: "close #1",
+ },
+ }
+ repo = unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 3})
+ commentBean = &issues_model.Comment{
+ Type: issues_model.CommentTypeCommitRef,
+ CommitSHA: "abcdef1",
+ PosterID: user.ID,
+ IssueID: 6,
+ }
+ issueBean = &issues_model.Issue{RepoID: repo.ID, Index: 1}
+
+ unittest.AssertNotExistsBean(t, commentBean)
+ unittest.AssertNotExistsBean(t, &issues_model.Issue{RepoID: repo.ID, Index: 1}, "is_closed=1")
+ require.NoError(t, UpdateIssuesCommit(db.DefaultContext, user, repo, pushCommits, "non-existing-branch"))
+ unittest.AssertExistsAndLoadBean(t, commentBean)
+ unittest.AssertNotExistsBean(t, issueBean, "is_closed=1")
+ unittest.CheckConsistencyFor(t, &activities_model.Action{})
+
+ pushCommits = []*repository.PushCommit{
+ {
+ Sha1: "abcdef3",
+ CommitterEmail: "user2@example.com",
+ CommitterName: "User Two",
+ AuthorEmail: "user2@example.com",
+ AuthorName: "User Two",
+ Message: "close " + setting.AppURL + repo.FullName() + "/pulls/1",
+ },
+ }
+ repo = unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 3})
+ commentBean = &issues_model.Comment{
+ Type: issues_model.CommentTypeCommitRef,
+ CommitSHA: "abcdef3",
+ PosterID: user.ID,
+ IssueID: 6,
+ }
+ issueBean = &issues_model.Issue{RepoID: repo.ID, Index: 1}
+
+ unittest.AssertNotExistsBean(t, commentBean)
+ unittest.AssertNotExistsBean(t, &issues_model.Issue{RepoID: repo.ID, Index: 1}, "is_closed=1")
+ require.NoError(t, UpdateIssuesCommit(db.DefaultContext, user, repo, pushCommits, repo.DefaultBranch))
+ unittest.AssertExistsAndLoadBean(t, commentBean)
+ unittest.AssertExistsAndLoadBean(t, issueBean, "is_closed=1")
+ unittest.CheckConsistencyFor(t, &activities_model.Action{})
+}
+
+func TestUpdateIssuesCommit_Colon(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ pushCommits := []*repository.PushCommit{
+ {
+ Sha1: "abcdef2",
+ CommitterEmail: "user2@example.com",
+ CommitterName: "User Two",
+ AuthorEmail: "user2@example.com",
+ AuthorName: "User Two",
+ Message: "close: #2",
+ },
+ }
+
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ repo.Owner = user
+
+ issueBean := &issues_model.Issue{RepoID: repo.ID, Index: 4}
+
+ unittest.AssertNotExistsBean(t, &issues_model.Issue{RepoID: repo.ID, Index: 2}, "is_closed=1")
+ require.NoError(t, UpdateIssuesCommit(db.DefaultContext, user, repo, pushCommits, repo.DefaultBranch))
+ unittest.AssertExistsAndLoadBean(t, issueBean, "is_closed=1")
+ unittest.CheckConsistencyFor(t, &activities_model.Action{})
+}
+
+func TestUpdateIssuesCommit_Issue5957(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+
+ // Test that push to a non-default branch closes an issue.
+ pushCommits := []*repository.PushCommit{
+ {
+ Sha1: "abcdef1",
+ CommitterEmail: "user2@example.com",
+ CommitterName: "User Two",
+ AuthorEmail: "user4@example.com",
+ AuthorName: "User Four",
+ Message: "close #2",
+ },
+ }
+
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2})
+ commentBean := &issues_model.Comment{
+ Type: issues_model.CommentTypeCommitRef,
+ CommitSHA: "abcdef1",
+ PosterID: user.ID,
+ IssueID: 7,
+ }
+
+ issueBean := &issues_model.Issue{RepoID: repo.ID, Index: 2, ID: 7}
+
+ unittest.AssertNotExistsBean(t, commentBean)
+ unittest.AssertNotExistsBean(t, issueBean, "is_closed=1")
+ require.NoError(t, UpdateIssuesCommit(db.DefaultContext, user, repo, pushCommits, "non-existing-branch"))
+ unittest.AssertExistsAndLoadBean(t, commentBean)
+ unittest.AssertExistsAndLoadBean(t, issueBean, "is_closed=1")
+ unittest.CheckConsistencyFor(t, &activities_model.Action{})
+}
+
+func TestUpdateIssuesCommit_AnotherRepo(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+
+ // Test that a push to the default branch closes an issue in another repo
+ // if the user also has push permissions to that repo.
+ pushCommits := []*repository.PushCommit{
+ {
+ Sha1: "abcdef1",
+ CommitterEmail: "user2@example.com",
+ CommitterName: "User Two",
+ AuthorEmail: "user2@example.com",
+ AuthorName: "User Two",
+ Message: "close user2/repo1#1",
+ },
+ }
+
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2})
+ commentBean := &issues_model.Comment{
+ Type: issues_model.CommentTypeCommitRef,
+ CommitSHA: "abcdef1",
+ PosterID: user.ID,
+ IssueID: 1,
+ }
+
+ issueBean := &issues_model.Issue{RepoID: 1, Index: 1, ID: 1}
+
+ unittest.AssertNotExistsBean(t, commentBean)
+ unittest.AssertNotExistsBean(t, issueBean, "is_closed=1")
+ require.NoError(t, UpdateIssuesCommit(db.DefaultContext, user, repo, pushCommits, repo.DefaultBranch))
+ unittest.AssertExistsAndLoadBean(t, commentBean)
+ unittest.AssertExistsAndLoadBean(t, issueBean, "is_closed=1")
+ unittest.CheckConsistencyFor(t, &activities_model.Action{})
+}
+
+func TestUpdateIssuesCommit_AnotherRepo_FullAddress(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+
+ // Test that a push to the default branch closes an issue in another repo
+ // if the user also has push permissions to that repo.
+ pushCommits := []*repository.PushCommit{
+ {
+ Sha1: "abcdef1",
+ CommitterEmail: "user2@example.com",
+ CommitterName: "User Two",
+ AuthorEmail: "user2@example.com",
+ AuthorName: "User Two",
+ Message: "close " + setting.AppURL + "user2/repo1/issues/1",
+ },
+ }
+
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2})
+ commentBean := &issues_model.Comment{
+ Type: issues_model.CommentTypeCommitRef,
+ CommitSHA: "abcdef1",
+ PosterID: user.ID,
+ IssueID: 1,
+ }
+
+ issueBean := &issues_model.Issue{RepoID: 1, Index: 1, ID: 1}
+
+ unittest.AssertNotExistsBean(t, commentBean)
+ unittest.AssertNotExistsBean(t, issueBean, "is_closed=1")
+ require.NoError(t, UpdateIssuesCommit(db.DefaultContext, user, repo, pushCommits, repo.DefaultBranch))
+ unittest.AssertExistsAndLoadBean(t, commentBean)
+ unittest.AssertExistsAndLoadBean(t, issueBean, "is_closed=1")
+ unittest.CheckConsistencyFor(t, &activities_model.Action{})
+}
+
+func TestUpdateIssuesCommit_AnotherRepoNoPermission(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 10})
+
+ // Test that a push with a close reference *cannot* close an issue
+ // if the committer doesn't have push rights in that repo.
+ pushCommits := []*repository.PushCommit{
+ {
+ Sha1: "abcdef3",
+ CommitterEmail: "user10@example.com",
+ CommitterName: "User Ten",
+ AuthorEmail: "user10@example.com",
+ AuthorName: "User Ten",
+ Message: "close org3/repo3#1",
+ },
+ {
+ Sha1: "abcdef4",
+ CommitterEmail: "user10@example.com",
+ CommitterName: "User Ten",
+ AuthorEmail: "user10@example.com",
+ AuthorName: "User Ten",
+ Message: "close " + setting.AppURL + "org3/repo3/issues/1",
+ },
+ }
+
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 6})
+ commentBean := &issues_model.Comment{
+ Type: issues_model.CommentTypeCommitRef,
+ CommitSHA: "abcdef3",
+ PosterID: user.ID,
+ IssueID: 6,
+ }
+ commentBean2 := &issues_model.Comment{
+ Type: issues_model.CommentTypeCommitRef,
+ CommitSHA: "abcdef4",
+ PosterID: user.ID,
+ IssueID: 6,
+ }
+
+ issueBean := &issues_model.Issue{RepoID: 3, Index: 1, ID: 6}
+
+ unittest.AssertNotExistsBean(t, commentBean)
+ unittest.AssertNotExistsBean(t, commentBean2)
+ unittest.AssertNotExistsBean(t, issueBean, "is_closed=1")
+ require.NoError(t, UpdateIssuesCommit(db.DefaultContext, user, repo, pushCommits, repo.DefaultBranch))
+ unittest.AssertNotExistsBean(t, commentBean)
+ unittest.AssertNotExistsBean(t, commentBean2)
+ unittest.AssertNotExistsBean(t, issueBean, "is_closed=1")
+ unittest.CheckConsistencyFor(t, &activities_model.Action{})
+}
diff --git a/services/issue/content.go b/services/issue/content.go
new file mode 100644
index 0000000..612a9a6
--- /dev/null
+++ b/services/issue/content.go
@@ -0,0 +1,25 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issue
+
+import (
+ "context"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ user_model "code.gitea.io/gitea/models/user"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+// ChangeContent changes issue content, as the given user.
+func ChangeContent(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, content string, contentVersion int) (err error) {
+ oldContent := issue.Content
+
+ if err := issues_model.ChangeIssueContent(ctx, issue, doer, content, contentVersion); err != nil {
+ return err
+ }
+
+ notify_service.IssueChangeContent(ctx, doer, issue, oldContent)
+
+ return nil
+}
diff --git a/services/issue/issue.go b/services/issue/issue.go
new file mode 100644
index 0000000..5e72617
--- /dev/null
+++ b/services/issue/issue.go
@@ -0,0 +1,349 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issue
+
+import (
+ "context"
+ "fmt"
+ "time"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ project_model "code.gitea.io/gitea/models/project"
+ repo_model "code.gitea.io/gitea/models/repo"
+ system_model "code.gitea.io/gitea/models/system"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/timeutil"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+// NewIssue creates a new issue with labels for the repository.
+func NewIssue(ctx context.Context, repo *repo_model.Repository, issue *issues_model.Issue, labelIDs []int64, uuids []string, assigneeIDs []int64) error {
+ // Check if the user is not blocked by the repo's owner.
+ if user_model.IsBlocked(ctx, repo.OwnerID, issue.PosterID) {
+ return user_model.ErrBlockedByUser
+ }
+
+ if err := issues_model.NewIssue(ctx, repo, issue, labelIDs, uuids); err != nil {
+ return err
+ }
+
+ for _, assigneeID := range assigneeIDs {
+ if _, err := AddAssigneeIfNotAssigned(ctx, issue, issue.Poster, assigneeID, true); err != nil {
+ return err
+ }
+ }
+
+ mentions, err := issues_model.FindAndUpdateIssueMentions(ctx, issue, issue.Poster, issue.Content)
+ if err != nil {
+ return err
+ }
+
+ notify_service.NewIssue(ctx, issue, mentions)
+ if len(issue.Labels) > 0 {
+ notify_service.IssueChangeLabels(ctx, issue.Poster, issue, issue.Labels, nil)
+ }
+ if issue.Milestone != nil {
+ notify_service.IssueChangeMilestone(ctx, issue.Poster, issue, 0)
+ }
+
+ return nil
+}
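+
+// exampleOpenIssue is a hypothetical sketch of a minimal NewIssue caller;
+// label IDs, attachment UUIDs and assignee IDs are all optional and nil here.
+func exampleOpenIssue(ctx context.Context, repo *repo_model.Repository, poster *user_model.User) (*issues_model.Issue, error) {
+ issue := &issues_model.Issue{
+ RepoID: repo.ID,
+ Repo: repo,
+ Title: "Example issue",
+ PosterID: poster.ID,
+ Poster: poster,
+ Content: "opened via the service layer",
+ }
+ if err := NewIssue(ctx, repo, issue, nil, nil, nil); err != nil {
+ return nil, err
+ }
+ return issue, nil
+}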
+
+// ChangeTitle changes the title of this issue, as the given user.
+func ChangeTitle(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, title string) error {
+ oldTitle := issue.Title
+ issue.Title = title
+
+ if oldTitle == title {
+ return nil
+ }
+
+ if err := issue.LoadRepo(ctx); err != nil {
+ return err
+ }
+
+ if user_model.IsBlockedMultiple(ctx, []int64{issue.PosterID, issue.Repo.OwnerID}, doer.ID) {
+ return user_model.ErrBlockedByUser
+ }
+
+ if err := issues_model.ChangeIssueTitle(ctx, issue, doer, oldTitle); err != nil {
+ return err
+ }
+
+ var reviewNotifiers []*ReviewRequestNotifier
+ if issue.IsPull && issues_model.HasWorkInProgressPrefix(oldTitle) && !issues_model.HasWorkInProgressPrefix(title) {
+ var err error
+ reviewNotifiers, err = PullRequestCodeOwnersReview(ctx, issue, issue.PullRequest)
+ if err != nil {
+ log.Error("PullRequestCodeOwnersReview: %v", err)
+ }
+ }
+
+ notify_service.IssueChangeTitle(ctx, doer, issue, oldTitle)
+ ReviewRequestNotify(ctx, issue, issue.Poster, reviewNotifiers)
+
+ return nil
+}
+
+// ChangeIssueRef changes the branch of this issue, as the given user.
+func ChangeIssueRef(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, ref string) error {
+ oldRef := issue.Ref
+ issue.Ref = ref
+
+ if err := issues_model.ChangeIssueRef(ctx, issue, doer, oldRef); err != nil {
+ return err
+ }
+
+ notify_service.IssueChangeRef(ctx, doer, issue, oldRef)
+
+ return nil
+}
+
+// UpdateAssignees is a helper function to add or delete one or multiple issue assignee(s).
+// Deleting is done the GitHub way (quote from their API documentation):
+// https://developer.github.com/v3/issues/#edit-an-issue
+// "assignees" (array): Logins for Users to assign to this issue.
+// Pass one or more user logins to replace the set of assignees on this Issue.
+// Send an empty array ([]) to clear all assignees from the Issue.
+func UpdateAssignees(ctx context.Context, issue *issues_model.Issue, oneAssignee string, multipleAssignees []string, doer *user_model.User) (err error) {
+ var allNewAssignees []*user_model.User
+
+ // Keep the legacy single-assignee parameter for compatibility reasons
+ if oneAssignee != "" {
+ // Prevent double adding assignees
+ var isDouble bool
+ for _, assignee := range multipleAssignees {
+ if assignee == oneAssignee {
+ isDouble = true
+ break
+ }
+ }
+
+ if !isDouble {
+ multipleAssignees = append(multipleAssignees, oneAssignee)
+ }
+ }
+
+ // Loop through all assignees to add them
+ for _, assigneeName := range multipleAssignees {
+ assignee, err := user_model.GetUserByName(ctx, assigneeName)
+ if err != nil {
+ return err
+ }
+
+ allNewAssignees = append(allNewAssignees, assignee)
+ }
+
+ // Delete all old assignees not passed
+ if err = DeleteNotPassedAssignee(ctx, issue, doer, allNewAssignees); err != nil {
+ return err
+ }
+
+ // Add all new assignees.
+ // AddAssigneeIfNotAssigned checks whether the user exists, is already
+ // assigned (which they shouldn't be, since all assignees were removed above)
+ // and has access to the repo.
+ for _, assignee := range allNewAssignees {
+ // The "if not assigned" guard prevents double-adding, which would toggle the assignee off again.
+ _, err = AddAssigneeIfNotAssigned(ctx, issue, doer, assignee.ID, true)
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
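+
+// exampleReplaceAssignees is a hypothetical sketch of the GitHub-style
+// replacement semantics described above: the given logins (placeholders here)
+// become the complete new assignee set, and an empty list clears everyone.
+func exampleReplaceAssignees(ctx context.Context, issue *issues_model.Issue, doer *user_model.User) error {
+ return UpdateAssignees(ctx, issue, "", []string{"user2", "user4"}, doer)
+}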
+
+// DeleteIssue deletes an issue
+func DeleteIssue(ctx context.Context, doer *user_model.User, gitRepo *git.Repository, issue *issues_model.Issue) error {
+ // load issue before deleting it
+ if err := issue.LoadAttributes(ctx); err != nil {
+ return err
+ }
+ if err := issue.LoadPullRequest(ctx); err != nil {
+ return err
+ }
+
+ // delete entries in database
+ if err := deleteIssue(ctx, issue); err != nil {
+ return err
+ }
+
+ // delete pull request related git data
+ if issue.IsPull && gitRepo != nil {
+ if err := gitRepo.RemoveReference(fmt.Sprintf("%s%d/head", git.PullPrefix, issue.PullRequest.Index)); err != nil {
+ return err
+ }
+ }
+
+ // If the issue was pinned, unpin it now so the remaining pinned issues keep a consistent order
+ if issue.IsPinned() {
+ if err := issue.Unpin(ctx, doer); err != nil {
+ return err
+ }
+ }
+
+ notify_service.DeleteIssue(ctx, doer, issue)
+
+ return nil
+}
+
+// AddAssigneeIfNotAssigned adds an assignee only if they aren't already assigned to the issue.
+// It also checks whether the assigned user has access to the repo.
+func AddAssigneeIfNotAssigned(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, assigneeID int64, notify bool) (comment *issues_model.Comment, err error) {
+ assignee, err := user_model.GetUserByID(ctx, assigneeID)
+ if err != nil {
+ return nil, err
+ }
+
+ // Check if the user is already assigned
+ isAssigned, err := issues_model.IsUserAssignedToIssue(ctx, issue, assignee)
+ if err != nil {
+ return nil, err
+ }
+ if isAssigned {
+ // nothing to do
+ return nil, nil
+ }
+
+ valid, err := access_model.CanBeAssigned(ctx, assignee, issue.Repo, issue.IsPull)
+ if err != nil {
+ return nil, err
+ }
+ if !valid {
+ return nil, repo_model.ErrUserDoesNotHaveAccessToRepo{UserID: assigneeID, RepoName: issue.Repo.Name}
+ }
+
+ if notify {
+ _, comment, err = ToggleAssigneeWithNotify(ctx, issue, doer, assigneeID)
+ return comment, err
+ }
+ _, comment, err = issues_model.ToggleIssueAssignee(ctx, issue, doer, assigneeID)
+ return comment, err
+}
+
+// GetRefEndNamesAndURLs retrieves the ref end names (e.g. refs/heads/branch-name -> branch-name)
+// and their respective URLs.
+func GetRefEndNamesAndURLs(issues []*issues_model.Issue, repoLink string) (map[int64]string, map[int64]string) {
+ issueRefEndNames := make(map[int64]string, len(issues))
+ issueRefURLs := make(map[int64]string, len(issues))
+ for _, issue := range issues {
+ if issue.Ref != "" {
+ issueRefEndNames[issue.ID] = git.RefName(issue.Ref).ShortName()
+ issueRefURLs[issue.ID] = git.RefURL(repoLink, issue.Ref)
+ }
+ }
+ return issueRefEndNames, issueRefURLs
+}
+
+// deleteIssue deletes the issue
+func deleteIssue(ctx context.Context, issue *issues_model.Issue) error {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ e := db.GetEngine(ctx)
+ if _, err := e.ID(issue.ID).NoAutoCondition().Delete(issue); err != nil {
+ return err
+ }
+
+ // update the total issue numbers
+ if err := repo_model.UpdateRepoIssueNumbers(ctx, issue.RepoID, issue.IsPull, false); err != nil {
+ return err
+ }
+ // if the issue is closed, update the closed issue numbers
+ if issue.IsClosed {
+ if err := repo_model.UpdateRepoIssueNumbers(ctx, issue.RepoID, issue.IsPull, true); err != nil {
+ return err
+ }
+ }
+
+ if err := issues_model.UpdateMilestoneCounters(ctx, issue.MilestoneID); err != nil {
+ return fmt.Errorf("error updating counters for milestone id %d: %w",
+ issue.MilestoneID, err)
+ }
+
+ if err := activities_model.DeleteIssueActions(ctx, issue.RepoID, issue.ID, issue.Index); err != nil {
+ return err
+ }
+
+ // find attachments related to this issue and remove them
+ if err := issue.LoadAttributes(ctx); err != nil {
+ return err
+ }
+
+ for i := range issue.Attachments {
+ system_model.RemoveStorageWithNotice(ctx, storage.Attachments, "Delete issue attachment", issue.Attachments[i].RelativePath())
+ }
+
+ // delete all database data still assigned to this issue
+ if err := db.DeleteBeans(ctx,
+ &issues_model.ContentHistory{IssueID: issue.ID},
+ &issues_model.Comment{IssueID: issue.ID},
+ &issues_model.IssueLabel{IssueID: issue.ID},
+ &issues_model.IssueDependency{IssueID: issue.ID},
+ &issues_model.IssueAssignees{IssueID: issue.ID},
+ &issues_model.IssueUser{IssueID: issue.ID},
+ &activities_model.Notification{IssueID: issue.ID},
+ &issues_model.Reaction{IssueID: issue.ID},
+ &issues_model.IssueWatch{IssueID: issue.ID},
+ &issues_model.Stopwatch{IssueID: issue.ID},
+ &issues_model.TrackedTime{IssueID: issue.ID},
+ &project_model.ProjectIssue{IssueID: issue.ID},
+ &repo_model.Attachment{IssueID: issue.ID},
+ &issues_model.PullRequest{IssueID: issue.ID},
+ &issues_model.Comment{RefIssueID: issue.ID},
+ &issues_model.IssueDependency{DependencyID: issue.ID},
+ &issues_model.Comment{DependentIssueID: issue.ID},
+ ); err != nil {
+ return err
+ }
+
+ return committer.Commit()
+}
+
+// SetIssueUpdateDate sets the UpdatedUnix date and the NoAutoTime field of an
+// Issue if a non-nil 'updated' time is provided.
+//
+// To apply a specific update time, the DB update is performed with
+// NoAutoTime(). The 'NoAutoTime' boolean field on the Issue struct propagates
+// down to the DB update calls whether the automatic timestamp update should be skipped.
+func SetIssueUpdateDate(ctx context.Context, issue *issues_model.Issue, updated *time.Time, doer *user_model.User) error {
+ issue.NoAutoTime = false
+ if updated == nil {
+ return nil
+ }
+
+ if err := issue.LoadRepo(ctx); err != nil {
+ return err
+ }
+
+ // Check if the poster is allowed to set an update date
+ perm, err := access_model.GetUserRepoPermission(ctx, issue.Repo, doer)
+ if err != nil {
+ return err
+ }
+ if !perm.IsAdmin() && !perm.IsOwner() {
+ return fmt.Errorf("user needs to have admin or owner right")
+ }
+
+ // A simple guard against potential inconsistent calls
+ updatedUnix := timeutil.TimeStamp(updated.Unix())
+ if updatedUnix < issue.CreatedUnix || updatedUnix > timeutil.TimeStampNow() {
+ return fmt.Errorf("unallowed update date")
+ }
+
+ issue.UpdatedUnix = updatedUnix
+ issue.NoAutoTime = true
+
+ return nil
+}
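+
+// exampleBackdate is a hypothetical sketch: after SetIssueUpdateDate succeeds,
+// issue.NoAutoTime is true, so per the contract above a following column
+// update keeps the supplied timestamp instead of auto-updating it.
+func exampleBackdate(ctx context.Context, issue *issues_model.Issue, doer *user_model.User) error {
+ when := time.Date(2023, time.March, 1, 12, 0, 0, 0, time.UTC)
+ if err := SetIssueUpdateDate(ctx, issue, &when, doer); err != nil {
+ return err
+ }
+ return issues_model.UpdateIssueCols(ctx, issue, "updated_unix")
+}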
diff --git a/services/issue/issue_test.go b/services/issue/issue_test.go
new file mode 100644
index 0000000..a0bb88e
--- /dev/null
+++ b/services/issue/issue_test.go
@@ -0,0 +1,87 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issue
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestGetRefEndNamesAndURLs(t *testing.T) {
+ issues := []*issues_model.Issue{
+ {ID: 1, Ref: "refs/heads/branch1"},
+ {ID: 2, Ref: "refs/tags/tag1"},
+ {ID: 3, Ref: "c0ffee"},
+ }
+ repoLink := "/foo/bar"
+
+ endNames, urls := GetRefEndNamesAndURLs(issues, repoLink)
+ assert.EqualValues(t, map[int64]string{1: "branch1", 2: "tag1", 3: "c0ffee"}, endNames)
+ assert.EqualValues(t, map[int64]string{
+ 1: repoLink + "/src/branch/branch1",
+ 2: repoLink + "/src/tag/tag1",
+ 3: repoLink + "/src/commit/c0ffee",
+ }, urls)
+}
+
+func TestIssue_DeleteIssue(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ issueIDs, err := issues_model.GetIssueIDsByRepoID(db.DefaultContext, 1)
+ require.NoError(t, err)
+ assert.Len(t, issueIDs, 5)
+
+ issue := &issues_model.Issue{
+ RepoID: 1,
+ ID: issueIDs[2],
+ }
+
+ err = deleteIssue(db.DefaultContext, issue)
+ require.NoError(t, err)
+ issueIDs, err = issues_model.GetIssueIDsByRepoID(db.DefaultContext, 1)
+ require.NoError(t, err)
+ assert.Len(t, issueIDs, 4)
+
+ // check attachment removal
+ attachments, err := repo_model.GetAttachmentsByIssueID(db.DefaultContext, 4)
+ require.NoError(t, err)
+ issue, err = issues_model.GetIssueByID(db.DefaultContext, 4)
+ require.NoError(t, err)
+ err = deleteIssue(db.DefaultContext, issue)
+ require.NoError(t, err)
+ assert.Len(t, attachments, 2)
+ for i := range attachments {
+ attachment, err := repo_model.GetAttachmentByUUID(db.DefaultContext, attachments[i].UUID)
+ require.Error(t, err)
+ assert.True(t, repo_model.IsErrAttachmentNotExist(err))
+ assert.Nil(t, attachment)
+ }
+
+ // check issue dependencies
+ user, err := user_model.GetUserByID(db.DefaultContext, 1)
+ require.NoError(t, err)
+ issue1, err := issues_model.GetIssueByID(db.DefaultContext, 1)
+ require.NoError(t, err)
+ issue2, err := issues_model.GetIssueByID(db.DefaultContext, 2)
+ require.NoError(t, err)
+ err = issues_model.CreateIssueDependency(db.DefaultContext, user, issue1, issue2)
+ require.NoError(t, err)
+ left, err := issues_model.IssueNoDependenciesLeft(db.DefaultContext, issue1)
+ require.NoError(t, err)
+ assert.False(t, left)
+
+ err = deleteIssue(db.DefaultContext, issue2)
+ require.NoError(t, err)
+ left, err = issues_model.IssueNoDependenciesLeft(db.DefaultContext, issue1)
+ require.NoError(t, err)
+ assert.True(t, left)
+}
diff --git a/services/issue/label.go b/services/issue/label.go
new file mode 100644
index 0000000..6b8070d
--- /dev/null
+++ b/services/issue/label.go
@@ -0,0 +1,95 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issue
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ user_model "code.gitea.io/gitea/models/user"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+// ClearLabels clears all of an issue's labels
+func ClearLabels(ctx context.Context, issue *issues_model.Issue, doer *user_model.User) error {
+ if err := issues_model.ClearIssueLabels(ctx, issue, doer); err != nil {
+ return err
+ }
+
+ notify_service.IssueClearLabels(ctx, doer, issue)
+
+ return nil
+}
+
+// AddLabel adds a new label to the issue.
+func AddLabel(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, label *issues_model.Label) error {
+ if err := issues_model.NewIssueLabel(ctx, issue, label, doer); err != nil {
+ return err
+ }
+
+ notify_service.IssueChangeLabels(ctx, doer, issue, []*issues_model.Label{label}, nil)
+ return nil
+}
+
+// AddLabels adds a list of new labels to the issue.
+func AddLabels(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, labels []*issues_model.Label) error {
+ if err := issues_model.NewIssueLabels(ctx, issue, labels, doer); err != nil {
+ return err
+ }
+
+ notify_service.IssueChangeLabels(ctx, doer, issue, labels, nil)
+ return nil
+}
+
+// RemoveLabel removes the given label from the issue.
+func RemoveLabel(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, label *issues_model.Label) error {
+ dbCtx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ if err := issue.LoadRepo(dbCtx); err != nil {
+ return err
+ }
+
+ perm, err := access_model.GetUserRepoPermission(dbCtx, issue.Repo, doer)
+ if err != nil {
+ return err
+ }
+ if !perm.CanWriteIssuesOrPulls(issue.IsPull) {
+ if label.OrgID > 0 {
+ return issues_model.ErrOrgLabelNotExist{}
+ }
+ return issues_model.ErrRepoLabelNotExist{}
+ }
+
+ if err := issues_model.DeleteIssueLabel(dbCtx, issue, label, doer); err != nil {
+ return err
+ }
+
+ if err := committer.Commit(); err != nil {
+ return err
+ }
+
+ notify_service.IssueChangeLabels(ctx, doer, issue, nil, []*issues_model.Label{label})
+ return nil
+}
+
+// ReplaceLabels removes all current labels and adds the given labels to the issue.
+func ReplaceLabels(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, labels []*issues_model.Label) error {
+ old, err := issues_model.GetLabelsByIssueID(ctx, issue.ID)
+ if err != nil {
+ return err
+ }
+
+ if err := issues_model.ReplaceIssueLabels(ctx, issue, labels, doer); err != nil {
+ return err
+ }
+
+ notify_service.IssueChangeLabels(ctx, doer, issue, labels, old)
+ return nil
+}
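+
+// exampleRelabel is a hypothetical sketch of a design choice worth noting:
+// replacing the whole label set in one call (rather than AddLabel/RemoveLabel
+// pairs) produces a single IssueChangeLabels notification carrying both the
+// old and the new sets.
+func exampleRelabel(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, newSet []*issues_model.Label) error {
+ return ReplaceLabels(ctx, issue, doer, newSet)
+}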
diff --git a/services/issue/label_test.go b/services/issue/label_test.go
new file mode 100644
index 0000000..b9d2634
--- /dev/null
+++ b/services/issue/label_test.go
@@ -0,0 +1,62 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issue
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestIssue_AddLabels(t *testing.T) {
+ tests := []struct {
+ issueID int64
+ labelIDs []int64
+ doerID int64
+ }{
+ {1, []int64{1, 2}, 2}, // non-pull-request
+ {1, []int64{}, 2}, // non-pull-request, empty
+ {2, []int64{1, 2}, 2}, // pull-request
+ {2, []int64{}, 1}, // pull-request, empty
+ }
+ for _, test := range tests {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: test.issueID})
+ labels := make([]*issues_model.Label, len(test.labelIDs))
+ for i, labelID := range test.labelIDs {
+ labels[i] = unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: labelID})
+ }
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: test.doerID})
+ require.NoError(t, AddLabels(db.DefaultContext, issue, doer, labels))
+ for _, labelID := range test.labelIDs {
+ unittest.AssertExistsAndLoadBean(t, &issues_model.IssueLabel{IssueID: test.issueID, LabelID: labelID})
+ }
+ }
+}
+
+func TestIssue_AddLabel(t *testing.T) {
+ tests := []struct {
+ issueID int64
+ labelID int64
+ doerID int64
+ }{
+ {1, 2, 2}, // non-pull-request, not-already-added label
+ {1, 1, 2}, // non-pull-request, already-added label
+ {2, 2, 2}, // pull-request, not-already-added label
+ {2, 1, 2}, // pull-request, already-added label
+ }
+ for _, test := range tests {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: test.issueID})
+ label := unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: test.labelID})
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: test.doerID})
+ require.NoError(t, AddLabel(db.DefaultContext, issue, doer, label))
+ unittest.AssertExistsAndLoadBean(t, &issues_model.IssueLabel{IssueID: test.issueID, LabelID: test.labelID})
+ }
+}
diff --git a/services/issue/main_test.go b/services/issue/main_test.go
new file mode 100644
index 0000000..c3da441
--- /dev/null
+++ b/services/issue/main_test.go
@@ -0,0 +1,23 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issue
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/webhook"
+
+ _ "code.gitea.io/gitea/models/actions"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m, &unittest.TestOptions{
+ SetUp: func() error {
+ setting.LoadQueueSettings()
+ return webhook.Init()
+ },
+ })
+}
diff --git a/services/issue/milestone.go b/services/issue/milestone.go
new file mode 100644
index 0000000..31490c7
--- /dev/null
+++ b/services/issue/milestone.go
@@ -0,0 +1,110 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issue
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ user_model "code.gitea.io/gitea/models/user"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
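+// updateMilestoneCounters recalculates the counters of the milestone with the
+// given id. When issue.NoAutoTime is set, the milestone keeps its update
+// timestamp (bumped only when the issue's own timestamp is newer) instead of
+// having it auto-updated.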
+func updateMilestoneCounters(ctx context.Context, issue *issues_model.Issue, id int64) error {
+ if issue.NoAutoTime {
+ // We set the milestone's update date to the max of the
+ // milestone and issue update dates.
+ // Note: we cannot call UpdateMilestoneCounters() if the
+ // milestone's update date is to be kept, because that function
+ // auto-updates the dates.
+ milestone, err := issues_model.GetMilestoneByRepoID(ctx, issue.RepoID, id)
+ if err != nil {
+ return fmt.Errorf("GetMilestoneByRepoID: %w", err)
+ }
+ updatedUnix := milestone.UpdatedUnix
+ if issue.UpdatedUnix > updatedUnix {
+ updatedUnix = issue.UpdatedUnix
+ }
+ if err := issues_model.UpdateMilestoneCountersWithDate(ctx, id, updatedUnix); err != nil {
+ return err
+ }
+ } else {
+ if err := issues_model.UpdateMilestoneCounters(ctx, id); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func changeMilestoneAssign(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldMilestoneID int64) error {
+ // Only check if milestone exists if we don't remove it.
+ if issue.MilestoneID > 0 {
+ has, err := issues_model.HasMilestoneByRepoID(ctx, issue.RepoID, issue.MilestoneID)
+ if err != nil {
+ return fmt.Errorf("HasMilestoneByRepoID: %w", err)
+ }
+ if !has {
+ return fmt.Errorf("HasMilestoneByRepoID: issue doesn't exist")
+ }
+ }
+
+ if err := issues_model.UpdateIssueCols(ctx, issue, "milestone_id"); err != nil {
+ return err
+ }
+
+ if oldMilestoneID > 0 {
+ if err := updateMilestoneCounters(ctx, issue, oldMilestoneID); err != nil {
+ return err
+ }
+ }
+
+ if issue.MilestoneID > 0 {
+ if err := updateMilestoneCounters(ctx, issue, issue.MilestoneID); err != nil {
+ return err
+ }
+ }
+
+ if oldMilestoneID > 0 || issue.MilestoneID > 0 {
+ if err := issue.LoadRepo(ctx); err != nil {
+ return err
+ }
+
+ opts := &issues_model.CreateCommentOptions{
+ Type: issues_model.CommentTypeMilestone,
+ Doer: doer,
+ Repo: issue.Repo,
+ Issue: issue,
+ OldMilestoneID: oldMilestoneID,
+ MilestoneID: issue.MilestoneID,
+ }
+ if _, err := issues_model.CreateComment(ctx, opts); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// ChangeMilestoneAssign changes the milestone assignment of the issue.
+func ChangeMilestoneAssign(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, oldMilestoneID int64) (err error) {
+ dbCtx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ if err = changeMilestoneAssign(dbCtx, doer, issue, oldMilestoneID); err != nil {
+ return err
+ }
+
+ if err = committer.Commit(); err != nil {
+ return fmt.Errorf("Commit: %w", err)
+ }
+
+ notify_service.IssueChangeMilestone(ctx, doer, issue, oldMilestoneID)
+
+ return nil
+}
diff --git a/services/issue/milestone_test.go b/services/issue/milestone_test.go
new file mode 100644
index 0000000..1c06572
--- /dev/null
+++ b/services/issue/milestone_test.go
@@ -0,0 +1,35 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issue
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestChangeMilestoneAssign(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{RepoID: 1})
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ assert.NotNil(t, issue)
+ assert.NotNil(t, doer)
+
+ oldMilestoneID := issue.MilestoneID
+ issue.MilestoneID = 2
+ require.NoError(t, ChangeMilestoneAssign(db.DefaultContext, issue, doer, oldMilestoneID))
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{
+ IssueID: issue.ID,
+ Type: issues_model.CommentTypeMilestone,
+ MilestoneID: issue.MilestoneID,
+ OldMilestoneID: oldMilestoneID,
+ })
+ unittest.CheckConsistencyFor(t, &issues_model.Milestone{}, &issues_model.Issue{})
+}
diff --git a/services/issue/pull.go b/services/issue/pull.go
new file mode 100644
index 0000000..3b61c00
--- /dev/null
+++ b/services/issue/pull.go
@@ -0,0 +1,153 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issue
+
+import (
+ "context"
+ "fmt"
+ "time"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ org_model "code.gitea.io/gitea/models/organization"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+)
+
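+// getMergeBase resolves the merge base of baseBranch and headBranch. The
+// repository is added to itself as a uniquely named temporary remote so both
+// refs can be resolved, and the remote is removed again afterwards.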
+func getMergeBase(repo *git.Repository, pr *issues_model.PullRequest, baseBranch, headBranch string) (string, error) {
+ // Add a temporary remote
+ tmpRemote := fmt.Sprintf("mergebase-%d-%d", pr.ID, time.Now().UnixNano())
+ if err := repo.AddRemote(tmpRemote, repo.Path, false); err != nil {
+ return "", fmt.Errorf("AddRemote: %w", err)
+ }
+ defer func() {
+ if err := repo.RemoveRemote(tmpRemote); err != nil {
+ log.Error("getMergeBase: RemoveRemote: %v", err)
+ }
+ }()
+
+ mergeBase, _, err := repo.GetMergeBase(tmpRemote, baseBranch, headBranch)
+ return mergeBase, err
+}
+
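+// ReviewRequestNotifier bundles the review request comment with the reviewer
+// (either a single user or a whole team) it was created for.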
+type ReviewRequestNotifier struct {
+ Comment *issues_model.Comment
+ IsAdd bool
+ Reviewer *user_model.User
+ ReviewTeam *org_model.Team
+}
+
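+// PullRequestCodeOwnersReview reads a CODEOWNERS file (from CODEOWNERS,
+// docs/CODEOWNERS or .gitea/CODEOWNERS on the base repository's default
+// branch) and creates review requests for every user and team whose rule
+// matches a file changed between the merge base and the PR head; the PR
+// poster is skipped. As a hypothetical example, a rule like
+//
+//    docs/*.md @octocat @myorg/docs-team
+//
+// would request reviews from @octocat and the docs-team whenever a Markdown
+// file under docs/ changes. One ReviewRequestNotifier is returned per
+// requested reviewer.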
+func PullRequestCodeOwnersReview(ctx context.Context, issue *issues_model.Issue, pr *issues_model.PullRequest) ([]*ReviewRequestNotifier, error) {
+ files := []string{"CODEOWNERS", "docs/CODEOWNERS", ".gitea/CODEOWNERS"}
+
+ if pr.IsWorkInProgress(ctx) {
+ return nil, nil
+ }
+
+ if err := pr.LoadHeadRepo(ctx); err != nil {
+ return nil, err
+ }
+
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ return nil, err
+ }
+
+ if pr.BaseRepo.IsFork {
+ return nil, nil
+ }
+
+ repo, err := gitrepo.OpenRepository(ctx, pr.BaseRepo)
+ if err != nil {
+ return nil, err
+ }
+ defer repo.Close()
+
+ commit, err := repo.GetBranchCommit(pr.BaseRepo.DefaultBranch)
+ if err != nil {
+ return nil, err
+ }
+
+ var data string
+ for _, file := range files {
+ if blob, err := commit.GetBlobByPath(file); err == nil {
+ data, err = blob.GetBlobContent(setting.UI.MaxDisplayFileSize)
+ if err == nil {
+ break
+ }
+ }
+ }
+
+ rules, _ := issues_model.GetCodeOwnersFromContent(ctx, data)
+
+ // get the merge base
+ mergeBase, err := getMergeBase(repo, pr, git.BranchPrefix+pr.BaseBranch, pr.GetGitRefName())
+ if err != nil {
+ return nil, err
+ }
+
+ // See https://github.com/go-gitea/gitea/issues/29763: we need the files changed
+ // between the merge base and the head commit, not between the base branch and the head commit
+ changedFiles, err := repo.GetFilesChangedBetween(mergeBase, pr.GetGitRefName())
+ if err != nil {
+ return nil, err
+ }
+
+ uniqUsers := make(map[int64]*user_model.User)
+ uniqTeams := make(map[string]*org_model.Team)
+ for _, rule := range rules {
+ for _, f := range changedFiles {
+ if (rule.Rule.MatchString(f) && !rule.Negative) || (!rule.Rule.MatchString(f) && rule.Negative) {
+ for _, u := range rule.Users {
+ uniqUsers[u.ID] = u
+ }
+ for _, t := range rule.Teams {
+ uniqTeams[fmt.Sprintf("%d/%d", t.OrgID, t.ID)] = t
+ }
+ }
+ }
+ }
+
+ notifiers := make([]*ReviewRequestNotifier, 0, len(uniqUsers)+len(uniqTeams))
+
+ if err := issue.LoadPoster(ctx); err != nil {
+ return nil, err
+ }
+
+ for _, u := range uniqUsers {
+ permission, err := access_model.GetUserRepoPermission(ctx, issue.Repo, u)
+ if err != nil {
+ return nil, fmt.Errorf("GetUserRepoPermission: %w", err)
+ }
+ if u.ID != issue.Poster.ID && permission.CanRead(unit.TypePullRequests) {
+ comment, err := issues_model.AddReviewRequest(ctx, issue, u, issue.Poster)
+ if err != nil {
+ log.Warn("Failed add assignee user: %s to PR review: %s#%d, error: %s", u.Name, pr.BaseRepo.Name, pr.ID, err)
+ return nil, err
+ }
+ notifiers = append(notifiers, &ReviewRequestNotifier{
+ Comment: comment,
+ IsAdd: true,
+ Reviewer: u,
+ })
+ }
+ }
+ for _, t := range uniqTeams {
+ comment, err := issues_model.AddTeamReviewRequest(ctx, issue, t, issue.Poster)
+ if err != nil {
+ log.Warn("Failed add assignee team: %s to PR review: %s#%d, error: %s", t.Name, pr.BaseRepo.Name, pr.ID, err)
+ return nil, err
+ }
+ notifiers = append(notifiers, &ReviewRequestNotifier{
+ Comment: comment,
+ IsAdd: true,
+ ReviewTeam: t,
+ })
+ }
+
+ return notifiers, nil
+}
diff --git a/services/issue/reaction.go b/services/issue/reaction.go
new file mode 100644
index 0000000..dbb4735
--- /dev/null
+++ b/services/issue/reaction.go
@@ -0,0 +1,47 @@
+// Copyright 2023 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issue
+
+import (
+ "context"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ user_model "code.gitea.io/gitea/models/user"
+)
+
+// CreateIssueReaction creates a reaction on an issue.
+func CreateIssueReaction(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, content string) (*issues_model.Reaction, error) {
+ if err := issue.LoadRepo(ctx); err != nil {
+ return nil, err
+ }
+
+ // Check if the doer is blocked by the issue's poster or repository owner.
+ if user_model.IsBlockedMultiple(ctx, []int64{issue.PosterID, issue.Repo.OwnerID}, doer.ID) {
+ return nil, user_model.ErrBlockedByUser
+ }
+
+ return issues_model.CreateReaction(ctx, &issues_model.ReactionOptions{
+ Type: content,
+ DoerID: doer.ID,
+ IssueID: issue.ID,
+ })
+}
+
+// CreateCommentReaction creates a reaction on a comment.
+func CreateCommentReaction(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, comment *issues_model.Comment, content string) (*issues_model.Reaction, error) {
+ if err := issue.LoadRepo(ctx); err != nil {
+ return nil, err
+ }
+
+ // Check if the doer is blocked by the issue's poster, the comment's poster or repository owner.
+ if user_model.IsBlockedMultiple(ctx, []int64{comment.PosterID, issue.PosterID, issue.Repo.OwnerID}, doer.ID) {
+ return nil, user_model.ErrBlockedByUser
+ }
+
+ return issues_model.CreateReaction(ctx, &issues_model.ReactionOptions{
+ Type: content,
+ DoerID: doer.ID,
+ IssueID: issue.ID,
+ CommentID: comment.ID,
+ })
+}
diff --git a/services/issue/status.go b/services/issue/status.go
new file mode 100644
index 0000000..9b6c683
--- /dev/null
+++ b/services/issue/status.go
@@ -0,0 +1,36 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issue
+
+import (
+ "context"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+// ChangeStatus changes issue status to open or closed.
+func ChangeStatus(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, commitID string, closed bool) error {
+ comment, err := issues_model.ChangeIssueStatus(ctx, issue, doer, closed)
+ if err != nil {
+ if issues_model.IsErrDependenciesLeft(err) && closed {
+ if err := issues_model.FinishIssueStopwatchIfPossible(ctx, doer, issue); err != nil {
+ log.Error("Unable to stop stopwatch for issue[%d]#%d: %v", issue.ID, issue.Index, err)
+ }
+ }
+ return err
+ }
+
+ if closed {
+ if err := issues_model.FinishIssueStopwatchIfPossible(ctx, doer, issue); err != nil {
+ return err
+ }
+ }
+
+ notify_service.IssueChangeStatus(ctx, doer, commitID, issue, comment, closed)
+
+ return nil
+}
diff --git a/services/issue/template.go b/services/issue/template.go
new file mode 100644
index 0000000..47633e5
--- /dev/null
+++ b/services/issue/template.go
@@ -0,0 +1,193 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issue
+
+import (
+ "fmt"
+ "io"
+ "net/url"
+ "path"
+ "strings"
+
+ "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/issue/template"
+ "code.gitea.io/gitea/modules/log"
+ api "code.gitea.io/gitea/modules/structs"
+
+ "gopkg.in/yaml.v3"
+)
+
+// templateDirCandidates is the list of directories to search for issue templates
+var templateDirCandidates = []string{
+ "ISSUE_TEMPLATE",
+ "issue_template",
+ ".forgejo/ISSUE_TEMPLATE",
+ ".forgejo/issue_template",
+ ".gitea/ISSUE_TEMPLATE",
+ ".gitea/issue_template",
+ ".github/ISSUE_TEMPLATE",
+ ".github/issue_template",
+ ".gitlab/ISSUE_TEMPLATE",
+ ".gitlab/issue_template",
+}
+
+var templateConfigCandidates = []string{
+ ".forgejo/ISSUE_TEMPLATE/config",
+ ".forgejo/issue_template/config",
+ ".gitea/ISSUE_TEMPLATE/config",
+ ".gitea/issue_template/config",
+ ".github/ISSUE_TEMPLATE/config",
+ ".github/issue_template/config",
+}
+
+func GetDefaultTemplateConfig() api.IssueConfig {
+ return api.IssueConfig{
+ BlankIssuesEnabled: true,
+ ContactLinks: make([]api.IssueConfigContactLink, 0),
+ }
+}
+
+// GetTemplateConfig loads the given issue config file.
+// It never returns a nil config.
+func GetTemplateConfig(gitRepo *git.Repository, path string, commit *git.Commit) (api.IssueConfig, error) {
+ if gitRepo == nil {
+ return GetDefaultTemplateConfig(), nil
+ }
+
+ var err error
+
+ treeEntry, err := commit.GetTreeEntryByPath(path)
+ if err != nil {
+ return GetDefaultTemplateConfig(), err
+ }
+
+ reader, err := treeEntry.Blob().DataAsync()
+ if err != nil {
+ log.Debug("DataAsync: %v", err)
+ return GetDefaultTemplateConfig(), nil
+ }
+
+ defer reader.Close()
+
+ configContent, err := io.ReadAll(reader)
+ if err != nil {
+ return GetDefaultTemplateConfig(), err
+ }
+
+ issueConfig := GetDefaultTemplateConfig()
+ if err := yaml.Unmarshal(configContent, &issueConfig); err != nil {
+ return GetDefaultTemplateConfig(), err
+ }
+
+ for pos, link := range issueConfig.ContactLinks {
+ if link.Name == "" {
+ return GetDefaultTemplateConfig(), fmt.Errorf("contact_link at position %d is missing name key", pos+1)
+ }
+
+ if link.URL == "" {
+ return GetDefaultTemplateConfig(), fmt.Errorf("contact_link at position %d is missing url key", pos+1)
+ }
+
+ if link.About == "" {
+ return GetDefaultTemplateConfig(), fmt.Errorf("contact_link at position %d is missing about key", pos+1)
+ }
+
+ _, err = url.ParseRequestURI(link.URL)
+ if err != nil {
+ return GetDefaultTemplateConfig(), fmt.Errorf("%s is not a valid URL", link.URL)
+ }
+ }
+
+ return issueConfig, nil
+}
+
+// IsTemplateConfig reports whether the given path is an issue config file.
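+// For example, ".gitea/ISSUE_TEMPLATE/config.yaml" matches, while
+// "docs/config.yaml" does not.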
+func IsTemplateConfig(path string) bool {
+ for _, configName := range templateConfigCandidates {
+ if path == configName+".yaml" || path == configName+".yml" {
+ return true
+ }
+ }
+ return false
+}
+
+// GetTemplatesFromDefaultBranch checks for issue templates in the repo's default branch,
+// returns valid templates and the errors of invalid template files.
+func GetTemplatesFromDefaultBranch(repo *repo.Repository, gitRepo *git.Repository) ([]*api.IssueTemplate, map[string]error) {
+ var issueTemplates []*api.IssueTemplate
+
+ if repo.IsEmpty {
+ return issueTemplates, nil
+ }
+
+ commit, err := gitRepo.GetBranchCommit(repo.DefaultBranch)
+ if err != nil {
+ return issueTemplates, nil
+ }
+
+ invalidFiles := map[string]error{}
+ for _, dirName := range templateDirCandidates {
+ tree, err := commit.SubTree(dirName)
+ if err != nil {
+ log.Debug("get sub tree of %s: %v", dirName, err)
+ continue
+ }
+ entries, err := tree.ListEntries()
+ if err != nil {
+ log.Debug("list entries in %s: %v", dirName, err)
+ return issueTemplates, nil
+ }
+ for _, entry := range entries {
+ if !template.CouldBe(entry.Name()) {
+ continue
+ }
+ fullName := path.Join(dirName, entry.Name())
+ if it, err := template.UnmarshalFromEntry(entry, dirName); err != nil {
+ invalidFiles[fullName] = err
+ } else {
+ if !strings.HasPrefix(it.Ref, "refs/") { // Assume that the ref intended is always a branch - for tags users should use refs/tags/<ref>
+ it.Ref = git.BranchPrefix + it.Ref
+ }
+ issueTemplates = append(issueTemplates, it)
+ }
+ }
+ }
+ return issueTemplates, invalidFiles
+}
+
+// GetTemplateConfigFromDefaultBranch returns the issue config for this repo.
+// It never returns a nil config.
+func GetTemplateConfigFromDefaultBranch(repo *repo.Repository, gitRepo *git.Repository) (api.IssueConfig, error) {
+ if repo.IsEmpty {
+ return GetDefaultTemplateConfig(), nil
+ }
+
+ commit, err := gitRepo.GetBranchCommit(repo.DefaultBranch)
+ if err != nil {
+ return GetDefaultTemplateConfig(), err
+ }
+
+ for _, configName := range templateConfigCandidates {
+ if _, err := commit.GetTreeEntryByPath(configName + ".yaml"); err == nil {
+ return GetTemplateConfig(gitRepo, configName+".yaml", commit)
+ }
+
+ if _, err := commit.GetTreeEntryByPath(configName + ".yml"); err == nil {
+ return GetTemplateConfig(gitRepo, configName+".yml", commit)
+ }
+ }
+
+ return GetDefaultTemplateConfig(), nil
+}
+
+func HasTemplatesOrContactLinks(repo *repo.Repository, gitRepo *git.Repository) bool {
+ ret, _ := GetTemplatesFromDefaultBranch(repo, gitRepo)
+ if len(ret) > 0 {
+ return true
+ }
+
+ issueConfig, _ := GetTemplateConfigFromDefaultBranch(repo, gitRepo)
+ return len(issueConfig.ContactLinks) > 0
+}
diff --git a/services/lfs/locks.go b/services/lfs/locks.go
new file mode 100644
index 0000000..2a362b1
--- /dev/null
+++ b/services/lfs/locks.go
@@ -0,0 +1,340 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package lfs
+
+import (
+ "net/http"
+ "strconv"
+ "strings"
+
+ auth_model "code.gitea.io/gitea/models/auth"
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/json"
+ lfs_module "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/convert"
+)
+
+func handleLockListOut(ctx *context.Context, repo *repo_model.Repository, lock *git_model.LFSLock, err error) {
+ if err != nil {
+ if git_model.IsErrLFSLockNotExist(err) {
+ ctx.JSON(http.StatusOK, api.LFSLockList{
+ Locks: []*api.LFSLock{},
+ })
+ return
+ }
+ ctx.JSON(http.StatusInternalServerError, api.LFSLockError{
+ Message: "unable to list locks : Internal Server Error",
+ })
+ return
+ }
+ if repo.ID != lock.RepoID {
+ ctx.JSON(http.StatusOK, api.LFSLockList{
+ Locks: []*api.LFSLock{},
+ })
+ return
+ }
+ ctx.JSON(http.StatusOK, api.LFSLockList{
+ Locks: []*api.LFSLock{convert.ToLFSLock(ctx, lock)},
+ })
+}
+
+// GetListLockHandler lists locks
+func GetListLockHandler(ctx *context.Context) {
+ rv := getRequestContext(ctx)
+
+ repository, err := repo_model.GetRepositoryByOwnerAndName(ctx, rv.User, rv.Repo)
+ if err != nil {
+ log.Debug("Could not find repository: %s/%s - %s", rv.User, rv.Repo, err)
+ ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs")
+ ctx.JSON(http.StatusUnauthorized, api.LFSLockError{
+ Message: "You must have pull access to list locks",
+ })
+ return
+ }
+ repository.MustOwner(ctx)
+
+ context.CheckRepoScopedToken(ctx, repository, auth_model.Read)
+ if ctx.Written() {
+ return
+ }
+
+ authenticated := authenticate(ctx, repository, rv.Authorization, true, false)
+ if !authenticated {
+ ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs")
+ ctx.JSON(http.StatusUnauthorized, api.LFSLockError{
+ Message: "You must have pull access to list locks",
+ })
+ return
+ }
+ ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType)
+
+ cursor := ctx.FormInt("cursor")
+ if cursor < 0 {
+ cursor = 0
+ }
+ limit := ctx.FormInt("limit")
+ if limit > setting.LFS.LocksPagingNum && setting.LFS.LocksPagingNum > 0 {
+ limit = setting.LFS.LocksPagingNum
+ } else if limit < 0 {
+ limit = 0
+ }
+ id := ctx.FormString("id")
+ if id != "" { // Case where we request a specific id
+ v, err := strconv.ParseInt(id, 10, 64)
+ if err != nil {
+ ctx.JSON(http.StatusBadRequest, api.LFSLockError{
+ Message: "bad request : " + err.Error(),
+ })
+ return
+ }
+ lock, err := git_model.GetLFSLockByID(ctx, v)
+ if err != nil && !git_model.IsErrLFSLockNotExist(err) {
+ log.Error("Unable to get lock with ID[%s]: Error: %v", v, err)
+ }
+ handleLockListOut(ctx, repository, lock, err)
+ return
+ }
+
+ path := ctx.FormString("path")
+ if path != "" { // Case where we request a specific id
+ lock, err := git_model.GetLFSLock(ctx, repository, path)
+ if err != nil && !git_model.IsErrLFSLockNotExist(err) {
+ log.Error("Unable to get lock for repository %-v with path %s: Error: %v", repository, path, err)
+ }
+ handleLockListOut(ctx, repository, lock, err)
+ return
+ }
+
+ // Neither a path nor an id was requested: list the repository's locks
+ lockList, err := git_model.GetLFSLockByRepoID(ctx, repository.ID, cursor, limit)
+ if err != nil {
+ log.Error("Unable to list locks for repository ID[%d]: Error: %v", repository.ID, err)
+ ctx.JSON(http.StatusInternalServerError, api.LFSLockError{
+ Message: "unable to list locks : Internal Server Error",
+ })
+ return
+ }
+ lockListAPI := make([]*api.LFSLock, len(lockList))
+ next := ""
+ for i, l := range lockList {
+ lockListAPI[i] = convert.ToLFSLock(ctx, l)
+ }
+ if limit > 0 && len(lockList) == limit {
+ next = strconv.Itoa(cursor + 1)
+ }
+ ctx.JSON(http.StatusOK, api.LFSLockList{
+ Locks: lockListAPI,
+ Next: next,
+ })
+}
+
+// PostLockHandler creates a lock
+func PostLockHandler(ctx *context.Context) {
+ userName := ctx.Params("username")
+ repoName := strings.TrimSuffix(ctx.Params("reponame"), ".git")
+ authorization := ctx.Req.Header.Get("Authorization")
+
+ repository, err := repo_model.GetRepositoryByOwnerAndName(ctx, userName, repoName)
+ if err != nil {
+ log.Error("Unable to get repository: %s/%s Error: %v", userName, repoName, err)
+ ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs")
+ ctx.JSON(http.StatusUnauthorized, api.LFSLockError{
+ Message: "You must have push access to create locks",
+ })
+ return
+ }
+ repository.MustOwner(ctx)
+
+ context.CheckRepoScopedToken(ctx, repository, auth_model.Write)
+ if ctx.Written() {
+ return
+ }
+
+ authenticated := authenticate(ctx, repository, authorization, true, true)
+ if !authenticated {
+ ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs")
+ ctx.JSON(http.StatusUnauthorized, api.LFSLockError{
+ Message: "You must have push access to create locks",
+ })
+ return
+ }
+
+ ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType)
+
+ var req api.LFSLockRequest
+ bodyReader := ctx.Req.Body
+ defer bodyReader.Close()
+
+ dec := json.NewDecoder(bodyReader)
+ if err := dec.Decode(&req); err != nil {
+ log.Warn("Failed to decode lock request as json. Error: %v", err)
+ writeStatus(ctx, http.StatusBadRequest)
+ return
+ }
+
+ lock, err := git_model.CreateLFSLock(ctx, repository, &git_model.LFSLock{
+ Path: req.Path,
+ OwnerID: ctx.Doer.ID,
+ })
+ if err != nil {
+ if git_model.IsErrLFSLockAlreadyExist(err) {
+ ctx.JSON(http.StatusConflict, api.LFSLockError{
+ Lock: convert.ToLFSLock(ctx, lock),
+ Message: "already created lock",
+ })
+ return
+ }
+ if git_model.IsErrLFSUnauthorizedAction(err) {
+ ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs")
+ ctx.JSON(http.StatusUnauthorized, api.LFSLockError{
+ Message: "You must have push access to create locks : " + err.Error(),
+ })
+ return
+ }
+ log.Error("Unable to CreateLFSLock in repository %-v at %s for user %-v: Error: %v", repository, req.Path, ctx.Doer, err)
+ ctx.JSON(http.StatusInternalServerError, api.LFSLockError{
+ Message: "internal server error : Internal Server Error",
+ })
+ return
+ }
+ ctx.JSON(http.StatusCreated, api.LFSLockResponse{Lock: convert.ToLFSLock(ctx, lock)})
+}
+
+// VerifyLockHandler lists locks for verification
+func VerifyLockHandler(ctx *context.Context) {
+ userName := ctx.Params("username")
+ repoName := strings.TrimSuffix(ctx.Params("reponame"), ".git")
+ authorization := ctx.Req.Header.Get("Authorization")
+
+ repository, err := repo_model.GetRepositoryByOwnerAndName(ctx, userName, repoName)
+ if err != nil {
+ log.Error("Unable to get repository: %s/%s Error: %v", userName, repoName, err)
+ ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs")
+ ctx.JSON(http.StatusUnauthorized, api.LFSLockError{
+ Message: "You must have push access to verify locks",
+ })
+ return
+ }
+ repository.MustOwner(ctx)
+
+ context.CheckRepoScopedToken(ctx, repository, auth_model.Read)
+ if ctx.Written() {
+ return
+ }
+
+ authenticated := authenticate(ctx, repository, authorization, true, true)
+ if !authenticated {
+ ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs")
+ ctx.JSON(http.StatusUnauthorized, api.LFSLockError{
+ Message: "You must have push access to verify locks",
+ })
+ return
+ }
+
+ ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType)
+
+ cursor := ctx.FormInt("cursor")
+ if cursor < 0 {
+ cursor = 0
+ }
+ limit := ctx.FormInt("limit")
+ if limit > setting.LFS.LocksPagingNum && setting.LFS.LocksPagingNum > 0 {
+ limit = setting.LFS.LocksPagingNum
+ } else if limit < 0 {
+ limit = 0
+ }
+ lockList, err := git_model.GetLFSLockByRepoID(ctx, repository.ID, cursor, limit)
+ if err != nil {
+ log.Error("Unable to list locks for repository ID[%d]: Error: %v", repository.ID, err)
+ ctx.JSON(http.StatusInternalServerError, api.LFSLockError{
+ Message: "unable to list locks : Internal Server Error",
+ })
+ return
+ }
+ next := ""
+ if limit > 0 && len(lockList) == limit {
+ next = strconv.Itoa(cursor + 1)
+ }
+ lockOursListAPI := make([]*api.LFSLock, 0, len(lockList))
+ lockTheirsListAPI := make([]*api.LFSLock, 0, len(lockList))
+ for _, l := range lockList {
+ if l.OwnerID == ctx.Doer.ID {
+ lockOursListAPI = append(lockOursListAPI, convert.ToLFSLock(ctx, l))
+ } else {
+ lockTheirsListAPI = append(lockTheirsListAPI, convert.ToLFSLock(ctx, l))
+ }
+ }
+ ctx.JSON(http.StatusOK, api.LFSLockListVerify{
+ Ours: lockOursListAPI,
+ Theirs: lockTheirsListAPI,
+ Next: next,
+ })
+}
+
+// UnLockHandler deletes a lock
+func UnLockHandler(ctx *context.Context) {
+ userName := ctx.Params("username")
+ repoName := strings.TrimSuffix(ctx.Params("reponame"), ".git")
+ authorization := ctx.Req.Header.Get("Authorization")
+
+ repository, err := repo_model.GetRepositoryByOwnerAndName(ctx, userName, repoName)
+ if err != nil {
+ log.Error("Unable to get repository: %s/%s Error: %v", userName, repoName, err)
+ ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs")
+ ctx.JSON(http.StatusUnauthorized, api.LFSLockError{
+ Message: "You must have push access to delete locks",
+ })
+ return
+ }
+ repository.MustOwner(ctx)
+
+ context.CheckRepoScopedToken(ctx, repository, auth_model.Write)
+ if ctx.Written() {
+ return
+ }
+
+ authenticated := authenticate(ctx, repository, authorization, true, true)
+ if !authenticated {
+ ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs")
+ ctx.JSON(http.StatusUnauthorized, api.LFSLockError{
+ Message: "You must have push access to delete locks",
+ })
+ return
+ }
+
+ ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType)
+
+ var req api.LFSLockDeleteRequest
+ bodyReader := ctx.Req.Body
+ defer bodyReader.Close()
+
+ dec := json.NewDecoder(bodyReader)
+ if err := dec.Decode(&req); err != nil {
+ log.Warn("Failed to decode lock request as json. Error: %v", err)
+ writeStatus(ctx, http.StatusBadRequest)
+ return
+ }
+
+ lock, err := git_model.DeleteLFSLockByID(ctx, ctx.ParamsInt64("lid"), repository, ctx.Doer, req.Force)
+ if err != nil {
+ if git_model.IsErrLFSUnauthorizedAction(err) {
+ ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs")
+ ctx.JSON(http.StatusUnauthorized, api.LFSLockError{
+ Message: "You must have push access to delete locks : " + err.Error(),
+ })
+ return
+ }
+ log.Error("Unable to DeleteLFSLockByID[%d] by user %-v with force %t: Error: %v", ctx.ParamsInt64("lid"), ctx.Doer, req.Force, err)
+ ctx.JSON(http.StatusInternalServerError, api.LFSLockError{
+ Message: "unable to delete lock : Internal Server Error",
+ })
+ return
+ }
+ ctx.JSON(http.StatusOK, api.LFSLockResponse{Lock: convert.ToLFSLock(ctx, lock)})
+}
diff --git a/services/lfs/server.go b/services/lfs/server.go
new file mode 100644
index 0000000..a300de1
--- /dev/null
+++ b/services/lfs/server.go
@@ -0,0 +1,633 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package lfs
+
+import (
+ stdCtx "context"
+ "crypto/sha256"
+ "encoding/base64"
+ "encoding/hex"
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "path"
+ "regexp"
+ "strconv"
+ "strings"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ auth_model "code.gitea.io/gitea/models/auth"
+ git_model "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ quota_model "code.gitea.io/gitea/models/quota"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/json"
+ lfs_module "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/services/context"
+
+ "github.com/golang-jwt/jwt/v5"
+)
+
+// requestContext contains variables from the HTTP request.
+type requestContext struct {
+ User string
+ Repo string
+ Authorization string
+}
+
+// Claims is the set of claims carried by an LFS JWT
+type Claims struct {
+ RepoID int64
+ Op string
+ UserID int64
+ jwt.RegisteredClaims
+}
+
+// DownloadLink builds a URL to download the object.
+func (rc *requestContext) DownloadLink(p lfs_module.Pointer) string {
+ return setting.AppURL + path.Join(url.PathEscape(rc.User), url.PathEscape(rc.Repo+".git"), "info/lfs/objects", url.PathEscape(p.Oid))
+}
+
+// UploadLink builds a URL to upload the object.
+func (rc *requestContext) UploadLink(p lfs_module.Pointer) string {
+ return setting.AppURL + path.Join(url.PathEscape(rc.User), url.PathEscape(rc.Repo+".git"), "info/lfs/objects", url.PathEscape(p.Oid), strconv.FormatInt(p.Size, 10))
+}
+
+// VerifyLink builds a URL for verifying the object.
+func (rc *requestContext) VerifyLink(p lfs_module.Pointer) string {
+ return setting.AppURL + path.Join(url.PathEscape(rc.User), url.PathEscape(rc.Repo+".git"), "info/lfs/verify")
+}
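+
+// As an illustration, assuming AppURL is "https://example.com/" and a pointer
+// with OID "abc123" and size 42 in the hypothetical repository "user/repo",
+// the generated links are:
+//
+//    download: https://example.com/user/repo.git/info/lfs/objects/abc123
+//    upload:   https://example.com/user/repo.git/info/lfs/objects/abc123/42
+//    verify:   https://example.com/user/repo.git/info/lfs/verify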
+
+// CheckAcceptMediaType checks if the client accepts the LFS media type.
+func CheckAcceptMediaType(ctx *context.Context) {
+ mediaParts := strings.Split(ctx.Req.Header.Get("Accept"), ";")
+
+ if mediaParts[0] != lfs_module.MediaType {
+ log.Trace("Calling a LFS method without accepting the correct media type: %s", lfs_module.MediaType)
+ writeStatus(ctx, http.StatusUnsupportedMediaType)
+ return
+ }
+}
+
+var rangeHeaderRegexp = regexp.MustCompile(`bytes=(\d+)\-(\d*).*`)
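+// For example, "bytes=100-199" yields fromByte 100 and toByte 199, while
+// "bytes=100-" leaves the second group empty and the download runs to the
+// end of the object.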
+
+// DownloadHandler gets the content from the content store
+func DownloadHandler(ctx *context.Context) {
+ rc := getRequestContext(ctx)
+ p := lfs_module.Pointer{Oid: ctx.Params("oid")}
+
+ meta := getAuthenticatedMeta(ctx, rc, p, false)
+ if meta == nil {
+ return
+ }
+
+ // Support resume download using Range header
+ var fromByte, toByte int64
+ toByte = meta.Size - 1
+ statusCode := http.StatusOK
+ if rangeHdr := ctx.Req.Header.Get("Range"); rangeHdr != "" {
+ match := rangeHeaderRegexp.FindStringSubmatch(rangeHdr)
+ if len(match) > 1 {
+ statusCode = http.StatusPartialContent
+ fromByte, _ = strconv.ParseInt(match[1], 10, 64)
+
+ if fromByte >= meta.Size {
+ writeStatus(ctx, http.StatusRequestedRangeNotSatisfiable)
+ return
+ }
+
+ if match[2] != "" {
+ _toByte, _ := strconv.ParseInt(match[2], 10, 64)
+ if _toByte >= fromByte && _toByte < toByte {
+ toByte = _toByte
+ }
+ }
+
+ ctx.Resp.Header().Set("Content-Range", fmt.Sprintf("bytes %d-%d/%d", fromByte, toByte, meta.Size-fromByte))
+ ctx.Resp.Header().Set("Access-Control-Expose-Headers", "Content-Range")
+ }
+ }
+
+ contentStore := lfs_module.NewContentStore()
+ content, err := contentStore.Get(meta.Pointer)
+ if err != nil {
+ writeStatus(ctx, http.StatusNotFound)
+ return
+ }
+ defer content.Close()
+
+ if fromByte > 0 {
+ _, err = content.Seek(fromByte, io.SeekStart)
+ if err != nil {
+ log.Error("Whilst trying to read LFS OID[%s]: Unable to seek to %d Error: %v", meta.Oid, fromByte, err)
+ writeStatus(ctx, http.StatusInternalServerError)
+ return
+ }
+ }
+
+ contentLength := toByte + 1 - fromByte
+ ctx.Resp.Header().Set("Content-Length", strconv.FormatInt(contentLength, 10))
+ ctx.Resp.Header().Set("Content-Type", "application/octet-stream")
+
+ filename := ctx.Params("filename")
+ if len(filename) > 0 {
+ decodedFilename, err := base64.RawURLEncoding.DecodeString(filename)
+ if err == nil {
+ ctx.Resp.Header().Set("Content-Disposition", "attachment; filename=\""+string(decodedFilename)+"\"")
+ ctx.Resp.Header().Set("Access-Control-Expose-Headers", "Content-Disposition")
+ }
+ }
+
+ ctx.Resp.WriteHeader(statusCode)
+ if written, err := io.CopyN(ctx.Resp, content, contentLength); err != nil {
+ log.Error("Error whilst copying LFS OID[%s] to the response after %d bytes. Error: %v", meta.Oid, written, err)
+ }
+}
+
+// BatchHandler provides the batch API
+func BatchHandler(ctx *context.Context) {
+ var br lfs_module.BatchRequest
+ if err := decodeJSON(ctx.Req, &br); err != nil {
+ log.Trace("Unable to decode BATCH request vars: Error: %v", err)
+ writeStatus(ctx, http.StatusBadRequest)
+ return
+ }
+
+ var isUpload bool
+ if br.Operation == "upload" {
+ isUpload = true
+ } else if br.Operation == "download" {
+ isUpload = false
+ } else {
+ log.Trace("Attempt to BATCH with invalid operation: %s", br.Operation)
+ writeStatus(ctx, http.StatusBadRequest)
+ return
+ }
+
+ rc := getRequestContext(ctx)
+
+ repository := getAuthenticatedRepository(ctx, rc, isUpload)
+ if repository == nil {
+ return
+ }
+
+ if isUpload {
+ ok, err := quota_model.EvaluateForUser(ctx, ctx.Doer.ID, quota_model.LimitSubjectSizeGitLFS)
+ if err != nil {
+ log.Error("quota_model.EvaluateForUser: %v", err)
+ writeStatus(ctx, http.StatusInternalServerError)
+ return
+ }
+ if !ok {
+ writeStatusMessage(ctx, http.StatusRequestEntityTooLarge, "quota exceeded")
+ return
+ }
+ }
+
+ contentStore := lfs_module.NewContentStore()
+
+ var responseObjects []*lfs_module.ObjectResponse
+
+ for _, p := range br.Objects {
+ if !p.IsValid() {
+ responseObjects = append(responseObjects, buildObjectResponse(rc, p, false, false, &lfs_module.ObjectError{
+ Code: http.StatusUnprocessableEntity,
+ Message: "Oid or size are invalid",
+ }))
+ continue
+ }
+
+ exists, err := contentStore.Exists(p)
+ if err != nil {
+ log.Error("Unable to check if LFS OID[%s] exist. Error: %v", p.Oid, rc.User, rc.Repo, err)
+ writeStatus(ctx, http.StatusInternalServerError)
+ return
+ }
+
+ meta, err := git_model.GetLFSMetaObjectByOid(ctx, repository.ID, p.Oid)
+ if err != nil && err != git_model.ErrLFSObjectNotExist {
+ log.Error("Unable to get LFS MetaObject [%s] for %s/%s. Error: %v", p.Oid, rc.User, rc.Repo, err)
+ writeStatus(ctx, http.StatusInternalServerError)
+ return
+ }
+
+ if meta != nil && p.Size != meta.Size {
+ responseObjects = append(responseObjects, buildObjectResponse(rc, p, false, false, &lfs_module.ObjectError{
+ Code: http.StatusUnprocessableEntity,
+ Message: fmt.Sprintf("Object %s is not %d bytes", p.Oid, p.Size),
+ }))
+ continue
+ }
+
+ var responseObject *lfs_module.ObjectResponse
+ if isUpload {
+ var err *lfs_module.ObjectError
+ if !exists && setting.LFS.MaxFileSize > 0 && p.Size > setting.LFS.MaxFileSize {
+ err = &lfs_module.ObjectError{
+ Code: http.StatusUnprocessableEntity,
+ Message: fmt.Sprintf("Size must be less than or equal to %d", setting.LFS.MaxFileSize),
+ }
+ }
+
+ if exists && meta == nil {
+ accessible, err := git_model.LFSObjectAccessible(ctx, ctx.Doer, p.Oid)
+ if err != nil {
+ log.Error("Unable to check if LFS MetaObject [%s] is accessible. Error: %v", p.Oid, err)
+ writeStatus(ctx, http.StatusInternalServerError)
+ return
+ }
+ if accessible {
+ _, err := git_model.NewLFSMetaObject(ctx, repository.ID, p)
+ if err != nil {
+ log.Error("Unable to create LFS MetaObject [%s] for %s/%s. Error: %v", p.Oid, rc.User, rc.Repo, err)
+ writeStatus(ctx, http.StatusInternalServerError)
+ return
+ }
+ } else {
+ exists = false
+ }
+ }
+
+ responseObject = buildObjectResponse(rc, p, false, !exists, err)
+ } else {
+ var err *lfs_module.ObjectError
+ if !exists || meta == nil {
+ err = &lfs_module.ObjectError{
+ Code: http.StatusNotFound,
+ Message: http.StatusText(http.StatusNotFound),
+ }
+ }
+
+ responseObject = buildObjectResponse(rc, p, true, false, err)
+ }
+ responseObjects = append(responseObjects, responseObject)
+ }
+
+ respobj := &lfs_module.BatchResponse{Objects: responseObjects}
+
+ ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType)
+
+ enc := json.NewEncoder(ctx.Resp)
+ if err := enc.Encode(respobj); err != nil {
+ log.Error("Failed to encode representation as json. Error: %v", err)
+ }
+}
+
+// UploadHandler receives data from the client and puts it into the content store
+func UploadHandler(ctx *context.Context) {
+ rc := getRequestContext(ctx)
+
+ p := lfs_module.Pointer{Oid: ctx.Params("oid")}
+ var err error
+ if p.Size, err = strconv.ParseInt(ctx.Params("size"), 10, 64); err != nil {
+ writeStatusMessage(ctx, http.StatusUnprocessableEntity, err.Error())
+ return
+ }
+
+ if !p.IsValid() {
+ log.Trace("Attempt to access invalid LFS OID[%s] in %s/%s", p.Oid, rc.User, rc.Repo)
+ writeStatus(ctx, http.StatusUnprocessableEntity)
+ return
+ }
+
+ repository := getAuthenticatedRepository(ctx, rc, true)
+ if repository == nil {
+ return
+ }
+
+ contentStore := lfs_module.NewContentStore()
+ exists, err := contentStore.Exists(p)
+ if err != nil {
+ log.Error("Unable to check if LFS OID[%s] exist. Error: %v", p.Oid, err)
+ writeStatus(ctx, http.StatusInternalServerError)
+ return
+ }
+
+ if exists {
+ ok, err := quota_model.EvaluateForUser(ctx, ctx.Doer.ID, quota_model.LimitSubjectSizeGitLFS)
+ if err != nil {
+ log.Error("quota_model.EvaluateForUser: %v", err)
+ writeStatus(ctx, http.StatusInternalServerError)
+ return
+ }
+ if !ok {
+ writeStatusMessage(ctx, http.StatusRequestEntityTooLarge, "quota exceeded")
+ return
+ }
+ }
+
+ uploadOrVerify := func() error {
+ if exists {
+ accessible, err := git_model.LFSObjectAccessible(ctx, ctx.Doer, p.Oid)
+ if err != nil {
+ log.Error("Unable to check if LFS MetaObject [%s] is accessible. Error: %v", p.Oid, err)
+ return err
+ }
+ if !accessible {
+ // The file exists but the user has no access to it.
+ // The upload gets verified by hashing and size comparison to prove access to it.
+ hash := sha256.New()
+ written, err := io.Copy(hash, ctx.Req.Body)
+ if err != nil {
+ log.Error("Error creating hash. Error: %v", err)
+ return err
+ }
+
+ if written != p.Size {
+ return lfs_module.ErrSizeMismatch
+ }
+ if hex.EncodeToString(hash.Sum(nil)) != p.Oid {
+ return lfs_module.ErrHashMismatch
+ }
+ }
+ } else if err := contentStore.Put(p, ctx.Req.Body); err != nil {
+ log.Error("Error putting LFS MetaObject [%s] into content store. Error: %v", p.Oid, err)
+ return err
+ }
+ _, err := git_model.NewLFSMetaObject(ctx, repository.ID, p)
+ return err
+ }
+
+ defer ctx.Req.Body.Close()
+ if err := uploadOrVerify(); err != nil {
+ if errors.Is(err, lfs_module.ErrSizeMismatch) || errors.Is(err, lfs_module.ErrHashMismatch) {
+ log.Error("Upload does not match LFS MetaObject [%s]. Error: %v", p.Oid, err)
+ writeStatusMessage(ctx, http.StatusUnprocessableEntity, err.Error())
+ } else {
+ log.Error("Error whilst uploadOrVerify LFS OID[%s]: %v", p.Oid, err)
+ writeStatus(ctx, http.StatusInternalServerError)
+ }
+ if _, err = git_model.RemoveLFSMetaObjectByOid(ctx, repository.ID, p.Oid); err != nil {
+ log.Error("Error whilst removing MetaObject for LFS OID[%s]: %v", p.Oid, err)
+ }
+ return
+ }
+
+ writeStatus(ctx, http.StatusOK)
+}
+
+// VerifyHandler verifies the oid and its size against the content store
+func VerifyHandler(ctx *context.Context) {
+ var p lfs_module.Pointer
+ if err := decodeJSON(ctx.Req, &p); err != nil {
+ writeStatus(ctx, http.StatusUnprocessableEntity)
+ return
+ }
+
+ rc := getRequestContext(ctx)
+
+ meta := getAuthenticatedMeta(ctx, rc, p, true)
+ if meta == nil {
+ return
+ }
+
+ contentStore := lfs_module.NewContentStore()
+ ok, err := contentStore.Verify(meta.Pointer)
+
+ status := http.StatusOK
+ if err != nil {
+ log.Error("Error whilst verifying LFS OID[%s]: %v", p.Oid, err)
+ status = http.StatusInternalServerError
+ } else if !ok {
+ status = http.StatusNotFound
+ }
+ writeStatus(ctx, status)
+}
+
+func decodeJSON(req *http.Request, v any) error {
+ defer req.Body.Close()
+
+ dec := json.NewDecoder(req.Body)
+ return dec.Decode(v)
+}
+
+func getRequestContext(ctx *context.Context) *requestContext {
+ return &requestContext{
+ User: ctx.Params("username"),
+ Repo: strings.TrimSuffix(ctx.Params("reponame"), ".git"),
+ Authorization: ctx.Req.Header.Get("Authorization"),
+ }
+}
+
+func getAuthenticatedMeta(ctx *context.Context, rc *requestContext, p lfs_module.Pointer, requireWrite bool) *git_model.LFSMetaObject {
+ if !p.IsValid() {
+ log.Info("Attempt to access invalid LFS OID[%s] in %s/%s", p.Oid, rc.User, rc.Repo)
+ writeStatusMessage(ctx, http.StatusUnprocessableEntity, "Oid or size are invalid")
+ return nil
+ }
+
+ repository := getAuthenticatedRepository(ctx, rc, requireWrite)
+ if repository == nil {
+ return nil
+ }
+
+ meta, err := git_model.GetLFSMetaObjectByOid(ctx, repository.ID, p.Oid)
+ if err != nil {
+ log.Error("Unable to get LFS OID[%s] Error: %v", p.Oid, err)
+ writeStatus(ctx, http.StatusNotFound)
+ return nil
+ }
+
+ return meta
+}
+
+func getAuthenticatedRepository(ctx *context.Context, rc *requestContext, requireWrite bool) *repo_model.Repository {
+ repository, err := repo_model.GetRepositoryByOwnerAndName(ctx, rc.User, rc.Repo)
+ if err != nil {
+ log.Error("Unable to get repository: %s/%s Error: %v", rc.User, rc.Repo, err)
+ writeStatus(ctx, http.StatusNotFound)
+ return nil
+ }
+
+ if !authenticate(ctx, repository, rc.Authorization, false, requireWrite) {
+ requireAuth(ctx)
+ return nil
+ }
+
+ if requireWrite {
+ context.CheckRepoScopedToken(ctx, repository, auth_model.Write)
+ } else {
+ context.CheckRepoScopedToken(ctx, repository, auth_model.Read)
+ }
+
+ if ctx.Written() {
+ return nil
+ }
+
+ return repository
+}
+
+func buildObjectResponse(rc *requestContext, pointer lfs_module.Pointer, download, upload bool, err *lfs_module.ObjectError) *lfs_module.ObjectResponse {
+ rep := &lfs_module.ObjectResponse{Pointer: pointer}
+ if err != nil {
+ rep.Error = err
+ } else {
+ rep.Actions = make(map[string]*lfs_module.Link)
+
+ header := make(map[string]string)
+
+ if len(rc.Authorization) > 0 {
+ header["Authorization"] = rc.Authorization
+ }
+
+ if download {
+ var link *lfs_module.Link
+ if setting.LFS.Storage.MinioConfig.ServeDirect {
+ // If we have a signed url (S3, object storage), redirect to this directly.
+ u, err := storage.LFS.URL(pointer.RelativePath(), pointer.Oid)
+ if u != nil && err == nil {
+ // Presigned url does not need the Authorization header
+ // https://github.com/go-gitea/gitea/issues/21525
+ delete(header, "Authorization")
+ link = &lfs_module.Link{Href: u.String(), Header: header}
+ }
+ }
+ if link == nil {
+ link = &lfs_module.Link{Href: rc.DownloadLink(pointer), Header: header}
+ }
+ rep.Actions["download"] = link
+ }
+ if upload {
+ rep.Actions["upload"] = &lfs_module.Link{Href: rc.UploadLink(pointer), Header: header}
+
+ verifyHeader := make(map[string]string)
+ for key, value := range header {
+ verifyHeader[key] = value
+ }
+
+ // This is only needed to workaround https://github.com/git-lfs/git-lfs/issues/3662
+ verifyHeader["Accept"] = lfs_module.AcceptHeader
+
+ rep.Actions["verify"] = &lfs_module.Link{Href: rc.VerifyLink(pointer), Header: verifyHeader}
+ }
+ }
+ return rep
+}
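+
+// For a downloadable object, the entry rendered into the batch response looks
+// roughly like this (a sketch; see the LFS batch API for the full schema):
+//
+//    {
+//      "oid": "abc123",
+//      "size": 42,
+//      "actions": {
+//        "download": {"href": ".../info/lfs/objects/abc123", "header": {...}}
+//      }
+//    }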
+
+func writeStatus(ctx *context.Context, status int) {
+ writeStatusMessage(ctx, status, http.StatusText(status))
+}
+
+func writeStatusMessage(ctx *context.Context, status int, message string) {
+ ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType)
+ ctx.Resp.WriteHeader(status)
+
+ er := lfs_module.ErrorResponse{Message: message}
+
+ enc := json.NewEncoder(ctx.Resp)
+ if err := enc.Encode(er); err != nil {
+ log.Error("Failed to encode error response as json. Error: %v", err)
+ }
+}
+
+// authenticate uses the authorization string to determine whether to
+// proceed. Credentials are either the already signed-in ctx.Doer or an
+// LFS JWT carried in the Authorization header.
+func authenticate(ctx *context.Context, repository *repo_model.Repository, authorization string, requireSigned, requireWrite bool) bool {
+ accessMode := perm.AccessModeRead
+ if requireWrite {
+ accessMode = perm.AccessModeWrite
+ }
+
+ if ctx.Data["IsActionsToken"] == true {
+ taskID := ctx.Data["ActionsTaskID"].(int64)
+ task, err := actions_model.GetTaskByID(ctx, taskID)
+ if err != nil {
+ log.Error("Unable to GetTaskByID for task[%d] Error: %v", taskID, err)
+ return false
+ }
+ if task.RepoID != repository.ID {
+ return false
+ }
+
+ if task.IsForkPullRequest {
+ return accessMode <= perm.AccessModeRead
+ }
+ return accessMode <= perm.AccessModeWrite
+ }
+
+ // ctx.IsSigned is unnecessary here, this will be checked in perm.CanAccess
+ perm, err := access_model.GetUserRepoPermission(ctx, repository, ctx.Doer)
+ if err != nil {
+ log.Error("Unable to GetUserRepoPermission for user %-v in repo %-v Error: %v", ctx.Doer, repository, err)
+ return false
+ }
+
+ canRead := perm.CanAccess(accessMode, unit.TypeCode)
+ if canRead && (!requireSigned || ctx.IsSigned) {
+ return true
+ }
+
+ user, err := parseToken(ctx, authorization, repository, accessMode)
+ if err != nil {
+ // Most of these are Warn level - the true internal server errors are logged in parseToken already
+ log.Warn("Authentication failure for provided token with Error: %v", err)
+ return false
+ }
+ ctx.Doer = user
+ return true
+}
+
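+// handleLFSToken validates an LFS JWT minted for the target repository and
+// returns the user it belongs to. It returns (nil, nil) when the value is
+// not a JWT or fails to parse, so callers can fall back to other credentials.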
+func handleLFSToken(ctx stdCtx.Context, tokenSHA string, target *repo_model.Repository, mode perm.AccessMode) (*user_model.User, error) {
+ if !strings.Contains(tokenSHA, ".") {
+ return nil, nil
+ }
+ token, err := jwt.ParseWithClaims(tokenSHA, &Claims{}, func(t *jwt.Token) (any, error) {
+ if _, ok := t.Method.(*jwt.SigningMethodHMAC); !ok {
+ return nil, fmt.Errorf("unexpected signing method: %v", t.Header["alg"])
+ }
+ return setting.LFS.JWTSecretBytes, nil
+ })
+ if err != nil {
+ return nil, nil
+ }
+
+ claims, claimsOk := token.Claims.(*Claims)
+ if !token.Valid || !claimsOk {
+ return nil, fmt.Errorf("invalid token claim")
+ }
+
+ if claims.RepoID != target.ID {
+ return nil, fmt.Errorf("invalid token claim")
+ }
+
+ if mode == perm.AccessModeWrite && claims.Op != "upload" {
+ return nil, fmt.Errorf("invalid token claim")
+ }
+
+ u, err := user_model.GetUserByID(ctx, claims.UserID)
+ if err != nil {
+ log.Error("Unable to GetUserById[%d]: Error: %v", claims.UserID, err)
+ return nil, err
+ }
+ return u, nil
+}
+
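+// parseToken accepts an Authorization header value of the form
+// "Bearer <jwt>" or "token <jwt>" and resolves it via handleLFSToken.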
+func parseToken(ctx stdCtx.Context, authorization string, target *repo_model.Repository, mode perm.AccessMode) (*user_model.User, error) {
+ if authorization == "" {
+ return nil, fmt.Errorf("no token")
+ }
+
+ parts := strings.SplitN(authorization, " ", 2)
+ if len(parts) != 2 {
+ return nil, fmt.Errorf("no token")
+ }
+ tokenSHA := parts[1]
+ switch strings.ToLower(parts[0]) {
+ case "bearer":
+ fallthrough
+ case "token":
+ return handleLFSToken(ctx, tokenSHA, target, mode)
+ }
+ return nil, fmt.Errorf("token not found")
+}
+
+func requireAuth(ctx *context.Context) {
+ ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs")
+ writeStatus(ctx, http.StatusUnauthorized)
+}
diff --git a/services/mailer/incoming/incoming.go b/services/mailer/incoming/incoming.go
new file mode 100644
index 0000000..ac6f32c
--- /dev/null
+++ b/services/mailer/incoming/incoming.go
@@ -0,0 +1,394 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package incoming
+
+import (
+ "context"
+ "crypto/tls"
+ "fmt"
+ net_mail "net/mail"
+ "regexp"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/process"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/mailer/token"
+
+ "code.forgejo.org/forgejo/reply"
+ "github.com/emersion/go-imap"
+ "github.com/emersion/go-imap/client"
+ "github.com/jhillyerd/enmime"
+)
+
+var (
+ addressTokenRegex *regexp.Regexp
+ referenceTokenRegex *regexp.Regexp
+)
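+
+// Assuming REPLY_TO_ADDRESS is configured as "incoming+%{token}@example.com"
+// (with the token placeholder "%{token}"), addressTokenRegex matches
+// "incoming+<token>@example.com" and captures the token; referenceTokenRegex
+// captures it from a References entry such as "reply-<token>@example.com".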
+
+func Init(ctx context.Context) error {
+ if !setting.IncomingEmail.Enabled {
+ return nil
+ }
+
+ var err error
+ addressTokenRegex, err = regexp.Compile(
+ fmt.Sprintf(
+ `\A%s\z`,
+ strings.Replace(regexp.QuoteMeta(setting.IncomingEmail.ReplyToAddress), regexp.QuoteMeta(setting.IncomingEmail.TokenPlaceholder), "(.+)", 1),
+ ),
+ )
+ if err != nil {
+ return err
+ }
+ referenceTokenRegex, err = regexp.Compile(fmt.Sprintf(`\Areply-(.+)@%s\z`, regexp.QuoteMeta(setting.Domain)))
+ if err != nil {
+ return err
+ }
+
+ go func() {
+ ctx, _, finished := process.GetManager().AddTypedContext(ctx, "Incoming Email", process.SystemProcessType, true)
+ defer finished()
+
+ // This background job processes incoming emails. It uses the IMAP IDLE command to get notified about incoming emails.
+ // The following loop restarts the processing logic after errors until ctx indicates to stop.
+
+ for {
+ select {
+ case <-ctx.Done():
+ return
+ default:
+ if err := processIncomingEmails(ctx); err != nil {
+ log.Error("Error while processing incoming emails: %v", err)
+ }
+ select {
+ case <-ctx.Done():
+ return
+ case <-time.NewTimer(10 * time.Second).C:
+ }
+ }
+ }
+ }()
+
+ return nil
+}
+
+// processIncomingEmails is the "main" method with the wait/process loop
+func processIncomingEmails(ctx context.Context) error {
+ server := fmt.Sprintf("%s:%d", setting.IncomingEmail.Host, setting.IncomingEmail.Port)
+
+ var c *client.Client
+ var err error
+ if setting.IncomingEmail.UseTLS {
+ c, err = client.DialTLS(server, &tls.Config{InsecureSkipVerify: setting.IncomingEmail.SkipTLSVerify})
+ } else {
+ c, err = client.Dial(server)
+ }
+ if err != nil {
+ return fmt.Errorf("could not connect to server '%s': %w", server, err)
+ }
+
+ if err := c.Login(setting.IncomingEmail.Username, setting.IncomingEmail.Password); err != nil {
+ return fmt.Errorf("could not login: %w", err)
+ }
+ defer func() {
+ if err := c.Logout(); err != nil {
+ log.Error("Logout from incoming email server failed: %v", err)
+ }
+ }()
+
+ if _, err := c.Select(setting.IncomingEmail.Mailbox, false); err != nil {
+ return fmt.Errorf("selecting box '%s' failed: %w", setting.IncomingEmail.Mailbox, err)
+ }
+
+ // The following loop processes messages. If there are no messages available, IMAP IDLE is used to wait for new messages.
+ // This process is repeated until an IMAP error occurs or ctx indicates to stop.
+
+ for {
+ select {
+ case <-ctx.Done():
+ return nil
+ default:
+ if err := processMessages(ctx, c); err != nil {
+ return fmt.Errorf("could not process messages: %w", err)
+ }
+ if err := waitForUpdates(ctx, c); err != nil {
+ return fmt.Errorf("wait for updates failed: %w", err)
+ }
+ select {
+ case <-ctx.Done():
+ return nil
+ case <-time.NewTimer(time.Second).C:
+ }
+ }
+ }
+}
+
+// waitForUpdates uses IMAP IDLE to wait for new emails
+func waitForUpdates(ctx context.Context, c *client.Client) error {
+ updates := make(chan client.Update, 1)
+
+ c.Updates = updates
+ defer func() {
+ c.Updates = nil
+ }()
+
+ errs := make(chan error, 1)
+ stop := make(chan struct{})
+ go func() {
+ errs <- c.Idle(stop, nil)
+ }()
+
+ stopped := false
+ for {
+ select {
+ case update := <-updates:
+ switch update.(type) {
+ case *client.MailboxUpdate:
+ if !stopped {
+ close(stop)
+ stopped = true
+ }
+ default:
+ }
+ case err := <-errs:
+ if err != nil {
+ return fmt.Errorf("imap idle failed: %w", err)
+ }
+ return nil
+ case <-ctx.Done():
+ return nil
+ }
+ }
+}
+
+// processMessages searches unread mails and processes them.
+func processMessages(ctx context.Context, c *client.Client) error {
+ criteria := imap.NewSearchCriteria()
+ criteria.WithoutFlags = []string{imap.SeenFlag}
+ criteria.Smaller = setting.IncomingEmail.MaximumMessageSize
+ ids, err := c.Search(criteria)
+ if err != nil {
+ return fmt.Errorf("imap search failed: %w", err)
+ }
+
+ if len(ids) == 0 {
+ return nil
+ }
+
+ seqset := new(imap.SeqSet)
+ seqset.AddNum(ids...)
+ messages := make(chan *imap.Message, 10)
+
+ section := &imap.BodySectionName{}
+
+ errs := make(chan error, 1)
+ go func() {
+ errs <- c.Fetch(
+ seqset,
+ []imap.FetchItem{section.FetchItem()},
+ messages,
+ )
+ }()
+
+ handledSet := new(imap.SeqSet)
+loop:
+ for {
+ select {
+ case <-ctx.Done():
+ break loop
+ case msg, ok := <-messages:
+ if !ok {
+ if setting.IncomingEmail.DeleteHandledMessage && !handledSet.Empty() {
+ if err := c.Store(
+ handledSet,
+ imap.FormatFlagsOp(imap.AddFlags, true),
+ []any{imap.DeletedFlag},
+ nil,
+ ); err != nil {
+ return fmt.Errorf("imap store failed: %w", err)
+ }
+
+ if err := c.Expunge(nil); err != nil {
+ return fmt.Errorf("imap expunge failed: %w", err)
+ }
+ }
+ return nil
+ }
+
+ err := func() error {
+ if isAlreadyHandled(handledSet, msg) {
+ log.Debug("Skipping already handled message")
+ return nil
+ }
+
+ r := msg.GetBody(section)
+ if r == nil {
+ return fmt.Errorf("could not get body from message: %w", err)
+ }
+
+ env, err := enmime.ReadEnvelope(r)
+ if err != nil {
+ return fmt.Errorf("could not read envelope: %w", err)
+ }
+
+ if isAutomaticReply(env) {
+ log.Debug("Skipping automatic email reply")
+ return nil
+ }
+
+ t := searchTokenInHeaders(env)
+ if t == "" {
+ log.Debug("Incoming email token not found in headers")
+ return nil
+ }
+
+ handlerType, user, payload, err := token.ExtractToken(ctx, t)
+ if err != nil {
+ if _, ok := err.(*token.ErrToken); ok {
+ log.Info("Invalid incoming email token: %v", err)
+ return nil
+ }
+ return err
+ }
+
+ handler, ok := handlers[handlerType]
+ if !ok {
+ return fmt.Errorf("unexpected handler type: %v", handlerType)
+ }
+
+ content := getContentFromMailReader(env)
+
+ if err := handler.Handle(ctx, content, user, payload); err != nil {
+ return fmt.Errorf("could not handle message: %w", err)
+ }
+
+ handledSet.AddNum(msg.SeqNum)
+
+ return nil
+ }()
+ if err != nil {
+ log.Error("Error while processing incoming email[%v]: %v", msg.Uid, err)
+ }
+ }
+ }
+
+ if err := <-errs; err != nil {
+ return fmt.Errorf("imap fetch failed: %w", err)
+ }
+
+ return nil
+}
+
+// isAlreadyHandled tests if the message was already handled
+func isAlreadyHandled(handledSet *imap.SeqSet, msg *imap.Message) bool {
+ return handledSet.Contains(msg.SeqNum)
+}
+
+// isAutomaticReply tests if the headers indicate an automatic reply
+func isAutomaticReply(env *enmime.Envelope) bool {
+ autoSubmitted := env.GetHeader("Auto-Submitted")
+ if autoSubmitted != "" && autoSubmitted != "no" {
+ return true
+ }
+ autoReply := env.GetHeader("X-Autoreply")
+ if autoReply == "yes" {
+ return true
+ }
+ autoRespond := env.GetHeader("X-Autorespond")
+ return autoRespond != ""
+}
+
+// searchTokenInHeaders looks for the token in To, Delivered-To and References
+func searchTokenInHeaders(env *enmime.Envelope) string {
+ if addressTokenRegex != nil {
+ to, _ := env.AddressList("To")
+
+ token := searchTokenInAddresses(to)
+ if token != "" {
+ return token
+ }
+
+ deliveredTo, _ := env.AddressList("Delivered-To")
+
+ token = searchTokenInAddresses(deliveredTo)
+ if token != "" {
+ return token
+ }
+ }
+
+ references := env.GetHeader("References")
+ for {
+ begin := strings.IndexByte(references, '<')
+ if begin == -1 {
+ break
+ }
+ begin++
+
+ end := strings.IndexByte(references, '>')
+ if end == -1 || begin > end {
+ break
+ }
+
+ match := referenceTokenRegex.FindStringSubmatch(references[begin:end])
+ if len(match) == 2 {
+ return match[1]
+ }
+
+ references = references[end+1:]
+ }
+
+ return ""
+}
+
+// searchTokenInAddresses looks for the token in the given addresses
+func searchTokenInAddresses(addresses []*net_mail.Address) string {
+ for _, address := range addresses {
+ match := addressTokenRegex.FindStringSubmatch(address.Address)
+ if len(match) != 2 {
+ continue
+ }
+
+ return match[1]
+ }
+
+ return ""
+}
+
+type MailContent struct {
+ Content string
+ Attachments []*Attachment
+}
+
+type Attachment struct {
+ Name string
+ Content []byte
+}
+
+// getContentFromMailReader grabs the plain content and the attachments from the mail.
+// A potential reply/signature gets stripped from the content.
+func getContentFromMailReader(env *enmime.Envelope) *MailContent {
+ attachments := make([]*Attachment, 0, len(env.Attachments))
+ for _, attachment := range env.Attachments {
+ attachments = append(attachments, &Attachment{
+ Name: attachment.FileName,
+ Content: attachment.Content,
+ })
+ }
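+ // Inline parts that carry a filename and are not plain text are treated as
+ // attachments too; plain-text inlines remain part of the message body.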
+ inlineAttachments := make([]*Attachment, 0, len(env.Inlines))
+ for _, inline := range env.Inlines {
+ if inline.FileName != "" && inline.ContentType != "text/plain" {
+ inlineAttachments = append(inlineAttachments, &Attachment{
+ Name: inline.FileName,
+ Content: inline.Content,
+ })
+ }
+ }
+
+ return &MailContent{
+ Content: reply.FromText(env.Text),
+ Attachments: append(attachments, inlineAttachments...),
+ }
+}
diff --git a/services/mailer/incoming/incoming_handler.go b/services/mailer/incoming/incoming_handler.go
new file mode 100644
index 0000000..dc3c4ec
--- /dev/null
+++ b/services/mailer/incoming/incoming_handler.go
@@ -0,0 +1,187 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package incoming
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ attachment_service "code.gitea.io/gitea/services/attachment"
+ "code.gitea.io/gitea/services/context/upload"
+ issue_service "code.gitea.io/gitea/services/issue"
+ incoming_payload "code.gitea.io/gitea/services/mailer/incoming/payload"
+ "code.gitea.io/gitea/services/mailer/token"
+ pull_service "code.gitea.io/gitea/services/pull"
+)
+
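+// MailHandler handles an incoming mail for one token.HandlerType.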
+type MailHandler interface {
+ Handle(ctx context.Context, content *MailContent, doer *user_model.User, payload []byte) error
+}
+
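+// handlers maps each token handler type to the handler processing its mail.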
+var handlers = map[token.HandlerType]MailHandler{
+ token.ReplyHandlerType: &ReplyHandler{},
+ token.UnsubscribeHandlerType: &UnsubscribeHandler{},
+}
+
+// ReplyHandler handles incoming emails to create a reply from them
+type ReplyHandler struct{}
+
+func (h *ReplyHandler) Handle(ctx context.Context, content *MailContent, doer *user_model.User, payload []byte) error {
+ if doer == nil {
+ return util.NewInvalidArgumentErrorf("doer can't be nil")
+ }
+
+ ref, err := incoming_payload.GetReferenceFromPayload(ctx, payload)
+ if err != nil {
+ return err
+ }
+
+ var issue *issues_model.Issue
+
+ switch r := ref.(type) {
+ case *issues_model.Issue:
+ issue = r
+ case *issues_model.Comment:
+ comment := r
+
+ if err := comment.LoadIssue(ctx); err != nil {
+ return err
+ }
+
+ issue = comment.Issue
+ default:
+ return util.NewInvalidArgumentErrorf("unsupported reply reference: %v", ref)
+ }
+
+ if err := issue.LoadRepo(ctx); err != nil {
+ return err
+ }
+
+ perm, err := access_model.GetUserRepoPermission(ctx, issue.Repo, doer)
+ if err != nil {
+ return err
+ }
+
+ // Locked issues require write permissions
+ if issue.IsLocked && !perm.CanWriteIssuesOrPulls(issue.IsPull) && !doer.IsAdmin {
+ log.Debug("can't write issue or pull")
+ return nil
+ }
+
+ if !perm.CanReadIssuesOrPulls(issue.IsPull) {
+ log.Debug("can't read issue or pull")
+ return nil
+ }
+
+ log.Trace("incoming mail related to %T", ref)
+
+ attachmentIDs := make([]string, 0, len(content.Attachments))
+ if setting.Attachment.Enabled {
+ for _, attachment := range content.Attachments {
+ a, err := attachment_service.UploadAttachment(ctx, bytes.NewReader(attachment.Content), setting.Attachment.AllowedTypes, int64(len(attachment.Content)), &repo_model.Attachment{
+ Name: attachment.Name,
+ UploaderID: doer.ID,
+ RepoID: issue.Repo.ID,
+ })
+ if err != nil {
+ if upload.IsErrFileTypeForbidden(err) {
+ log.Info("Skipping disallowed attachment type: %s", attachment.Name)
+ continue
+ }
+ return err
+ }
+ attachmentIDs = append(attachmentIDs, a.UUID)
+ }
+ }
+
+ if content.Content == "" && len(attachmentIDs) == 0 {
+ log.Trace("incoming mail has no content and no attachment", ref)
+ return nil
+ }
+
+ switch r := ref.(type) {
+ case *issues_model.Issue:
+ _, err = issue_service.CreateIssueComment(ctx, doer, issue.Repo, issue, content.Content, attachmentIDs)
+ if err != nil {
+ return fmt.Errorf("CreateIssueComment failed: %w", err)
+ }
+ case *issues_model.Comment:
+ comment := r
+
+ switch comment.Type {
+ case issues_model.CommentTypeComment, issues_model.CommentTypeReview:
+ _, err = issue_service.CreateIssueComment(ctx, doer, issue.Repo, issue, content.Content, attachmentIDs)
+ if err != nil {
+ return fmt.Errorf("CreateIssueComment failed: %w", err)
+ }
+ case issues_model.CommentTypeCode:
+ _, err := pull_service.CreateCodeComment(
+ ctx,
+ doer,
+ nil,
+ issue,
+ comment.Line,
+ content.Content,
+ comment.TreePath,
+ false, // not pending review but a single review
+ comment.ReviewID,
+ "",
+ attachmentIDs,
+ )
+ if err != nil {
+ return fmt.Errorf("CreateCodeComment failed: %w", err)
+ }
+ default:
+ log.Trace("incoming mail related to comment of type %v is ignored", comment.Type)
+ }
+ default:
+ log.Trace("incoming mail related to %T is ignored", ref)
+ }
+ return nil
+}
+
+// UnsubscribeHandler handles unwatching issues/pulls
+type UnsubscribeHandler struct{}
+
+func (h *UnsubscribeHandler) Handle(ctx context.Context, _ *MailContent, doer *user_model.User, payload []byte) error {
+ if doer == nil {
+ return util.NewInvalidArgumentErrorf("doer can't be nil")
+ }
+
+ ref, err := incoming_payload.GetReferenceFromPayload(ctx, payload)
+ if err != nil {
+ return err
+ }
+
+ switch r := ref.(type) {
+ case *issues_model.Issue:
+ issue := r
+
+ if err := issue.LoadRepo(ctx); err != nil {
+ return err
+ }
+
+ perm, err := access_model.GetUserRepoPermission(ctx, issue.Repo, doer)
+ if err != nil {
+ return err
+ }
+
+ if !perm.CanReadIssuesOrPulls(issue.IsPull) {
+ log.Debug("can't read issue or pull")
+ return nil
+ }
+
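+ // A watch value of false records an explicit unwatch for the doer.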
+ return issues_model.CreateOrUpdateIssueWatch(ctx, doer.ID, issue.ID, false)
+ default:
+ return fmt.Errorf("unsupported unsubscribe reference: %v", ref)
+ }
+}
diff --git a/services/mailer/incoming/incoming_test.go b/services/mailer/incoming/incoming_test.go
new file mode 100644
index 0000000..1ff12d0
--- /dev/null
+++ b/services/mailer/incoming/incoming_test.go
@@ -0,0 +1,191 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package incoming
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/emersion/go-imap"
+ "github.com/jhillyerd/enmime"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestNotHandleTwice(t *testing.T) {
+ handledSet := new(imap.SeqSet)
+ msg := imap.NewMessage(90, []imap.FetchItem{imap.FetchBody})
+
+ handled := isAlreadyHandled(handledSet, msg)
+ assert.False(t, handled)
+
+ handledSet.AddNum(msg.SeqNum)
+
+ handled = isAlreadyHandled(handledSet, msg)
+ assert.True(t, handled)
+}
+
+func TestIsAutomaticReply(t *testing.T) {
+ cases := []struct {
+ Headers map[string]string
+ Expected bool
+ }{
+ {
+ Headers: map[string]string{},
+ Expected: false,
+ },
+ {
+ Headers: map[string]string{
+ "Auto-Submitted": "no",
+ },
+ Expected: false,
+ },
+ {
+ Headers: map[string]string{
+ "Auto-Submitted": "yes",
+ },
+ Expected: true,
+ },
+ {
+ Headers: map[string]string{
+ "X-Autoreply": "no",
+ },
+ Expected: false,
+ },
+ {
+ Headers: map[string]string{
+ "X-Autoreply": "yes",
+ },
+ Expected: true,
+ },
+ {
+ Headers: map[string]string{
+ "X-Autorespond": "yes",
+ },
+ Expected: true,
+ },
+ }
+
+ for _, c := range cases {
+ b := enmime.Builder().
+ From("Dummy", "dummy@gitea.io").
+ To("Dummy", "dummy@gitea.io")
+ for k, v := range c.Headers {
+ b = b.Header(k, v)
+ }
+ root, err := b.Build()
+ require.NoError(t, err)
+ env, err := enmime.EnvelopeFromPart(root)
+ require.NoError(t, err)
+
+ assert.Equal(t, c.Expected, isAutomaticReply(env))
+ }
+}
+
+func TestGetContentFromMailReader(t *testing.T) {
+ mailString := "Content-Type: multipart/mixed; boundary=message-boundary\r\n" +
+ "\r\n" +
+ "--message-boundary\r\n" +
+ "Content-Type: multipart/alternative; boundary=text-boundary\r\n" +
+ "\r\n" +
+ "--text-boundary\r\n" +
+ "Content-Type: text/plain\r\n" +
+ "Content-Disposition: inline\r\n" +
+ "\r\n" +
+ "mail content\r\n" +
+ "--text-boundary--\r\n" +
+ "--message-boundary\r\n" +
+ "Content-Type: text/plain\r\n" +
+ "Content-Disposition: attachment; filename=attachment.txt\r\n" +
+ "\r\n" +
+ "attachment content\r\n" +
+ "--message-boundary--\r\n"
+
+ env, err := enmime.ReadEnvelope(strings.NewReader(mailString))
+ require.NoError(t, err)
+ content := getContentFromMailReader(env)
+ assert.Equal(t, "mail content", content.Content)
+ assert.Len(t, content.Attachments, 1)
+ assert.Equal(t, "attachment.txt", content.Attachments[0].Name)
+ assert.Equal(t, []byte("attachment content"), content.Attachments[0].Content)
+
+ mailString = "Content-Type: multipart/mixed; boundary=message-boundary\r\n" +
+ "\r\n" +
+ "--message-boundary\r\n" +
+ "Content-Type: multipart/alternative; boundary=text-boundary\r\n" +
+ "\r\n" +
+ "--text-boundary\r\n" +
+ "Content-Type: text/plain\r\n" +
+ "Content-Disposition: inline\r\n" +
+ "\r\n" +
+ "mail content\r\n" +
+ "--text-boundary--\r\n" +
+ "--message-boundary\r\n" +
+ "Content-Type: text/plain\r\n" +
+ "Content-Disposition: inline; filename=attachment.txt\r\n" +
+ "\r\n" +
+ "attachment content\r\n" +
+ "--message-boundary\r\n" +
+ "Content-Type: text/html\r\n" +
+ "Content-Disposition: inline; filename=attachment.html\r\n" +
+ "\r\n" +
+ "<p>html attachment content</p>\r\n" +
+ "--message-boundary\r\n" +
+ "Content-Type: image/png\r\n" +
+ "Content-Disposition: inline; filename=attachment.png\r\n" +
+ "Content-Transfer-Encoding: base64\r\n" +
+ "\r\n" +
+ "iVBORw0KGgoAAAANSUhEUgAAAAgAAAAIAQMAAAD+wSzIAAAABlBMVEX///+/v7+jQ3Y5AAAADklEQVQI12P4AIX8EAgALgAD/aNpbtEAAAAASUVORK5CYII\r\n" +
+ "--message-boundary--\r\n"
+
+ env, err = enmime.ReadEnvelope(strings.NewReader(mailString))
+ require.NoError(t, err)
+ content = getContentFromMailReader(env)
+ assert.Equal(t, "mail content\n--\nattachment content", content.Content)
+ assert.Len(t, content.Attachments, 2)
+ assert.Equal(t, "attachment.html", content.Attachments[0].Name)
+ assert.Equal(t, []byte("<p>html attachment content</p>"), content.Attachments[0].Content)
+ assert.Equal(t, "attachment.png", content.Attachments[1].Name)
+
+ mailString = "Content-Type: multipart/mixed; boundary=message-boundary\r\n" +
+ "\r\n" +
+ "--message-boundary\r\n" +
+ "Content-Type: multipart/alternative; boundary=text-boundary\r\n" +
+ "\r\n" +
+ "--text-boundary\r\n" +
+ "Content-Type: text/html\r\n" +
+ "Content-Disposition: inline\r\n" +
+ "\r\n" +
+ "<p>mail content</p>\r\n" +
+ "--text-boundary--\r\n" +
+ "--message-boundary--\r\n"
+
+ env, err = enmime.ReadEnvelope(strings.NewReader(mailString))
+ require.NoError(t, err)
+ content = getContentFromMailReader(env)
+ assert.Equal(t, "mail content", content.Content)
+ assert.Empty(t, content.Attachments)
+
+ mailString = "Content-Type: multipart/mixed; boundary=message-boundary\r\n" +
+ "\r\n" +
+ "--message-boundary\r\n" +
+ "Content-Type: multipart/alternative; boundary=text-boundary\r\n" +
+ "\r\n" +
+ "--text-boundary\r\n" +
+ "Content-Type: text/plain\r\n" +
+ "Content-Disposition: inline\r\n" +
+ "\r\n" +
+ "mail content without signature\r\n" +
+ "----\r\n" +
+ "signature\r\n" +
+ "--text-boundary--\r\n" +
+ "--message-boundary--\r\n"
+
+ env, err = enmime.ReadEnvelope(strings.NewReader(mailString))
+ require.NoError(t, err)
+ content = getContentFromMailReader(env)
+ assert.Equal(t, "mail content without signature", content.Content)
+ assert.Empty(t, content.Attachments)
+}
diff --git a/services/mailer/incoming/payload/payload.go b/services/mailer/incoming/payload/payload.go
new file mode 100644
index 0000000..00ada78
--- /dev/null
+++ b/services/mailer/incoming/payload/payload.go
@@ -0,0 +1,70 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package payload
+
+import (
+ "context"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/util"
+)
+
+const replyPayloadVersion1 byte = 1
+
+type payloadReferenceType byte
+
+const (
+ payloadReferenceIssue payloadReferenceType = iota
+ payloadReferenceComment
+)
+
+// CreateReferencePayload encodes the given reference into data which GetReferenceFromPayload resolves back to the reference.
+func CreateReferencePayload(reference any) ([]byte, error) {
+ var refType payloadReferenceType
+ var refID int64
+
+ switch r := reference.(type) {
+ case *issues_model.Issue:
+ refType = payloadReferenceIssue
+ refID = r.ID
+ case *issues_model.Comment:
+ refType = payloadReferenceComment
+ refID = r.ID
+ default:
+ return nil, util.NewInvalidArgumentErrorf("unsupported reference type: %T", r)
+ }
+
+ payload, err := util.PackData(refType, refID)
+ if err != nil {
+ return nil, err
+ }
+
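+ // Resulting layout: one version byte followed by the packed refType and refID.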
+ return append([]byte{replyPayloadVersion1}, payload...), nil
+}
+
+// GetReferenceFromPayload resolves the reference from the payload
+func GetReferenceFromPayload(ctx context.Context, payload []byte) (any, error) {
+ if len(payload) < 1 {
+ return nil, util.NewInvalidArgumentErrorf("payload to small")
+ }
+
+ if payload[0] != replyPayloadVersion1 {
+ return nil, util.NewInvalidArgumentErrorf("unsupported payload version")
+ }
+
+ var ref payloadReferenceType
+ var id int64
+ if err := util.UnpackData(payload[1:], &ref, &id); err != nil {
+ return nil, err
+ }
+
+ switch ref {
+ case payloadReferenceIssue:
+ return issues_model.GetIssueByID(ctx, id)
+ case payloadReferenceComment:
+ return issues_model.GetCommentByID(ctx, id)
+ default:
+ return nil, util.NewInvalidArgumentErrorf("unsupported reference type: %T", ref)
+ }
+}
diff --git a/services/mailer/mail.go b/services/mailer/mail.go
new file mode 100644
index 0000000..bfede28
--- /dev/null
+++ b/services/mailer/mail.go
@@ -0,0 +1,751 @@
+// Copyright 2016 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package mailer
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "html/template"
+ "mime"
+ "regexp"
+ "strconv"
+ "strings"
+ texttmpl "text/template"
+ "time"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ auth_model "code.gitea.io/gitea/models/auth"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/emoji"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/markup/markdown"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/translation"
+ incoming_payload "code.gitea.io/gitea/services/mailer/incoming/payload"
+ "code.gitea.io/gitea/services/mailer/token"
+
+ "gopkg.in/gomail.v2"
+)
+
+const (
+ mailAuthActivate base.TplName = "auth/activate"
+ mailAuthActivateEmail base.TplName = "auth/activate_email"
+ mailAuthResetPassword base.TplName = "auth/reset_passwd"
+ mailAuthRegisterNotify base.TplName = "auth/register_notify"
+ mailAuthPasswordChange base.TplName = "auth/password_change"
+ mailAuthPrimaryMailChange base.TplName = "auth/primary_mail_change"
+ mailAuth2faDisabled base.TplName = "auth/2fa_disabled"
+ mailAuthRemovedSecurityKey base.TplName = "auth/removed_security_key"
+ mailAuthTOTPEnrolled base.TplName = "auth/totp_enrolled"
+
+ mailNotifyCollaborator base.TplName = "notify/collaborator"
+
+ mailRepoTransferNotify base.TplName = "notify/repo_transfer"
+
+ // There's no actual limit for subject in RFC 5322
+ mailMaxSubjectRunes = 256
+)
+
+var (
+ bodyTemplates *template.Template
+ subjectTemplates *texttmpl.Template
+ subjectRemoveSpaces = regexp.MustCompile(`[\s]+`)
+)
+
+// SendTestMail sends a test mail
+func SendTestMail(email string) error {
+ if setting.MailService == nil {
+ // No mail service configured
+ return nil
+ }
+ return gomail.Send(Sender, NewMessage(email, "Forgejo Test Email!", "Forgejo Test Email!").ToMessage())
+}
+
+// sendUserMail sends a mail to the user
+func sendUserMail(language string, u *user_model.User, tpl base.TplName, code, subject, info string) error {
+ locale := translation.NewLocale(language)
+ data := map[string]any{
+ "locale": locale,
+ "DisplayName": u.DisplayName(),
+ "ActiveCodeLives": timeutil.MinutesToFriendly(setting.Service.ActiveCodeLives, locale),
+ "ResetPwdCodeLives": timeutil.MinutesToFriendly(setting.Service.ResetPwdCodeLives, locale),
+ "Code": code,
+ "Language": locale.Language(),
+ }
+
+ var content bytes.Buffer
+
+ if err := bodyTemplates.ExecuteTemplate(&content, string(tpl), data); err != nil {
+ return err
+ }
+
+ msg := NewMessage(u.EmailTo(), subject, content.String())
+ msg.Info = fmt.Sprintf("UID: %d, %s", u.ID, info)
+
+ SendAsync(msg)
+ return nil
+}
+
+// SendActivateAccountMail sends an activation mail to the user (new user registration)
+func SendActivateAccountMail(ctx context.Context, u *user_model.User) error {
+ if setting.MailService == nil {
+ // No mail service configured
+ return nil
+ }
+
+ locale := translation.NewLocale(u.Language)
+ code, err := u.GenerateEmailAuthorizationCode(ctx, auth_model.UserActivation)
+ if err != nil {
+ return err
+ }
+
+ return sendUserMail(locale.Language(), u, mailAuthActivate, code, locale.TrString("mail.activate_account"), "activate account")
+}
+
+// SendResetPasswordMail sends a password reset mail to the user
+func SendResetPasswordMail(ctx context.Context, u *user_model.User) error {
+ if setting.MailService == nil {
+ // No mail service configured
+ return nil
+ }
+
+ locale := translation.NewLocale(u.Language)
+ code, err := u.GenerateEmailAuthorizationCode(ctx, auth_model.PasswordReset)
+ if err != nil {
+ return err
+ }
+
+ return sendUserMail(u.Language, u, mailAuthResetPassword, code, locale.TrString("mail.reset_password"), "recover account")
+}
+
+// SendActivateEmailMail sends a confirmation mail to verify a newly added email address
+func SendActivateEmailMail(ctx context.Context, u *user_model.User, email string) error {
+ if setting.MailService == nil {
+ // No mail service configured
+ return nil
+ }
+
+ locale := translation.NewLocale(u.Language)
+ code, err := u.GenerateEmailAuthorizationCode(ctx, auth_model.EmailActivation(email))
+ if err != nil {
+ return err
+ }
+
+ data := map[string]any{
+ "locale": locale,
+ "DisplayName": u.DisplayName(),
+ "ActiveCodeLives": timeutil.MinutesToFriendly(setting.Service.ActiveCodeLives, locale),
+ "Code": code,
+ "Email": email,
+ "Language": locale.Language(),
+ }
+
+ var content bytes.Buffer
+
+ if err := bodyTemplates.ExecuteTemplate(&content, string(mailAuthActivateEmail), data); err != nil {
+ return err
+ }
+
+ msg := NewMessage(email, locale.TrString("mail.activate_email"), content.String())
+ msg.Info = fmt.Sprintf("UID: %d, activate email", u.ID)
+
+ SendAsync(msg)
+ return nil
+}
+
+// SendRegisterNotifyMail triggers a notification e-mail when an admin creates an account for a user.
+func SendRegisterNotifyMail(u *user_model.User) {
+ if setting.MailService == nil || !u.IsActive {
+ // No mail service configured OR user is inactive
+ return
+ }
+ locale := translation.NewLocale(u.Language)
+
+ data := map[string]any{
+ "locale": locale,
+ "DisplayName": u.DisplayName(),
+ "Username": u.Name,
+ "Language": locale.Language(),
+ }
+
+ var content bytes.Buffer
+
+ if err := bodyTemplates.ExecuteTemplate(&content, string(mailAuthRegisterNotify), data); err != nil {
+ log.Error("Template: %v", err)
+ return
+ }
+
+ msg := NewMessage(u.EmailTo(), locale.TrString("mail.register_notify", setting.AppName), content.String())
+ msg.Info = fmt.Sprintf("UID: %d, registration notify", u.ID)
+
+ SendAsync(msg)
+}
+
+// SendCollaboratorMail sends a mail notification to the new collaborator.
+func SendCollaboratorMail(u, doer *user_model.User, repo *repo_model.Repository) {
+ if setting.MailService == nil || !u.IsActive {
+ // No mail service configured OR the user is inactive
+ return
+ }
+ locale := translation.NewLocale(u.Language)
+ repoName := repo.FullName()
+
+ subject := locale.TrString("mail.repo.collaborator.added.subject", doer.DisplayName(), repoName)
+ data := map[string]any{
+ "locale": locale,
+ "Subject": subject,
+ "RepoName": repoName,
+ "Link": repo.HTMLURL(),
+ "Language": locale.Language(),
+ }
+
+ var content bytes.Buffer
+
+ if err := bodyTemplates.ExecuteTemplate(&content, string(mailNotifyCollaborator), data); err != nil {
+ log.Error("Template: %v", err)
+ return
+ }
+
+ msg := NewMessage(u.EmailTo(), subject, content.String())
+ msg.Info = fmt.Sprintf("UID: %d, add collaborator", u.ID)
+
+ SendAsync(msg)
+}
+
+func composeIssueCommentMessages(ctx *mailCommentContext, lang string, recipients []*user_model.User, fromMention bool, info string) ([]*Message, error) {
+ var (
+ subject string
+ link string
+ prefix string
+ // Fallback subject for bad templates; make sure the subject is never empty
+ fallback string
+ reviewComments []*issues_model.Comment
+ )
+
+ commentType := issues_model.CommentTypeComment
+ if ctx.Comment != nil {
+ commentType = ctx.Comment.Type
+ link = ctx.Issue.HTMLURL() + "#" + ctx.Comment.HashTag()
+ } else {
+ link = ctx.Issue.HTMLURL()
+ }
+
+ reviewType := issues_model.ReviewTypeComment
+ if ctx.Comment != nil && ctx.Comment.Review != nil {
+ reviewType = ctx.Comment.Review.Type
+ }
+
+ // This is the body of the new issue or comment, not the mail body
+ body, err := markdown.RenderString(&markup.RenderContext{
+ Ctx: ctx,
+ Links: markup.Links{
+ AbsolutePrefix: true,
+ Base: ctx.Issue.Repo.HTMLURL(),
+ },
+ Metas: ctx.Issue.Repo.ComposeMetas(ctx),
+ }, ctx.Content)
+ if err != nil {
+ return nil, err
+ }
+
+ actType, actName, tplName := actionToTemplate(ctx.Issue, ctx.ActionType, commentType, reviewType)
+
+ if actName != "new" {
+ prefix = "Re: "
+ }
+ fallback = prefix + fallbackMailSubject(ctx.Issue)
+
+ if ctx.Comment != nil && ctx.Comment.Review != nil {
+ reviewComments = make([]*issues_model.Comment, 0, 10)
+ for _, lines := range ctx.Comment.Review.CodeComments {
+ for _, comments := range lines {
+ reviewComments = append(reviewComments, comments...)
+ }
+ }
+ }
+ locale := translation.NewLocale(lang)
+
+ mailMeta := map[string]any{
+ "locale": locale,
+ "FallbackSubject": fallback,
+ "Body": body,
+ "Link": link,
+ "Issue": ctx.Issue,
+ "Comment": ctx.Comment,
+ "IsPull": ctx.Issue.IsPull,
+ "User": ctx.Issue.Repo.MustOwner(ctx),
+ "Repo": ctx.Issue.Repo.FullName(),
+ "Doer": ctx.Doer,
+ "IsMention": fromMention,
+ "SubjectPrefix": prefix,
+ "ActionType": actType,
+ "ActionName": actName,
+ "ReviewComments": reviewComments,
+ "Language": locale.Language(),
+ "CanReply": setting.IncomingEmail.Enabled && commentType != issues_model.CommentTypePullRequestPush,
+ }
+
+ var mailSubject bytes.Buffer
+ if err := subjectTemplates.ExecuteTemplate(&mailSubject, tplName, mailMeta); err == nil {
+ subject = sanitizeSubject(mailSubject.String())
+ if subject == "" {
+ subject = fallback
+ }
+ } else {
+ log.Error("ExecuteTemplate [%s]: %v", tplName+"/subject", err)
+ }
+
+ subject = emoji.ReplaceAliases(subject)
+
+ mailMeta["Subject"] = subject
+
+ var mailBody bytes.Buffer
+
+ if err := bodyTemplates.ExecuteTemplate(&mailBody, tplName, mailMeta); err != nil {
+ log.Error("ExecuteTemplate [%s]: %v", tplName+"/body", err)
+ }
+
+ // Make sure to compose independent messages to avoid leaking user emails
+ msgID := createReference(ctx.Issue, ctx.Comment, ctx.ActionType)
+ reference := createReference(ctx.Issue, nil, activities_model.ActionType(0))
+
+ var replyPayload []byte
+ if ctx.Comment != nil {
+ if ctx.Comment.Type.HasMailReplySupport() {
+ replyPayload, err = incoming_payload.CreateReferencePayload(ctx.Comment)
+ }
+ } else {
+ replyPayload, err = incoming_payload.CreateReferencePayload(ctx.Issue)
+ }
+ if err != nil {
+ return nil, err
+ }
+
+ unsubscribePayload, err := incoming_payload.CreateReferencePayload(ctx.Issue)
+ if err != nil {
+ return nil, err
+ }
+
+ msgs := make([]*Message, 0, len(recipients))
+ for _, recipient := range recipients {
+ msg := NewMessageFrom(
+ recipient.Email,
+ fromDisplayName(ctx.Doer),
+ setting.MailService.FromEmail,
+ subject,
+ mailBody.String(),
+ )
+ msg.Info = fmt.Sprintf("Subject: %s, %s", subject, info)
+
+ msg.SetHeader("Message-ID", msgID)
+ msg.SetHeader("In-Reply-To", reference)
+
+ references := []string{reference}
+ listUnsubscribe := []string{"<" + ctx.Issue.HTMLURL() + ">"}
+
+ if setting.IncomingEmail.Enabled {
+ if replyPayload != nil {
+ token, err := token.CreateToken(token.ReplyHandlerType, recipient, replyPayload)
+ if err != nil {
+ log.Error("CreateToken failed: %v", err)
+ } else {
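+ // e.g. with an (assumed) ReplyToAddress of "incoming+%{token}@example.com",
+ // this yields "incoming+<token>@example.com" for the current recipient.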
+ replyAddress := strings.Replace(setting.IncomingEmail.ReplyToAddress, setting.IncomingEmail.TokenPlaceholder, token, 1)
+ msg.ReplyTo = replyAddress
+ msg.SetHeader("List-Post", fmt.Sprintf("<mailto:%s>", replyAddress))
+
+ references = append(references, fmt.Sprintf("<reply-%s@%s>", token, setting.Domain))
+ }
+ }
+
+ token, err := token.CreateToken(token.UnsubscribeHandlerType, recipient, unsubscribePayload)
+ if err != nil {
+ log.Error("CreateToken failed: %v", err)
+ } else {
+ unsubAddress := strings.Replace(setting.IncomingEmail.ReplyToAddress, setting.IncomingEmail.TokenPlaceholder, token, 1)
+ listUnsubscribe = append(listUnsubscribe, "<mailto:"+unsubAddress+">")
+ }
+ }
+
+ msg.SetHeader("References", references...)
+ msg.SetHeader("List-Unsubscribe", listUnsubscribe...)
+
+ for key, value := range generateAdditionalHeaders(ctx, actType, recipient) {
+ msg.SetHeader(key, value)
+ }
+
+ msgs = append(msgs, msg)
+ }
+
+ return msgs, nil
+}
+
+func createReference(issue *issues_model.Issue, comment *issues_model.Comment, actionType activities_model.ActionType) string {
+ var path string
+ if issue.IsPull {
+ path = "pulls"
+ } else {
+ path = "issues"
+ }
+
+ var extra string
+ if comment != nil {
+ extra = fmt.Sprintf("/comment/%d", comment.ID)
+ } else {
+ switch actionType {
+ case activities_model.ActionCloseIssue, activities_model.ActionClosePullRequest:
+ extra = fmt.Sprintf("/close/%d", time.Now().UnixNano()/1e6)
+ case activities_model.ActionReopenIssue, activities_model.ActionReopenPullRequest:
+ extra = fmt.Sprintf("/reopen/%d", time.Now().UnixNano()/1e6)
+ case activities_model.ActionMergePullRequest, activities_model.ActionAutoMergePullRequest:
+ extra = fmt.Sprintf("/merge/%d", time.Now().UnixNano()/1e6)
+ case activities_model.ActionPullRequestReadyForReview:
+ extra = fmt.Sprintf("/ready/%d", time.Now().UnixNano()/1e6)
+ }
+ }
+
+ return fmt.Sprintf("<%s/%s/%d%s@%s>", issue.Repo.FullName(), path, issue.Index, extra, setting.Domain)
+}
+
+func createMessageIDForRelease(rel *repo_model.Release) string {
+ return fmt.Sprintf("<%s/releases/%d@%s>", rel.Repo.FullName(), rel.ID, setting.Domain)
+}
+
+func generateAdditionalHeaders(ctx *mailCommentContext, reason string, recipient *user_model.User) map[string]string {
+ repo := ctx.Issue.Repo
+
+ return map[string]string{
+ // https://datatracker.ietf.org/doc/html/rfc2919
+ "List-ID": fmt.Sprintf("%s <%s.%s.%s>", repo.FullName(), repo.Name, repo.OwnerName, setting.Domain),
+
+ // https://datatracker.ietf.org/doc/html/rfc2369
+ "List-Archive": fmt.Sprintf("<%s>", repo.HTMLURL()),
+
+ "X-Mailer": "Forgejo",
+ "X-Gitea-Reason": reason,
+ "X-Gitea-Sender": ctx.Doer.Name,
+ "X-Gitea-Recipient": recipient.Name,
+ "X-Gitea-Recipient-Address": recipient.Email,
+ "X-Gitea-Repository": repo.Name,
+ "X-Gitea-Repository-Path": repo.FullName(),
+ "X-Gitea-Repository-Link": repo.HTMLURL(),
+ "X-Gitea-Issue-ID": strconv.FormatInt(ctx.Issue.Index, 10),
+ "X-Gitea-Issue-Link": ctx.Issue.HTMLURL(),
+
+ "X-Forgejo-Reason": reason,
+ "X-Forgejo-Sender": ctx.Doer.Name,
+ "X-Forgejo-Recipient": recipient.Name,
+ "X-Forgejo-Recipient-Address": recipient.Email,
+ "X-Forgejo-Repository": repo.Name,
+ "X-Forgejo-Repository-Path": repo.FullName(),
+ "X-Forgejo-Repository-Link": repo.HTMLURL(),
+ "X-Forgejo-Issue-ID": strconv.FormatInt(ctx.Issue.Index, 10),
+ "X-Forgejo-Issue-Link": ctx.Issue.HTMLURL(),
+
+ "X-GitHub-Reason": reason,
+ "X-GitHub-Sender": ctx.Doer.Name,
+ "X-GitHub-Recipient": recipient.Name,
+ "X-GitHub-Recipient-Address": recipient.Email,
+
+ "X-GitLab-NotificationReason": reason,
+ "X-GitLab-Project": repo.Name,
+ "X-GitLab-Project-Path": repo.FullName(),
+ "X-GitLab-Issue-IID": strconv.FormatInt(ctx.Issue.Index, 10),
+ }
+}
+
+func sanitizeSubject(subject string) string {
+ runes := []rune(strings.TrimSpace(subjectRemoveSpaces.ReplaceAllLiteralString(subject, " ")))
+ if len(runes) > mailMaxSubjectRunes {
+ runes = runes[:mailMaxSubjectRunes]
+ }
+ // Encode non-ASCII characters
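+ // e.g. "héllo" becomes "=?utf-8?q?h=C3=A9llo?="; ASCII-only subjects are
+ // returned unchanged.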
+ return mime.QEncoding.Encode("utf-8", string(runes))
+}
+
+// SendIssueAssignedMail composes and sends issue assigned email
+func SendIssueAssignedMail(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, content string, comment *issues_model.Comment, recipients []*user_model.User) error {
+ if setting.MailService == nil {
+ // No mail service configured
+ return nil
+ }
+
+ if err := issue.LoadRepo(ctx); err != nil {
+ log.Error("Unable to load repo [%d] for issue #%d [%d]. Error: %v", issue.RepoID, issue.Index, issue.ID, err)
+ return err
+ }
+
+ langMap := make(map[string][]*user_model.User)
+ for _, user := range recipients {
+ if !user.IsActive {
+ // don't send emails to inactive users
+ continue
+ }
+ langMap[user.Language] = append(langMap[user.Language], user)
+ }
+
+ for lang, tos := range langMap {
+ msgs, err := composeIssueCommentMessages(&mailCommentContext{
+ Context: ctx,
+ Issue: issue,
+ Doer: doer,
+ ActionType: activities_model.ActionType(0),
+ Content: content,
+ Comment: comment,
+ }, lang, tos, false, "issue assigned")
+ if err != nil {
+ return err
+ }
+ SendAsync(msgs...)
+ }
+ return nil
+}
+
+// actionToTemplate returns the type and name of the action facing the user
+// (slightly different from activities_model.ActionType) and the name of the template to use (based on availability)
+func actionToTemplate(issue *issues_model.Issue, actionType activities_model.ActionType,
+ commentType issues_model.CommentType, reviewType issues_model.ReviewType,
+) (typeName, name, template string) {
+ if issue.IsPull {
+ typeName = "pull"
+ } else {
+ typeName = "issue"
+ }
+ switch actionType {
+ case activities_model.ActionCreateIssue, activities_model.ActionCreatePullRequest:
+ name = "new"
+ case activities_model.ActionCommentIssue, activities_model.ActionCommentPull:
+ name = "comment"
+ case activities_model.ActionCloseIssue, activities_model.ActionClosePullRequest:
+ name = "close"
+ case activities_model.ActionReopenIssue, activities_model.ActionReopenPullRequest:
+ name = "reopen"
+ case activities_model.ActionMergePullRequest, activities_model.ActionAutoMergePullRequest:
+ name = "merge"
+ case activities_model.ActionPullReviewDismissed:
+ name = "review_dismissed"
+ case activities_model.ActionPullRequestReadyForReview:
+ name = "ready_for_review"
+ default:
+ switch commentType {
+ case issues_model.CommentTypeReview:
+ switch reviewType {
+ case issues_model.ReviewTypeApprove:
+ name = "approve"
+ case issues_model.ReviewTypeReject:
+ name = "reject"
+ default:
+ name = "review" // TODO: there is no activities_model.Action* when sending a review comment, this is deadcode and should be removed
+ }
+ case issues_model.CommentTypeCode:
+ name = "code"
+ case issues_model.CommentTypeAssignees:
+ name = "assigned"
+ case issues_model.CommentTypePullRequestPush:
+ name = "push"
+ default:
+ name = "default"
+ }
+ }
+
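+ // Resolve the template with a fallback chain:
+ // "<type>/<name>" -> "issue/<name>" -> "<type>/default" -> "issue/default".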
+ template = typeName + "/" + name
+ ok := bodyTemplates.Lookup(template) != nil
+ if !ok && typeName != "issue" {
+ template = "issue/" + name
+ ok = bodyTemplates.Lookup(template) != nil
+ }
+ if !ok {
+ template = typeName + "/default"
+ ok = bodyTemplates.Lookup(template) != nil
+ }
+ if !ok {
+ template = "issue/default"
+ }
+ return typeName, name, template
+}
+
+func fromDisplayName(u *user_model.User) string {
+ if setting.MailService.FromDisplayNameFormatTemplate != nil {
+ var ctx bytes.Buffer
+ err := setting.MailService.FromDisplayNameFormatTemplate.Execute(&ctx, map[string]any{
+ "DisplayName": u.DisplayName(),
+ "AppName": setting.AppName,
+ "Domain": setting.Domain,
+ })
+ if err == nil {
+ return mime.QEncoding.Encode("utf-8", ctx.String())
+ }
+ log.Error("fromDisplayName: %w", err)
+ }
+ return u.GetCompleteName()
+}
+
+// SendPasswordChange informs the user on their primary email address that
+// their password was changed.
+func SendPasswordChange(u *user_model.User) error {
+ if setting.MailService == nil {
+ return nil
+ }
+ locale := translation.NewLocale(u.Language)
+
+ data := map[string]any{
+ "locale": locale,
+ "DisplayName": u.DisplayName(),
+ "Username": u.Name,
+ "Language": locale.Language(),
+ }
+
+ var content bytes.Buffer
+
+ if err := bodyTemplates.ExecuteTemplate(&content, string(mailAuthPasswordChange), data); err != nil {
+ return err
+ }
+
+ msg := NewMessage(u.EmailTo(), locale.TrString("mail.password_change.subject"), content.String())
+ msg.Info = fmt.Sprintf("UID: %d, password change notification", u.ID)
+
+ SendAsync(msg)
+ return nil
+}
+
+// SendPrimaryMailChange informs the user on their old primary email address
+// that it is no longer used as the primary address and will no longer
+// receive notifications there.
+func SendPrimaryMailChange(u *user_model.User, oldPrimaryEmail string) error {
+ if setting.MailService == nil {
+ return nil
+ }
+ locale := translation.NewLocale(u.Language)
+
+ data := map[string]any{
+ "locale": locale,
+ "NewPrimaryMail": u.Email,
+ "DisplayName": u.DisplayName(),
+ "Username": u.Name,
+ "Language": locale.Language(),
+ }
+
+ var content bytes.Buffer
+
+ if err := bodyTemplates.ExecuteTemplate(&content, string(mailAuthPrimaryMailChange), data); err != nil {
+ return err
+ }
+
+ msg := NewMessage(u.EmailTo(oldPrimaryEmail), locale.TrString("mail.primary_mail_change.subject"), content.String())
+ msg.Info = fmt.Sprintf("UID: %d, primary email change notification", u.ID)
+
+ SendAsync(msg)
+ return nil
+}
+
+// SendDisabledTOTP informs the user that their TOTP has been disabled.
+func SendDisabledTOTP(ctx context.Context, u *user_model.User) error {
+ if setting.MailService == nil {
+ return nil
+ }
+ locale := translation.NewLocale(u.Language)
+
+ hasWebAuthn, err := auth_model.HasWebAuthnRegistrationsByUID(ctx, u.ID)
+ if err != nil {
+ return err
+ }
+
+ data := map[string]any{
+ "locale": locale,
+ "HasWebAuthn": hasWebAuthn,
+ "DisplayName": u.DisplayName(),
+ "Username": u.Name,
+ "Language": locale.Language(),
+ }
+
+ var content bytes.Buffer
+
+ if err := bodyTemplates.ExecuteTemplate(&content, string(mailAuth2faDisabled), data); err != nil {
+ return err
+ }
+
+ msg := NewMessage(u.EmailTo(), locale.TrString("mail.totp_disabled.subject"), content.String())
+ msg.Info = fmt.Sprintf("UID: %d, 2fa disabled notification", u.ID)
+
+ SendAsync(msg)
+ return nil
+}
+
+// SendRemovedSecurityKey informs the user that one of their security keys has been removed.
+func SendRemovedSecurityKey(ctx context.Context, u *user_model.User, securityKeyName string) error {
+ if setting.MailService == nil {
+ return nil
+ }
+ locale := translation.NewLocale(u.Language)
+
+ hasWebAuthn, err := auth_model.HasWebAuthnRegistrationsByUID(ctx, u.ID)
+ if err != nil {
+ return err
+ }
+ hasTOTP, err := auth_model.HasTwoFactorByUID(ctx, u.ID)
+ if err != nil {
+ return err
+ }
+
+ data := map[string]any{
+ "locale": locale,
+ "HasWebAuthn": hasWebAuthn,
+ "HasTOTP": hasTOTP,
+ "SecurityKeyName": securityKeyName,
+ "DisplayName": u.DisplayName(),
+ "Username": u.Name,
+ "Language": locale.Language(),
+ }
+
+ var content bytes.Buffer
+
+ if err := bodyTemplates.ExecuteTemplate(&content, string(mailAuthRemovedSecurityKey), data); err != nil {
+ return err
+ }
+
+ msg := NewMessage(u.EmailTo(), locale.TrString("mail.removed_security_key.subject"), content.String())
+ msg.Info = fmt.Sprintf("UID: %d, security key removed notification", u.ID)
+
+ SendAsync(msg)
+ return nil
+}
+
+// SendTOTPEnrolled informs the user that they've been enrolled into TOTP.
+func SendTOTPEnrolled(ctx context.Context, u *user_model.User) error {
+ if setting.MailService == nil {
+ return nil
+ }
+ locale := translation.NewLocale(u.Language)
+
+ hasWebAuthn, err := auth_model.HasWebAuthnRegistrationsByUID(ctx, u.ID)
+ if err != nil {
+ return err
+ }
+
+ data := map[string]any{
+ "locale": locale,
+ "HasWebAuthn": hasWebAuthn,
+ "DisplayName": u.DisplayName(),
+ "Username": u.Name,
+ "Language": locale.Language(),
+ }
+
+ var content bytes.Buffer
+
+ if err := bodyTemplates.ExecuteTemplate(&content, string(mailAuthTOTPEnrolled), data); err != nil {
+ return err
+ }
+
+ msg := NewMessage(u.EmailTo(), locale.TrString("mail.totp_enrolled.subject"), content.String())
+ msg.Info = fmt.Sprintf("UID: %d, enrolled into TOTP notification", u.ID)
+
+ SendAsync(msg)
+ return nil
+}
diff --git a/services/mailer/mail_admin_new_user.go b/services/mailer/mail_admin_new_user.go
new file mode 100644
index 0000000..0713de8
--- /dev/null
+++ b/services/mailer/mail_admin_new_user.go
@@ -0,0 +1,78 @@
+// Copyright 2023 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+package mailer
+
+import (
+ "bytes"
+ "context"
+ "strconv"
+
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/templates"
+ "code.gitea.io/gitea/modules/translation"
+)
+
+const (
+ tplNewUserMail base.TplName = "notify/admin_new_user"
+)
+
+// MailNewUser sends notification emails on new user registrations to all admins
+func MailNewUser(ctx context.Context, u *user_model.User) {
+ if !setting.Admin.SendNotificationEmailOnNewUser {
+ return
+ }
+ if setting.MailService == nil {
+ // No mail service configured
+ return
+ }
+
+ recipients, err := user_model.GetAllAdmins(ctx)
+ if err != nil {
+ log.Error("user_model.GetAllAdmins: %v", err)
+ return
+ }
+
+ langMap := make(map[string][]string)
+ for _, r := range recipients {
+ langMap[r.Language] = append(langMap[r.Language], r.Email)
+ }
+
+ for lang, tos := range langMap {
+ mailNewUser(ctx, u, lang, tos)
+ }
+}
+
+func mailNewUser(_ context.Context, u *user_model.User, lang string, tos []string) {
+ locale := translation.NewLocale(lang)
+
+ manageUserURL := setting.AppURL + "admin/users/" + strconv.FormatInt(u.ID, 10)
+ subject := locale.TrString("mail.admin.new_user.subject", u.Name)
+ body := locale.TrString("mail.admin.new_user.text", manageUserURL)
+ mailMeta := map[string]any{
+ "NewUser": u,
+ "NewUserUrl": u.HTMLURL(),
+ "Subject": subject,
+ "Body": body,
+ "Language": locale.Language(),
+ "Locale": locale,
+ "SanitizeHTML": templates.SanitizeHTML,
+ }
+
+ var mailBody bytes.Buffer
+
+ if err := bodyTemplates.ExecuteTemplate(&mailBody, string(tplNewUserMail), mailMeta); err != nil {
+ log.Error("ExecuteTemplate [%s]: %v", string(tplNewUserMail)+"/body", err)
+ return
+ }
+
+ msgs := make([]*Message, 0, len(tos))
+ for _, to := range tos {
+ msg := NewMessage(to, subject, mailBody.String())
+ msg.Info = subject
+ msgs = append(msgs, msg)
+ }
+ SendAsync(msgs...)
+}
diff --git a/services/mailer/mail_admin_new_user_test.go b/services/mailer/mail_admin_new_user_test.go
new file mode 100644
index 0000000..f7f2783
--- /dev/null
+++ b/services/mailer/mail_admin_new_user_test.go
@@ -0,0 +1,79 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package mailer
+
+import (
+ "context"
+ "strconv"
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/test"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func getTestUsers(t *testing.T) []*user_model.User {
+ t.Helper()
+ admin := new(user_model.User)
+ admin.Name = "testadmin"
+ admin.IsAdmin = true
+ admin.Language = "en_US"
+ admin.Email = "admin@example.com"
+ require.NoError(t, user_model.CreateUser(db.DefaultContext, admin))
+
+ newUser := new(user_model.User)
+ newUser.Name = "new_user"
+ newUser.Language = "en_US"
+ newUser.IsAdmin = false
+ newUser.Email = "new_user@example.com"
+ newUser.LastLoginUnix = 1693648327
+ newUser.CreatedUnix = 1693648027
+ require.NoError(t, user_model.CreateUser(db.DefaultContext, newUser))
+
+ return []*user_model.User{admin, newUser}
+}
+
+func cleanUpUsers(ctx context.Context, users []*user_model.User) {
+ for _, u := range users {
+ db.DeleteByID[user_model.User](ctx, u.ID)
+ }
+}
+
+func TestAdminNotificationMail_test(t *testing.T) {
+ ctx := context.Background()
+
+ users := getTestUsers(t)
+
+ t.Run("SendNotificationEmailOnNewUser_true", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.Admin.SendNotificationEmailOnNewUser, true)()
+
+ called := false
+ defer MockMailSettings(func(msgs ...*Message) {
+ assert.Len(t, msgs, 1, "Test provides only one admin user, so only one email must be sent")
+ assert.Equal(t, msgs[0].To, users[0].Email, "checks if the recipient is the admin of the instance")
+ manageUserURL := setting.AppURL + "admin/users/" + strconv.FormatInt(users[1].ID, 10)
+ assert.Contains(t, msgs[0].Body, manageUserURL)
+ assert.Contains(t, msgs[0].Body, users[1].HTMLURL())
+ assert.Contains(t, msgs[0].Body, users[1].Name, "user name of the newly created user")
+ AssertTranslatedLocale(t, msgs[0].Body, "mail.admin", "admin.users")
+ called = true
+ })()
+ MailNewUser(ctx, users[1])
+ assert.True(t, called)
+ })
+
+ t.Run("SendNotificationEmailOnNewUser_false", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.Admin.SendNotificationEmailOnNewUser, false)()
+ defer MockMailSettings(func(msgs ...*Message) {
+ assert.Fail(t, "this shouldn't execute. MailNewUser must exit early since SEND_NOTIFICATION_EMAIL_ON_NEW_USER is disabled")
+ })()
+ MailNewUser(ctx, users[1])
+ })
+
+ cleanUpUsers(ctx, users)
+}
diff --git a/services/mailer/mail_auth_test.go b/services/mailer/mail_auth_test.go
new file mode 100644
index 0000000..38e3721
--- /dev/null
+++ b/services/mailer/mail_auth_test.go
@@ -0,0 +1,62 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package mailer_test
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/translation"
+ "code.gitea.io/gitea/services/mailer"
+ user_service "code.gitea.io/gitea/services/user"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestPasswordChangeMail(t *testing.T) {
+ defer require.NoError(t, unittest.PrepareTestDatabase())
+
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ called := false
+ defer mailer.MockMailSettings(func(msgs ...*mailer.Message) {
+ assert.Len(t, msgs, 1)
+ assert.Equal(t, user.EmailTo(), msgs[0].To)
+ assert.EqualValues(t, translation.NewLocale("en-US").Tr("mail.password_change.subject"), msgs[0].Subject)
+ mailer.AssertTranslatedLocale(t, msgs[0].Body, "mail.password_change.text_1", "mail.password_change.text_2", "mail.password_change.text_3")
+ called = true
+ })()
+
+ require.NoError(t, user_service.UpdateAuth(db.DefaultContext, user, &user_service.UpdateAuthOptions{Password: optional.Some("NewPasswordYolo!")}))
+ assert.True(t, called)
+}
+
+func TestPrimaryMailChange(t *testing.T) {
+ defer require.NoError(t, unittest.PrepareTestDatabase())
+
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ firstEmail := unittest.AssertExistsAndLoadBean(t, &user_model.EmailAddress{ID: 3, UID: user.ID, IsPrimary: true})
+ secondEmail := unittest.AssertExistsAndLoadBean(t, &user_model.EmailAddress{ID: 35, UID: user.ID}, "is_primary = false")
+
+ called := false
+ defer mailer.MockMailSettings(func(msgs ...*mailer.Message) {
+ assert.False(t, called)
+ assert.Len(t, msgs, 1)
+ assert.Equal(t, user.EmailTo(firstEmail.Email), msgs[0].To)
+ assert.EqualValues(t, translation.NewLocale("en-US").Tr("mail.primary_mail_change.subject"), msgs[0].Subject)
+ assert.Contains(t, msgs[0].Body, secondEmail.Email)
+ assert.Contains(t, msgs[0].Body, setting.AppURL)
+ mailer.AssertTranslatedLocale(t, msgs[0].Body, "mail.primary_mail_change.text_1", "mail.primary_mail_change.text_2", "mail.primary_mail_change.text_3")
+ called = true
+ })()
+
+ require.NoError(t, user_service.MakeEmailAddressPrimary(db.DefaultContext, user, secondEmail, true))
+ assert.True(t, called)
+
+ require.NoError(t, user_service.MakeEmailAddressPrimary(db.DefaultContext, user, firstEmail, false))
+}
diff --git a/services/mailer/mail_comment.go b/services/mailer/mail_comment.go
new file mode 100644
index 0000000..1812441
--- /dev/null
+++ b/services/mailer/mail_comment.go
@@ -0,0 +1,63 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package mailer
+
+import (
+ "context"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ issues_model "code.gitea.io/gitea/models/issues"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+)
+
+// MailParticipantsComment sends new comment emails to repository watchers and mentioned people.
+func MailParticipantsComment(ctx context.Context, c *issues_model.Comment, opType activities_model.ActionType, issue *issues_model.Issue, mentions []*user_model.User) error {
+ if setting.MailService == nil {
+ // No mail service configured
+ return nil
+ }
+
+ content := c.Content
+ if c.Type == issues_model.CommentTypePullRequestPush {
+ content = ""
+ }
+ if err := mailIssueCommentToParticipants(
+ &mailCommentContext{
+ Context: ctx,
+ Issue: issue,
+ Doer: c.Poster,
+ ActionType: opType,
+ Content: content,
+ Comment: c,
+ }, mentions); err != nil {
+ log.Error("mailIssueCommentToParticipants: %v", err)
+ }
+ return nil
+}
+
+// MailMentionsComment sends email to users mentioned in a code comment
+func MailMentionsComment(ctx context.Context, pr *issues_model.PullRequest, c *issues_model.Comment, mentions []*user_model.User) (err error) {
+ if setting.MailService == nil {
+ // No mail service configured
+ return nil
+ }
+
+ visited := make(container.Set[int64], len(mentions)+1)
+ visited.Add(c.Poster.ID)
+ if err = mailIssueCommentBatch(
+ &mailCommentContext{
+ Context: ctx,
+ Issue: pr.Issue,
+ Doer: c.Poster,
+ ActionType: activities_model.ActionCommentPull,
+ Content: c.Content,
+ Comment: c,
+ }, mentions, visited, true); err != nil {
+ log.Error("mailIssueCommentBatch: %v", err)
+ }
+ return nil
+}
diff --git a/services/mailer/mail_issue.go b/services/mailer/mail_issue.go
new file mode 100644
index 0000000..fab3315
--- /dev/null
+++ b/services/mailer/mail_issue.go
@@ -0,0 +1,201 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package mailer
+
+import (
+ "context"
+ "fmt"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ issues_model "code.gitea.io/gitea/models/issues"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+)
+
+func fallbackMailSubject(issue *issues_model.Issue) string {
+ return fmt.Sprintf("[%s] %s (#%d)", issue.Repo.FullName(), issue.Title, issue.Index)
+}
+
+type mailCommentContext struct {
+ context.Context
+ Issue *issues_model.Issue
+ Doer *user_model.User
+ ActionType activities_model.ActionType
+ Content string
+ Comment *issues_model.Comment
+ ForceDoerNotification bool
+}
+
+const (
+ // MailBatchSize set the batch size used in mailIssueCommentBatch
+ MailBatchSize = 100
+)
+
+// mailIssueCommentToParticipants can be used for both new issue creation and comment.
+// This function sends two lists of emails:
+// 1. Repository watchers (except for WIP pull requests) and users who participated in comments.
+// 2. Users who are not in 1. but are mentioned in the current issue/comment.
+func mailIssueCommentToParticipants(ctx *mailCommentContext, mentions []*user_model.User) error {
+ // Required by the mail composer; make sure to load these before calling the async function
+ if err := ctx.Issue.LoadRepo(ctx); err != nil {
+ return fmt.Errorf("LoadRepo: %w", err)
+ }
+ if err := ctx.Issue.LoadPoster(ctx); err != nil {
+ return fmt.Errorf("LoadPoster: %w", err)
+ }
+ if err := ctx.Issue.LoadPullRequest(ctx); err != nil {
+ return fmt.Errorf("LoadPullRequest: %w", err)
+ }
+
+ // Enough room to avoid reallocations
+ unfiltered := make([]int64, 1, 64)
+
+ // =========== Original poster ===========
+ unfiltered[0] = ctx.Issue.PosterID
+
+ // =========== Assignees ===========
+ ids, err := issues_model.GetAssigneeIDsByIssue(ctx, ctx.Issue.ID)
+ if err != nil {
+ return fmt.Errorf("GetAssigneeIDsByIssue(%d): %w", ctx.Issue.ID, err)
+ }
+ unfiltered = append(unfiltered, ids...)
+
+ // =========== Participants (i.e. commenters, reviewers) ===========
+ ids, err = issues_model.GetParticipantsIDsByIssueID(ctx, ctx.Issue.ID)
+ if err != nil {
+ return fmt.Errorf("GetParticipantsIDsByIssueID(%d): %w", ctx.Issue.ID, err)
+ }
+ unfiltered = append(unfiltered, ids...)
+
+ // =========== Issue watchers ===========
+ ids, err = issues_model.GetIssueWatchersIDs(ctx, ctx.Issue.ID, true)
+ if err != nil {
+ return fmt.Errorf("GetIssueWatchersIDs(%d): %w", ctx.Issue.ID, err)
+ }
+ unfiltered = append(unfiltered, ids...)
+
+ // =========== Repo watchers ===========
+ // Make repo watchers last, since it's likely the list with the most users
+ if !(ctx.Issue.IsPull && ctx.Issue.PullRequest.IsWorkInProgress(ctx) && ctx.ActionType != activities_model.ActionCreatePullRequest) {
+ ids, err = repo_model.GetRepoWatchersIDs(ctx, ctx.Issue.RepoID)
+ if err != nil {
+ return fmt.Errorf("GetRepoWatchersIDs(%d): %w", ctx.Issue.RepoID, err)
+ }
+ unfiltered = append(ids, unfiltered...)
+ }
+
+ visited := make(container.Set[int64], len(unfiltered)+len(mentions)+1)
+
+ // Avoid mailing the doer
+ if ctx.Doer.EmailNotificationsPreference != user_model.EmailNotificationsAndYourOwn && !ctx.ForceDoerNotification {
+ visited.Add(ctx.Doer.ID)
+ }
+
+ // =========== Mentions ===========
+ if err = mailIssueCommentBatch(ctx, mentions, visited, true); err != nil {
+ return fmt.Errorf("mailIssueCommentBatch() mentions: %w", err)
+ }
+
+ // Avoid mailing explicit unwatched
+ ids, err = issues_model.GetIssueWatchersIDs(ctx, ctx.Issue.ID, false)
+ if err != nil {
+ return fmt.Errorf("GetIssueWatchersIDs(%d): %w", ctx.Issue.ID, err)
+ }
+ visited.AddMultiple(ids...)
+
+ unfilteredUsers, err := user_model.GetMaileableUsersByIDs(ctx, unfiltered, false)
+ if err != nil {
+ return err
+ }
+ if err = mailIssueCommentBatch(ctx, unfilteredUsers, visited, false); err != nil {
+ return fmt.Errorf("mailIssueCommentBatch(): %w", err)
+ }
+
+ return nil
+}
+
+func mailIssueCommentBatch(ctx *mailCommentContext, users []*user_model.User, visited container.Set[int64], fromMention bool) error {
+ checkUnit := unit.TypeIssues
+ if ctx.Issue.IsPull {
+ checkUnit = unit.TypePullRequests
+ }
+
+ langMap := make(map[string][]*user_model.User)
+ for _, user := range users {
+ if !user.IsActive {
+ // Exclude deactivated users
+ continue
+ }
+ // At this point we exclude users whose notification preference is neither
+ // "enabled" nor "enabled and your own", unless this mail is a mention and
+ // their preference is "on mention".
+ if !(user.EmailNotificationsPreference == user_model.EmailNotificationsEnabled ||
+ user.EmailNotificationsPreference == user_model.EmailNotificationsAndYourOwn ||
+ fromMention && user.EmailNotificationsPreference == user_model.EmailNotificationsOnMention) {
+ continue
+ }
+
+ // if we have already visited this user we exclude them
+ if !visited.Add(user.ID) {
+ continue
+ }
+
+ // test if this user is allowed to see the issue/pull
+ if !access_model.CheckRepoUnitUser(ctx, ctx.Issue.Repo, user, checkUnit) {
+ continue
+ }
+
+ langMap[user.Language] = append(langMap[user.Language], user)
+ }
+
+ for lang, receivers := range langMap {
+ // Because len(receivers) > 0 and the order doesn't particularly matter, work
+ // backwards from the last (possibly incomplete) batch. If len(receivers) could
+ // be 0, this starting condition would need to be changed slightly.
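+ // e.g. 250 receivers with MailBatchSize 100 yields the batches
+ // receivers[200:250], receivers[100:200] and receivers[0:100].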
+ for i := ((len(receivers) - 1) / MailBatchSize) * MailBatchSize; i >= 0; i -= MailBatchSize {
+ msgs, err := composeIssueCommentMessages(ctx, lang, receivers[i:], fromMention, "issue comments")
+ if err != nil {
+ return err
+ }
+ SendAsync(msgs...)
+ receivers = receivers[:i]
+ }
+ }
+
+ return nil
+}
+
+// MailParticipants sends new issue thread created emails to repository watchers
+// and mentioned people.
+func MailParticipants(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, opType activities_model.ActionType, mentions []*user_model.User) error {
+ if setting.MailService == nil {
+ // No mail service configured
+ return nil
+ }
+
+ content := issue.Content
+ if opType == activities_model.ActionCloseIssue || opType == activities_model.ActionClosePullRequest ||
+ opType == activities_model.ActionReopenIssue || opType == activities_model.ActionReopenPullRequest ||
+ opType == activities_model.ActionMergePullRequest || opType == activities_model.ActionAutoMergePullRequest {
+ content = ""
+ }
+ forceDoerNotification := opType == activities_model.ActionAutoMergePullRequest
+ if err := mailIssueCommentToParticipants(
+ &mailCommentContext{
+ Context: ctx,
+ Issue: issue,
+ Doer: doer,
+ ActionType: opType,
+ Content: content,
+ Comment: nil,
+ ForceDoerNotification: forceDoerNotification,
+ }, mentions); err != nil {
+ log.Error("mailIssueCommentToParticipants: %v", err)
+ }
+ return nil
+}
diff --git a/services/mailer/mail_release.go b/services/mailer/mail_release.go
new file mode 100644
index 0000000..0b8b97e
--- /dev/null
+++ b/services/mailer/mail_release.go
@@ -0,0 +1,98 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package mailer
+
+import (
+ "bytes"
+ "context"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/markup/markdown"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/translation"
+)
+
+const (
+ tplNewReleaseMail base.TplName = "release"
+)
+
+// MailNewRelease sends a new release notification to all repo watchers.
+func MailNewRelease(ctx context.Context, rel *repo_model.Release) {
+ if setting.MailService == nil {
+ // No mail service configured
+ return
+ }
+
+ watcherIDList, err := repo_model.GetRepoWatchersIDs(ctx, rel.RepoID)
+ if err != nil {
+ log.Error("GetRepoWatchersIDs(%d): %v", rel.RepoID, err)
+ return
+ }
+
+ recipients, err := user_model.GetMaileableUsersByIDs(ctx, watcherIDList, false)
+ if err != nil {
+ log.Error("user_model.GetMaileableUsersByIDs: %v", err)
+ return
+ }
+
+ langMap := make(map[string][]*user_model.User)
+ for _, user := range recipients {
+ if user.ID != rel.PublisherID {
+ langMap[user.Language] = append(langMap[user.Language], user)
+ }
+ }
+
+ for lang, tos := range langMap {
+ mailNewRelease(ctx, lang, tos, rel)
+ }
+}
+
+func mailNewRelease(ctx context.Context, lang string, tos []*user_model.User, rel *repo_model.Release) {
+ locale := translation.NewLocale(lang)
+
+ var err error
+ rel.RenderedNote, err = markdown.RenderString(&markup.RenderContext{
+ Ctx: ctx,
+ Links: markup.Links{
+ Base: rel.Repo.HTMLURL(),
+ },
+ Metas: rel.Repo.ComposeMetas(ctx),
+ }, rel.Note)
+ if err != nil {
+ log.Error("markdown.RenderString(%d): %v", rel.RepoID, err)
+ return
+ }
+
+ subject := locale.TrString("mail.release.new.subject", rel.TagName, rel.Repo.FullName())
+ mailMeta := map[string]any{
+ "locale": locale,
+ "Release": rel,
+ "Subject": subject,
+ "Language": locale.Language(),
+ "Link": rel.HTMLURL(),
+ }
+
+ var mailBody bytes.Buffer
+
+ if err := bodyTemplates.ExecuteTemplate(&mailBody, string(tplNewReleaseMail), mailMeta); err != nil {
+ log.Error("ExecuteTemplate [%s]: %v", string(tplNewReleaseMail)+"/body", err)
+ return
+ }
+
+ msgs := make([]*Message, 0, len(tos))
+ publisherName := fromDisplayName(rel.Publisher)
+ msgID := createMessageIDForRelease(rel)
+ for _, to := range tos {
+ msg := NewMessageFrom(to.EmailTo(), publisherName, setting.MailService.FromEmail, subject, mailBody.String())
+ msg.Info = subject
+ msg.SetHeader("Message-ID", msgID)
+ msgs = append(msgs, msg)
+ }
+
+ SendAsync(msgs...)
+}
diff --git a/services/mailer/mail_repo.go b/services/mailer/mail_repo.go
new file mode 100644
index 0000000..7003584
--- /dev/null
+++ b/services/mailer/mail_repo.go
@@ -0,0 +1,89 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package mailer
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/organization"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/translation"
+)
+
+// SendRepoTransferNotifyMail triggers a notification e-mail when a pending repository transfer is created
+func SendRepoTransferNotifyMail(ctx context.Context, doer, newOwner *user_model.User, repo *repo_model.Repository) error {
+ if setting.MailService == nil {
+ // No mail service configured
+ return nil
+ }
+
+ if newOwner.IsOrganization() {
+ users, err := organization.GetUsersWhoCanCreateOrgRepo(ctx, newOwner.ID)
+ if err != nil {
+ return err
+ }
+
+ langMap := make(map[string][]*user_model.User)
+ for _, user := range users {
+ if !user.IsActive {
+ // don't send emails to inactive users
+ continue
+ }
+ langMap[user.Language] = append(langMap[user.Language], user)
+ }
+
+ for lang, tos := range langMap {
+ if err := sendRepoTransferNotifyMailPerLang(lang, newOwner, doer, tos, repo); err != nil {
+ return err
+ }
+ }
+
+ return nil
+ }
+
+ return sendRepoTransferNotifyMailPerLang(newOwner.Language, newOwner, doer, []*user_model.User{newOwner}, repo)
+}
+
+// sendRepoTransferNotifyMailPerLang sends the pending-repository-transfer notification e-mail to the recipients of a single language
+func sendRepoTransferNotifyMailPerLang(lang string, newOwner, doer *user_model.User, emailTos []*user_model.User, repo *repo_model.Repository) error {
+ var (
+ locale = translation.NewLocale(lang)
+ content bytes.Buffer
+ )
+
+ destination := locale.TrString("mail.repo.transfer.to_you")
+ subject := locale.TrString("mail.repo.transfer.subject_to_you", doer.DisplayName(), repo.FullName())
+ if newOwner.IsOrganization() {
+ destination = newOwner.DisplayName()
+ subject = locale.TrString("mail.repo.transfer.subject_to", doer.DisplayName(), repo.FullName(), destination)
+ }
+
+ data := map[string]any{
+ "locale": locale,
+ "Doer": doer,
+ "User": repo.Owner,
+ "Repo": repo.FullName(),
+ "Link": repo.HTMLURL(),
+ "Subject": subject,
+ "Language": locale.Language(),
+ "Destination": destination,
+ }
+
+ if err := bodyTemplates.ExecuteTemplate(&content, string(mailRepoTransferNotify), data); err != nil {
+ return err
+ }
+
+ for _, to := range emailTos {
+ msg := NewMessageFrom(to.EmailTo(), fromDisplayName(doer), setting.MailService.FromEmail, subject, content.String())
+ msg.Info = fmt.Sprintf("UID: %d, repository pending transfer notification", newOwner.ID)
+
+ SendAsync(msg)
+ }
+
+ return nil
+}
diff --git a/services/mailer/mail_team_invite.go b/services/mailer/mail_team_invite.go
new file mode 100644
index 0000000..ceecefa
--- /dev/null
+++ b/services/mailer/mail_team_invite.go
@@ -0,0 +1,76 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package mailer
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "net/url"
+
+ org_model "code.gitea.io/gitea/models/organization"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/translation"
+)
+
+const (
+ tplTeamInviteMail base.TplName = "team_invite"
+)
+
+// MailTeamInvite sends team invites
+func MailTeamInvite(ctx context.Context, inviter *user_model.User, team *org_model.Team, invite *org_model.TeamInvite) error {
+ if setting.MailService == nil {
+ return nil
+ }
+
+ org, err := user_model.GetUserByID(ctx, team.OrgID)
+ if err != nil {
+ return err
+ }
+
+ locale := translation.NewLocale(inviter.Language)
+
+ // check if a user with this email already exists
+ user, err := user_model.GetUserByEmail(ctx, invite.Email)
+ if err != nil && !user_model.IsErrUserNotExist(err) {
+ return err
+ } else if user != nil && user.ProhibitLogin {
+ return fmt.Errorf("login is prohibited for the invited user")
+ }
+
+ inviteRedirect := url.QueryEscape(fmt.Sprintf("/org/invite/%s", invite.Token))
+ inviteURL := fmt.Sprintf("%suser/sign_up?redirect_to=%s", setting.AppURL, inviteRedirect)
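+	// e.g. https://example.com/user/sign_up?redirect_to=%2Forg%2Finvite%2F<token>
+	// (host is a placeholder; setting.AppURL always carries a trailing slash)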
+
+ if (err == nil && user != nil) || setting.Service.DisableRegistration || setting.Service.AllowOnlyExternalRegistration {
+ // user account exists or registration disabled
+ inviteURL = fmt.Sprintf("%suser/login?redirect_to=%s", setting.AppURL, inviteRedirect)
+ }
+
+ subject := locale.TrString("mail.team_invite.subject", inviter.DisplayName(), org.DisplayName())
+ mailMeta := map[string]any{
+ "locale": locale,
+ "Inviter": inviter,
+ "Organization": org,
+ "Team": team,
+ "Invite": invite,
+ "Subject": subject,
+ "InviteURL": inviteURL,
+ }
+
+ var mailBody bytes.Buffer
+ if err := bodyTemplates.ExecuteTemplate(&mailBody, string(tplTeamInviteMail), mailMeta); err != nil {
+ log.Error("ExecuteTemplate [%s]: %v", string(tplTeamInviteMail)+"/body", err)
+ return err
+ }
+
+ msg := NewMessage(invite.Email, subject, mailBody.String())
+ msg.Info = subject
+
+ SendAsync(msg)
+
+ return nil
+}
diff --git a/services/mailer/mail_test.go b/services/mailer/mail_test.go
new file mode 100644
index 0000000..1a9bbc9
--- /dev/null
+++ b/services/mailer/mail_test.go
@@ -0,0 +1,540 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package mailer
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "html/template"
+ "io"
+ "mime/quotedprintable"
+ "regexp"
+ "strings"
+ "testing"
+ texttmpl "text/template"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/test"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+const subjectTpl = `
+{{.SubjectPrefix}}[{{.Repo}}] @{{.Doer.Name}} #{{.Issue.Index}} - {{.Issue.Title}}
+`
+
+const bodyTpl = `
+<!DOCTYPE html>
+<html>
+<head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+</head>
+
+<body>
+ <p>{{.Body}}</p>
+ <p>
+ ---
+ <br>
+ <a href="{{.Link}}">View it on Gitea</a>.
+ </p>
+</body>
+</html>
+`
+
+func prepareMailerTest(t *testing.T) (doer *user_model.User, repo *repo_model.Repository, issue *issues_model.Issue, comment *issues_model.Comment) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ doer = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ repo = unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1, Owner: doer})
+ issue = unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1, Repo: repo, Poster: doer})
+ require.NoError(t, issue.LoadRepo(db.DefaultContext))
+ comment = unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{ID: 2, Issue: issue})
+ return doer, repo, issue, comment
+}
+
+func TestComposeIssueCommentMessage(t *testing.T) {
+ defer MockMailSettings(nil)()
+ doer, _, issue, comment := prepareMailerTest(t)
+
+ markup.Init(&markup.ProcessorHelper{
+ IsUsernameMentionable: func(ctx context.Context, username string) bool {
+ return username == doer.Name
+ },
+ })
+
+ defer test.MockVariableValue(&setting.IncomingEmail.Enabled, true)()
+
+ subjectTemplates = texttmpl.Must(texttmpl.New("issue/comment").Parse(subjectTpl))
+ bodyTemplates = template.Must(template.New("issue/comment").Parse(bodyTpl))
+
+ recipients := []*user_model.User{{Name: "Test", Email: "test@gitea.com"}, {Name: "Test2", Email: "test2@gitea.com"}}
+ msgs, err := composeIssueCommentMessages(&mailCommentContext{
+ Context: context.TODO(), // TODO: use a correct context
+ Issue: issue, Doer: doer, ActionType: activities_model.ActionCommentIssue,
+ Content: fmt.Sprintf("test @%s %s#%d body", doer.Name, issue.Repo.FullName(), issue.Index),
+ Comment: comment,
+ }, "en-US", recipients, false, "issue comment")
+ require.NoError(t, err)
+ assert.Len(t, msgs, 2)
+ gomailMsg := msgs[0].ToMessage()
+ replyTo := gomailMsg.GetHeader("Reply-To")[0]
+ subject := gomailMsg.GetHeader("Subject")[0]
+
+ assert.Len(t, gomailMsg.GetHeader("To"), 1, "exactly one recipient is expected in the To field")
+ tokenRegex := regexp.MustCompile(`\Aincoming\+(.+)@localhost\z`)
+ assert.Regexp(t, tokenRegex, replyTo)
+ token := tokenRegex.FindAllStringSubmatch(replyTo, 1)[0][1]
+ assert.Equal(t, "Re: ", subject[:4], "Comment reply subject should contain Re:")
+ assert.Equal(t, "Re: [user2/repo1] @user2 #1 - issue1", subject)
+ assert.Equal(t, "<user2/repo1/issues/1@localhost>", gomailMsg.GetHeader("In-Reply-To")[0], "In-Reply-To header doesn't match")
+ assert.ElementsMatch(t, []string{"<user2/repo1/issues/1@localhost>", "<reply-" + token + "@localhost>"}, gomailMsg.GetHeader("References"), "References header doesn't match")
+ assert.Equal(t, "<user2/repo1/issues/1/comment/2@localhost>", gomailMsg.GetHeader("Message-ID")[0], "Message-ID header doesn't match")
+ assert.Equal(t, "<mailto:"+replyTo+">", gomailMsg.GetHeader("List-Post")[0])
+ assert.Len(t, gomailMsg.GetHeader("List-Unsubscribe"), 2) // url + mailto
+
+ var buf bytes.Buffer
+ gomailMsg.WriteTo(&buf)
+
+ b, err := io.ReadAll(quotedprintable.NewReader(&buf))
+ require.NoError(t, err)
+
+ // text/plain
+ assert.Contains(t, string(b), fmt.Sprintf(`( %s )`, doer.HTMLURL()))
+ assert.Contains(t, string(b), fmt.Sprintf(`( %s )`, issue.HTMLURL()))
+
+ // text/html
+ assert.Contains(t, string(b), fmt.Sprintf(`href="%s"`, doer.HTMLURL()))
+ assert.Contains(t, string(b), fmt.Sprintf(`href="%s"`, issue.HTMLURL()))
+}
+
+func TestComposeIssueMessage(t *testing.T) {
+ defer MockMailSettings(nil)()
+ doer, _, issue, _ := prepareMailerTest(t)
+
+ recipients := []*user_model.User{{Name: "Test", Email: "test@gitea.com"}, {Name: "Test2", Email: "test2@gitea.com"}}
+ msgs, err := composeIssueCommentMessages(&mailCommentContext{
+ Context: context.TODO(), // TODO: use a correct context
+ Issue: issue, Doer: doer, ActionType: activities_model.ActionCreateIssue,
+ Content: "test body",
+ }, "en-US", recipients, false, "issue create")
+ require.NoError(t, err)
+ assert.Len(t, msgs, 2)
+
+ gomailMsg := msgs[0].ToMessage()
+ mailto := gomailMsg.GetHeader("To")
+ subject := gomailMsg.GetHeader("Subject")
+ messageID := gomailMsg.GetHeader("Message-ID")
+ inReplyTo := gomailMsg.GetHeader("In-Reply-To")
+ references := gomailMsg.GetHeader("References")
+
+ assert.Len(t, mailto, 1, "exactly one recipient is expected in the To field")
+ assert.Equal(t, "[user2/repo1] issue1 (#1)", subject[0])
+ assert.Equal(t, "<user2/repo1/issues/1@localhost>", inReplyTo[0], "In-Reply-To header doesn't match")
+ assert.Equal(t, "<user2/repo1/issues/1@localhost>", references[0], "References header doesn't match")
+ assert.Equal(t, "<user2/repo1/issues/1@localhost>", messageID[0], "Message-ID header doesn't match")
+ assert.Empty(t, gomailMsg.GetHeader("List-Post")) // incoming mail feature disabled
+ assert.Len(t, gomailMsg.GetHeader("List-Unsubscribe"), 1) // url without mailto
+}
+
+func TestMailerIssueTemplate(t *testing.T) {
+ defer MockMailSettings(nil)()
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+
+ expect := func(t *testing.T, msg *Message, issue *issues_model.Issue, expected ...string) {
+ subject := msg.ToMessage().GetHeader("Subject")
+ msgbuf := new(bytes.Buffer)
+ _, _ = msg.ToMessage().WriteTo(msgbuf)
+ wholemsg := msgbuf.String()
+ assert.Contains(t, subject[0], fallbackMailSubject(issue))
+ for _, s := range expected {
+ assert.Contains(t, wholemsg, s)
+ }
+ AssertTranslatedLocale(t, wholemsg, "mail.issue")
+ }
+
+ testCompose := func(t *testing.T, ctx *mailCommentContext) *Message {
+ t.Helper()
+ recipients := []*user_model.User{{Name: "Test", Email: "test@gitea.com"}}
+
+ ctx.Context = context.Background()
+ fromMention := false
+ msgs, err := composeIssueCommentMessages(ctx, "en-US", recipients, fromMention, "TestMailerIssueTemplate")
+ require.NoError(t, err)
+ assert.Len(t, msgs, 1)
+ return msgs[0]
+ }
+
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1})
+ require.NoError(t, issue.LoadRepo(db.DefaultContext))
+
+ msg := testCompose(t, &mailCommentContext{
+ Issue: issue, Doer: doer, ActionType: activities_model.ActionCreateIssue,
+ Content: issue.Content,
+ })
+ expect(t, msg, issue, issue.Content)
+
+ comment := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{ID: 2, Issue: issue})
+
+ msg = testCompose(t, &mailCommentContext{
+ Issue: issue, Doer: doer, ActionType: activities_model.ActionCommentIssue,
+ Content: comment.Content, Comment: comment,
+ })
+ expect(t, msg, issue, comment.Content)
+
+ msg = testCompose(t, &mailCommentContext{
+ Issue: issue, Doer: doer, ActionType: activities_model.ActionCloseIssue,
+ Content: comment.Content, Comment: comment,
+ })
+ expect(t, msg, issue, comment.Content)
+
+ msg = testCompose(t, &mailCommentContext{
+ Issue: issue, Doer: doer, ActionType: activities_model.ActionReopenIssue,
+ Content: comment.Content, Comment: comment,
+ })
+ expect(t, msg, issue, comment.Content)
+
+ pull := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 2})
+ require.NoError(t, pull.LoadAttributes(db.DefaultContext))
+ pullComment := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{ID: 4, Issue: pull})
+
+ msg = testCompose(t, &mailCommentContext{
+ Issue: pull, Doer: doer, ActionType: activities_model.ActionCommentPull,
+ Content: pullComment.Content, Comment: pullComment,
+ })
+ expect(t, msg, pull, pullComment.Content)
+
+ msg = testCompose(t, &mailCommentContext{
+ Issue: pull, Doer: doer, ActionType: activities_model.ActionMergePullRequest,
+ Content: pullComment.Content, Comment: pullComment,
+ })
+ expect(t, msg, pull, pullComment.Content, pull.PullRequest.BaseBranch)
+
+ reviewComment := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{ID: 9})
+ require.NoError(t, reviewComment.LoadReview(db.DefaultContext))
+
+ approveComment := reviewComment
+ approveComment.Review.Type = issues_model.ReviewTypeApprove
+ msg = testCompose(t, &mailCommentContext{
+ Issue: pull, Doer: doer, ActionType: activities_model.ActionApprovePullRequest,
+ Content: approveComment.Content, Comment: approveComment,
+ })
+ expect(t, msg, pull, approveComment.Content)
+
+ rejectComment := reviewComment
+ rejectComment.Review.Type = issues_model.ReviewTypeReject
+ msg = testCompose(t, &mailCommentContext{
+ Issue: pull, Doer: doer, ActionType: activities_model.ActionRejectPullRequest,
+ Content: rejectComment.Content, Comment: rejectComment,
+ })
+ expect(t, msg, pull, rejectComment.Content)
+}
+
+func TestTemplateSelection(t *testing.T) {
+ defer MockMailSettings(nil)()
+ doer, repo, issue, comment := prepareMailerTest(t)
+ recipients := []*user_model.User{{Name: "Test", Email: "test@gitea.com"}}
+
+ subjectTemplates = texttmpl.Must(texttmpl.New("issue/default").Parse("issue/default/subject"))
+ texttmpl.Must(subjectTemplates.New("issue/new").Parse("issue/new/subject"))
+ texttmpl.Must(subjectTemplates.New("pull/comment").Parse("pull/comment/subject"))
+ texttmpl.Must(subjectTemplates.New("issue/close").Parse("")) // Must default to fallback subject
+
+ bodyTemplates = template.Must(template.New("issue/default").Parse("issue/default/body"))
+ template.Must(bodyTemplates.New("issue/new").Parse("issue/new/body"))
+ template.Must(bodyTemplates.New("pull/comment").Parse("pull/comment/body"))
+ template.Must(bodyTemplates.New("issue/close").Parse("issue/close/body"))
+
+ expect := func(t *testing.T, msg *Message, expSubject, expBody string) {
+ subject := msg.ToMessage().GetHeader("Subject")
+ msgbuf := new(bytes.Buffer)
+ _, _ = msg.ToMessage().WriteTo(msgbuf)
+ wholemsg := msgbuf.String()
+ assert.Equal(t, []string{expSubject}, subject)
+ assert.Contains(t, wholemsg, expBody)
+ }
+
+ msg := testComposeIssueCommentMessage(t, &mailCommentContext{
+ Context: context.TODO(), // TODO: use a correct context
+ Issue: issue, Doer: doer, ActionType: activities_model.ActionCreateIssue,
+ Content: "test body",
+ }, recipients, false, "TestTemplateSelection")
+ expect(t, msg, "issue/new/subject", "issue/new/body")
+
+ msg = testComposeIssueCommentMessage(t, &mailCommentContext{
+ Context: context.TODO(), // TODO: use a correct context
+ Issue: issue, Doer: doer, ActionType: activities_model.ActionCommentIssue,
+ Content: "test body", Comment: comment,
+ }, recipients, false, "TestTemplateSelection")
+ expect(t, msg, "issue/default/subject", "issue/default/body")
+
+ pull := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 2, Repo: repo, Poster: doer})
+ comment = unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{ID: 4, Issue: pull})
+ msg = testComposeIssueCommentMessage(t, &mailCommentContext{
+ Context: context.TODO(), // TODO: use a correct context
+ Issue: pull, Doer: doer, ActionType: activities_model.ActionCommentPull,
+ Content: "test body", Comment: comment,
+ }, recipients, false, "TestTemplateSelection")
+ expect(t, msg, "pull/comment/subject", "pull/comment/body")
+
+ msg = testComposeIssueCommentMessage(t, &mailCommentContext{
+ Context: context.TODO(), // TODO: use a correct context
+ Issue: issue, Doer: doer, ActionType: activities_model.ActionCloseIssue,
+ Content: "test body", Comment: comment,
+ }, recipients, false, "TestTemplateSelection")
+ expect(t, msg, "Re: [user2/repo1] issue1 (#1)", "issue/close/body")
+}
+
+func TestTemplateServices(t *testing.T) {
+ defer MockMailSettings(nil)()
+ doer, _, issue, comment := prepareMailerTest(t)
+ require.NoError(t, issue.LoadRepo(db.DefaultContext))
+
+ expect := func(t *testing.T, issue *issues_model.Issue, comment *issues_model.Comment, doer *user_model.User,
+ actionType activities_model.ActionType, fromMention bool, tplSubject, tplBody, expSubject, expBody string,
+ ) {
+ subjectTemplates = texttmpl.Must(texttmpl.New("issue/default").Parse(tplSubject))
+ bodyTemplates = template.Must(template.New("issue/default").Parse(tplBody))
+
+ recipients := []*user_model.User{{Name: "Test", Email: "test@gitea.com"}}
+ msg := testComposeIssueCommentMessage(t, &mailCommentContext{
+ Context: context.TODO(), // TODO: use a correct context
+ Issue: issue, Doer: doer, ActionType: actionType,
+ Content: "test body", Comment: comment,
+ }, recipients, fromMention, "TestTemplateServices")
+
+ subject := msg.ToMessage().GetHeader("Subject")
+ msgbuf := new(bytes.Buffer)
+ _, _ = msg.ToMessage().WriteTo(msgbuf)
+ wholemsg := msgbuf.String()
+
+ assert.Equal(t, []string{expSubject}, subject)
+ assert.Contains(t, wholemsg, "\r\n"+expBody+"\r\n")
+ }
+
+ expect(t, issue, comment, doer, activities_model.ActionCommentIssue, false,
+ "{{.SubjectPrefix}}[{{.Repo}}]: @{{.Doer.Name}} commented on #{{.Issue.Index}} - {{.Issue.Title}}",
+ "//{{.ActionType}},{{.ActionName}},{{if .IsMention}}norender{{end}}//",
+ "Re: [user2/repo1]: @user2 commented on #1 - issue1",
+ "//issue,comment,//")
+
+ expect(t, issue, comment, doer, activities_model.ActionCommentIssue, true,
+ "{{if .IsMention}}must render{{end}}",
+ "//subject is: {{.Subject}}//",
+ "must render",
+ "//subject is: must render//")
+
+ expect(t, issue, comment, doer, activities_model.ActionCommentIssue, true,
+ "{{.FallbackSubject}}",
+ "//{{.SubjectPrefix}}//",
+ "Re: [user2/repo1] issue1 (#1)",
+ "//Re: //")
+}
+
+func testComposeIssueCommentMessage(t *testing.T, ctx *mailCommentContext, recipients []*user_model.User, fromMention bool, info string) *Message {
+ msgs, err := composeIssueCommentMessages(ctx, "en-US", recipients, fromMention, info)
+ require.NoError(t, err)
+ assert.Len(t, msgs, 1)
+ return msgs[0]
+}
+
+func TestGenerateAdditionalHeaders(t *testing.T) {
+ defer MockMailSettings(nil)()
+ doer, _, issue, _ := prepareMailerTest(t)
+
+ ctx := &mailCommentContext{Context: context.TODO() /* TODO: use a correct context */, Issue: issue, Doer: doer}
+ recipient := &user_model.User{Name: "test", Email: "test@gitea.com"}
+
+ headers := generateAdditionalHeaders(ctx, "dummy-reason", recipient)
+
+ expected := map[string]string{
+ "List-ID": "user2/repo1 <repo1.user2.localhost>",
+ "List-Archive": "<https://try.gitea.io/user2/repo1>",
+ "X-Mailer": "Forgejo",
+ "X-Gitea-Reason": "dummy-reason",
+ "X-Gitea-Sender": "user2",
+ "X-Gitea-Recipient": "test",
+ "X-Gitea-Recipient-Address": "test@gitea.com",
+ "X-Gitea-Repository": "repo1",
+ "X-Gitea-Repository-Path": "user2/repo1",
+ "X-Gitea-Repository-Link": "https://try.gitea.io/user2/repo1",
+ "X-Gitea-Issue-ID": "1",
+ "X-Gitea-Issue-Link": "https://try.gitea.io/user2/repo1/issues/1",
+ "X-Forgejo-Sender": "user2",
+ "X-Forgejo-Recipient": "test",
+ }
+
+ for key, value := range expected {
+ if assert.Contains(t, headers, key) {
+ assert.Equal(t, value, headers[key])
+ }
+ }
+}
+
+func Test_createReference(t *testing.T) {
+ defer MockMailSettings(nil)()
+ _, _, issue, comment := prepareMailerTest(t)
+ _, _, pullIssue, _ := prepareMailerTest(t)
+ pullIssue.IsPull = true
+
+ type args struct {
+ issue *issues_model.Issue
+ comment *issues_model.Comment
+ actionType activities_model.ActionType
+ }
+ tests := []struct {
+ name string
+ args args
+ prefix string
+ }{
+ {
+ name: "Open Issue",
+ args: args{
+ issue: issue,
+ actionType: activities_model.ActionCreateIssue,
+ },
+ prefix: fmt.Sprintf("<%s/issues/%d@%s>", issue.Repo.FullName(), issue.Index, setting.Domain),
+ },
+ {
+ name: "Open Pull",
+ args: args{
+ issue: pullIssue,
+ actionType: activities_model.ActionCreatePullRequest,
+ },
+ prefix: fmt.Sprintf("<%s/pulls/%d@%s>", issue.Repo.FullName(), issue.Index, setting.Domain),
+ },
+ {
+ name: "Comment Issue",
+ args: args{
+ issue: issue,
+ comment: comment,
+ actionType: activities_model.ActionCommentIssue,
+ },
+ prefix: fmt.Sprintf("<%s/issues/%d/comment/%d@%s>", issue.Repo.FullName(), issue.Index, comment.ID, setting.Domain),
+ },
+ {
+ name: "Comment Pull",
+ args: args{
+ issue: pullIssue,
+ comment: comment,
+ actionType: activities_model.ActionCommentPull,
+ },
+ prefix: fmt.Sprintf("<%s/pulls/%d/comment/%d@%s>", issue.Repo.FullName(), issue.Index, comment.ID, setting.Domain),
+ },
+ {
+ name: "Close Issue",
+ args: args{
+ issue: issue,
+ actionType: activities_model.ActionCloseIssue,
+ },
+ prefix: fmt.Sprintf("<%s/issues/%d/close/", issue.Repo.FullName(), issue.Index),
+ },
+ {
+ name: "Close Pull",
+ args: args{
+ issue: pullIssue,
+ actionType: activities_model.ActionClosePullRequest,
+ },
+ prefix: fmt.Sprintf("<%s/pulls/%d/close/", issue.Repo.FullName(), issue.Index),
+ },
+ {
+ name: "Reopen Issue",
+ args: args{
+ issue: issue,
+ actionType: activities_model.ActionReopenIssue,
+ },
+ prefix: fmt.Sprintf("<%s/issues/%d/reopen/", issue.Repo.FullName(), issue.Index),
+ },
+ {
+ name: "Reopen Pull",
+ args: args{
+ issue: pullIssue,
+ actionType: activities_model.ActionReopenPullRequest,
+ },
+ prefix: fmt.Sprintf("<%s/pulls/%d/reopen/", issue.Repo.FullName(), issue.Index),
+ },
+ {
+ name: "Merge Pull",
+ args: args{
+ issue: pullIssue,
+ actionType: activities_model.ActionMergePullRequest,
+ },
+ prefix: fmt.Sprintf("<%s/pulls/%d/merge/", issue.Repo.FullName(), issue.Index),
+ },
+ {
+ name: "Ready Pull",
+ args: args{
+ issue: pullIssue,
+ actionType: activities_model.ActionPullRequestReadyForReview,
+ },
+ prefix: fmt.Sprintf("<%s/pulls/%d/ready/", issue.Repo.FullName(), issue.Index),
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got := createReference(tt.args.issue, tt.args.comment, tt.args.actionType)
+ if !strings.HasPrefix(got, tt.prefix) {
+ t.Errorf("createReference() = %v, want %v", got, tt.prefix)
+ }
+ })
+ }
+}
+
+func TestFromDisplayName(t *testing.T) {
+ template, err := texttmpl.New("mailFrom").Parse("{{ .DisplayName }}")
+ require.NoError(t, err)
+ setting.MailService = &setting.Mailer{FromDisplayNameFormatTemplate: template}
+ defer func() { setting.MailService = nil }()
+
+ tests := []struct {
+ userDisplayName string
+ fromDisplayName string
+ }{{
+ userDisplayName: "test",
+ fromDisplayName: "test",
+ }, {
+ userDisplayName: "Hi Its <Mee>",
+ fromDisplayName: "Hi Its <Mee>",
+ }, {
+ userDisplayName: "Æsir",
+ fromDisplayName: "=?utf-8?q?=C3=86sir?=",
+ }, {
+ userDisplayName: "new😀user",
+ fromDisplayName: "=?utf-8?q?new=F0=9F=98=80user?=",
+ }}
+
+ for _, tc := range tests {
+ t.Run(tc.userDisplayName, func(t *testing.T) {
+ user := &user_model.User{FullName: tc.userDisplayName, Name: "tmp"}
+ got := fromDisplayName(user)
+ assert.EqualValues(t, tc.fromDisplayName, got)
+ })
+ }
+
+ t.Run("template with all available vars", func(t *testing.T) {
+ template, err = texttmpl.New("mailFrom").Parse("{{ .DisplayName }} (by {{ .AppName }} on [{{ .Domain }}])")
+ require.NoError(t, err)
+ setting.MailService = &setting.Mailer{FromDisplayNameFormatTemplate: template}
+ oldAppName := setting.AppName
+ setting.AppName = "Code IT"
+ oldDomain := setting.Domain
+ setting.Domain = "code.it"
+ defer func() {
+ setting.AppName = oldAppName
+ setting.Domain = oldDomain
+ }()
+
+ assert.EqualValues(t, "Mister X (by Code IT on [code.it])", fromDisplayName(&user_model.User{FullName: "Mister X", Name: "tmp"}))
+ })
+}
diff --git a/services/mailer/mailer.go b/services/mailer/mailer.go
new file mode 100644
index 0000000..0a723f9
--- /dev/null
+++ b/services/mailer/mailer.go
@@ -0,0 +1,448 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package mailer
+
+import (
+ "bytes"
+ "context"
+ "crypto/tls"
+ "fmt"
+ "hash/fnv"
+ "io"
+ "net"
+ "net/smtp"
+ "os"
+ "os/exec"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/process"
+ "code.gitea.io/gitea/modules/queue"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/templates"
+ notify_service "code.gitea.io/gitea/services/notify"
+
+ ntlmssp "github.com/Azure/go-ntlmssp"
+ "github.com/jaytaylor/html2text"
+ "gopkg.in/gomail.v2"
+)
+
+// Message contains the mail body and log info
+type Message struct {
+ Info string // Message information for log purpose.
+ FromAddress string
+ FromDisplayName string
+ To string // Use only one recipient to prevent leaking of addresses
+ ReplyTo string
+ Subject string
+ Date time.Time
+ Body string
+ Headers map[string][]string
+}
+
+// ToMessage converts a Message to gomail.Message
+func (m *Message) ToMessage() *gomail.Message {
+ msg := gomail.NewMessage()
+ msg.SetAddressHeader("From", m.FromAddress, m.FromDisplayName)
+ msg.SetHeader("To", m.To)
+ if m.ReplyTo != "" {
+ msg.SetHeader("Reply-To", m.ReplyTo)
+ }
+ for header := range m.Headers {
+ msg.SetHeader(header, m.Headers[header]...)
+ }
+
+ if setting.MailService.SubjectPrefix != "" {
+ msg.SetHeader("Subject", setting.MailService.SubjectPrefix+" "+m.Subject)
+ } else {
+ msg.SetHeader("Subject", m.Subject)
+ }
+ msg.SetDateHeader("Date", m.Date)
+ msg.SetHeader("X-Auto-Response-Suppress", "All")
+
+ plainBody, err := html2text.FromString(m.Body)
+ if err != nil || setting.MailService.SendAsPlainText {
+ if strings.Contains(base.TruncateString(m.Body, 100), "<html>") {
+ log.Warn("Mail contains HTML but configured to send as plain text.")
+ }
+ msg.SetBody("text/plain", plainBody)
+ } else {
+ msg.SetBody("text/plain", plainBody)
+ msg.AddAlternative("text/html", m.Body)
+ }
+
+ if len(msg.GetHeader("Message-ID")) == 0 {
+ msg.SetHeader("Message-ID", m.generateAutoMessageID())
+ }
+
+ for k, v := range setting.MailService.OverrideHeader {
+ if len(msg.GetHeader(k)) != 0 {
+ log.Debug("Mailer override header '%s' as per config", k)
+ }
+ msg.SetHeader(k, v...)
+ }
+
+ return msg
+}
+
+// SetHeader sets the given header field of the message
+func (m *Message) SetHeader(field string, value ...string) {
+ m.Headers[field] = value
+}
+
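+// generateAutoMessageID derives a deterministic Message-ID from the send date,
+// recipient, subject and body, e.g.
+// <autogen-946782245000-41e8fc54a8ad3a3f@localhost> (see TestGenerateMessageID).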
+func (m *Message) generateAutoMessageID() string {
+ dateMs := m.Date.UnixNano() / 1e6
+ h := fnv.New64()
+ if len(m.To) > 0 {
+ _, _ = h.Write([]byte(m.To))
+ }
+ _, _ = h.Write([]byte(m.Subject))
+ _, _ = h.Write([]byte(m.Body))
+ return fmt.Sprintf("<autogen-%d-%016x@%s>", dateMs, h.Sum64(), setting.Domain)
+}
+
+// NewMessageFrom creates a new mail message object with a custom From header.
+func NewMessageFrom(to, fromDisplayName, fromAddress, subject, body string) *Message {
+ log.Trace("NewMessageFrom (body):\n%s", body)
+
+ return &Message{
+ FromAddress: fromAddress,
+ FromDisplayName: fromDisplayName,
+ To: to,
+ Subject: subject,
+ Date: time.Now(),
+ Body: body,
+ Headers: map[string][]string{},
+ }
+}
+
+// NewMessage creates a new mail message object with the default From header.
+func NewMessage(to, subject, body string) *Message {
+ return NewMessageFrom(to, setting.MailService.FromName, setting.MailService.FromEmail, subject, body)
+}
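+
+// A minimal usage sketch, assuming the mail service is configured and
+// NewContext has been called (the address and header are placeholders):
+//
+//	msg := NewMessage("user@example.com", "Subject", "<p>Body</p>")
+//	msg.SetHeader("X-Custom-Header", "value")
+//	SendAsync(msg) // queued for delivery by the mail queue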
+
+type loginAuth struct {
+ username, password string
+}
+
+// LoginAuth returns an smtp.Auth handler for the SMTP AUTH LOGIN mechanism
+func LoginAuth(username, password string) smtp.Auth {
+ return &loginAuth{username, password}
+}
+
+// Start begins SMTP LOGIN authentication
+func (a *loginAuth) Start(server *smtp.ServerInfo) (string, []byte, error) {
+ return "LOGIN", []byte{}, nil
+}
+
+// Next handles the next step of SMTP LOGIN authentication
+func (a *loginAuth) Next(fromServer []byte, more bool) ([]byte, error) {
+ if more {
+ switch string(fromServer) {
+ case "Username:":
+ return []byte(a.username), nil
+ case "Password:":
+ return []byte(a.password), nil
+ default:
+ return nil, fmt.Errorf("unknown fromServer: %s", string(fromServer))
+ }
+ }
+ return nil, nil
+}
+
+type ntlmAuth struct {
+ username, password, domain string
+ domainNeeded bool
+}
+
+// NtlmAuth returns an smtp.Auth handler for the SMTP AUTH NTLM mechanism
+func NtlmAuth(username, password string) smtp.Auth {
+ user, domain, domainNeeded := ntlmssp.GetDomain(username)
+ return &ntlmAuth{user, password, domain, domainNeeded}
+}
+
+// Start starts SMTP NTLM Auth
+func (a *ntlmAuth) Start(server *smtp.ServerInfo) (string, []byte, error) {
+ negotiateMessage, err := ntlmssp.NewNegotiateMessage(a.domain, "")
+ return "NTLM", negotiateMessage, err
+}
+
+// Next handles the next step of SMTP NTLM authentication
+func (a *ntlmAuth) Next(fromServer []byte, more bool) ([]byte, error) {
+ if more {
+ if len(fromServer) == 0 {
+ return nil, fmt.Errorf("ntlm ChallengeMessage is empty")
+ }
+ authenticateMessage, err := ntlmssp.ProcessChallenge(fromServer, a.username, a.password, a.domainNeeded)
+ return authenticateMessage, err
+ }
+ return nil, nil
+}
+
+// smtpSender is the SMTP mail sender
+type smtpSender struct{}
+
+// Send sends an email over SMTP
+func (s *smtpSender) Send(from string, to []string, msg io.WriterTo) error {
+ opts := setting.MailService
+
+ var network string
+ var address string
+ if opts.Protocol == "smtp+unix" {
+ network = "unix"
+ address = opts.SMTPAddr
+ } else {
+ network = "tcp"
+ address = net.JoinHostPort(opts.SMTPAddr, opts.SMTPPort)
+ }
+
+ conn, err := net.Dial(network, address)
+ if err != nil {
+ return fmt.Errorf("failed to establish network connection to SMTP server: %w", err)
+ }
+ defer conn.Close()
+
+ var tlsconfig *tls.Config
+ if opts.Protocol == "smtps" || opts.Protocol == "smtp+starttls" {
+ tlsconfig = &tls.Config{
+ InsecureSkipVerify: opts.ForceTrustServerCert,
+ ServerName: opts.SMTPAddr,
+ }
+
+ if opts.UseClientCert {
+ cert, err := tls.LoadX509KeyPair(opts.ClientCertFile, opts.ClientKeyFile)
+ if err != nil {
+ return fmt.Errorf("could not load SMTP client certificate: %w", err)
+ }
+ tlsconfig.Certificates = []tls.Certificate{cert}
+ }
+ }
+
+ if opts.Protocol == "smtps" {
+ conn = tls.Client(conn, tlsconfig)
+ }
+
+ host := "localhost"
+ if opts.Protocol == "smtp+unix" {
+ host = opts.SMTPAddr
+ }
+ client, err := smtp.NewClient(conn, host)
+ if err != nil {
+ return fmt.Errorf("could not initiate SMTP session: %w", err)
+ }
+
+ if opts.EnableHelo {
+ hostname := opts.HeloHostname
+ if len(hostname) == 0 {
+ hostname, err = os.Hostname()
+ if err != nil {
+ return fmt.Errorf("could not retrieve system hostname: %w", err)
+ }
+ }
+
+ if err = client.Hello(hostname); err != nil {
+ return fmt.Errorf("failed to issue HELO command: %w", err)
+ }
+ }
+
+ if opts.Protocol == "smtp+starttls" {
+ hasStartTLS, _ := client.Extension("STARTTLS")
+ if hasStartTLS {
+ if err = client.StartTLS(tlsconfig); err != nil {
+ return fmt.Errorf("failed to start TLS connection: %w", err)
+ }
+ } else {
+ log.Warn("StartTLS requested, but SMTP server does not support it; falling back to regular SMTP")
+ }
+ }
+
+ canAuth, options := client.Extension("AUTH")
+ if len(opts.User) > 0 {
+ if !canAuth {
+ return fmt.Errorf("SMTP server does not support AUTH, but credentials provided")
+ }
+
+ var auth smtp.Auth
+
+ if strings.Contains(options, "CRAM-MD5") {
+ auth = smtp.CRAMMD5Auth(opts.User, opts.Passwd)
+ } else if strings.Contains(options, "PLAIN") {
+ auth = smtp.PlainAuth("", opts.User, opts.Passwd, host)
+ } else if strings.Contains(options, "LOGIN") {
+ // Patch for AUTH LOGIN
+ auth = LoginAuth(opts.User, opts.Passwd)
+ } else if strings.Contains(options, "NTLM") {
+ auth = NtlmAuth(opts.User, opts.Passwd)
+ }
+
+ if auth != nil {
+ if err = client.Auth(auth); err != nil {
+ return fmt.Errorf("failed to authenticate SMTP: %w", err)
+ }
+ }
+ }
+
+ if opts.OverrideEnvelopeFrom {
+ if err = client.Mail(opts.EnvelopeFrom); err != nil {
+ return fmt.Errorf("failed to issue MAIL command: %w", err)
+ }
+ } else {
+ if err = client.Mail(from); err != nil {
+ return fmt.Errorf("failed to issue MAIL command: %w", err)
+ }
+ }
+
+ for _, rec := range to {
+ if err = client.Rcpt(rec); err != nil {
+ return fmt.Errorf("failed to issue RCPT command: %w", err)
+ }
+ }
+
+ w, err := client.Data()
+ if err != nil {
+ return fmt.Errorf("failed to issue DATA command: %w", err)
+ } else if _, err = msg.WriteTo(w); err != nil {
+ return fmt.Errorf("SMTP write failed: %w", err)
+ } else if err = w.Close(); err != nil {
+ return fmt.Errorf("SMTP close failed: %w", err)
+ }
+
+ return client.Quit()
+}
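+
+// For reference, a [mailer] configuration exercising the STARTTLS path above
+// could look like this (a sketch; host and credentials are placeholders):
+//
+//	[mailer]
+//	ENABLED   = true
+//	PROTOCOL  = smtp+starttls
+//	SMTP_ADDR = mail.example.com
+//	SMTP_PORT = 587
+//	FROM      = forgejo@example.com
+//	USER      = forgejo@example.com
+//	PASSWD    = secret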
+
+// sendmailSender is the sendmail mail sender
+type sendmailSender struct{}
+
+// Send sends an email through the local sendmail binary
+func (s *sendmailSender) Send(from string, to []string, msg io.WriterTo) error {
+ var err error
+ var closeError error
+ var waitError error
+
+ envelopeFrom := from
+ if setting.MailService.OverrideEnvelopeFrom {
+ envelopeFrom = setting.MailService.EnvelopeFrom
+ }
+
+ args := []string{"-f", envelopeFrom, "-i"}
+ args = append(args, setting.MailService.SendmailArgs...)
+ args = append(args, to...)
+ log.Trace("Sending with: %s %v", setting.MailService.SendmailPath, args)
+
+ desc := fmt.Sprintf("SendMail: %s %v", setting.MailService.SendmailPath, args)
+
+ ctx, _, finished := process.GetManager().AddContextTimeout(graceful.GetManager().HammerContext(), setting.MailService.SendmailTimeout, desc)
+ defer finished()
+
+ cmd := exec.CommandContext(ctx, setting.MailService.SendmailPath, args...)
+ pipe, err := cmd.StdinPipe()
+ if err != nil {
+ return err
+ }
+ process.SetSysProcAttribute(cmd)
+
+ if err = cmd.Start(); err != nil {
+ _ = pipe.Close()
+ return err
+ }
+
+ if setting.MailService.SendmailConvertCRLF {
+ buf := &strings.Builder{}
+ _, err = msg.WriteTo(buf)
+ if err == nil {
+ _, err = strings.NewReplacer("\r\n", "\n").WriteString(pipe, buf.String())
+ }
+ } else {
+ _, err = msg.WriteTo(pipe)
+ }
+
+ // we MUST close the pipe or sendmail will hang waiting for more of the message
+ // Also we should wait on our sendmail command even if something fails
+ closeError = pipe.Close()
+ waitError = cmd.Wait()
+ if err != nil {
+ return err
+ } else if closeError != nil {
+ return closeError
+ }
+ return waitError
+}
+
+type dummySender struct{}
+
+func (s *dummySender) Send(from string, to []string, msg io.WriterTo) error {
+ buf := bytes.Buffer{}
+ if _, err := msg.WriteTo(&buf); err != nil {
+ return err
+ }
+ log.Info("Mail From: %s To: %v Body: %s", from, to, buf.String())
+ return nil
+}
+
+var mailQueue *queue.WorkerPoolQueue[*Message]
+
+// Sender sender for sending mail synchronously
+var Sender gomail.Sender
+
+// NewContext starts the mail queue service
+func NewContext(ctx context.Context) {
+	// mailQueue must be checked for nil because during a reinstall (the user had
+	// installed before but switched the install lock off) this function is called
+	// again while the mail queue is already processing tasks, which would produce
+	// a race condition.
+ if setting.MailService == nil || mailQueue != nil {
+ return
+ }
+
+ if setting.Service.EnableNotifyMail {
+ notify_service.RegisterNotifier(NewNotifier())
+ }
+
+ switch setting.MailService.Protocol {
+ case "sendmail":
+ Sender = &sendmailSender{}
+ case "dummy":
+ Sender = &dummySender{}
+ default:
+ Sender = &smtpSender{}
+ }
+
+ subjectTemplates, bodyTemplates = templates.Mailer(ctx)
+
+ mailQueue = queue.CreateSimpleQueue(graceful.GetManager().ShutdownContext(), "mail", func(items ...*Message) []*Message {
+ for _, msg := range items {
+ gomailMsg := msg.ToMessage()
+ log.Trace("New e-mail sending request %s: %s", gomailMsg.GetHeader("To"), msg.Info)
+ if err := gomail.Send(Sender, gomailMsg); err != nil {
+ log.Error("Failed to send emails %s: %s - %v", gomailMsg.GetHeader("To"), msg.Info, err)
+ } else {
+ log.Trace("E-mails sent %s: %s", gomailMsg.GetHeader("To"), msg.Info)
+ }
+ }
+ return nil
+ })
+ if mailQueue == nil {
+ log.Fatal("Unable to create mail queue")
+ }
+ go graceful.GetManager().RunWithCancel(mailQueue)
+}
+
+// SendAsync sends emails asynchronously; it is a variable so tests can mock it
+var SendAsync = sendAsync
+
+func sendAsync(msgs ...*Message) {
+ if setting.MailService == nil {
+ log.Error("Mailer: SendAsync is being invoked but mail service hasn't been initialized")
+ return
+ }
+
+ go func() {
+ for _, msg := range msgs {
+ _ = mailQueue.Push(msg)
+ }
+ }()
+}
diff --git a/services/mailer/mailer_test.go b/services/mailer/mailer_test.go
new file mode 100644
index 0000000..045701f
--- /dev/null
+++ b/services/mailer/mailer_test.go
@@ -0,0 +1,128 @@
+// Copyright 2021 The Gogs Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package mailer
+
+import (
+ "strings"
+ "testing"
+ "time"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/test"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestGenerateMessageID(t *testing.T) {
+ defer test.MockVariableValue(&setting.MailService, &setting.Mailer{
+ From: "test@gitea.com",
+ })()
+ defer test.MockVariableValue(&setting.Domain, "localhost")()
+
+ date := time.Date(2000, 1, 2, 3, 4, 5, 6, time.UTC)
+ m := NewMessageFrom("", "display-name", "from-address", "subject", "body")
+ m.Date = date
+ gm := m.ToMessage()
+ assert.Equal(t, "<autogen-946782245000-41e8fc54a8ad3a3f@localhost>", gm.GetHeader("Message-ID")[0])
+
+ m = NewMessageFrom("a@b.com", "display-name", "from-address", "subject", "body")
+ m.Date = date
+ gm = m.ToMessage()
+ assert.Equal(t, "<autogen-946782245000-cc88ce3cfe9bd04f@localhost>", gm.GetHeader("Message-ID")[0])
+
+ m = NewMessageFrom("a@b.com", "display-name", "from-address", "subject", "body")
+ m.SetHeader("Message-ID", "<msg-d@domain.com>")
+ gm = m.ToMessage()
+ assert.Equal(t, "<msg-d@domain.com>", gm.GetHeader("Message-ID")[0])
+}
+
+func TestGenerateMessageIDForRelease(t *testing.T) {
+ defer test.MockVariableValue(&setting.Domain, "localhost")()
+
+ rel := repo_model.Release{
+ ID: 42,
+ Repo: &repo_model.Repository{
+ OwnerName: "test",
+ Name: "tag-test",
+ },
+ }
+ m := createMessageIDForRelease(&rel)
+ assert.Equal(t, "<test/tag-test/releases/42@localhost>", m)
+}
+
+func TestToMessage(t *testing.T) {
+ defer test.MockVariableValue(&setting.MailService, &setting.Mailer{
+ From: "test@gitea.com",
+ })()
+ defer test.MockVariableValue(&setting.Domain, "localhost")()
+
+ m1 := Message{
+ Info: "info",
+ FromAddress: "test@gitea.com",
+ FromDisplayName: "Test Gitea",
+ To: "a@b.com",
+ Subject: "Issue X Closed",
+ Body: "Some Issue got closed by Y-Man",
+ }
+
+ buf := &strings.Builder{}
+ _, err := m1.ToMessage().WriteTo(buf)
+ require.NoError(t, err)
+ header, _ := extractMailHeaderAndContent(t, buf.String())
+ assert.EqualValues(t, map[string]string{
+ "Content-Type": "multipart/alternative;",
+ "Date": "Mon, 01 Jan 0001 00:00:00 +0000",
+ "From": "\"Test Gitea\" <test@gitea.com>",
+ "Message-ID": "<autogen--6795364578871-69c000786adc60dc@localhost>",
+ "Mime-Version": "1.0",
+ "Subject": "Issue X Closed",
+ "To": "a@b.com",
+ "X-Auto-Response-Suppress": "All",
+ }, header)
+
+ setting.MailService.OverrideHeader = map[string][]string{
+ "Message-ID": {""}, // delete message id
+		"Auto-Submitted": {"auto-generated"}, // suppress auto-replies
+ }
+
+ buf = &strings.Builder{}
+ _, err = m1.ToMessage().WriteTo(buf)
+ require.NoError(t, err)
+ header, _ = extractMailHeaderAndContent(t, buf.String())
+ assert.EqualValues(t, map[string]string{
+ "Content-Type": "multipart/alternative;",
+ "Date": "Mon, 01 Jan 0001 00:00:00 +0000",
+ "From": "\"Test Gitea\" <test@gitea.com>",
+ "Message-ID": "",
+ "Mime-Version": "1.0",
+ "Subject": "Issue X Closed",
+ "To": "a@b.com",
+ "X-Auto-Response-Suppress": "All",
+ "Auto-Submitted": "auto-generated",
+ }, header)
+}
+
+func extractMailHeaderAndContent(t *testing.T, mail string) (map[string]string, string) {
+ header := make(map[string]string)
+
+ parts := strings.SplitN(mail, "boundary=", 2)
+ if !assert.Len(t, parts, 2) {
+ return nil, ""
+ }
+ content := strings.TrimSpace("boundary=" + parts[1])
+
+ hParts := strings.Split(parts[0], "\n")
+
+	for _, hPart := range hParts {
+		parts := strings.SplitN(hPart, ":", 2)
+		hk := strings.TrimSpace(parts[0])
+		// guard against lines without a colon (e.g. folded header continuations),
+		// which would otherwise panic on parts[1]
+		if hk != "" && len(parts) == 2 {
+			header[hk] = strings.TrimSpace(parts[1])
+		}
+	}
+
+ return header, content
+}
diff --git a/services/mailer/main_test.go b/services/mailer/main_test.go
new file mode 100644
index 0000000..908976e
--- /dev/null
+++ b/services/mailer/main_test.go
@@ -0,0 +1,48 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package mailer
+
+import (
+ "context"
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/templates"
+ "code.gitea.io/gitea/modules/test"
+ "code.gitea.io/gitea/modules/translation"
+
+ _ "code.gitea.io/gitea/models/actions"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
+
+func AssertTranslatedLocale(t *testing.T, message string, prefixes ...string) {
+ t.Helper()
+ for _, prefix := range prefixes {
+ assert.NotContains(t, message, prefix, "there is an untranslated locale prefix")
+ }
+}
+
+func MockMailSettings(send func(msgs ...*Message)) func() {
+ translation.InitLocales(context.Background())
+ subjectTemplates, bodyTemplates = templates.Mailer(context.Background())
+ mailService := setting.Mailer{
+ From: "test@gitea.com",
+ }
+ cleanups := []func(){
+ test.MockVariableValue(&setting.MailService, &mailService),
+ test.MockVariableValue(&setting.Domain, "localhost"),
+ test.MockVariableValue(&SendAsync, send),
+ }
+ return func() {
+ for _, cleanup := range cleanups {
+ cleanup()
+ }
+ }
+}
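+
+// Typical use in a test (a sketch): pass nil to stub out sending entirely, or
+// capture outgoing messages with a closure.
+//
+//	defer MockMailSettings(nil)()
+//
+//	var sent []*Message
+//	defer MockMailSettings(func(msgs ...*Message) { sent = append(sent, msgs...) })()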
diff --git a/services/mailer/notify.go b/services/mailer/notify.go
new file mode 100644
index 0000000..54ab80a
--- /dev/null
+++ b/services/mailer/notify.go
@@ -0,0 +1,208 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package mailer
+
+import (
+ "context"
+ "fmt"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+type mailNotifier struct {
+ notify_service.NullNotifier
+}
+
+var _ notify_service.Notifier = &mailNotifier{}
+
+// NewNotifier creates a new mail notifier
+func NewNotifier() notify_service.Notifier {
+ return &mailNotifier{}
+}
+
+func (m *mailNotifier) CreateIssueComment(ctx context.Context, doer *user_model.User, repo *repo_model.Repository,
+ issue *issues_model.Issue, comment *issues_model.Comment, mentions []*user_model.User,
+) {
+	var act activities_model.ActionType
+	switch comment.Type {
+	case issues_model.CommentTypeClose:
+		act = activities_model.ActionCloseIssue
+	case issues_model.CommentTypeReopen:
+		act = activities_model.ActionReopenIssue
+	case issues_model.CommentTypeComment, issues_model.CommentTypeCode:
+		act = activities_model.ActionCommentIssue
+	case issues_model.CommentTypePullRequestPush:
+		act = 0 // no dedicated action type for push-commits comments
+	}
+
+ if err := MailParticipantsComment(ctx, comment, act, issue, mentions); err != nil {
+ log.Error("MailParticipantsComment: %v", err)
+ }
+}
+
+func (m *mailNotifier) NewIssue(ctx context.Context, issue *issues_model.Issue, mentions []*user_model.User) {
+ if err := MailParticipants(ctx, issue, issue.Poster, activities_model.ActionCreateIssue, mentions); err != nil {
+ log.Error("MailParticipants: %v", err)
+ }
+}
+
+func (m *mailNotifier) IssueChangeStatus(ctx context.Context, doer *user_model.User, commitID string, issue *issues_model.Issue, actionComment *issues_model.Comment, isClosed bool) {
+ var actionType activities_model.ActionType
+ if issue.IsPull {
+ if isClosed {
+ actionType = activities_model.ActionClosePullRequest
+ } else {
+ actionType = activities_model.ActionReopenPullRequest
+ }
+ } else {
+ if isClosed {
+ actionType = activities_model.ActionCloseIssue
+ } else {
+ actionType = activities_model.ActionReopenIssue
+ }
+ }
+
+ if err := MailParticipants(ctx, issue, doer, actionType, nil); err != nil {
+ log.Error("MailParticipants: %v", err)
+ }
+}
+
+func (m *mailNotifier) IssueChangeTitle(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldTitle string) {
+ if err := issue.LoadPullRequest(ctx); err != nil {
+ log.Error("issue.LoadPullRequest: %v", err)
+ return
+ }
+ if issue.IsPull && issues_model.HasWorkInProgressPrefix(oldTitle) && !issue.PullRequest.IsWorkInProgress(ctx) {
+ if err := MailParticipants(ctx, issue, doer, activities_model.ActionPullRequestReadyForReview, nil); err != nil {
+ log.Error("MailParticipants: %v", err)
+ }
+ }
+}
+
+func (m *mailNotifier) NewPullRequest(ctx context.Context, pr *issues_model.PullRequest, mentions []*user_model.User) {
+ if err := MailParticipants(ctx, pr.Issue, pr.Issue.Poster, activities_model.ActionCreatePullRequest, mentions); err != nil {
+ log.Error("MailParticipants: %v", err)
+ }
+}
+
+func (m *mailNotifier) PullRequestReview(ctx context.Context, pr *issues_model.PullRequest, r *issues_model.Review, comment *issues_model.Comment, mentions []*user_model.User) {
+	var act activities_model.ActionType
+	switch comment.Type {
+	case issues_model.CommentTypeClose:
+		act = activities_model.ActionCloseIssue
+	case issues_model.CommentTypeReopen:
+		act = activities_model.ActionReopenIssue
+	case issues_model.CommentTypeComment:
+		act = activities_model.ActionCommentPull
+	}
+ if err := MailParticipantsComment(ctx, comment, act, pr.Issue, mentions); err != nil {
+ log.Error("MailParticipantsComment: %v", err)
+ }
+}
+
+func (m *mailNotifier) PullRequestCodeComment(ctx context.Context, pr *issues_model.PullRequest, comment *issues_model.Comment, mentions []*user_model.User) {
+ if err := MailMentionsComment(ctx, pr, comment, mentions); err != nil {
+ log.Error("MailMentionsComment: %v", err)
+ }
+}
+
+func (m *mailNotifier) IssueChangeAssignee(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, assignee *user_model.User, removed bool, comment *issues_model.Comment) {
+	// mail is only sent to newly added assignees, never for a self-assignment
+ if !removed && doer.ID != assignee.ID && assignee.EmailNotificationsPreference != user_model.EmailNotificationsDisabled {
+ ct := fmt.Sprintf("Assigned #%d.", issue.Index)
+ if err := SendIssueAssignedMail(ctx, issue, doer, ct, comment, []*user_model.User{assignee}); err != nil {
+ log.Error("Error in SendIssueAssignedMail for issue[%d] to assignee[%d]: %v", issue.ID, assignee.ID, err)
+ }
+ }
+}
+
+func (m *mailNotifier) PullRequestReviewRequest(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, reviewer *user_model.User, isRequest bool, comment *issues_model.Comment) {
+ if isRequest && doer.ID != reviewer.ID && reviewer.EmailNotificationsPreference != user_model.EmailNotificationsDisabled {
+ ct := fmt.Sprintf("Requested to review %s.", issue.HTMLURL())
+ if err := SendIssueAssignedMail(ctx, issue, doer, ct, comment, []*user_model.User{reviewer}); err != nil {
+ log.Error("Error in SendIssueAssignedMail for issue[%d] to reviewer[%d]: %v", issue.ID, reviewer.ID, err)
+ }
+ }
+}
+
+func (m *mailNotifier) MergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("LoadIssue: %v", err)
+ return
+ }
+ if err := MailParticipants(ctx, pr.Issue, doer, activities_model.ActionMergePullRequest, nil); err != nil {
+ log.Error("MailParticipants: %v", err)
+ }
+}
+
+func (m *mailNotifier) AutoMergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("pr.LoadIssue: %v", err)
+ return
+ }
+ if err := MailParticipants(ctx, pr.Issue, doer, activities_model.ActionAutoMergePullRequest, nil); err != nil {
+ log.Error("MailParticipants: %v", err)
+ }
+}
+
+func (m *mailNotifier) PullRequestPushCommits(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest, comment *issues_model.Comment) {
+ var err error
+ if err = comment.LoadIssue(ctx); err != nil {
+ log.Error("comment.LoadIssue: %v", err)
+ return
+ }
+ if err = comment.Issue.LoadRepo(ctx); err != nil {
+ log.Error("comment.Issue.LoadRepo: %v", err)
+ return
+ }
+ if err = comment.Issue.LoadPullRequest(ctx); err != nil {
+ log.Error("comment.Issue.LoadPullRequest: %v", err)
+ return
+ }
+ if err = comment.Issue.PullRequest.LoadBaseRepo(ctx); err != nil {
+ log.Error("comment.Issue.PullRequest.LoadBaseRepo: %v", err)
+ return
+ }
+ if err := comment.LoadPushCommits(ctx); err != nil {
+ log.Error("comment.LoadPushCommits: %v", err)
+ }
+ m.CreateIssueComment(ctx, doer, comment.Issue.Repo, comment.Issue, comment, nil)
+}
+
+func (m *mailNotifier) PullReviewDismiss(ctx context.Context, doer *user_model.User, review *issues_model.Review, comment *issues_model.Comment) {
+ if err := comment.Review.LoadReviewer(ctx); err != nil {
+ log.Error("Error in PullReviewDismiss while loading reviewer for issue[%d], review[%d] and reviewer[%d]: %v", review.Issue.ID, comment.Review.ID, comment.Review.ReviewerID, err)
+ }
+ if err := MailParticipantsComment(ctx, comment, activities_model.ActionPullReviewDismissed, review.Issue, nil); err != nil {
+ log.Error("MailParticipantsComment: %v", err)
+ }
+}
+
+func (m *mailNotifier) NewRelease(ctx context.Context, rel *repo_model.Release) {
+ if err := rel.LoadAttributes(ctx); err != nil {
+ log.Error("LoadAttributes: %v", err)
+ return
+ }
+
+ if rel.IsDraft || rel.IsPrerelease {
+ return
+ }
+
+ MailNewRelease(ctx, rel)
+}
+
+func (m *mailNotifier) RepoPendingTransfer(ctx context.Context, doer, newOwner *user_model.User, repo *repo_model.Repository) {
+ if err := SendRepoTransferNotifyMail(ctx, doer, newOwner, repo); err != nil {
+ log.Error("SendRepoTransferNotifyMail: %v", err)
+ }
+}
+
+func (m *mailNotifier) NewUserSignUp(ctx context.Context, newUser *user_model.User) {
+ MailNewUser(ctx, newUser)
+}
diff --git a/services/mailer/token/token.go b/services/mailer/token/token.go
new file mode 100644
index 0000000..1a52bce
--- /dev/null
+++ b/services/mailer/token/token.go
@@ -0,0 +1,138 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package token
+
+import (
+ "context"
+ crypto_hmac "crypto/hmac"
+ "crypto/sha256"
+ "encoding/base32"
+ "fmt"
+ "time"
+
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/util"
+)
+
+// A token is a verifiable container describing an action.
+//
+// A token has a dynamic length depending on the contained data and has the following structure:
+// | Token Version | User ID | HMAC | Payload |
+//
+// The payload is verifiable by the generated HMAC using the user secret. It contains:
+// | Timestamp | Action/Handler Type | Action/Handler Data |
+//
+//
+// Version changelog
+//
+// v1 -> v2:
+// Use 128 instead of 80 bits of the HMAC-SHA256 output.
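+//
+// A round-trip sketch (assuming a valid *user_model.User; the payload is a
+// placeholder):
+//
+//	tok, err := CreateToken(ReplyHandlerType, user, []byte("comment/123"))
+//	...
+//	ht, user, payload, err := ExtractToken(ctx, tok)
+//	// ht == ReplyHandlerType, payload == []byte("comment/123")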
+
+const (
+ tokenVersion1 byte = 1
+ tokenVersion2 byte = 2
+ tokenLifetimeInYears int = 1
+)
+
+type HandlerType byte
+
+const (
+ UnknownHandlerType HandlerType = iota
+ ReplyHandlerType
+ UnsubscribeHandlerType
+)
+
+var encodingWithoutPadding = base32.StdEncoding.WithPadding(base32.NoPadding)
+
+type ErrToken struct {
+ context string
+}
+
+func (err *ErrToken) Error() string {
+ return "invalid email token: " + err.context
+}
+
+func (err *ErrToken) Unwrap() error {
+ return util.ErrInvalidArgument
+}
+
+// CreateToken creates a token for the action/user tuple
+func CreateToken(ht HandlerType, user *user_model.User, data []byte) (string, error) {
+ payload, err := util.PackData(
+ time.Now().AddDate(tokenLifetimeInYears, 0, 0).Unix(),
+ ht,
+ data,
+ )
+ if err != nil {
+ return "", err
+ }
+
+ packagedData, err := util.PackData(
+ user.ID,
+ generateHmac([]byte(user.Rands), payload),
+ payload,
+ )
+ if err != nil {
+ return "", err
+ }
+
+ return encodingWithoutPadding.EncodeToString(append([]byte{tokenVersion2}, packagedData...)), nil
+}
+
+// ExtractToken extracts the action/user tuple from the token and verifies the content
+func ExtractToken(ctx context.Context, token string) (HandlerType, *user_model.User, []byte, error) {
+ data, err := encodingWithoutPadding.DecodeString(token)
+ if err != nil {
+ return UnknownHandlerType, nil, nil, err
+ }
+
+ if len(data) < 1 {
+ return UnknownHandlerType, nil, nil, &ErrToken{"no data"}
+ }
+
+ if data[0] != tokenVersion2 {
+ return UnknownHandlerType, nil, nil, &ErrToken{fmt.Sprintf("unsupported token version: %v", data[0])}
+ }
+
+ var userID int64
+ var hmac []byte
+ var payload []byte
+ if err := util.UnpackData(data[1:], &userID, &hmac, &payload); err != nil {
+ return UnknownHandlerType, nil, nil, err
+ }
+
+ user, err := user_model.GetUserByID(ctx, userID)
+ if err != nil {
+ return UnknownHandlerType, nil, nil, err
+ }
+
+ if !crypto_hmac.Equal(hmac, generateHmac([]byte(user.Rands), payload)) {
+ return UnknownHandlerType, nil, nil, &ErrToken{"verification failed"}
+ }
+
+ var expiresUnix int64
+ var handlerType HandlerType
+ var innerPayload []byte
+ if err := util.UnpackData(payload, &expiresUnix, &handlerType, &innerPayload); err != nil {
+ return UnknownHandlerType, nil, nil, err
+ }
+
+ if time.Unix(expiresUnix, 0).Before(time.Now()) {
+ return UnknownHandlerType, nil, nil, &ErrToken{"token expired"}
+ }
+
+ return handlerType, user, innerPayload, nil
+}
+
+// generateHmac creates a truncated HMAC for the given payload
+func generateHmac(secret, payload []byte) []byte {
+ mac := crypto_hmac.New(sha256.New, secret)
+ mac.Write(payload)
+ hmac := mac.Sum(nil)
+
+ // RFC 2104 section 5 recommends keeping at least max(80, hash_len/2)
+ // of the leftmost bits when truncating an HMAC output.
+ // For SHA-256 this works out to 128 of the leftmost bits.
+ return hmac[:16]
+}
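The two exported functions above form a round-trip: CreateToken packs the expiry, handler type, and payload, signs them with the per-user secret (user.Rands), and base32-encodes the result; ExtractToken reverses each step and rejects anything that fails. A minimal sketch of that round-trip, assuming a user already loaded from the database (the helper name and payload are illustrative):

package main

import (
	"context"
	"fmt"

	user_model "code.gitea.io/gitea/models/user"
	"code.gitea.io/gitea/services/mailer/token"
)

// roundTrip sketches the intended CreateToken/ExtractToken pairing. The
// user must come from the database so that user.Rands (the HMAC secret)
// matches on both sides.
func roundTrip(ctx context.Context, user *user_model.User) error {
	payload := []byte("reply-to-comment-12345") // hypothetical handler data

	tok, err := token.CreateToken(token.ReplyHandlerType, user, payload)
	if err != nil {
		return err
	}

	// ExtractToken re-reads the user by ID and recomputes the HMAC, so a
	// rotated user.Rands or an expired timestamp both invalidate the token.
	ht, tokenUser, data, err := token.ExtractToken(ctx, tok)
	if err != nil {
		return err
	}
	fmt.Printf("handler=%v user=%s data=%s\n", ht, tokenUser.Name, data)
	return nil
}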
diff --git a/services/markup/main_test.go b/services/markup/main_test.go
new file mode 100644
index 0000000..89fe3e7
--- /dev/null
+++ b/services/markup/main_test.go
@@ -0,0 +1,16 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package markup
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m, &unittest.TestOptions{
+ FixtureFiles: []string{"user.yml"},
+ })
+}
diff --git a/services/markup/processorhelper.go b/services/markup/processorhelper.go
new file mode 100644
index 0000000..40bf1d6
--- /dev/null
+++ b/services/markup/processorhelper.go
@@ -0,0 +1,87 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package markup
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/perm/access"
+ "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
+ gitea_context "code.gitea.io/gitea/services/context"
+ file_service "code.gitea.io/gitea/services/repository/files"
+)
+
+func ProcessorHelper() *markup.ProcessorHelper {
+ return &markup.ProcessorHelper{
+ ElementDir: "auto", // set dir="auto" for tags that need it (e.g. <p>, <h1>-<h6>)
+ IsUsernameMentionable: func(ctx context.Context, username string) bool {
+ mentionedUser, err := user.GetUserByName(ctx, username)
+ if err != nil {
+ return false
+ }
+
+ giteaCtx, ok := ctx.(*gitea_context.Context)
+ if !ok {
+ // with a general context, fall back to the mentioned user's visibility
+ return mentionedUser.Visibility.IsPublic()
+ }
+
+ // with a gitea web context, check both the user's visibility and the viewer's permission
+ return user.IsUserVisibleToViewer(giteaCtx, mentionedUser, giteaCtx.Doer)
+ },
+ GetRepoFileBlob: func(ctx context.Context, ownerName, repoName, commitSha, filePath string, language *string) (*git.Blob, error) {
+ repo, err := repo.GetRepositoryByOwnerAndName(ctx, ownerName, repoName)
+ if err != nil {
+ return nil, err
+ }
+
+ var user *user.User
+
+ giteaCtx, ok := ctx.(*gitea_context.Context)
+ if ok {
+ user = giteaCtx.Doer
+ }
+
+ perms, err := access.GetUserRepoPermission(ctx, repo, user)
+ if err != nil {
+ return nil, err
+ }
+ if !perms.CanRead(unit.TypeCode) {
+ return nil, fmt.Errorf("cannot access repository code")
+ }
+
+ gitRepo, err := gitrepo.OpenRepository(ctx, repo)
+ if err != nil {
+ return nil, err
+ }
+ defer gitRepo.Close()
+
+ commit, err := gitRepo.GetCommit(commitSha)
+ if err != nil {
+ return nil, err
+ }
+
+ if language != nil {
+ *language, err = file_service.TryGetContentLanguage(gitRepo, commitSha, filePath)
+ if err != nil {
+ log.Error("Unable to get file language for %-v:%s. Error: %v", repo, filePath, err)
+ }
+ }
+
+ blob, err := commit.GetBlobByPath(filePath)
+ if err != nil {
+ return nil, err
+ }
+
+ return blob, nil
+ },
+ }
+}
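GetRepoFileBlob chains four checks before returning anything: repository lookup, permission check against the (possibly anonymous) doer, commit resolution, and finally the blob lookup, with language detection as an optional side channel. A hedged sketch of calling it; the owner, repo, SHA, and path are placeholders, and the call needs an initialized database plus the repository on disk:

package markup

import (
	"context"
	"fmt"
)

// fetchReadme is illustrative only. Passing a non-nil language pointer
// asks the helper to also resolve the file's content language; errors in
// that step are logged but do not fail the blob lookup.
func fetchReadme(ctx context.Context) error {
	var lang string
	blob, err := ProcessorHelper().GetRepoFileBlob(ctx, "owner", "repo",
		"0123456789abcdef0123456789abcdef01234567", "README.md", &lang)
	if err != nil {
		return err
	}
	fmt.Println(blob.Name(), lang)
	return nil
}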
diff --git a/services/markup/processorhelper_test.go b/services/markup/processorhelper_test.go
new file mode 100644
index 0000000..fafde74
--- /dev/null
+++ b/services/markup/processorhelper_test.go
@@ -0,0 +1,55 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package markup
+
+import (
+ "context"
+ "net/http"
+ "net/http/httptest"
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/models/user"
+ gitea_context "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/contexttest"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestProcessorHelper(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ userPublic := "user1"
+ userPrivate := "user31"
+ userLimited := "user33"
+ userNoSuch := "no-such-user"
+
+ unittest.AssertCount(t, &user.User{Name: userPublic}, 1)
+ unittest.AssertCount(t, &user.User{Name: userPrivate}, 1)
+ unittest.AssertCount(t, &user.User{Name: userLimited}, 1)
+ unittest.AssertCount(t, &user.User{Name: userNoSuch}, 0)
+
+ // when using general context, use user's visibility to check
+ assert.True(t, ProcessorHelper().IsUsernameMentionable(context.Background(), userPublic))
+ assert.False(t, ProcessorHelper().IsUsernameMentionable(context.Background(), userLimited))
+ assert.False(t, ProcessorHelper().IsUsernameMentionable(context.Background(), userPrivate))
+ assert.False(t, ProcessorHelper().IsUsernameMentionable(context.Background(), userNoSuch))
+
+ // when using web context, use user.IsUserVisibleToViewer to check
+ req, err := http.NewRequest("GET", "/", nil)
+ require.NoError(t, err)
+ base, baseCleanUp := gitea_context.NewBaseContext(httptest.NewRecorder(), req)
+ defer baseCleanUp()
+ giteaCtx := gitea_context.NewWebContext(base, &contexttest.MockRender{}, nil)
+
+ assert.True(t, ProcessorHelper().IsUsernameMentionable(giteaCtx, userPublic))
+ assert.False(t, ProcessorHelper().IsUsernameMentionable(giteaCtx, userPrivate))
+
+ giteaCtx.Doer, err = user.GetUserByName(db.DefaultContext, userPrivate)
+ require.NoError(t, err)
+ assert.True(t, ProcessorHelper().IsUsernameMentionable(giteaCtx, userPublic))
+ assert.True(t, ProcessorHelper().IsUsernameMentionable(giteaCtx, userPrivate))
+}
diff --git a/services/migrations/codebase.go b/services/migrations/codebase.go
new file mode 100644
index 0000000..492fc90
--- /dev/null
+++ b/services/migrations/codebase.go
@@ -0,0 +1,651 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "context"
+ "encoding/xml"
+ "fmt"
+ "net/http"
+ "net/url"
+ "strconv"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/modules/log"
+ base "code.gitea.io/gitea/modules/migration"
+ "code.gitea.io/gitea/modules/proxy"
+ "code.gitea.io/gitea/modules/structs"
+)
+
+var (
+ _ base.Downloader = &CodebaseDownloader{}
+ _ base.DownloaderFactory = &CodebaseDownloaderFactory{}
+)
+
+func init() {
+ RegisterDownloaderFactory(&CodebaseDownloaderFactory{})
+}
+
+// CodebaseDownloaderFactory defines a downloader factory
+type CodebaseDownloaderFactory struct{}
+
+// New returns a downloader for this factory according to the given MigrateOptions
+func (f *CodebaseDownloaderFactory) New(ctx context.Context, opts base.MigrateOptions) (base.Downloader, error) {
+ u, err := url.Parse(opts.CloneAddr)
+ if err != nil {
+ return nil, err
+ }
+ u.User = nil
+
+ fields := strings.Split(strings.Trim(u.Path, "/"), "/")
+ if len(fields) != 2 {
+ return nil, fmt.Errorf("invalid path: %s", u.Path)
+ }
+ project := fields[0]
+ repoName := strings.TrimSuffix(fields[1], ".git")
+
+ log.Trace("Create Codebase downloader. BaseURL: %v RepoName: %s", u, repoName)
+
+ return NewCodebaseDownloader(ctx, u, project, repoName, opts.AuthUsername, opts.AuthPassword), nil
+}
+
+// GitServiceType returns the type of git service
+func (f *CodebaseDownloaderFactory) GitServiceType() structs.GitServiceType {
+ return structs.CodebaseService
+}
+
+type codebaseUser struct {
+ ID int64 `json:"id"`
+ Name string `json:"name"`
+ Email string `json:"email"`
+}
+
+// CodebaseDownloader implements a Downloader interface to get repository information
+// from Codebase
+type CodebaseDownloader struct {
+ base.NullDownloader
+ ctx context.Context
+ client *http.Client
+ baseURL *url.URL
+ projectURL *url.URL
+ project string
+ repoName string
+ maxIssueIndex int64
+ userMap map[int64]*codebaseUser
+ commitMap map[string]string
+}
+
+// SetContext sets the context
+func (d *CodebaseDownloader) SetContext(ctx context.Context) {
+ d.ctx = ctx
+}
+
+// NewCodebaseDownloader creates a new downloader
+func NewCodebaseDownloader(ctx context.Context, projectURL *url.URL, project, repoName, username, password string) *CodebaseDownloader {
+ baseURL, _ := url.Parse("https://api3.codebasehq.com")
+
+ downloader := &CodebaseDownloader{
+ ctx: ctx,
+ baseURL: baseURL,
+ projectURL: projectURL,
+ project: project,
+ repoName: repoName,
+ client: &http.Client{
+ Transport: &http.Transport{
+ Proxy: func(req *http.Request) (*url.URL, error) {
+ if len(username) > 0 && len(password) > 0 {
+ req.SetBasicAuth(username, password)
+ }
+ return proxy.Proxy()(req)
+ },
+ },
+ },
+ userMap: make(map[int64]*codebaseUser),
+ commitMap: make(map[string]string),
+ }
+
+ log.Trace("Create Codebase downloader. BaseURL: %s Project: %s RepoName: %s", baseURL, project, repoName)
+ return downloader
+}
+
+// String implements Stringer
+func (d *CodebaseDownloader) String() string {
+ return fmt.Sprintf("migration from codebase server %s %s/%s", d.baseURL, d.project, d.repoName)
+}
+
+func (d *CodebaseDownloader) LogString() string {
+ if d == nil {
+ return "<CodebaseDownloader nil>"
+ }
+ return fmt.Sprintf("<CodebaseDownloader %s %s/%s>", d.baseURL, d.project, d.repoName)
+}
+
+// FormatCloneURL returns the original clone address; any credentials are already embedded in it
+func (d *CodebaseDownloader) FormatCloneURL(opts base.MigrateOptions, remoteAddr string) (string, error) {
+ return opts.CloneAddr, nil
+}
+
+func (d *CodebaseDownloader) callAPI(endpoint string, parameter map[string]string, result any) error {
+ u, err := d.baseURL.Parse(endpoint)
+ if err != nil {
+ return err
+ }
+
+ if parameter != nil {
+ query := u.Query()
+ for k, v := range parameter {
+ query.Set(k, v)
+ }
+ u.RawQuery = query.Encode()
+ }
+
+ req, err := http.NewRequestWithContext(d.ctx, "GET", u.String(), nil)
+ if err != nil {
+ return err
+ }
+ req.Header.Add("Accept", "application/xml")
+
+ resp, err := d.client.Do(req)
+ if err != nil {
+ return err
+ }
+ defer resp.Body.Close()
+
+ return xml.NewDecoder(resp.Body).Decode(&result)
+}
+
+// GetRepoInfo returns repository information
+// https://support.codebasehq.com/kb/projects
+func (d *CodebaseDownloader) GetRepoInfo() (*base.Repository, error) {
+ var rawRepository struct {
+ XMLName xml.Name `xml:"repository"`
+ Name string `xml:"name"`
+ Description string `xml:"description"`
+ Permalink string `xml:"permalink"`
+ CloneURL string `xml:"clone-url"`
+ Source string `xml:"source"`
+ }
+
+ err := d.callAPI(
+ fmt.Sprintf("/%s/%s", d.project, d.repoName),
+ nil,
+ &rawRepository,
+ )
+ if err != nil {
+ return nil, err
+ }
+
+ return &base.Repository{
+ Name: rawRepository.Name,
+ Description: rawRepository.Description,
+ CloneURL: rawRepository.CloneURL,
+ OriginalURL: d.projectURL.String(),
+ }, nil
+}
+
+// GetMilestones returns milestones
+// https://support.codebasehq.com/kb/tickets-and-milestones/milestones
+func (d *CodebaseDownloader) GetMilestones() ([]*base.Milestone, error) {
+ var rawMilestones struct {
+ XMLName xml.Name `xml:"ticketing-milestone"`
+ Type string `xml:"type,attr"`
+ TicketingMilestone []struct {
+ Text string `xml:",chardata"`
+ ID struct {
+ Value int64 `xml:",chardata"`
+ Type string `xml:"type,attr"`
+ } `xml:"id"`
+ Identifier string `xml:"identifier"`
+ Name string `xml:"name"`
+ Deadline struct {
+ Value string `xml:",chardata"`
+ Type string `xml:"type,attr"`
+ } `xml:"deadline"`
+ Description string `xml:"description"`
+ Status string `xml:"status"`
+ } `xml:"ticketing-milestone"`
+ }
+
+ err := d.callAPI(
+ fmt.Sprintf("/%s/milestones", d.project),
+ nil,
+ &rawMilestones,
+ )
+ if err != nil {
+ return nil, err
+ }
+
+ milestones := make([]*base.Milestone, 0, len(rawMilestones.TicketingMilestone))
+ for _, milestone := range rawMilestones.TicketingMilestone {
+ var deadline *time.Time
+ if len(milestone.Deadline.Value) > 0 {
+ if val, err := time.Parse("2006-01-02", milestone.Deadline.Value); err == nil {
+ deadline = &val
+ }
+ }
+
+ closed := deadline
+ state := "closed"
+ if milestone.Status == "active" {
+ closed = nil
+ state = ""
+ }
+
+ milestones = append(milestones, &base.Milestone{
+ Title: milestone.Name,
+ Deadline: deadline,
+ Closed: closed,
+ State: state,
+ })
+ }
+ return milestones, nil
+}
+
+// GetLabels returns labels
+// https://support.codebasehq.com/kb/tickets-and-milestones/statuses-priorities-and-categories
+func (d *CodebaseDownloader) GetLabels() ([]*base.Label, error) {
+ var rawTypes struct {
+ XMLName xml.Name `xml:"ticketing-types"`
+ Type string `xml:"type,attr"`
+ TicketingType []struct {
+ ID struct {
+ Value int64 `xml:",chardata"`
+ Type string `xml:"type,attr"`
+ } `xml:"id"`
+ Name string `xml:"name"`
+ } `xml:"ticketing-type"`
+ }
+
+ err := d.callAPI(
+ fmt.Sprintf("/%s/tickets/types", d.project),
+ nil,
+ &rawTypes,
+ )
+ if err != nil {
+ return nil, err
+ }
+
+ labels := make([]*base.Label, 0, len(rawTypes.TicketingType))
+ for _, label := range rawTypes.TicketingType {
+ labels = append(labels, &base.Label{
+ Name: label.Name,
+ Color: "ffffff",
+ })
+ }
+ return labels, nil
+}
+
+type codebaseIssueContext struct {
+ Comments []*base.Comment
+}
+
+// GetIssues returns issues; pagination is not supported, all tickets are returned at once
+// https://support.codebasehq.com/kb/tickets-and-milestones
+// https://support.codebasehq.com/kb/tickets-and-milestones/updating-tickets
+func (d *CodebaseDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, error) {
+ var rawIssues struct {
+ XMLName xml.Name `xml:"tickets"`
+ Type string `xml:"type,attr"`
+ Ticket []struct {
+ TicketID struct {
+ Value int64 `xml:",chardata"`
+ Type string `xml:"type,attr"`
+ } `xml:"ticket-id"`
+ Summary string `xml:"summary"`
+ TicketType string `xml:"ticket-type"`
+ ReporterID struct {
+ Value int64 `xml:",chardata"`
+ Type string `xml:"type,attr"`
+ } `xml:"reporter-id"`
+ Reporter string `xml:"reporter"`
+ Type struct {
+ Name string `xml:"name"`
+ } `xml:"type"`
+ Status struct {
+ TreatAsClosed struct {
+ Value bool `xml:",chardata"`
+ Type string `xml:"type,attr"`
+ } `xml:"treat-as-closed"`
+ } `xml:"status"`
+ Milestone struct {
+ Name string `xml:"name"`
+ } `xml:"milestone"`
+ UpdatedAt struct {
+ Value time.Time `xml:",chardata"`
+ Type string `xml:"type,attr"`
+ } `xml:"updated-at"`
+ CreatedAt struct {
+ Value time.Time `xml:",chardata"`
+ Type string `xml:"type,attr"`
+ } `xml:"created-at"`
+ } `xml:"ticket"`
+ }
+
+ err := d.callAPI(
+ fmt.Sprintf("/%s/tickets", d.project),
+ nil,
+ &rawIssues,
+ )
+ if err != nil {
+ return nil, false, err
+ }
+
+ issues := make([]*base.Issue, 0, len(rawIssues.Ticket))
+ for _, issue := range rawIssues.Ticket {
+ var notes struct {
+ XMLName xml.Name `xml:"ticket-notes"`
+ Type string `xml:"type,attr"`
+ TicketNote []struct {
+ Content string `xml:"content"`
+ CreatedAt struct {
+ Value time.Time `xml:",chardata"`
+ Type string `xml:"type,attr"`
+ } `xml:"created-at"`
+ UpdatedAt struct {
+ Value time.Time `xml:",chardata"`
+ Type string `xml:"type,attr"`
+ } `xml:"updated-at"`
+ ID struct {
+ Value int64 `xml:",chardata"`
+ Type string `xml:"type,attr"`
+ } `xml:"id"`
+ UserID struct {
+ Value int64 `xml:",chardata"`
+ Type string `xml:"type,attr"`
+ } `xml:"user-id"`
+ } `xml:"ticket-note"`
+ }
+ err := d.callAPI(
+ fmt.Sprintf("/%s/tickets/%d/notes", d.project, issue.TicketID.Value),
+ nil,
+ &notes,
+ )
+ if err != nil {
+ return nil, false, err
+ }
+ comments := make([]*base.Comment, 0, len(notes.TicketNote))
+ for _, note := range notes.TicketNote {
+ if len(note.Content) == 0 {
+ continue
+ }
+ poster := d.tryGetUser(note.UserID.Value)
+ comments = append(comments, &base.Comment{
+ IssueIndex: issue.TicketID.Value,
+ Index: note.ID.Value,
+ PosterID: poster.ID,
+ PosterName: poster.Name,
+ PosterEmail: poster.Email,
+ Content: note.Content,
+ Created: note.CreatedAt.Value,
+ Updated: note.UpdatedAt.Value,
+ })
+ }
+ if len(comments) == 0 {
+ comments = append(comments, &base.Comment{})
+ }
+
+ state := "open"
+ if issue.Status.TreatAsClosed.Value {
+ state = "closed"
+ }
+ poster := d.tryGetUser(issue.ReporterID.Value)
+ issues = append(issues, &base.Issue{
+ Title: issue.Summary,
+ Number: issue.TicketID.Value,
+ PosterName: poster.Name,
+ PosterEmail: poster.Email,
+ Content: comments[0].Content,
+ Milestone: issue.Milestone.Name,
+ State: state,
+ Created: issue.CreatedAt.Value,
+ Updated: issue.UpdatedAt.Value,
+ Labels: []*base.Label{
+ {Name: issue.Type.Name},
+ },
+ ForeignIndex: issue.TicketID.Value,
+ Context: codebaseIssueContext{
+ Comments: comments[1:],
+ },
+ })
+
+ if d.maxIssueIndex < issue.TicketID.Value {
+ d.maxIssueIndex = issue.TicketID.Value
+ }
+ }
+
+ return issues, true, nil
+}
+
+// GetComments returns comments
+func (d *CodebaseDownloader) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) {
+ context, ok := commentable.GetContext().(codebaseIssueContext)
+ if !ok {
+ return nil, false, fmt.Errorf("unexpected context: %+v", commentable.GetContext())
+ }
+
+ return context.Comments, true, nil
+}
+
+// GetPullRequests returns pull requests
+// https://support.codebasehq.com/kb/repositories/merge-requests
+func (d *CodebaseDownloader) GetPullRequests(page, perPage int) ([]*base.PullRequest, bool, error) {
+ var rawMergeRequests struct {
+ XMLName xml.Name `xml:"merge-requests"`
+ Type string `xml:"type,attr"`
+ MergeRequest []struct {
+ ID struct {
+ Value int64 `xml:",chardata"`
+ Type string `xml:"type,attr"`
+ } `xml:"id"`
+ } `xml:"merge-request"`
+ }
+
+ err := d.callAPI(
+ fmt.Sprintf("/%s/%s/merge_requests", d.project, d.repoName),
+ map[string]string{
+ "query": `"Target Project" is "` + d.repoName + `"`,
+ "offset": strconv.Itoa((page - 1) * perPage),
+ "count": strconv.Itoa(perPage),
+ },
+ &rawMergeRequests,
+ )
+ if err != nil {
+ return nil, false, err
+ }
+
+ pullRequests := make([]*base.PullRequest, 0, len(rawMergeRequests.MergeRequest))
+ for i, mr := range rawMergeRequests.MergeRequest {
+ var rawMergeRequest struct {
+ XMLName xml.Name `xml:"merge-request"`
+ ID struct {
+ Value int64 `xml:",chardata"`
+ Type string `xml:"type,attr"`
+ } `xml:"id"`
+ SourceRef string `xml:"source-ref"` // NOTE: from the documentation these are actually just branches NOT full refs
+ TargetRef string `xml:"target-ref"` // NOTE: from the documentation these are actually just branches NOT full refs
+ Subject string `xml:"subject"`
+ Status string `xml:"status"`
+ UserID struct {
+ Value int64 `xml:",chardata"`
+ Type string `xml:"type,attr"`
+ } `xml:"user-id"`
+ CreatedAt struct {
+ Value time.Time `xml:",chardata"`
+ Type string `xml:"type,attr"`
+ } `xml:"created-at"`
+ UpdatedAt struct {
+ Value time.Time `xml:",chardata"`
+ Type string `xml:"type,attr"`
+ } `xml:"updated-at"`
+ Comments struct {
+ Type string `xml:"type,attr"`
+ Comment []struct {
+ Content string `xml:"content"`
+ ID struct {
+ Value int64 `xml:",chardata"`
+ Type string `xml:"type,attr"`
+ } `xml:"id"`
+ UserID struct {
+ Value int64 `xml:",chardata"`
+ Type string `xml:"type,attr"`
+ } `xml:"user-id"`
+ Action struct {
+ Value string `xml:",chardata"`
+ Nil string `xml:"nil,attr"`
+ } `xml:"action"`
+ CreatedAt struct {
+ Value time.Time `xml:",chardata"`
+ Type string `xml:"type,attr"`
+ } `xml:"created-at"`
+ } `xml:"comment"`
+ } `xml:"comments"`
+ }
+ err := d.callAPI(
+ fmt.Sprintf("/%s/%s/merge_requests/%d", d.project, d.repoName, mr.ID.Value),
+ nil,
+ &rawMergeRequest,
+ )
+ if err != nil {
+ return nil, false, err
+ }
+
+ number := d.maxIssueIndex + int64(i) + 1
+
+ state := "open"
+ merged := false
+ var closeTime *time.Time
+ var mergedTime *time.Time
+ if rawMergeRequest.Status != "new" {
+ state = "closed"
+ closeTime = &rawMergeRequest.UpdatedAt.Value
+ }
+
+ comments := make([]*base.Comment, 0, len(rawMergeRequest.Comments.Comment))
+ for _, comment := range rawMergeRequest.Comments.Comment {
+ if len(comment.Content) == 0 {
+ if comment.Action.Value == "merging" {
+ merged = true
+ mergedTime = &comment.CreatedAt.Value
+ }
+ continue
+ }
+ poster := d.tryGetUser(comment.UserID.Value)
+ comments = append(comments, &base.Comment{
+ IssueIndex: number,
+ Index: comment.ID.Value,
+ PosterID: poster.ID,
+ PosterName: poster.Name,
+ PosterEmail: poster.Email,
+ Content: comment.Content,
+ Created: comment.CreatedAt.Value,
+ Updated: comment.CreatedAt.Value,
+ })
+ }
+ if len(comments) == 0 {
+ comments = append(comments, &base.Comment{})
+ }
+
+ poster := d.tryGetUser(rawMergeRequest.UserID.Value)
+
+ pullRequests = append(pullRequests, &base.PullRequest{
+ Title: rawMergeRequest.Subject,
+ Number: number,
+ PosterName: poster.Name,
+ PosterEmail: poster.Email,
+ Content: comments[0].Content,
+ State: state,
+ Created: rawMergeRequest.CreatedAt.Value,
+ Updated: rawMergeRequest.UpdatedAt.Value,
+ Closed: closeTime,
+ Merged: merged,
+ MergedTime: mergedTime,
+ Head: base.PullRequestBranch{
+ Ref: rawMergeRequest.SourceRef,
+ SHA: d.getHeadCommit(rawMergeRequest.SourceRef),
+ RepoName: d.repoName,
+ },
+ Base: base.PullRequestBranch{
+ Ref: rawMergeRequest.TargetRef,
+ SHA: d.getHeadCommit(rawMergeRequest.TargetRef),
+ RepoName: d.repoName,
+ },
+ ForeignIndex: rawMergeRequest.ID.Value,
+ Context: codebaseIssueContext{
+ Comments: comments[1:],
+ },
+ })
+
+ // SECURITY: Ensure that the PR is safe
+ _ = CheckAndEnsureSafePR(pullRequests[len(pullRequests)-1], d.baseURL.String(), d)
+ }
+
+ return pullRequests, true, nil
+}
+
+func (d *CodebaseDownloader) tryGetUser(userID int64) *codebaseUser {
+ if len(d.userMap) == 0 {
+ var rawUsers struct {
+ XMLName xml.Name `xml:"users"`
+ Type string `xml:"type,attr"`
+ User []struct {
+ EmailAddress string `xml:"email-address"`
+ ID struct {
+ Value int64 `xml:",chardata"`
+ Type string `xml:"type,attr"`
+ } `xml:"id"`
+ LastName string `xml:"last-name"`
+ FirstName string `xml:"first-name"`
+ Username string `xml:"username"`
+ } `xml:"user"`
+ }
+
+ err := d.callAPI(
+ "/users",
+ nil,
+ &rawUsers,
+ )
+ if err == nil {
+ for _, user := range rawUsers.User {
+ d.userMap[user.ID.Value] = &codebaseUser{
+ Name: user.Username,
+ Email: user.EmailAddress,
+ }
+ }
+ }
+ }
+
+ user, ok := d.userMap[userID]
+ if !ok {
+ user = &codebaseUser{
+ Name: fmt.Sprintf("User %d", userID),
+ }
+ d.userMap[userID] = user
+ }
+
+ return user
+}
+
+func (d *CodebaseDownloader) getHeadCommit(ref string) string {
+ commitRef, ok := d.commitMap[ref]
+ if !ok {
+ var rawCommits struct {
+ XMLName xml.Name `xml:"commits"`
+ Type string `xml:"type,attr"`
+ Commit []struct {
+ Ref string `xml:"ref"`
+ } `xml:"commit"`
+ }
+ err := d.callAPI(
+ fmt.Sprintf("/%s/%s/commits/%s", d.project, d.repoName, ref),
+ nil,
+ &rawCommits,
+ )
+ if err == nil && len(rawCommits.Commit) > 0 {
+ commitRef = rawCommits.Commit[0].Ref
+ d.commitMap[ref] = commitRef
+ }
+ }
+ return commitRef
+}
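The constructor above smuggles basic auth into every request through the http.Transport's Proxy callback, which the standard library invokes once per request with the mutable *http.Request before resolving any proxy. A standalone sketch of the same trick, substituting the standard library's http.ProxyFromEnvironment for Gitea's proxy module:

package main

import (
	"net/http"
	"net/url"
)

// newBasicAuthClient returns an *http.Client that injects basic-auth
// credentials into every outgoing request by reusing the Transport's
// per-request Proxy hook.
func newBasicAuthClient(username, password string) *http.Client {
	return &http.Client{
		Transport: &http.Transport{
			Proxy: func(req *http.Request) (*url.URL, error) {
				if username != "" && password != "" {
					req.SetBasicAuth(username, password)
				}
				return http.ProxyFromEnvironment(req)
			},
		},
	}
}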
diff --git a/services/migrations/codebase_test.go b/services/migrations/codebase_test.go
new file mode 100644
index 0000000..23626d1
--- /dev/null
+++ b/services/migrations/codebase_test.go
@@ -0,0 +1,151 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "context"
+ "net/url"
+ "os"
+ "testing"
+ "time"
+
+ base "code.gitea.io/gitea/modules/migration"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestCodebaseDownloadRepo(t *testing.T) {
+ // Skip tests if Codebase token is not found
+ cloneUser := os.Getenv("CODEBASE_CLONE_USER")
+ clonePassword := os.Getenv("CODEBASE_CLONE_PASSWORD")
+ apiUser := os.Getenv("CODEBASE_API_USER")
+ apiPassword := os.Getenv("CODEBASE_API_TOKEN")
+ if apiUser == "" || apiPassword == "" {
+ t.Skip("skipped test because a CODEBASE_ variable was not in the environment")
+ }
+
+ cloneAddr := "https://gitea-test.codebasehq.com/gitea-test/test.git"
+ u, _ := url.Parse(cloneAddr)
+ if cloneUser != "" {
+ u.User = url.UserPassword(cloneUser, clonePassword)
+ }
+
+ factory := &CodebaseDownloaderFactory{}
+ downloader, err := factory.New(context.Background(), base.MigrateOptions{
+ CloneAddr: u.String(),
+ AuthUsername: apiUser,
+ AuthPassword: apiPassword,
+ })
+ require.NoError(t, err)
+ repo, err := downloader.GetRepoInfo()
+ require.NoError(t, err)
+ assertRepositoryEqual(t, &base.Repository{
+ Name: "test",
+ Owner: "",
+ Description: "Repository Description",
+ CloneURL: "git@codebasehq.com:gitea-test/gitea-test/test.git",
+ OriginalURL: cloneAddr,
+ }, repo)
+
+ milestones, err := downloader.GetMilestones()
+ require.NoError(t, err)
+ assertMilestonesEqual(t, []*base.Milestone{
+ {
+ Title: "Milestone1",
+ Deadline: timePtr(time.Date(2021, time.September, 16, 0, 0, 0, 0, time.UTC)),
+ },
+ {
+ Title: "Milestone2",
+ Deadline: timePtr(time.Date(2021, time.September, 17, 0, 0, 0, 0, time.UTC)),
+ Closed: timePtr(time.Date(2021, time.September, 17, 0, 0, 0, 0, time.UTC)),
+ State: "closed",
+ },
+ }, milestones)
+
+ labels, err := downloader.GetLabels()
+ require.NoError(t, err)
+ assert.Len(t, labels, 4)
+
+ issues, isEnd, err := downloader.GetIssues(1, 2)
+ require.NoError(t, err)
+ assert.True(t, isEnd)
+ assertIssuesEqual(t, []*base.Issue{
+ {
+ Number: 2,
+ Title: "Open Ticket",
+ Content: "Open Ticket Message",
+ PosterName: "gitea-test-43",
+ PosterEmail: "gitea-codebase@smack.email",
+ State: "open",
+ Created: time.Date(2021, time.September, 26, 19, 19, 14, 0, time.UTC),
+ Updated: time.Date(2021, time.September, 26, 19, 19, 34, 0, time.UTC),
+ Labels: []*base.Label{
+ {
+ Name: "Feature",
+ },
+ },
+ },
+ {
+ Number: 1,
+ Title: "Closed Ticket",
+ Content: "Closed Ticket Message",
+ PosterName: "gitea-test-43",
+ PosterEmail: "gitea-codebase@smack.email",
+ State: "closed",
+ Milestone: "Milestone1",
+ Created: time.Date(2021, time.September, 26, 19, 18, 33, 0, time.UTC),
+ Updated: time.Date(2021, time.September, 26, 19, 18, 55, 0, time.UTC),
+ Labels: []*base.Label{
+ {
+ Name: "Bug",
+ },
+ },
+ },
+ }, issues)
+
+ comments, _, err := downloader.GetComments(issues[0])
+ require.NoError(t, err)
+ assertCommentsEqual(t, []*base.Comment{
+ {
+ IssueIndex: 2,
+ PosterName: "gitea-test-43",
+ PosterEmail: "gitea-codebase@smack.email",
+ Created: time.Date(2021, time.September, 26, 19, 19, 34, 0, time.UTC),
+ Updated: time.Date(2021, time.September, 26, 19, 19, 34, 0, time.UTC),
+ Content: "open comment",
+ },
+ }, comments)
+
+ prs, _, err := downloader.GetPullRequests(1, 1)
+ require.NoError(t, err)
+ assertPullRequestsEqual(t, []*base.PullRequest{
+ {
+ Number: 3,
+ Title: "Readme Change",
+ Content: "Merge Request comment",
+ PosterName: "gitea-test-43",
+ PosterEmail: "gitea-codebase@smack.email",
+ State: "open",
+ Created: time.Date(2021, time.September, 26, 20, 25, 47, 0, time.UTC),
+ Updated: time.Date(2021, time.September, 26, 20, 25, 47, 0, time.UTC),
+ Head: base.PullRequestBranch{
+ Ref: "readme-mr",
+ SHA: "1287f206b888d4d13540e0a8e1c07458f5420059",
+ RepoName: "test",
+ },
+ Base: base.PullRequestBranch{
+ Ref: "master",
+ SHA: "f32b0a9dfd09a60f616f29158f772cedd89942d2",
+ RepoName: "test",
+ },
+ },
+ }, prs)
+
+ rvs, err := downloader.GetReviews(prs[0])
+ require.NoError(t, err)
+ assert.Empty(t, rvs)
+}
diff --git a/services/migrations/common.go b/services/migrations/common.go
new file mode 100644
index 0000000..d885188
--- /dev/null
+++ b/services/migrations/common.go
@@ -0,0 +1,83 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "fmt"
+ "strings"
+
+ system_model "code.gitea.io/gitea/models/system"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ base "code.gitea.io/gitea/modules/migration"
+)
+
+// WarnAndNotice will log the provided message and send a repository notice
+func WarnAndNotice(fmtStr string, args ...any) {
+ log.Warn(fmtStr, args...)
+ if err := system_model.CreateRepositoryNotice(fmt.Sprintf(fmtStr, args...)); err != nil {
+ log.Error("create repository notice failed: ", err)
+ }
+}
+
+func hasBaseURL(toCheck, baseURL string) bool {
+ if len(baseURL) > 0 && baseURL[len(baseURL)-1] != '/' {
+ baseURL += "/"
+ }
+ return strings.HasPrefix(toCheck, baseURL)
+}
+
+// CheckAndEnsureSafePR will check that a given PR is safe to download
+func CheckAndEnsureSafePR(pr *base.PullRequest, commonCloneBaseURL string, g base.Downloader) bool {
+ valid := true
+ // SECURITY: the patchURL must be checked to have the same baseURL as the current to prevent open redirect
+ if pr.PatchURL != "" && !hasBaseURL(pr.PatchURL, commonCloneBaseURL) {
+ // TODO: Should we check that this url has the expected format for a patch url?
+ WarnAndNotice("PR #%d in %s has invalid PatchURL: %s baseURL: %s", pr.Number, g, pr.PatchURL, commonCloneBaseURL)
+ pr.PatchURL = ""
+ valid = false
+ }
+
+ // SECURITY: the headCloneURL must be checked to have the same baseURL as the current to prevent open redirect
+ if pr.Head.CloneURL != "" && !hasBaseURL(pr.Head.CloneURL, commonCloneBaseURL) {
+ // TODO: Should we check that this url has the expected format for a clone url?
+ WarnAndNotice("PR #%d in %s has invalid HeadCloneURL: %s baseURL: %s", pr.Number, g, pr.Head.CloneURL, commonCloneBaseURL)
+ pr.Head.CloneURL = ""
+ valid = false
+ }
+
+ // SECURITY: SHAs Must be a SHA
+ // FIXME: only SHA1 hashes are accepted here
+ CommitType := git.Sha1ObjectFormat
+ if pr.MergeCommitSHA != "" && !CommitType.IsValid(pr.MergeCommitSHA) {
+ WarnAndNotice("PR #%d in %s has invalid MergeCommitSHA: %s", pr.Number, g, pr.MergeCommitSHA)
+ pr.MergeCommitSHA = ""
+ }
+ if pr.Head.SHA != "" && !CommitType.IsValid(pr.Head.SHA) {
+ WarnAndNotice("PR #%d in %s has invalid HeadSHA: %s", pr.Number, g, pr.Head.SHA)
+ pr.Head.SHA = ""
+ valid = false
+ }
+ if pr.Base.SHA != "" && !CommitType.IsValid(pr.Base.SHA) {
+ WarnAndNotice("PR #%d in %s has invalid BaseSHA: %s", pr.Number, g, pr.Base.SHA)
+ pr.Base.SHA = ""
+ valid = false
+ }
+
+ // SECURITY: Refs must be valid refs or SHAs
+ if pr.Head.Ref != "" && !git.IsValidRefPattern(pr.Head.Ref) {
+ WarnAndNotice("PR #%d in %s has invalid HeadRef: %s", pr.Number, g, pr.Head.Ref)
+ pr.Head.Ref = ""
+ valid = false
+ }
+ if pr.Base.Ref != "" && !git.IsValidRefPattern(pr.Base.Ref) {
+ WarnAndNotice("PR #%d in %s has invalid BaseRef: %s", pr.Number, g, pr.Base.Ref)
+ pr.Base.Ref = ""
+ valid = false
+ }
+
+ pr.EnsuredSafe = true
+
+ return valid
+}
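The checks above are deliberately lossy: an invalid field is blanked and reported via WarnAndNotice rather than aborting the migration. The trailing-slash normalization in hasBaseURL is what defeats plain prefix tricks, as this hedged test sketch illustrates:

package migrations

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

// Appending "/" to the base URL before the prefix check means a hostile
// host that merely starts with the base URL string does not match.
func TestHasBaseURLSketch(t *testing.T) {
	assert.True(t, hasBaseURL("https://example.com/owner/repo.git", "https://example.com"))
	assert.False(t, hasBaseURL("https://example.com.evil.org/x.git", "https://example.com"))
}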
diff --git a/services/migrations/dump.go b/services/migrations/dump.go
new file mode 100644
index 0000000..0781200
--- /dev/null
+++ b/services/migrations/dump.go
@@ -0,0 +1,737 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "os"
+ "path/filepath"
+ "strconv"
+ "strings"
+ "time"
+
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ base "code.gitea.io/gitea/modules/migration"
+ "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+
+ "github.com/google/uuid"
+ "gopkg.in/yaml.v3"
+)
+
+var _ base.Uploader = &RepositoryDumper{}
+
+// RepositoryDumper implements an Uploader that writes to a local directory
+type RepositoryDumper struct {
+ ctx context.Context
+ baseDir string
+ repoOwner string
+ repoName string
+ opts base.MigrateOptions
+ milestoneFile *os.File
+ labelFile *os.File
+ releaseFile *os.File
+ issueFile *os.File
+ commentFiles map[int64]*os.File
+ pullrequestFile *os.File
+ reviewFiles map[int64]*os.File
+
+ gitRepo *git.Repository
+ prHeadCache map[string]string
+}
+
+// NewRepositoryDumper creates a gitea Uploader
+func NewRepositoryDumper(ctx context.Context, baseDir, repoOwner, repoName string, opts base.MigrateOptions) (*RepositoryDumper, error) {
+ baseDir = filepath.Join(baseDir, repoOwner, repoName)
+ if err := os.MkdirAll(baseDir, os.ModePerm); err != nil {
+ return nil, err
+ }
+ return &RepositoryDumper{
+ ctx: ctx,
+ opts: opts,
+ baseDir: baseDir,
+ repoOwner: repoOwner,
+ repoName: repoName,
+ prHeadCache: make(map[string]string),
+ commentFiles: make(map[int64]*os.File),
+ reviewFiles: make(map[int64]*os.File),
+ }, nil
+}
+
+// MaxBatchInsertSize returns the table's max batch insert size
+func (g *RepositoryDumper) MaxBatchInsertSize(tp string) int {
+ return 1000
+}
+
+func (g *RepositoryDumper) gitPath() string {
+ return filepath.Join(g.baseDir, "git")
+}
+
+func (g *RepositoryDumper) wikiPath() string {
+ return filepath.Join(g.baseDir, "wiki")
+}
+
+func (g *RepositoryDumper) commentDir() string {
+ return filepath.Join(g.baseDir, "comments")
+}
+
+func (g *RepositoryDumper) reviewDir() string {
+ return filepath.Join(g.baseDir, "reviews")
+}
+
+func (g *RepositoryDumper) setURLToken(remoteAddr string) (string, error) {
+ if len(g.opts.AuthToken) > 0 || len(g.opts.AuthUsername) > 0 {
+ u, err := url.Parse(remoteAddr)
+ if err != nil {
+ return "", err
+ }
+ u.User = url.UserPassword(g.opts.AuthUsername, g.opts.AuthPassword)
+ if len(g.opts.AuthToken) > 0 {
+ u.User = url.UserPassword("oauth2", g.opts.AuthToken)
+ }
+ remoteAddr = u.String()
+ }
+
+ return remoteAddr, nil
+}
+
+// CreateRepo creates a repository
+func (g *RepositoryDumper) CreateRepo(repo *base.Repository, opts base.MigrateOptions) error {
+ f, err := os.Create(filepath.Join(g.baseDir, "repo.yml"))
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ bs, err := yaml.Marshal(map[string]any{
+ "name": repo.Name,
+ "owner": repo.Owner,
+ "description": repo.Description,
+ "clone_addr": opts.CloneAddr,
+ "original_url": repo.OriginalURL,
+ "is_private": opts.Private,
+ "service_type": opts.GitServiceType,
+ "wiki": opts.Wiki,
+ "issues": opts.Issues,
+ "milestones": opts.Milestones,
+ "labels": opts.Labels,
+ "releases": opts.Releases,
+ "comments": opts.Comments,
+ "pulls": opts.PullRequests,
+ "assets": opts.ReleaseAssets,
+ })
+ if err != nil {
+ return err
+ }
+
+ if _, err := f.Write(bs); err != nil {
+ return err
+ }
+
+ repoPath := g.gitPath()
+ if err := os.MkdirAll(repoPath, os.ModePerm); err != nil {
+ return err
+ }
+
+ migrateTimeout := 2 * time.Hour
+
+ remoteAddr, err := g.setURLToken(repo.CloneURL)
+ if err != nil {
+ return err
+ }
+
+ err = git.Clone(g.ctx, remoteAddr, repoPath, git.CloneRepoOptions{
+ Mirror: true,
+ Quiet: true,
+ Timeout: migrateTimeout,
+ SkipTLSVerify: setting.Migrations.SkipTLSVerify,
+ })
+ if err != nil {
+ return fmt.Errorf("Clone: %w", err)
+ }
+ if err := git.WriteCommitGraph(g.ctx, repoPath); err != nil {
+ return err
+ }
+
+ if opts.Wiki {
+ wikiPath := g.wikiPath()
+ wikiRemotePath := repository.WikiRemoteURL(g.ctx, remoteAddr)
+ if len(wikiRemotePath) > 0 {
+ if err := os.MkdirAll(wikiPath, os.ModePerm); err != nil {
+ return fmt.Errorf("Failed to remove %s: %w", wikiPath, err)
+ }
+
+ if err := git.Clone(g.ctx, wikiRemotePath, wikiPath, git.CloneRepoOptions{
+ Mirror: true,
+ Quiet: true,
+ Timeout: migrateTimeout,
+ Branch: "master",
+ SkipTLSVerify: setting.Migrations.SkipTLSVerify,
+ }); err != nil {
+ log.Warn("Clone wiki: %v", err)
+ if err := os.RemoveAll(wikiPath); err != nil {
+ return fmt.Errorf("Failed to remove %s: %w", wikiPath, err)
+ }
+ } else if err := git.WriteCommitGraph(g.ctx, wikiPath); err != nil {
+ return err
+ }
+ }
+ }
+
+ g.gitRepo, err = git.OpenRepository(g.ctx, g.gitPath())
+ return err
+}
+
+// Close closes this uploader
+func (g *RepositoryDumper) Close() {
+ if g.gitRepo != nil {
+ g.gitRepo.Close()
+ }
+ if g.milestoneFile != nil {
+ g.milestoneFile.Close()
+ }
+ if g.labelFile != nil {
+ g.labelFile.Close()
+ }
+ if g.releaseFile != nil {
+ g.releaseFile.Close()
+ }
+ if g.issueFile != nil {
+ g.issueFile.Close()
+ }
+ for _, f := range g.commentFiles {
+ f.Close()
+ }
+ if g.pullrequestFile != nil {
+ g.pullrequestFile.Close()
+ }
+ for _, f := range g.reviewFiles {
+ f.Close()
+ }
+}
+
+// CreateTopics creates topics
+func (g *RepositoryDumper) CreateTopics(topics ...string) error {
+ f, err := os.Create(filepath.Join(g.baseDir, "topic.yml"))
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ bs, err := yaml.Marshal(map[string]any{
+ "topics": topics,
+ })
+ if err != nil {
+ return err
+ }
+
+ if _, err := f.Write(bs); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// CreateMilestones creates milestones
+func (g *RepositoryDumper) CreateMilestones(milestones ...*base.Milestone) error {
+ var err error
+ if g.milestoneFile == nil {
+ g.milestoneFile, err = os.Create(filepath.Join(g.baseDir, "milestone.yml"))
+ if err != nil {
+ return err
+ }
+ }
+
+ bs, err := yaml.Marshal(milestones)
+ if err != nil {
+ return err
+ }
+
+ if _, err := g.milestoneFile.Write(bs); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// CreateLabels creates labels
+func (g *RepositoryDumper) CreateLabels(labels ...*base.Label) error {
+ var err error
+ if g.labelFile == nil {
+ g.labelFile, err = os.Create(filepath.Join(g.baseDir, "label.yml"))
+ if err != nil {
+ return err
+ }
+ }
+
+ bs, err := yaml.Marshal(labels)
+ if err != nil {
+ return err
+ }
+
+ if _, err := g.labelFile.Write(bs); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// CreateReleases creates releases
+func (g *RepositoryDumper) CreateReleases(releases ...*base.Release) error {
+ if g.opts.ReleaseAssets {
+ for _, release := range releases {
+ attachDir := filepath.Join("release_assets", release.TagName)
+ if err := os.MkdirAll(filepath.Join(g.baseDir, attachDir), os.ModePerm); err != nil {
+ return err
+ }
+ for _, asset := range release.Assets {
+ attachLocalPath := filepath.Join(attachDir, asset.Name)
+
+ // SECURITY: We cannot check the DownloadURL and DownloadFunc are safe here
+ // ... we must assume that they are safe and simply download the attachment
+ // download attachment
+ err := func(attachPath string) error {
+ var rc io.ReadCloser
+ var err error
+ if asset.DownloadURL == nil {
+ rc, err = asset.DownloadFunc()
+ if err != nil {
+ return err
+ }
+ } else {
+ resp, err := http.Get(*asset.DownloadURL)
+ if err != nil {
+ return err
+ }
+ rc = resp.Body
+ }
+ defer rc.Close()
+
+ fw, err := os.Create(attachPath)
+ if err != nil {
+ return fmt.Errorf("create: %w", err)
+ }
+ defer fw.Close()
+
+ _, err = io.Copy(fw, rc)
+ return err
+ }(filepath.Join(g.baseDir, attachLocalPath))
+ if err != nil {
+ return err
+ }
+ asset.DownloadURL = &attachLocalPath // to save the filepath on the yml file, change the source
+ }
+ }
+ }
+
+ var err error
+ if g.releaseFile == nil {
+ g.releaseFile, err = os.Create(filepath.Join(g.baseDir, "release.yml"))
+ if err != nil {
+ return err
+ }
+ }
+
+ bs, err := yaml.Marshal(releases)
+ if err != nil {
+ return err
+ }
+
+ if _, err := g.releaseFile.Write(bs); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// SyncTags syncs releases with tags in the database
+func (g *RepositoryDumper) SyncTags() error {
+ return nil
+}
+
+// CreateIssues creates issues
+func (g *RepositoryDumper) CreateIssues(issues ...*base.Issue) error {
+ var err error
+ if g.issueFile == nil {
+ g.issueFile, err = os.Create(filepath.Join(g.baseDir, "issue.yml"))
+ if err != nil {
+ return err
+ }
+ }
+
+ bs, err := yaml.Marshal(issues)
+ if err != nil {
+ return err
+ }
+
+ if _, err := g.issueFile.Write(bs); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func (g *RepositoryDumper) createItems(dir string, itemFiles map[int64]*os.File, itemsMap map[int64][]any) error {
+ if err := os.MkdirAll(dir, os.ModePerm); err != nil {
+ return err
+ }
+
+ for number, items := range itemsMap {
+ if err := g.encodeItems(number, items, dir, itemFiles); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func (g *RepositoryDumper) encodeItems(number int64, items []any, dir string, itemFiles map[int64]*os.File) error {
+ itemFile := itemFiles[number]
+ if itemFile == nil {
+ var err error
+ itemFile, err = os.Create(filepath.Join(dir, fmt.Sprintf("%d.yml", number)))
+ if err != nil {
+ return err
+ }
+ itemFiles[number] = itemFile
+ }
+
+ encoder := yaml.NewEncoder(itemFile)
+ defer encoder.Close()
+
+ return encoder.Encode(items)
+}
+
+// CreateComments creates comments of issues
+func (g *RepositoryDumper) CreateComments(comments ...*base.Comment) error {
+ commentsMap := make(map[int64][]any, len(comments))
+ for _, comment := range comments {
+ commentsMap[comment.IssueIndex] = append(commentsMap[comment.IssueIndex], comment)
+ }
+
+ return g.createItems(g.commentDir(), g.commentFiles, commentsMap)
+}
+
+func (g *RepositoryDumper) handlePullRequest(pr *base.PullRequest) error {
+ // SECURITY: this pr must have been ensured safe
+ if !pr.EnsuredSafe {
+ log.Error("PR #%d in %s/%s has not been checked for safety ... We will ignore this.", pr.Number, g.repoOwner, g.repoName)
+ return fmt.Errorf("unsafe PR #%d", pr.Number)
+ }
+
+ // First we download the patch file
+ err := func() error {
+ // if the patchURL is empty there is nothing to download
+ if pr.PatchURL == "" {
+ return nil
+ }
+
+ // SECURITY: We will assume that the pr.PatchURL has been checked
+ // pr.PatchURL may be a local file - but note that EnsureSafe should have asserted it is safe
+ u, err := g.setURLToken(pr.PatchURL)
+ if err != nil {
+ return err
+ }
+
+ // SECURITY: We will assume that the pr.PatchURL has been checked
+ // pr.PatchURL may be a local file - but note that EnsureSafe should have asserted it is safe
+ resp, err := http.Get(u) // TODO: This probably needs to use the downloader as there may be rate limiting issues here
+ if err != nil {
+ return err
+ }
+ defer resp.Body.Close()
+ pullDir := filepath.Join(g.gitPath(), "pulls")
+ if err = os.MkdirAll(pullDir, os.ModePerm); err != nil {
+ return err
+ }
+ fPath := filepath.Join(pullDir, fmt.Sprintf("%d.patch", pr.Number))
+ f, err := os.Create(fPath)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ // TODO: Should there be limits on the size of this file?
+ if _, err = io.Copy(f, resp.Body); err != nil {
+ return err
+ }
+ pr.PatchURL = "git/pulls/" + fmt.Sprintf("%d.patch", pr.Number)
+
+ return nil
+ }()
+ if err != nil {
+ log.Error("PR #%d in %s/%s unable to download patch: %v", pr.Number, g.repoOwner, g.repoName, err)
+ return err
+ }
+
+ isFork := pr.IsForkPullRequest()
+
+ // Even if it's a fork PR, we have to set the head info to match the base info
+ oldHeadOwnerName := pr.Head.OwnerName
+ pr.Head.OwnerName, pr.Head.RepoName = pr.Base.OwnerName, pr.Base.RepoName
+
+ if !isFork || pr.State == "closed" {
+ return nil
+ }
+
+ // OK we want to fetch the current head as a branch from its CloneURL
+
+ // 1. Is there a head clone URL available?
+ // 2. Is there a head ref available?
+ if pr.Head.CloneURL == "" || pr.Head.Ref == "" {
+ // Set head information if pr.Head.SHA is available
+ if pr.Head.SHA != "" {
+ _, _, err = git.NewCommand(g.ctx, "update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.gitPath()})
+ if err != nil {
+ log.Error("PR #%d in %s/%s unable to update-ref for pr HEAD: %v", pr.Number, g.repoOwner, g.repoName, err)
+ }
+ }
+ return nil
+ }
+
+ // 3. We need to create a remote for this clone url
+ // ... maybe we already have a name for this remote
+ remote, ok := g.prHeadCache[pr.Head.CloneURL+":"]
+ if !ok {
+ // ... let's try ownername as a reasonable name
+ remote = oldHeadOwnerName
+ if !git.IsValidRefPattern(remote) {
+ // ... let's try something less nice
+ remote = "head-pr-" + strconv.FormatInt(pr.Number, 10)
+ }
+ // ... now add the remote
+ err := g.gitRepo.AddRemote(remote, pr.Head.CloneURL, true)
+ if err != nil {
+ log.Error("PR #%d in %s/%s AddRemote[%s] failed: %v", pr.Number, g.repoOwner, g.repoName, remote, err)
+ } else {
+ g.prHeadCache[pr.Head.CloneURL+":"] = remote
+ ok = true
+ }
+ }
+ if !ok {
+ // Set head information if pr.Head.SHA is available
+ if pr.Head.SHA != "" {
+ _, _, err = git.NewCommand(g.ctx, "update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.gitPath()})
+ if err != nil {
+ log.Error("PR #%d in %s/%s unable to update-ref for pr HEAD: %v", pr.Number, g.repoOwner, g.repoName, err)
+ }
+ }
+
+ return nil
+ }
+
+ // 4. Check if we already have this ref?
+ localRef, ok := g.prHeadCache[pr.Head.CloneURL+":"+pr.Head.Ref]
+ if !ok {
+ // ... We would normally name this migrated branch as <OwnerName>/<HeadRef> but we need to ensure that is safe
+ localRef = git.SanitizeRefPattern(oldHeadOwnerName + "/" + pr.Head.Ref)
+
+ // ... Now we must assert that this does not exist
+ if g.gitRepo.IsBranchExist(localRef) {
+ localRef = "head-pr-" + strconv.FormatInt(pr.Number, 10) + "/" + localRef
+ i := 0
+ for g.gitRepo.IsBranchExist(localRef) {
+ if i > 5 {
+ // ... We tried, we really tried but this is just a seriously unfriendly repo
+ return fmt.Errorf("unable to create unique local reference from %s", pr.Head.Ref)
+ }
+ // OK just try some uuids!
+ localRef = git.SanitizeRefPattern("head-pr-" + strconv.FormatInt(pr.Number, 10) + uuid.New().String())
+ i++
+ }
+ }
+
+ fetchArg := pr.Head.Ref + ":" + git.BranchPrefix + localRef
+ if strings.HasPrefix(fetchArg, "-") {
+ fetchArg = git.BranchPrefix + fetchArg
+ }
+
+ _, _, err = git.NewCommand(g.ctx, "fetch", "--no-tags").AddDashesAndList(remote, fetchArg).RunStdString(&git.RunOpts{Dir: g.gitPath()})
+ if err != nil {
+ log.Error("Fetch branch from %s failed: %v", pr.Head.CloneURL, err)
+ // We need to continue here so that the Head.Ref is reset and we attempt to set the gitref for the PR
+ // (This last step will likely fail but we should try to do as much as we can.)
+ } else {
+ // Cache the localRef as the Head.Ref - if we've failed we can always try again.
+ g.prHeadCache[pr.Head.CloneURL+":"+pr.Head.Ref] = localRef
+ }
+ }
+
+ // Set the pr.Head.Ref to the localRef
+ pr.Head.Ref = localRef
+
+ // 5. Now if pr.Head.SHA == "" we should recover this to the head of this branch
+ if pr.Head.SHA == "" {
+ headSha, err := g.gitRepo.GetBranchCommitID(localRef)
+ if err != nil {
+ log.Error("unable to get head SHA of local head for PR #%d from %s in %s/%s. Error: %v", pr.Number, pr.Head.Ref, g.repoOwner, g.repoName, err)
+ return nil
+ }
+ pr.Head.SHA = headSha
+ }
+ if pr.Head.SHA != "" {
+ _, _, err = git.NewCommand(g.ctx, "update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.gitPath()})
+ if err != nil {
+ log.Error("unable to set %s as the local head for PR #%d from %s in %s/%s. Error: %v", pr.Head.SHA, pr.Number, pr.Head.Ref, g.repoOwner, g.repoName, err)
+ }
+ }
+
+ return nil
+}
+
+// CreatePullRequests creates pull requests
+func (g *RepositoryDumper) CreatePullRequests(prs ...*base.PullRequest) error {
+ var err error
+ if g.pullrequestFile == nil {
+ if err := os.MkdirAll(g.baseDir, os.ModePerm); err != nil {
+ return err
+ }
+ g.pullrequestFile, err = os.Create(filepath.Join(g.baseDir, "pull_request.yml"))
+ if err != nil {
+ return err
+ }
+ }
+
+ encoder := yaml.NewEncoder(g.pullrequestFile)
+ defer encoder.Close()
+
+ count := 0
+ for i := 0; i < len(prs); i++ {
+ pr := prs[i]
+ if err := g.handlePullRequest(pr); err != nil {
+ log.Error("PR #%d in %s/%s failed - skipping", pr.Number, g.repoOwner, g.repoName, err)
+ continue
+ }
+ prs[count] = pr
+ count++
+ }
+ prs = prs[:count]
+
+ return encoder.Encode(prs)
+}
+
+// CreateReviews create pull request reviews
+func (g *RepositoryDumper) CreateReviews(reviews ...*base.Review) error {
+ reviewsMap := make(map[int64][]any, len(reviews))
+ for _, review := range reviews {
+ reviewsMap[review.IssueIndex] = append(reviewsMap[review.IssueIndex], review)
+ }
+
+ return g.createItems(g.reviewDir(), g.reviewFiles, reviewsMap)
+}
+
+// Rollback removes all changes made so far when the migration has failed
+func (g *RepositoryDumper) Rollback() error {
+ g.Close()
+ return os.RemoveAll(g.baseDir)
+}
+
+// Finish is called when the migration succeeds; nothing needs to be updated for a dump
+func (g *RepositoryDumper) Finish() error {
+ return nil
+}
+
+// DumpRepository dumps a repository to a local directory according to the MigrateOptions
+func DumpRepository(ctx context.Context, baseDir, ownerName string, opts base.MigrateOptions) error {
+ doer, err := user_model.GetAdminUser(ctx)
+ if err != nil {
+ return err
+ }
+ downloader, err := newDownloader(ctx, ownerName, opts)
+ if err != nil {
+ return err
+ }
+ uploader, err := NewRepositoryDumper(ctx, baseDir, ownerName, opts.RepoName, opts)
+ if err != nil {
+ return err
+ }
+
+ if err := migrateRepository(ctx, doer, downloader, uploader, opts, nil); err != nil {
+ if err1 := uploader.Rollback(); err1 != nil {
+ log.Error("rollback failed: %v", err1)
+ }
+ return err
+ }
+ return nil
+}
+
+func updateOptionsUnits(opts *base.MigrateOptions, units []string) error {
+ if len(units) == 0 {
+ opts.Wiki = true
+ opts.Issues = true
+ opts.Milestones = true
+ opts.Labels = true
+ opts.Releases = true
+ opts.Comments = true
+ opts.PullRequests = true
+ opts.ReleaseAssets = true
+ } else {
+ for _, unit := range units {
+ switch strings.ToLower(strings.TrimSpace(unit)) {
+ case "":
+ continue
+ case "wiki":
+ opts.Wiki = true
+ case "issues":
+ opts.Issues = true
+ case "milestones":
+ opts.Milestones = true
+ case "labels":
+ opts.Labels = true
+ case "releases":
+ opts.Releases = true
+ case "release_assets":
+ opts.ReleaseAssets = true
+ case "comments":
+ opts.Comments = true
+ case "pull_requests":
+ opts.PullRequests = true
+ default:
+ return errors.New("invalid unit: " + unit)
+ }
+ }
+ }
+ return nil
+}
+
+// RestoreRepository restores a repository from an on-disk directory
+func RestoreRepository(ctx context.Context, baseDir, ownerName, repoName string, units []string, validation bool) error {
+ doer, err := user_model.GetAdminUser(ctx)
+ if err != nil {
+ return err
+ }
+ uploader := NewGiteaLocalUploader(ctx, doer, ownerName, repoName)
+ downloader, err := NewRepositoryRestorer(ctx, baseDir, ownerName, repoName, validation)
+ if err != nil {
+ return err
+ }
+ opts, err := downloader.getRepoOptions()
+ if err != nil {
+ return err
+ }
+ tp, _ := strconv.Atoi(opts["service_type"])
+
+ migrateOpts := base.MigrateOptions{
+ GitServiceType: structs.GitServiceType(tp),
+ }
+ if err := updateOptionsUnits(&migrateOpts, units); err != nil {
+ return err
+ }
+
+ if err = migrateRepository(ctx, doer, downloader, uploader, migrateOpts, nil); err != nil {
+ if err1 := uploader.Rollback(); err1 != nil {
+ log.Error("rollback failed: %v", err1)
+ }
+ return err
+ }
+ return updateMigrationPosterIDByGitService(ctx, structs.GitServiceType(tp))
+}
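DumpRepository and RestoreRepository are meant as a pair: the dump writes repo.yml plus one YAML file per unit under baseDir/owner/repo, and the restore replays those files through the regular migration pipeline. A hedged sketch, assuming settings and database are already initialized; paths and names are placeholders, and the restore base dir is assumed to be the per-repo dump directory:

package main

import (
	"context"

	base "code.gitea.io/gitea/modules/migration"
	"code.gitea.io/gitea/modules/structs"
	migrations "code.gitea.io/gitea/services/migrations"
)

func dumpAndRestore(ctx context.Context) error {
	opts := base.MigrateOptions{
		CloneAddr:      "https://example.com/owner/repo.git",
		RepoName:       "repo",
		GitServiceType: structs.GiteaService,
		Wiki:           true,
		Issues:         true,
		Comments:       true,
		PullRequests:   true,
	}
	if err := migrations.DumpRepository(ctx, "/tmp/dump", "owner", opts); err != nil {
		return err
	}
	// A nil/empty units slice re-enables every unit (see updateOptionsUnits).
	return migrations.RestoreRepository(ctx, "/tmp/dump/owner/repo", "owner", "repo", nil, false)
}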
diff --git a/services/migrations/error.go b/services/migrations/error.go
new file mode 100644
index 0000000..a592989
--- /dev/null
+++ b/services/migrations/error.go
@@ -0,0 +1,26 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// Copyright 2018 Jonas Franz. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "errors"
+
+ "github.com/google/go-github/v64/github"
+)
+
+// ErrRepoNotCreated is returned when the repository has not been created yet
+var ErrRepoNotCreated = errors.New("repository is not created yet")
+
+// IsRateLimitError returns true if err is a *github.RateLimitError
+func IsRateLimitError(err error) bool {
+ _, ok := err.(*github.RateLimitError)
+ return ok
+}
+
+// IsTwoFactorAuthError returns true if err is a *github.TwoFactorAuthError
+func IsTwoFactorAuthError(err error) bool {
+ _, ok := err.(*github.TwoFactorAuthError)
+ return ok
+}
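Both helpers rely on plain type assertions, so they only recognize errors returned directly by go-github; a wrapped error would need errors.As instead. A small illustration:

package migrations

import (
	"fmt"

	"github.com/google/go-github/v64/github"
)

// rateLimitExample shows the limit of the plain type assertion: a direct
// *github.RateLimitError matches, the same error wrapped with %w does not.
func rateLimitExample() {
	var err error = &github.RateLimitError{}
	fmt.Println(IsRateLimitError(err))                         // true
	fmt.Println(IsRateLimitError(fmt.Errorf("wrap: %w", err))) // false
}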
diff --git a/services/migrations/forgejo_downloader.go b/services/migrations/forgejo_downloader.go
new file mode 100644
index 0000000..25dbb6e
--- /dev/null
+++ b/services/migrations/forgejo_downloader.go
@@ -0,0 +1,20 @@
+// Copyright 2023 The Forgejo Authors
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "code.gitea.io/gitea/modules/structs"
+)
+
+func init() {
+ RegisterDownloaderFactory(&ForgejoDownloaderFactory{})
+}
+
+type ForgejoDownloaderFactory struct {
+ GiteaDownloaderFactory
+}
+
+func (f *ForgejoDownloaderFactory) GitServiceType() structs.GitServiceType {
+ return structs.ForgejoService
+}
diff --git a/services/migrations/forgejo_downloader_test.go b/services/migrations/forgejo_downloader_test.go
new file mode 100644
index 0000000..5bd3755
--- /dev/null
+++ b/services/migrations/forgejo_downloader_test.go
@@ -0,0 +1,16 @@
+// Copyright 2023 The Forgejo Authors
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/modules/structs"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestForgejoDownload(t *testing.T) {
+ require.NotNil(t, getFactoryFromServiceType(structs.ForgejoService))
+}
diff --git a/services/migrations/git.go b/services/migrations/git.go
new file mode 100644
index 0000000..22ffd5e
--- /dev/null
+++ b/services/migrations/git.go
@@ -0,0 +1,48 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "context"
+
+ base "code.gitea.io/gitea/modules/migration"
+)
+
+var _ base.Downloader = &PlainGitDownloader{}
+
+// PlainGitDownloader implements a Downloader interface to clone git from a http/https URL
+type PlainGitDownloader struct {
+ base.NullDownloader
+ ownerName string
+ repoName string
+ remoteURL string
+}
+
+// NewPlainGitDownloader creates a git Downloader
+func NewPlainGitDownloader(ownerName, repoName, remoteURL string) *PlainGitDownloader {
+ return &PlainGitDownloader{
+ ownerName: ownerName,
+ repoName: repoName,
+ remoteURL: remoteURL,
+ }
+}
+
+// SetContext sets the context (ignored by the plain git downloader)
+func (g *PlainGitDownloader) SetContext(ctx context.Context) {
+}
+
+// GetRepoInfo returns the repository information
+func (g *PlainGitDownloader) GetRepoInfo() (*base.Repository, error) {
+ // assemble a base.Repository from the values the downloader was created with
+ return &base.Repository{
+ Owner: g.ownerName,
+ Name: g.repoName,
+ CloneURL: g.remoteURL,
+ }, nil
+}
+
+// GetTopics returns an empty string slice
+func (g PlainGitDownloader) GetTopics() ([]string, error) {
+ return []string{}, nil
+}
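Because PlainGitDownloader embeds base.NullDownloader, everything beyond the bare repository (issues, PRs, releases, ...) falls through to the embedded defaults; only GetRepoInfo and GetTopics carry real data. A minimal sketch with placeholder names:

package migrations

import (
	"context"
	"fmt"
)

func plainGitExample(ctx context.Context) error {
	d := NewPlainGitDownloader("owner", "repo", "https://example.com/owner/repo.git")
	d.SetContext(ctx) // ignored, but kept for interface symmetry

	info, err := d.GetRepoInfo()
	if err != nil {
		return err
	}
	fmt.Println(info.Owner, info.Name, info.CloneURL)
	return nil
}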
diff --git a/services/migrations/gitbucket.go b/services/migrations/gitbucket.go
new file mode 100644
index 0000000..4fe9e30
--- /dev/null
+++ b/services/migrations/gitbucket.go
@@ -0,0 +1,90 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "context"
+ "fmt"
+ "net/url"
+ "strings"
+
+ "code.gitea.io/gitea/modules/log"
+ base "code.gitea.io/gitea/modules/migration"
+ "code.gitea.io/gitea/modules/structs"
+)
+
+var (
+ _ base.Downloader = &GitBucketDownloader{}
+ _ base.DownloaderFactory = &GitBucketDownloaderFactory{}
+)
+
+func init() {
+ RegisterDownloaderFactory(&GitBucketDownloaderFactory{})
+}
+
+// GitBucketDownloaderFactory defines a GitBucket downloader factory
+type GitBucketDownloaderFactory struct{}
+
+// New returns a Downloader for this factory according to the given MigrateOptions
+func (f *GitBucketDownloaderFactory) New(ctx context.Context, opts base.MigrateOptions) (base.Downloader, error) {
+ u, err := url.Parse(opts.CloneAddr)
+ if err != nil {
+ return nil, err
+ }
+
+ fields := strings.Split(u.Path, "/")
+ if len(fields) < 2 {
+ return nil, fmt.Errorf("invalid path: %s", u.Path)
+ }
+ baseURL := u.Scheme + "://" + u.Host + strings.TrimSuffix(strings.Join(fields[:len(fields)-2], "/"), "/git")
+
+ oldOwner := fields[len(fields)-2]
+ oldName := strings.TrimSuffix(fields[len(fields)-1], ".git")
+
+ log.Trace("Create GitBucket downloader. BaseURL: %s RepoOwner: %s RepoName: %s", baseURL, oldOwner, oldName)
+ return NewGitBucketDownloader(ctx, baseURL, opts.AuthUsername, opts.AuthPassword, opts.AuthToken, oldOwner, oldName), nil
+}
+
+// GitServiceType returns the type of git service
+func (f *GitBucketDownloaderFactory) GitServiceType() structs.GitServiceType {
+ return structs.GitBucketService
+}
+
+// GitBucketDownloader implements a Downloader interface to get repository information
+// from GitBucket via GithubDownloader
+type GitBucketDownloader struct {
+ *GithubDownloaderV3
+}
+
+// String implements Stringer
+func (g *GitBucketDownloader) String() string {
+ return fmt.Sprintf("migration from gitbucket server %s %s/%s", g.baseURL, g.repoOwner, g.repoName)
+}
+
+func (g *GitBucketDownloader) LogString() string {
+ if g == nil {
+ return "<GitBucketDownloader nil>"
+ }
+ return fmt.Sprintf("<GitBucketDownloader %s %s/%s>", g.baseURL, g.repoOwner, g.repoName)
+}
+
+// NewGitBucketDownloader creates a GitBucket downloader
+func NewGitBucketDownloader(ctx context.Context, baseURL, userName, password, token, repoOwner, repoName string) *GitBucketDownloader {
+ githubDownloader := NewGithubDownloaderV3(ctx, baseURL, userName, password, token, repoOwner, repoName)
+ // GitBucket 4.40 hard-codes different perPage values internally:
+ // issues, PRs, and most other endpoints use 25, the release page uses 10,
+ // and some APIs do not support paging at all. Using the minimum of these
+ // values has proven to work reliably.
+ githubDownloader.maxPerPage = 10
+ githubDownloader.SkipReactions = true
+ githubDownloader.SkipReviews = true
+ return &GitBucketDownloader{
+ githubDownloader,
+ }
+}
+
+// SupportGetRepoComments returns true if it supports getting repo comments
+func (g *GitBucketDownloader) SupportGetRepoComments() bool {
+ return false
+}
diff --git a/services/migrations/gitea_downloader.go b/services/migrations/gitea_downloader.go
new file mode 100644
index 0000000..272bf02
--- /dev/null
+++ b/services/migrations/gitea_downloader.go
@@ -0,0 +1,703 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/modules/log"
+ base "code.gitea.io/gitea/modules/migration"
+ "code.gitea.io/gitea/modules/structs"
+
+ gitea_sdk "code.gitea.io/sdk/gitea"
+)
+
+var (
+ _ base.Downloader = &GiteaDownloader{}
+ _ base.DownloaderFactory = &GiteaDownloaderFactory{}
+)
+
+func init() {
+ RegisterDownloaderFactory(&GiteaDownloaderFactory{})
+}
+
+// GiteaDownloaderFactory defines a gitea downloader factory
+type GiteaDownloaderFactory struct{}
+
+// New returns a Downloader related to this factory according to the MigrateOptions
+func (f *GiteaDownloaderFactory) New(ctx context.Context, opts base.MigrateOptions) (base.Downloader, error) {
+ u, err := url.Parse(opts.CloneAddr)
+ if err != nil {
+ return nil, err
+ }
+
+ baseURL := u.Scheme + "://" + u.Host
+ repoNameSpace := strings.TrimPrefix(u.Path, "/")
+ repoNameSpace = strings.TrimSuffix(repoNameSpace, ".git")
+
+ path := strings.Split(repoNameSpace, "/")
+ if len(path) < 2 {
+ return nil, fmt.Errorf("invalid path: %s", repoNameSpace)
+ }
+
+ repoPath := strings.Join(path[len(path)-2:], "/")
+ if len(path) > 2 {
+ subPath := strings.Join(path[:len(path)-2], "/")
+ baseURL += "/" + subPath
+ }
+
+ log.Trace("Create gitea downloader. BaseURL: %s RepoName: %s", baseURL, repoNameSpace)
+
+ return NewGiteaDownloader(ctx, baseURL, repoPath, opts.AuthUsername, opts.AuthPassword, opts.AuthToken)
+}
+
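+// For illustration (not part of the original change): a clone address such
+// as "https://gitea.example.com/sub/someuser/somerepo.git" (an assumed
+// example host) yields baseURL "https://gitea.example.com/sub" and
+// repoPath "someuser/somerepo".
+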
+// GitServiceType returns the type of git service
+func (f *GiteaDownloaderFactory) GitServiceType() structs.GitServiceType {
+ return structs.GiteaService
+}
+
+// GiteaDownloader implements a Downloader interface to get repository information
+type GiteaDownloader struct {
+ base.NullDownloader
+ ctx context.Context
+ client *gitea_sdk.Client
+ baseURL string
+ repoOwner string
+ repoName string
+ pagination bool
+ maxPerPage int
+}
+
+// NewGiteaDownloader creates a gitea Downloader via gitea API
+//
+// Use either a username/password or a personal token; the token is preferred.
+// Note: public access only allows very basic access
+func NewGiteaDownloader(ctx context.Context, baseURL, repoPath, username, password, token string) (*GiteaDownloader, error) {
+ giteaClient, err := gitea_sdk.NewClient(
+ baseURL,
+ gitea_sdk.SetToken(token),
+ gitea_sdk.SetBasicAuth(username, password),
+ gitea_sdk.SetContext(ctx),
+ gitea_sdk.SetHTTPClient(NewMigrationHTTPClient()),
+ )
+ if err != nil {
+ log.Error(fmt.Sprintf("Failed to create NewGiteaDownloader for: %s. Error: %v", baseURL, err))
+ return nil, err
+ }
+
+ path := strings.Split(repoPath, "/")
+
+ paginationSupport := true
+ if err = giteaClient.CheckServerVersionConstraint(">=1.12"); err != nil {
+ paginationSupport = false
+ }
+
+ // set small maxPerPage since we can only guess
+ // (default would be 50 but this can differ)
+ maxPerPage := 10
+ // gitea instances >=1.13 can tell us what maximum they have
+ apiConf, _, err := giteaClient.GetGlobalAPISettings()
+ if err != nil {
+ log.Info("Unable to get global API settings. Ignoring these.")
+ log.Debug("giteaClient.GetGlobalAPISettings. Error: %v", err)
+ }
+ if apiConf != nil {
+ maxPerPage = apiConf.MaxResponseItems
+ }
+
+ return &GiteaDownloader{
+ ctx: ctx,
+ client: giteaClient,
+ baseURL: baseURL,
+ repoOwner: path[0],
+ repoName: path[1],
+ pagination: paginationSupport,
+ maxPerPage: maxPerPage,
+ }, nil
+}
+
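+// Usage sketch (hypothetical values, not part of the original change):
+//
+//	d, err := NewGiteaDownloader(ctx, "https://gitea.example.com", "someuser/somerepo", "", "", "some-token")
+//	if err != nil {
+//		return err
+//	}
+//	repoInfo, err := d.GetRepoInfo()
+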
+// SetContext sets the context
+func (g *GiteaDownloader) SetContext(ctx context.Context) {
+ g.ctx = ctx
+}
+
+// String implements Stringer
+func (g *GiteaDownloader) String() string {
+ return fmt.Sprintf("migration from gitea server %s %s/%s", g.baseURL, g.repoOwner, g.repoName)
+}
+
+func (g *GiteaDownloader) LogString() string {
+ if g == nil {
+ return "<GiteaDownloader nil>"
+ }
+ return fmt.Sprintf("<GiteaDownloader %s %s/%s>", g.baseURL, g.repoOwner, g.repoName)
+}
+
+// GetRepoInfo returns the repository information
+func (g *GiteaDownloader) GetRepoInfo() (*base.Repository, error) {
+ if g == nil {
+ return nil, errors.New("error: GiteaDownloader is nil")
+ }
+
+ repo, _, err := g.client.GetRepo(g.repoOwner, g.repoName)
+ if err != nil {
+ return nil, err
+ }
+
+ return &base.Repository{
+ Name: repo.Name,
+ Owner: repo.Owner.UserName,
+ IsPrivate: repo.Private,
+ Description: repo.Description,
+ CloneURL: repo.CloneURL,
+ OriginalURL: repo.HTMLURL,
+ DefaultBranch: repo.DefaultBranch,
+ }, nil
+}
+
+// GetTopics returns gitea topics
+func (g *GiteaDownloader) GetTopics() ([]string, error) {
+ topics, _, err := g.client.ListRepoTopics(g.repoOwner, g.repoName, gitea_sdk.ListRepoTopicsOptions{})
+ return topics, err
+}
+
+// GetMilestones returns milestones
+func (g *GiteaDownloader) GetMilestones() ([]*base.Milestone, error) {
+ milestones := make([]*base.Milestone, 0, g.maxPerPage)
+
+ for i := 1; ; i++ {
+ // make sure gitea can shut down gracefully
+ select {
+ case <-g.ctx.Done():
+ return nil, nil
+ default:
+ }
+
+ ms, _, err := g.client.ListRepoMilestones(g.repoOwner, g.repoName, gitea_sdk.ListMilestoneOption{
+ ListOptions: gitea_sdk.ListOptions{
+ PageSize: g.maxPerPage,
+ Page: i,
+ },
+ State: gitea_sdk.StateAll,
+ })
+ if err != nil {
+ return nil, err
+ }
+
+ for i := range ms {
+ // old gitea instances don't have this information
+ createdAT := time.Time{}
+ var updatedAT *time.Time
+ if ms[i].Closed != nil {
+ createdAT = *ms[i].Closed
+ updatedAT = ms[i].Closed
+ }
+
+ // new gitea instances (>=1.13) do
+ if !ms[i].Created.IsZero() {
+ createdAT = ms[i].Created
+ }
+ if ms[i].Updated != nil && !ms[i].Updated.IsZero() {
+ updatedAT = ms[i].Updated
+ }
+
+ milestones = append(milestones, &base.Milestone{
+ Title: ms[i].Title,
+ Description: ms[i].Description,
+ Deadline: ms[i].Deadline,
+ Created: createdAT,
+ Updated: updatedAT,
+ Closed: ms[i].Closed,
+ State: string(ms[i].State),
+ })
+ }
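+ // servers that do not support pagination return everything in a single
+ // response, so one request is enough; otherwise a short page signals the
+ // last one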
+ if !g.pagination || len(ms) < g.maxPerPage {
+ break
+ }
+ }
+ return milestones, nil
+}
+
+func (g *GiteaDownloader) convertGiteaLabel(label *gitea_sdk.Label) *base.Label {
+ return &base.Label{
+ Name: label.Name,
+ Color: label.Color,
+ Description: label.Description,
+ }
+}
+
+// GetLabels returns labels
+func (g *GiteaDownloader) GetLabels() ([]*base.Label, error) {
+ labels := make([]*base.Label, 0, g.maxPerPage)
+
+ for i := 1; ; i++ {
+ // make sure gitea can shut down gracefully
+ select {
+ case <-g.ctx.Done():
+ return nil, nil
+ default:
+ }
+
+ ls, _, err := g.client.ListRepoLabels(g.repoOwner, g.repoName, gitea_sdk.ListLabelsOptions{ListOptions: gitea_sdk.ListOptions{
+ PageSize: g.maxPerPage,
+ Page: i,
+ }})
+ if err != nil {
+ return nil, err
+ }
+
+ for i := range ls {
+ labels = append(labels, g.convertGiteaLabel(ls[i]))
+ }
+ if !g.pagination || len(ls) < g.maxPerPage {
+ break
+ }
+ }
+ return labels, nil
+}
+
+func (g *GiteaDownloader) convertGiteaRelease(rel *gitea_sdk.Release) *base.Release {
+ r := &base.Release{
+ TagName: rel.TagName,
+ TargetCommitish: rel.Target,
+ Name: rel.Title,
+ Body: rel.Note,
+ Draft: rel.IsDraft,
+ Prerelease: rel.IsPrerelease,
+ PublisherID: rel.Publisher.ID,
+ PublisherName: rel.Publisher.UserName,
+ PublisherEmail: rel.Publisher.Email,
+ Published: rel.PublishedAt,
+ Created: rel.CreatedAt,
+ }
+
+ httpClient := NewMigrationHTTPClient()
+
+ for _, asset := range rel.Attachments {
+ assetID := asset.ID // Don't optimize this; the closure below needs its own local variable
+ assetDownloadURL := asset.DownloadURL
+ size := int(asset.Size)
+ dlCount := int(asset.DownloadCount)
+ r.Assets = append(r.Assets, &base.ReleaseAsset{
+ ID: asset.ID,
+ Name: asset.Name,
+ Size: &size,
+ DownloadCount: &dlCount,
+ Created: asset.Created,
+ DownloadURL: &asset.DownloadURL,
+ DownloadFunc: func() (io.ReadCloser, error) {
+ asset, _, err := g.client.GetReleaseAttachment(g.repoOwner, g.repoName, rel.ID, assetID)
+ if err != nil {
+ return nil, err
+ }
+
+ if !hasBaseURL(assetDownloadURL, g.baseURL) {
+ WarnAndNotice("Unexpected AssetURL for assetID[%d] in %s: %s", assetID, g, assetDownloadURL)
+ return io.NopCloser(strings.NewReader(asset.DownloadURL)), nil
+ }
+
+ // FIXME: for a private download?
+ req, err := http.NewRequest("GET", assetDownloadURL, nil)
+ if err != nil {
+ return nil, err
+ }
+ resp, err := httpClient.Do(req)
+ if err != nil {
+ return nil, err
+ }
+
+ // resp.Body is closed by the uploader
+ return resp.Body, nil
+ },
+ })
+ }
+ return r
+}
+
+// GetReleases returns releases
+func (g *GiteaDownloader) GetReleases() ([]*base.Release, error) {
+ releases := make([]*base.Release, 0, g.maxPerPage)
+
+ for i := 1; ; i++ {
+ // make sure gitea can shut down gracefully
+ select {
+ case <-g.ctx.Done():
+ return nil, nil
+ default:
+ }
+
+ rl, _, err := g.client.ListReleases(g.repoOwner, g.repoName, gitea_sdk.ListReleasesOptions{ListOptions: gitea_sdk.ListOptions{
+ PageSize: g.maxPerPage,
+ Page: i,
+ }})
+ if err != nil {
+ return nil, err
+ }
+
+ for i := range rl {
+ releases = append(releases, g.convertGiteaRelease(rl[i]))
+ }
+ if !g.pagination || len(rl) < g.maxPerPage {
+ break
+ }
+ }
+ return releases, nil
+}
+
+func (g *GiteaDownloader) getIssueReactions(index int64) ([]*base.Reaction, error) {
+ var reactions []*base.Reaction
+ if err := g.client.CheckServerVersionConstraint(">=1.11"); err != nil {
+ log.Info("GiteaDownloader: instance to old, skip getIssueReactions")
+ return reactions, nil
+ }
+ rl, _, err := g.client.GetIssueReactions(g.repoOwner, g.repoName, index)
+ if err != nil {
+ return nil, err
+ }
+
+ for _, reaction := range rl {
+ reactions = append(reactions, &base.Reaction{
+ UserID: reaction.User.ID,
+ UserName: reaction.User.UserName,
+ Content: reaction.Reaction,
+ })
+ }
+ return reactions, nil
+}
+
+func (g *GiteaDownloader) getCommentReactions(commentID int64) ([]*base.Reaction, error) {
+ var reactions []*base.Reaction
+ if err := g.client.CheckServerVersionConstraint(">=1.11"); err != nil {
+ log.Info("GiteaDownloader: instance to old, skip getCommentReactions")
+ return reactions, nil
+ }
+ rl, _, err := g.client.GetIssueCommentReactions(g.repoOwner, g.repoName, commentID)
+ if err != nil {
+ return nil, err
+ }
+
+ for i := range rl {
+ reactions = append(reactions, &base.Reaction{
+ UserID: rl[i].User.ID,
+ UserName: rl[i].User.UserName,
+ Content: rl[i].Reaction,
+ })
+ }
+ return reactions, nil
+}
+
+// GetIssues returns issues according to the given page and perPage
+func (g *GiteaDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, error) {
+ if perPage > g.maxPerPage {
+ perPage = g.maxPerPage
+ }
+ allIssues := make([]*base.Issue, 0, perPage)
+
+ issues, _, err := g.client.ListRepoIssues(g.repoOwner, g.repoName, gitea_sdk.ListIssueOption{
+ ListOptions: gitea_sdk.ListOptions{Page: page, PageSize: perPage},
+ State: gitea_sdk.StateAll,
+ Type: gitea_sdk.IssueTypeIssue,
+ })
+ if err != nil {
+ return nil, false, fmt.Errorf("error while listing issues: %w", err)
+ }
+ for _, issue := range issues {
+ labels := make([]*base.Label, 0, len(issue.Labels))
+ for i := range issue.Labels {
+ labels = append(labels, g.convertGiteaLabel(issue.Labels[i]))
+ }
+
+ var milestone string
+ if issue.Milestone != nil {
+ milestone = issue.Milestone.Title
+ }
+
+ reactions, err := g.getIssueReactions(issue.Index)
+ if err != nil {
+ WarnAndNotice("Unable to load reactions during migrating issue #%d in %s. Error: %v", issue.Index, g, err)
+ }
+
+ var assignees []string
+ for i := range issue.Assignees {
+ assignees = append(assignees, issue.Assignees[i].UserName)
+ }
+
+ allIssues = append(allIssues, &base.Issue{
+ Title: issue.Title,
+ Number: issue.Index,
+ PosterID: issue.Poster.ID,
+ PosterName: issue.Poster.UserName,
+ PosterEmail: issue.Poster.Email,
+ Content: issue.Body,
+ Milestone: milestone,
+ State: string(issue.State),
+ Created: issue.Created,
+ Updated: issue.Updated,
+ Closed: issue.Closed,
+ Reactions: reactions,
+ Labels: labels,
+ Assignees: assignees,
+ IsLocked: issue.IsLocked,
+ ForeignIndex: issue.Index,
+ })
+ }
+
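+ // a short page signals the end; servers without pagination support are
+ // only done once an empty page comes back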
+ isEnd := len(issues) < perPage
+ if !g.pagination {
+ isEnd = len(issues) == 0
+ }
+ return allIssues, isEnd, nil
+}
+
+// GetComments returns the comments of the given commentable
+func (g *GiteaDownloader) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) {
+ allComments := make([]*base.Comment, 0, g.maxPerPage)
+
+ for i := 1; ; i++ {
+ // make sure gitea can shut down gracefully
+ select {
+ case <-g.ctx.Done():
+ return nil, false, nil
+ default:
+ }
+
+ comments, _, err := g.client.ListIssueComments(g.repoOwner, g.repoName, commentable.GetForeignIndex(), gitea_sdk.ListIssueCommentOptions{ListOptions: gitea_sdk.ListOptions{
+ PageSize: g.maxPerPage,
+ Page: i,
+ }})
+ if err != nil {
+ return nil, false, fmt.Errorf("error while listing comments for issue #%d. Error: %w", commentable.GetForeignIndex(), err)
+ }
+
+ for _, comment := range comments {
+ reactions, err := g.getCommentReactions(comment.ID)
+ if err != nil {
+ WarnAndNotice("Unable to load comment reactions during migrating issue #%d for comment %d in %s. Error: %v", commentable.GetForeignIndex(), comment.ID, g, err)
+ }
+
+ allComments = append(allComments, &base.Comment{
+ IssueIndex: commentable.GetLocalIndex(),
+ Index: comment.ID,
+ PosterID: comment.Poster.ID,
+ PosterName: comment.Poster.UserName,
+ PosterEmail: comment.Poster.Email,
+ Content: comment.Body,
+ Created: comment.Created,
+ Updated: comment.Updated,
+ Reactions: reactions,
+ })
+ }
+
+ if !g.pagination || len(comments) < g.maxPerPage {
+ break
+ }
+ }
+ return allComments, true, nil
+}
+
+// GetPullRequests returns pull requests according to the given page and perPage
+func (g *GiteaDownloader) GetPullRequests(page, perPage int) ([]*base.PullRequest, bool, error) {
+ if perPage > g.maxPerPage {
+ perPage = g.maxPerPage
+ }
+ allPRs := make([]*base.PullRequest, 0, perPage)
+
+ prs, _, err := g.client.ListRepoPullRequests(g.repoOwner, g.repoName, gitea_sdk.ListPullRequestsOptions{
+ ListOptions: gitea_sdk.ListOptions{
+ Page: page,
+ PageSize: perPage,
+ },
+ State: gitea_sdk.StateAll,
+ })
+ if err != nil {
+ return nil, false, fmt.Errorf("error while listing pull requests (page: %d, pagesize: %d). Error: %w", page, perPage, err)
+ }
+ for _, pr := range prs {
+ var milestone string
+ if pr.Milestone != nil {
+ milestone = pr.Milestone.Title
+ }
+
+ labels := make([]*base.Label, 0, len(pr.Labels))
+ for i := range pr.Labels {
+ labels = append(labels, g.convertGiteaLabel(pr.Labels[i]))
+ }
+
+ var (
+ headUserName string
+ headRepoName string
+ headCloneURL string
+ headRef string
+ headSHA string
+ )
+ if pr.Head != nil {
+ if pr.Head.Repository != nil {
+ headUserName = pr.Head.Repository.Owner.UserName
+ headRepoName = pr.Head.Repository.Name
+ headCloneURL = pr.Head.Repository.CloneURL
+ }
+ headSHA = pr.Head.Sha
+ headRef = pr.Head.Ref
+ }
+
+ var mergeCommitSHA string
+ if pr.MergedCommitID != nil {
+ mergeCommitSHA = *pr.MergedCommitID
+ }
+
+ reactions, err := g.getIssueReactions(pr.Index)
+ if err != nil {
+ WarnAndNotice("Unable to load reactions during migrating pull #%d in %s. Error: %v", pr.Index, g, err)
+ }
+
+ var assignees []string
+ for i := range pr.Assignees {
+ assignees = append(assignees, pr.Assignees[i].UserName)
+ }
+
+ createdAt := time.Time{}
+ if pr.Created != nil {
+ createdAt = *pr.Created
+ }
+ updatedAt := time.Time{}
+ if pr.Updated != nil {
+ updatedAt = *pr.Updated
+ }
+
+ closedAt := pr.Closed
+ if pr.Merged != nil && closedAt == nil {
+ closedAt = pr.Merged
+ }
+
+ allPRs = append(allPRs, &base.PullRequest{
+ Title: pr.Title,
+ Number: pr.Index,
+ PosterID: pr.Poster.ID,
+ PosterName: pr.Poster.UserName,
+ PosterEmail: pr.Poster.Email,
+ Content: pr.Body,
+ State: string(pr.State),
+ Created: createdAt,
+ Updated: updatedAt,
+ Closed: closedAt,
+ Labels: labels,
+ Milestone: milestone,
+ Reactions: reactions,
+ Assignees: assignees,
+ Merged: pr.HasMerged,
+ MergedTime: pr.Merged,
+ MergeCommitSHA: mergeCommitSHA,
+ IsLocked: pr.IsLocked,
+ PatchURL: pr.PatchURL,
+ Head: base.PullRequestBranch{
+ Ref: headRef,
+ SHA: headSHA,
+ RepoName: headRepoName,
+ OwnerName: headUserName,
+ CloneURL: headCloneURL,
+ },
+ Base: base.PullRequestBranch{
+ Ref: pr.Base.Ref,
+ SHA: pr.Base.Sha,
+ RepoName: g.repoName,
+ OwnerName: g.repoOwner,
+ },
+ ForeignIndex: pr.Index,
+ })
+ // SECURITY: Ensure that the PR is safe
+ _ = CheckAndEnsureSafePR(allPRs[len(allPRs)-1], g.baseURL, g)
+ }
+
+ isEnd := len(prs) < perPage
+ if !g.pagination {
+ isEnd = len(prs) == 0
+ }
+ return allPRs, isEnd, nil
+}
+
+// GetReviews returns pull request reviews
+func (g *GiteaDownloader) GetReviews(reviewable base.Reviewable) ([]*base.Review, error) {
+ if err := g.client.CheckServerVersionConstraint(">=1.12"); err != nil {
+ log.Info("GiteaDownloader: instance to old, skip GetReviews")
+ return nil, nil
+ }
+
+ allReviews := make([]*base.Review, 0, g.maxPerPage)
+
+ for i := 1; ; i++ {
+ // make sure gitea can shut down gracefully
+ select {
+ case <-g.ctx.Done():
+ return nil, nil
+ default:
+ }
+
+ prl, _, err := g.client.ListPullReviews(g.repoOwner, g.repoName, reviewable.GetForeignIndex(), gitea_sdk.ListPullReviewsOptions{ListOptions: gitea_sdk.ListOptions{
+ Page: i,
+ PageSize: g.maxPerPage,
+ }})
+ if err != nil {
+ return nil, err
+ }
+
+ for _, pr := range prl {
+ if pr.Reviewer == nil {
+ // Presumably this is a team review, which we cannot migrate at present; we have to skip it, as otherwise the review would be mapped onto an incorrect user.
+ // TODO: handle team reviews
+ continue
+ }
+
+ rcl, _, err := g.client.ListPullReviewComments(g.repoOwner, g.repoName, reviewable.GetForeignIndex(), pr.ID)
+ if err != nil {
+ return nil, err
+ }
+ var reviewComments []*base.ReviewComment
+ for i := range rcl {
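+ // review comments on the old side of the diff are encoded as negative
+ // line numbers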
+ line := int(rcl[i].LineNum)
+ if rcl[i].OldLineNum > 0 {
+ line = int(rcl[i].OldLineNum) * -1
+ }
+
+ reviewComments = append(reviewComments, &base.ReviewComment{
+ ID: rcl[i].ID,
+ Content: rcl[i].Body,
+ TreePath: rcl[i].Path,
+ DiffHunk: rcl[i].DiffHunk,
+ Line: line,
+ CommitID: rcl[i].CommitID,
+ PosterID: rcl[i].Reviewer.ID,
+ CreatedAt: rcl[i].Created,
+ UpdatedAt: rcl[i].Updated,
+ })
+ }
+
+ review := &base.Review{
+ ID: pr.ID,
+ IssueIndex: reviewable.GetLocalIndex(),
+ ReviewerID: pr.Reviewer.ID,
+ ReviewerName: pr.Reviewer.UserName,
+ Official: pr.Official,
+ CommitID: pr.CommitID,
+ Content: pr.Body,
+ CreatedAt: pr.Submitted,
+ State: string(pr.State),
+ Comments: reviewComments,
+ }
+
+ allReviews = append(allReviews, review)
+ }
+
+ if len(prl) < g.maxPerPage {
+ break
+ }
+ }
+ return allReviews, nil
+}
diff --git a/services/migrations/gitea_downloader_test.go b/services/migrations/gitea_downloader_test.go
new file mode 100644
index 0000000..28a52c2
--- /dev/null
+++ b/services/migrations/gitea_downloader_test.go
@@ -0,0 +1,314 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "context"
+ "net/http"
+ "os"
+ "sort"
+ "testing"
+ "time"
+
+ base "code.gitea.io/gitea/modules/migration"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestGiteaDownloadRepo(t *testing.T) {
+ // Skip tests if Gitea token is not found
+ giteaToken := os.Getenv("GITEA_TOKEN")
+ if giteaToken == "" {
+ t.Skip("skipped test because GITEA_TOKEN was not in the environment")
+ }
+
+ resp, err := http.Get("https://gitea.com/gitea")
+ if err != nil || resp.StatusCode != http.StatusOK {
+ t.Skipf("Can't reach https://gitea.com, skipping %s", t.Name())
+ }
+
+ downloader, err := NewGiteaDownloader(context.Background(), "https://gitea.com", "gitea/test_repo", "", "", giteaToken)
+ if downloader == nil {
+ t.Fatal("NewGitlabDownloader is nil")
+ }
+ require.NoError(t, err, "NewGitlabDownloader error occur")
+
+ repo, err := downloader.GetRepoInfo()
+ require.NoError(t, err)
+ assertRepositoryEqual(t, &base.Repository{
+ Name: "test_repo",
+ Owner: "gitea",
+ IsPrivate: false,
+ Description: "Test repository for testing migration from gitea to gitea",
+ CloneURL: "https://gitea.com/gitea/test_repo.git",
+ OriginalURL: "https://gitea.com/gitea/test_repo",
+ DefaultBranch: "master",
+ }, repo)
+
+ topics, err := downloader.GetTopics()
+ require.NoError(t, err)
+ sort.Strings(topics)
+ assert.EqualValues(t, []string{"ci", "gitea", "migration", "test"}, topics)
+
+ labels, err := downloader.GetLabels()
+ require.NoError(t, err)
+ assertLabelsEqual(t, []*base.Label{
+ {
+ Name: "Bug",
+ Color: "e11d21",
+ },
+ {
+ Name: "Enhancement",
+ Color: "207de5",
+ },
+ {
+ Name: "Feature",
+ Color: "0052cc",
+ Description: "a feature request",
+ },
+ {
+ Name: "Invalid",
+ Color: "d4c5f9",
+ },
+ {
+ Name: "Question",
+ Color: "fbca04",
+ },
+ {
+ Name: "Valid",
+ Color: "53e917",
+ },
+ }, labels)
+
+ milestones, err := downloader.GetMilestones()
+ require.NoError(t, err)
+ assertMilestonesEqual(t, []*base.Milestone{
+ {
+ Title: "V2 Finalize",
+ Created: time.Unix(0, 0),
+ Deadline: timePtr(time.Unix(1599263999, 0)),
+ Updated: timePtr(time.Unix(0, 0)),
+ State: "open",
+ },
+ {
+ Title: "V1",
+ Description: "Generate Content",
+ Created: time.Unix(0, 0),
+ Updated: timePtr(time.Unix(0, 0)),
+ Closed: timePtr(time.Unix(1598985406, 0)),
+ State: "closed",
+ },
+ }, milestones)
+
+ releases, err := downloader.GetReleases()
+ require.NoError(t, err)
+ assertReleasesEqual(t, []*base.Release{
+ {
+ Name: "Second Release",
+ TagName: "v2-rc1",
+ TargetCommitish: "master",
+ Body: "this repo has:\r\n* reactions\r\n* wiki\r\n* issues (open/closed)\r\n* pulls (open/closed/merged) (external/internal)\r\n* pull reviews\r\n* projects\r\n* milestones\r\n* labels\r\n* releases\r\n\r\nto test migration against",
+ Draft: false,
+ Prerelease: true,
+ Created: time.Date(2020, 9, 1, 18, 2, 43, 0, time.UTC),
+ Published: time.Date(2020, 9, 1, 18, 2, 43, 0, time.UTC),
+ PublisherID: 689,
+ PublisherName: "6543",
+ PublisherEmail: "6543@obermui.de",
+ },
+ {
+ Name: "First Release",
+ TagName: "V1",
+ TargetCommitish: "master",
+ Body: "as title",
+ Draft: false,
+ Prerelease: false,
+ Created: time.Date(2020, 9, 1, 17, 30, 32, 0, time.UTC),
+ Published: time.Date(2020, 9, 1, 17, 30, 32, 0, time.UTC),
+ PublisherID: 689,
+ PublisherName: "6543",
+ PublisherEmail: "6543@obermui.de",
+ },
+ }, releases)
+
+ issues, isEnd, err := downloader.GetIssues(1, 50)
+ require.NoError(t, err)
+ assert.True(t, isEnd)
+ assert.Len(t, issues, 7)
+ assert.EqualValues(t, "open", issues[0].State)
+
+ issues, isEnd, err = downloader.GetIssues(3, 2)
+ require.NoError(t, err)
+ assert.False(t, isEnd)
+
+ assertIssuesEqual(t, []*base.Issue{
+ {
+ Number: 4,
+ Title: "what is this repo about?",
+ Content: "",
+ Milestone: "V1",
+ PosterID: -1,
+ PosterName: "Ghost",
+ PosterEmail: "",
+ State: "closed",
+ IsLocked: true,
+ Created: time.Unix(1598975321, 0),
+ Updated: time.Unix(1598975400, 0),
+ Labels: []*base.Label{{
+ Name: "Question",
+ Color: "fbca04",
+ Description: "",
+ }},
+ Reactions: []*base.Reaction{
+ {
+ UserID: 689,
+ UserName: "6543",
+ Content: "gitea",
+ },
+ {
+ UserID: 689,
+ UserName: "6543",
+ Content: "laugh",
+ },
+ },
+ Closed: timePtr(time.Date(2020, 9, 1, 15, 49, 34, 0, time.UTC)),
+ },
+ {
+ Number: 2,
+ Title: "Spam",
+ Content: ":(",
+ Milestone: "",
+ PosterID: 689,
+ PosterName: "6543",
+ PosterEmail: "6543@obermui.de",
+ State: "closed",
+ IsLocked: false,
+ Created: time.Unix(1598919780, 0),
+ Updated: time.Unix(1598969497, 0),
+ Labels: []*base.Label{{
+ Name: "Invalid",
+ Color: "d4c5f9",
+ Description: "",
+ }},
+ Closed: timePtr(time.Unix(1598969497, 0)),
+ },
+ }, issues)
+
+ comments, _, err := downloader.GetComments(&base.Issue{Number: 4, ForeignIndex: 4})
+ require.NoError(t, err)
+ assertCommentsEqual(t, []*base.Comment{
+ {
+ IssueIndex: 4,
+ PosterID: 689,
+ PosterName: "6543",
+ PosterEmail: "6543@obermui.de",
+ Created: time.Unix(1598975370, 0),
+ Updated: time.Unix(1599070865, 0),
+ Content: "a really good question!\n\nIt is the used as TESTSET for gitea2gitea repo migration function",
+ },
+ {
+ IssueIndex: 4,
+ PosterID: -1,
+ PosterName: "Ghost",
+ PosterEmail: "",
+ Created: time.Unix(1598975393, 0),
+ Updated: time.Unix(1598975393, 0),
+ Content: "Oh!",
+ },
+ }, comments)
+
+ prs, isEnd, err := downloader.GetPullRequests(1, 50)
+ require.NoError(t, err)
+ assert.True(t, isEnd)
+ assert.Len(t, prs, 6)
+ prs, isEnd, err = downloader.GetPullRequests(1, 3)
+ require.NoError(t, err)
+ assert.False(t, isEnd)
+ assert.Len(t, prs, 3)
+ assertPullRequestEqual(t, &base.PullRequest{
+ Number: 12,
+ PosterID: 689,
+ PosterName: "6543",
+ PosterEmail: "6543@obermui.de",
+ Title: "Dont Touch",
+ Content: "\r\nadd dont touch note",
+ Milestone: "V2 Finalize",
+ State: "closed",
+ IsLocked: false,
+ Created: time.Unix(1598982759, 0),
+ Updated: time.Unix(1599023425, 0),
+ Closed: timePtr(time.Unix(1598982934, 0)),
+ Assignees: []string{"techknowlogick"},
+ Base: base.PullRequestBranch{
+ CloneURL: "",
+ Ref: "master",
+ SHA: "827aa28a907853e5ddfa40c8f9bc52471a2685fd",
+ RepoName: "test_repo",
+ OwnerName: "gitea",
+ },
+ Head: base.PullRequestBranch{
+ CloneURL: "https://gitea.com/6543-forks/test_repo.git",
+ Ref: "refs/pull/12/head",
+ SHA: "b6ab5d9ae000b579a5fff03f92c486da4ddf48b6",
+ RepoName: "test_repo",
+ OwnerName: "6543-forks",
+ },
+ Merged: true,
+ MergedTime: timePtr(time.Unix(1598982934, 0)),
+ MergeCommitSHA: "827aa28a907853e5ddfa40c8f9bc52471a2685fd",
+ PatchURL: "https://gitea.com/gitea/test_repo/pulls/12.patch",
+ }, prs[1])
+
+ reviews, err := downloader.GetReviews(&base.Issue{Number: 7, ForeignIndex: 7})
+ require.NoError(t, err)
+ assertReviewsEqual(t, []*base.Review{
+ {
+ ID: 1770,
+ IssueIndex: 7,
+ ReviewerID: 689,
+ ReviewerName: "6543",
+ CommitID: "187ece0cb6631e2858a6872e5733433bb3ca3b03",
+ CreatedAt: time.Date(2020, 9, 1, 16, 12, 58, 0, time.UTC),
+ State: "COMMENT", // TODO
+ Comments: []*base.ReviewComment{
+ {
+ ID: 116561,
+ InReplyTo: 0,
+ Content: "is one `\\newline` to less?",
+ TreePath: "README.md",
+ DiffHunk: "@@ -2,3 +2,3 @@\n \n-Test repository for testing migration from gitea 2 gitea\n\\ No newline at end of file\n+Test repository for testing migration from gitea 2 gitea",
+ Position: 0,
+ Line: 4,
+ CommitID: "187ece0cb6631e2858a6872e5733433bb3ca3b03",
+ PosterID: 689,
+ Reactions: nil,
+ CreatedAt: time.Date(2020, 9, 1, 16, 12, 58, 0, time.UTC),
+ UpdatedAt: time.Date(2020, 9, 1, 16, 12, 58, 0, time.UTC),
+ },
+ },
+ },
+ {
+ ID: 1771,
+ IssueIndex: 7,
+ ReviewerID: 9,
+ ReviewerName: "techknowlogick",
+ CommitID: "187ece0cb6631e2858a6872e5733433bb3ca3b03",
+ CreatedAt: time.Date(2020, 9, 1, 17, 6, 47, 0, time.UTC),
+ State: "REQUEST_CHANGES", // TODO
+ Content: "I think this needs some changes",
+ },
+ {
+ ID: 1772,
+ IssueIndex: 7,
+ ReviewerID: 9,
+ ReviewerName: "techknowlogick",
+ CommitID: "187ece0cb6631e2858a6872e5733433bb3ca3b03",
+ CreatedAt: time.Date(2020, 9, 1, 17, 19, 51, 0, time.UTC),
+ State: base.ReviewStateApproved,
+ Official: true,
+ Content: "looks good",
+ },
+ }, reviews)
+}
diff --git a/services/migrations/gitea_uploader.go b/services/migrations/gitea_uploader.go
new file mode 100644
index 0000000..3ba4ca2
--- /dev/null
+++ b/services/migrations/gitea_uploader.go
@@ -0,0 +1,1031 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// Copyright 2018 Jonas Franz. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+ "strconv"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ base_module "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/label"
+ "code.gitea.io/gitea/modules/log"
+ base "code.gitea.io/gitea/modules/migration"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/uri"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/pull"
+ repo_service "code.gitea.io/gitea/services/repository"
+
+ "github.com/google/uuid"
+)
+
+var _ base.Uploader = &GiteaLocalUploader{}
+
+// GiteaLocalUploader implements an Uploader to gitea sites
+type GiteaLocalUploader struct {
+ ctx context.Context
+ doer *user_model.User
+ repoOwner string
+ repoName string
+ repo *repo_model.Repository
+ labels map[string]*issues_model.Label
+ milestones map[string]int64
+ issues map[int64]*issues_model.Issue
+ gitRepo *git.Repository
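+ // prHeadCache maps "<cloneURL>:" to a remote name and
+ // "<cloneURL>:<ref>" to the local ref the head was fetched into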
+ prHeadCache map[string]string
+ sameApp bool
+ userMap map[int64]int64 // external user id mapping to user id
+ prCache map[int64]*issues_model.PullRequest
+ gitServiceType structs.GitServiceType
+}
+
+// NewGiteaLocalUploader creates a gitea Uploader via the gitea API v1
+func NewGiteaLocalUploader(ctx context.Context, doer *user_model.User, repoOwner, repoName string) *GiteaLocalUploader {
+ return &GiteaLocalUploader{
+ ctx: ctx,
+ doer: doer,
+ repoOwner: repoOwner,
+ repoName: repoName,
+ labels: make(map[string]*issues_model.Label),
+ milestones: make(map[string]int64),
+ issues: make(map[int64]*issues_model.Issue),
+ prHeadCache: make(map[string]string),
+ userMap: make(map[int64]int64),
+ prCache: make(map[int64]*issues_model.PullRequest),
+ }
+}
+
+// MaxBatchInsertSize returns the table's max batch insert size
+func (g *GiteaLocalUploader) MaxBatchInsertSize(tp string) int {
+ switch tp {
+ case "issue":
+ return db.MaxBatchInsertSize(new(issues_model.Issue))
+ case "comment":
+ return db.MaxBatchInsertSize(new(issues_model.Comment))
+ case "milestone":
+ return db.MaxBatchInsertSize(new(issues_model.Milestone))
+ case "label":
+ return db.MaxBatchInsertSize(new(issues_model.Label))
+ case "release":
+ return db.MaxBatchInsertSize(new(repo_model.Release))
+ case "pullrequest":
+ return db.MaxBatchInsertSize(new(issues_model.PullRequest))
+ }
+ return 10
+}
+
+// CreateRepo creates a repository
+func (g *GiteaLocalUploader) CreateRepo(repo *base.Repository, opts base.MigrateOptions) error {
+ owner, err := user_model.GetUserByName(g.ctx, g.repoOwner)
+ if err != nil {
+ return err
+ }
+
+ var r *repo_model.Repository
+ if opts.MigrateToRepoID <= 0 {
+ r, err = repo_service.CreateRepositoryDirectly(g.ctx, g.doer, owner, repo_service.CreateRepoOptions{
+ Name: g.repoName,
+ Description: repo.Description,
+ OriginalURL: repo.OriginalURL,
+ GitServiceType: opts.GitServiceType,
+ IsPrivate: opts.Private || setting.Repository.ForcePrivate,
+ IsMirror: opts.Mirror,
+ Status: repo_model.RepositoryBeingMigrated,
+ })
+ } else {
+ r, err = repo_model.GetRepositoryByID(g.ctx, opts.MigrateToRepoID)
+ }
+ if err != nil {
+ return err
+ }
+ r.DefaultBranch = repo.DefaultBranch
+ r.Description = repo.Description
+
+ r, err = repo_service.MigrateRepositoryGitData(g.ctx, owner, r, base.MigrateOptions{
+ RepoName: g.repoName,
+ Description: repo.Description,
+ OriginalURL: repo.OriginalURL,
+ GitServiceType: opts.GitServiceType,
+ Mirror: repo.IsMirror,
+ LFS: opts.LFS,
+ LFSEndpoint: opts.LFSEndpoint,
+ CloneAddr: repo.CloneURL, // SECURITY: we will assume that this has already been checked
+ Private: repo.IsPrivate,
+ Wiki: opts.Wiki,
+ Releases: opts.Releases, // if didn't get releases, then sync them from tags
+ MirrorInterval: opts.MirrorInterval,
+ }, NewMigrationHTTPTransport())
+
+ g.sameApp = strings.HasPrefix(repo.OriginalURL, setting.AppURL)
+ g.repo = r
+ if err != nil {
+ return err
+ }
+ g.gitRepo, err = gitrepo.OpenRepository(g.ctx, g.repo)
+ if err != nil {
+ return err
+ }
+
+ // detect object format from git repository and update to database
+ objectFormat, err := g.gitRepo.GetObjectFormat()
+ if err != nil {
+ return err
+ }
+ g.repo.ObjectFormatName = objectFormat.Name()
+ return repo_model.UpdateRepositoryCols(g.ctx, g.repo, "object_format_name")
+}
+
+// Close closes this uploader
+func (g *GiteaLocalUploader) Close() {
+ if g.gitRepo != nil {
+ g.gitRepo.Close()
+ }
+}
+
+// CreateTopics creates topics
+func (g *GiteaLocalUploader) CreateTopics(topics ...string) error {
+ // Ignore topics too long for the db
+ c := 0
+ for _, topic := range topics {
+ if len(topic) > 50 {
+ continue
+ }
+
+ topics[c] = topic
+ c++
+ }
+ topics = topics[:c]
+ return repo_model.SaveTopics(g.ctx, g.repo.ID, topics...)
+}
+
+// CreateMilestones creates milestones
+func (g *GiteaLocalUploader) CreateMilestones(milestones ...*base.Milestone) error {
+ mss := make([]*issues_model.Milestone, 0, len(milestones))
+ for _, milestone := range milestones {
+ var deadline timeutil.TimeStamp
+ if milestone.Deadline != nil {
+ deadline = timeutil.TimeStamp(milestone.Deadline.Unix())
+ }
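+ // a zero deadline is stored as the far-future sentinel 9999-01-01,
+ // which is how gitea represents "no deadline"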
+ if deadline == 0 {
+ deadline = timeutil.TimeStamp(time.Date(9999, 1, 1, 0, 0, 0, 0, setting.DefaultUILocation).Unix())
+ }
+
+ if milestone.Created.IsZero() {
+ if milestone.Updated != nil {
+ milestone.Created = *milestone.Updated
+ } else if milestone.Deadline != nil {
+ milestone.Created = *milestone.Deadline
+ } else {
+ milestone.Created = time.Now()
+ }
+ }
+ if milestone.Updated == nil || milestone.Updated.IsZero() {
+ milestone.Updated = &milestone.Created
+ }
+
+ ms := issues_model.Milestone{
+ RepoID: g.repo.ID,
+ Name: milestone.Title,
+ Content: milestone.Description,
+ IsClosed: milestone.State == "closed",
+ CreatedUnix: timeutil.TimeStamp(milestone.Created.Unix()),
+ UpdatedUnix: timeutil.TimeStamp(milestone.Updated.Unix()),
+ DeadlineUnix: deadline,
+ }
+ if ms.IsClosed && milestone.Closed != nil {
+ ms.ClosedDateUnix = timeutil.TimeStamp(milestone.Closed.Unix())
+ }
+ mss = append(mss, &ms)
+ }
+
+ err := issues_model.InsertMilestones(g.ctx, mss...)
+ if err != nil {
+ return err
+ }
+
+ for _, ms := range mss {
+ g.milestones[ms.Name] = ms.ID
+ }
+ return nil
+}
+
+// CreateLabels creates labels
+func (g *GiteaLocalUploader) CreateLabels(labels ...*base.Label) error {
+ lbs := make([]*issues_model.Label, 0, len(labels))
+ for _, l := range labels {
+ if color, err := label.NormalizeColor(l.Color); err != nil {
+ log.Warn("Invalid label color: #%s for label: %s in migration to %s/%s", l.Color, l.Name, g.repoOwner, g.repoName)
+ l.Color = "#ffffff"
+ } else {
+ l.Color = color
+ }
+
+ lbs = append(lbs, &issues_model.Label{
+ RepoID: g.repo.ID,
+ Name: l.Name,
+ Exclusive: l.Exclusive,
+ Description: l.Description,
+ Color: l.Color,
+ })
+ }
+
+ err := issues_model.NewLabels(g.ctx, lbs...)
+ if err != nil {
+ return err
+ }
+ for _, lb := range lbs {
+ g.labels[lb.Name] = lb
+ }
+ return nil
+}
+
+// CreateReleases creates releases
+func (g *GiteaLocalUploader) CreateReleases(releases ...*base.Release) error {
+ rels := make([]*repo_model.Release, 0, len(releases))
+ for _, release := range releases {
+ if release.Created.IsZero() {
+ if !release.Published.IsZero() {
+ release.Created = release.Published
+ } else {
+ release.Created = time.Now()
+ }
+ }
+
+ // SECURITY: The TagName must be a valid git ref
+ if release.TagName != "" && !git.IsValidRefPattern(release.TagName) {
+ release.TagName = ""
+ }
+
+ // SECURITY: The TargetCommitish must be a valid git ref
+ if release.TargetCommitish != "" && !git.IsValidRefPattern(release.TargetCommitish) {
+ release.TargetCommitish = ""
+ }
+
+ rel := repo_model.Release{
+ RepoID: g.repo.ID,
+ TagName: release.TagName,
+ LowerTagName: strings.ToLower(release.TagName),
+ Target: release.TargetCommitish,
+ Title: release.Name,
+ Note: release.Body,
+ IsDraft: release.Draft,
+ IsPrerelease: release.Prerelease,
+ IsTag: false,
+ CreatedUnix: timeutil.TimeStamp(release.Created.Unix()),
+ }
+
+ if err := g.remapUser(release, &rel); err != nil {
+ return err
+ }
+
+ // calc NumCommits if possible
+ if rel.TagName != "" {
+ commit, err := g.gitRepo.GetTagCommit(rel.TagName)
+ if !git.IsErrNotExist(err) {
+ if err != nil {
+ return fmt.Errorf("GetTagCommit[%v]: %w", rel.TagName, err)
+ }
+ rel.Sha1 = commit.ID.String()
+ rel.NumCommits, err = commit.CommitsCount()
+ if err != nil {
+ return fmt.Errorf("CommitsCount: %w", err)
+ }
+ }
+ }
+
+ for _, asset := range release.Assets {
+ if asset.Created.IsZero() {
+ if !asset.Updated.IsZero() {
+ asset.Created = asset.Updated
+ } else {
+ asset.Created = release.Created
+ }
+ }
+ attach := repo_model.Attachment{
+ UUID: uuid.New().String(),
+ Name: asset.Name,
+ DownloadCount: int64(*asset.DownloadCount),
+ Size: int64(*asset.Size),
+ CreatedUnix: timeutil.TimeStamp(asset.Created.Unix()),
+ }
+
+ // SECURITY: We cannot check the DownloadURL and DownloadFunc are safe here
+ // ... we must assume that they are safe and simply download the attachment
+ err := func() error {
+ // asset.DownloadURL may be a local file
+ var rc io.ReadCloser
+ var err error
+ if asset.DownloadFunc != nil {
+ rc, err = asset.DownloadFunc()
+ if err != nil {
+ return err
+ }
+ } else if asset.DownloadURL != nil {
+ rc, err = uri.Open(*asset.DownloadURL)
+ if err != nil {
+ return err
+ }
+ }
+ if rc == nil {
+ return nil
+ }
+ _, err = storage.Attachments.Save(attach.RelativePath(), rc, int64(*asset.Size))
+ rc.Close()
+ return err
+ }()
+ if err != nil {
+ return err
+ }
+
+ rel.Attachments = append(rel.Attachments, &attach)
+ }
+
+ rels = append(rels, &rel)
+ }
+
+ return repo_model.InsertReleases(g.ctx, rels...)
+}
+
+// SyncTags syncs releases with tags in the database
+func (g *GiteaLocalUploader) SyncTags() error {
+ return repo_module.SyncReleasesWithTags(g.ctx, g.repo, g.gitRepo)
+}
+
+// CreateIssues creates issues
+func (g *GiteaLocalUploader) CreateIssues(issues ...*base.Issue) error {
+ iss := make([]*issues_model.Issue, 0, len(issues))
+ for _, issue := range issues {
+ var labels []*issues_model.Label
+ for _, label := range issue.Labels {
+ lb, ok := g.labels[label.Name]
+ if ok {
+ labels = append(labels, lb)
+ }
+ }
+
+ milestoneID := g.milestones[issue.Milestone]
+
+ if issue.Created.IsZero() {
+ if issue.Closed != nil {
+ issue.Created = *issue.Closed
+ } else {
+ issue.Created = time.Now()
+ }
+ }
+ if issue.Updated.IsZero() {
+ if issue.Closed != nil {
+ issue.Updated = *issue.Closed
+ } else {
+ issue.Updated = time.Now()
+ }
+ }
+
+ // SECURITY: issue.Ref needs to be a valid reference
+ if !git.IsValidRefPattern(issue.Ref) {
+ log.Warn("Invalid issue.Ref[%s] in issue #%d in %s/%s", issue.Ref, issue.Number, g.repoOwner, g.repoName)
+ issue.Ref = ""
+ }
+
+ is := issues_model.Issue{
+ RepoID: g.repo.ID,
+ Repo: g.repo,
+ Index: issue.Number,
+ Title: base_module.TruncateString(issue.Title, 255),
+ Content: issue.Content,
+ Ref: issue.Ref,
+ IsClosed: issue.State == "closed",
+ IsLocked: issue.IsLocked,
+ MilestoneID: milestoneID,
+ Labels: labels,
+ CreatedUnix: timeutil.TimeStamp(issue.Created.Unix()),
+ UpdatedUnix: timeutil.TimeStamp(issue.Updated.Unix()),
+ }
+
+ if err := g.remapUser(issue, &is); err != nil {
+ return err
+ }
+
+ if issue.Closed != nil {
+ is.ClosedUnix = timeutil.TimeStamp(issue.Closed.Unix())
+ }
+ // add reactions
+ for _, reaction := range issue.Reactions {
+ res := issues_model.Reaction{
+ Type: reaction.Content,
+ CreatedUnix: timeutil.TimeStampNow(),
+ }
+ if err := g.remapUser(reaction, &res); err != nil {
+ return err
+ }
+ is.Reactions = append(is.Reactions, &res)
+ }
+ iss = append(iss, &is)
+ }
+
+ if len(iss) > 0 {
+ if err := issues_model.InsertIssues(g.ctx, iss...); err != nil {
+ return err
+ }
+
+ for _, is := range iss {
+ g.issues[is.Index] = is
+ }
+ }
+
+ return nil
+}
+
+// CreateComments creates comments of issues
+func (g *GiteaLocalUploader) CreateComments(comments ...*base.Comment) error {
+ cms := make([]*issues_model.Comment, 0, len(comments))
+ for _, comment := range comments {
+ var issue *issues_model.Issue
+ issue, ok := g.issues[comment.IssueIndex]
+ if !ok {
+ return fmt.Errorf("comment references non existent IssueIndex %d", comment.IssueIndex)
+ }
+
+ if comment.Created.IsZero() {
+ comment.Created = time.Unix(int64(issue.CreatedUnix), 0)
+ }
+ if comment.Updated.IsZero() {
+ comment.Updated = comment.Created
+ }
+ if comment.CommentType == "" {
+ // if type field is missing, then assume a normal comment
+ comment.CommentType = issues_model.CommentTypeComment.String()
+ }
+ cm := issues_model.Comment{
+ IssueID: issue.ID,
+ Type: issues_model.AsCommentType(comment.CommentType),
+ Content: comment.Content,
+ CreatedUnix: timeutil.TimeStamp(comment.Created.Unix()),
+ UpdatedUnix: timeutil.TimeStamp(comment.Updated.Unix()),
+ }
+
+ switch cm.Type {
+ case issues_model.CommentTypeReopen:
+ cm.Content = ""
+ case issues_model.CommentTypeClose:
+ cm.Content = ""
+ case issues_model.CommentTypeAssignees:
+ if assigneeID, ok := comment.Meta["AssigneeID"].(int); ok {
+ cm.AssigneeID = int64(assigneeID)
+ }
+ if comment.Meta["RemovedAssigneeID"] != nil {
+ cm.RemovedAssignee = true
+ }
+ case issues_model.CommentTypeChangeTitle:
+ if comment.Meta["OldTitle"] != nil {
+ cm.OldTitle = fmt.Sprint(comment.Meta["OldTitle"])
+ }
+ if comment.Meta["NewTitle"] != nil {
+ cm.NewTitle = fmt.Sprint(comment.Meta["NewTitle"])
+ }
+ case issues_model.CommentTypeChangeTargetBranch:
+ if comment.Meta["OldRef"] != nil && comment.Meta["NewRef"] != nil {
+ cm.OldRef = fmt.Sprint(comment.Meta["OldRef"])
+ cm.NewRef = fmt.Sprint(comment.Meta["NewRef"])
+ cm.Content = ""
+ }
+ case issues_model.CommentTypeMergePull:
+ cm.Content = ""
+ case issues_model.CommentTypePRScheduledToAutoMerge, issues_model.CommentTypePRUnScheduledToAutoMerge:
+ cm.Content = ""
+ default:
+ }
+
+ if err := g.remapUser(comment, &cm); err != nil {
+ return err
+ }
+
+ // add reactions
+ for _, reaction := range comment.Reactions {
+ res := issues_model.Reaction{
+ Type: reaction.Content,
+ CreatedUnix: timeutil.TimeStampNow(),
+ }
+ if err := g.remapUser(reaction, &res); err != nil {
+ return err
+ }
+ cm.Reactions = append(cm.Reactions, &res)
+ }
+
+ cms = append(cms, &cm)
+ }
+
+ if len(cms) == 0 {
+ return nil
+ }
+ return issues_model.InsertIssueComments(g.ctx, cms)
+}
+
+// CreatePullRequests creates pull requests
+func (g *GiteaLocalUploader) CreatePullRequests(prs ...*base.PullRequest) error {
+ gprs := make([]*issues_model.PullRequest, 0, len(prs))
+ for _, pr := range prs {
+ gpr, err := g.newPullRequest(pr)
+ if err != nil {
+ return err
+ }
+
+ if err := g.remapUser(pr, gpr.Issue); err != nil {
+ return err
+ }
+
+ gprs = append(gprs, gpr)
+ }
+ if err := issues_model.InsertPullRequests(g.ctx, gprs...); err != nil {
+ return err
+ }
+ for _, pr := range gprs {
+ g.issues[pr.Issue.Index] = pr.Issue
+ pull.AddToTaskQueue(g.ctx, pr)
+ }
+ return nil
+}
+
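+// updateGitForPullRequest downloads the pull request patch file and makes
+// sure the head of the pull request is available as a local ref, fetching
+// it from the head repository when necessary. It returns the branch name
+// to record as the pull request head.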
+func (g *GiteaLocalUploader) updateGitForPullRequest(pr *base.PullRequest) (head string, err error) {
+ // SECURITY: this pr must have been ensured safe
+ if !pr.EnsuredSafe {
+ log.Error("PR #%d in %s/%s has not been checked for safety.", pr.Number, g.repoOwner, g.repoName)
+ return "", fmt.Errorf("the PR[%d] was not checked for safety", pr.Number)
+ }
+
+ // Anonymous function to download the patch file (allows us to use defer)
+ err = func() error {
+ // if the patchURL is empty there is nothing to download
+ if pr.PatchURL == "" {
+ return nil
+ }
+
+ // SECURITY: We will assume that the pr.PatchURL has been checked
+ // pr.PatchURL may be a local file - but note that EnsureSafe should be asserting that this is safe
+ ret, err := uri.Open(pr.PatchURL) // TODO: This probably needs to use the downloader as there may be rate limiting issues here
+ if err != nil {
+ return err
+ }
+ defer ret.Close()
+
+ pullDir := filepath.Join(g.repo.RepoPath(), "pulls")
+ if err = os.MkdirAll(pullDir, os.ModePerm); err != nil {
+ return err
+ }
+
+ f, err := os.Create(filepath.Join(pullDir, fmt.Sprintf("%d.patch", pr.Number)))
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ // TODO: Should there be limits on the size of this file?
+ _, err = io.Copy(f, ret)
+
+ return err
+ }()
+ if err != nil {
+ return "", err
+ }
+
+ head = "unknown repository"
+ if pr.IsForkPullRequest() && pr.State != "closed" {
+ // OK we want to fetch the current head as a branch from its CloneURL
+
+ // 1. Is there a head clone URL available?
+ // 2. Is there a head ref available?
+ if pr.Head.CloneURL == "" || pr.Head.Ref == "" {
+ return head, nil
+ }
+
+ // 3. We need to create a remote for this clone url
+ // ... maybe we already have a name for this remote
+ remote, ok := g.prHeadCache[pr.Head.CloneURL+":"]
+ if !ok {
+ // ... let's try ownername as a reasonable name
+ remote = pr.Head.OwnerName
+ if !git.IsValidRefPattern(remote) {
+ // ... let's try something less nice
+ remote = "head-pr-" + strconv.FormatInt(pr.Number, 10)
+ }
+ // ... now add the remote
+ err := g.gitRepo.AddRemote(remote, pr.Head.CloneURL, true)
+ if err != nil {
+ log.Error("PR #%d in %s/%s AddRemote[%s] failed: %v", pr.Number, g.repoOwner, g.repoName, remote, err)
+ } else {
+ g.prHeadCache[pr.Head.CloneURL+":"] = remote
+ ok = true
+ }
+ }
+ if !ok {
+ return head, nil
+ }
+
+ // 4. Check if we already have this ref?
+ localRef, ok := g.prHeadCache[pr.Head.CloneURL+":"+pr.Head.Ref]
+ if !ok {
+ // ... We would normally name this migrated branch as <OwnerName>/<HeadRef> but we need to ensure that is safe
+ localRef = git.SanitizeRefPattern(pr.Head.OwnerName + "/" + pr.Head.Ref)
+
+ // ... Now we must assert that this does not exist
+ if g.gitRepo.IsBranchExist(localRef) {
+ localRef = "head-pr-" + strconv.FormatInt(pr.Number, 10) + "/" + localRef
+ i := 0
+ for g.gitRepo.IsBranchExist(localRef) {
+ if i > 5 {
+ // ... We tried, we really tried but this is just a seriously unfriendly repo
+ return head, nil
+ }
+ // OK just try some uuids!
+ localRef = git.SanitizeRefPattern("head-pr-" + strconv.FormatInt(pr.Number, 10) + uuid.New().String())
+ i++
+ }
+ }
+
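+ // e.g. a head ref "feature" from owner "someuser" yields the fetch
+ // refspec "feature:refs/heads/someuser/feature"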
+ fetchArg := pr.Head.Ref + ":" + git.BranchPrefix + localRef
+ if strings.HasPrefix(fetchArg, "-") {
+ fetchArg = git.BranchPrefix + fetchArg
+ }
+
+ _, _, err = git.NewCommand(g.ctx, "fetch", "--no-tags").AddDashesAndList(remote, fetchArg).RunStdString(&git.RunOpts{Dir: g.repo.RepoPath()})
+ if err != nil {
+ log.Error("Fetch branch from %s failed: %v", pr.Head.CloneURL, err)
+ return head, nil
+ }
+ g.prHeadCache[pr.Head.CloneURL+":"+pr.Head.Ref] = localRef
+ head = localRef
+ }
+
+ // 5. Now if pr.Head.SHA == "" we should recover this to the head of this branch
+ if pr.Head.SHA == "" {
+ headSha, err := g.gitRepo.GetBranchCommitID(localRef)
+ if err != nil {
+ log.Error("unable to get head SHA of local head for PR #%d from %s in %s/%s. Error: %v", pr.Number, pr.Head.Ref, g.repoOwner, g.repoName, err)
+ return head, nil
+ }
+ pr.Head.SHA = headSha
+ }
+
+ _, _, err = git.NewCommand(g.ctx, "update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.repo.RepoPath()})
+ if err != nil {
+ return "", err
+ }
+
+ return head, nil
+ }
+
+ if pr.Head.Ref != "" {
+ head = pr.Head.Ref
+ }
+
+ // Ensure the closed PR SHA still points to an existing ref
+ if pr.Head.SHA == "" {
+ // The SHA is empty
+ log.Warn("Empty reference, no pull head for PR #%d in %s/%s", pr.Number, g.repoOwner, g.repoName)
+ } else {
+ _, _, err = git.NewCommand(g.ctx, "rev-list", "--quiet", "-1").AddDynamicArguments(pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.repo.RepoPath()})
+ if err != nil {
+ // git update-ref removes bad references when given a relative path
+ log.Warn("Deprecated local head %s for PR #%d in %s/%s, removing %s", pr.Head.SHA, pr.Number, g.repoOwner, g.repoName, pr.GetGitRefName())
+ } else {
+ // set head information
+ _, _, err = git.NewCommand(g.ctx, "update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.repo.RepoPath()})
+ if err != nil {
+ log.Error("unable to set %s as the local head for PR #%d from %s in %s/%s. Error: %v", pr.Head.SHA, pr.Number, pr.Head.Ref, g.repoOwner, g.repoName, err)
+ }
+ }
+ }
+
+ return head, nil
+}
+
+func (g *GiteaLocalUploader) newPullRequest(pr *base.PullRequest) (*issues_model.PullRequest, error) {
+ var labels []*issues_model.Label
+ for _, label := range pr.Labels {
+ lb, ok := g.labels[label.Name]
+ if ok {
+ labels = append(labels, lb)
+ }
+ }
+
+ milestoneID := g.milestones[pr.Milestone]
+
+ head, err := g.updateGitForPullRequest(pr)
+ if err != nil {
+ return nil, fmt.Errorf("updateGitForPullRequest: %w", err)
+ }
+
+ // Now we may need to fix the mergebase
+ if pr.Base.SHA == "" {
+ if pr.Base.Ref != "" && pr.Head.SHA != "" {
+ // A PR against a tag base does not make sense - therefore pr.Base.Ref must be a branch
+ // TODO: should we be checking for the refs/heads/ prefix on the pr.Base.Ref? (i.e. are these actually branches or refs)
+ pr.Base.SHA, _, err = g.gitRepo.GetMergeBase("", git.BranchPrefix+pr.Base.Ref, pr.Head.SHA)
+ if err != nil {
+ log.Error("Cannot determine the merge base for PR #%d in %s/%s. Error: %v", pr.Number, g.repoOwner, g.repoName, err)
+ }
+ } else {
+ log.Error("Cannot determine the merge base for PR #%d in %s/%s. Not enough information", pr.Number, g.repoOwner, g.repoName)
+ }
+ }
+
+ if pr.Created.IsZero() {
+ if pr.Closed != nil {
+ pr.Created = *pr.Closed
+ } else if pr.MergedTime != nil {
+ pr.Created = *pr.MergedTime
+ } else {
+ pr.Created = time.Now()
+ }
+ }
+ if pr.Updated.IsZero() {
+ pr.Updated = pr.Created
+ }
+
+ issue := issues_model.Issue{
+ RepoID: g.repo.ID,
+ Repo: g.repo,
+ Title: pr.Title,
+ Index: pr.Number,
+ Content: pr.Content,
+ MilestoneID: milestoneID,
+ IsPull: true,
+ IsClosed: pr.State == "closed",
+ IsLocked: pr.IsLocked,
+ Labels: labels,
+ CreatedUnix: timeutil.TimeStamp(pr.Created.Unix()),
+ UpdatedUnix: timeutil.TimeStamp(pr.Updated.Unix()),
+ }
+
+ if err := g.remapUser(pr, &issue); err != nil {
+ return nil, err
+ }
+
+ // add reactions
+ for _, reaction := range pr.Reactions {
+ res := issues_model.Reaction{
+ Type: reaction.Content,
+ CreatedUnix: timeutil.TimeStampNow(),
+ }
+ if err := g.remapUser(reaction, &res); err != nil {
+ return nil, err
+ }
+ issue.Reactions = append(issue.Reactions, &res)
+ }
+
+ pullRequest := issues_model.PullRequest{
+ HeadRepoID: g.repo.ID,
+ HeadBranch: head,
+ BaseRepoID: g.repo.ID,
+ BaseBranch: pr.Base.Ref,
+ MergeBase: pr.Base.SHA,
+ Index: pr.Number,
+ HasMerged: pr.Merged,
+
+ Issue: &issue,
+ }
+
+ if pullRequest.Issue.IsClosed && pr.Closed != nil {
+ pullRequest.Issue.ClosedUnix = timeutil.TimeStamp(pr.Closed.Unix())
+ }
+ if pullRequest.HasMerged && pr.MergedTime != nil {
+ pullRequest.MergedUnix = timeutil.TimeStamp(pr.MergedTime.Unix())
+ pullRequest.MergedCommitID = pr.MergeCommitSHA
+ pullRequest.MergerID = g.doer.ID
+ }
+
+ // TODO: assignees
+
+ return &pullRequest, nil
+}
+
+func convertReviewState(state string) issues_model.ReviewType {
+ switch state {
+ case base.ReviewStatePending:
+ return issues_model.ReviewTypePending
+ case base.ReviewStateApproved:
+ return issues_model.ReviewTypeApprove
+ case base.ReviewStateChangesRequested:
+ return issues_model.ReviewTypeReject
+ case base.ReviewStateCommented:
+ return issues_model.ReviewTypeComment
+ case base.ReviewStateRequestReview:
+ return issues_model.ReviewTypeRequest
+ default:
+ return issues_model.ReviewTypePending
+ }
+}
+
+// CreateReviews creates pull request reviews of currently migrated issues
+func (g *GiteaLocalUploader) CreateReviews(reviews ...*base.Review) error {
+ cms := make([]*issues_model.Review, 0, len(reviews))
+ for _, review := range reviews {
+ var issue *issues_model.Issue
+ issue, ok := g.issues[review.IssueIndex]
+ if !ok {
+ return fmt.Errorf("review references non existent IssueIndex %d", review.IssueIndex)
+ }
+ if review.CreatedAt.IsZero() {
+ review.CreatedAt = time.Unix(int64(issue.CreatedUnix), 0)
+ }
+
+ cm := issues_model.Review{
+ Type: convertReviewState(review.State),
+ IssueID: issue.ID,
+ Content: review.Content,
+ Official: review.Official,
+ CreatedUnix: timeutil.TimeStamp(review.CreatedAt.Unix()),
+ UpdatedUnix: timeutil.TimeStamp(review.CreatedAt.Unix()),
+ }
+
+ if err := g.remapUser(review, &cm); err != nil {
+ return err
+ }
+
+ cms = append(cms, &cm)
+
+ // get pr
+ pr, ok := g.prCache[issue.ID]
+ if !ok {
+ var err error
+ pr, err = issues_model.GetPullRequestByIssueIDWithNoAttributes(g.ctx, issue.ID)
+ if err != nil {
+ return err
+ }
+ g.prCache[issue.ID] = pr
+ }
+ if pr.MergeBase == "" {
+ // No mergebase -> no basis for any patches
+ log.Warn("PR #%d in %s/%s: does not have a merge base, all review comments will be ignored", pr.Index, g.repoOwner, g.repoName)
+ continue
+ }
+
+ headCommitID, err := g.gitRepo.GetRefCommitID(pr.GetGitRefName())
+ if err != nil {
+ log.Warn("PR #%d GetRefCommitID[%s] in %s/%s: %v, all review comments will be ignored", pr.Index, pr.GetGitRefName(), g.repoOwner, g.repoName, err)
+ continue
+ }
+
+ for _, comment := range review.Comments {
+ // Skip code comment if it doesn't have a diff it is commenting on.
+ if comment.DiffHunk == "" {
+ continue
+ }
+
+ line := comment.Line
+ if line != 0 {
+ comment.Position = 1
+ } else if comment.DiffHunk != "" {
+ _, _, line, _ = git.ParseDiffHunkString(comment.DiffHunk)
+ }
+
+ // SECURITY: The TreePath must be cleaned! Use a relative path
+ comment.TreePath = util.PathJoinRel(comment.TreePath)
+
+ var patch string
+ reader, writer := io.Pipe()
+ defer func() {
+ _ = reader.Close()
+ _ = writer.Close()
+ }()
+ go func(comment *base.ReviewComment) {
+ if err := git.GetRepoRawDiffForFile(g.gitRepo, pr.MergeBase, headCommitID, git.RawDiffNormal, comment.TreePath, writer); err != nil {
+ // Ignore the error since the commit may have been removed by a force push to the pull request
+ log.Warn("GetRepoRawDiffForFile failed when migrating [%s, %s, %s, %s]: %v", g.gitRepo.Path, pr.MergeBase, headCommitID, comment.TreePath, err)
+ }
+ _ = writer.Close()
+ }(comment)
+
+ patch, _ = git.CutDiffAroundLine(reader, int64((&issues_model.Comment{Line: int64(line + comment.Position - 1)}).UnsignedLine()), line < 0, setting.UI.CodeCommentLines)
+
+ if comment.CreatedAt.IsZero() {
+ comment.CreatedAt = review.CreatedAt
+ }
+ if comment.UpdatedAt.IsZero() {
+ comment.UpdatedAt = comment.CreatedAt
+ }
+
+ objectFormat := git.ObjectFormatFromName(g.repo.ObjectFormatName)
+ if !objectFormat.IsValid(comment.CommitID) {
+ log.Warn("Invalid comment CommitID[%s] on comment[%d] in PR #%d of %s/%s replaced with %s", comment.CommitID, pr.Index, g.repoOwner, g.repoName, headCommitID)
+ comment.CommitID = headCommitID
+ }
+
+ c := issues_model.Comment{
+ Type: issues_model.CommentTypeCode,
+ IssueID: issue.ID,
+ Content: comment.Content,
+ Line: int64(line + comment.Position - 1),
+ TreePath: comment.TreePath,
+ CommitSHA: comment.CommitID,
+ Patch: patch,
+ CreatedUnix: timeutil.TimeStamp(comment.CreatedAt.Unix()),
+ UpdatedUnix: timeutil.TimeStamp(comment.UpdatedAt.Unix()),
+ }
+
+ if err := g.remapUser(review, &c); err != nil {
+ return err
+ }
+
+ cm.Comments = append(cm.Comments, &c)
+ }
+ }
+
+ return issues_model.InsertReviews(g.ctx, cms)
+}
+
+// Rollback is called when migration fails; it releases resources but deliberately keeps the repository so its last error message remains visible.
+func (g *GiteaLocalUploader) Rollback() error {
+ if g.repo != nil && g.repo.ID > 0 {
+ g.gitRepo.Close()
+
+ // do not delete the repository, otherwise the end users won't be able to see the last error message
+ }
+ return nil
+}
+
+// Finish is called when migration succeeds; it recalculates issue indexes, updates repository stats, and marks the repository as ready.
+func (g *GiteaLocalUploader) Finish() error {
+ if g.repo == nil || g.repo.ID <= 0 {
+ return ErrRepoNotCreated
+ }
+
+ // update issue_index
+ if err := issues_model.RecalculateIssueIndexForRepo(g.ctx, g.repo.ID); err != nil {
+ return err
+ }
+
+ if err := models.UpdateRepoStats(g.ctx, g.repo.ID); err != nil {
+ return err
+ }
+
+ g.repo.Status = repo_model.RepositoryReady
+ return repo_model.UpdateRepositoryCols(g.ctx, g.repo, "status")
+}
+
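+// remapUser resolves the original author of a migrated item to a local
+// user ID. For same-app migrations the external ID is reused directly if
+// the user name still matches; otherwise the linked external login account
+// is looked up. Anything unresolvable is attributed to the ghost user.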
+func (g *GiteaLocalUploader) remapUser(source user_model.ExternalUserMigrated, target user_model.ExternalUserRemappable) error {
+ var userID int64
+ var err error
+ if g.sameApp {
+ userID, err = g.remapLocalUser(source)
+ } else {
+ userID, err = g.remapExternalUser(source)
+ }
+ if err != nil {
+ return err
+ }
+
+ if userID > 0 {
+ return target.RemapExternalUser("", 0, userID)
+ }
+ return target.RemapExternalUser(source.GetExternalName(), source.GetExternalID(), user_model.GhostUserID)
+}
+
+func (g *GiteaLocalUploader) remapLocalUser(source user_model.ExternalUserMigrated) (int64, error) {
+ userid, ok := g.userMap[source.GetExternalID()]
+ if !ok {
+ name, err := user_model.GetUserNameByID(g.ctx, source.GetExternalID())
+ if err != nil {
+ return 0, err
+ }
+ // let's not reuse an ID when the user was deleted or has a different user name
+ if name != source.GetExternalName() {
+ userid = 0
+ } else {
+ userid = source.GetExternalID()
+ }
+ g.userMap[source.GetExternalID()] = userid
+ }
+ return userid, nil
+}
+
+func (g *GiteaLocalUploader) remapExternalUser(source user_model.ExternalUserMigrated) (userid int64, err error) {
+ userid, ok := g.userMap[source.GetExternalID()]
+ if !ok {
+ userid, err = user_model.GetUserIDByExternalUserID(g.ctx, g.gitServiceType.Name(), fmt.Sprintf("%d", source.GetExternalID()))
+ if err != nil {
+ log.Error("GetUserIDByExternalUserID: %v", err)
+ return 0, err
+ }
+ g.userMap[source.GetExternalID()] = userid
+ }
+ return userid, nil
+}
diff --git a/services/migrations/gitea_uploader_test.go b/services/migrations/gitea_uploader_test.go
new file mode 100644
index 0000000..ad193b2
--- /dev/null
+++ b/services/migrations/gitea_uploader_test.go
@@ -0,0 +1,519 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// Copyright 2018 Jonas Franz. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "path/filepath"
+ "strconv"
+ "testing"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ base "code.gitea.io/gitea/modules/migration"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/test"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestGiteaUploadRepo(t *testing.T) {
+ // FIXME: Without an access key or username/password this quickly hits GitHub's rate limit, so just skip
+ t.Skip()
+
+ unittest.PrepareTestEnv(t)
+
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+
+ var (
+ ctx = context.Background()
+ downloader = NewGithubDownloaderV3(ctx, "https://github.com", "", "", "", "go-xorm", "builder")
+ repoName = "builder-" + time.Now().Format("2006-01-02-15-04-05")
+ uploader = NewGiteaLocalUploader(graceful.GetManager().HammerContext(), user, user.Name, repoName)
+ )
+
+ err := migrateRepository(db.DefaultContext, user, downloader, uploader, base.MigrateOptions{
+ CloneAddr: "https://github.com/go-xorm/builder",
+ RepoName: repoName,
+ AuthUsername: "",
+
+ Wiki: true,
+ Issues: true,
+ Milestones: true,
+ Labels: true,
+ Releases: true,
+ Comments: true,
+ PullRequests: true,
+ Private: true,
+ Mirror: false,
+ }, nil)
+ require.NoError(t, err)
+
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerID: user.ID, Name: repoName})
+ assert.True(t, repo.HasWiki())
+ assert.EqualValues(t, repo_model.RepositoryReady, repo.Status)
+
+ milestones, err := db.Find[issues_model.Milestone](db.DefaultContext, issues_model.FindMilestoneOptions{
+ RepoID: repo.ID,
+ IsClosed: optional.Some(false),
+ })
+ require.NoError(t, err)
+ assert.Len(t, milestones, 1)
+
+ milestones, err = db.Find[issues_model.Milestone](db.DefaultContext, issues_model.FindMilestoneOptions{
+ RepoID: repo.ID,
+ IsClosed: optional.Some(true),
+ })
+ require.NoError(t, err)
+ assert.Empty(t, milestones)
+
+ labels, err := issues_model.GetLabelsByRepoID(ctx, repo.ID, "", db.ListOptions{})
+ require.NoError(t, err)
+ assert.Len(t, labels, 12)
+
+ releases, err := db.Find[repo_model.Release](db.DefaultContext, repo_model.FindReleasesOptions{
+ ListOptions: db.ListOptions{
+ PageSize: 10,
+ Page: 0,
+ },
+ IncludeTags: true,
+ RepoID: repo.ID,
+ })
+ require.NoError(t, err)
+ assert.Len(t, releases, 8)
+
+ releases, err = db.Find[repo_model.Release](db.DefaultContext, repo_model.FindReleasesOptions{
+ ListOptions: db.ListOptions{
+ PageSize: 10,
+ Page: 0,
+ },
+ IncludeTags: false,
+ RepoID: repo.ID,
+ })
+ require.NoError(t, err)
+ assert.Len(t, releases, 1)
+
+ issues, err := issues_model.Issues(db.DefaultContext, &issues_model.IssuesOptions{
+ RepoIDs: []int64{repo.ID},
+ IsPull: optional.Some(false),
+ SortType: "oldest",
+ })
+ require.NoError(t, err)
+ assert.Len(t, issues, 15)
+ require.NoError(t, issues[0].LoadDiscussComments(db.DefaultContext))
+ assert.Empty(t, issues[0].Comments)
+
+ pulls, _, err := issues_model.PullRequests(db.DefaultContext, repo.ID, &issues_model.PullRequestsOptions{
+ SortType: "oldest",
+ })
+ require.NoError(t, err)
+ assert.Len(t, pulls, 30)
+ require.NoError(t, pulls[0].LoadIssue(db.DefaultContext))
+ require.NoError(t, pulls[0].Issue.LoadDiscussComments(db.DefaultContext))
+ assert.Len(t, pulls[0].Issue.Comments, 2)
+}
+
+func TestGiteaUploadRemapLocalUser(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+
+ repoName := "migrated"
+ uploader := NewGiteaLocalUploader(context.Background(), doer, doer.Name, repoName)
+ // sameApp = true forces remapUser to take the remapLocalUser path
+ uploader.sameApp = true
+
+ externalID := int64(1234567)
+ externalName := "username"
+ source := base.Release{
+ PublisherID: externalID,
+ PublisherName: externalName,
+ }
+
+ //
+ // The externalID does not match any existing user, everything
+ // belongs to the Ghost user
+ //
+ target := repo_model.Release{}
+ uploader.userMap = make(map[int64]int64)
+ err := uploader.remapUser(&source, &target)
+ require.NoError(t, err)
+ assert.EqualValues(t, user_model.GhostUserID, target.GetUserID())
+
+ //
+ // The externalID matches a known user but the name does not match,
+ // everything belongs to the Ghost user
+ //
+ source.PublisherID = user.ID
+ target = repo_model.Release{}
+ uploader.userMap = make(map[int64]int64)
+ err = uploader.remapUser(&source, &target)
+ require.NoError(t, err)
+ assert.EqualValues(t, user_model.GhostUserID, target.GetUserID())
+
+ //
+ // The externalID and externalName match an existing user, everything
+ // belongs to the existing user
+ //
+ source.PublisherName = user.Name
+ target = repo_model.Release{}
+ uploader.userMap = make(map[int64]int64)
+ err = uploader.remapUser(&source, &target)
+ require.NoError(t, err)
+ assert.EqualValues(t, user.ID, target.GetUserID())
+}
+
+func TestGiteaUploadRemapExternalUser(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+
+ repoName := "migrated"
+ uploader := NewGiteaLocalUploader(context.Background(), doer, doer.Name, repoName)
+ uploader.gitServiceType = structs.GiteaService
+ // sameApp = false forces remapUser to take the remapExternalUser path
+ uploader.sameApp = false
+
+ externalID := int64(1234567)
+ externalName := "username"
+ source := base.Release{
+ PublisherID: externalID,
+ PublisherName: externalName,
+ }
+
+ //
+ // When there is no user linked to the external ID, the migrated data is authored
+ // by the Ghost user
+ //
+ uploader.userMap = make(map[int64]int64)
+ target := repo_model.Release{}
+ err := uploader.remapUser(&source, &target)
+ require.NoError(t, err)
+ assert.EqualValues(t, user_model.GhostUserID, target.GetUserID())
+
+ //
+ // Link the external ID to an existing user
+ //
+ linkedUser := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ externalLoginUser := &user_model.ExternalLoginUser{
+ ExternalID: strconv.FormatInt(externalID, 10),
+ UserID: linkedUser.ID,
+ LoginSourceID: 0,
+ Provider: structs.GiteaService.Name(),
+ }
+ err = user_model.LinkExternalToUser(db.DefaultContext, linkedUser, externalLoginUser)
+ require.NoError(t, err)
+
+ //
+ // When a user is linked to the external ID, it becomes the author of
+ // the migrated data
+ //
+ uploader.userMap = make(map[int64]int64)
+ target = repo_model.Release{}
+ err = uploader.remapUser(&source, &target)
+ require.NoError(t, err)
+ assert.EqualValues(t, linkedUser.ID, target.GetUserID())
+}
+
+func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+
+ //
+ // fromRepo master
+ //
+ fromRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ baseRef := "master"
+ require.NoError(t, git.InitRepository(git.DefaultContext, fromRepo.RepoPath(), false, fromRepo.ObjectFormatName))
+ err := git.NewCommand(git.DefaultContext, "symbolic-ref").AddDynamicArguments("HEAD", git.BranchPrefix+baseRef).Run(&git.RunOpts{Dir: fromRepo.RepoPath()})
+ require.NoError(t, err)
+ require.NoError(t, os.WriteFile(filepath.Join(fromRepo.RepoPath(), "README.md"), []byte(fmt.Sprintf("# Testing Repository\n\nOriginally created in: %s", fromRepo.RepoPath())), 0o644))
+ require.NoError(t, git.AddChanges(fromRepo.RepoPath(), true))
+ signature := git.Signature{
+ Email: "test@example.com",
+ Name: "test",
+ When: time.Now(),
+ }
+ require.NoError(t, git.CommitChanges(fromRepo.RepoPath(), git.CommitChangesOptions{
+ Committer: &signature,
+ Author: &signature,
+ Message: "Initial Commit",
+ }))
+ fromGitRepo, err := gitrepo.OpenRepository(git.DefaultContext, fromRepo)
+ require.NoError(t, err)
+ defer fromGitRepo.Close()
+ baseSHA, err := fromGitRepo.GetBranchCommitID(baseRef)
+ require.NoError(t, err)
+
+ //
+ // fromRepo branch1
+ //
+ headRef := "branch1"
+ _, _, err = git.NewCommand(git.DefaultContext, "checkout", "-b").AddDynamicArguments(headRef).RunStdString(&git.RunOpts{Dir: fromRepo.RepoPath()})
+ require.NoError(t, err)
+ require.NoError(t, os.WriteFile(filepath.Join(fromRepo.RepoPath(), "README.md"), []byte("SOMETHING"), 0o644))
+ require.NoError(t, git.AddChanges(fromRepo.RepoPath(), true))
+ signature.When = time.Now()
+ require.NoError(t, git.CommitChanges(fromRepo.RepoPath(), git.CommitChangesOptions{
+ Committer: &signature,
+ Author: &signature,
+ Message: "Pull request",
+ }))
+ headSHA, err := fromGitRepo.GetBranchCommitID(headRef)
+ require.NoError(t, err)
+
+ fromRepoOwner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: fromRepo.OwnerID})
+
+ //
+ // forkRepo branch2
+ //
+ forkHeadRef := "branch2"
+ forkRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 8})
+ require.NoError(t, git.CloneWithArgs(git.DefaultContext, nil, fromRepo.RepoPath(), forkRepo.RepoPath(), git.CloneRepoOptions{
+ Branch: headRef,
+ }))
+ _, _, err = git.NewCommand(git.DefaultContext, "checkout", "-b").AddDynamicArguments(forkHeadRef).RunStdString(&git.RunOpts{Dir: forkRepo.RepoPath()})
+ require.NoError(t, err)
+ require.NoError(t, os.WriteFile(filepath.Join(forkRepo.RepoPath(), "README.md"), []byte(fmt.Sprintf("# branch2 %s", forkRepo.RepoPath())), 0o644))
+ require.NoError(t, git.AddChanges(forkRepo.RepoPath(), true))
+ require.NoError(t, git.CommitChanges(forkRepo.RepoPath(), git.CommitChangesOptions{
+ Committer: &signature,
+ Author: &signature,
+ Message: "branch2 commit",
+ }))
+ forkGitRepo, err := gitrepo.OpenRepository(git.DefaultContext, forkRepo)
+ require.NoError(t, err)
+ defer forkGitRepo.Close()
+ forkHeadSHA, err := forkGitRepo.GetBranchCommitID(forkHeadRef)
+ require.NoError(t, err)
+
+ toRepoName := "migrated"
+ uploader := NewGiteaLocalUploader(context.Background(), fromRepoOwner, fromRepoOwner.Name, toRepoName)
+ uploader.gitServiceType = structs.GiteaService
+ require.NoError(t, uploader.CreateRepo(&base.Repository{
+ Description: "description",
+ OriginalURL: fromRepo.RepoPath(),
+ CloneURL: fromRepo.RepoPath(),
+ IsPrivate: false,
+ IsMirror: true,
+ }, base.MigrateOptions{
+ GitServiceType: structs.GiteaService,
+ Private: false,
+ Mirror: true,
+ }))
+
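+ // Each case runs updateGitForPullRequest on a crafted PR and checks the
+ // resolved head reference; logFilter lists log-message prefixes and
+ // logFiltered records, per prefix, whether a matching line must appear.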
+ for _, testCase := range []struct {
+ name string
+ head string
+ logFilter []string
+ logFiltered []bool
+ pr base.PullRequest
+ }{
+ {
+ name: "fork, good Head.SHA",
+ head: fmt.Sprintf("%s/%s", forkRepo.OwnerName, forkHeadRef),
+ pr: base.PullRequest{
+ PatchURL: "",
+ Number: 1,
+ State: "open",
+ Base: base.PullRequestBranch{
+ CloneURL: fromRepo.RepoPath(),
+ Ref: baseRef,
+ SHA: baseSHA,
+ RepoName: fromRepo.Name,
+ OwnerName: fromRepo.OwnerName,
+ },
+ Head: base.PullRequestBranch{
+ CloneURL: forkRepo.RepoPath(),
+ Ref: forkHeadRef,
+ SHA: forkHeadSHA,
+ RepoName: forkRepo.Name,
+ OwnerName: forkRepo.OwnerName,
+ },
+ },
+ },
+ {
+ name: "fork, invalid Head.Ref",
+ head: "unknown repository",
+ pr: base.PullRequest{
+ PatchURL: "",
+ Number: 1,
+ State: "open",
+ Base: base.PullRequestBranch{
+ CloneURL: fromRepo.RepoPath(),
+ Ref: baseRef,
+ SHA: baseSHA,
+ RepoName: fromRepo.Name,
+ OwnerName: fromRepo.OwnerName,
+ },
+ Head: base.PullRequestBranch{
+ CloneURL: forkRepo.RepoPath(),
+ Ref: "INVALID",
+ SHA: forkHeadSHA,
+ RepoName: forkRepo.Name,
+ OwnerName: forkRepo.OwnerName,
+ },
+ },
+ logFilter: []string{"Fetch branch from"},
+ logFiltered: []bool{true},
+ },
+ {
+ name: "invalid fork CloneURL",
+ head: "unknown repository",
+ pr: base.PullRequest{
+ PatchURL: "",
+ Number: 1,
+ State: "open",
+ Base: base.PullRequestBranch{
+ CloneURL: fromRepo.RepoPath(),
+ Ref: baseRef,
+ SHA: baseSHA,
+ RepoName: fromRepo.Name,
+ OwnerName: fromRepo.OwnerName,
+ },
+ Head: base.PullRequestBranch{
+ CloneURL: "UNLIKELY",
+ Ref: forkHeadRef,
+ SHA: forkHeadSHA,
+ RepoName: forkRepo.Name,
+ OwnerName: "WRONG",
+ },
+ },
+ logFilter: []string{"AddRemote"},
+ logFiltered: []bool{true},
+ },
+ {
+ name: "no fork, good Head.SHA",
+ head: headRef,
+ pr: base.PullRequest{
+ PatchURL: "",
+ Number: 1,
+ State: "open",
+ Base: base.PullRequestBranch{
+ CloneURL: fromRepo.RepoPath(),
+ Ref: baseRef,
+ SHA: baseSHA,
+ RepoName: fromRepo.Name,
+ OwnerName: fromRepo.OwnerName,
+ },
+ Head: base.PullRequestBranch{
+ CloneURL: fromRepo.RepoPath(),
+ Ref: headRef,
+ SHA: headSHA,
+ RepoName: fromRepo.Name,
+ OwnerName: fromRepo.OwnerName,
+ },
+ },
+ },
+ {
+ name: "no fork, empty Head.SHA",
+ head: headRef,
+ pr: base.PullRequest{
+ PatchURL: "",
+ Number: 1,
+ State: "open",
+ Base: base.PullRequestBranch{
+ CloneURL: fromRepo.RepoPath(),
+ Ref: baseRef,
+ SHA: baseSHA,
+ RepoName: fromRepo.Name,
+ OwnerName: fromRepo.OwnerName,
+ },
+ Head: base.PullRequestBranch{
+ CloneURL: fromRepo.RepoPath(),
+ Ref: headRef,
+ SHA: "",
+ RepoName: fromRepo.Name,
+ OwnerName: fromRepo.OwnerName,
+ },
+ },
+ logFilter: []string{"Empty reference", "Cannot remove local head"},
+ logFiltered: []bool{true, false},
+ },
+ {
+ name: "no fork, invalid Head.SHA",
+ head: headRef,
+ pr: base.PullRequest{
+ PatchURL: "",
+ Number: 1,
+ State: "open",
+ Base: base.PullRequestBranch{
+ CloneURL: fromRepo.RepoPath(),
+ Ref: baseRef,
+ SHA: baseSHA,
+ RepoName: fromRepo.Name,
+ OwnerName: fromRepo.OwnerName,
+ },
+ Head: base.PullRequestBranch{
+ CloneURL: fromRepo.RepoPath(),
+ Ref: headRef,
+ SHA: "brokenSHA",
+ RepoName: fromRepo.Name,
+ OwnerName: fromRepo.OwnerName,
+ },
+ },
+ logFilter: []string{"Deprecated local head"},
+ logFiltered: []bool{true},
+ },
+ {
+ name: "no fork, not found Head.SHA",
+ head: headRef,
+ pr: base.PullRequest{
+ PatchURL: "",
+ Number: 1,
+ State: "open",
+ Base: base.PullRequestBranch{
+ CloneURL: fromRepo.RepoPath(),
+ Ref: baseRef,
+ SHA: baseSHA,
+ RepoName: fromRepo.Name,
+ OwnerName: fromRepo.OwnerName,
+ },
+ Head: base.PullRequestBranch{
+ CloneURL: fromRepo.RepoPath(),
+ Ref: headRef,
+ SHA: "2697b352310fcd01cbd1f3dbd43b894080027f68",
+ RepoName: fromRepo.Name,
+ OwnerName: fromRepo.OwnerName,
+ },
+ },
+ logFilter: []string{"Deprecated local head", "Cannot remove local head"},
+ logFiltered: []bool{true, false},
+ },
+ } {
+ t.Run(testCase.name, func(t *testing.T) {
+ stopMark := fmt.Sprintf(">>>>>>>>>>>>>STOP: %s<<<<<<<<<<<<<<<", testCase.name)
+
+ logChecker, cleanup := test.NewLogChecker(log.DEFAULT, log.INFO)
+ logChecker.Filter(testCase.logFilter...).StopMark(stopMark)
+ defer cleanup()
+
+ testCase.pr.EnsuredSafe = true
+
+ head, err := uploader.updateGitForPullRequest(&testCase.pr)
+ require.NoError(t, err)
+ assert.EqualValues(t, testCase.head, head)
+
+ log.Info(stopMark)
+
+ logFiltered, logStopped := logChecker.Check(5 * time.Second)
+ assert.True(t, logStopped)
+ if len(testCase.logFilter) > 0 {
+ assert.EqualValues(t, testCase.logFiltered, logFiltered, "for log message filters: %v", testCase.logFilter)
+ }
+ })
+ }
+}
diff --git a/services/migrations/github.go b/services/migrations/github.go
new file mode 100644
index 0000000..54d3859
--- /dev/null
+++ b/services/migrations/github.go
@@ -0,0 +1,885 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// Copyright 2018 Jonas Franz. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "strconv"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ base "code.gitea.io/gitea/modules/migration"
+ "code.gitea.io/gitea/modules/proxy"
+ "code.gitea.io/gitea/modules/structs"
+
+ "github.com/google/go-github/v64/github"
+ "golang.org/x/oauth2"
+)
+
+var (
+ _ base.Downloader = &GithubDownloaderV3{}
+ _ base.DownloaderFactory = &GithubDownloaderV3Factory{}
+ // GithubLimitRateRemaining is the threshold of remaining requests below which we wait for the rate limit to reset
+ GithubLimitRateRemaining = 0
+)
+
+func init() {
+ RegisterDownloaderFactory(&GithubDownloaderV3Factory{})
+}
+
+// GithubDownloaderV3Factory defines a github downloader v3 factory
+type GithubDownloaderV3Factory struct{}
+
+// New returns a Downloader related to this factory according to the given MigrateOptions
+func (f *GithubDownloaderV3Factory) New(ctx context.Context, opts base.MigrateOptions) (base.Downloader, error) {
+ u, err := url.Parse(opts.CloneAddr)
+ if err != nil {
+ return nil, err
+ }
+
+ // some users use the github redirect URL for migration
+ if u.Host == "www.github.com" {
+ u.Host = "github.com"
+ }
+
+ baseURL := u.Scheme + "://" + u.Host
+ fields := strings.Split(u.Path, "/")
+ oldOwner := fields[1]
+ oldName := strings.TrimSuffix(fields[2], ".git")
+
+ log.Trace("Create github downloader BaseURL: %s %s/%s", baseURL, oldOwner, oldName)
+
+ return NewGithubDownloaderV3(ctx, baseURL, opts.AuthUsername, opts.AuthPassword, opts.AuthToken, oldOwner, oldName), nil
+}
+
+// GitServiceType returns the type of git service
+func (f *GithubDownloaderV3Factory) GitServiceType() structs.GitServiceType {
+ return structs.GithubService
+}
+
+// GithubDownloaderV3 implements a Downloader interface to get repository information
+// from github via APIv3
+type GithubDownloaderV3 struct {
+ base.NullDownloader
+ ctx context.Context
+ clients []*github.Client
+ baseURL string
+ repoOwner string
+ repoName string
+ userName string
+ password string
+ rates []*github.Rate
+ curClientIdx int
+ maxPerPage int
+ SkipReactions bool
+ SkipReviews bool
+}
+
+// NewGithubDownloaderV3 creates a github Downloader via github v3 API
+func NewGithubDownloaderV3(ctx context.Context, baseURL, userName, password, token, repoOwner, repoName string) *GithubDownloaderV3 {
+ downloader := GithubDownloaderV3{
+ userName: userName,
+ baseURL: baseURL,
+ password: password,
+ ctx: ctx,
+ repoOwner: repoOwner,
+ repoName: repoName,
+ maxPerPage: 100,
+ }
+
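+ // The token may be a comma-separated list; one client is created per token
+ // so that waitAndPickClient can rotate to whichever token still has
+ // rate-limit budget left.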
+ if token != "" {
+ tokens := strings.Split(token, ",")
+ for _, token := range tokens {
+ token = strings.TrimSpace(token)
+ ts := oauth2.StaticTokenSource(
+ &oauth2.Token{AccessToken: token},
+ )
+ client := &http.Client{
+ Transport: &oauth2.Transport{
+ Base: NewMigrationHTTPTransport(),
+ Source: oauth2.ReuseTokenSource(nil, ts),
+ },
+ }
+
+ downloader.addClient(client, baseURL)
+ }
+ } else {
+ transport := NewMigrationHTTPTransport()
+ transport.Proxy = func(req *http.Request) (*url.URL, error) {
+ req.SetBasicAuth(userName, password)
+ return proxy.Proxy()(req)
+ }
+ client := &http.Client{
+ Transport: transport,
+ }
+ downloader.addClient(client, baseURL)
+ }
+ return &downloader
+}
+
+// String implements Stringer
+func (g *GithubDownloaderV3) String() string {
+ return fmt.Sprintf("migration from github server %s %s/%s", g.baseURL, g.repoOwner, g.repoName)
+}
+
+func (g *GithubDownloaderV3) LogString() string {
+ if g == nil {
+ return "<GithubDownloaderV3 nil>"
+ }
+ return fmt.Sprintf("<GithubDownloaderV3 %s %s/%s>", g.baseURL, g.repoOwner, g.repoName)
+}
+
+func (g *GithubDownloaderV3) addClient(client *http.Client, baseURL string) {
+ githubClient := github.NewClient(client)
+ if baseURL != "https://github.com" {
+ githubClient, _ = github.NewClient(client).WithEnterpriseURLs(baseURL, baseURL)
+ }
+ g.clients = append(g.clients, githubClient)
+ g.rates = append(g.rates, nil)
+}
+
+// SetContext sets the context
+func (g *GithubDownloaderV3) SetContext(ctx context.Context) {
+ g.ctx = ctx
+}
+
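+// waitAndPickClient selects the client with the most remaining rate-limit quota
+// and, if even that one is at or below GithubLimitRateRemaining, sleeps until its
+// rate window resets (or the context is cancelled).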
+func (g *GithubDownloaderV3) waitAndPickClient() {
+ var recentIdx int
+ var maxRemaining int
+ for i := 0; i < len(g.clients); i++ {
+ if g.rates[i] != nil && g.rates[i].Remaining > maxRemaining {
+ maxRemaining = g.rates[i].Remaining
+ recentIdx = i
+ }
+ }
+ g.curClientIdx = recentIdx // if no client has any remaining quota recorded, fall back to the first client
+
+ for g.rates[g.curClientIdx] != nil && g.rates[g.curClientIdx].Remaining <= GithubLimitRateRemaining {
+ timer := time.NewTimer(time.Until(g.rates[g.curClientIdx].Reset.Time))
+ select {
+ case <-g.ctx.Done():
+ timer.Stop()
+ return
+ case <-timer.C:
+ }
+
+ err := g.RefreshRate()
+ if err != nil {
+ log.Error("g.getClient().RateLimit.Get: %s", err)
+ }
+ }
+}
+
+// RefreshRate updates the current rate limit state (this request does not count against the rate limit)
+func (g *GithubDownloaderV3) RefreshRate() error {
+ rates, _, err := g.getClient().RateLimit.Get(g.ctx)
+ if err != nil {
+ // if rate limit is not enabled, ignore it
+ if strings.Contains(err.Error(), "404") {
+ g.setRate(nil)
+ return nil
+ }
+ return err
+ }
+
+ g.setRate(rates.GetCore())
+ return nil
+}
+
+func (g *GithubDownloaderV3) getClient() *github.Client {
+ return g.clients[g.curClientIdx]
+}
+
+func (g *GithubDownloaderV3) setRate(rate *github.Rate) {
+ g.rates[g.curClientIdx] = rate
+}
+
+// GetRepoInfo returns the repository information
+func (g *GithubDownloaderV3) GetRepoInfo() (*base.Repository, error) {
+ g.waitAndPickClient()
+ gr, resp, err := g.getClient().Repositories.Get(g.ctx, g.repoOwner, g.repoName)
+ if err != nil {
+ return nil, err
+ }
+ g.setRate(&resp.Rate)
+
+ // convert the github repo to a standard Repo
+ return &base.Repository{
+ Owner: g.repoOwner,
+ Name: gr.GetName(),
+ IsPrivate: gr.GetPrivate(),
+ Description: gr.GetDescription(),
+ OriginalURL: gr.GetHTMLURL(),
+ CloneURL: gr.GetCloneURL(),
+ DefaultBranch: gr.GetDefaultBranch(),
+ }, nil
+}
+
+// GetTopics returns github topics
+func (g *GithubDownloaderV3) GetTopics() ([]string, error) {
+ g.waitAndPickClient()
+ r, resp, err := g.getClient().Repositories.Get(g.ctx, g.repoOwner, g.repoName)
+ if err != nil {
+ return nil, err
+ }
+ g.setRate(&resp.Rate)
+ return r.Topics, nil
+}
+
+// GetMilestones returns milestones
+func (g *GithubDownloaderV3) GetMilestones() ([]*base.Milestone, error) {
+ perPage := g.maxPerPage
+ milestones := make([]*base.Milestone, 0, perPage)
+ for i := 1; ; i++ {
+ g.waitAndPickClient()
+ ms, resp, err := g.getClient().Issues.ListMilestones(g.ctx, g.repoOwner, g.repoName,
+ &github.MilestoneListOptions{
+ State: "all",
+ ListOptions: github.ListOptions{
+ Page: i,
+ PerPage: perPage,
+ },
+ })
+ if err != nil {
+ return nil, err
+ }
+ g.setRate(&resp.Rate)
+
+ for _, m := range ms {
+ state := "open"
+ if m.State != nil {
+ state = *m.State
+ }
+ milestones = append(milestones, &base.Milestone{
+ Title: m.GetTitle(),
+ Description: m.GetDescription(),
+ Deadline: m.DueOn.GetTime(),
+ State: state,
+ Created: m.GetCreatedAt().Time,
+ Updated: m.UpdatedAt.GetTime(),
+ Closed: m.ClosedAt.GetTime(),
+ })
+ }
+ if len(ms) < perPage {
+ break
+ }
+ }
+ return milestones, nil
+}
+
+func convertGithubLabel(label *github.Label) *base.Label {
+ return &base.Label{
+ Name: label.GetName(),
+ Color: label.GetColor(),
+ Description: label.GetDescription(),
+ }
+}
+
+// GetLabels returns labels
+func (g *GithubDownloaderV3) GetLabels() ([]*base.Label, error) {
+ perPage := g.maxPerPage
+ labels := make([]*base.Label, 0, perPage)
+ for i := 1; ; i++ {
+ g.waitAndPickClient()
+ ls, resp, err := g.getClient().Issues.ListLabels(g.ctx, g.repoOwner, g.repoName,
+ &github.ListOptions{
+ Page: i,
+ PerPage: perPage,
+ })
+ if err != nil {
+ return nil, err
+ }
+ g.setRate(&resp.Rate)
+
+ for _, label := range ls {
+ labels = append(labels, convertGithubLabel(label))
+ }
+ if len(ls) < perPage {
+ break
+ }
+ }
+ return labels, nil
+}
+
+func (g *GithubDownloaderV3) convertGithubRelease(rel *github.RepositoryRelease) *base.Release {
+ // GitHub allows committish to be a reference.
+ // In this case, we need to remove the prefix, i.e. convert "refs/heads/main" to "main".
+ targetCommitish := strings.TrimPrefix(rel.GetTargetCommitish(), git.BranchPrefix)
+
+ r := &base.Release{
+ Name: rel.GetName(),
+ TagName: rel.GetTagName(),
+ TargetCommitish: targetCommitish,
+ Draft: rel.GetDraft(),
+ Prerelease: rel.GetPrerelease(),
+ Created: rel.GetCreatedAt().Time,
+ PublisherID: rel.GetAuthor().GetID(),
+ PublisherName: rel.GetAuthor().GetLogin(),
+ PublisherEmail: rel.GetAuthor().GetEmail(),
+ Body: rel.GetBody(),
+ }
+
+ if rel.PublishedAt != nil {
+ r.Published = rel.PublishedAt.Time
+ }
+
+ httpClient := NewMigrationHTTPClient()
+
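+ // Each asset gets a DownloadFunc that runs lazily at upload time: it first
+ // asks the API for a stream, then falls back to following the redirect URL
+ // with a plain HTTP client, refusing unexpected hosts.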
+ for _, asset := range rel.Assets {
+ assetID := *asset.ID // Don't optimize this away: the closure below needs a per-iteration local variable
+ r.Assets = append(r.Assets, &base.ReleaseAsset{
+ ID: asset.GetID(),
+ Name: asset.GetName(),
+ ContentType: asset.ContentType,
+ Size: asset.Size,
+ DownloadCount: asset.DownloadCount,
+ Created: asset.CreatedAt.Time,
+ Updated: asset.UpdatedAt.Time,
+ DownloadFunc: func() (io.ReadCloser, error) {
+ g.waitAndPickClient()
+ readCloser, redirectURL, err := g.getClient().Repositories.DownloadReleaseAsset(g.ctx, g.repoOwner, g.repoName, assetID, nil)
+ if err != nil {
+ return nil, err
+ }
+ if err := g.RefreshRate(); err != nil {
+ log.Error("g.getClient().RateLimits: %s", err)
+ }
+
+ if readCloser != nil {
+ return readCloser, nil
+ }
+
+ if redirectURL == "" {
+ return nil, fmt.Errorf("no release asset found for %d", assetID)
+ }
+
+ // Prevent open redirect
+ if !hasBaseURL(redirectURL, g.baseURL) &&
+ !hasBaseURL(redirectURL, "https://objects.githubusercontent.com/") {
+ WarnAndNotice("Unexpected AssetURL for assetID[%d] in %s: %s", asset.GetID(), g, redirectURL)
+
+ return io.NopCloser(strings.NewReader(redirectURL)), nil
+ }
+
+ g.waitAndPickClient()
+ req, err := http.NewRequestWithContext(g.ctx, "GET", redirectURL, nil)
+ if err != nil {
+ return nil, err
+ }
+ resp, err := httpClient.Do(req)
+ err1 := g.RefreshRate()
+ if err1 != nil {
+ log.Error("g.RefreshRate(): %s", err1)
+ }
+ if err != nil {
+ return nil, err
+ }
+ return resp.Body, nil
+ },
+ })
+ }
+ return r
+}
+
+// GetReleases returns releases
+func (g *GithubDownloaderV3) GetReleases() ([]*base.Release, error) {
+ perPage := g.maxPerPage
+ releases := make([]*base.Release, 0, perPage)
+ for i := 1; ; i++ {
+ g.waitAndPickClient()
+ ls, resp, err := g.getClient().Repositories.ListReleases(g.ctx, g.repoOwner, g.repoName,
+ &github.ListOptions{
+ Page: i,
+ PerPage: perPage,
+ })
+ if err != nil {
+ return nil, err
+ }
+ g.setRate(&resp.Rate)
+
+ for _, release := range ls {
+ releases = append(releases, g.convertGithubRelease(release))
+ }
+ if len(ls) < perPage {
+ break
+ }
+ }
+ return releases, nil
+}
+
+// GetIssues returns issues according to the given page and perPage
+func (g *GithubDownloaderV3) GetIssues(page, perPage int) ([]*base.Issue, bool, error) {
+ if perPage > g.maxPerPage {
+ perPage = g.maxPerPage
+ }
+ opt := &github.IssueListByRepoOptions{
+ Sort: "created",
+ Direction: "asc",
+ State: "all",
+ ListOptions: github.ListOptions{
+ PerPage: perPage,
+ Page: page,
+ },
+ }
+
+ allIssues := make([]*base.Issue, 0, perPage)
+ g.waitAndPickClient()
+ issues, resp, err := g.getClient().Issues.ListByRepo(g.ctx, g.repoOwner, g.repoName, opt)
+ if err != nil {
+ return nil, false, fmt.Errorf("error while listing issues: %w", err)
+ }
+ log.Trace("Request get issues %d/%d, but in fact get %d", perPage, page, len(issues))
+ g.setRate(&resp.Rate)
+ for _, issue := range issues {
+ if issue.IsPullRequest() {
+ continue
+ }
+
+ labels := make([]*base.Label, 0, len(issue.Labels))
+ for _, l := range issue.Labels {
+ labels = append(labels, convertGithubLabel(l))
+ }
+
+ // get reactions
+ var reactions []*base.Reaction
+ if !g.SkipReactions {
+ for i := 1; ; i++ {
+ g.waitAndPickClient()
+ res, resp, err := g.getClient().Reactions.ListIssueReactions(g.ctx, g.repoOwner, g.repoName, issue.GetNumber(), &github.ListOptions{
+ Page: i,
+ PerPage: perPage,
+ })
+ if err != nil {
+ return nil, false, err
+ }
+ g.setRate(&resp.Rate)
+ if len(res) == 0 {
+ break
+ }
+ for _, reaction := range res {
+ reactions = append(reactions, &base.Reaction{
+ UserID: reaction.User.GetID(),
+ UserName: reaction.User.GetLogin(),
+ Content: reaction.GetContent(),
+ })
+ }
+ }
+ }
+
+ var assignees []string
+ for i := range issue.Assignees {
+ assignees = append(assignees, issue.Assignees[i].GetLogin())
+ }
+
+ allIssues = append(allIssues, &base.Issue{
+ Title: *issue.Title,
+ Number: int64(*issue.Number),
+ PosterID: issue.GetUser().GetID(),
+ PosterName: issue.GetUser().GetLogin(),
+ PosterEmail: issue.GetUser().GetEmail(),
+ Content: issue.GetBody(),
+ Milestone: issue.GetMilestone().GetTitle(),
+ State: issue.GetState(),
+ Created: issue.GetCreatedAt().Time,
+ Updated: issue.GetUpdatedAt().Time,
+ Labels: labels,
+ Reactions: reactions,
+ Closed: issue.ClosedAt.GetTime(),
+ IsLocked: issue.GetLocked(),
+ Assignees: assignees,
+ ForeignIndex: int64(*issue.Number),
+ })
+ }
+
+ return allIssues, len(issues) < perPage, nil
+}
+
+// SupportGetRepoComments returns true if repository-level comment listing is supported
+func (g *GithubDownloaderV3) SupportGetRepoComments() bool {
+ return true
+}
+
+// GetComments returns the comments of the given commentable
+func (g *GithubDownloaderV3) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) {
+ comments, err := g.getComments(commentable)
+ return comments, false, err
+}
+
+func (g *GithubDownloaderV3) getComments(commentable base.Commentable) ([]*base.Comment, error) {
+ var (
+ allComments = make([]*base.Comment, 0, g.maxPerPage)
+ created = "created"
+ asc = "asc"
+ )
+ opt := &github.IssueListCommentsOptions{
+ Sort: &created,
+ Direction: &asc,
+ ListOptions: github.ListOptions{
+ PerPage: g.maxPerPage,
+ },
+ }
+ for {
+ g.waitAndPickClient()
+ comments, resp, err := g.getClient().Issues.ListComments(g.ctx, g.repoOwner, g.repoName, int(commentable.GetForeignIndex()), opt)
+ if err != nil {
+ return nil, fmt.Errorf("error while listing comments: %w", err)
+ }
+ g.setRate(&resp.Rate)
+ for _, comment := range comments {
+ // get reactions
+ var reactions []*base.Reaction
+ if !g.SkipReactions {
+ for i := 1; ; i++ {
+ g.waitAndPickClient()
+ res, resp, err := g.getClient().Reactions.ListIssueCommentReactions(g.ctx, g.repoOwner, g.repoName, comment.GetID(), &github.ListOptions{
+ Page: i,
+ PerPage: g.maxPerPage,
+ })
+ if err != nil {
+ return nil, err
+ }
+ g.setRate(&resp.Rate)
+ if len(res) == 0 {
+ break
+ }
+ for _, reaction := range res {
+ reactions = append(reactions, &base.Reaction{
+ UserID: reaction.User.GetID(),
+ UserName: reaction.User.GetLogin(),
+ Content: reaction.GetContent(),
+ })
+ }
+ }
+ }
+
+ allComments = append(allComments, &base.Comment{
+ IssueIndex: commentable.GetLocalIndex(),
+ Index: comment.GetID(),
+ PosterID: comment.GetUser().GetID(),
+ PosterName: comment.GetUser().GetLogin(),
+ PosterEmail: comment.GetUser().GetEmail(),
+ Content: comment.GetBody(),
+ Created: comment.GetCreatedAt().Time,
+ Updated: comment.GetUpdatedAt().Time,
+ Reactions: reactions,
+ })
+ }
+ if resp.NextPage == 0 {
+ break
+ }
+ opt.Page = resp.NextPage
+ }
+ return allComments, nil
+}
+
+// GetAllComments returns repository comments according to the given page and perPage
+func (g *GithubDownloaderV3) GetAllComments(page, perPage int) ([]*base.Comment, bool, error) {
+ var (
+ allComments = make([]*base.Comment, 0, perPage)
+ created = "created"
+ asc = "asc"
+ )
+ if perPage > g.maxPerPage {
+ perPage = g.maxPerPage
+ }
+ opt := &github.IssueListCommentsOptions{
+ Sort: &created,
+ Direction: &asc,
+ ListOptions: github.ListOptions{
+ Page: page,
+ PerPage: perPage,
+ },
+ }
+
+ g.waitAndPickClient()
+ comments, resp, err := g.getClient().Issues.ListComments(g.ctx, g.repoOwner, g.repoName, 0, opt)
+ if err != nil {
+ return nil, false, fmt.Errorf("error while listing comments: %w", err)
+ }
+ isEnd := resp.NextPage == 0
+
+ log.Trace("Request get comments %d/%d, but in fact get %d, next page is %d", perPage, page, len(comments), resp.NextPage)
+ g.setRate(&resp.Rate)
+ for _, comment := range comments {
+ // get reactions
+ var reactions []*base.Reaction
+ if !g.SkipReactions {
+ for i := 1; ; i++ {
+ g.waitAndPickClient()
+ res, resp, err := g.getClient().Reactions.ListIssueCommentReactions(g.ctx, g.repoOwner, g.repoName, comment.GetID(), &github.ListOptions{
+ Page: i,
+ PerPage: g.maxPerPage,
+ })
+ if err != nil {
+ return nil, false, err
+ }
+ g.setRate(&resp.Rate)
+ if len(res) == 0 {
+ break
+ }
+ for _, reaction := range res {
+ reactions = append(reactions, &base.Reaction{
+ UserID: reaction.User.GetID(),
+ UserName: reaction.User.GetLogin(),
+ Content: reaction.GetContent(),
+ })
+ }
+ }
+ }
+ idx := strings.LastIndex(*comment.IssueURL, "/")
+ issueIndex, _ := strconv.ParseInt((*comment.IssueURL)[idx+1:], 10, 64)
+ allComments = append(allComments, &base.Comment{
+ IssueIndex: issueIndex,
+ Index: comment.GetID(),
+ PosterID: comment.GetUser().GetID(),
+ PosterName: comment.GetUser().GetLogin(),
+ PosterEmail: comment.GetUser().GetEmail(),
+ Content: comment.GetBody(),
+ Created: comment.GetCreatedAt().Time,
+ Updated: comment.GetUpdatedAt().Time,
+ Reactions: reactions,
+ })
+ }
+
+ return allComments, isEnd, nil
+}
+
+// GetPullRequests returns pull requests according to the given page and perPage
+func (g *GithubDownloaderV3) GetPullRequests(page, perPage int) ([]*base.PullRequest, bool, error) {
+ if perPage > g.maxPerPage {
+ perPage = g.maxPerPage
+ }
+ opt := &github.PullRequestListOptions{
+ Sort: "created",
+ Direction: "asc",
+ State: "all",
+ ListOptions: github.ListOptions{
+ PerPage: perPage,
+ Page: page,
+ },
+ }
+ allPRs := make([]*base.PullRequest, 0, perPage)
+ g.waitAndPickClient()
+ prs, resp, err := g.getClient().PullRequests.List(g.ctx, g.repoOwner, g.repoName, opt)
+ if err != nil {
+ return nil, false, fmt.Errorf("error while listing pull requests: %w", err)
+ }
+ log.Trace("Request get pull requests %d/%d, but in fact get %d", perPage, page, len(prs))
+ g.setRate(&resp.Rate)
+ for _, pr := range prs {
+ labels := make([]*base.Label, 0, len(pr.Labels))
+ for _, l := range pr.Labels {
+ labels = append(labels, convertGithubLabel(l))
+ }
+
+ // get reactions
+ var reactions []*base.Reaction
+ if !g.SkipReactions {
+ for i := 1; ; i++ {
+ g.waitAndPickClient()
+ res, resp, err := g.getClient().Reactions.ListIssueReactions(g.ctx, g.repoOwner, g.repoName, pr.GetNumber(), &github.ListOptions{
+ Page: i,
+ PerPage: perPage,
+ })
+ if err != nil {
+ return nil, false, err
+ }
+ g.setRate(&resp.Rate)
+ if len(res) == 0 {
+ break
+ }
+ for _, reaction := range res {
+ reactions = append(reactions, &base.Reaction{
+ UserID: reaction.User.GetID(),
+ UserName: reaction.User.GetLogin(),
+ Content: reaction.GetContent(),
+ })
+ }
+ }
+ }
+
+ // the patch itself is not downloaded here; the uploader fetches it later via PatchURL
+ g.waitAndPickClient()
+
+ allPRs = append(allPRs, &base.PullRequest{
+ Title: pr.GetTitle(),
+ Number: int64(pr.GetNumber()),
+ PosterID: pr.GetUser().GetID(),
+ PosterName: pr.GetUser().GetLogin(),
+ PosterEmail: pr.GetUser().GetEmail(),
+ Content: pr.GetBody(),
+ Milestone: pr.GetMilestone().GetTitle(),
+ State: pr.GetState(),
+ Created: pr.GetCreatedAt().Time,
+ Updated: pr.GetUpdatedAt().Time,
+ Closed: pr.ClosedAt.GetTime(),
+ Labels: labels,
+ Merged: pr.MergedAt != nil,
+ MergeCommitSHA: pr.GetMergeCommitSHA(),
+ MergedTime: pr.MergedAt.GetTime(),
+ IsLocked: pr.ActiveLockReason != nil,
+ Head: base.PullRequestBranch{
+ Ref: pr.GetHead().GetRef(),
+ SHA: pr.GetHead().GetSHA(),
+ OwnerName: pr.GetHead().GetUser().GetLogin(),
+ RepoName: pr.GetHead().GetRepo().GetName(),
+ CloneURL: pr.GetHead().GetRepo().GetCloneURL(), // SECURITY: validated by CheckAndEnsureSafePR below
+ },
+ Base: base.PullRequestBranch{
+ Ref: pr.GetBase().GetRef(),
+ SHA: pr.GetBase().GetSHA(),
+ RepoName: pr.GetBase().GetRepo().GetName(),
+ OwnerName: pr.GetBase().GetUser().GetLogin(),
+ },
+ PatchURL: pr.GetPatchURL(), // SECURITY: validated by CheckAndEnsureSafePR below
+ Reactions: reactions,
+ ForeignIndex: int64(*pr.Number),
+ })
+
+ // SECURITY: Ensure that the PR is safe
+ _ = CheckAndEnsureSafePR(allPRs[len(allPRs)-1], g.baseURL, g)
+ }
+
+ return allPRs, len(prs) < perPage, nil
+}
+
+func convertGithubReview(r *github.PullRequestReview) *base.Review {
+ return &base.Review{
+ ID: r.GetID(),
+ ReviewerID: r.GetUser().GetID(),
+ ReviewerName: r.GetUser().GetLogin(),
+ CommitID: r.GetCommitID(),
+ Content: r.GetBody(),
+ CreatedAt: r.GetSubmittedAt().Time,
+ State: r.GetState(),
+ }
+}
+
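+// convertGithubReviewComments converts GitHub review comments into the intermediate
+// migration format, fetching the reactions of each comment unless SkipReactions is set.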
+func (g *GithubDownloaderV3) convertGithubReviewComments(cs []*github.PullRequestComment) ([]*base.ReviewComment, error) {
+ rcs := make([]*base.ReviewComment, 0, len(cs))
+ for _, c := range cs {
+ // get reactions
+ var reactions []*base.Reaction
+ if !g.SkipReactions {
+ for i := 1; ; i++ {
+ g.waitAndPickClient()
+ res, resp, err := g.getClient().Reactions.ListPullRequestCommentReactions(g.ctx, g.repoOwner, g.repoName, c.GetID(), &github.ListOptions{
+ Page: i,
+ PerPage: g.maxPerPage,
+ })
+ if err != nil {
+ return nil, err
+ }
+ g.setRate(&resp.Rate)
+ if len(res) == 0 {
+ break
+ }
+ for _, reaction := range res {
+ reactions = append(reactions, &base.Reaction{
+ UserID: reaction.User.GetID(),
+ UserName: reaction.User.GetLogin(),
+ Content: reaction.GetContent(),
+ })
+ }
+ }
+ }
+
+ rcs = append(rcs, &base.ReviewComment{
+ ID: c.GetID(),
+ InReplyTo: c.GetInReplyTo(),
+ Content: c.GetBody(),
+ TreePath: c.GetPath(),
+ DiffHunk: c.GetDiffHunk(),
+ Position: c.GetPosition(),
+ CommitID: c.GetCommitID(),
+ PosterID: c.GetUser().GetID(),
+ Reactions: reactions,
+ CreatedAt: c.GetCreatedAt().Time,
+ UpdatedAt: c.GetUpdatedAt().Time,
+ })
+ }
+ return rcs, nil
+}
+
+// GetReviews returns the reviews of the given reviewable pull request
+func (g *GithubDownloaderV3) GetReviews(reviewable base.Reviewable) ([]*base.Review, error) {
+ allReviews := make([]*base.Review, 0, g.maxPerPage)
+ if g.SkipReviews {
+ return allReviews, nil
+ }
+ opt := &github.ListOptions{
+ PerPage: g.maxPerPage,
+ }
+ // Get approve/request change reviews
+ for {
+ g.waitAndPickClient()
+ reviews, resp, err := g.getClient().PullRequests.ListReviews(g.ctx, g.repoOwner, g.repoName, int(reviewable.GetForeignIndex()), opt)
+ if err != nil {
+ return nil, fmt.Errorf("error while listing reviews: %w", err)
+ }
+ g.setRate(&resp.Rate)
+ for _, review := range reviews {
+ r := convertGithubReview(review)
+ r.IssueIndex = reviewable.GetLocalIndex()
+ // retrieve all review comments
+ opt2 := &github.ListOptions{
+ PerPage: g.maxPerPage,
+ }
+ for {
+ g.waitAndPickClient()
+ reviewComments, resp, err := g.getClient().PullRequests.ListReviewComments(g.ctx, g.repoOwner, g.repoName, int(reviewable.GetForeignIndex()), review.GetID(), opt2)
+ if err != nil {
+ return nil, fmt.Errorf("error while listing review comments: %w", err)
+ }
+ g.setRate(&resp.Rate)
+
+ cs, err := g.convertGithubReviewComments(reviewComments)
+ if err != nil {
+ return nil, err
+ }
+ r.Comments = append(r.Comments, cs...)
+ if resp.NextPage == 0 {
+ break
+ }
+ opt2.Page = resp.NextPage
+ }
+ allReviews = append(allReviews, r)
+ }
+ if resp.NextPage == 0 {
+ break
+ }
+ opt.Page = resp.NextPage
+ }
+ // Get requested reviews
+ for {
+ g.waitAndPickClient()
+ reviewers, resp, err := g.getClient().PullRequests.ListReviewers(g.ctx, g.repoOwner, g.repoName, int(reviewable.GetForeignIndex()), opt)
+ if err != nil {
+ return nil, fmt.Errorf("error while listing requested reviewers: %w", err)
+ }
+ g.setRate(&resp.Rate)
+ for _, user := range reviewers.Users {
+ r := &base.Review{
+ ReviewerID: user.GetID(),
+ ReviewerName: user.GetLogin(),
+ State: base.ReviewStateRequestReview,
+ IssueIndex: reviewable.GetLocalIndex(),
+ }
+ allReviews = append(allReviews, r)
+ }
+ // TODO: Handle Team requests
+ if resp.NextPage == 0 {
+ break
+ }
+ opt.Page = resp.NextPage
+ }
+ return allReviews, nil
+}
diff --git a/services/migrations/github_test.go b/services/migrations/github_test.go
new file mode 100644
index 0000000..a2134f8
--- /dev/null
+++ b/services/migrations/github_test.go
@@ -0,0 +1,432 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// Copyright 2018 Jonas Franz. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "context"
+ "os"
+ "testing"
+ "time"
+
+ base "code.gitea.io/gitea/modules/migration"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestGitHubDownloadRepo(t *testing.T) {
+ GithubLimitRateRemaining = 3 // wait once only 3 requests remain, since up to 3 CI runs may execute in parallel
+ token := os.Getenv("GITHUB_READ_TOKEN")
+ if token == "" {
+ t.Skip("Skipping GitHub migration test because GITHUB_READ_TOKEN is empty")
+ }
+ downloader := NewGithubDownloaderV3(context.Background(), "https://github.com", "", "", token, "go-gitea", "test_repo")
+ err := downloader.RefreshRate()
+ require.NoError(t, err)
+
+ repo, err := downloader.GetRepoInfo()
+ require.NoError(t, err)
+ assertRepositoryEqual(t, &base.Repository{
+ Name: "test_repo",
+ Owner: "go-gitea",
+ Description: "Test repository for testing migration from github to gitea",
+ CloneURL: "https://github.com/go-gitea/test_repo.git",
+ OriginalURL: "https://github.com/go-gitea/test_repo",
+ DefaultBranch: "master",
+ }, repo)
+
+ topics, err := downloader.GetTopics()
+ require.NoError(t, err)
+ assert.Contains(t, topics, "gitea")
+
+ milestones, err := downloader.GetMilestones()
+ require.NoError(t, err)
+ assertMilestonesEqual(t, []*base.Milestone{
+ {
+ Title: "1.0.0",
+ Description: "Milestone 1.0.0",
+ Deadline: timePtr(time.Date(2019, 11, 11, 8, 0, 0, 0, time.UTC)),
+ Created: time.Date(2019, 11, 12, 19, 37, 8, 0, time.UTC),
+ Updated: timePtr(time.Date(2019, 11, 12, 21, 56, 17, 0, time.UTC)),
+ Closed: timePtr(time.Date(2019, 11, 12, 19, 45, 49, 0, time.UTC)),
+ State: "closed",
+ },
+ {
+ Title: "1.1.0",
+ Description: "Milestone 1.1.0",
+ Deadline: timePtr(time.Date(2019, 11, 12, 8, 0, 0, 0, time.UTC)),
+ Created: time.Date(2019, 11, 12, 19, 37, 25, 0, time.UTC),
+ Updated: timePtr(time.Date(2019, 11, 12, 21, 39, 27, 0, time.UTC)),
+ Closed: timePtr(time.Date(2019, 11, 12, 19, 45, 46, 0, time.UTC)),
+ State: "closed",
+ },
+ }, milestones)
+
+ labels, err := downloader.GetLabels()
+ require.NoError(t, err)
+ assertLabelsEqual(t, []*base.Label{
+ {
+ Name: "bug",
+ Color: "d73a4a",
+ Description: "Something isn't working",
+ },
+ {
+ Name: "documentation",
+ Color: "0075ca",
+ Description: "Improvements or additions to documentation",
+ },
+ {
+ Name: "duplicate",
+ Color: "cfd3d7",
+ Description: "This issue or pull request already exists",
+ },
+ {
+ Name: "enhancement",
+ Color: "a2eeef",
+ Description: "New feature or request",
+ },
+ {
+ Name: "good first issue",
+ Color: "7057ff",
+ Description: "Good for newcomers",
+ },
+ {
+ Name: "help wanted",
+ Color: "008672",
+ Description: "Extra attention is needed",
+ },
+ {
+ Name: "invalid",
+ Color: "e4e669",
+ Description: "This doesn't seem right",
+ },
+ {
+ Name: "question",
+ Color: "d876e3",
+ Description: "Further information is requested",
+ },
+ {
+ Name: "wontfix",
+ Color: "ffffff",
+ Description: "This will not be worked on",
+ },
+ }, labels)
+
+ releases, err := downloader.GetReleases()
+ require.NoError(t, err)
+ assertReleasesEqual(t, []*base.Release{
+ {
+ TagName: "v0.9.99",
+ TargetCommitish: "master",
+ Name: "First Release",
+ Body: "A test release",
+ Created: time.Date(2019, 11, 9, 16, 49, 21, 0, time.UTC),
+ Published: time.Date(2019, 11, 12, 20, 12, 10, 0, time.UTC),
+ PublisherID: 1669571,
+ PublisherName: "mrsdizzie",
+ },
+ }, releases)
+
+ // downloader.GetIssues()
+ issues, isEnd, err := downloader.GetIssues(1, 2)
+ require.NoError(t, err)
+ assert.False(t, isEnd)
+ assertIssuesEqual(t, []*base.Issue{
+ {
+ Number: 1,
+ Title: "Please add an animated gif icon to the merge button",
+ Content: "I just want the merge button to hurt my eyes a little. \xF0\x9F\x98\x9D ",
+ Milestone: "1.0.0",
+ PosterID: 18600385,
+ PosterName: "guillep2k",
+ State: "closed",
+ Created: time.Date(2019, 11, 9, 17, 0, 29, 0, time.UTC),
+ Updated: time.Date(2019, 11, 12, 20, 29, 53, 0, time.UTC),
+ Labels: []*base.Label{
+ {
+ Name: "bug",
+ Color: "d73a4a",
+ Description: "Something isn't working",
+ },
+ {
+ Name: "good first issue",
+ Color: "7057ff",
+ Description: "Good for newcomers",
+ },
+ },
+ Reactions: []*base.Reaction{
+ {
+ UserID: 1669571,
+ UserName: "mrsdizzie",
+ Content: "+1",
+ },
+ },
+ Closed: timePtr(time.Date(2019, 11, 12, 20, 22, 22, 0, time.UTC)),
+ },
+ {
+ Number: 2,
+ Title: "Test issue",
+ Content: "This is test issue 2, do not touch!",
+ Milestone: "1.1.0",
+ PosterID: 1669571,
+ PosterName: "mrsdizzie",
+ State: "closed",
+ Created: time.Date(2019, 11, 12, 21, 0, 6, 0, time.UTC),
+ Updated: time.Date(2019, 11, 12, 22, 7, 14, 0, time.UTC),
+ Labels: []*base.Label{
+ {
+ Name: "duplicate",
+ Color: "cfd3d7",
+ Description: "This issue or pull request already exists",
+ },
+ },
+ Reactions: []*base.Reaction{
+ {
+ UserID: 1669571,
+ UserName: "mrsdizzie",
+ Content: "heart",
+ },
+ {
+ UserID: 1669571,
+ UserName: "mrsdizzie",
+ Content: "laugh",
+ },
+ {
+ UserID: 1669571,
+ UserName: "mrsdizzie",
+ Content: "-1",
+ },
+ {
+ UserID: 1669571,
+ UserName: "mrsdizzie",
+ Content: "confused",
+ },
+ {
+ UserID: 1669571,
+ UserName: "mrsdizzie",
+ Content: "hooray",
+ },
+ {
+ UserID: 1669571,
+ UserName: "mrsdizzie",
+ Content: "+1",
+ },
+ },
+ Closed: timePtr(time.Date(2019, 11, 12, 21, 1, 31, 0, time.UTC)),
+ },
+ }, issues)
+
+ // downloader.GetComments()
+ comments, _, err := downloader.GetComments(&base.Issue{Number: 2, ForeignIndex: 2})
+ require.NoError(t, err)
+ assertCommentsEqual(t, []*base.Comment{
+ {
+ IssueIndex: 2,
+ PosterID: 1669571,
+ PosterName: "mrsdizzie",
+ Created: time.Date(2019, 11, 12, 21, 0, 13, 0, time.UTC),
+ Updated: time.Date(2019, 11, 12, 21, 0, 13, 0, time.UTC),
+ Content: "This is a comment",
+ Reactions: []*base.Reaction{
+ {
+ UserID: 1669571,
+ UserName: "mrsdizzie",
+ Content: "+1",
+ },
+ },
+ },
+ {
+ IssueIndex: 2,
+ PosterID: 1669571,
+ PosterName: "mrsdizzie",
+ Created: time.Date(2019, 11, 12, 22, 7, 14, 0, time.UTC),
+ Updated: time.Date(2019, 11, 12, 22, 7, 14, 0, time.UTC),
+ Content: "A second comment",
+ Reactions: nil,
+ },
+ }, comments)
+
+ // downloader.GetPullRequests()
+ prs, _, err := downloader.GetPullRequests(1, 2)
+ require.NoError(t, err)
+ assertPullRequestsEqual(t, []*base.PullRequest{
+ {
+ Number: 3,
+ Title: "Update README.md",
+ Content: "add warning to readme",
+ Milestone: "1.1.0",
+ PosterID: 1669571,
+ PosterName: "mrsdizzie",
+ State: "closed",
+ Created: time.Date(2019, 11, 12, 21, 21, 43, 0, time.UTC),
+ Updated: time.Date(2019, 11, 12, 21, 39, 28, 0, time.UTC),
+ Labels: []*base.Label{
+ {
+ Name: "documentation",
+ Color: "0075ca",
+ Description: "Improvements or additions to documentation",
+ },
+ },
+ PatchURL: "https://github.com/go-gitea/test_repo/pull/3.patch",
+ Head: base.PullRequestBranch{
+ Ref: "master",
+ CloneURL: "https://github.com/mrsdizzie/test_repo.git",
+ SHA: "076160cf0b039f13e5eff19619932d181269414b",
+ RepoName: "test_repo",
+ OwnerName: "mrsdizzie",
+ },
+ Base: base.PullRequestBranch{
+ Ref: "master",
+ SHA: "72866af952e98d02a73003501836074b286a78f6",
+ OwnerName: "go-gitea",
+ RepoName: "test_repo",
+ },
+ Closed: timePtr(time.Date(2019, 11, 12, 21, 39, 27, 0, time.UTC)),
+ Merged: true,
+ MergedTime: timePtr(time.Date(2019, 11, 12, 21, 39, 27, 0, time.UTC)),
+ MergeCommitSHA: "f32b0a9dfd09a60f616f29158f772cedd89942d2",
+ ForeignIndex: 3,
+ },
+ {
+ Number: 4,
+ Title: "Test branch",
+ Content: "do not merge this PR",
+ Milestone: "1.0.0",
+ PosterID: 1669571,
+ PosterName: "mrsdizzie",
+ State: "open",
+ Created: time.Date(2019, 11, 12, 21, 54, 18, 0, time.UTC),
+ Updated: time.Date(2020, 1, 4, 11, 30, 1, 0, time.UTC),
+ Labels: []*base.Label{
+ {
+ Name: "bug",
+ Color: "d73a4a",
+ Description: "Something isn't working",
+ },
+ },
+ PatchURL: "https://github.com/go-gitea/test_repo/pull/4.patch",
+ Head: base.PullRequestBranch{
+ Ref: "test-branch",
+ SHA: "2be9101c543658591222acbee3eb799edfc3853d",
+ RepoName: "test_repo",
+ OwnerName: "mrsdizzie",
+ CloneURL: "https://github.com/mrsdizzie/test_repo.git",
+ },
+ Base: base.PullRequestBranch{
+ Ref: "master",
+ SHA: "f32b0a9dfd09a60f616f29158f772cedd89942d2",
+ OwnerName: "go-gitea",
+ RepoName: "test_repo",
+ },
+ Merged: false,
+ MergeCommitSHA: "565d1208f5fffdc1c5ae1a2436491eb9a5e4ebae",
+ Reactions: []*base.Reaction{
+ {
+ UserID: 81045,
+ UserName: "lunny",
+ Content: "heart",
+ },
+ {
+ UserID: 81045,
+ UserName: "lunny",
+ Content: "+1",
+ },
+ },
+ ForeignIndex: 4,
+ },
+ }, prs)
+
+ reviews, err := downloader.GetReviews(&base.PullRequest{Number: 3, ForeignIndex: 3})
+ require.NoError(t, err)
+ assertReviewsEqual(t, []*base.Review{
+ {
+ ID: 315859956,
+ IssueIndex: 3,
+ ReviewerID: 42128690,
+ ReviewerName: "jolheiser",
+ CommitID: "076160cf0b039f13e5eff19619932d181269414b",
+ CreatedAt: time.Date(2019, 11, 12, 21, 35, 24, 0, time.UTC),
+ State: base.ReviewStateApproved,
+ },
+ {
+ ID: 315860062,
+ IssueIndex: 3,
+ ReviewerID: 1824502,
+ ReviewerName: "zeripath",
+ CommitID: "076160cf0b039f13e5eff19619932d181269414b",
+ CreatedAt: time.Date(2019, 11, 12, 21, 35, 36, 0, time.UTC),
+ State: base.ReviewStateApproved,
+ },
+ {
+ ID: 315861440,
+ IssueIndex: 3,
+ ReviewerID: 165205,
+ ReviewerName: "lafriks",
+ CommitID: "076160cf0b039f13e5eff19619932d181269414b",
+ CreatedAt: time.Date(2019, 11, 12, 21, 38, 0, 0, time.UTC),
+ State: base.ReviewStateApproved,
+ },
+ }, reviews)
+
+ reviews, err = downloader.GetReviews(&base.PullRequest{Number: 4, ForeignIndex: 4})
+ require.NoError(t, err)
+ assertReviewsEqual(t, []*base.Review{
+ {
+ ID: 338338740,
+ IssueIndex: 4,
+ ReviewerID: 81045,
+ ReviewerName: "lunny",
+ CommitID: "2be9101c543658591222acbee3eb799edfc3853d",
+ CreatedAt: time.Date(2020, 1, 4, 5, 33, 18, 0, time.UTC),
+ State: base.ReviewStateApproved,
+ Comments: []*base.ReviewComment{
+ {
+ ID: 363017488,
+ Content: "This is a good pull request.",
+ TreePath: "README.md",
+ DiffHunk: "@@ -1,2 +1,4 @@\n # test_repo\n Test repository for testing migration from github to gitea\n+",
+ Position: 3,
+ CommitID: "2be9101c543658591222acbee3eb799edfc3853d",
+ PosterID: 81045,
+ CreatedAt: time.Date(2020, 1, 4, 5, 33, 6, 0, time.UTC),
+ UpdatedAt: time.Date(2020, 1, 4, 5, 33, 18, 0, time.UTC),
+ },
+ },
+ },
+ {
+ ID: 338339651,
+ IssueIndex: 4,
+ ReviewerID: 81045,
+ ReviewerName: "lunny",
+ CommitID: "2be9101c543658591222acbee3eb799edfc3853d",
+ CreatedAt: time.Date(2020, 1, 4, 6, 7, 6, 0, time.UTC),
+ State: base.ReviewStateChangesRequested,
+ Content: "Don't add more reviews",
+ },
+ {
+ ID: 338349019,
+ IssueIndex: 4,
+ ReviewerID: 81045,
+ ReviewerName: "lunny",
+ CommitID: "2be9101c543658591222acbee3eb799edfc3853d",
+ CreatedAt: time.Date(2020, 1, 4, 11, 21, 41, 0, time.UTC),
+ State: base.ReviewStateCommented,
+ Comments: []*base.ReviewComment{
+ {
+ ID: 363029944,
+ Content: "test a single comment.",
+ TreePath: "LICENSE",
+ DiffHunk: "@@ -19,3 +19,5 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n SOFTWARE.\n+",
+ Position: 4,
+ CommitID: "2be9101c543658591222acbee3eb799edfc3853d",
+ PosterID: 81045,
+ CreatedAt: time.Date(2020, 1, 4, 11, 21, 41, 0, time.UTC),
+ UpdatedAt: time.Date(2020, 1, 4, 11, 21, 41, 0, time.UTC),
+ },
+ },
+ },
+ }, reviews)
+}
diff --git a/services/migrations/gitlab.go b/services/migrations/gitlab.go
new file mode 100644
index 0000000..1639a34
--- /dev/null
+++ b/services/migrations/gitlab.go
@@ -0,0 +1,784 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "path"
+ "regexp"
+ "strings"
+ "time"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/log"
+ base "code.gitea.io/gitea/modules/migration"
+ "code.gitea.io/gitea/modules/structs"
+
+ "github.com/xanzy/go-gitlab"
+)
+
+var (
+ _ base.Downloader = &GitlabDownloader{}
+ _ base.DownloaderFactory = &GitlabDownloaderFactory{}
+)
+
+func init() {
+ RegisterDownloaderFactory(&GitlabDownloaderFactory{})
+}
+
+// GitlabDownloaderFactory defines a gitlab downloader factory
+type GitlabDownloaderFactory struct{}
+
+// New returns a Downloader related to this factory according to the given MigrateOptions
+func (f *GitlabDownloaderFactory) New(ctx context.Context, opts base.MigrateOptions) (base.Downloader, error) {
+ u, err := url.Parse(opts.CloneAddr)
+ if err != nil {
+ return nil, err
+ }
+
+ baseURL := u.Scheme + "://" + u.Host
+ repoNameSpace := strings.TrimPrefix(u.Path, "/")
+ repoNameSpace = strings.TrimSuffix(repoNameSpace, ".git")
+
+ log.Trace("Create gitlab downloader. BaseURL: %s RepoName: %s", baseURL, repoNameSpace)
+
+ return NewGitlabDownloader(ctx, baseURL, repoNameSpace, opts.AuthUsername, opts.AuthPassword, opts.AuthToken)
+}
+
+// GitServiceType returns the type of git service
+func (f *GitlabDownloaderFactory) GitServiceType() structs.GitServiceType {
+ return structs.GitlabService
+}
+
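+// gitlabIIDResolver folds GitLab's two independent IID sequences (issues and merge
+// requests) into Gitea's single issue index space: all issue IIDs are recorded first,
+// then every merge request number is offset by the highest issue IID seen. For example,
+// if the largest issue IID is 10, merge request !3 becomes issue #13. recordIssueIID
+// must therefore never be called after generatePullRequestNumber.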
+type gitlabIIDResolver struct {
+ maxIssueIID int64
+ frozen bool
+}
+
+func (r *gitlabIIDResolver) recordIssueIID(issueIID int) {
+ if r.frozen {
+ panic("cannot record issue IID after pull request IID generation has started")
+ }
+ r.maxIssueIID = max(r.maxIssueIID, int64(issueIID))
+}
+
+func (r *gitlabIIDResolver) generatePullRequestNumber(mrIID int) int64 {
+ r.frozen = true
+ return r.maxIssueIID + int64(mrIID)
+}
+
+// GitlabDownloader implements a Downloader interface to get repository information
+// from gitlab via go-gitlab
+// - the iidResolver records the highest issue IID seen in GetIssues() so that merge request
+// numbers can be offset past it, because Gitlab keeps separate Issue and Merge Request numbers.
+type GitlabDownloader struct {
+ base.NullDownloader
+ ctx context.Context
+ client *gitlab.Client
+ baseURL string
+ repoID int
+ repoName string
+ iidResolver gitlabIIDResolver
+ maxPerPage int
+}
+
+// NewGitlabDownloader creates a gitlab Downloader via gitlab API
+//
+// Use either a username/password, personal token entered into the username field, or anonymous/public access
+// Note: Public access only allows very basic access
+func NewGitlabDownloader(ctx context.Context, baseURL, repoPath, username, password, token string) (*GitlabDownloader, error) {
+ gitlabClient, err := gitlab.NewClient(token, gitlab.WithBaseURL(baseURL), gitlab.WithHTTPClient(NewMigrationHTTPClient()))
+ // Only use basic auth if token is blank and password is NOT
+ // Basic auth will fail with empty strings, but empty token will allow anonymous public API usage
+ if token == "" && password != "" {
+ gitlabClient, err = gitlab.NewBasicAuthClient(username, password, gitlab.WithBaseURL(baseURL), gitlab.WithHTTPClient(NewMigrationHTTPClient()))
+ }
+
+ if err != nil {
+ log.Trace("Error logging into gitlab: %v", err)
+ return nil, err
+ }
+
+ // Split namespace and subdirectory: probe the API and, while it fails, move one
+ // leading path segment from the repo path into the base URL. This handles GitLab
+ // instances hosted under a sub-path, e.g. https://example.com/gitlab/group/project.
+ pathParts := strings.Split(strings.Trim(repoPath, "/"), "/")
+ var resp *gitlab.Response
+ u, _ := url.Parse(baseURL)
+ for len(pathParts) >= 2 {
+ _, resp, err = gitlabClient.Version.GetVersion()
+		if err == nil || (resp != nil && resp.StatusCode == http.StatusUnauthorized) {
+			err = nil // even without authentication this should still work
+ break
+ }
+
+ u.Path = path.Join(u.Path, pathParts[0])
+ baseURL = u.String()
+ pathParts = pathParts[1:]
+ _ = gitlab.WithBaseURL(baseURL)(gitlabClient)
+ repoPath = strings.Join(pathParts, "/")
+ }
+ if err != nil {
+		log.Trace("Error: could not get gitlab version: %v", err)
+ return nil, err
+ }
+
+ log.Trace("gitlab downloader: use BaseURL: '%s' and RepoPath: '%s'", baseURL, repoPath)
+
+	// Grab and store the project/repo ID here, as using the URL-escaped path later causes issues
+ gr, _, err := gitlabClient.Projects.GetProject(repoPath, nil, nil, gitlab.WithContext(ctx))
+ if err != nil {
+ log.Trace("Error retrieving project: %v", err)
+ return nil, err
+ }
+
+ if gr == nil {
+ log.Trace("Error getting project, project is nil")
+		return nil, errors.New("error getting project, project is nil")
+ }
+
+ return &GitlabDownloader{
+ ctx: ctx,
+ client: gitlabClient,
+ baseURL: baseURL,
+ repoID: gr.ID,
+ repoName: gr.Name,
+ maxPerPage: 100,
+ }, nil
+}
+
+// String implements Stringer
+func (g *GitlabDownloader) String() string {
+ return fmt.Sprintf("migration from gitlab server %s [%d]/%s", g.baseURL, g.repoID, g.repoName)
+}
+
+func (g *GitlabDownloader) LogString() string {
+ if g == nil {
+ return "<GitlabDownloader nil>"
+ }
+ return fmt.Sprintf("<GitlabDownloader %s [%d]/%s>", g.baseURL, g.repoID, g.repoName)
+}
+
+// SetContext sets the context
+func (g *GitlabDownloader) SetContext(ctx context.Context) {
+ g.ctx = ctx
+}
+
+// GetRepoInfo returns the repository information
+func (g *GitlabDownloader) GetRepoInfo() (*base.Repository, error) {
+ gr, _, err := g.client.Projects.GetProject(g.repoID, nil, nil, gitlab.WithContext(g.ctx))
+ if err != nil {
+ return nil, err
+ }
+
+	var private bool
+	switch gr.Visibility {
+	case gitlab.InternalVisibility, gitlab.PrivateVisibility:
+		private = true
+	}
+
+ var owner string
+ if gr.Owner == nil {
+ log.Trace("gr.Owner is nil, trying to get owner from Namespace")
+ if gr.Namespace != nil && gr.Namespace.Kind == "user" {
+ owner = gr.Namespace.Path
+ }
+ } else {
+ owner = gr.Owner.Username
+ }
+
+	// convert the gitlab repo to a standard Repository
+ return &base.Repository{
+ Owner: owner,
+ Name: gr.Name,
+ IsPrivate: private,
+ Description: gr.Description,
+ OriginalURL: gr.WebURL,
+ CloneURL: gr.HTTPURLToRepo,
+ DefaultBranch: gr.DefaultBranch,
+ }, nil
+}
+
+// GetTopics returns gitlab topics
+func (g *GitlabDownloader) GetTopics() ([]string, error) {
+ gr, _, err := g.client.Projects.GetProject(g.repoID, nil, nil, gitlab.WithContext(g.ctx))
+ if err != nil {
+ return nil, err
+ }
+	return gr.TagList, nil
+}
+
+// GetMilestones returns milestones
+func (g *GitlabDownloader) GetMilestones() ([]*base.Milestone, error) {
+ perPage := g.maxPerPage
+ state := "all"
+ milestones := make([]*base.Milestone, 0, perPage)
+ for i := 1; ; i++ {
+ ms, _, err := g.client.Milestones.ListMilestones(g.repoID, &gitlab.ListMilestonesOptions{
+ State: &state,
+ ListOptions: gitlab.ListOptions{
+ Page: i,
+ PerPage: perPage,
+ },
+ }, nil, gitlab.WithContext(g.ctx))
+ if err != nil {
+ return nil, err
+ }
+
+ for _, m := range ms {
+			desc := m.Description
+ state := "open"
+ var closedAt *time.Time
+ if m.State != "" {
+ state = m.State
+ if state == "closed" {
+ closedAt = m.UpdatedAt
+ }
+ }
+
+ var deadline *time.Time
+ if m.DueDate != nil {
+ deadlineParsed, err := time.Parse("2006-01-02", m.DueDate.String())
+ if err != nil {
+ log.Trace("Error parsing Milestone DueDate time")
+ deadline = nil
+ } else {
+ deadline = &deadlineParsed
+ }
+ }
+
+ milestones = append(milestones, &base.Milestone{
+ Title: m.Title,
+ Description: desc,
+ Deadline: deadline,
+ State: state,
+ Created: *m.CreatedAt,
+ Updated: m.UpdatedAt,
+ Closed: closedAt,
+ })
+ }
+ if len(ms) < perPage {
+ break
+ }
+ }
+ return milestones, nil
+}
+
+func (g *GitlabDownloader) normalizeColor(val string) string {
+ val = strings.TrimLeft(val, "#")
+ val = strings.ToLower(val)
+ if len(val) == 3 {
+ c := []rune(val)
+ val = fmt.Sprintf("%c%c%c%c%c%c", c[0], c[0], c[1], c[1], c[2], c[2])
+ }
+ if len(val) != 6 {
+ return ""
+ }
+ return val
+}
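+
+// For illustration: "#D9534F" normalizes to "d9534f", the shorthand "#abc"
+// expands to "aabbcc", and anything that does not end up exactly six
+// characters long (e.g. "#ab") normalizes to "".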
+
+// GetLabels returns labels
+func (g *GitlabDownloader) GetLabels() ([]*base.Label, error) {
+ perPage := g.maxPerPage
+ labels := make([]*base.Label, 0, perPage)
+ for i := 1; ; i++ {
+ ls, _, err := g.client.Labels.ListLabels(g.repoID, &gitlab.ListLabelsOptions{ListOptions: gitlab.ListOptions{
+ Page: i,
+ PerPage: perPage,
+ }}, nil, gitlab.WithContext(g.ctx))
+ if err != nil {
+ return nil, err
+ }
+ for _, label := range ls {
+ baseLabel := &base.Label{
+ Name: strings.Replace(label.Name, "::", "/", 1),
+ Color: g.normalizeColor(label.Color),
+ Description: label.Description,
+ Exclusive: strings.Contains(label.Name, "::"),
+ }
+ labels = append(labels, baseLabel)
+ }
+ if len(ls) < perPage {
+ break
+ }
+ }
+ return labels, nil
+}
+
+func (g *GitlabDownloader) convertGitlabRelease(rel *gitlab.Release) *base.Release {
+ var zero int
+ r := &base.Release{
+ TagName: rel.TagName,
+ TargetCommitish: rel.Commit.ID,
+ Name: rel.Name,
+ Body: rel.Description,
+ Created: *rel.CreatedAt,
+ PublisherID: int64(rel.Author.ID),
+ PublisherName: rel.Author.Username,
+ }
+
+ httpClient := NewMigrationHTTPClient()
+
+ for k, asset := range rel.Assets.Links {
+ assetID := asset.ID // Don't optimize this, for closure we need a local variable
+ r.Assets = append(r.Assets, &base.ReleaseAsset{
+ ID: int64(asset.ID),
+ Name: asset.Name,
+ ContentType: &rel.Assets.Sources[k].Format,
+ Size: &zero,
+ DownloadCount: &zero,
+ DownloadFunc: func() (io.ReadCloser, error) {
+ link, _, err := g.client.ReleaseLinks.GetReleaseLink(g.repoID, rel.TagName, assetID, gitlab.WithContext(g.ctx))
+ if err != nil {
+ return nil, err
+ }
+
+ if !hasBaseURL(link.URL, g.baseURL) {
+ WarnAndNotice("Unexpected AssetURL for assetID[%d] in %s: %s", assetID, g, link.URL)
+ return io.NopCloser(strings.NewReader(link.URL)), nil
+ }
+
+ req, err := http.NewRequest("GET", link.URL, nil)
+ if err != nil {
+ return nil, err
+ }
+ req = req.WithContext(g.ctx)
+ resp, err := httpClient.Do(req)
+ if err != nil {
+ return nil, err
+ }
+
+ // resp.Body is closed by the uploader
+ return resp.Body, nil
+ },
+ })
+ }
+ return r
+}
+
+// GetReleases returns releases
+func (g *GitlabDownloader) GetReleases() ([]*base.Release, error) {
+ perPage := g.maxPerPage
+ releases := make([]*base.Release, 0, perPage)
+ for i := 1; ; i++ {
+ ls, _, err := g.client.Releases.ListReleases(g.repoID, &gitlab.ListReleasesOptions{
+ ListOptions: gitlab.ListOptions{
+ Page: i,
+ PerPage: perPage,
+ },
+ }, nil, gitlab.WithContext(g.ctx))
+ if err != nil {
+ return nil, err
+ }
+
+ for _, release := range ls {
+ releases = append(releases, g.convertGitlabRelease(release))
+ }
+ if len(ls) < perPage {
+ break
+ }
+ }
+ return releases, nil
+}
+
+type gitlabIssueContext struct {
+ IsMergeRequest bool
+}
+
+// GetIssues returns issues according to the page and perPage arguments
+//
+// Note: issue label descriptions and colors are not supported by the go-gitlab library at this time
+func (g *GitlabDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, error) {
+ state := "all"
+ sort := "asc"
+
+ if perPage > g.maxPerPage {
+ perPage = g.maxPerPage
+ }
+
+ opt := &gitlab.ListProjectIssuesOptions{
+ State: &state,
+ Sort: &sort,
+ ListOptions: gitlab.ListOptions{
+ PerPage: perPage,
+ Page: page,
+ },
+ }
+
+ allIssues := make([]*base.Issue, 0, perPage)
+
+ issues, _, err := g.client.Issues.ListProjectIssues(g.repoID, opt, nil, gitlab.WithContext(g.ctx))
+ if err != nil {
+ return nil, false, fmt.Errorf("error while listing issues: %w", err)
+ }
+ for _, issue := range issues {
+ labels := make([]*base.Label, 0, len(issue.Labels))
+ for _, l := range issue.Labels {
+ labels = append(labels, &base.Label{
+ Name: strings.Replace(l, "::", "/", 1),
+ })
+ }
+
+ var milestone string
+ if issue.Milestone != nil {
+ milestone = issue.Milestone.Title
+ }
+
+ var reactions []*gitlab.AwardEmoji
+ awardPage := 1
+ for {
+ awards, _, err := g.client.AwardEmoji.ListIssueAwardEmoji(g.repoID, issue.IID, &gitlab.ListAwardEmojiOptions{Page: awardPage, PerPage: perPage}, gitlab.WithContext(g.ctx))
+ if err != nil {
+ return nil, false, fmt.Errorf("error while listing issue awards: %w", err)
+ }
+
+ reactions = append(reactions, awards...)
+
+ if len(awards) < perPage {
+ break
+ }
+
+ awardPage++
+ }
+
+ allIssues = append(allIssues, &base.Issue{
+ Title: issue.Title,
+ Number: int64(issue.IID),
+ PosterID: int64(issue.Author.ID),
+ PosterName: issue.Author.Username,
+ Content: issue.Description,
+ Milestone: milestone,
+ State: issue.State,
+ Created: *issue.CreatedAt,
+ Labels: labels,
+ Reactions: g.awardsToReactions(reactions),
+ Closed: issue.ClosedAt,
+ IsLocked: issue.DiscussionLocked,
+ Updated: *issue.UpdatedAt,
+ ForeignIndex: int64(issue.IID),
+ Context: gitlabIssueContext{IsMergeRequest: false},
+ })
+
+ // record the issue IID, to be used in GetPullRequests()
+ g.iidResolver.recordIssueIID(issue.IID)
+ }
+
+ return allIssues, len(issues) < perPage, nil
+}
+
+// GetComments returns the comments of an issue or merge request
+// TODO: figure out how to transfer comment reactions
+func (g *GitlabDownloader) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) {
+ context, ok := commentable.GetContext().(gitlabIssueContext)
+ if !ok {
+ return nil, false, fmt.Errorf("unexpected context: %+v", commentable.GetContext())
+ }
+
+ allComments := make([]*base.Comment, 0, g.maxPerPage)
+
+ page := 1
+
+ for {
+ var comments []*gitlab.Discussion
+ var resp *gitlab.Response
+ var err error
+ if !context.IsMergeRequest {
+ comments, resp, err = g.client.Discussions.ListIssueDiscussions(g.repoID, int(commentable.GetForeignIndex()), &gitlab.ListIssueDiscussionsOptions{
+ Page: page,
+ PerPage: g.maxPerPage,
+ }, nil, gitlab.WithContext(g.ctx))
+ } else {
+ comments, resp, err = g.client.Discussions.ListMergeRequestDiscussions(g.repoID, int(commentable.GetForeignIndex()), &gitlab.ListMergeRequestDiscussionsOptions{
+ Page: page,
+ PerPage: g.maxPerPage,
+ }, nil, gitlab.WithContext(g.ctx))
+ }
+
+ if err != nil {
+			return nil, false, fmt.Errorf("error while listing comments for repo %v: %w", g.repoID, err)
+ }
+ for _, comment := range comments {
+ for _, note := range comment.Notes {
+ allComments = append(allComments, g.convertNoteToComment(commentable.GetLocalIndex(), note))
+ }
+ }
+ if resp.NextPage == 0 {
+ break
+ }
+ page = resp.NextPage
+ }
+
+ page = 1
+ for {
+ var stateEvents []*gitlab.StateEvent
+ var resp *gitlab.Response
+ var err error
+ if context.IsMergeRequest {
+ stateEvents, resp, err = g.client.ResourceStateEvents.ListMergeStateEvents(g.repoID, int(commentable.GetForeignIndex()), &gitlab.ListStateEventsOptions{
+ ListOptions: gitlab.ListOptions{
+ Page: page,
+ PerPage: g.maxPerPage,
+ },
+ }, nil, gitlab.WithContext(g.ctx))
+ } else {
+ stateEvents, resp, err = g.client.ResourceStateEvents.ListIssueStateEvents(g.repoID, int(commentable.GetForeignIndex()), &gitlab.ListStateEventsOptions{
+ ListOptions: gitlab.ListOptions{
+ Page: page,
+ PerPage: g.maxPerPage,
+ },
+ }, nil, gitlab.WithContext(g.ctx))
+ }
+ if err != nil {
+			return nil, false, fmt.Errorf("error while listing state events for repo %v: %w", g.repoID, err)
+ }
+
+ for _, stateEvent := range stateEvents {
+ comment := &base.Comment{
+ IssueIndex: commentable.GetLocalIndex(),
+ Index: int64(stateEvent.ID),
+ PosterID: int64(stateEvent.User.ID),
+ PosterName: stateEvent.User.Username,
+ Content: "",
+ Created: *stateEvent.CreatedAt,
+ }
+ switch stateEvent.State {
+ case gitlab.ClosedEventType:
+ comment.CommentType = issues_model.CommentTypeClose.String()
+ case gitlab.MergedEventType:
+ comment.CommentType = issues_model.CommentTypeMergePull.String()
+ case gitlab.ReopenedEventType:
+ comment.CommentType = issues_model.CommentTypeReopen.String()
+ default:
+ // Ignore other event types
+ continue
+ }
+ allComments = append(allComments, comment)
+ }
+
+ if resp.NextPage == 0 {
+ break
+ }
+ page = resp.NextPage
+ }
+
+ return allComments, true, nil
+}
+
+var targetBranchChangeRegexp = regexp.MustCompile("^changed target branch from `(.*?)` to `(.*?)`$")
+
+func (g *GitlabDownloader) convertNoteToComment(localIndex int64, note *gitlab.Note) *base.Comment {
+ comment := &base.Comment{
+ IssueIndex: localIndex,
+ Index: int64(note.ID),
+ PosterID: int64(note.Author.ID),
+ PosterName: note.Author.Username,
+ PosterEmail: note.Author.Email,
+ Content: note.Body,
+ Created: *note.CreatedAt,
+ Meta: map[string]any{},
+ }
+
+ // Try to find the underlying event of system notes.
+ if note.System {
+ if match := targetBranchChangeRegexp.FindStringSubmatch(note.Body); match != nil {
+ comment.CommentType = issues_model.CommentTypeChangeTargetBranch.String()
+ comment.Meta["OldRef"] = match[1]
+ comment.Meta["NewRef"] = match[2]
+ } else if strings.HasPrefix(note.Body, "enabled an automatic merge") {
+ comment.CommentType = issues_model.CommentTypePRScheduledToAutoMerge.String()
+ } else if note.Body == "canceled the automatic merge" {
+ comment.CommentType = issues_model.CommentTypePRUnScheduledToAutoMerge.String()
+ }
+ }
+
+ return comment
+}
+
+// GetPullRequests returns pull requests according to the page and perPage arguments
+func (g *GitlabDownloader) GetPullRequests(page, perPage int) ([]*base.PullRequest, bool, error) {
+ if perPage > g.maxPerPage {
+ perPage = g.maxPerPage
+ }
+
+ view := "simple"
+ opt := &gitlab.ListProjectMergeRequestsOptions{
+ ListOptions: gitlab.ListOptions{
+ PerPage: perPage,
+ Page: page,
+ },
+ View: &view,
+ }
+
+ allPRs := make([]*base.PullRequest, 0, perPage)
+
+ prs, _, err := g.client.MergeRequests.ListProjectMergeRequests(g.repoID, opt, nil, gitlab.WithContext(g.ctx))
+ if err != nil {
+ return nil, false, fmt.Errorf("error while listing merge requests: %w", err)
+ }
+ for _, simplePR := range prs {
+ // Load merge request again by itself, as not all fields are populated in the ListProjectMergeRequests endpoint.
+ // See https://gitlab.com/gitlab-org/gitlab/-/issues/29620
+ pr, _, err := g.client.MergeRequests.GetMergeRequest(g.repoID, simplePR.IID, nil)
+ if err != nil {
+ return nil, false, fmt.Errorf("error while loading merge request: %w", err)
+ }
+
+ labels := make([]*base.Label, 0, len(pr.Labels))
+ for _, l := range pr.Labels {
+ labels = append(labels, &base.Label{
+ Name: strings.Replace(l, "::", "/", 1),
+ })
+ }
+
+ var merged bool
+ if pr.State == "merged" {
+ merged = true
+ pr.State = "closed"
+ }
+
+ mergeTime := pr.MergedAt
+ if merged && pr.MergedAt == nil {
+ mergeTime = pr.UpdatedAt
+ }
+
+ closeTime := pr.ClosedAt
+ if merged && pr.ClosedAt == nil {
+ closeTime = pr.UpdatedAt
+ }
+
+ mergeCommitSHA := pr.MergeCommitSHA
+ if mergeCommitSHA == "" {
+ mergeCommitSHA = pr.SquashCommitSHA
+ }
+
+ var locked bool
+ if pr.State == "locked" {
+ locked = true
+ }
+
+ var milestone string
+ if pr.Milestone != nil {
+ milestone = pr.Milestone.Title
+ }
+
+ var reactions []*gitlab.AwardEmoji
+ awardPage := 1
+ for {
+ awards, _, err := g.client.AwardEmoji.ListMergeRequestAwardEmoji(g.repoID, pr.IID, &gitlab.ListAwardEmojiOptions{Page: awardPage, PerPage: perPage}, gitlab.WithContext(g.ctx))
+ if err != nil {
+				return nil, false, fmt.Errorf("error while listing merge request awards: %w", err)
+ }
+
+ reactions = append(reactions, awards...)
+
+ if len(awards) < perPage {
+ break
+ }
+
+ awardPage++
+ }
+
+		// Generate a new PR number from the known issue numbers, because PRs and issues share one number space in Gitea but are numbered independently in Gitlab
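+		// (e.g. with issues recorded up to IID 3, merge request !1 becomes pull request #4)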
+ newPRNumber := g.iidResolver.generatePullRequestNumber(pr.IID)
+
+ allPRs = append(allPRs, &base.PullRequest{
+ Title: pr.Title,
+ Number: newPRNumber,
+ PosterName: pr.Author.Username,
+ PosterID: int64(pr.Author.ID),
+ Content: pr.Description,
+ Milestone: milestone,
+ State: pr.State,
+ Created: *pr.CreatedAt,
+ Closed: closeTime,
+ Labels: labels,
+ Merged: merged,
+ MergeCommitSHA: mergeCommitSHA,
+ MergedTime: mergeTime,
+ IsLocked: locked,
+ Reactions: g.awardsToReactions(reactions),
+ Head: base.PullRequestBranch{
+ Ref: pr.SourceBranch,
+ SHA: pr.SHA,
+ RepoName: g.repoName,
+ OwnerName: pr.Author.Username,
+ CloneURL: pr.WebURL,
+ },
+ Base: base.PullRequestBranch{
+ Ref: pr.TargetBranch,
+ SHA: pr.DiffRefs.BaseSha,
+ RepoName: g.repoName,
+ OwnerName: pr.Author.Username,
+ },
+ PatchURL: pr.WebURL + ".patch",
+ ForeignIndex: int64(pr.IID),
+ Context: gitlabIssueContext{IsMergeRequest: true},
+ })
+
+ // SECURITY: Ensure that the PR is safe
+ _ = CheckAndEnsureSafePR(allPRs[len(allPRs)-1], g.baseURL, g)
+ }
+
+ return allPRs, len(prs) < perPage, nil
+}
+
+// GetReviews returns pull requests review
+func (g *GitlabDownloader) GetReviews(reviewable base.Reviewable) ([]*base.Review, error) {
+ approvals, resp, err := g.client.MergeRequestApprovals.GetConfiguration(g.repoID, int(reviewable.GetForeignIndex()), gitlab.WithContext(g.ctx))
+ if err != nil {
+ if resp != nil && resp.StatusCode == http.StatusNotFound {
+			log.Error("GitlabDownloader: an error occurred while migrating: %v", err)
+ return []*base.Review{}, nil
+ }
+ return nil, err
+ }
+
+ var createdAt time.Time
+ if approvals.CreatedAt != nil {
+ createdAt = *approvals.CreatedAt
+ } else if approvals.UpdatedAt != nil {
+ createdAt = *approvals.UpdatedAt
+ } else {
+ createdAt = time.Now()
+ }
+
+ reviews := make([]*base.Review, 0, len(approvals.ApprovedBy))
+ for _, user := range approvals.ApprovedBy {
+ reviews = append(reviews, &base.Review{
+ IssueIndex: reviewable.GetLocalIndex(),
+ ReviewerID: int64(user.User.ID),
+ ReviewerName: user.User.Username,
+ CreatedAt: createdAt,
+ // All we get are approvals
+ State: base.ReviewStateApproved,
+ })
+ }
+
+ return reviews, nil
+}
+
+func (g *GitlabDownloader) awardsToReactions(awards []*gitlab.AwardEmoji) []*base.Reaction {
+ result := make([]*base.Reaction, 0, len(awards))
+ uniqCheck := make(container.Set[string])
+ for _, award := range awards {
+ uid := fmt.Sprintf("%s%d", award.Name, award.User.ID)
+ if uniqCheck.Add(uid) {
+ result = append(result, &base.Reaction{
+ UserID: int64(award.User.ID),
+ UserName: award.User.Username,
+ Content: award.Name,
+ })
+ }
+ }
+ return result
+}
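+
+// Deduplication example: two "thumbsup" awards from the same user collapse into
+// a single reaction; see TestAwardsToReactions in gitlab_test.go.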
diff --git a/services/migrations/gitlab_test.go b/services/migrations/gitlab_test.go
new file mode 100644
index 0000000..39edba3
--- /dev/null
+++ b/services/migrations/gitlab_test.go
@@ -0,0 +1,646 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+ "net/http/httptest"
+ "os"
+ "strconv"
+ "testing"
+ "time"
+
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/json"
+ base "code.gitea.io/gitea/modules/migration"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "github.com/xanzy/go-gitlab"
+)
+
+func TestGitlabDownloadRepo(t *testing.T) {
+ // If a GitLab access token is provided, this test will make HTTP requests to the live gitlab.com instance.
+ // When doing so, the responses from gitlab.com will be saved as test data files.
+ // If no access token is available, those cached responses will be used instead.
+ gitlabPersonalAccessToken := os.Getenv("GITLAB_READ_TOKEN")
+ fixturePath := "./testdata/gitlab/full_download"
+ server := unittest.NewMockWebServer(t, "https://gitlab.com", fixturePath, gitlabPersonalAccessToken != "")
+ defer server.Close()
+
+ downloader, err := NewGitlabDownloader(context.Background(), server.URL, "forgejo/test_repo", "", "", gitlabPersonalAccessToken)
+ if err != nil {
+		t.Fatalf("NewGitlabDownloader failed: %v", err)
+ }
+ repo, err := downloader.GetRepoInfo()
+ require.NoError(t, err)
+ // Repo Owner is blank in Gitlab Group repos
+ assertRepositoryEqual(t, &base.Repository{
+ Name: "test_repo",
+ Owner: "",
+ Description: "Test repository for testing migration from gitlab to forgejo",
+ CloneURL: server.URL + "/forgejo/test_repo.git",
+ OriginalURL: server.URL + "/forgejo/test_repo",
+ DefaultBranch: "master",
+ }, repo)
+
+ topics, err := downloader.GetTopics()
+ require.NoError(t, err)
+ assert.Len(t, topics, 2)
+ assert.EqualValues(t, []string{"migration", "test"}, topics)
+
+ milestones, err := downloader.GetMilestones()
+ require.NoError(t, err)
+ assertMilestonesEqual(t, []*base.Milestone{
+ {
+ Title: "1.0.0",
+ Created: time.Date(2024, 9, 3, 13, 53, 8, 516000000, time.UTC),
+ Updated: timePtr(time.Date(2024, 9, 3, 20, 3, 57, 786000000, time.UTC)),
+ Closed: timePtr(time.Date(2024, 9, 3, 20, 3, 57, 786000000, time.UTC)),
+ State: "closed",
+ },
+ {
+ Title: "1.1.0",
+ Created: time.Date(2024, 9, 3, 13, 52, 48, 414000000, time.UTC),
+ Updated: timePtr(time.Date(2024, 9, 3, 14, 52, 14, 93000000, time.UTC)),
+ State: "active",
+ },
+ }, milestones)
+
+ labels, err := downloader.GetLabels()
+ require.NoError(t, err)
+ assertLabelsEqual(t, []*base.Label{
+ {
+ Name: "bug",
+ Color: "d9534f",
+ },
+ {
+ Name: "confirmed",
+ Color: "d9534f",
+ },
+ {
+ Name: "critical",
+ Color: "d9534f",
+ },
+ {
+ Name: "discussion",
+ Color: "428bca",
+ },
+ {
+ Name: "documentation",
+ Color: "f0ad4e",
+ },
+ {
+ Name: "duplicate",
+ Color: "7f8c8d",
+ },
+ {
+ Name: "enhancement",
+ Color: "5cb85c",
+ },
+ {
+ Name: "suggestion",
+ Color: "428bca",
+ },
+ {
+ Name: "support",
+ Color: "f0ad4e",
+ },
+ {
+ Name: "test-scope/label0",
+ Color: "6699cc",
+ Description: "scoped label",
+ Exclusive: true,
+ },
+ {
+ Name: "test-scope/label1",
+ Color: "dc143c",
+ Exclusive: true,
+ },
+ }, labels)
+
+ releases, err := downloader.GetReleases()
+ require.NoError(t, err)
+ assertReleasesEqual(t, []*base.Release{
+ {
+ TagName: "v0.9.99",
+ TargetCommitish: "0720a3ec57c1f843568298117b874319e7deee75",
+ Name: "First Release",
+ Body: "A test release",
+ Created: time.Date(2024, 9, 3, 15, 1, 1, 513000000, time.UTC),
+ PublisherID: 548513,
+ PublisherName: "mkobel",
+ },
+ }, releases)
+
+ issues, isEnd, err := downloader.GetIssues(1, 2)
+ require.NoError(t, err)
+ assert.False(t, isEnd)
+ assertIssuesEqual(t, []*base.Issue{
+ {
+ Number: 1,
+ Title: "Please add an animated gif icon to the merge button",
+ Content: "I just want the merge button to hurt my eyes a little. :stuck_out_tongue_closed_eyes:",
+ Milestone: "1.0.0",
+ PosterID: 548513,
+ PosterName: "mkobel",
+ State: "closed",
+ Created: time.Date(2024, 9, 3, 14, 42, 34, 924000000, time.UTC),
+ Updated: time.Date(2024, 9, 3, 14, 48, 43, 756000000, time.UTC),
+ Labels: []*base.Label{
+ {
+ Name: "bug",
+ },
+ {
+ Name: "discussion",
+ },
+ },
+ Reactions: []*base.Reaction{
+ {
+ UserID: 548513,
+ UserName: "mkobel",
+ Content: "thumbsup",
+ },
+ {
+ UserID: 548513,
+ UserName: "mkobel",
+ Content: "open_mouth",
+ },
+ },
+ Closed: timePtr(time.Date(2024, 9, 3, 14, 43, 10, 708000000, time.UTC)),
+ },
+ {
+ Number: 2,
+ Title: "Test issue",
+ Content: "This is test issue 2, do not touch!",
+ Milestone: "1.0.0",
+ PosterID: 548513,
+ PosterName: "mkobel",
+ State: "closed",
+ Created: time.Date(2024, 9, 3, 14, 42, 35, 371000000, time.UTC),
+ Updated: time.Date(2024, 9, 3, 20, 3, 43, 536000000, time.UTC),
+ Labels: []*base.Label{
+ {
+ Name: "duplicate",
+ },
+ },
+ Reactions: []*base.Reaction{
+ {
+ UserID: 548513,
+ UserName: "mkobel",
+ Content: "thumbsup",
+ },
+ {
+ UserID: 548513,
+ UserName: "mkobel",
+ Content: "thumbsdown",
+ },
+ {
+ UserID: 548513,
+ UserName: "mkobel",
+ Content: "laughing",
+ },
+ {
+ UserID: 548513,
+ UserName: "mkobel",
+ Content: "tada",
+ },
+ {
+ UserID: 548513,
+ UserName: "mkobel",
+ Content: "confused",
+ },
+ {
+ UserID: 548513,
+ UserName: "mkobel",
+ Content: "hearts",
+ },
+ },
+ Closed: timePtr(time.Date(2024, 9, 3, 14, 43, 10, 906000000, time.UTC)),
+ },
+ }, issues)
+
+ comments, _, err := downloader.GetComments(&base.Issue{
+ Number: 2,
+ ForeignIndex: 2,
+ Context: gitlabIssueContext{IsMergeRequest: false},
+ })
+ require.NoError(t, err)
+ assertCommentsEqual(t, []*base.Comment{
+ {
+ IssueIndex: 2,
+ PosterID: 548513,
+ PosterName: "mkobel",
+ Created: time.Date(2024, 9, 3, 14, 45, 20, 848000000, time.UTC),
+ Content: "This is a comment",
+ Reactions: nil,
+ },
+ {
+ IssueIndex: 2,
+ PosterID: 548513,
+ PosterName: "mkobel",
+ Created: time.Date(2024, 9, 3, 14, 45, 30, 59000000, time.UTC),
+ Content: "A second comment",
+ Reactions: nil,
+ },
+ {
+ IssueIndex: 2,
+ PosterID: 548513,
+ PosterName: "mkobel",
+ Created: time.Date(2024, 9, 3, 14, 43, 10, 947000000, time.UTC),
+ Content: "",
+ Reactions: nil,
+ CommentType: "close",
+ },
+ }, comments)
+
+ prs, _, err := downloader.GetPullRequests(1, 1)
+ require.NoError(t, err)
+ assertPullRequestsEqual(t, []*base.PullRequest{
+ {
+ Number: 3,
+ Title: "Test branch",
+ Content: "do not merge this PR",
+ Milestone: "1.1.0",
+ PosterID: 2005797,
+ PosterName: "oliverpool",
+ State: "opened",
+ Created: time.Date(2024, 9, 3, 7, 57, 19, 866000000, time.UTC),
+ Labels: []*base.Label{
+ {
+ Name: "test-scope/label0",
+ },
+ {
+ Name: "test-scope/label1",
+ },
+ },
+ Reactions: []*base.Reaction{{
+ UserID: 548513,
+ UserName: "mkobel",
+ Content: "thumbsup",
+ }, {
+ UserID: 548513,
+ UserName: "mkobel",
+ Content: "tada",
+ }},
+ PatchURL: server.URL + "/forgejo/test_repo/-/merge_requests/1.patch",
+ Head: base.PullRequestBranch{
+ Ref: "feat/test",
+ CloneURL: server.URL + "/forgejo/test_repo/-/merge_requests/1",
+ SHA: "9f733b96b98a4175276edf6a2e1231489c3bdd23",
+ RepoName: "test_repo",
+ OwnerName: "oliverpool",
+ },
+ Base: base.PullRequestBranch{
+ Ref: "master",
+ SHA: "c59c9b451acca9d106cc19d61d87afe3fbbb8b83",
+ OwnerName: "oliverpool",
+ RepoName: "test_repo",
+ },
+ Closed: nil,
+ Merged: false,
+ MergedTime: nil,
+ MergeCommitSHA: "",
+ ForeignIndex: 2,
+ Context: gitlabIssueContext{IsMergeRequest: true},
+ },
+ }, prs)
+
+ rvs, err := downloader.GetReviews(&base.PullRequest{Number: 1, ForeignIndex: 1})
+ require.NoError(t, err)
+ assertReviewsEqual(t, []*base.Review{
+ {
+ IssueIndex: 1,
+ ReviewerID: 548513,
+ ReviewerName: "mkobel",
+ CreatedAt: time.Date(2024, 9, 3, 7, 57, 19, 86600000, time.UTC),
+ State: "APPROVED",
+ },
+ }, rvs)
+}
+
+func TestGitlabSkippedIssueNumber(t *testing.T) {
+ // If a GitLab access token is provided, this test will make HTTP requests to the live gitlab.com instance.
+ // When doing so, the responses from gitlab.com will be saved as test data files.
+ // If no access token is available, those cached responses will be used instead.
+ gitlabPersonalAccessToken := os.Getenv("GITLAB_READ_TOKEN")
+ fixturePath := "./testdata/gitlab/skipped_issue_number"
+ server := unittest.NewMockWebServer(t, "https://gitlab.com", fixturePath, gitlabPersonalAccessToken != "")
+ defer server.Close()
+
+ downloader, err := NewGitlabDownloader(context.Background(), server.URL, "troyengel/archbuild", "", "", gitlabPersonalAccessToken)
+ if err != nil {
+		t.Fatalf("NewGitlabDownloader failed: %v", err)
+ }
+ repo, err := downloader.GetRepoInfo()
+ require.NoError(t, err)
+ assertRepositoryEqual(t, &base.Repository{
+ Name: "archbuild",
+ Owner: "troyengel",
+ Description: "Arch packaging and build files",
+ CloneURL: server.URL + "/troyengel/archbuild.git",
+ OriginalURL: server.URL + "/troyengel/archbuild",
+ DefaultBranch: "master",
+ }, repo)
+
+ issues, isEnd, err := downloader.GetIssues(1, 10)
+ require.NoError(t, err)
+ assert.True(t, isEnd)
+
+ // the only issue in this repository has number 2
+ assert.Len(t, issues, 1)
+ assert.EqualValues(t, 2, issues[0].Number)
+ assert.EqualValues(t, "vpn unlimited errors", issues[0].Title)
+
+ prs, _, err := downloader.GetPullRequests(1, 10)
+ require.NoError(t, err)
+ // the only merge request in this repository has number 1,
+ // but we offset it by the maximum issue number so it becomes
+ // pull request 3 in Forgejo
+ assert.Len(t, prs, 1)
+ assert.EqualValues(t, 3, prs[0].Number)
+ assert.EqualValues(t, "Review", prs[0].Title)
+}
+
+func gitlabClientMockSetup(t *testing.T) (*http.ServeMux, *httptest.Server, *gitlab.Client) {
+ // mux is the HTTP request multiplexer used with the test server.
+ mux := http.NewServeMux()
+
+ // server is a test HTTP server used to provide mock API responses.
+ server := httptest.NewServer(mux)
+
+ // client is the Gitlab client being tested.
+ client, err := gitlab.NewClient("", gitlab.WithBaseURL(server.URL))
+ if err != nil {
+ server.Close()
+ t.Fatalf("Failed to create client: %v", err)
+ }
+
+ return mux, server, client
+}
+
+func gitlabClientMockTeardown(server *httptest.Server) {
+ server.Close()
+}
+
+type reviewTestCase struct {
+ repoID, prID, reviewerID int
+ reviewerName string
+ createdAt, updatedAt *time.Time
+ expectedCreatedAt time.Time
+}
+
+func convertTestCase(t reviewTestCase) (func(w http.ResponseWriter, r *http.Request), base.Review) {
+ var updatedAtField string
+ if t.updatedAt == nil {
+ updatedAtField = ""
+ } else {
+ updatedAtField = `"updated_at": "` + t.updatedAt.Format(time.RFC3339) + `",`
+ }
+
+ var createdAtField string
+ if t.createdAt == nil {
+ createdAtField = ""
+ } else {
+ createdAtField = `"created_at": "` + t.createdAt.Format(time.RFC3339) + `",`
+ }
+
+ handler := func(w http.ResponseWriter, r *http.Request) {
+ fmt.Fprint(w, `
+{
+ "id": 5,
+ "iid": `+strconv.Itoa(t.prID)+`,
+ "project_id": `+strconv.Itoa(t.repoID)+`,
+ "title": "Approvals API",
+ "description": "Test",
+ "state": "opened",
+ `+createdAtField+`
+ `+updatedAtField+`
+ "merge_status": "cannot_be_merged",
+ "approvals_required": 2,
+ "approvals_left": 1,
+ "approved_by": [
+ {
+ "user": {
+ "name": "Administrator",
+ "username": "`+t.reviewerName+`",
+ "id": `+strconv.Itoa(t.reviewerID)+`,
+ "state": "active",
+ "avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon",
+ "web_url": "http://localhost:3000/root"
+ }
+ }
+ ]
+}`)
+ }
+ review := base.Review{
+ IssueIndex: int64(t.prID),
+ ReviewerID: int64(t.reviewerID),
+ ReviewerName: t.reviewerName,
+ CreatedAt: t.expectedCreatedAt,
+ State: "APPROVED",
+ }
+
+ return handler, review
+}
+
+func TestGitlabGetReviews(t *testing.T) {
+ mux, server, client := gitlabClientMockSetup(t)
+ defer gitlabClientMockTeardown(server)
+
+ repoID := 1324
+
+ downloader := &GitlabDownloader{
+ ctx: context.Background(),
+ client: client,
+ repoID: repoID,
+ }
+
+ createdAt := time.Date(2020, 4, 19, 19, 24, 21, 0, time.UTC)
+
+ for _, testCase := range []reviewTestCase{
+ {
+ repoID: repoID,
+ prID: 1,
+ reviewerID: 801,
+ reviewerName: "someone1",
+ createdAt: nil,
+ updatedAt: &createdAt,
+ expectedCreatedAt: createdAt,
+ },
+ {
+ repoID: repoID,
+ prID: 2,
+ reviewerID: 802,
+ reviewerName: "someone2",
+ createdAt: &createdAt,
+ updatedAt: nil,
+ expectedCreatedAt: createdAt,
+ },
+ {
+ repoID: repoID,
+ prID: 3,
+ reviewerID: 803,
+ reviewerName: "someone3",
+ createdAt: nil,
+ updatedAt: nil,
+ expectedCreatedAt: time.Now(),
+ },
+ } {
+ mock, review := convertTestCase(testCase)
+ mux.HandleFunc(fmt.Sprintf("/api/v4/projects/%d/merge_requests/%d/approvals", testCase.repoID, testCase.prID), mock)
+
+ id := int64(testCase.prID)
+ rvs, err := downloader.GetReviews(&base.Issue{Number: id, ForeignIndex: id})
+ require.NoError(t, err)
+ assertReviewsEqual(t, []*base.Review{&review}, rvs)
+ }
+}
+
+func TestAwardsToReactions(t *testing.T) {
+ downloader := &GitlabDownloader{}
+ // yes gitlab can have duplicated reactions (https://gitlab.com/jaywink/socialhome/-/issues/24)
+ testResponse := `
+[
+ {
+ "name": "thumbsup",
+ "user": {
+ "id": 1241334,
+ "username": "lafriks"
+ }
+ },
+ {
+ "name": "thumbsup",
+ "user": {
+ "id": 1241334,
+ "username": "lafriks"
+ }
+ },
+ {
+ "name": "thumbsup",
+ "user": {
+ "id": 4575606,
+ "username": "real6543"
+ }
+ }
+]
+`
+ var awards []*gitlab.AwardEmoji
+ require.NoError(t, json.Unmarshal([]byte(testResponse), &awards))
+
+ reactions := downloader.awardsToReactions(awards)
+ assert.EqualValues(t, []*base.Reaction{
+ {
+ UserName: "lafriks",
+ UserID: 1241334,
+ Content: "thumbsup",
+ },
+ {
+ UserName: "real6543",
+ UserID: 4575606,
+ Content: "thumbsup",
+ },
+ }, reactions)
+}
+
+func TestNoteToComment(t *testing.T) {
+ downloader := &GitlabDownloader{}
+
+ now := time.Now()
+ makeTestNote := func(id int, body string, system bool) gitlab.Note {
+ return gitlab.Note{
+ ID: id,
+ Author: struct {
+ ID int `json:"id"`
+ Username string `json:"username"`
+ Email string `json:"email"`
+ Name string `json:"name"`
+ State string `json:"state"`
+ AvatarURL string `json:"avatar_url"`
+ WebURL string `json:"web_url"`
+ }{
+ ID: 72,
+ Email: "test@example.com",
+ Username: "test",
+ },
+ Body: body,
+ CreatedAt: &now,
+ System: system,
+ }
+ }
+ notes := []gitlab.Note{
+ makeTestNote(1, "This is a regular comment", false),
+ makeTestNote(2, "enabled an automatic merge for abcd1234", true),
+ makeTestNote(3, "changed target branch from `master` to `main`", true),
+ makeTestNote(4, "canceled the automatic merge", true),
+ }
+ comments := []base.Comment{{
+ IssueIndex: 17,
+ Index: 1,
+ PosterID: 72,
+ PosterName: "test",
+ PosterEmail: "test@example.com",
+ CommentType: "",
+ Content: "This is a regular comment",
+ Created: now,
+ Meta: map[string]any{},
+ }, {
+ IssueIndex: 17,
+ Index: 2,
+ PosterID: 72,
+ PosterName: "test",
+ PosterEmail: "test@example.com",
+ CommentType: "pull_scheduled_merge",
+ Content: "enabled an automatic merge for abcd1234",
+ Created: now,
+ Meta: map[string]any{},
+ }, {
+ IssueIndex: 17,
+ Index: 3,
+ PosterID: 72,
+ PosterName: "test",
+ PosterEmail: "test@example.com",
+ CommentType: "change_target_branch",
+ Content: "changed target branch from `master` to `main`",
+ Created: now,
+ Meta: map[string]any{
+ "OldRef": "master",
+ "NewRef": "main",
+ },
+ }, {
+ IssueIndex: 17,
+ Index: 4,
+ PosterID: 72,
+ PosterName: "test",
+ PosterEmail: "test@example.com",
+ CommentType: "pull_cancel_scheduled_merge",
+ Content: "canceled the automatic merge",
+ Created: now,
+ Meta: map[string]any{},
+ }}
+
+ for i, note := range notes {
+ actualComment := *downloader.convertNoteToComment(17, &note)
+		assert.EqualValues(t, comments[i], actualComment)
+ }
+}
+
+func TestGitlabIIDResolver(t *testing.T) {
+ r := gitlabIIDResolver{}
+ r.recordIssueIID(1)
+ r.recordIssueIID(2)
+ r.recordIssueIID(3)
+ r.recordIssueIID(2)
+ assert.EqualValues(t, 4, r.generatePullRequestNumber(1))
+ assert.EqualValues(t, 13, r.generatePullRequestNumber(10))
+
+ assert.Panics(t, func() {
+ r := gitlabIIDResolver{}
+ r.recordIssueIID(1)
+ assert.EqualValues(t, 2, r.generatePullRequestNumber(1))
+ r.recordIssueIID(3) // the generation procedure has been started, it shouldn't accept any new issue IID, so it panics
+ })
+}
diff --git a/services/migrations/gogs.go b/services/migrations/gogs.go
new file mode 100644
index 0000000..b31d05f
--- /dev/null
+++ b/services/migrations/gogs.go
@@ -0,0 +1,330 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+ "net/url"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/modules/log"
+ base "code.gitea.io/gitea/modules/migration"
+ "code.gitea.io/gitea/modules/proxy"
+ "code.gitea.io/gitea/modules/structs"
+
+ "github.com/gogs/go-gogs-client"
+)
+
+var (
+ _ base.Downloader = &GogsDownloader{}
+ _ base.DownloaderFactory = &GogsDownloaderFactory{}
+)
+
+func init() {
+ RegisterDownloaderFactory(&GogsDownloaderFactory{})
+}
+
+// GogsDownloaderFactory defines a gogs downloader factory
+type GogsDownloaderFactory struct{}
+
+// New returns a Downloader related to this factory according MigrateOptions
+func (f *GogsDownloaderFactory) New(ctx context.Context, opts base.MigrateOptions) (base.Downloader, error) {
+ u, err := url.Parse(opts.CloneAddr)
+ if err != nil {
+ return nil, err
+ }
+
+ repoNameSpace := strings.TrimSuffix(u.Path, ".git")
+ repoNameSpace = strings.Trim(repoNameSpace, "/")
+
+ fields := strings.Split(repoNameSpace, "/")
+ numFields := len(fields)
+ if numFields < 2 {
+ return nil, fmt.Errorf("invalid path: %s", repoNameSpace)
+ }
+
+ repoOwner := fields[numFields-2]
+ repoName := fields[numFields-1]
+
+ u.Path = ""
+ u = u.JoinPath(fields[:numFields-2]...)
+ baseURL := u.String()
+
+ log.Trace("Create gogs downloader. BaseURL: %s RepoOwner: %s RepoName: %s", baseURL, repoOwner, repoName)
+ return NewGogsDownloader(ctx, baseURL, opts.AuthUsername, opts.AuthPassword, opts.AuthToken, repoOwner, repoName), nil
+}
+
+// GitServiceType returns the type of git service
+func (f *GogsDownloaderFactory) GitServiceType() structs.GitServiceType {
+ return structs.GogsService
+}
+
+// GogsDownloader implements a Downloader interface to get repository information
+// from gogs via API
+type GogsDownloader struct {
+ base.NullDownloader
+ ctx context.Context
+ client *gogs.Client
+ baseURL string
+ repoOwner string
+ repoName string
+ userName string
+ password string
+ openIssuesFinished bool
+ openIssuesPages int
+ transport http.RoundTripper
+}
+
+// String implements Stringer
+func (g *GogsDownloader) String() string {
+ return fmt.Sprintf("migration from gogs server %s %s/%s", g.baseURL, g.repoOwner, g.repoName)
+}
+
+func (g *GogsDownloader) LogString() string {
+ if g == nil {
+ return "<GogsDownloader nil>"
+ }
+ return fmt.Sprintf("<GogsDownloader %s %s/%s>", g.baseURL, g.repoOwner, g.repoName)
+}
+
+// SetContext sets the context
+func (g *GogsDownloader) SetContext(ctx context.Context) {
+ g.ctx = ctx
+}
+
+// NewGogsDownloader creates a gogs Downloader via gogs API
+func NewGogsDownloader(ctx context.Context, baseURL, userName, password, token, repoOwner, repoName string) *GogsDownloader {
+ downloader := GogsDownloader{
+ ctx: ctx,
+ baseURL: baseURL,
+ userName: userName,
+ password: password,
+ repoOwner: repoOwner,
+ repoName: repoName,
+ }
+
+ var client *gogs.Client
+ if len(token) != 0 {
+ client = gogs.NewClient(baseURL, token)
+ downloader.userName = token
+ } else {
+ transport := NewMigrationHTTPTransport()
+ transport.Proxy = func(req *http.Request) (*url.URL, error) {
+ req.SetBasicAuth(userName, password)
+ return proxy.Proxy()(req)
+ }
+ downloader.transport = transport
+
+ client = gogs.NewClient(baseURL, "")
+ client.SetHTTPClient(&http.Client{
+ Transport: &downloader,
+ })
+ }
+
+ downloader.client = client
+ return &downloader
+}
+
+// RoundTrip wraps the provided request within this downloader's context and passes it to our internal http.Transport.
+// This implements http.RoundTripper and makes gogs client requests cancellable even though the client itself offers no cancellation.
+func (g *GogsDownloader) RoundTrip(req *http.Request) (*http.Response, error) {
+ return g.transport.RoundTrip(req.WithContext(g.ctx))
+}
+
+// GetRepoInfo returns the repository information
+func (g *GogsDownloader) GetRepoInfo() (*base.Repository, error) {
+ gr, err := g.client.GetRepo(g.repoOwner, g.repoName)
+ if err != nil {
+ return nil, err
+ }
+
+	// convert the gogs repo to a standard Repository
+ return &base.Repository{
+ Owner: g.repoOwner,
+ Name: g.repoName,
+ IsPrivate: gr.Private,
+ Description: gr.Description,
+ CloneURL: gr.CloneURL,
+ OriginalURL: gr.HTMLURL,
+ DefaultBranch: gr.DefaultBranch,
+ }, nil
+}
+
+// GetMilestones returns milestones
+func (g *GogsDownloader) GetMilestones() ([]*base.Milestone, error) {
+ perPage := 100
+ milestones := make([]*base.Milestone, 0, perPage)
+
+ ms, err := g.client.ListRepoMilestones(g.repoOwner, g.repoName)
+ if err != nil {
+ return nil, err
+ }
+
+ for _, m := range ms {
+ milestones = append(milestones, &base.Milestone{
+ Title: m.Title,
+ Description: m.Description,
+ Deadline: m.Deadline,
+ State: string(m.State),
+ Closed: m.Closed,
+ })
+ }
+
+ return milestones, nil
+}
+
+// GetLabels returns labels
+func (g *GogsDownloader) GetLabels() ([]*base.Label, error) {
+ perPage := 100
+ labels := make([]*base.Label, 0, perPage)
+ ls, err := g.client.ListRepoLabels(g.repoOwner, g.repoName)
+ if err != nil {
+ return nil, err
+ }
+
+ for _, label := range ls {
+ labels = append(labels, convertGogsLabel(label))
+ }
+
+ return labels, nil
+}
+
+// GetIssues returns issues according to the page argument; perPage is not supported
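+// Gogs lists open and closed issues separately, so this downloader first walks
+// every page of open issues, remembers how many pages that took, and then maps
+// the following pages onto the closed state (for example, after 2 pages of
+// open issues, page 3 fetches the first page of closed issues).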
+func (g *GogsDownloader) GetIssues(page, _ int) ([]*base.Issue, bool, error) {
+ var state string
+ if g.openIssuesFinished {
+ state = string(gogs.STATE_CLOSED)
+ page -= g.openIssuesPages
+ } else {
+ state = string(gogs.STATE_OPEN)
+ g.openIssuesPages = page
+ }
+
+ issues, isEnd, err := g.getIssues(page, state)
+ if err != nil {
+ return nil, false, err
+ }
+
+ if isEnd {
+ if g.openIssuesFinished {
+ return issues, true, nil
+ }
+ g.openIssuesFinished = true
+ }
+
+ return issues, false, nil
+}
+
+func (g *GogsDownloader) getIssues(page int, state string) ([]*base.Issue, bool, error) {
+ allIssues := make([]*base.Issue, 0, 10)
+
+ issues, err := g.client.ListRepoIssues(g.repoOwner, g.repoName, gogs.ListIssueOption{
+ Page: page,
+ State: state,
+ })
+ if err != nil {
+		return nil, false, fmt.Errorf("error while listing issues: %w", err)
+ }
+
+ for _, issue := range issues {
+ if issue.PullRequest != nil {
+ continue
+ }
+ allIssues = append(allIssues, convertGogsIssue(issue))
+ }
+
+ return allIssues, len(issues) == 0, nil
+}
+
+// GetComments returns the comments of an issue
+func (g *GogsDownloader) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) {
+ allComments := make([]*base.Comment, 0, 100)
+
+ comments, err := g.client.ListIssueComments(g.repoOwner, g.repoName, commentable.GetForeignIndex())
+ if err != nil {
+		return nil, false, fmt.Errorf("error while listing comments: %w", err)
+ }
+ for _, comment := range comments {
+ if len(comment.Body) == 0 || comment.Poster == nil {
+ continue
+ }
+ allComments = append(allComments, &base.Comment{
+ IssueIndex: commentable.GetLocalIndex(),
+ Index: comment.ID,
+ PosterID: comment.Poster.ID,
+ PosterName: comment.Poster.Login,
+ PosterEmail: comment.Poster.Email,
+ Content: comment.Body,
+ Created: comment.Created,
+ Updated: comment.Updated,
+ })
+ }
+
+ return allComments, true, nil
+}
+
+// GetTopics return repository topics
+func (g *GogsDownloader) GetTopics() ([]string, error) {
+ return []string{}, nil
+}
+
+// FormatCloneURL adds authentication credentials to the remote URL
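+// For example (hypothetical values): with AuthToken "authtoken" the remote
+// "https://git.example.com/user/repo.git" becomes
+// "https://authtoken:@git.example.com/user/repo.git", while a username/password
+// pair yields "https://username:password@git.example.com/user/repo.git".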
+func (g *GogsDownloader) FormatCloneURL(opts MigrateOptions, remoteAddr string) (string, error) {
+ if len(opts.AuthToken) > 0 || len(opts.AuthUsername) > 0 {
+ u, err := url.Parse(remoteAddr)
+ if err != nil {
+ return "", err
+ }
+ if len(opts.AuthToken) != 0 {
+ u.User = url.UserPassword(opts.AuthToken, "")
+ } else {
+ u.User = url.UserPassword(opts.AuthUsername, opts.AuthPassword)
+ }
+ return u.String(), nil
+ }
+ return remoteAddr, nil
+}
+
+func convertGogsIssue(issue *gogs.Issue) *base.Issue {
+ var milestone string
+ if issue.Milestone != nil {
+ milestone = issue.Milestone.Title
+ }
+ labels := make([]*base.Label, 0, len(issue.Labels))
+ for _, l := range issue.Labels {
+ labels = append(labels, convertGogsLabel(l))
+ }
+
+ var closed *time.Time
+ if issue.State == gogs.STATE_CLOSED {
+		// the gogs client does not provide a closed time, so we use updated instead
+ closed = &issue.Updated
+ }
+
+ return &base.Issue{
+ Title: issue.Title,
+ Number: issue.Index,
+ PosterID: issue.Poster.ID,
+ PosterName: issue.Poster.Login,
+ PosterEmail: issue.Poster.Email,
+ Content: issue.Body,
+ Milestone: milestone,
+ State: string(issue.State),
+ Created: issue.Created,
+ Updated: issue.Updated,
+ Labels: labels,
+ Closed: closed,
+ ForeignIndex: issue.Index,
+ }
+}
+
+func convertGogsLabel(label *gogs.Label) *base.Label {
+ return &base.Label{
+ Name: label.Name,
+ Color: label.Color,
+ }
+}
diff --git a/services/migrations/gogs_test.go b/services/migrations/gogs_test.go
new file mode 100644
index 0000000..6c511a2
--- /dev/null
+++ b/services/migrations/gogs_test.go
@@ -0,0 +1,224 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "context"
+ "net/http"
+ "os"
+ "testing"
+ "time"
+
+ base "code.gitea.io/gitea/modules/migration"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestGogsDownloadRepo(t *testing.T) {
+ // Skip tests if Gogs token is not found
+ gogsPersonalAccessToken := os.Getenv("GOGS_READ_TOKEN")
+ if len(gogsPersonalAccessToken) == 0 {
+ t.Skip("skipped test because GOGS_READ_TOKEN was not in the environment")
+ }
+
+ resp, err := http.Get("https://try.gogs.io/lunnytest/TESTREPO")
+ if err != nil || resp.StatusCode/100 != 2 {
+ // skip and don't run test
+		t.Skip("visiting the test repo failed, skipping")
+ return
+ }
+
+ downloader := NewGogsDownloader(context.Background(), "https://try.gogs.io", "", "", gogsPersonalAccessToken, "lunnytest", "TESTREPO")
+ repo, err := downloader.GetRepoInfo()
+ require.NoError(t, err)
+
+ assertRepositoryEqual(t, &base.Repository{
+ Name: "TESTREPO",
+ Owner: "lunnytest",
+ Description: "",
+ CloneURL: "https://try.gogs.io/lunnytest/TESTREPO.git",
+ OriginalURL: "https://try.gogs.io/lunnytest/TESTREPO",
+ DefaultBranch: "master",
+ }, repo)
+
+ milestones, err := downloader.GetMilestones()
+ require.NoError(t, err)
+ assertMilestonesEqual(t, []*base.Milestone{
+ {
+ Title: "1.0",
+ State: "open",
+ },
+ }, milestones)
+
+ labels, err := downloader.GetLabels()
+ require.NoError(t, err)
+ assertLabelsEqual(t, []*base.Label{
+ {
+ Name: "bug",
+ Color: "ee0701",
+ },
+ {
+ Name: "duplicate",
+ Color: "cccccc",
+ },
+ {
+ Name: "enhancement",
+ Color: "84b6eb",
+ },
+ {
+ Name: "help wanted",
+ Color: "128a0c",
+ },
+ {
+ Name: "invalid",
+ Color: "e6e6e6",
+ },
+ {
+ Name: "question",
+ Color: "cc317c",
+ },
+ {
+ Name: "wontfix",
+ Color: "ffffff",
+ },
+ }, labels)
+
+ // downloader.GetIssues()
+ issues, isEnd, err := downloader.GetIssues(1, 8)
+ require.NoError(t, err)
+ assert.False(t, isEnd)
+ assertIssuesEqual(t, []*base.Issue{
+ {
+ Number: 1,
+ PosterID: 5331,
+ PosterName: "lunny",
+ PosterEmail: "xiaolunwen@gmail.com",
+ Title: "test",
+ Content: "test",
+ Milestone: "",
+ State: "open",
+ Created: time.Date(2019, 6, 11, 8, 16, 44, 0, time.UTC),
+ Updated: time.Date(2019, 10, 26, 11, 7, 2, 0, time.UTC),
+ Labels: []*base.Label{
+ {
+ Name: "bug",
+ Color: "ee0701",
+ },
+ },
+ },
+ }, issues)
+
+ // downloader.GetComments()
+ comments, _, err := downloader.GetComments(&base.Issue{Number: 1, ForeignIndex: 1})
+ require.NoError(t, err)
+ assertCommentsEqual(t, []*base.Comment{
+ {
+ IssueIndex: 1,
+ PosterID: 5331,
+ PosterName: "lunny",
+ PosterEmail: "xiaolunwen@gmail.com",
+ Created: time.Date(2019, 6, 11, 8, 19, 50, 0, time.UTC),
+ Updated: time.Date(2019, 6, 11, 8, 19, 50, 0, time.UTC),
+ Content: "1111",
+ },
+ {
+ IssueIndex: 1,
+ PosterID: 15822,
+ PosterName: "clacplouf",
+ PosterEmail: "test1234@dbn.re",
+ Created: time.Date(2019, 10, 26, 11, 7, 2, 0, time.UTC),
+ Updated: time.Date(2019, 10, 26, 11, 7, 2, 0, time.UTC),
+ Content: "88888888",
+ },
+ }, comments)
+
+ // downloader.GetPullRequests()
+ _, _, err = downloader.GetPullRequests(1, 3)
+ require.Error(t, err)
+}
+
+func TestGogsDownloaderFactory_New(t *testing.T) {
+ tests := []struct {
+ name string
+ args base.MigrateOptions
+ baseURL string
+ repoOwner string
+ repoName string
+ wantErr bool
+ }{
+ {
+ name: "Gogs_at_root",
+ args: base.MigrateOptions{
+ CloneAddr: "https://git.example.com/user/repo.git",
+ AuthUsername: "username",
+ AuthPassword: "password",
+ AuthToken: "authtoken",
+ },
+ baseURL: "https://git.example.com/",
+ repoOwner: "user",
+ repoName: "repo",
+ wantErr: false,
+ },
+ {
+ name: "Gogs_at_sub_path",
+ args: base.MigrateOptions{
+ CloneAddr: "https://git.example.com/subpath/user/repo.git",
+ AuthUsername: "username",
+ AuthPassword: "password",
+ AuthToken: "authtoken",
+ },
+ baseURL: "https://git.example.com/subpath",
+ repoOwner: "user",
+ repoName: "repo",
+ wantErr: false,
+ },
+ {
+ name: "Gogs_at_2nd_sub_path",
+ args: base.MigrateOptions{
+ CloneAddr: "https://git.example.com/sub1/sub2/user/repo.git",
+ AuthUsername: "username",
+ AuthPassword: "password",
+ AuthToken: "authtoken",
+ },
+ baseURL: "https://git.example.com/sub1/sub2",
+ repoOwner: "user",
+ repoName: "repo",
+ wantErr: false,
+ },
+ {
+ name: "Gogs_URL_too_short",
+ args: base.MigrateOptions{
+ CloneAddr: "https://git.example.com/repo.git",
+ AuthUsername: "username",
+ AuthPassword: "password",
+ AuthToken: "authtoken",
+ },
+ wantErr: true,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ f := &GogsDownloaderFactory{}
+ opts := base.MigrateOptions{
+ CloneAddr: tt.args.CloneAddr,
+ AuthUsername: tt.args.AuthUsername,
+ AuthPassword: tt.args.AuthPassword,
+ AuthToken: tt.args.AuthToken,
+ }
+ got, err := f.New(context.Background(), opts)
+ if (err != nil) != tt.wantErr {
+ t.Errorf("GogsDownloaderFactory.New() error = %v, wantErr %v", err, tt.wantErr)
+ return
+ } else if err != nil {
+ return
+ }
+
+ assert.IsType(t, &GogsDownloader{}, got)
+ assert.EqualValues(t, tt.baseURL, got.(*GogsDownloader).baseURL)
+ assert.EqualValues(t, tt.repoOwner, got.(*GogsDownloader).repoOwner)
+ assert.EqualValues(t, tt.repoName, got.(*GogsDownloader).repoName)
+ })
+ }
+}
diff --git a/services/migrations/http_client.go b/services/migrations/http_client.go
new file mode 100644
index 0000000..0b997e0
--- /dev/null
+++ b/services/migrations/http_client.go
@@ -0,0 +1,29 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "crypto/tls"
+ "net/http"
+
+ "code.gitea.io/gitea/modules/hostmatcher"
+ "code.gitea.io/gitea/modules/proxy"
+ "code.gitea.io/gitea/modules/setting"
+)
+
+// NewMigrationHTTPClient returns an HTTP client for migration
+func NewMigrationHTTPClient() *http.Client {
+ return &http.Client{
+ Transport: NewMigrationHTTPTransport(),
+ }
+}
+
+// NewMigrationHTTPTransport returns an HTTP transport for migration
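+// The allowList and blockList used below are package-level host matchers
+// (assumed to be populated elsewhere in this package from the migrations
+// settings), restricting which hosts migrations may connect to.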
+func NewMigrationHTTPTransport() *http.Transport {
+ return &http.Transport{
+ TLSClientConfig: &tls.Config{InsecureSkipVerify: setting.Migrations.SkipTLSVerify},
+ Proxy: proxy.Proxy(),
+ DialContext: hostmatcher.NewDialContext("migration", allowList, blockList, setting.Proxy.ProxyURLFixed),
+ }
+}
diff --git a/services/migrations/main_test.go b/services/migrations/main_test.go
new file mode 100644
index 0000000..d0ec6a3
--- /dev/null
+++ b/services/migrations/main_test.go
@@ -0,0 +1,266 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// Copyright 2018 Jonas Franz. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "testing"
+ "time"
+
+ "code.gitea.io/gitea/models/unittest"
+ base "code.gitea.io/gitea/modules/migration"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
+
+func timePtr(t time.Time) *time.Time {
+ return &t
+}
+
+func assertTimeEqual(t *testing.T, expected, actual time.Time) {
+ assert.Equal(t, expected.UTC(), actual.UTC())
+}
+
+func assertTimePtrEqual(t *testing.T, expected, actual *time.Time) {
+ if expected == nil {
+ assert.Nil(t, actual)
+ } else {
+ assert.NotNil(t, actual)
+ assertTimeEqual(t, *expected, *actual)
+ }
+}
+
+func assertCommentEqual(t *testing.T, expected, actual *base.Comment) {
+ assert.Equal(t, expected.IssueIndex, actual.IssueIndex)
+ assert.Equal(t, expected.PosterID, actual.PosterID)
+ assert.Equal(t, expected.PosterName, actual.PosterName)
+ assert.Equal(t, expected.PosterEmail, actual.PosterEmail)
+ assertTimeEqual(t, expected.Created, actual.Created)
+ assertTimeEqual(t, expected.Updated, actual.Updated)
+ assert.Equal(t, expected.Content, actual.Content)
+ assertReactionsEqual(t, expected.Reactions, actual.Reactions)
+}
+
+func assertCommentsEqual(t *testing.T, expected, actual []*base.Comment) {
+ if assert.Len(t, actual, len(expected)) {
+ for i := range expected {
+ assertCommentEqual(t, expected[i], actual[i])
+ }
+ }
+}
+
+func assertLabelEqual(t *testing.T, expected, actual *base.Label) {
+ assert.Equal(t, expected.Name, actual.Name)
+ assert.Equal(t, expected.Exclusive, actual.Exclusive)
+ assert.Equal(t, expected.Color, actual.Color)
+ assert.Equal(t, expected.Description, actual.Description)
+}
+
+func assertLabelsEqual(t *testing.T, expected, actual []*base.Label) {
+ if assert.Len(t, actual, len(expected)) {
+ for i := range expected {
+ assertLabelEqual(t, expected[i], actual[i])
+ }
+ }
+}
+
+func assertMilestoneEqual(t *testing.T, expected, actual *base.Milestone) {
+ assert.Equal(t, expected.Title, actual.Title)
+ assert.Equal(t, expected.Description, actual.Description)
+ assertTimePtrEqual(t, expected.Deadline, actual.Deadline)
+ assertTimeEqual(t, expected.Created, actual.Created)
+ assertTimePtrEqual(t, expected.Updated, actual.Updated)
+ assertTimePtrEqual(t, expected.Closed, actual.Closed)
+ assert.Equal(t, expected.State, actual.State)
+}
+
+func assertMilestonesEqual(t *testing.T, expected, actual []*base.Milestone) {
+ if assert.Len(t, actual, len(expected)) {
+ for i := range expected {
+ assertMilestoneEqual(t, expected[i], actual[i])
+ }
+ }
+}
+
+func assertIssueEqual(t *testing.T, expected, actual *base.Issue) {
+ assert.Equal(t, expected.Number, actual.Number)
+ assert.Equal(t, expected.PosterID, actual.PosterID)
+ assert.Equal(t, expected.PosterName, actual.PosterName)
+ assert.Equal(t, expected.PosterEmail, actual.PosterEmail)
+ assert.Equal(t, expected.Title, actual.Title)
+ assert.Equal(t, expected.Content, actual.Content)
+ assert.Equal(t, expected.Ref, actual.Ref)
+ assert.Equal(t, expected.Milestone, actual.Milestone)
+ assert.Equal(t, expected.State, actual.State)
+ assert.Equal(t, expected.IsLocked, actual.IsLocked)
+ assertTimeEqual(t, expected.Created, actual.Created)
+ assertTimeEqual(t, expected.Updated, actual.Updated)
+ assertTimePtrEqual(t, expected.Closed, actual.Closed)
+ assertLabelsEqual(t, expected.Labels, actual.Labels)
+ assertReactionsEqual(t, expected.Reactions, actual.Reactions)
+ assert.ElementsMatch(t, expected.Assignees, actual.Assignees)
+}
+
+func assertIssuesEqual(t *testing.T, expected, actual []*base.Issue) {
+ if assert.Len(t, actual, len(expected)) {
+ for i := range expected {
+ assertIssueEqual(t, expected[i], actual[i])
+ }
+ }
+}
+
+func assertPullRequestEqual(t *testing.T, expected, actual *base.PullRequest) {
+ assert.Equal(t, expected.Number, actual.Number)
+ assert.Equal(t, expected.Title, actual.Title)
+ assert.Equal(t, expected.PosterID, actual.PosterID)
+ assert.Equal(t, expected.PosterName, actual.PosterName)
+ assert.Equal(t, expected.PosterEmail, actual.PosterEmail)
+ assert.Equal(t, expected.Content, actual.Content)
+ assert.Equal(t, expected.Milestone, actual.Milestone)
+ assert.Equal(t, expected.State, actual.State)
+ assertTimeEqual(t, expected.Created, actual.Created)
+ assertTimeEqual(t, expected.Updated, actual.Updated)
+ assertTimePtrEqual(t, expected.Closed, actual.Closed)
+ assertLabelsEqual(t, expected.Labels, actual.Labels)
+ assert.Equal(t, expected.PatchURL, actual.PatchURL)
+ assert.Equal(t, expected.Merged, actual.Merged)
+ assertTimePtrEqual(t, expected.MergedTime, actual.MergedTime)
+ assert.Equal(t, expected.MergeCommitSHA, actual.MergeCommitSHA)
+ assertPullRequestBranchEqual(t, expected.Head, actual.Head)
+ assertPullRequestBranchEqual(t, expected.Base, actual.Base)
+ assert.ElementsMatch(t, expected.Assignees, actual.Assignees)
+ assert.Equal(t, expected.IsLocked, actual.IsLocked)
+ assertReactionsEqual(t, expected.Reactions, actual.Reactions)
+}
+
+func assertPullRequestsEqual(t *testing.T, expected, actual []*base.PullRequest) {
+ if assert.Len(t, actual, len(expected)) {
+ for i := range expected {
+ assertPullRequestEqual(t, expected[i], actual[i])
+ }
+ }
+}
+
+func assertPullRequestBranchEqual(t *testing.T, expected, actual base.PullRequestBranch) {
+ assert.Equal(t, expected.CloneURL, actual.CloneURL)
+ assert.Equal(t, expected.Ref, actual.Ref)
+ assert.Equal(t, expected.SHA, actual.SHA)
+ assert.Equal(t, expected.RepoName, actual.RepoName)
+ assert.Equal(t, expected.OwnerName, actual.OwnerName)
+}
+
+func assertReactionEqual(t *testing.T, expected, actual *base.Reaction) {
+ assert.Equal(t, expected.UserID, actual.UserID)
+ assert.Equal(t, expected.UserName, actual.UserName)
+ assert.Equal(t, expected.Content, actual.Content)
+}
+
+func assertReactionsEqual(t *testing.T, expected, actual []*base.Reaction) {
+ if assert.Len(t, actual, len(expected)) {
+ for i := range expected {
+ assertReactionEqual(t, expected[i], actual[i])
+ }
+ }
+}
+
+func assertReleaseAssetEqual(t *testing.T, expected, actual *base.ReleaseAsset) {
+ assert.Equal(t, expected.ID, actual.ID)
+ assert.Equal(t, expected.Name, actual.Name)
+ assert.Equal(t, expected.ContentType, actual.ContentType)
+ assert.Equal(t, expected.Size, actual.Size)
+ assert.Equal(t, expected.DownloadCount, actual.DownloadCount)
+ assertTimeEqual(t, expected.Created, actual.Created)
+ assertTimeEqual(t, expected.Updated, actual.Updated)
+ assert.Equal(t, expected.DownloadURL, actual.DownloadURL)
+}
+
+func assertReleaseAssetsEqual(t *testing.T, expected, actual []*base.ReleaseAsset) {
+ if assert.Len(t, actual, len(expected)) {
+ for i := range expected {
+ assertReleaseAssetEqual(t, expected[i], actual[i])
+ }
+ }
+}
+
+func assertReleaseEqual(t *testing.T, expected, actual *base.Release) {
+ assert.Equal(t, expected.TagName, actual.TagName)
+ assert.Equal(t, expected.TargetCommitish, actual.TargetCommitish)
+ assert.Equal(t, expected.Name, actual.Name)
+ assert.Equal(t, expected.Body, actual.Body)
+ assert.Equal(t, expected.Draft, actual.Draft)
+ assert.Equal(t, expected.Prerelease, actual.Prerelease)
+ assert.Equal(t, expected.PublisherID, actual.PublisherID)
+ assert.Equal(t, expected.PublisherName, actual.PublisherName)
+ assert.Equal(t, expected.PublisherEmail, actual.PublisherEmail)
+ assertReleaseAssetsEqual(t, expected.Assets, actual.Assets)
+ assertTimeEqual(t, expected.Created, actual.Created)
+ assertTimeEqual(t, expected.Published, actual.Published)
+}
+
+func assertReleasesEqual(t *testing.T, expected, actual []*base.Release) {
+ if assert.Len(t, actual, len(expected)) {
+ for i := range expected {
+ assertReleaseEqual(t, expected[i], actual[i])
+ }
+ }
+}
+
+func assertRepositoryEqual(t *testing.T, expected, actual *base.Repository) {
+ assert.Equal(t, expected.Name, actual.Name)
+ assert.Equal(t, expected.Owner, actual.Owner)
+ assert.Equal(t, expected.IsPrivate, actual.IsPrivate)
+ assert.Equal(t, expected.IsMirror, actual.IsMirror)
+ assert.Equal(t, expected.Description, actual.Description)
+ assert.Equal(t, expected.CloneURL, actual.CloneURL)
+ assert.Equal(t, expected.OriginalURL, actual.OriginalURL)
+ assert.Equal(t, expected.DefaultBranch, actual.DefaultBranch)
+}
+
+func assertReviewEqual(t *testing.T, expected, actual *base.Review) {
+ assert.Equal(t, expected.ID, actual.ID, "ID")
+ assert.Equal(t, expected.IssueIndex, actual.IssueIndex, "IssueIndex")
+ assert.Equal(t, expected.ReviewerID, actual.ReviewerID, "ReviewerID")
+ assert.Equal(t, expected.ReviewerName, actual.ReviewerName, "ReviewerName")
+ assert.Equal(t, expected.Official, actual.Official, "Official")
+ assert.Equal(t, expected.CommitID, actual.CommitID, "CommitID")
+ assert.Equal(t, expected.Content, actual.Content, "Content")
+ assert.WithinDuration(t, expected.CreatedAt, actual.CreatedAt, 10*time.Second)
+ assert.Equal(t, expected.State, actual.State, "State")
+ assertReviewCommentsEqual(t, expected.Comments, actual.Comments)
+}
+
+func assertReviewsEqual(t *testing.T, expected, actual []*base.Review) {
+ if assert.Len(t, actual, len(expected)) {
+ for i := range expected {
+ assertReviewEqual(t, expected[i], actual[i])
+ }
+ }
+}
+
+func assertReviewCommentEqual(t *testing.T, expected, actual *base.ReviewComment) {
+ assert.Equal(t, expected.ID, actual.ID)
+ assert.Equal(t, expected.InReplyTo, actual.InReplyTo)
+ assert.Equal(t, expected.Content, actual.Content)
+ assert.Equal(t, expected.TreePath, actual.TreePath)
+ assert.Equal(t, expected.DiffHunk, actual.DiffHunk)
+ assert.Equal(t, expected.Position, actual.Position)
+ assert.Equal(t, expected.Line, actual.Line)
+ assert.Equal(t, expected.CommitID, actual.CommitID)
+ assert.Equal(t, expected.PosterID, actual.PosterID)
+ assertReactionsEqual(t, expected.Reactions, actual.Reactions)
+ assertTimeEqual(t, expected.CreatedAt, actual.CreatedAt)
+ assertTimeEqual(t, expected.UpdatedAt, actual.UpdatedAt)
+}
+
+func assertReviewCommentsEqual(t *testing.T, expected, actual []*base.ReviewComment) {
+ if assert.Len(t, actual, len(expected)) {
+ for i := range expected {
+ assertReviewCommentEqual(t, expected[i], actual[i])
+ }
+ }
+}
diff --git a/services/migrations/migrate.go b/services/migrations/migrate.go
new file mode 100644
index 0000000..29b815d
--- /dev/null
+++ b/services/migrations/migrate.go
@@ -0,0 +1,510 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// Copyright 2018 Jonas Franz. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "context"
+ "fmt"
+ "net"
+ "net/url"
+ "path/filepath"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ repo_model "code.gitea.io/gitea/models/repo"
+ system_model "code.gitea.io/gitea/models/system"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/hostmatcher"
+ "code.gitea.io/gitea/modules/log"
+ base "code.gitea.io/gitea/modules/migration"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+)
+
+// MigrateOptions is an alias of base.MigrateOptions
+type MigrateOptions = base.MigrateOptions
+
+var (
+ factories []base.DownloaderFactory
+
+ allowList *hostmatcher.HostMatchList
+ blockList *hostmatcher.HostMatchList
+)
+
+// RegisterDownloaderFactory registers a downloader factory
+func RegisterDownloaderFactory(factory base.DownloaderFactory) {
+ factories = append(factories, factory)
+}
+
+// IsMigrateURLAllowed checks whether a URL is allowed to be migrated from
+func IsMigrateURLAllowed(remoteURL string, doer *user_model.User) error {
+ // Remote address can be HTTP/HTTPS/Git URL or local path.
+ u, err := url.Parse(remoteURL)
+ if err != nil {
+ return &models.ErrInvalidCloneAddr{IsURLError: true, Host: remoteURL}
+ }
+
+ if u.Scheme == "file" || u.Scheme == "" {
+ if !doer.CanImportLocal() {
+ return &models.ErrInvalidCloneAddr{Host: "<LOCAL_FILESYSTEM>", IsPermissionDenied: true, LocalPath: true}
+ }
+ isAbs := filepath.IsAbs(u.Host + u.Path)
+ if !isAbs {
+ return &models.ErrInvalidCloneAddr{Host: "<LOCAL_FILESYSTEM>", IsInvalidPath: true, LocalPath: true}
+ }
+ isDir, err := util.IsDir(u.Host + u.Path)
+ if err != nil {
+ log.Error("Unable to check if %s is a directory: %v", u.Host+u.Path, err)
+ return err
+ }
+ if !isDir {
+ return &models.ErrInvalidCloneAddr{Host: "<LOCAL_FILESYSTEM>", IsInvalidPath: true, LocalPath: true}
+ }
+
+ return nil
+ }
+
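+ // Reject "git" URLs that combine an explicit port with encoded CR/LF sequences, which could be abused for injection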
+ if u.Scheme == "git" && u.Port() != "" && (strings.Contains(remoteURL, "%0d") || strings.Contains(remoteURL, "%0a")) {
+ return &models.ErrInvalidCloneAddr{Host: u.Host, IsURLError: true}
+ }
+
+ if u.Opaque != "" || u.Scheme != "" && u.Scheme != "http" && u.Scheme != "https" && u.Scheme != "git" && u.Scheme != "ssh" {
+ return &models.ErrInvalidCloneAddr{Host: u.Host, IsProtocolInvalid: true, IsPermissionDenied: true, IsURLError: true}
+ }
+
+ hostName, _, err := net.SplitHostPort(u.Host)
+ if err != nil {
+ // u.Host can be "host" or "host:port"
+ err = nil //nolint
+ hostName = u.Host
+ }
+
+ // Some users only use a proxy and have no DNS resolver; it is safe to ignore the LookupIP error
+ addrList, _ := net.LookupIP(hostName)
+ return checkByAllowBlockList(hostName, addrList)
+}
+
+func checkByAllowBlockList(hostName string, addrList []net.IP) error {
+ var ipAllowed bool
+ var ipBlocked bool
+ for _, addr := range addrList {
+ ipAllowed = ipAllowed || allowList.MatchIPAddr(addr)
+ ipBlocked = ipBlocked || blockList.MatchIPAddr(addr)
+ }
+ var blockedError error
+ if blockList.MatchHostName(hostName) || ipBlocked {
+ blockedError = &models.ErrInvalidCloneAddr{Host: hostName, IsPermissionDenied: true}
+ }
+ // if we have an allow-list, check it before returning so the more accurate error is produced
+ if !allowList.IsEmpty() {
+ if !allowList.MatchHostName(hostName) && !ipAllowed {
+ return &models.ErrInvalidCloneAddr{Host: hostName, IsPermissionDenied: true}
+ }
+ }
+ // otherwise, always follow the block list
+ return blockedError
+}
+
+// MigrateRepository migrates a repository according to the given MigrateOptions
+func MigrateRepository(ctx context.Context, doer *user_model.User, ownerName string, opts base.MigrateOptions, messenger base.Messenger) (*repo_model.Repository, error) {
+ err := IsMigrateURLAllowed(opts.CloneAddr, doer)
+ if err != nil {
+ return nil, err
+ }
+ if opts.LFS && len(opts.LFSEndpoint) > 0 {
+ err := IsMigrateURLAllowed(opts.LFSEndpoint, doer)
+ if err != nil {
+ return nil, err
+ }
+ }
+ downloader, err := newDownloader(ctx, ownerName, opts)
+ if err != nil {
+ return nil, err
+ }
+
+ uploader := NewGiteaLocalUploader(ctx, doer, ownerName, opts.RepoName)
+ uploader.gitServiceType = opts.GitServiceType
+
+ if err := migrateRepository(ctx, doer, downloader, uploader, opts, messenger); err != nil {
+ if err1 := uploader.Rollback(); err1 != nil {
+ log.Error("rollback failed: %v", err1)
+ }
+ if err2 := system_model.CreateRepositoryNotice(fmt.Sprintf("Migrate repository from %s failed: %v", opts.OriginalURL, err)); err2 != nil {
+ log.Error("create repository notice failed: %v", err2)
+ }
+ return nil, err
+ }
+ return uploader.repo, nil
+}
+
+func getFactoryFromServiceType(serviceType structs.GitServiceType) base.DownloaderFactory {
+ for _, factory := range factories {
+ if factory.GitServiceType() == serviceType {
+ return factory
+ }
+ }
+ return nil
+}
+
+func newDownloader(ctx context.Context, ownerName string, opts base.MigrateOptions) (base.Downloader, error) {
+ var (
+ downloader base.Downloader
+ err error
+ )
+
+ if factory := getFactoryFromServiceType(opts.GitServiceType); factory != nil {
+ downloader, err = factory.New(ctx, opts)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ if downloader == nil {
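+ // No registered factory matched the service type: fall back to a plain git clone and disable everything that needs an API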
+ opts.Wiki = true
+ opts.Milestones = false
+ opts.Labels = false
+ opts.Releases = false
+ opts.Comments = false
+ opts.Issues = false
+ opts.PullRequests = false
+ downloader = NewPlainGitDownloader(ownerName, opts.RepoName, opts.CloneAddr)
+ log.Trace("Will migrate from git: %s", opts.OriginalURL)
+ }
+
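+ // Wrap the downloader with retry support when more than one attempt is configured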
+ if setting.Migrations.MaxAttempts > 1 {
+ downloader = base.NewRetryDownloader(ctx, downloader, setting.Migrations.MaxAttempts, setting.Migrations.RetryBackoff)
+ }
+ return downloader, nil
+}
+
+// migrateRepository downloads the repository data and uploads it through the Uploader. This
+// streaming approach works well for a small repository; for a big repository, saving all the
+// data to disk before uploading would be better.
+func migrateRepository(_ context.Context, doer *user_model.User, downloader base.Downloader, uploader base.Uploader, opts base.MigrateOptions, messenger base.Messenger) error {
+ if messenger == nil {
+ messenger = base.NilMessenger
+ }
+
+ repo, err := downloader.GetRepoInfo()
+ if err != nil {
+ if !base.IsErrNotSupported(err) {
+ return err
+ }
+ log.Info("migrating repo info is not supported, ignored")
+ }
+ repo.IsPrivate = opts.Private
+ repo.IsMirror = opts.Mirror
+ if opts.Description != "" {
+ repo.Description = opts.Description
+ }
+ if repo.CloneURL, err = downloader.FormatCloneURL(opts, repo.CloneURL); err != nil {
+ return err
+ }
+
+ // SECURITY: If the downloader is not a RepositoryRestorer then we need to recheck the CloneURL
+ if _, ok := downloader.(*RepositoryRestorer); !ok {
+ // Now the clone URL can be rewritten by the downloader so we must recheck
+ if err := IsMigrateURLAllowed(repo.CloneURL, doer); err != nil {
+ return err
+ }
+
+ // SECURITY: Ensure that we haven't been redirected from an external to a local filesystem
+ // Now we know all of these must parse
+ cloneAddrURL, _ := url.Parse(opts.CloneAddr)
+ cloneURL, _ := url.Parse(repo.CloneURL)
+
+ if cloneURL.Scheme == "file" || cloneURL.Scheme == "" {
+ if cloneAddrURL.Scheme != "file" && cloneAddrURL.Scheme != "" {
+ return fmt.Errorf("repo info has changed from external to local filesystem")
+ }
+ }
+
+ // We don't actually need to check the OriginalURL as it isn't used anywhere
+ }
+
+ log.Trace("migrating git data from %s", repo.CloneURL)
+ messenger("repo.migrate.migrating_git")
+ if err = uploader.CreateRepo(repo, opts); err != nil {
+ return err
+ }
+ defer uploader.Close()
+
+ log.Trace("migrating topics")
+ messenger("repo.migrate.migrating_topics")
+ topics, err := downloader.GetTopics()
+ if err != nil {
+ if !base.IsErrNotSupported(err) {
+ return err
+ }
+ log.Warn("migrating topics is not supported, ignored")
+ }
+ if len(topics) != 0 {
+ if err = uploader.CreateTopics(topics...); err != nil {
+ return err
+ }
+ }
+
+ if opts.Milestones {
+ log.Trace("migrating milestones")
+ messenger("repo.migrate.migrating_milestones")
+ milestones, err := downloader.GetMilestones()
+ if err != nil {
+ if !base.IsErrNotSupported(err) {
+ return err
+ }
+ log.Warn("migrating milestones is not supported, ignored")
+ }
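+ // Insert milestones in batches no larger than the uploader's limit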
+ msBatchSize := uploader.MaxBatchInsertSize("milestone")
+ for len(milestones) > 0 {
+ if len(milestones) < msBatchSize {
+ msBatchSize = len(milestones)
+ }
+
+ if err := uploader.CreateMilestones(milestones[:msBatchSize]...); err != nil {
+ return err
+ }
+ milestones = milestones[msBatchSize:]
+ }
+ }
+
+ if opts.Labels {
+ log.Trace("migrating labels")
+ messenger("repo.migrate.migrating_labels")
+ labels, err := downloader.GetLabels()
+ if err != nil {
+ if !base.IsErrNotSupported(err) {
+ return err
+ }
+ log.Warn("migrating labels is not supported, ignored")
+ }
+
+ lbBatchSize := uploader.MaxBatchInsertSize("label")
+ for len(labels) > 0 {
+ if len(labels) < lbBatchSize {
+ lbBatchSize = len(labels)
+ }
+
+ if err := uploader.CreateLabels(labels[:lbBatchSize]...); err != nil {
+ return err
+ }
+ labels = labels[lbBatchSize:]
+ }
+ }
+
+ if opts.Releases {
+ log.Trace("migrating releases")
+ messenger("repo.migrate.migrating_releases")
+ releases, err := downloader.GetReleases()
+ if err != nil {
+ if !base.IsErrNotSupported(err) {
+ return err
+ }
+ log.Warn("migrating releases is not supported, ignored")
+ }
+
+ relBatchSize := uploader.MaxBatchInsertSize("release")
+ for len(releases) > 0 {
+ if len(releases) < relBatchSize {
+ relBatchSize = len(releases)
+ }
+
+ if err = uploader.CreateReleases(releases[:relBatchSize]...); err != nil {
+ return err
+ }
+ releases = releases[relBatchSize:]
+ }
+
+ // Once all releases (if any) are inserted, sync any remaining non-release tags
+ if err = uploader.SyncTags(); err != nil {
+ return err
+ }
+ }
+
+ var (
+ commentBatchSize = uploader.MaxBatchInsertSize("comment")
+ reviewBatchSize = uploader.MaxBatchInsertSize("review")
+ )
+
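+ // Some downloaders can fetch every comment of a repository in one pass; otherwise comments are fetched per issue or pull request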
+ supportAllComments := downloader.SupportGetRepoComments()
+
+ if opts.Issues {
+ log.Trace("migrating issues and comments")
+ messenger("repo.migrate.migrating_issues")
+ issueBatchSize := uploader.MaxBatchInsertSize("issue")
+
+ for i := 1; ; i++ {
+ issues, isEnd, err := downloader.GetIssues(i, issueBatchSize)
+ if err != nil {
+ if !base.IsErrNotSupported(err) {
+ return err
+ }
+ log.Warn("migrating issues is not supported, ignored")
+ break
+ }
+
+ if err := uploader.CreateIssues(issues...); err != nil {
+ return err
+ }
+
+ if opts.Comments && !supportAllComments {
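+ // Buffer comments across issues and flush them in uploader-sized batches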
+ allComments := make([]*base.Comment, 0, commentBatchSize)
+ for _, issue := range issues {
+ log.Trace("migrating issue %d's comments", issue.Number)
+ comments, _, err := downloader.GetComments(issue)
+ if err != nil {
+ if !base.IsErrNotSupported(err) {
+ return err
+ }
+ log.Warn("migrating comments is not supported, ignored")
+ }
+
+ allComments = append(allComments, comments...)
+
+ if len(allComments) >= commentBatchSize {
+ if err = uploader.CreateComments(allComments[:commentBatchSize]...); err != nil {
+ return err
+ }
+
+ allComments = allComments[commentBatchSize:]
+ }
+ }
+
+ if len(allComments) > 0 {
+ if err = uploader.CreateComments(allComments...); err != nil {
+ return err
+ }
+ }
+ }
+
+ if isEnd {
+ break
+ }
+ }
+ }
+
+ if opts.PullRequests {
+ log.Trace("migrating pull requests and comments")
+ messenger("repo.migrate.migrating_pulls")
+ prBatchSize := uploader.MaxBatchInsertSize("pullrequest")
+ for i := 1; ; i++ {
+ prs, isEnd, err := downloader.GetPullRequests(i, prBatchSize)
+ if err != nil {
+ if !base.IsErrNotSupported(err) {
+ return err
+ }
+ log.Warn("migrating pull requests is not supported, ignored")
+ break
+ }
+
+ if err := uploader.CreatePullRequests(prs...); err != nil {
+ return err
+ }
+
+ if opts.Comments {
+ if !supportAllComments {
+ // plain comments
+ allComments := make([]*base.Comment, 0, commentBatchSize)
+ for _, pr := range prs {
+ log.Trace("migrating pull request %d's comments", pr.Number)
+ comments, _, err := downloader.GetComments(pr)
+ if err != nil {
+ if !base.IsErrNotSupported(err) {
+ return err
+ }
+ log.Warn("migrating comments is not supported, ignored")
+ }
+
+ allComments = append(allComments, comments...)
+
+ if len(allComments) >= commentBatchSize {
+ if err = uploader.CreateComments(allComments[:commentBatchSize]...); err != nil {
+ return err
+ }
+ allComments = allComments[commentBatchSize:]
+ }
+ }
+ if len(allComments) > 0 {
+ if err = uploader.CreateComments(allComments...); err != nil {
+ return err
+ }
+ }
+ }
+
+ // migrate reviews
+ allReviews := make([]*base.Review, 0, reviewBatchSize)
+ for _, pr := range prs {
+ reviews, err := downloader.GetReviews(pr)
+ if err != nil {
+ if !base.IsErrNotSupported(err) {
+ return err
+ }
+ log.Warn("migrating reviews is not supported, ignored")
+ break
+ }
+
+ allReviews = append(allReviews, reviews...)
+
+ if len(allReviews) >= reviewBatchSize {
+ if err = uploader.CreateReviews(allReviews[:reviewBatchSize]...); err != nil {
+ return err
+ }
+ allReviews = allReviews[reviewBatchSize:]
+ }
+ }
+ if len(allReviews) > 0 {
+ if err = uploader.CreateReviews(allReviews...); err != nil {
+ return err
+ }
+ }
+ }
+
+ if isEnd {
+ break
+ }
+ }
+ }
+
+ if opts.Comments && supportAllComments {
+ log.Trace("migrating comments")
+ for i := 1; ; i++ {
+ comments, isEnd, err := downloader.GetAllComments(i, commentBatchSize)
+ if err != nil {
+ return err
+ }
+
+ if err := uploader.CreateComments(comments...); err != nil {
+ return err
+ }
+
+ if isEnd {
+ break
+ }
+ }
+ }
+
+ return uploader.Finish()
+}
+
+// Init initializes the migrations service
+func Init() error {
+ // TODO: maybe we can deprecate these legacy ALLOWED_DOMAINS/ALLOW_LOCALNETWORKS/BLOCKED_DOMAINS, use ALLOWED_HOST_LIST/BLOCKED_HOST_LIST instead
+
+ blockList = hostmatcher.ParseSimpleMatchList("migrations.BLOCKED_DOMAINS", setting.Migrations.BlockedDomains)
+
+ allowList = hostmatcher.ParseSimpleMatchList("migrations.ALLOWED_DOMAINS/ALLOW_LOCALNETWORKS", setting.Migrations.AllowedDomains)
+ if allowList.IsEmpty() {
+ // the default policy is that migration module can access external hosts
+ allowList.AppendBuiltin(hostmatcher.MatchBuiltinExternal)
+ }
+ if setting.Migrations.AllowLocalNetworks {
+ allowList.AppendBuiltin(hostmatcher.MatchBuiltinPrivate)
+ allowList.AppendBuiltin(hostmatcher.MatchBuiltinLoopback)
+ }
+ // TODO: at the moment, if ALLOW_LOCALNETWORKS=false, ALLOWED_DOMAINS=domain.com, and domain.com resolves to 127.0.0.1, then access is still allowed.
+ // To block such cases, the private and loopback ranges should be added to the blockList when ALLOW_LOCALNETWORKS=false.
+
+ return nil
+}
diff --git a/services/migrations/migrate_test.go b/services/migrations/migrate_test.go
new file mode 100644
index 0000000..109a092
--- /dev/null
+++ b/services/migrations/migrate_test.go
@@ -0,0 +1,115 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "net"
+ "path/filepath"
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestMigrateWhiteBlocklist(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ adminUser := unittest.AssertExistsAndLoadBean(t, &user_model.User{Name: "user1"})
+ nonAdminUser := unittest.AssertExistsAndLoadBean(t, &user_model.User{Name: "user2"})
+
+ setting.Migrations.AllowedDomains = "github.com"
+ setting.Migrations.AllowLocalNetworks = false
+ require.NoError(t, Init())
+
+ err := IsMigrateURLAllowed("https://gitlab.com/gitlab/gitlab.git", nonAdminUser)
+ require.Error(t, err)
+
+ err = IsMigrateURLAllowed("https://github.com/go-gitea/gitea.git", nonAdminUser)
+ require.NoError(t, err)
+
+ err = IsMigrateURLAllowed("https://gITHUb.com/go-gitea/gitea.git", nonAdminUser)
+ require.NoError(t, err)
+
+ setting.Migrations.AllowedDomains = ""
+ setting.Migrations.BlockedDomains = "github.com"
+ require.NoError(t, Init())
+
+ err = IsMigrateURLAllowed("https://gitlab.com/gitlab/gitlab.git", nonAdminUser)
+ require.NoError(t, err)
+
+ err = IsMigrateURLAllowed("https://github.com/go-gitea/gitea.git", nonAdminUser)
+ require.Error(t, err)
+
+ err = IsMigrateURLAllowed("https://10.0.0.1/go-gitea/gitea.git", nonAdminUser)
+ require.Error(t, err)
+
+ setting.Migrations.AllowLocalNetworks = true
+ require.NoError(t, Init())
+ err = IsMigrateURLAllowed("https://10.0.0.1/go-gitea/gitea.git", nonAdminUser)
+ require.NoError(t, err)
+
+ old := setting.ImportLocalPaths
+ setting.ImportLocalPaths = false
+
+ err = IsMigrateURLAllowed("/home/foo/bar/goo", adminUser)
+ require.Error(t, err)
+
+ setting.ImportLocalPaths = true
+ abs, err := filepath.Abs(".")
+ require.NoError(t, err)
+
+ err = IsMigrateURLAllowed(abs, adminUser)
+ require.NoError(t, err)
+
+ err = IsMigrateURLAllowed(abs, nonAdminUser)
+ require.Error(t, err)
+
+ nonAdminUser.AllowImportLocal = true
+ err = IsMigrateURLAllowed(abs, nonAdminUser)
+ require.NoError(t, err)
+
+ setting.ImportLocalPaths = old
+}
+
+func TestAllowBlockList(t *testing.T) {
+ init := func(allow, block string, local bool) {
+ setting.Migrations.AllowedDomains = allow
+ setting.Migrations.BlockedDomains = block
+ setting.Migrations.AllowLocalNetworks = local
+ require.NoError(t, Init())
+ }
+
+ // default, allow all external, block none, no local networks
+ init("", "", false)
+ require.NoError(t, checkByAllowBlockList("domain.com", []net.IP{net.ParseIP("1.2.3.4")}))
+ require.Error(t, checkByAllowBlockList("domain.com", []net.IP{net.ParseIP("127.0.0.1")}))
+
+ // allow all including local networks (it could lead to SSRF in production)
+ init("", "", true)
+ require.NoError(t, checkByAllowBlockList("domain.com", []net.IP{net.ParseIP("1.2.3.4")}))
+ require.NoError(t, checkByAllowBlockList("domain.com", []net.IP{net.ParseIP("127.0.0.1")}))
+
+ // allow wildcard, block some subdomains. if the domain name is allowed, then the local network check is skipped
+ init("*.domain.com", "blocked.domain.com", false)
+ require.NoError(t, checkByAllowBlockList("sub.domain.com", []net.IP{net.ParseIP("1.2.3.4")}))
+ require.NoError(t, checkByAllowBlockList("sub.domain.com", []net.IP{net.ParseIP("127.0.0.1")}))
+ require.Error(t, checkByAllowBlockList("blocked.domain.com", []net.IP{net.ParseIP("1.2.3.4")}))
+ require.Error(t, checkByAllowBlockList("sub.other.com", []net.IP{net.ParseIP("1.2.3.4")}))
+
+ // allow wildcard (it could lead to SSRF in production)
+ init("*", "", false)
+ require.NoError(t, checkByAllowBlockList("domain.com", []net.IP{net.ParseIP("1.2.3.4")}))
+ require.NoError(t, checkByAllowBlockList("domain.com", []net.IP{net.ParseIP("127.0.0.1")}))
+
+ // local network can still be blocked
+ init("*", "127.0.0.*", false)
+ require.NoError(t, checkByAllowBlockList("domain.com", []net.IP{net.ParseIP("1.2.3.4")}))
+ require.Error(t, checkByAllowBlockList("domain.com", []net.IP{net.ParseIP("127.0.0.1")}))
+
+ // reset
+ init("", "", false)
+}
diff --git a/services/migrations/onedev.go b/services/migrations/onedev.go
new file mode 100644
index 0000000..e2f7b77
--- /dev/null
+++ b/services/migrations/onedev.go
@@ -0,0 +1,634 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+ "net/url"
+ "strconv"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ base "code.gitea.io/gitea/modules/migration"
+ "code.gitea.io/gitea/modules/structs"
+)
+
+var (
+ _ base.Downloader = &OneDevDownloader{}
+ _ base.DownloaderFactory = &OneDevDownloaderFactory{}
+)
+
+func init() {
+ RegisterDownloaderFactory(&OneDevDownloaderFactory{})
+}
+
+// OneDevDownloaderFactory defines a downloader factory
+type OneDevDownloaderFactory struct{}
+
+// New returns a downloader for this factory according to the given MigrateOptions
+func (f *OneDevDownloaderFactory) New(ctx context.Context, opts base.MigrateOptions) (base.Downloader, error) {
+ u, err := url.Parse(opts.CloneAddr)
+ if err != nil {
+ return nil, err
+ }
+
+ var repoName string
+
+ fields := strings.Split(strings.Trim(u.Path, "/"), "/")
+ if len(fields) == 2 && fields[0] == "projects" {
+ repoName = fields[1]
+ } else if len(fields) == 1 {
+ repoName = fields[0]
+ } else {
+ return nil, fmt.Errorf("invalid path: %s", u.Path)
+ }
+
+ u.Path = ""
+ u.Fragment = ""
+
+ log.Trace("Create onedev downloader. BaseURL: %v RepoName: %s", u, repoName)
+
+ return NewOneDevDownloader(ctx, u, opts.AuthUsername, opts.AuthPassword, repoName), nil
+}
+
+// GitServiceType returns the type of git service
+func (f *OneDevDownloaderFactory) GitServiceType() structs.GitServiceType {
+ return structs.OneDevService
+}
+
+type onedevUser struct {
+ ID int64 `json:"id"`
+ Name string `json:"name"`
+ Email string `json:"email"`
+}
+
+// OneDevDownloader implements the base.Downloader interface to fetch repository information
+// from OneDev
+type OneDevDownloader struct {
+ base.NullDownloader
+ ctx context.Context
+ client *http.Client
+ baseURL *url.URL
+ repoName string
+ repoID int64
+ maxIssueIndex int64
+ userMap map[int64]*onedevUser
+ milestoneMap map[int64]string
+}
+
+// SetContext sets the context
+func (d *OneDevDownloader) SetContext(ctx context.Context) {
+ d.ctx = ctx
+}
+
+// NewOneDevDownloader creates a new downloader
+func NewOneDevDownloader(ctx context.Context, baseURL *url.URL, username, password, repoName string) *OneDevDownloader {
+ downloader := &OneDevDownloader{
+ ctx: ctx,
+ baseURL: baseURL,
+ repoName: repoName,
+ client: &http.Client{
+ Transport: &http.Transport{
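+ // The Proxy callback runs for every request, so it is used here to attach basic auth credentials rather than to select a proxy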
+ Proxy: func(req *http.Request) (*url.URL, error) {
+ if len(username) > 0 && len(password) > 0 {
+ req.SetBasicAuth(username, password)
+ }
+ return nil, nil
+ },
+ },
+ },
+ userMap: make(map[int64]*onedevUser),
+ milestoneMap: make(map[int64]string),
+ }
+
+ return downloader
+}
+
+// String implements Stringer
+func (d *OneDevDownloader) String() string {
+ return fmt.Sprintf("migration from oneDev server %s [%d]/%s", d.baseURL, d.repoID, d.repoName)
+}
+
+func (d *OneDevDownloader) LogString() string {
+ if d == nil {
+ return "<OneDevDownloader nil>"
+ }
+ return fmt.Sprintf("<OneDevDownloader %s [%d]/%s>", d.baseURL, d.repoID, d.repoName)
+}
+
+func (d *OneDevDownloader) callAPI(endpoint string, parameter map[string]string, result any) error {
+ u, err := d.baseURL.Parse(endpoint)
+ if err != nil {
+ return err
+ }
+
+ if parameter != nil {
+ query := u.Query()
+ for k, v := range parameter {
+ query.Set(k, v)
+ }
+ u.RawQuery = query.Encode()
+ }
+
+ req, err := http.NewRequestWithContext(d.ctx, "GET", u.String(), nil)
+ if err != nil {
+ return err
+ }
+
+ resp, err := d.client.Do(req)
+ if err != nil {
+ return err
+ }
+ defer resp.Body.Close()
+
+ decoder := json.NewDecoder(resp.Body)
+ return decoder.Decode(&result)
+}
+
+// GetRepoInfo returns repository information
+func (d *OneDevDownloader) GetRepoInfo() (*base.Repository, error) {
+ info := make([]struct {
+ ID int64 `json:"id"`
+ Name string `json:"name"`
+ Description string `json:"description"`
+ }, 0, 1)
+
+ err := d.callAPI(
+ "/api/projects",
+ map[string]string{
+ "query": `"Name" is "` + d.repoName + `"`,
+ "offset": "0",
+ "count": "1",
+ },
+ &info,
+ )
+ if err != nil {
+ return nil, err
+ }
+ if len(info) != 1 {
+ return nil, fmt.Errorf("project %s not found", d.repoName)
+ }
+
+ d.repoID = info[0].ID
+
+ cloneURL, err := d.baseURL.Parse(info[0].Name)
+ if err != nil {
+ return nil, err
+ }
+ originalURL, err := d.baseURL.Parse("/projects/" + info[0].Name)
+ if err != nil {
+ return nil, err
+ }
+
+ return &base.Repository{
+ Name: info[0].Name,
+ Description: info[0].Description,
+ CloneURL: cloneURL.String(),
+ OriginalURL: originalURL.String(),
+ }, nil
+}
+
+// GetMilestones returns milestones
+func (d *OneDevDownloader) GetMilestones() ([]*base.Milestone, error) {
+ rawMilestones := make([]struct {
+ ID int64 `json:"id"`
+ Name string `json:"name"`
+ Description string `json:"description"`
+ DueDate *time.Time `json:"dueDate"`
+ Closed bool `json:"closed"`
+ }, 0, 100)
+
+ endpoint := fmt.Sprintf("/api/projects/%d/milestones", d.repoID)
+
+ milestones := make([]*base.Milestone, 0, 100)
+ offset := 0
+ for {
+ err := d.callAPI(
+ endpoint,
+ map[string]string{
+ "offset": strconv.Itoa(offset),
+ "count": "100",
+ },
+ &rawMilestones,
+ )
+ if err != nil {
+ return nil, err
+ }
+ if len(rawMilestones) == 0 {
+ break
+ }
+ offset += 100
+
+ for _, milestone := range rawMilestones {
+ d.milestoneMap[milestone.ID] = milestone.Name
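+ // OneDev exposes no closed timestamp, so the due date stands in for it on closed milestones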
+ closed := milestone.DueDate
+ if !milestone.Closed {
+ closed = nil
+ }
+
+ milestones = append(milestones, &base.Milestone{
+ Title: milestone.Name,
+ Description: milestone.Description,
+ Deadline: milestone.DueDate,
+ Closed: closed,
+ })
+ }
+ }
+ return milestones, nil
+}
+
+// GetLabels returns labels
+func (d *OneDevDownloader) GetLabels() ([]*base.Label, error) {
+ return []*base.Label{
+ {
+ Name: "Bug",
+ Color: "f64e60",
+ },
+ {
+ Name: "Build Failure",
+ Color: "f64e60",
+ },
+ {
+ Name: "Discussion",
+ Color: "8950fc",
+ },
+ {
+ Name: "Improvement",
+ Color: "1bc5bd",
+ },
+ {
+ Name: "New Feature",
+ Color: "1bc5bd",
+ },
+ {
+ Name: "Support Request",
+ Color: "8950fc",
+ },
+ }, nil
+}
+
+type onedevIssueContext struct {
+ IsPullRequest bool
+}
+
+// GetIssues returns issues
+func (d *OneDevDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, error) {
+ rawIssues := make([]struct {
+ ID int64 `json:"id"`
+ Number int64 `json:"number"`
+ State string `json:"state"`
+ Title string `json:"title"`
+ Description string `json:"description"`
+ SubmitterID int64 `json:"submitterId"`
+ SubmitDate time.Time `json:"submitDate"`
+ }, 0, perPage)
+
+ err := d.callAPI(
+ "/api/issues",
+ map[string]string{
+ "query": `"Project" is "` + d.repoName + `"`,
+ "offset": strconv.Itoa((page - 1) * perPage),
+ "count": strconv.Itoa(perPage),
+ },
+ &rawIssues,
+ )
+ if err != nil {
+ return nil, false, err
+ }
+
+ issues := make([]*base.Issue, 0, len(rawIssues))
+ for _, issue := range rawIssues {
+ fields := make([]struct {
+ Name string `json:"name"`
+ Value string `json:"value"`
+ }, 0, 10)
+ err := d.callAPI(
+ fmt.Sprintf("/api/issues/%d/fields", issue.ID),
+ nil,
+ &fields,
+ )
+ if err != nil {
+ return nil, false, err
+ }
+
+ var label *base.Label
+ for _, field := range fields {
+ if field.Name == "Type" {
+ label = &base.Label{Name: field.Value}
+ break
+ }
+ }
+
+ milestones := make([]struct {
+ ID int64 `json:"id"`
+ Name string `json:"name"`
+ }, 0, 10)
+ err = d.callAPI(
+ fmt.Sprintf("/api/issues/%d/milestones", issue.ID),
+ nil,
+ &milestones,
+ )
+ if err != nil {
+ return nil, false, err
+ }
+ milestoneID := int64(0)
+ if len(milestones) > 0 {
+ milestoneID = milestones[0].ID
+ }
+
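+ // OneDev marks finished issues as "released"; map that state to "closed"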
+ state := strings.ToLower(issue.State)
+ if state == "released" {
+ state = "closed"
+ }
+ poster := d.tryGetUser(issue.SubmitterID)
+ issues = append(issues, &base.Issue{
+ Title: issue.Title,
+ Number: issue.Number,
+ PosterName: poster.Name,
+ PosterEmail: poster.Email,
+ Content: issue.Description,
+ Milestone: d.milestoneMap[milestoneID],
+ State: state,
+ Created: issue.SubmitDate,
+ Updated: issue.SubmitDate,
+ Labels: []*base.Label{label},
+ ForeignIndex: issue.ID,
+ Context: onedevIssueContext{IsPullRequest: false},
+ })
+
+ if d.maxIssueIndex < issue.Number {
+ d.maxIssueIndex = issue.Number
+ }
+ }
+
+ return issues, len(issues) == 0, nil
+}
+
+// GetComments returns comments
+func (d *OneDevDownloader) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) {
+ context, ok := commentable.GetContext().(onedevIssueContext)
+ if !ok {
+ return nil, false, fmt.Errorf("unexpected context: %+v", commentable.GetContext())
+ }
+
+ rawComments := make([]struct {
+ ID int64 `json:"id"`
+ Date time.Time `json:"date"`
+ UserID int64 `json:"userId"`
+ Content string `json:"content"`
+ }, 0, 100)
+
+ var endpoint string
+ if context.IsPullRequest {
+ endpoint = fmt.Sprintf("/api/pull-requests/%d/comments", commentable.GetForeignIndex())
+ } else {
+ endpoint = fmt.Sprintf("/api/issues/%d/comments", commentable.GetForeignIndex())
+ }
+
+ err := d.callAPI(
+ endpoint,
+ nil,
+ &rawComments,
+ )
+ if err != nil {
+ return nil, false, err
+ }
+
+ rawChanges := make([]struct {
+ Date time.Time `json:"date"`
+ UserID int64 `json:"userId"`
+ Data map[string]any `json:"data"`
+ }, 0, 100)
+
+ if context.IsPullRequest {
+ endpoint = fmt.Sprintf("/api/pull-requests/%d/changes", commentable.GetForeignIndex())
+ } else {
+ endpoint = fmt.Sprintf("/api/issues/%d/changes", commentable.GetForeignIndex())
+ }
+
+ err = d.callAPI(
+ endpoint,
+ nil,
+ &rawChanges,
+ )
+ if err != nil {
+ return nil, false, err
+ }
+
+ comments := make([]*base.Comment, 0, len(rawComments)+len(rawChanges))
+ for _, comment := range rawComments {
+ if len(comment.Content) == 0 {
+ continue
+ }
+ poster := d.tryGetUser(comment.UserID)
+ comments = append(comments, &base.Comment{
+ IssueIndex: commentable.GetLocalIndex(),
+ Index: comment.ID,
+ PosterID: poster.ID,
+ PosterName: poster.Name,
+ PosterEmail: poster.Email,
+ Content: comment.Content,
+ Created: comment.Date,
+ Updated: comment.Date,
+ })
+ }
+ for _, change := range rawChanges {
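+ // Change events may carry a comment body under either the "content" or the "comment" key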
+ contentV, ok := change.Data["content"]
+ if !ok {
+ contentV, ok = change.Data["comment"]
+ if !ok {
+ continue
+ }
+ }
+ content, ok := contentV.(string)
+ if !ok || len(content) == 0 {
+ continue
+ }
+
+ poster := d.tryGetUser(change.UserID)
+ comments = append(comments, &base.Comment{
+ IssueIndex: commentable.GetLocalIndex(),
+ PosterID: poster.ID,
+ PosterName: poster.Name,
+ PosterEmail: poster.Email,
+ Content: content,
+ Created: change.Date,
+ Updated: change.Date,
+ })
+ }
+
+ return comments, true, nil
+}
+
+// GetPullRequests returns pull requests
+func (d *OneDevDownloader) GetPullRequests(page, perPage int) ([]*base.PullRequest, bool, error) {
+ rawPullRequests := make([]struct {
+ ID int64 `json:"id"`
+ Number int64 `json:"number"`
+ Title string `json:"title"`
+ SubmitterID int64 `json:"submitterId"`
+ SubmitDate time.Time `json:"submitDate"`
+ Description string `json:"description"`
+ TargetBranch string `json:"targetBranch"`
+ SourceBranch string `json:"sourceBranch"`
+ BaseCommitHash string `json:"baseCommitHash"`
+ CloseInfo *struct {
+ Date *time.Time `json:"date"`
+ Status string `json:"status"`
+ }
+ }, 0, perPage)
+
+ err := d.callAPI(
+ "/api/pull-requests",
+ map[string]string{
+ "query": `"Target Project" is "` + d.repoName + `"`,
+ "offset": strconv.Itoa((page - 1) * perPage),
+ "count": strconv.Itoa(perPage),
+ },
+ &rawPullRequests,
+ )
+ if err != nil {
+ return nil, false, err
+ }
+
+ pullRequests := make([]*base.PullRequest, 0, len(rawPullRequests))
+ for _, pr := range rawPullRequests {
+ var mergePreview struct {
+ TargetHeadCommitHash string `json:"targetHeadCommitHash"`
+ HeadCommitHash string `json:"headCommitHash"`
+ MergeStrategy string `json:"mergeStrategy"`
+ MergeCommitHash string `json:"mergeCommitHash"`
+ }
+ err := d.callAPI(
+ fmt.Sprintf("/api/pull-requests/%d/merge-preview", pr.ID),
+ nil,
+ &mergePreview,
+ )
+ if err != nil {
+ return nil, false, err
+ }
+
+ state := "open"
+ merged := false
+ var closeTime *time.Time
+ var mergedTime *time.Time
+ if pr.CloseInfo != nil {
+ state = "closed"
+ closeTime = pr.CloseInfo.Date
+ if pr.CloseInfo.Status == "MERGED" { // the alternative status is "DISCARDED"
+ merged = true
+ mergedTime = pr.CloseInfo.Date
+ }
+ }
+ poster := d.tryGetUser(pr.SubmitterID)
+
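+ // Issues and pull requests share one index space in Gitea, so offset PR numbers by the highest issue number seen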
+ number := pr.Number + d.maxIssueIndex
+ pullRequests = append(pullRequests, &base.PullRequest{
+ Title: pr.Title,
+ Number: number,
+ PosterName: poster.Name,
+ PosterID: poster.ID,
+ Content: pr.Description,
+ State: state,
+ Created: pr.SubmitDate,
+ Updated: pr.SubmitDate,
+ Closed: closeTime,
+ Merged: merged,
+ MergedTime: mergedTime,
+ Head: base.PullRequestBranch{
+ Ref: pr.SourceBranch,
+ SHA: mergePreview.HeadCommitHash,
+ RepoName: d.repoName,
+ },
+ Base: base.PullRequestBranch{
+ Ref: pr.TargetBranch,
+ SHA: mergePreview.TargetHeadCommitHash,
+ RepoName: d.repoName,
+ },
+ ForeignIndex: pr.ID,
+ Context: onedevIssueContext{IsPullRequest: true},
+ })
+
+ // SECURITY: Ensure that the PR is safe
+ _ = CheckAndEnsureSafePR(pullRequests[len(pullRequests)-1], d.baseURL.String(), d)
+ }
+
+ return pullRequests, len(pullRequests) == 0, nil
+}
+
+// GetReviews returns pull request reviews
+func (d *OneDevDownloader) GetReviews(reviewable base.Reviewable) ([]*base.Review, error) {
+ rawReviews := make([]struct {
+ ID int64 `json:"id"`
+ UserID int64 `json:"userId"`
+ Result *struct {
+ Commit string `json:"commit"`
+ Approved bool `json:"approved"`
+ Comment string `json:"comment"`
+ }
+ }, 0, 100)
+
+ err := d.callAPI(
+ fmt.Sprintf("/api/pull-requests/%d/reviews", reviewable.GetForeignIndex()),
+ nil,
+ &rawReviews,
+ )
+ if err != nil {
+ return nil, err
+ }
+
+ reviews := make([]*base.Review, 0, len(rawReviews))
+ for _, review := range rawReviews {
+ state := base.ReviewStatePending
+ content := ""
+ if review.Result != nil {
+ if len(review.Result.Comment) > 0 {
+ state = base.ReviewStateCommented
+ content = review.Result.Comment
+ }
+ if review.Result.Approved {
+ state = base.ReviewStateApproved
+ }
+ }
+
+ poster := d.tryGetUser(review.UserID)
+ reviews = append(reviews, &base.Review{
+ IssueIndex: reviewable.GetLocalIndex(),
+ ReviewerID: poster.ID,
+ ReviewerName: poster.Name,
+ Content: content,
+ State: state,
+ })
+ }
+
+ return reviews, nil
+}
+
+// GetTopics returns repository topics
+func (d *OneDevDownloader) GetTopics() ([]string, error) {
+ return []string{}, nil
+}
+
+func (d *OneDevDownloader) tryGetUser(userID int64) *onedevUser {
+ user, ok := d.userMap[userID]
+ if !ok {
+ err := d.callAPI(
+ fmt.Sprintf("/api/users/%d", userID),
+ nil,
+ &user,
+ )
+ if err != nil {
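+ // Fall back to a placeholder user so the migration can continue when the lookup fails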
+ user = &onedevUser{
+ Name: fmt.Sprintf("User %d", userID),
+ }
+ }
+ d.userMap[userID] = user
+ }
+
+ return user
+}
diff --git a/services/migrations/onedev_test.go b/services/migrations/onedev_test.go
new file mode 100644
index 0000000..80c2613
--- /dev/null
+++ b/services/migrations/onedev_test.go
@@ -0,0 +1,149 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "context"
+ "net/http"
+ "net/url"
+ "testing"
+ "time"
+
+ base "code.gitea.io/gitea/modules/migration"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestOneDevDownloadRepo(t *testing.T) {
+ resp, err := http.Get("https://code.onedev.io/projects/go-gitea-test_repo")
+ if err != nil || resp.StatusCode != http.StatusOK {
+ t.Skipf("Can't access test repo, skipping %s", t.Name())
+ }
+
+ u, _ := url.Parse("https://code.onedev.io")
+ downloader := NewOneDevDownloader(context.Background(), u, "", "", "go-gitea-test_repo")
+ if downloader == nil {
+ t.Fatal("NewOneDevDownloader returned nil")
+ }
+ repo, err := downloader.GetRepoInfo()
+ require.NoError(t, err)
+ assertRepositoryEqual(t, &base.Repository{
+ Name: "go-gitea-test_repo",
+ Owner: "",
+ Description: "Test repository for testing migration from OneDev to gitea",
+ CloneURL: "https://code.onedev.io/go-gitea-test_repo",
+ OriginalURL: "https://code.onedev.io/projects/go-gitea-test_repo",
+ }, repo)
+
+ milestones, err := downloader.GetMilestones()
+ require.NoError(t, err)
+ deadline := time.Unix(1620086400, 0)
+ assertMilestonesEqual(t, []*base.Milestone{
+ {
+ Title: "1.0.0",
+ Deadline: &deadline,
+ Closed: &deadline,
+ },
+ {
+ Title: "1.1.0",
+ Description: "next things?",
+ },
+ }, milestones)
+
+ labels, err := downloader.GetLabels()
+ require.NoError(t, err)
+ assert.Len(t, labels, 6)
+
+ issues, isEnd, err := downloader.GetIssues(1, 2)
+ require.NoError(t, err)
+ assert.False(t, isEnd)
+ assertIssuesEqual(t, []*base.Issue{
+ {
+ Number: 4,
+ Title: "Hi there",
+ Content: "an issue not assigned to a milestone",
+ PosterName: "User 336",
+ State: "open",
+ Created: time.Unix(1628549776, 734000000),
+ Updated: time.Unix(1628549776, 734000000),
+ Labels: []*base.Label{
+ {
+ Name: "Improvement",
+ },
+ },
+ ForeignIndex: 398,
+ Context: onedevIssueContext{IsPullRequest: false},
+ },
+ {
+ Number: 3,
+ Title: "Add an awesome feature",
+ Content: "just another issue to test against",
+ PosterName: "User 336",
+ State: "open",
+ Milestone: "1.1.0",
+ Created: time.Unix(1628549749, 878000000),
+ Updated: time.Unix(1628549749, 878000000),
+ Labels: []*base.Label{
+ {
+ Name: "New Feature",
+ },
+ },
+ ForeignIndex: 397,
+ Context: onedevIssueContext{IsPullRequest: false},
+ },
+ }, issues)
+
+ comments, _, err := downloader.GetComments(&base.Issue{
+ Number: 4,
+ ForeignIndex: 398,
+ Context: onedevIssueContext{IsPullRequest: false},
+ })
+ require.NoError(t, err)
+ assertCommentsEqual(t, []*base.Comment{
+ {
+ IssueIndex: 4,
+ PosterName: "User 336",
+ Created: time.Unix(1628549791, 128000000),
+ Updated: time.Unix(1628549791, 128000000),
+ Content: "it has a comment\n\nEDIT: that got edited",
+ },
+ }, comments)
+
+ prs, _, err := downloader.GetPullRequests(1, 1)
+ require.NoError(t, err)
+ assertPullRequestsEqual(t, []*base.PullRequest{
+ {
+ Number: 5,
+ Title: "Pull to add a new file",
+ Content: "just do some git stuff",
+ PosterName: "User 336",
+ State: "open",
+ Created: time.Unix(1628550076, 25000000),
+ Updated: time.Unix(1628550076, 25000000),
+ Head: base.PullRequestBranch{
+ Ref: "branch-for-a-pull",
+ SHA: "343deffe3526b9bc84e873743ff7f6e6d8b827c0",
+ RepoName: "go-gitea-test_repo",
+ },
+ Base: base.PullRequestBranch{
+ Ref: "master",
+ SHA: "f32b0a9dfd09a60f616f29158f772cedd89942d2",
+ RepoName: "go-gitea-test_repo",
+ },
+ ForeignIndex: 186,
+ Context: onedevIssueContext{IsPullRequest: true},
+ },
+ }, prs)
+
+ rvs, err := downloader.GetReviews(&base.PullRequest{Number: 5, ForeignIndex: 186})
+ require.NoError(t, err)
+ assertReviewsEqual(t, []*base.Review{
+ {
+ IssueIndex: 5,
+ ReviewerName: "User 317",
+ State: "PENDING",
+ },
+ }, rvs)
+}
diff --git a/services/migrations/restore.go b/services/migrations/restore.go
new file mode 100644
index 0000000..fd337b2
--- /dev/null
+++ b/services/migrations/restore.go
@@ -0,0 +1,272 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "path/filepath"
+ "strconv"
+
+ base "code.gitea.io/gitea/modules/migration"
+
+ "gopkg.in/yaml.v3"
+)
+
+// RepositoryRestorer implements a Downloader that restores a repository from a local directory
+type RepositoryRestorer struct {
+ base.NullDownloader
+ ctx context.Context
+ baseDir string
+ repoOwner string
+ repoName string
+ validation bool
+}
+
+// NewRepositoryRestorer creates a repository restorer which can restore a repository from a dumped folder
+func NewRepositoryRestorer(ctx context.Context, baseDir, owner, repoName string, validation bool) (*RepositoryRestorer, error) {
+ baseDir, err := filepath.Abs(baseDir)
+ if err != nil {
+ return nil, err
+ }
+ return &RepositoryRestorer{
+ ctx: ctx,
+ baseDir: baseDir,
+ repoOwner: owner,
+ repoName: repoName,
+ validation: validation,
+ }, nil
+}
+
+func (r *RepositoryRestorer) commentDir() string {
+ return filepath.Join(r.baseDir, "comments")
+}
+
+func (r *RepositoryRestorer) reviewDir() string {
+ return filepath.Join(r.baseDir, "reviews")
+}
+
+// SetContext sets the context
+func (r *RepositoryRestorer) SetContext(ctx context.Context) {
+ r.ctx = ctx
+}
+
+func (r *RepositoryRestorer) getRepoOptions() (map[string]string, error) {
+ p := filepath.Join(r.baseDir, "repo.yml")
+ bs, err := os.ReadFile(p)
+ if err != nil {
+ return nil, err
+ }
+
+ opts := make(map[string]string)
+ err = yaml.Unmarshal(bs, &opts)
+ if err != nil {
+ return nil, err
+ }
+ return opts, nil
+}
+
+// GetRepoInfo returns the repository information
+func (r *RepositoryRestorer) GetRepoInfo() (*base.Repository, error) {
+ opts, err := r.getRepoOptions()
+ if err != nil {
+ return nil, err
+ }
+
+ isPrivate, _ := strconv.ParseBool(opts["is_private"])
+
+ return &base.Repository{
+ Owner: r.repoOwner,
+ Name: r.repoName,
+ IsPrivate: isPrivate,
+ Description: opts["description"],
+ OriginalURL: opts["original_url"],
+ CloneURL: filepath.Join(r.baseDir, "git"),
+ DefaultBranch: opts["default_branch"],
+ }, nil
+}
+
+// GetTopics returns the repository topics
+func (r *RepositoryRestorer) GetTopics() ([]string, error) {
+ p := filepath.Join(r.baseDir, "topic.yml")
+
+ topics := struct {
+ Topics []string `yaml:"topics"`
+ }{}
+
+ bs, err := os.ReadFile(p)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return nil, nil
+ }
+ return nil, err
+ }
+
+ err = yaml.Unmarshal(bs, &topics)
+ if err != nil {
+ return nil, err
+ }
+ return topics.Topics, nil
+}
+
+// GetMilestones returns milestones
+func (r *RepositoryRestorer) GetMilestones() ([]*base.Milestone, error) {
+ milestones := make([]*base.Milestone, 0, 10)
+ p := filepath.Join(r.baseDir, "milestone.yml")
+ err := base.Load(p, &milestones, r.validation)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return nil, nil
+ }
+ return nil, err
+ }
+
+ return milestones, nil
+}
+
+// GetReleases returns releases
+func (r *RepositoryRestorer) GetReleases() ([]*base.Release, error) {
+ releases := make([]*base.Release, 0, 10)
+ p := filepath.Join(r.baseDir, "release.yml")
+ _, err := os.Stat(p)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return nil, nil
+ }
+ return nil, err
+ }
+
+ bs, err := os.ReadFile(p)
+ if err != nil {
+ return nil, err
+ }
+
+ err = yaml.Unmarshal(bs, &releases)
+ if err != nil {
+ return nil, err
+ }
+ for _, rel := range releases {
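+ // Rewrite asset download URLs to point at the files inside the local dump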
+ for _, asset := range rel.Assets {
+ if asset.DownloadURL != nil {
+ *asset.DownloadURL = "file://" + filepath.Join(r.baseDir, *asset.DownloadURL)
+ }
+ }
+ }
+ return releases, nil
+}
+
+// GetLabels returns labels
+func (r *RepositoryRestorer) GetLabels() ([]*base.Label, error) {
+ labels := make([]*base.Label, 0, 10)
+ p := filepath.Join(r.baseDir, "label.yml")
+ _, err := os.Stat(p)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return nil, nil
+ }
+ return nil, err
+ }
+
+ bs, err := os.ReadFile(p)
+ if err != nil {
+ return nil, err
+ }
+
+ err = yaml.Unmarshal(bs, &labels)
+ if err != nil {
+ return nil, err
+ }
+ return labels, nil
+}
+
+// GetIssues returns issues according to page and perPage
+func (r *RepositoryRestorer) GetIssues(page, perPage int) ([]*base.Issue, bool, error) {
+ issues := make([]*base.Issue, 0, 10)
+ p := filepath.Join(r.baseDir, "issue.yml")
+ err := base.Load(p, &issues, r.validation)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return nil, true, nil
+ }
+ return nil, false, err
+ }
+ return issues, true, nil
+}
+
+// GetComments returns comments of the given commentable
+func (r *RepositoryRestorer) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) {
+ comments := make([]*base.Comment, 0, 10)
+ p := filepath.Join(r.commentDir(), fmt.Sprintf("%d.yml", commentable.GetForeignIndex()))
+ _, err := os.Stat(p)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return nil, false, nil
+ }
+ return nil, false, err
+ }
+
+ bs, err := os.ReadFile(p)
+ if err != nil {
+ return nil, false, err
+ }
+
+ err = yaml.Unmarshal(bs, &comments)
+ if err != nil {
+ return nil, false, err
+ }
+ return comments, false, nil
+}
+
+// GetPullRequests returns pull requests according to page and perPage
+func (r *RepositoryRestorer) GetPullRequests(page, perPage int) ([]*base.PullRequest, bool, error) {
+ pulls := make([]*base.PullRequest, 0, 10)
+ p := filepath.Join(r.baseDir, "pull_request.yml")
+ _, err := os.Stat(p)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return nil, true, nil
+ }
+ return nil, false, err
+ }
+
+ bs, err := os.ReadFile(p)
+ if err != nil {
+ return nil, false, err
+ }
+
+ err = yaml.Unmarshal(bs, &pulls)
+ if err != nil {
+ return nil, false, err
+ }
+ for _, pr := range pulls {
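+ // Point the patch URL at the local dump and ensure the restored pull request is safe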
+ pr.PatchURL = "file://" + filepath.Join(r.baseDir, pr.PatchURL)
+ CheckAndEnsureSafePR(pr, "", r)
+ }
+ return pulls, true, nil
+}
+
+// GetReviews returns pull request reviews
+func (r *RepositoryRestorer) GetReviews(reviewable base.Reviewable) ([]*base.Review, error) {
+ reviews := make([]*base.Review, 0, 10)
+ p := filepath.Join(r.reviewDir(), fmt.Sprintf("%d.yml", reviewable.GetForeignIndex()))
+ _, err := os.Stat(p)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return nil, nil
+ }
+ return nil, err
+ }
+
+ bs, err := os.ReadFile(p)
+ if err != nil {
+ return nil, err
+ }
+
+ err = yaml.Unmarshal(bs, &reviews)
+ if err != nil {
+ return nil, err
+ }
+ return reviews, nil
+}
diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672
new file mode 100644
index 0000000..73532bf
--- /dev/null
+++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672
@@ -0,0 +1,17 @@
+X-Runtime: 0.155648
+Cache-Control: max-age=0, private, must-revalidate
+Strict-Transport-Security: max-age=31536000
+Gitlab-Lb: haproxy-main-41-lb-gprd
+Set-Cookie: _cfuvid=BI.nVv95qBu88KUbTZy0ZZJlRApJuj4nHeovyNu0YlU-1725394794027-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+Content-Security-Policy: default-src 'none'
+X-Frame-Options: SAMEORIGIN
+X-Gitlab-Meta: {"correlation_id":"6f22438486feec038cd6ea9f15b00ae5","version":"1"}
+Cf-Cache-Status: MISS
+Content-Type: application/json
+Etag: W/"b36bd4522b7e8b2509078271491fb972"
+Vary: Origin, Accept-Encoding
+X-Content-Type-Options: nosniff
+Referrer-Policy: strict-origin-when-cross-origin
+Gitlab-Sv: api-gke-us-east1-d
+
+{"id":61363672,"description":"Test repository for testing migration from gitlab to forgejo","name":"test_repo","name_with_namespace":"Forgejo / test_repo","path":"test_repo","path_with_namespace":"forgejo/test_repo","created_at":"2024-09-03T07:44:30.668Z","default_branch":"master","tag_list":["migration","test"],"topics":["migration","test"],"ssh_url_to_repo":"git@gitlab.com:forgejo/test_repo.git","http_url_to_repo":"https://gitlab.com/forgejo/test_repo.git","web_url":"https://gitlab.com/forgejo/test_repo","readme_url":"https://gitlab.com/forgejo/test_repo/-/blob/master/README.md","forks_count":0,"avatar_url":null,"star_count":0,"last_activity_at":"2024-09-03T20:03:18.187Z","namespace":{"id":64459497,"name":"Forgejo","path":"forgejo","kind":"group","full_path":"forgejo","parent_id":null,"avatar_url":"/uploads/-/system/group/avatar/64459497/73144-c883a242dec5299fbc06bbe3ee71d8c6.png","web_url":"https://gitlab.com/groups/forgejo"},"forked_from_project":{"id":15578026,"description":"Test repository for testing migration from gitlab to gitea","name":"test_repo","name_with_namespace":"gitea / test_repo","path":"test_repo","path_with_namespace":"gitea/test_repo","created_at":"2019-11-28T08:20:33.019Z","default_branch":"master","tag_list":["migration","test"],"topics":["migration","test"],"ssh_url_to_repo":"git@gitlab.com:gitea/test_repo.git","http_url_to_repo":"https://gitlab.com/gitea/test_repo.git","web_url":"https://gitlab.com/gitea/test_repo","readme_url":"https://gitlab.com/gitea/test_repo/-/blob/master/README.md","forks_count":2,"avatar_url":null,"star_count":0,"last_activity_at":"2024-09-03T07:52:28.488Z","namespace":{"id":3181312,"name":"gitea","path":"gitea","kind":"group","full_path":"gitea","parent_id":null,"avatar_url":"/uploads/-/system/group/avatar/3181312/gitea.png","web_url":"https://gitlab.com/groups/gitea"}},"container_registry_image_prefix":"registry.gitlab.com/forgejo/test_repo","_links":{"self":"https://gitlab.com/api/v4/projects/61363672","issues":"https://gitlab.com/api/v4/projects/61363672/issues","merge_requests":"https://gitlab.com/api/v4/projects/61363672/merge_requests","repo_branches":"https://gitlab.com/api/v4/projects/61363672/repository/branches","labels":"https://gitlab.com/api/v4/projects/61363672/labels","events":"https://gitlab.com/api/v4/projects/61363672/events","members":"https://gitlab.com/api/v4/projects/61363672/members","cluster_agents":"https://gitlab.com/api/v4/projects/61363672/cluster_agents"},"packages_enabled":true,"empty_repo":false,"archived":false,"visibility":"public","resolve_outdated_diff_discussions":false,"container_expiration_policy":{"cadence":"1d","enabled":false,"keep_n":10,"older_than":"90d","name_regex":".*","name_regex_keep":null,"next_run_at":"2024-09-04T07:44:30.699Z"},"repository_object_format":"sha1","issues_enabled":true,"merge_requests_enabled":true,"wiki_enabled":true,"jobs_enabled":true,"snippets_enabled":true,"container_registry_enabled":true,"service_desk_enabled":true,"service_desk_address":"contact-project+forgejo-test-repo-61363672-issue-@incoming.gitlab.com","can_create_merge_request_in":true,"issues_access_level":"enabled","repository_access_level":"enabled","merge_requests_access_level":"enabled","forking_access_level":"enabled","wiki_access_level":"enabled","builds_access_level":"enabled","snippets_access_level":"enabled","pages_access_level":"enabled","analytics_access_level":"enabled","container_registry_access_level":"enabled","security_and_compliance_access_level":"private","releases_access_level":"enabled","env
ironments_access_level":"enabled","feature_flags_access_level":"enabled","infrastructure_access_level":"enabled","monitor_access_level":"enabled","model_experiments_access_level":"enabled","model_registry_access_level":"enabled","emails_disabled":false,"emails_enabled":true,"shared_runners_enabled":true,"lfs_enabled":true,"creator_id":2005797,"mr_default_target_self":false,"import_url":null,"import_type":null,"import_status":"finished","import_error":null,"open_issues_count":0,"description_html":"\u003cp data-sourcepos=\"1:1-1:60\" dir=\"auto\"\u003eTest repository for testing migration from gitlab to forgejo\u003c/p\u003e","updated_at":"2024-09-03T20:03:18.187Z","ci_default_git_depth":50,"ci_forward_deployment_enabled":true,"ci_forward_deployment_rollback_allowed":true,"ci_job_token_scope_enabled":false,"ci_separated_caches":true,"ci_allow_fork_pipelines_to_run_in_parent_project":true,"ci_id_token_sub_claim_components":["project_path","ref_type","ref"],"build_git_strategy":"fetch","keep_latest_artifact":true,"restrict_user_defined_variables":false,"ci_pipeline_variables_minimum_override_role":"maintainer","runners_token":null,"runner_token_expiration_interval":null,"group_runners_enabled":true,"auto_cancel_pending_pipelines":"enabled","build_timeout":3600,"auto_devops_enabled":false,"auto_devops_deploy_strategy":"continuous","ci_push_repository_for_job_token_allowed":false,"ci_config_path":null,"public_jobs":true,"shared_with_groups":[],"only_allow_merge_if_pipeline_succeeds":false,"allow_merge_on_skipped_pipeline":null,"request_access_enabled":true,"only_allow_merge_if_all_discussions_are_resolved":false,"remove_source_branch_after_merge":true,"printing_merge_request_link_enabled":true,"merge_method":"merge","squash_option":"default_off","enforce_auth_checks_on_uploads":true,"suggestion_commit_message":null,"merge_commit_template":null,"squash_commit_template":null,"issue_branch_template":null,"warn_about_potentially_unwanted_characters":true,"autoclose_referenced_issues":true,"external_authorization_classification_label":"","requirements_enabled":false,"requirements_access_level":"enabled","security_and_compliance_enabled":true,"pre_receive_secret_detection_enabled":false,"compliance_frameworks":[],"permissions":{"project_access":{"access_level":40,"notification_level":3},"group_access":null}} \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F1%2Faward_emoji%3Fpage=1&per_page=2 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F1%2Faward_emoji%3Fpage=1&per_page=2
new file mode 100644
index 0000000..ce2eb62
--- /dev/null
+++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F1%2Faward_emoji%3Fpage=1&per_page=2
@@ -0,0 +1,24 @@
+X-Total-Pages: 1
+X-Next-Page:
+Vary: Origin, Accept-Encoding
+X-Prev-Page:
+Gitlab-Sv: api-gke-us-east1-b
+Cache-Control: max-age=0, private, must-revalidate
+X-Total: 2
+Strict-Transport-Security: max-age=31536000
+Cf-Cache-Status: MISS
+Link: <https://gitlab.com/api/v4/projects/61363672/issues/1/award_emoji?id=61363672&issue_iid=1&page=1&per_page=2>; rel="first", <https://gitlab.com/api/v4/projects/61363672/issues/1/award_emoji?id=61363672&issue_iid=1&page=1&per_page=2>; rel="last"
+X-Frame-Options: SAMEORIGIN
+Etag: W/"9eaad78fd40f769d67d34daaf19cfbab"
+X-Content-Type-Options: nosniff
+X-Page: 1
+Referrer-Policy: strict-origin-when-cross-origin
+Set-Cookie: _cfuvid=8x.5zI7i_tau_4nKnR1WNvq_Cb_48MmatAHtHqxalEA-1725394795846-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+Content-Type: application/json
+Content-Security-Policy: default-src 'none'
+X-Per-Page: 2
+X-Runtime: 0.062405
+X-Gitlab-Meta: {"correlation_id":"d7fc12667b2139b99804080170986c28","version":"1"}
+Gitlab-Lb: haproxy-main-18-lb-gprd
+
+[{"id":28099429,"name":"thumbsup","user":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T19:56:19.487Z","updated_at":"2024-09-03T19:56:19.487Z","awardable_id":152568896,"awardable_type":"Issue","url":null},{"id":28099432,"name":"open_mouth","user":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T19:56:24.365Z","updated_at":"2024-09-03T19:56:24.365Z","awardable_id":152568896,"awardable_type":"Issue","url":null}] \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F1%2Faward_emoji%3Fpage=2&per_page=2 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F1%2Faward_emoji%3Fpage=2&per_page=2
new file mode 100644
index 0000000..7755d80
--- /dev/null
+++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F1%2Faward_emoji%3Fpage=2&per_page=2
@@ -0,0 +1,26 @@
+X-Next-Page:
+Accept-Ranges: bytes
+X-Frame-Options: SAMEORIGIN
+Strict-Transport-Security: max-age=31536000
+Content-Length: 2
+Link: <https://gitlab.com/api/v4/projects/61363672/issues/1/award_emoji?id=61363672&issue_iid=1&page=1&per_page=2>; rel="first", <https://gitlab.com/api/v4/projects/61363672/issues/1/award_emoji?id=61363672&issue_iid=1&page=1&per_page=2>; rel="last"
+Cf-Cache-Status: MISS
+X-Per-Page: 2
+Cache-Control: max-age=0, private, must-revalidate
+Etag: W/"4f53cda18c2baa0c0354bb5f9a3ecbe5"
+Vary: Origin, Accept-Encoding
+Set-Cookie: _cfuvid=hSs90HRbG8m0_RpN8VaCLGaQcrBX1vjr5h0LpLouZrg-1725394796397-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+X-Gitlab-Meta: {"correlation_id":"7ecc8cd91d20fdae3efed851c53b3009","version":"1"}
+X-Total: 2
+Gitlab-Lb: haproxy-main-55-lb-gprd
+X-Page: 2
+X-Runtime: 0.059820
+Referrer-Policy: strict-origin-when-cross-origin
+X-Prev-Page:
+X-Total-Pages: 1
+Gitlab-Sv: api-gke-us-east1-c
+Content-Type: application/json
+Content-Security-Policy: default-src 'none'
+X-Content-Type-Options: nosniff
+
+[] \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=1&per_page=2 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=1&per_page=2
new file mode 100644
index 0000000..539ef68
--- /dev/null
+++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=1&per_page=2
@@ -0,0 +1,24 @@
+X-Content-Type-Options: nosniff
+X-Runtime: 0.217878
+Etag: W/"5cff9c25fad9db0de0442f8a50af76ed"
+Vary: Origin, Accept-Encoding
+Cf-Cache-Status: MISS
+Strict-Transport-Security: max-age=31536000
+Gitlab-Lb: haproxy-main-11-lb-gprd
+Gitlab-Sv: api-gke-us-east1-d
+Set-Cookie: _cfuvid=0ssSfnfiXaFlJe_DdQ9NOfPlga.fQbgnLjSEwGIfEzk-1725394796812-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+X-Frame-Options: SAMEORIGIN
+X-Prev-Page:
+Referrer-Policy: strict-origin-when-cross-origin
+X-Next-Page: 2
+X-Page: 1
+X-Gitlab-Meta: {"correlation_id":"379af21d1624cba7375460437671af6c","version":"1"}
+Content-Security-Policy: default-src 'none'
+Link: <https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji?id=61363672&issue_iid=2&page=2&per_page=2>; rel="next", <https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji?id=61363672&issue_iid=2&page=1&per_page=2>; rel="first", <https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji?id=61363672&issue_iid=2&page=3&per_page=2>; rel="last"
+Content-Type: application/json
+X-Per-Page: 2
+X-Total: 6
+X-Total-Pages: 3
+Cache-Control: max-age=0, private, must-revalidate
+
+[{"id":28092934,"name":"thumbsup","user":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T14:45:50.310Z","updated_at":"2024-09-03T14:45:50.310Z","awardable_id":152568900,"awardable_type":"Issue","url":null},{"id":28092936,"name":"thumbsdown","user":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T14:45:51.174Z","updated_at":"2024-09-03T14:45:51.174Z","awardable_id":152568900,"awardable_type":"Issue","url":null}] \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=2&per_page=2 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=2&per_page=2
new file mode 100644
index 0000000..60c54f2
--- /dev/null
+++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=2&per_page=2
@@ -0,0 +1,24 @@
+Cache-Control: max-age=0, private, must-revalidate
+X-Total-Pages: 3
+Vary: Origin, Accept-Encoding
+X-Gitlab-Meta: {"correlation_id":"9bea6a0d3bfa187c0276b05afba166c4","version":"1"}
+X-Runtime: 0.086090
+X-Total: 6
+Referrer-Policy: strict-origin-when-cross-origin
+Gitlab-Sv: api-gke-us-east1-b
+Content-Security-Policy: default-src 'none'
+X-Frame-Options: SAMEORIGIN
+X-Prev-Page: 1
+Strict-Transport-Security: max-age=31536000
+Gitlab-Lb: haproxy-main-36-lb-gprd
+X-Content-Type-Options: nosniff
+X-Page: 2
+Set-Cookie: _cfuvid=ByaUDcdLuj9lg2l.wzIwOZ66jeGSBhcxPeVwYI6iJ0I-1725394797065-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+X-Per-Page: 2
+Content-Type: application/json
+Etag: W/"1b260e111b955c4b5b99834b5445d047"
+Link: <https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji?id=61363672&issue_iid=2&page=1&per_page=2>; rel="prev", <https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji?id=61363672&issue_iid=2&page=3&per_page=2>; rel="next", <https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji?id=61363672&issue_iid=2&page=1&per_page=2>; rel="first", <https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji?id=61363672&issue_iid=2&page=3&per_page=2>; rel="last"
+X-Next-Page: 3
+Cf-Cache-Status: MISS
+
+[{"id":28092944,"name":"laughing","user":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T14:46:00.936Z","updated_at":"2024-09-03T14:46:00.936Z","awardable_id":152568900,"awardable_type":"Issue","url":null},{"id":28092948,"name":"tada","user":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T14:46:09.593Z","updated_at":"2024-09-03T14:46:09.593Z","awardable_id":152568900,"awardable_type":"Issue","url":null}] \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=3&per_page=2 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=3&per_page=2
new file mode 100644
index 0000000..e3018fa
--- /dev/null
+++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=3&per_page=2
@@ -0,0 +1,24 @@
+X-Per-Page: 2
+X-Runtime: 0.064070
+X-Content-Type-Options: nosniff
+X-Prev-Page: 2
+X-Page: 3
+Vary: Origin, Accept-Encoding
+X-Total: 6
+Link: <https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji?id=61363672&issue_iid=2&page=2&per_page=2>; rel="prev", <https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji?id=61363672&issue_iid=2&page=1&per_page=2>; rel="first", <https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji?id=61363672&issue_iid=2&page=3&per_page=2>; rel="last"
+X-Frame-Options: SAMEORIGIN
+X-Gitlab-Meta: {"correlation_id":"db9cabb4c4399ec8680e56916a5f9ca2","version":"1"}
+X-Next-Page:
+X-Total-Pages: 3
+Strict-Transport-Security: max-age=31536000
+Content-Security-Policy: default-src 'none'
+Content-Type: application/json
+Etag: W/"578a2e92e9d4f9fb1c21c89b9e13eb0e"
+Gitlab-Lb: haproxy-main-17-lb-gprd
+Cf-Cache-Status: MISS
+Referrer-Policy: strict-origin-when-cross-origin
+Gitlab-Sv: api-gke-us-east1-d
+Set-Cookie: _cfuvid=Upv78tZEcC_Ry_GNFdw5Ms5eMI9FkehWT5RF0a2i7d0-1725394797546-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+Cache-Control: max-age=0, private, must-revalidate
+
+[{"id":28092953,"name":"confused","user":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T14:46:18.191Z","updated_at":"2024-09-03T14:46:18.191Z","awardable_id":152568900,"awardable_type":"Issue","url":null},{"id":28092962,"name":"hearts","user":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T14:46:35.367Z","updated_at":"2024-09-03T14:46:35.367Z","awardable_id":152568900,"awardable_type":"Issue","url":null}] \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=4&per_page=2 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=4&per_page=2
new file mode 100644
index 0000000..b7dd2a5
--- /dev/null
+++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=4&per_page=2
@@ -0,0 +1,26 @@
+X-Runtime: 0.059461
+X-Total: 6
+Gitlab-Lb: haproxy-main-16-lb-gprd
+Set-Cookie: _cfuvid=yVbakY3C4M4Kdnt7wIM2OYjNHbX8d6djf5tCk3NWtfw-1725394797782-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+Content-Type: application/json
+Cache-Control: max-age=0, private, must-revalidate
+X-Page: 4
+X-Per-Page: 2
+Gitlab-Sv: api-gke-us-east1-c
+X-Next-Page:
+Strict-Transport-Security: max-age=31536000
+Referrer-Policy: strict-origin-when-cross-origin
+Content-Length: 2
+Vary: Origin, Accept-Encoding
+X-Content-Type-Options: nosniff
+Etag: W/"4f53cda18c2baa0c0354bb5f9a3ecbe5"
+Cf-Cache-Status: MISS
+X-Prev-Page:
+Accept-Ranges: bytes
+Content-Security-Policy: default-src 'none'
+X-Frame-Options: SAMEORIGIN
+X-Gitlab-Meta: {"correlation_id":"b494fe1273622e61d5b9171bcb8be8f8","version":"1"}
+Link: <https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji?id=61363672&issue_iid=2&page=1&per_page=2>; rel="first", <https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji?id=61363672&issue_iid=2&page=3&per_page=2>; rel="last"
+X-Total-Pages: 3
+
+[] \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Fdiscussions%3Fpage=1&per_page=100 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Fdiscussions%3Fpage=1&per_page=100
new file mode 100644
index 0000000..7acaddf
--- /dev/null
+++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Fdiscussions%3Fpage=1&per_page=100
@@ -0,0 +1,24 @@
+X-Runtime: 0.145197
+X-Total-Pages: 1
+Strict-Transport-Security: max-age=31536000
+Vary: Origin, Accept-Encoding
+X-Prev-Page:
+X-Frame-Options: SAMEORIGIN
+X-Total: 2
+Gitlab-Lb: haproxy-main-52-lb-gprd
+Gitlab-Sv: api-gke-us-east1-c
+Content-Security-Policy: default-src 'none'
+Etag: W/"7f9e8aa5e56c4a23a0ac1fe1e32ea1cf"
+Cache-Control: max-age=0, private, must-revalidate
+X-Content-Type-Options: nosniff
+Referrer-Policy: strict-origin-when-cross-origin
+Cf-Cache-Status: MISS
+X-Next-Page:
+X-Page: 1
+Link: <https://gitlab.com/api/v4/projects/61363672/issues/2/discussions?id=61363672&noteable_id=2&page=1&per_page=100>; rel="first", <https://gitlab.com/api/v4/projects/61363672/issues/2/discussions?id=61363672&noteable_id=2&page=1&per_page=100>; rel="last"
+X-Gitlab-Meta: {"correlation_id":"e2dd8497292356efa5150a6c5ecd61b5","version":"1"}
+Content-Type: application/json
+X-Per-Page: 100
+Set-Cookie: _cfuvid=zB07q9Xq11k5SlfuxWW17Ez7DHpyfygT7b4L.VixX.I-1725394798110-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+
+[{"id":"8d6017e7426130502cd94fff207224b8a98efabc","individual_note":true,"notes":[{"id":2087994191,"type":null,"body":"This is a comment","attachment":null,"author":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T14:45:20.848Z","updated_at":"2024-09-03T14:45:46.592Z","system":false,"noteable_id":152568900,"noteable_type":"Issue","project_id":61363672,"resolvable":false,"confidential":false,"internal":false,"imported":false,"imported_from":"none","noteable_iid":2,"commands_changes":{}}]},{"id":"c721de2d3f2f0fe9a40005228f50d8c8d8131581","individual_note":true,"notes":[{"id":2087994632,"type":null,"body":"A second comment","attachment":null,"author":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T14:45:30.059Z","updated_at":"2024-09-03T14:45:30.059Z","system":false,"noteable_id":152568900,"noteable_type":"Issue","project_id":61363672,"resolvable":false,"confidential":false,"internal":false,"imported":false,"imported_from":"none","noteable_iid":2,"commands_changes":{}}]}] \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Fresource_state_events%3Fpage=1&per_page=100 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Fresource_state_events%3Fpage=1&per_page=100
new file mode 100644
index 0000000..ef8cac0
--- /dev/null
+++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Fresource_state_events%3Fpage=1&per_page=100
@@ -0,0 +1,24 @@
+Cache-Control: max-age=0, private, must-revalidate
+X-Content-Type-Options: nosniff
+X-Next-Page:
+Gitlab-Sv: api-gke-us-east1-d
+Cf-Cache-Status: MISS
+Content-Type: application/json
+Strict-Transport-Security: max-age=31536000
+X-Total-Pages: 1
+Referrer-Policy: strict-origin-when-cross-origin
+Content-Security-Policy: default-src 'none'
+Set-Cookie: _cfuvid=FG.klkpkCkFafn4bGe91EcTgDxILPZT9lIAALQsMguo-1725394798392-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+X-Frame-Options: SAMEORIGIN
+X-Prev-Page:
+Link: <https://gitlab.com/api/v4/projects/61363672/issues/2/resource_state_events?eventable_id=2&id=61363672&page=1&per_page=100>; rel="first", <https://gitlab.com/api/v4/projects/61363672/issues/2/resource_state_events?eventable_id=2&id=61363672&page=1&per_page=100>; rel="last"
+X-Runtime: 0.103796
+X-Total: 1
+Etag: W/"7461fc73e919f707da29f7080cbbf5a5"
+Vary: Origin, Accept-Encoding
+X-Gitlab-Meta: {"correlation_id":"aacea0eebb5d187d57ce369f9bd57a96","version":"1"}
+X-Page: 1
+X-Per-Page: 100
+Gitlab-Lb: haproxy-main-02-lb-gprd
+
+[{"id":241837962,"user":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T14:43:10.947Z","resource_type":"Issue","resource_id":152568900,"state":"closed"}] \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%3Fpage=1&per_page=2&sort=asc&state=all b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%3Fpage=1&per_page=2&sort=asc&state=all
new file mode 100644
index 0000000..4222407
--- /dev/null
+++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%3Fpage=1&per_page=2&sort=asc&state=all
@@ -0,0 +1,24 @@
+X-Total-Pages: 1
+Cache-Control: max-age=0, private, must-revalidate
+X-Runtime: 0.200064
+Etag: W/"d8fb18a73522276c6ef2dcd41f54a48c"
+Link: <https://gitlab.com/api/v4/projects/61363672/issues?id=61363672&order_by=created_at&page=1&per_page=2&sort=asc&state=all&with_labels_details=false>; rel="first", <https://gitlab.com/api/v4/projects/61363672/issues?id=61363672&order_by=created_at&page=1&per_page=2&sort=asc&state=all&with_labels_details=false>; rel="last"
+Strict-Transport-Security: max-age=31536000
+Cf-Cache-Status: MISS
+X-Gitlab-Meta: {"correlation_id":"e93266a7fd0f8392c302d86788f1915d","version":"1"}
+X-Per-Page: 2
+X-Total: 2
+Content-Type: application/json
+Vary: Origin, Accept-Encoding
+X-Next-Page:
+Referrer-Policy: strict-origin-when-cross-origin
+Gitlab-Lb: haproxy-main-48-lb-gprd
+X-Content-Type-Options: nosniff
+X-Frame-Options: SAMEORIGIN
+X-Prev-Page:
+Gitlab-Sv: api-gke-us-east1-b
+Set-Cookie: _cfuvid=dJlDovqc76Ccf_kb3CEsWZMasfjw9wsdzsdIUd.IMiQ-1725394795593-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+Content-Security-Policy: default-src 'none'
+X-Page: 1
+
+[{"id":152568896,"iid":1,"project_id":61363672,"title":"Please add an animated gif icon to the merge button","description":"I just want the merge button to hurt my eyes a little. :stuck_out_tongue_closed_eyes:","state":"closed","created_at":"2024-09-03T14:42:34.924Z","updated_at":"2024-09-03T14:48:43.756Z","closed_at":"2024-09-03T14:43:10.708Z","closed_by":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"labels":["bug","discussion"],"milestone":{"id":4711993,"iid":2,"project_id":61363672,"title":"1.0.0","description":"","state":"closed","created_at":"2024-09-03T13:53:08.516Z","updated_at":"2024-09-03T20:03:57.786Z","due_date":null,"start_date":null,"expired":false,"web_url":"https://gitlab.com/forgejo/test_repo/-/milestones/2"},"assignees":[],"author":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"type":"ISSUE","assignee":null,"user_notes_count":0,"merge_requests_count":0,"upvotes":1,"downvotes":0,"due_date":null,"confidential":false,"discussion_locked":null,"issue_type":"issue","web_url":"https://gitlab.com/forgejo/test_repo/-/issues/1","time_stats":{"time_estimate":0,"total_time_spent":0,"human_time_estimate":null,"human_total_time_spent":null},"task_completion_status":{"count":0,"completed_count":0},"blocking_issues_count":0,"has_tasks":true,"task_status":"0 of 0 checklist items completed","_links":{"self":"https://gitlab.com/api/v4/projects/61363672/issues/1","notes":"https://gitlab.com/api/v4/projects/61363672/issues/1/notes","award_emoji":"https://gitlab.com/api/v4/projects/61363672/issues/1/award_emoji","project":"https://gitlab.com/api/v4/projects/61363672","closed_as_duplicate_of":null},"references":{"short":"#1","relative":"#1","full":"forgejo/test_repo#1"},"severity":"UNKNOWN","moved_to_id":null,"imported":false,"imported_from":"none","service_desk_reply_to":null},{"id":152568900,"iid":2,"project_id":61363672,"title":"Test issue","description":"This is test issue 2, do not touch!","state":"closed","created_at":"2024-09-03T14:42:35.371Z","updated_at":"2024-09-03T20:03:43.536Z","closed_at":"2024-09-03T14:43:10.906Z","closed_by":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"labels":["duplicate"],"milestone":{"id":4711993,"iid":2,"project_id":61363672,"title":"1.0.0","description":"","state":"closed","created_at":"2024-09-03T13:53:08.516Z","updated_at":"2024-09-03T20:03:57.786Z","due_date":null,"start_date":null,"expired":false,"web_url":"https://gitlab.com/forgejo/test_repo/-/milestones/2"},"assignees":[],"author":{"id":548513,"username":"mkobel","name":"Moritz 
Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"type":"ISSUE","assignee":null,"user_notes_count":2,"merge_requests_count":0,"upvotes":1,"downvotes":1,"due_date":null,"confidential":false,"discussion_locked":null,"issue_type":"issue","web_url":"https://gitlab.com/forgejo/test_repo/-/issues/2","time_stats":{"time_estimate":0,"total_time_spent":0,"human_time_estimate":null,"human_total_time_spent":null},"task_completion_status":{"count":0,"completed_count":0},"blocking_issues_count":0,"has_tasks":true,"task_status":"0 of 0 checklist items completed","_links":{"self":"https://gitlab.com/api/v4/projects/61363672/issues/2","notes":"https://gitlab.com/api/v4/projects/61363672/issues/2/notes","award_emoji":"https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji","project":"https://gitlab.com/api/v4/projects/61363672","closed_as_duplicate_of":null},"references":{"short":"#2","relative":"#2","full":"forgejo/test_repo#2"},"severity":"UNKNOWN","moved_to_id":null,"imported":false,"imported_from":"none","service_desk_reply_to":null}] \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Flabels%3Fpage=1&per_page=100 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Flabels%3Fpage=1&per_page=100
new file mode 100644
index 0000000..7070f55
--- /dev/null
+++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Flabels%3Fpage=1&per_page=100
@@ -0,0 +1,24 @@
+X-Runtime: 0.134818
+Gitlab-Lb: haproxy-main-57-lb-gprd
+X-Total: 11
+X-Total-Pages: 1
+Content-Security-Policy: default-src 'none'
+X-Prev-Page:
+Etag: W/"91f61a44ed534ef7d26e391dbef8dc0a"
+Gitlab-Sv: api-gke-us-east1-b
+Vary: Origin, Accept-Encoding
+Referrer-Policy: strict-origin-when-cross-origin
+Link: <https://gitlab.com/api/v4/projects/61363672/labels?id=61363672&include_ancestor_groups=true&page=1&per_page=100&with_counts=false>; rel="first", <https://gitlab.com/api/v4/projects/61363672/labels?id=61363672&include_ancestor_groups=true&page=1&per_page=100&with_counts=false>; rel="last"
+X-Frame-Options: SAMEORIGIN
+X-Gitlab-Meta: {"correlation_id":"25e616938688ad5e6ab58382f3e39c16","version":"1"}
+X-Next-Page:
+X-Page: 1
+Set-Cookie: _cfuvid=hdkQYZmgtcCpfA24UkICU4IGbz73Cpnd9.1NfpCL96Y-1725394794621-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+Content-Type: application/json
+Cache-Control: max-age=0, private, must-revalidate
+Cf-Cache-Status: MISS
+X-Content-Type-Options: nosniff
+X-Per-Page: 100
+Strict-Transport-Security: max-age=31536000
+
+[{"id":36554072,"name":"bug","description":null,"description_html":"","text_color":"#FFFFFF","color":"#d9534f","subscribed":false,"priority":null,"is_project_label":true},{"id":36554074,"name":"confirmed","description":null,"description_html":"","text_color":"#FFFFFF","color":"#d9534f","subscribed":false,"priority":null,"is_project_label":true},{"id":36554073,"name":"critical","description":null,"description_html":"","text_color":"#FFFFFF","color":"#d9534f","subscribed":false,"priority":null,"is_project_label":true},{"id":36554077,"name":"discussion","description":null,"description_html":"","text_color":"#FFFFFF","color":"#428bca","subscribed":false,"priority":null,"is_project_label":true},{"id":36554075,"name":"documentation","description":null,"description_html":"","text_color":"#1F1E24","color":"#f0ad4e","subscribed":false,"priority":null,"is_project_label":true},{"id":36556606,"name":"duplicate","description":"","description_html":"","text_color":"#FFFFFF","color":"#7F8C8D","subscribed":false,"priority":null,"is_project_label":true},{"id":36554079,"name":"enhancement","description":null,"description_html":"","text_color":"#FFFFFF","color":"#5cb85c","subscribed":false,"priority":null,"is_project_label":true},{"id":36554078,"name":"suggestion","description":null,"description_html":"","text_color":"#FFFFFF","color":"#428bca","subscribed":false,"priority":null,"is_project_label":true},{"id":36554076,"name":"support","description":null,"description_html":"","text_color":"#1F1E24","color":"#f0ad4e","subscribed":false,"priority":null,"is_project_label":true},{"id":36554080,"name":"test-scope::label0","description":"scoped label","description_html":"scoped label","text_color":"#FFFFFF","color":"#6699cc","subscribed":false,"priority":null,"is_project_label":true},{"id":36554094,"name":"test-scope::label1","description":"","description_html":"","text_color":"#FFFFFF","color":"#dc143c","subscribed":false,"priority":null,"is_project_label":true}] \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1
new file mode 100644
index 0000000..2903724
--- /dev/null
+++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1
@@ -0,0 +1,17 @@
+X-Content-Type-Options: nosniff
+X-Runtime: 0.132332
+Strict-Transport-Security: max-age=31536000
+Set-Cookie: _cfuvid=dCpqfgALGbwKdCAsAe6oT5DVCj6oBwrnU5y2Jd40KPs-1725394799000-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+X-Frame-Options: SAMEORIGIN
+Referrer-Policy: strict-origin-when-cross-origin
+Gitlab-Lb: haproxy-main-11-lb-gprd
+Content-Security-Policy: default-src 'none'
+Etag: W/"8b6a8cc6f36ac5289783c7654f292212"
+Vary: Origin, Accept-Encoding
+X-Gitlab-Meta: {"correlation_id":"bef818a29fa7cfc1f075ef0925e63404","version":"1"}
+Gitlab-Sv: api-gke-us-east1-d
+Content-Type: application/json
+Cache-Control: max-age=0, private, must-revalidate
+Cf-Cache-Status: MISS
+
+{"id":324657888,"iid":1,"project_id":61363672,"title":"Test branch","description":"do not merge this PR","state":"opened","created_at":"2024-09-03T07:57:19.866Z","updated_at":"2024-09-03T18:50:21.065Z","merged_by":null,"merge_user":null,"merged_at":null,"closed_by":null,"closed_at":null,"target_branch":"master","source_branch":"feat/test","user_notes_count":0,"upvotes":1,"downvotes":0,"author":{"id":2005797,"username":"oliverpool","name":"oliverpool","state":"active","locked":false,"avatar_url":"https://gitlab.com/uploads/-/system/user/avatar/2005797/avatar.png","web_url":"https://gitlab.com/oliverpool"},"assignees":[],"assignee":null,"reviewers":[],"source_project_id":61363672,"target_project_id":61363672,"labels":["test-scope::label0","test-scope::label1"],"draft":false,"imported":false,"imported_from":"none","work_in_progress":false,"milestone":{"id":4711991,"iid":1,"project_id":61363672,"title":"1.1.0","description":"","state":"active","created_at":"2024-09-03T13:52:48.414Z","updated_at":"2024-09-03T14:52:14.093Z","due_date":null,"start_date":null,"expired":false,"web_url":"https://gitlab.com/forgejo/test_repo/-/milestones/1"},"merge_when_pipeline_succeeds":false,"merge_status":"can_be_merged","detailed_merge_status":"mergeable","sha":"9f733b96b98a4175276edf6a2e1231489c3bdd23","merge_commit_sha":null,"squash_commit_sha":null,"discussion_locked":null,"should_remove_source_branch":null,"force_remove_source_branch":true,"prepared_at":"2024-09-03T08:15:46.361Z","reference":"!1","references":{"short":"!1","relative":"!1","full":"forgejo/test_repo!1"},"web_url":"https://gitlab.com/forgejo/test_repo/-/merge_requests/1","time_stats":{"time_estimate":0,"total_time_spent":0,"human_time_estimate":null,"human_total_time_spent":null},"squash":false,"squash_on_merge":false,"task_completion_status":{"count":0,"completed_count":0},"has_conflicts":false,"blocking_discussions_resolved":true,"approvals_before_merge":null,"subscribed":true,"changes_count":"1","latest_build_started_at":null,"latest_build_finished_at":null,"first_deployed_to_production_at":null,"pipeline":null,"head_pipeline":null,"diff_refs":{"base_sha":"c59c9b451acca9d106cc19d61d87afe3fbbb8b83","head_sha":"9f733b96b98a4175276edf6a2e1231489c3bdd23","start_sha":"c59c9b451acca9d106cc19d61d87afe3fbbb8b83"},"merge_error":null,"first_contribution":true,"user":{"can_merge":true}} \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Fapprovals b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Fapprovals
new file mode 100644
index 0000000..df85ea4
--- /dev/null
+++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Fapprovals
@@ -0,0 +1,17 @@
+Gitlab-Sv: api-gke-us-east1-d
+Set-Cookie: _cfuvid=c8dYhAX7c7Kj.9kgrISTCaOoMKuKV0amVHZbY28k_vc-1725394800394-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+Content-Security-Policy: default-src 'none'
+X-Frame-Options: SAMEORIGIN
+X-Gitlab-Meta: {"correlation_id":"1bfdf6ff862f2719b5ff0fa43d4b1f68","version":"1"}
+Referrer-Policy: strict-origin-when-cross-origin
+Cf-Cache-Status: MISS
+Cache-Control: max-age=0, private, must-revalidate
+X-Runtime: 0.141568
+Strict-Transport-Security: max-age=31536000
+Gitlab-Lb: haproxy-main-26-lb-gprd
+Content-Type: application/json
+Etag: W/"90fb650b1668940dd7ccac3869a3a2bd"
+Vary: Origin, Accept-Encoding
+X-Content-Type-Options: nosniff
+
+{"id":324657888,"iid":1,"project_id":61363672,"title":"Test branch","description":"do not merge this PR","state":"opened","created_at":"2024-09-03T07:57:19.866Z","updated_at":"2024-09-03T18:50:21.065Z","merge_status":"can_be_merged","approved":true,"approvals_required":0,"approvals_left":0,"require_password_to_approve":false,"approved_by":[{"user":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"}}],"suggested_approvers":[],"approvers":[],"approver_groups":[],"user_has_approved":true,"user_can_approve":false,"approval_rules_left":[],"has_approval_rules":false,"merge_request_approvers_available":false,"multiple_approval_rules_available":false,"invalid_approvers_rules":[]} \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=1&per_page=1 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=1&per_page=1
new file mode 100644
index 0000000..7e50312
--- /dev/null
+++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=1&per_page=1
@@ -0,0 +1,24 @@
+X-Gitlab-Meta: {"correlation_id":"46af78321ea2674ac3e1e56243baabf6","version":"1"}
+Gitlab-Lb: haproxy-main-27-lb-gprd
+Vary: Origin, Accept-Encoding
+X-Total-Pages: 2
+Strict-Transport-Security: max-age=31536000
+Content-Security-Policy: default-src 'none'
+X-Content-Type-Options: nosniff
+X-Page: 1
+X-Runtime: 0.071781
+Cf-Cache-Status: MISS
+Link: <https://gitlab.com/api/v4/projects/61363672/merge_requests/1/award_emoji?id=61363672&merge_request_iid=1&page=2&per_page=1>; rel="next", <https://gitlab.com/api/v4/projects/61363672/merge_requests/1/award_emoji?id=61363672&merge_request_iid=1&page=1&per_page=1>; rel="first", <https://gitlab.com/api/v4/projects/61363672/merge_requests/1/award_emoji?id=61363672&merge_request_iid=1&page=2&per_page=1>; rel="last"
+Etag: W/"a08d29f7fa018b5a6f30ae6de1035350"
+X-Prev-Page:
+X-Total: 2
+Content-Type: application/json
+X-Frame-Options: SAMEORIGIN
+X-Next-Page: 2
+X-Per-Page: 1
+Referrer-Policy: strict-origin-when-cross-origin
+Gitlab-Sv: api-gke-us-east1-b
+Set-Cookie: _cfuvid=PKNy4TeWDnd8j772wQMiBZpmFpOjDfu9JcpnUSyVULU-1725394799568-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+Cache-Control: max-age=0, private, must-revalidate
+
+[{"id":28098492,"name":"thumbsup","user":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T18:49:58.072Z","updated_at":"2024-09-03T18:49:58.072Z","awardable_id":324657888,"awardable_type":"MergeRequest","url":null}] \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=2&per_page=1 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=2&per_page=1
new file mode 100644
index 0000000..f33a33c
--- /dev/null
+++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=2&per_page=1
@@ -0,0 +1,24 @@
+Etag: W/"9d4f10c73db7508f9f63f83f4f3e9dd2"
+Link: <https://gitlab.com/api/v4/projects/61363672/merge_requests/1/award_emoji?id=61363672&merge_request_iid=1&page=1&per_page=1>; rel="prev", <https://gitlab.com/api/v4/projects/61363672/merge_requests/1/award_emoji?id=61363672&merge_request_iid=1&page=1&per_page=1>; rel="first", <https://gitlab.com/api/v4/projects/61363672/merge_requests/1/award_emoji?id=61363672&merge_request_iid=1&page=2&per_page=1>; rel="last"
+X-Runtime: 0.070580
+Gitlab-Sv: api-gke-us-east1-c
+Content-Type: application/json
+Cf-Cache-Status: MISS
+Vary: Origin, Accept-Encoding
+X-Frame-Options: SAMEORIGIN
+X-Prev-Page: 1
+Gitlab-Lb: haproxy-main-58-lb-gprd
+Cache-Control: max-age=0, private, must-revalidate
+X-Total: 2
+X-Total-Pages: 2
+Strict-Transport-Security: max-age=31536000
+Referrer-Policy: strict-origin-when-cross-origin
+X-Gitlab-Meta: {"correlation_id":"c39c59a22f48b51fcdbe4d7121983045","version":"1"}
+X-Next-Page:
+X-Per-Page: 1
+Set-Cookie: _cfuvid=ocsAYkwqggUMC09s009R.yWb7q3OTyWzwjV73iFeOAM-1725394799827-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+Content-Security-Policy: default-src 'none'
+X-Content-Type-Options: nosniff
+X-Page: 2
+
+[{"id":28098494,"name":"tada","user":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T18:50:02.028Z","updated_at":"2024-09-03T18:50:02.028Z","awardable_id":324657888,"awardable_type":"MergeRequest","url":null}] \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=3&per_page=1 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=3&per_page=1
new file mode 100644
index 0000000..783ea3b
--- /dev/null
+++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=3&per_page=1
@@ -0,0 +1,26 @@
+Content-Length: 2
+X-Next-Page:
+X-Per-Page: 1
+X-Runtime: 0.069736
+Link: <https://gitlab.com/api/v4/projects/61363672/merge_requests/1/award_emoji?id=61363672&merge_request_iid=1&page=1&per_page=1>; rel="first", <https://gitlab.com/api/v4/projects/61363672/merge_requests/1/award_emoji?id=61363672&merge_request_iid=1&page=2&per_page=1>; rel="last"
+X-Total-Pages: 2
+X-Content-Type-Options: nosniff
+X-Gitlab-Meta: {"correlation_id":"4a199f75df6e91c7bb25ce7f0ae5ba87","version":"1"}
+Cf-Cache-Status: MISS
+Strict-Transport-Security: max-age=31536000
+Referrer-Policy: strict-origin-when-cross-origin
+X-Prev-Page:
+Content-Type: application/json
+Etag: W/"4f53cda18c2baa0c0354bb5f9a3ecbe5"
+Set-Cookie: _cfuvid=LKsdyXLErarfZPBo25O7PYiKWcvrF92MfU4i57.1wVw-1725394800092-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+Content-Security-Policy: default-src 'none'
+Accept-Ranges: bytes
+X-Frame-Options: SAMEORIGIN
+Gitlab-Lb: haproxy-main-12-lb-gprd
+Gitlab-Sv: api-gke-us-east1-b
+Cache-Control: max-age=0, private, must-revalidate
+Vary: Origin, Accept-Encoding
+X-Page: 3
+X-Total: 2
+
+[] \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F2%2Fapprovals b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F2%2Fapprovals
new file mode 100644
index 0000000..8025baa
--- /dev/null
+++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F2%2Fapprovals
@@ -0,0 +1,16 @@
+Content-Type: application/json
+Cache-Control: no-cache
+X-Runtime: 0.050861
+Cf-Cache-Status: MISS
+Content-Length: 27
+Strict-Transport-Security: max-age=31536000
+X-Content-Type-Options: nosniff
+Set-Cookie: _cfuvid=dOl9pLwVdWdrfHK2_lQ8ilTg21PZJf8ErnJ6hi2V6LQ-1725394529656-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+Content-Security-Policy: default-src 'none'
+X-Gitlab-Meta: {"correlation_id":"8b1408168090614939be8b301aaf8ec1","version":"1"}
+Referrer-Policy: strict-origin-when-cross-origin
+Gitlab-Lb: haproxy-main-42-lb-gprd
+Vary: Origin, Accept-Encoding
+Gitlab-Sv: api-gke-us-east1-b
+
+{"message":"404 Not found"} \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%3Fpage=1&per_page=1&view=simple b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%3Fpage=1&per_page=1&view=simple
new file mode 100644
index 0000000..1ad6255
--- /dev/null
+++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%3Fpage=1&per_page=1&view=simple
@@ -0,0 +1,24 @@
+Content-Security-Policy: default-src 'none'
+X-Prev-Page:
+Set-Cookie: _cfuvid=7GL5tIuTakQp9CVUUSpwUwMYssAGhn7PgI8tTqNnmz0-1725394798686-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+X-Gitlab-Meta: {"correlation_id":"7b65fd9c80614af0ef38989ba51e5c29","version":"1"}
+Gitlab-Lb: haproxy-main-30-lb-gprd
+Etag: W/"8a9c7ac19d2c07896e0e68bc7725d52c"
+X-Content-Type-Options: nosniff
+Strict-Transport-Security: max-age=31536000
+Gitlab-Sv: api-gke-us-east1-b
+X-Page: 1
+X-Total: 1
+Cache-Control: max-age=0, private, must-revalidate
+Link: <https://gitlab.com/api/v4/projects/61363672/merge_requests?id=61363672&order_by=created_at&page=1&per_page=1&sort=desc&state=all&view=simple&with_labels_details=false&with_merge_status_recheck=false>; rel="first", <https://gitlab.com/api/v4/projects/61363672/merge_requests?id=61363672&order_by=created_at&page=1&per_page=1&sort=desc&state=all&view=simple&with_labels_details=false&with_merge_status_recheck=false>; rel="last"
+X-Per-Page: 1
+Referrer-Policy: strict-origin-when-cross-origin
+Cf-Cache-Status: MISS
+Content-Type: application/json
+X-Total-Pages: 1
+Vary: Origin, Accept-Encoding
+X-Frame-Options: SAMEORIGIN
+X-Runtime: 0.123283
+X-Next-Page:
+
+[{"id":324657888,"iid":1,"project_id":61363672,"title":"Test branch","description":"do not merge this PR","state":"opened","created_at":"2024-09-03T07:57:19.866Z","updated_at":"2024-09-03T18:50:21.065Z","web_url":"https://gitlab.com/forgejo/test_repo/-/merge_requests/1"}] \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmilestones%3Fpage=1&per_page=100&state=all b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmilestones%3Fpage=1&per_page=100&state=all
new file mode 100644
index 0000000..4795569
--- /dev/null
+++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmilestones%3Fpage=1&per_page=100&state=all
@@ -0,0 +1,24 @@
+X-Total: 2
+Set-Cookie: _cfuvid=uwwcVHMnVqsf5dOVdmePMl8w9SEvmr1muvo7QttWeKI-1725394794295-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+Content-Security-Policy: default-src 'none'
+Etag: W/"a42f286b703ec341ad7f117b273a51ad"
+Link: <https://gitlab.com/api/v4/projects/61363672/milestones?id=61363672&include_ancestors=false&page=1&per_page=100&state=all>; rel="first", <https://gitlab.com/api/v4/projects/61363672/milestones?id=61363672&include_ancestors=false&page=1&per_page=100&state=all>; rel="last"
+Vary: Origin, Accept-Encoding
+X-Content-Type-Options: nosniff
+X-Gitlab-Meta: {"correlation_id":"ed978cae0ea2bf9ac4b1f46fddfdf982","version":"1"}
+X-Per-Page: 100
+Cache-Control: max-age=0, private, must-revalidate
+Cf-Cache-Status: MISS
+Content-Type: application/json
+X-Next-Page:
+X-Page: 1
+Strict-Transport-Security: max-age=31536000
+Gitlab-Sv: api-gke-us-east1-c
+X-Frame-Options: SAMEORIGIN
+X-Prev-Page:
+Referrer-Policy: strict-origin-when-cross-origin
+Gitlab-Lb: haproxy-main-34-lb-gprd
+X-Runtime: 0.069266
+X-Total-Pages: 1
+
+[{"id":4711993,"iid":2,"project_id":61363672,"title":"1.0.0","description":"","state":"closed","created_at":"2024-09-03T13:53:08.516Z","updated_at":"2024-09-03T20:03:57.786Z","due_date":null,"start_date":null,"expired":false,"web_url":"https://gitlab.com/forgejo/test_repo/-/milestones/2"},{"id":4711991,"iid":1,"project_id":61363672,"title":"1.1.0","description":"","state":"active","created_at":"2024-09-03T13:52:48.414Z","updated_at":"2024-09-03T14:52:14.093Z","due_date":null,"start_date":null,"expired":false,"web_url":"https://gitlab.com/forgejo/test_repo/-/milestones/1"}] \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Freleases%3Fpage=1&per_page=100 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Freleases%3Fpage=1&per_page=100
new file mode 100644
index 0000000..e0dcec2
--- /dev/null
+++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Freleases%3Fpage=1&per_page=100
@@ -0,0 +1,24 @@
+X-Total-Pages: 1
+Referrer-Policy: strict-origin-when-cross-origin
+X-Total: 1
+X-Frame-Options: SAMEORIGIN
+X-Prev-Page:
+X-Content-Type-Options: nosniff
+Strict-Transport-Security: max-age=31536000
+Link: <https://gitlab.com/api/v4/projects/61363672/releases?id=61363672&order_by=released_at&page=1&per_page=100&sort=desc>; rel="first", <https://gitlab.com/api/v4/projects/61363672/releases?id=61363672&order_by=released_at&page=1&per_page=100&sort=desc>; rel="last"
+Vary: Origin, Accept-Encoding
+X-Per-Page: 100
+Set-Cookie: _cfuvid=oZA4jh0EzL5.ONTRYvxi4IryznOCXhUFgv3_ILSeCaA-1725394795215-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+Cache-Control: max-age=0, private, must-revalidate
+X-Next-Page:
+Gitlab-Sv: api-gke-us-east1-c
+Cf-Cache-Status: MISS
+X-Gitlab-Meta: {"correlation_id":"3ddca8834bb2582c7864327265a18732","version":"1"}
+Gitlab-Lb: haproxy-main-37-lb-gprd
+Etag: W/"0dca592238578abf637a888d6aa33e06"
+X-Page: 1
+X-Runtime: 0.099990
+Content-Type: application/json
+Content-Security-Policy: default-src 'none'
+
+[{"name":"First Release","tag_name":"v0.9.99","description":"A test release","created_at":"2024-09-03T15:01:01.513Z","released_at":"2024-09-03T15:01:01.000Z","upcoming_release":false,"author":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"commit":{"id":"0720a3ec57c1f843568298117b874319e7deee75","short_id":"0720a3ec","created_at":"2019-11-28T08:49:16.000+00:00","parent_ids":["93ea21ce45d35690c35e80961d239645139e872c"],"title":"Add new file","message":"Add new file","author_name":"Lauris BH","author_email":"lauris@nix.lv","authored_date":"2019-11-28T08:49:16.000+00:00","committer_name":"Lauris BH","committer_email":"lauris@nix.lv","committed_date":"2019-11-28T08:49:16.000+00:00","trailers":{},"extended_trailers":{},"web_url":"https://gitlab.com/forgejo/test_repo/-/commit/0720a3ec57c1f843568298117b874319e7deee75"},"commit_path":"/forgejo/test_repo/-/commit/0720a3ec57c1f843568298117b874319e7deee75","tag_path":"/forgejo/test_repo/-/tags/v0.9.99","assets":{"count":4,"sources":[{"format":"zip","url":"https://gitlab.com/forgejo/test_repo/-/archive/v0.9.99/test_repo-v0.9.99.zip"},{"format":"tar.gz","url":"https://gitlab.com/forgejo/test_repo/-/archive/v0.9.99/test_repo-v0.9.99.tar.gz"},{"format":"tar.bz2","url":"https://gitlab.com/forgejo/test_repo/-/archive/v0.9.99/test_repo-v0.9.99.tar.bz2"},{"format":"tar","url":"https://gitlab.com/forgejo/test_repo/-/archive/v0.9.99/test_repo-v0.9.99.tar"}],"links":[]},"evidences":[{"sha":"e30c1d21d05ff0c73436ee1e97b3ef12a1d6d33d0dcd","filepath":"https://gitlab.com/forgejo/test_repo/-/releases/v0.9.99/evidences/9608487.json","collected_at":"2024-09-03T15:01:02.963Z"}],"_links":{"closed_issues_url":"https://gitlab.com/forgejo/test_repo/-/issues?release_tag=v0.9.99\u0026scope=all\u0026state=closed","closed_merge_requests_url":"https://gitlab.com/forgejo/test_repo/-/merge_requests?release_tag=v0.9.99\u0026scope=all\u0026state=closed","edit_url":"https://gitlab.com/forgejo/test_repo/-/releases/v0.9.99/edit","merged_merge_requests_url":"https://gitlab.com/forgejo/test_repo/-/merge_requests?release_tag=v0.9.99\u0026scope=all\u0026state=merged","opened_issues_url":"https://gitlab.com/forgejo/test_repo/-/issues?release_tag=v0.9.99\u0026scope=all\u0026state=opened","opened_merge_requests_url":"https://gitlab.com/forgejo/test_repo/-/merge_requests?release_tag=v0.9.99\u0026scope=all\u0026state=opened","self":"https://gitlab.com/forgejo/test_repo/-/releases/v0.9.99"}}] \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2Fforgejo%252Ftest_repo b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2Fforgejo%252Ftest_repo
new file mode 100644
index 0000000..53c925a
--- /dev/null
+++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2Fforgejo%252Ftest_repo
@@ -0,0 +1,17 @@
+Content-Security-Policy: default-src 'none'
+Etag: W/"b36bd4522b7e8b2509078271491fb972"
+X-Runtime: 0.182246
+Set-Cookie: _cfuvid=wk6gVgcAYZqUygBPZ8pK6j22vOlbZuagLq74bgkySCs-1725394793303-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+Content-Type: application/json
+X-Content-Type-Options: nosniff
+Gitlab-Sv: api-gke-us-east1-c
+Gitlab-Lb: haproxy-main-58-lb-gprd
+Cache-Control: max-age=0, private, must-revalidate
+Strict-Transport-Security: max-age=31536000
+Referrer-Policy: strict-origin-when-cross-origin
+Vary: Origin, Accept-Encoding
+X-Frame-Options: SAMEORIGIN
+X-Gitlab-Meta: {"correlation_id":"43c0c955821005b625f1707ecac8d4d8","version":"1"}
+Cf-Cache-Status: MISS
+
+{"id":61363672,"description":"Test repository for testing migration from gitlab to forgejo","name":"test_repo","name_with_namespace":"Forgejo / test_repo","path":"test_repo","path_with_namespace":"forgejo/test_repo","created_at":"2024-09-03T07:44:30.668Z","default_branch":"master","tag_list":["migration","test"],"topics":["migration","test"],"ssh_url_to_repo":"git@gitlab.com:forgejo/test_repo.git","http_url_to_repo":"https://gitlab.com/forgejo/test_repo.git","web_url":"https://gitlab.com/forgejo/test_repo","readme_url":"https://gitlab.com/forgejo/test_repo/-/blob/master/README.md","forks_count":0,"avatar_url":null,"star_count":0,"last_activity_at":"2024-09-03T20:03:18.187Z","namespace":{"id":64459497,"name":"Forgejo","path":"forgejo","kind":"group","full_path":"forgejo","parent_id":null,"avatar_url":"/uploads/-/system/group/avatar/64459497/73144-c883a242dec5299fbc06bbe3ee71d8c6.png","web_url":"https://gitlab.com/groups/forgejo"},"forked_from_project":{"id":15578026,"description":"Test repository for testing migration from gitlab to gitea","name":"test_repo","name_with_namespace":"gitea / test_repo","path":"test_repo","path_with_namespace":"gitea/test_repo","created_at":"2019-11-28T08:20:33.019Z","default_branch":"master","tag_list":["migration","test"],"topics":["migration","test"],"ssh_url_to_repo":"git@gitlab.com:gitea/test_repo.git","http_url_to_repo":"https://gitlab.com/gitea/test_repo.git","web_url":"https://gitlab.com/gitea/test_repo","readme_url":"https://gitlab.com/gitea/test_repo/-/blob/master/README.md","forks_count":2,"avatar_url":null,"star_count":0,"last_activity_at":"2024-09-03T07:52:28.488Z","namespace":{"id":3181312,"name":"gitea","path":"gitea","kind":"group","full_path":"gitea","parent_id":null,"avatar_url":"/uploads/-/system/group/avatar/3181312/gitea.png","web_url":"https://gitlab.com/groups/gitea"}},"container_registry_image_prefix":"registry.gitlab.com/forgejo/test_repo","_links":{"self":"https://gitlab.com/api/v4/projects/61363672","issues":"https://gitlab.com/api/v4/projects/61363672/issues","merge_requests":"https://gitlab.com/api/v4/projects/61363672/merge_requests","repo_branches":"https://gitlab.com/api/v4/projects/61363672/repository/branches","labels":"https://gitlab.com/api/v4/projects/61363672/labels","events":"https://gitlab.com/api/v4/projects/61363672/events","members":"https://gitlab.com/api/v4/projects/61363672/members","cluster_agents":"https://gitlab.com/api/v4/projects/61363672/cluster_agents"},"packages_enabled":true,"empty_repo":false,"archived":false,"visibility":"public","resolve_outdated_diff_discussions":false,"container_expiration_policy":{"cadence":"1d","enabled":false,"keep_n":10,"older_than":"90d","name_regex":".*","name_regex_keep":null,"next_run_at":"2024-09-04T07:44:30.699Z"},"repository_object_format":"sha1","issues_enabled":true,"merge_requests_enabled":true,"wiki_enabled":true,"jobs_enabled":true,"snippets_enabled":true,"container_registry_enabled":true,"service_desk_enabled":true,"service_desk_address":"contact-project+forgejo-test-repo-61363672-issue-@incoming.gitlab.com","can_create_merge_request_in":true,"issues_access_level":"enabled","repository_access_level":"enabled","merge_requests_access_level":"enabled","forking_access_level":"enabled","wiki_access_level":"enabled","builds_access_level":"enabled","snippets_access_level":"enabled","pages_access_level":"enabled","analytics_access_level":"enabled","container_registry_access_level":"enabled","security_and_compliance_access_level":"private","releases_access_level":"enabled","env
ironments_access_level":"enabled","feature_flags_access_level":"enabled","infrastructure_access_level":"enabled","monitor_access_level":"enabled","model_experiments_access_level":"enabled","model_registry_access_level":"enabled","emails_disabled":false,"emails_enabled":true,"shared_runners_enabled":true,"lfs_enabled":true,"creator_id":2005797,"mr_default_target_self":false,"import_url":null,"import_type":null,"import_status":"finished","import_error":null,"open_issues_count":0,"description_html":"\u003cp data-sourcepos=\"1:1-1:60\" dir=\"auto\"\u003eTest repository for testing migration from gitlab to forgejo\u003c/p\u003e","updated_at":"2024-09-03T20:03:18.187Z","ci_default_git_depth":50,"ci_forward_deployment_enabled":true,"ci_forward_deployment_rollback_allowed":true,"ci_job_token_scope_enabled":false,"ci_separated_caches":true,"ci_allow_fork_pipelines_to_run_in_parent_project":true,"ci_id_token_sub_claim_components":["project_path","ref_type","ref"],"build_git_strategy":"fetch","keep_latest_artifact":true,"restrict_user_defined_variables":false,"ci_pipeline_variables_minimum_override_role":"maintainer","runners_token":null,"runner_token_expiration_interval":null,"group_runners_enabled":true,"auto_cancel_pending_pipelines":"enabled","build_timeout":3600,"auto_devops_enabled":false,"auto_devops_deploy_strategy":"continuous","ci_push_repository_for_job_token_allowed":false,"ci_config_path":null,"public_jobs":true,"shared_with_groups":[],"only_allow_merge_if_pipeline_succeeds":false,"allow_merge_on_skipped_pipeline":null,"request_access_enabled":true,"only_allow_merge_if_all_discussions_are_resolved":false,"remove_source_branch_after_merge":true,"printing_merge_request_link_enabled":true,"merge_method":"merge","squash_option":"default_off","enforce_auth_checks_on_uploads":true,"suggestion_commit_message":null,"merge_commit_template":null,"squash_commit_template":null,"issue_branch_template":null,"warn_about_potentially_unwanted_characters":true,"autoclose_referenced_issues":true,"external_authorization_classification_label":"","requirements_enabled":false,"requirements_access_level":"enabled","security_and_compliance_enabled":true,"pre_receive_secret_detection_enabled":false,"compliance_frameworks":[],"permissions":{"project_access":{"access_level":40,"notification_level":3},"group_access":null}} \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2Fgitea%252Ftest_repo b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2Fgitea%252Ftest_repo
new file mode 100644
index 0000000..96f1ea8
--- /dev/null
+++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2Fgitea%252Ftest_repo
@@ -0,0 +1,17 @@
+Referrer-Policy: strict-origin-when-cross-origin
+Gitlab-Lb: haproxy-main-51-lb-gprd
+Cf-Cache-Status: MISS
+Etag: W/"8db4917b3be5f4ca0d101a702179b75a"
+X-Content-Type-Options: nosniff
+Strict-Transport-Security: max-age=31536000
+Gitlab-Sv: api-gke-us-east1-b
+Content-Type: application/json
+Cache-Control: max-age=0, private, must-revalidate
+X-Gitlab-Meta: {"correlation_id":"9b3859cf6d73ce5de261a56d286072a5","version":"1"}
+X-Runtime: 0.119487
+Content-Security-Policy: default-src 'none'
+Vary: Origin, Accept-Encoding
+Set-Cookie: _cfuvid=Cmc.ycVkdwA_tBvmR2tOVLQ5B.khzzU39ZUxgf4RNlw-1710504204838-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+X-Frame-Options: SAMEORIGIN
+
+{"id":15578026,"description":"Test repository for testing migration from gitlab to gitea","name":"test_repo","name_with_namespace":"gitea / test_repo","path":"test_repo","path_with_namespace":"gitea/test_repo","created_at":"2019-11-28T08:20:33.019Z","default_branch":"master","tag_list":["migration","test"],"topics":["migration","test"],"ssh_url_to_repo":"git@gitlab.com:gitea/test_repo.git","http_url_to_repo":"https://gitlab.com/gitea/test_repo.git","web_url":"https://gitlab.com/gitea/test_repo","readme_url":"https://gitlab.com/gitea/test_repo/-/blob/master/README.md","forks_count":1,"avatar_url":null,"star_count":0,"last_activity_at":"2020-04-19T19:46:04.527Z","namespace":{"id":3181312,"name":"gitea","path":"gitea","kind":"group","full_path":"gitea","parent_id":null,"avatar_url":"/uploads/-/system/group/avatar/3181312/gitea.png","web_url":"https://gitlab.com/groups/gitea"},"container_registry_image_prefix":"registry.gitlab.com/gitea/test_repo","_links":{"self":"https://gitlab.com/api/v4/projects/15578026","issues":"https://gitlab.com/api/v4/projects/15578026/issues","merge_requests":"https://gitlab.com/api/v4/projects/15578026/merge_requests","repo_branches":"https://gitlab.com/api/v4/projects/15578026/repository/branches","labels":"https://gitlab.com/api/v4/projects/15578026/labels","events":"https://gitlab.com/api/v4/projects/15578026/events","members":"https://gitlab.com/api/v4/projects/15578026/members","cluster_agents":"https://gitlab.com/api/v4/projects/15578026/cluster_agents"},"packages_enabled":true,"empty_repo":false,"archived":false,"visibility":"public","resolve_outdated_diff_discussions":false,"repository_object_format":"sha1","issues_enabled":true,"merge_requests_enabled":true,"wiki_enabled":true,"jobs_enabled":true,"snippets_enabled":true,"container_registry_enabled":true,"service_desk_enabled":true,"can_create_merge_request_in":true,"issues_access_level":"enabled","repository_access_level":"enabled","merge_requests_access_level":"enabled","forking_access_level":"enabled","wiki_access_level":"enabled","builds_access_level":"enabled","snippets_access_level":"enabled","pages_access_level":"enabled","analytics_access_level":"enabled","container_registry_access_level":"enabled","security_and_compliance_access_level":"private","releases_access_level":"enabled","environments_access_level":"enabled","feature_flags_access_level":"enabled","infrastructure_access_level":"enabled","monitor_access_level":"enabled","model_experiments_access_level":"enabled","model_registry_access_level":"enabled","emails_disabled":false,"emails_enabled":true,"shared_runners_enabled":true,"lfs_enabled":true,"creator_id":1241334,"import_status":"none","open_issues_count":0,"description_html":"\u003cp data-sourcepos=\"1:1-1:58\" dir=\"auto\"\u003eTest repository for testing migration from gitlab to 
gitea\u003c/p\u003e","updated_at":"2024-01-11T01:23:21.057Z","ci_config_path":null,"public_jobs":true,"shared_with_groups":[],"only_allow_merge_if_pipeline_succeeds":false,"allow_merge_on_skipped_pipeline":null,"request_access_enabled":true,"only_allow_merge_if_all_discussions_are_resolved":false,"remove_source_branch_after_merge":true,"printing_merge_request_link_enabled":true,"merge_method":"ff","squash_option":"default_off","enforce_auth_checks_on_uploads":true,"suggestion_commit_message":null,"merge_commit_template":null,"squash_commit_template":null,"issue_branch_template":null,"warn_about_potentially_unwanted_characters":true,"autoclose_referenced_issues":true,"external_authorization_classification_label":"","requirements_enabled":false,"requirements_access_level":"enabled","security_and_compliance_enabled":false,"compliance_frameworks":[],"permissions":{"project_access":null,"group_access":null}} \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fversion b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fversion
new file mode 100644
index 0000000..8b3dd5b
--- /dev/null
+++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fversion
@@ -0,0 +1,17 @@
+Content-Type: application/json
+Cache-Control: max-age=0, private, must-revalidate
+Etag: W/"a27b6b3c661f4ee7a68e5b905f5291fb"
+Vary: Origin, Accept-Encoding
+X-Gitlab-Meta: {"correlation_id":"10488cc696aabdc48229039f2c9e4ebd","version":"1"}
+Gitlab-Sv: api-gke-us-east1-d
+Cf-Cache-Status: MISS
+Strict-Transport-Security: max-age=31536000
+X-Frame-Options: SAMEORIGIN
+X-Runtime: 0.034189
+Referrer-Policy: strict-origin-when-cross-origin
+Set-Cookie: _cfuvid=hbFjaLVJudhzz6Sqg5QnViD.eikToNruD.b1oEG5xrc-1725394792940-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+Content-Security-Policy: default-src 'none'
+Gitlab-Lb: haproxy-main-56-lb-gprd
+X-Content-Type-Options: nosniff
+
+{"version":"17.4.0-pre","revision":"8c6dcc9e627","kas":{"enabled":true,"externalUrl":"wss://kas.gitlab.com","version":"17.4.0+a2ca345cd681ef39094623d8f4b6ed65996de57d"},"enterprise":true} \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996 b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996
new file mode 100644
index 0000000..db8d596
--- /dev/null
+++ b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996
@@ -0,0 +1,22 @@
+X-Runtime: 0.088022
+Strict-Transport-Security: max-age=31536000
+Ratelimit-Observed: 3
+Cache-Control: max-age=0, private, must-revalidate
+Etag: W/"03ce4f6ce1c1e8c5a31df8a44cf2fbdd"
+Gitlab-Lb: haproxy-main-11-lb-gprd
+Content-Security-Policy: default-src 'none'
+Ratelimit-Limit: 2000
+X-Gitlab-Meta: {"correlation_id":"b57b226f741f9140a1fea54f65cb5cfd","version":"1"}
+Referrer-Policy: strict-origin-when-cross-origin
+Ratelimit-Remaining: 1997
+Ratelimit-Resettime: Thu, 30 Nov 2023 08:24:53 GMT
+Set-Cookie: _cfuvid=V0ToiOTUW0XbtWq7BirwVNfL1_YP1POMrLBnDSEWS0M-1701332633965-0-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+X-Content-Type-Options: nosniff
+X-Frame-Options: SAMEORIGIN
+Gitlab-Sv: localhost
+Content-Type: application/json
+Vary: Origin, Accept-Encoding
+Ratelimit-Reset: 1701332693
+Cf-Cache-Status: MISS
+
+{"id":6590996,"description":"Arch packaging and build files","name":"archbuild","name_with_namespace":"Troy Engel / archbuild","path":"archbuild","path_with_namespace":"troyengel/archbuild","created_at":"2018-06-03T22:53:17.388Z","default_branch":"master","tag_list":[],"topics":[],"ssh_url_to_repo":"git@gitlab.com:troyengel/archbuild.git","http_url_to_repo":"https://gitlab.com/troyengel/archbuild.git","web_url":"https://gitlab.com/troyengel/archbuild","readme_url":"https://gitlab.com/troyengel/archbuild/-/blob/master/README.md","forks_count":0,"avatar_url":null,"star_count":0,"last_activity_at":"2020-12-13T18:09:32.071Z","namespace":{"id":1452515,"name":"Troy Engel","path":"troyengel","kind":"user","full_path":"troyengel","parent_id":null,"avatar_url":"https://secure.gravatar.com/avatar/b226c267929f1bcfcc446e75a025591c?s=80\u0026d=identicon","web_url":"https://gitlab.com/troyengel"},"container_registry_image_prefix":"registry.gitlab.com/troyengel/archbuild","_links":{"self":"https://gitlab.com/api/v4/projects/6590996","issues":"https://gitlab.com/api/v4/projects/6590996/issues","merge_requests":"https://gitlab.com/api/v4/projects/6590996/merge_requests","repo_branches":"https://gitlab.com/api/v4/projects/6590996/repository/branches","labels":"https://gitlab.com/api/v4/projects/6590996/labels","events":"https://gitlab.com/api/v4/projects/6590996/events","members":"https://gitlab.com/api/v4/projects/6590996/members","cluster_agents":"https://gitlab.com/api/v4/projects/6590996/cluster_agents"},"packages_enabled":null,"empty_repo":false,"archived":true,"visibility":"public","owner":{"id":1215848,"username":"troyengel","name":"Troy Engel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/b226c267929f1bcfcc446e75a025591c?s=80\u0026d=identicon","web_url":"https://gitlab.com/troyengel"},"resolve_outdated_diff_discussions":false,"issues_enabled":true,"merge_requests_enabled":true,"wiki_enabled":true,"jobs_enabled":true,"snippets_enabled":true,"container_registry_enabled":true,"service_desk_enabled":true,"can_create_merge_request_in":false,"issues_access_level":"enabled","repository_access_level":"enabled","merge_requests_access_level":"enabled","forking_access_level":"enabled","wiki_access_level":"enabled","builds_access_level":"enabled","snippets_access_level":"enabled","pages_access_level":"enabled","analytics_access_level":"enabled","container_registry_access_level":"enabled","security_and_compliance_access_level":"private","releases_access_level":"enabled","environments_access_level":"enabled","feature_flags_access_level":"enabled","infrastructure_access_level":"enabled","monitor_access_level":"enabled","model_experiments_access_level":"enabled","emails_disabled":false,"emails_enabled":true,"shared_runners_enabled":true,"lfs_enabled":false,"creator_id":1215848,"import_status":"finished","open_issues_count":0,"description_html":"\u003cp data-sourcepos=\"1:1-1:30\" dir=\"auto\"\u003eArch packaging and build 
files\u003c/p\u003e","updated_at":"2022-07-13T21:32:12.624Z","ci_config_path":null,"public_jobs":true,"shared_with_groups":[],"only_allow_merge_if_pipeline_succeeds":false,"allow_merge_on_skipped_pipeline":null,"request_access_enabled":false,"only_allow_merge_if_all_discussions_are_resolved":false,"remove_source_branch_after_merge":null,"printing_merge_request_link_enabled":true,"merge_method":"merge","squash_option":"default_off","enforce_auth_checks_on_uploads":true,"suggestion_commit_message":null,"merge_commit_template":null,"squash_commit_template":null,"issue_branch_template":null,"autoclose_referenced_issues":true,"external_authorization_classification_label":"","requirements_enabled":false,"requirements_access_level":"enabled","security_and_compliance_enabled":false,"compliance_frameworks":[],"permissions":{"project_access":null,"group_access":null}} \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fissues%2F2%2Faward_emoji%3Fpage=1&per_page=10 b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fissues%2F2%2Faward_emoji%3Fpage=1&per_page=10
new file mode 100644
index 0000000..8f829d0
--- /dev/null
+++ b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fissues%2F2%2Faward_emoji%3Fpage=1&per_page=10
@@ -0,0 +1,31 @@
+Gitlab-Sv: localhost
+X-Content-Type-Options: nosniff
+Gitlab-Lb: haproxy-main-25-lb-gprd
+X-Total-Pages: 1
+Referrer-Policy: strict-origin-when-cross-origin
+Ratelimit-Observed: 5
+Ratelimit-Remaining: 1995
+Content-Security-Policy: default-src 'none'
+X-Gitlab-Meta: {"correlation_id":"eeab46d836341bd4cb18e3d2e82abf97","version":"1"}
+Ratelimit-Limit: 2000
+Accept-Ranges: bytes
+Content-Type: application/json
+X-Page: 1
+X-Frame-Options: SAMEORIGIN
+X-Prev-Page:
+Cf-Cache-Status: MISS
+X-Total: 0
+Ratelimit-Resettime: Thu, 30 Nov 2023 08:24:54 GMT
+Link: <https://gitlab.com/api/v4/projects/6590996/issues/2/award_emoji?id=6590996&issue_iid=2&page=1&per_page=10>; rel="first", <https://gitlab.com/api/v4/projects/6590996/issues/2/award_emoji?id=6590996&issue_iid=2&page=1&per_page=10>; rel="last"
+X-Per-Page: 10
+Set-Cookie: _cfuvid=c5HuTPxOuSXdHSuVrXQALS.uV7WvAYfc5Mc_143EAB8-1701332634513-0-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+Content-Length: 2
+Vary: Origin, Accept-Encoding
+Cache-Control: max-age=0, private, must-revalidate
+Etag: W/"4f53cda18c2baa0c0354bb5f9a3ecbe5"
+X-Runtime: 0.069269
+Strict-Transport-Security: max-age=31536000
+Ratelimit-Reset: 1701332694
+X-Next-Page:
+
+[] \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fissues%3Fpage=1&per_page=10&sort=asc&state=all b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fissues%3Fpage=1&per_page=10&sort=asc&state=all
new file mode 100644
index 0000000..99133d5
--- /dev/null
+++ b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fissues%3Fpage=1&per_page=10&sort=asc&state=all
@@ -0,0 +1,29 @@
+Link: <https://gitlab.com/api/v4/projects/6590996/issues?id=6590996&order_by=created_at&page=1&per_page=10&sort=asc&state=all&with_labels_details=false>; rel="first", <https://gitlab.com/api/v4/projects/6590996/issues?id=6590996&order_by=created_at&page=1&per_page=10&sort=asc&state=all&with_labels_details=false>; rel="last"
+Ratelimit-Observed: 4
+Ratelimit-Remaining: 1996
+Gitlab-Lb: haproxy-main-04-lb-gprd
+Vary: Origin, Accept-Encoding
+Content-Security-Policy: default-src 'none'
+X-Next-Page:
+Ratelimit-Reset: 1701332694
+Etag: W/"f50a70d0fc1465a289d231f80806ced7"
+X-Gitlab-Meta: {"correlation_id":"47afd74254dd7946d2b2bded87448c60","version":"1"}
+X-Page: 1
+X-Prev-Page:
+Referrer-Policy: strict-origin-when-cross-origin
+Ratelimit-Resettime: Thu, 30 Nov 2023 08:24:54 GMT
+Cf-Cache-Status: MISS
+X-Total: 1
+X-Total-Pages: 1
+Strict-Transport-Security: max-age=31536000
+Content-Type: application/json
+X-Frame-Options: SAMEORIGIN
+Ratelimit-Limit: 2000
+Gitlab-Sv: localhost
+Set-Cookie: _cfuvid=YDWTZ5VoSuLBDZgKsBnXMyYxz.0rHJ9TBYXv5zBj24Q-1701332634294-0-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+Cache-Control: max-age=0, private, must-revalidate
+X-Content-Type-Options: nosniff
+X-Per-Page: 10
+X-Runtime: 0.179458
+
+[{"id":11201348,"iid":2,"project_id":6590996,"title":"vpn unlimited errors","description":"updated version to 2.8.0, build and tried running `vpnu-arch`:\n\n```\nvpn-unlimited: /usr/lib/libcurl.so.3: no version information available (required by /usr/lib/libvpnu_rpc.so.1)\nvpn-unlimited: /usr/lib/libssl.so.1.0.0: no version information available (required by /usr/lib/libvpnu_enc.so.1)\nvpn-unlimited: symbol lookup error: /usr/lib/libvpnu_rpc.so.1: undefined symbol: _ZNK4Json5Value8asStringEv\n```\n","state":"closed","created_at":"2016-03-26T16:41:12.000Z","updated_at":"2016-03-27T12:19:27.000Z","closed_at":null,"closed_by":null,"labels":[],"milestone":null,"assignees":[],"author":{"id":10273,"username":"brauliobo","name":"Bráulio Bhavamitra","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/cd3fcb7a417c8acb989fc320b604a2a8?s=80\u0026d=identicon","web_url":"https://gitlab.com/brauliobo"},"type":"ISSUE","assignee":null,"user_notes_count":1,"merge_requests_count":0,"upvotes":0,"downvotes":0,"due_date":null,"confidential":false,"discussion_locked":null,"issue_type":"issue","web_url":"https://gitlab.com/troyengel/archbuild/-/issues/2","time_stats":{"time_estimate":0,"total_time_spent":0,"human_time_estimate":null,"human_total_time_spent":null},"task_completion_status":{"count":0,"completed_count":0},"blocking_issues_count":0,"has_tasks":true,"task_status":"0 of 0 checklist items completed","_links":{"self":"https://gitlab.com/api/v4/projects/6590996/issues/2","notes":"https://gitlab.com/api/v4/projects/6590996/issues/2/notes","award_emoji":"https://gitlab.com/api/v4/projects/6590996/issues/2/award_emoji","project":"https://gitlab.com/api/v4/projects/6590996","closed_as_duplicate_of":null},"references":{"short":"#2","relative":"#2","full":"troyengel/archbuild#2"},"severity":"UNKNOWN","moved_to_id":null,"service_desk_reply_to":null}] \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fmerge_requests%2F1 b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fmerge_requests%2F1
new file mode 100644
index 0000000..18e8a85
--- /dev/null
+++ b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fmerge_requests%2F1
@@ -0,0 +1,22 @@
+Ratelimit-Observed: 7
+Set-Cookie: _cfuvid=_b9GQEo3CBPMs9QmGE89dBdOmbSTfnYjZlzValULQPs-1701332635000-0-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+Strict-Transport-Security: max-age=31536000
+Ratelimit-Resettime: Thu, 30 Nov 2023 08:24:54 GMT
+Gitlab-Lb: haproxy-main-50-lb-gprd
+Gitlab-Sv: localhost
+X-Gitlab-Meta: {"correlation_id":"da44cd0303a4e62cc52ed8de3b2adf14","version":"1"}
+Referrer-Policy: strict-origin-when-cross-origin
+Ratelimit-Remaining: 1993
+Etag: W/"f6299e7e884cb8df8109256c086eb4e7"
+X-Runtime: 0.107573
+Content-Type: application/json
+Ratelimit-Reset: 1701332694
+X-Frame-Options: SAMEORIGIN
+Cache-Control: max-age=0, private, must-revalidate
+X-Content-Type-Options: nosniff
+Ratelimit-Limit: 2000
+Cf-Cache-Status: MISS
+Content-Security-Policy: default-src 'none'
+Vary: Origin, Accept-Encoding
+
+{"id":10518914,"iid":1,"project_id":6590996,"title":"Review","description":"*Created by: cgtx*\n\n### remove patch from makedepends\n- patch is in base-devel\n- The group base-devel is assumed to be already installed when building with makepkg. Members of \"base-devel\" should not be included in makedepends arrays.\n- https://wiki.archlinux.org/index.php/Pkgbuild#makedepends\n### remove python2 from makedepends\n- python2 is a dependency of python2-setuptools. It is redundant to list it again.\n- You do not need to list packages that your software depends on if other packages your software depends on already have those packages listed in their dependency.\n- https://wiki.archlinux.org/index.php/Pkgbuild#depends\n### more simple find/delete command\n- just because\n","state":"merged","created_at":"2014-12-12T15:01:32.000Z","updated_at":"2014-12-12T15:28:38.000Z","merged_by":null,"merge_user":null,"merged_at":null,"closed_by":null,"closed_at":null,"target_branch":"master","source_branch":"cgtx:review","user_notes_count":1,"upvotes":0,"downvotes":0,"author":{"id":1215848,"username":"troyengel","name":"Troy Engel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/b226c267929f1bcfcc446e75a025591c?s=80\u0026d=identicon","web_url":"https://gitlab.com/troyengel"},"assignees":[],"assignee":null,"reviewers":[],"source_project_id":6590996,"target_project_id":6590996,"labels":[],"draft":false,"work_in_progress":false,"milestone":null,"merge_when_pipeline_succeeds":false,"merge_status":"cannot_be_merged","detailed_merge_status":"not_open","sha":"9006fee398299beed8f5d5086f8e6008ffc02280","merge_commit_sha":null,"squash_commit_sha":null,"discussion_locked":null,"should_remove_source_branch":null,"force_remove_source_branch":null,"prepared_at":"2014-12-12T15:01:32.000Z","reference":"!1","references":{"short":"!1","relative":"!1","full":"troyengel/archbuild!1"},"web_url":"https://gitlab.com/troyengel/archbuild/-/merge_requests/1","time_stats":{"time_estimate":0,"total_time_spent":0,"human_time_estimate":null,"human_total_time_spent":null},"squash":false,"squash_on_merge":false,"task_completion_status":{"count":0,"completed_count":0},"has_conflicts":true,"blocking_discussions_resolved":true,"approvals_before_merge":null,"subscribed":false,"changes_count":"1","latest_build_started_at":null,"latest_build_finished_at":null,"first_deployed_to_production_at":null,"pipeline":null,"head_pipeline":null,"diff_refs":{"base_sha":"6edcf8fc09f6c44213c892f5108d34a5255a47e1","head_sha":"9006fee398299beed8f5d5086f8e6008ffc02280","start_sha":"6edcf8fc09f6c44213c892f5108d34a5255a47e1"},"merge_error":null,"first_contribution":false,"user":{"can_merge":false}} \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=1&per_page=10 b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=1&per_page=10
new file mode 100644
index 0000000..d6f8dd4
--- /dev/null
+++ b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=1&per_page=10
@@ -0,0 +1,31 @@
+Link: <https://gitlab.com/api/v4/projects/6590996/merge_requests/1/award_emoji?id=6590996&merge_request_iid=1&page=1&per_page=10>; rel="first", <https://gitlab.com/api/v4/projects/6590996/merge_requests/1/award_emoji?id=6590996&merge_request_iid=1&page=1&per_page=10>; rel="last"
+Set-Cookie: _cfuvid=qK29tijoyp0AdVoHf9Lqjc8Y28h4jplJDW9hOFLfq28-1701332635229-0-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+Cache-Control: max-age=0, private, must-revalidate
+Etag: W/"4f53cda18c2baa0c0354bb5f9a3ecbe5"
+Ratelimit-Observed: 8
+Gitlab-Sv: localhost
+Content-Length: 2
+Gitlab-Lb: haproxy-main-16-lb-gprd
+X-Total: 0
+Ratelimit-Remaining: 1992
+Ratelimit-Reset: 1701332695
+Ratelimit-Limit: 2000
+Vary: Origin, Accept-Encoding
+X-Frame-Options: SAMEORIGIN
+Content-Type: application/json
+X-Content-Type-Options: nosniff
+X-Next-Page:
+X-Page: 1
+Strict-Transport-Security: max-age=31536000
+Accept-Ranges: bytes
+Content-Security-Policy: default-src 'none'
+X-Per-Page: 10
+X-Total-Pages: 1
+Referrer-Policy: strict-origin-when-cross-origin
+Ratelimit-Resettime: Thu, 30 Nov 2023 08:24:55 GMT
+Cf-Cache-Status: MISS
+X-Gitlab-Meta: {"correlation_id":"eb59d63fed23cdbec69308570cc49c3e","version":"1"}
+X-Runtime: 0.065972
+X-Prev-Page:
+
+[] \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fmerge_requests%3Fpage=1&per_page=10&view=simple b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fmerge_requests%3Fpage=1&per_page=10&view=simple
new file mode 100644
index 0000000..5339392
--- /dev/null
+++ b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fmerge_requests%3Fpage=1&per_page=10&view=simple
@@ -0,0 +1,29 @@
+Vary: Origin, Accept-Encoding
+Strict-Transport-Security: max-age=31536000
+Gitlab-Sv: localhost
+X-Content-Type-Options: nosniff
+X-Prev-Page:
+Ratelimit-Reset: 1701332694
+Cache-Control: max-age=0, private, must-revalidate
+Ratelimit-Limit: 2000
+Referrer-Policy: strict-origin-when-cross-origin
+Ratelimit-Observed: 6
+Ratelimit-Resettime: Thu, 30 Nov 2023 08:24:54 GMT
+Cf-Cache-Status: MISS
+Content-Type: application/json
+Content-Security-Policy: default-src 'none'
+Etag: W/"1a50811aa3cccb2e6a404a976422a83a"
+X-Total: 1
+Ratelimit-Remaining: 1994
+Set-Cookie: _cfuvid=u.zumTkG1ayCnh_OwrT9Q1Fl3MXV9Gh98W.ma4WN2Xs-1701332634745-0-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+Link: <https://gitlab.com/api/v4/projects/6590996/merge_requests?id=6590996&order_by=created_at&page=1&per_page=10&sort=desc&state=all&view=simple&with_labels_details=false&with_merge_status_recheck=false>; rel="first", <https://gitlab.com/api/v4/projects/6590996/merge_requests?id=6590996&order_by=created_at&page=1&per_page=10&sort=desc&state=all&view=simple&with_labels_details=false&with_merge_status_recheck=false>; rel="last"
+X-Frame-Options: SAMEORIGIN
+X-Page: 1
+X-Total-Pages: 1
+Gitlab-Lb: haproxy-main-05-lb-gprd
+X-Gitlab-Meta: {"correlation_id":"907f9e1f94131ea7a6d1405100a8cc4b","version":"1"}
+X-Next-Page:
+X-Per-Page: 10
+X-Runtime: 0.078413
+
+[{"id":10518914,"iid":1,"project_id":6590996,"title":"Review","description":"*Created by: cgtx*\n\n### remove patch from makedepends\n- patch is in base-devel\n- The group base-devel is assumed to be already installed when building with makepkg. Members of \"base-devel\" should not be included in makedepends arrays.\n- https://wiki.archlinux.org/index.php/Pkgbuild#makedepends\n### remove python2 from makedepends\n- python2 is a dependency of python2-setuptools. It is redundant to list it again.\n- You do not need to list packages that your software depends on if other packages your software depends on already have those packages listed in their dependency.\n- https://wiki.archlinux.org/index.php/Pkgbuild#depends\n### more simple find/delete command\n- just because\n","state":"merged","created_at":"2014-12-12T15:01:32.000Z","updated_at":"2014-12-12T15:28:38.000Z","web_url":"https://gitlab.com/troyengel/archbuild/-/merge_requests/1"}] \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2Ftroyengel%252Farchbuild b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2Ftroyengel%252Farchbuild
new file mode 100644
index 0000000..a8c2882
--- /dev/null
+++ b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2Ftroyengel%252Farchbuild
@@ -0,0 +1,22 @@
+Ratelimit-Resettime: Thu, 30 Nov 2023 08:24:53 GMT
+Gitlab-Lb: haproxy-main-41-lb-gprd
+Cache-Control: max-age=0, private, must-revalidate
+Referrer-Policy: strict-origin-when-cross-origin
+Cf-Cache-Status: MISS
+X-Content-Type-Options: nosniff
+Set-Cookie: _cfuvid=r78xThY2IPR6QvHnea1t_L7DbvuQp4.HWOiG1cKTWUg-1701332633720-0-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+Ratelimit-Limit: 2000
+Strict-Transport-Security: max-age=31536000
+Vary: Origin, Accept-Encoding
+X-Gitlab-Meta: {"correlation_id":"4c3e0f8b5858454b6e138ecae9902a8d","version":"1"}
+X-Runtime: 0.097047
+Ratelimit-Observed: 2
+Ratelimit-Remaining: 1998
+X-Frame-Options: SAMEORIGIN
+Content-Security-Policy: default-src 'none'
+Etag: W/"03ce4f6ce1c1e8c5a31df8a44cf2fbdd"
+Content-Type: application/json
+Gitlab-Sv: localhost
+Ratelimit-Reset: 1701332693
+
+{"id":6590996,"description":"Arch packaging and build files","name":"archbuild","name_with_namespace":"Troy Engel / archbuild","path":"archbuild","path_with_namespace":"troyengel/archbuild","created_at":"2018-06-03T22:53:17.388Z","default_branch":"master","tag_list":[],"topics":[],"ssh_url_to_repo":"git@gitlab.com:troyengel/archbuild.git","http_url_to_repo":"https://gitlab.com/troyengel/archbuild.git","web_url":"https://gitlab.com/troyengel/archbuild","readme_url":"https://gitlab.com/troyengel/archbuild/-/blob/master/README.md","forks_count":0,"avatar_url":null,"star_count":0,"last_activity_at":"2020-12-13T18:09:32.071Z","namespace":{"id":1452515,"name":"Troy Engel","path":"troyengel","kind":"user","full_path":"troyengel","parent_id":null,"avatar_url":"https://secure.gravatar.com/avatar/b226c267929f1bcfcc446e75a025591c?s=80\u0026d=identicon","web_url":"https://gitlab.com/troyengel"},"container_registry_image_prefix":"registry.gitlab.com/troyengel/archbuild","_links":{"self":"https://gitlab.com/api/v4/projects/6590996","issues":"https://gitlab.com/api/v4/projects/6590996/issues","merge_requests":"https://gitlab.com/api/v4/projects/6590996/merge_requests","repo_branches":"https://gitlab.com/api/v4/projects/6590996/repository/branches","labels":"https://gitlab.com/api/v4/projects/6590996/labels","events":"https://gitlab.com/api/v4/projects/6590996/events","members":"https://gitlab.com/api/v4/projects/6590996/members","cluster_agents":"https://gitlab.com/api/v4/projects/6590996/cluster_agents"},"packages_enabled":null,"empty_repo":false,"archived":true,"visibility":"public","owner":{"id":1215848,"username":"troyengel","name":"Troy Engel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/b226c267929f1bcfcc446e75a025591c?s=80\u0026d=identicon","web_url":"https://gitlab.com/troyengel"},"resolve_outdated_diff_discussions":false,"issues_enabled":true,"merge_requests_enabled":true,"wiki_enabled":true,"jobs_enabled":true,"snippets_enabled":true,"container_registry_enabled":true,"service_desk_enabled":true,"can_create_merge_request_in":false,"issues_access_level":"enabled","repository_access_level":"enabled","merge_requests_access_level":"enabled","forking_access_level":"enabled","wiki_access_level":"enabled","builds_access_level":"enabled","snippets_access_level":"enabled","pages_access_level":"enabled","analytics_access_level":"enabled","container_registry_access_level":"enabled","security_and_compliance_access_level":"private","releases_access_level":"enabled","environments_access_level":"enabled","feature_flags_access_level":"enabled","infrastructure_access_level":"enabled","monitor_access_level":"enabled","model_experiments_access_level":"enabled","emails_disabled":false,"emails_enabled":true,"shared_runners_enabled":true,"lfs_enabled":false,"creator_id":1215848,"import_status":"finished","open_issues_count":0,"description_html":"\u003cp data-sourcepos=\"1:1-1:30\" dir=\"auto\"\u003eArch packaging and build 
files\u003c/p\u003e","updated_at":"2022-07-13T21:32:12.624Z","ci_config_path":null,"public_jobs":true,"shared_with_groups":[],"only_allow_merge_if_pipeline_succeeds":false,"allow_merge_on_skipped_pipeline":null,"request_access_enabled":false,"only_allow_merge_if_all_discussions_are_resolved":false,"remove_source_branch_after_merge":null,"printing_merge_request_link_enabled":true,"merge_method":"merge","squash_option":"default_off","enforce_auth_checks_on_uploads":true,"suggestion_commit_message":null,"merge_commit_template":null,"squash_commit_template":null,"issue_branch_template":null,"autoclose_referenced_issues":true,"external_authorization_classification_label":"","requirements_enabled":false,"requirements_access_level":"enabled","security_and_compliance_enabled":false,"compliance_frameworks":[],"permissions":{"project_access":null,"group_access":null}} \ No newline at end of file
diff --git a/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fversion b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fversion
new file mode 100644
index 0000000..eb6df2f
--- /dev/null
+++ b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fversion
@@ -0,0 +1,22 @@
+Ratelimit-Observed: 1
+X-Gitlab-Meta: {"correlation_id":"aa75720bd9c597c7f2f886a4042d1f80","version":"1"}
+Etag: W/"4e5c0a031c3aacb6ba0a3c19e67d7592"
+X-Content-Type-Options: nosniff
+Ratelimit-Limit: 2000
+Ratelimit-Resettime: Thu, 30 Nov 2023 08:24:53 GMT
+X-Runtime: 0.039899
+Ratelimit-Remaining: 1999
+Set-Cookie: _cfuvid=7OAEitQ3J0BOxrXk2pMBApFg1KFnz5aBVqOY7mHwLRk-1701332633452-0-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+Content-Security-Policy: default-src 'none'
+Gitlab-Sv: localhost
+Cf-Cache-Status: MISS
+Vary: Origin, Accept-Encoding
+X-Frame-Options: SAMEORIGIN
+Cache-Control: max-age=0, private, must-revalidate
+Strict-Transport-Security: max-age=31536000
+Referrer-Policy: strict-origin-when-cross-origin
+Ratelimit-Reset: 1701332693
+Gitlab-Lb: haproxy-main-39-lb-gprd
+Content-Type: application/json
+
+{"version":"16.7.0-pre","revision":"acd848a9228","kas":{"enabled":true,"externalUrl":"wss://kas.gitlab.com","version":"v16.7.0-rc2"},"enterprise":true} \ No newline at end of file
diff --git a/services/migrations/update.go b/services/migrations/update.go
new file mode 100644
index 0000000..4a49206
--- /dev/null
+++ b/services/migrations/update.go
@@ -0,0 +1,77 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/services/externalaccount"
+)
+
+// UpdateMigrationPosterID updates the poster IDs of issues and comments in all migrated repositories
+func UpdateMigrationPosterID(ctx context.Context) error {
+ for _, gitService := range structs.SupportedFullGitService {
+ select {
+ case <-ctx.Done():
+ log.Warn("UpdateMigrationPosterID aborted before %s", gitService.Name())
+ return db.ErrCancelledf("during UpdateMigrationPosterID before %s", gitService.Name())
+ default:
+ }
+ if err := updateMigrationPosterIDByGitService(ctx, gitService); err != nil {
+ log.Error("updateMigrationPosterIDByGitService failed: %v", err)
+ }
+ }
+ return nil
+}
+
+func updateMigrationPosterIDByGitService(ctx context.Context, tp structs.GitServiceType) error {
+ provider := tp.Name()
+ if len(provider) == 0 {
+ return nil
+ }
+
+ const batchSize = 100
+ for page := 0; ; page++ {
+ select {
+ case <-ctx.Done():
+ log.Warn("UpdateMigrationPosterIDByGitService(%s) cancelled", tp.Name())
+ return nil
+ default:
+ }
+
+ users, err := db.Find[user_model.ExternalLoginUser](ctx, user_model.FindExternalUserOptions{
+ ListOptions: db.ListOptions{
+ PageSize: batchSize,
+ Page: page,
+ },
+ Provider: provider,
+ OrderBy: "login_source_id ASC, external_id ASC",
+ })
+ if err != nil {
+ return err
+ }
+
+ for _, user := range users {
+ select {
+ case <-ctx.Done():
+ log.Warn("UpdateMigrationPosterIDByGitService(%s) cancelled", tp.Name())
+ return nil
+ default:
+ }
+ externalUserID := user.ExternalID
+ if err := externalaccount.UpdateMigrationsByType(ctx, tp, externalUserID, user.UserID); err != nil {
+ log.Error("UpdateMigrationsByType type %s external user id %v to local user id %v failed: %v", tp.Name(), user.ExternalID, user.UserID, err)
+ }
+ }
+
+ if len(users) < batchSize {
+ break
+ }
+ }
+ return nil
+}
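+
+// Usage sketch (illustrative; not part of this change): the updater is meant
+// to be driven by a long-lived context, e.g. from an init or cron hook:
+//
+//  if err := UpdateMigrationPosterID(ctx); err != nil {
+//      log.Error("UpdateMigrationPosterID: %v", err)
+//  }
+//
+// The pagination above always terminates: a page shorter than batchSize (100)
+// can only be the last page for the given provider.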
diff --git a/services/mirror/mirror.go b/services/mirror/mirror.go
new file mode 100644
index 0000000..bc2d671
--- /dev/null
+++ b/services/mirror/mirror.go
@@ -0,0 +1,146 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package mirror
+
+import (
+ "context"
+ "fmt"
+
+ quota_model "code.gitea.io/gitea/models/quota"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/queue"
+ "code.gitea.io/gitea/modules/setting"
+)
+
+// doMirrorSync dispatches a queued sync request to the matching pull- or push-mirror handler
+func doMirrorSync(ctx context.Context, req *SyncRequest) {
+ if req.ReferenceID == 0 {
+ log.Warn("Skipping mirror sync request, no mirror ID was specified")
+ return
+ }
+ switch req.Type {
+ case PushMirrorType:
+ _ = SyncPushMirror(ctx, req.ReferenceID)
+ case PullMirrorType:
+ _ = SyncPullMirror(ctx, req.ReferenceID)
+ default:
+ log.Error("Unknown Request type in queue: %v for MirrorID[%d]", req.Type, req.ReferenceID)
+ }
+}
+
+var errLimit = fmt.Errorf("reached limit")
+
+// Update checks and updates mirror repositories.
+func Update(ctx context.Context, pullLimit, pushLimit int) error {
+ if !setting.Mirror.Enabled {
+ log.Warn("Mirror feature disabled, but cron job enabled: skip update")
+ return nil
+ }
+ log.Trace("Doing: Update")
+
+ handler := func(bean any) error {
+ var repo *repo_model.Repository
+ var mirrorType SyncType
+ var referenceID int64
+
+ if m, ok := bean.(*repo_model.Mirror); ok {
+ if m.GetRepository(ctx) == nil {
+ log.Error("Disconnected mirror found: %d", m.ID)
+ return nil
+ }
+ repo = m.Repo
+ mirrorType = PullMirrorType
+ referenceID = m.RepoID
+ } else if m, ok := bean.(*repo_model.PushMirror); ok {
+ if m.GetRepository(ctx) == nil {
+ log.Error("Disconnected push-mirror found: %d", m.ID)
+ return nil
+ }
+ repo = m.Repo
+ mirrorType = PushMirrorType
+ referenceID = m.ID
+ } else {
+ log.Error("Unknown bean: %v", bean)
+ return nil
+ }
+
+ // Check we've not been cancelled
+ select {
+ case <-ctx.Done():
+ return fmt.Errorf("aborted")
+ default:
+ }
+
+ // Check if the repo's owner is over quota, for pull mirrors
+ if mirrorType == PullMirrorType {
+ ok, err := quota_model.EvaluateForUser(ctx, repo.OwnerID, quota_model.LimitSubjectSizeReposAll)
+ if err != nil {
+ log.Error("quota_model.EvaluateForUser: %v", err)
+ return err
+ }
+ if !ok {
+ log.Trace("Owner quota exceeded for %-v, not syncing", repo)
+ return nil
+ }
+ }
+
+ // Push to the Queue
+ if err := PushToQueue(mirrorType, referenceID); err != nil {
+ if err == queue.ErrAlreadyInQueue {
+ if mirrorType == PushMirrorType {
+ log.Trace("PushMirrors for %-v already queued for sync", repo)
+ } else {
+ log.Trace("PullMirrors for %-v already queued for sync", repo)
+ }
+ return nil
+ }
+ return err
+ }
+ return nil
+ }
+
+ pullMirrorsRequested := 0
+ if pullLimit != 0 {
+ if err := repo_model.MirrorsIterate(ctx, pullLimit, func(_ int, bean any) error {
+ if err := handler(bean); err != nil {
+ return err
+ }
+ pullMirrorsRequested++
+ return nil
+ }); err != nil && err != errLimit {
+ log.Error("MirrorsIterate: %v", err)
+ return err
+ }
+ }
+
+ pushMirrorsRequested := 0
+ if pushLimit != 0 {
+ if err := repo_model.PushMirrorsIterate(ctx, pushLimit, func(idx int, bean any) error {
+ if err := handler(bean); err != nil {
+ return err
+ }
+ pushMirrorsRequested++
+ return nil
+ }); err != nil && err != errLimit {
+ log.Error("PushMirrorsIterate: %v", err)
+ return err
+ }
+ }
+ log.Trace("Finished: Update: %d pull mirrors and %d push mirrors queued", pullMirrorsRequested, pushMirrorsRequested)
+ return nil
+}
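+
+// Invocation sketch (illustrative; not part of this change), as wired from a
+// cron task: the limits bound how many sync requests a single run may
+// enqueue, and a limit of 0 skips that mirror kind entirely:
+//
+//  // queue up to 50 pull-mirror and 50 push-mirror syncs
+//  if err := Update(ctx, 50, 50); err != nil {
+//      log.Error("mirror.Update: %v", err)
+//  }
+//
+// The `err != errLimit` checks filter out the sentinel used to stop
+// iteration early, so reaching a configured cap is not reported as a failure.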
+
+func queueHandler(items ...*SyncRequest) []*SyncRequest {
+ for _, req := range items {
+ doMirrorSync(graceful.GetManager().ShutdownContext(), req)
+ }
+ return nil
+}
+
+// InitSyncMirrors initializes a goroutine to sync the mirrors
+func InitSyncMirrors() {
+ StartSyncMirrors(queueHandler)
+}
diff --git a/services/mirror/mirror_pull.go b/services/mirror/mirror_pull.go
new file mode 100644
index 0000000..9f7ffb2
--- /dev/null
+++ b/services/mirror/mirror_pull.go
@@ -0,0 +1,628 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package mirror
+
+import (
+ "context"
+ "fmt"
+ "strings"
+ "time"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ system_model "code.gitea.io/gitea/models/system"
+ "code.gitea.io/gitea/modules/cache"
+ "code.gitea.io/gitea/modules/git"
+ giturl "code.gitea.io/gitea/modules/git/url"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/process"
+ "code.gitea.io/gitea/modules/proxy"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+// gitShortEmptySha is the short form of the empty Git SHA, used to mark newly created or deleted references
+const gitShortEmptySha = "0000000"
+
+// UpdateAddress writes the new address to the Git repository and the database
+func UpdateAddress(ctx context.Context, m *repo_model.Mirror, addr string) error {
+ u, err := giturl.Parse(addr)
+ if err != nil {
+ return fmt.Errorf("invalid addr: %v", err)
+ }
+
+ remoteName := m.GetRemoteName()
+ repoPath := m.GetRepository(ctx).RepoPath()
+ // Remove old remote
+ _, _, err = git.NewCommand(ctx, "remote", "rm").AddDynamicArguments(remoteName).RunStdString(&git.RunOpts{Dir: repoPath})
+ if err != nil && !strings.HasPrefix(err.Error(), "exit status 128 - fatal: No such remote ") {
+ return err
+ }
+
+ cmd := git.NewCommand(ctx, "remote", "add").AddDynamicArguments(remoteName).AddArguments("--mirror=fetch").AddDynamicArguments(addr)
+ if strings.Contains(addr, "://") && strings.Contains(addr, "@") {
+ cmd.SetDescription(fmt.Sprintf("remote add %s --mirror=fetch %s [repo_path: %s]", remoteName, util.SanitizeCredentialURLs(addr), repoPath))
+ } else {
+ cmd.SetDescription(fmt.Sprintf("remote add %s --mirror=fetch %s [repo_path: %s]", remoteName, addr, repoPath))
+ }
+ _, _, err = cmd.RunStdString(&git.RunOpts{Dir: repoPath})
+ if err != nil && !strings.HasPrefix(err.Error(), "exit status 128 - fatal: No such remote ") {
+ return err
+ }
+
+ if m.Repo.HasWiki() {
+ wikiPath := m.Repo.WikiPath()
+ wikiRemotePath := repo_module.WikiRemoteURL(ctx, addr)
+ // Remove old remote of wiki
+ _, _, err = git.NewCommand(ctx, "remote", "rm").AddDynamicArguments(remoteName).RunStdString(&git.RunOpts{Dir: wikiPath})
+ if err != nil && !strings.HasPrefix(err.Error(), "exit status 128 - fatal: No such remote ") {
+ return err
+ }
+
+ cmd = git.NewCommand(ctx, "remote", "add").AddDynamicArguments(remoteName).AddArguments("--mirror=fetch").AddDynamicArguments(wikiRemotePath)
+ if strings.Contains(wikiRemotePath, "://") && strings.Contains(wikiRemotePath, "@") {
+ cmd.SetDescription(fmt.Sprintf("remote add %s --mirror=fetch %s [repo_path: %s]", remoteName, util.SanitizeCredentialURLs(wikiRemotePath), wikiPath))
+ } else {
+ cmd.SetDescription(fmt.Sprintf("remote add %s --mirror=fetch %s [repo_path: %s]", remoteName, wikiRemotePath, wikiPath))
+ }
+ _, _, err = cmd.RunStdString(&git.RunOpts{Dir: wikiPath})
+ if err != nil && !strings.HasPrefix(err.Error(), "exit status 128 - fatal: No such remote ") {
+ return err
+ }
+ }
+
+ // erase authentication before storing in database
+ u.User = nil
+ m.Repo.OriginalURL = u.String()
+ return repo_model.UpdateRepositoryCols(ctx, m.Repo, "original_url")
+}
+
+// mirrorSyncResult contains information about an updated reference.
+// If oldCommitID is "0000000", the reference was newly created and newCommitID is empty.
+// If newCommitID is "0000000", the reference was deleted and oldCommitID is empty.
+type mirrorSyncResult struct {
+ refName git.RefName
+ oldCommitID string
+ newCommitID string
+}
+
+// parseRemoteUpdateOutput detects create, update and delete operations of references from upstream.
+// possible output example:
+/*
+// * [new tag] v0.1.8 -> v0.1.8
+// * [new branch] master -> origin/master
+// - [deleted] (none) -> origin/test // delete a branch
+// - [deleted] (none) -> 1 // delete a tag
+// 957a993..a87ba5f test -> origin/test
+// + f895a1e...957a993 test -> origin/test (forced update)
+*/
+// TODO: return whether it's a force update
+func parseRemoteUpdateOutput(output, remoteName string) []*mirrorSyncResult {
+ results := make([]*mirrorSyncResult, 0, 3)
+ lines := strings.Split(output, "\n")
+ for i := range lines {
+ // Make sure the reference name is present before continuing
+ idx := strings.Index(lines[i], "-> ")
+ if idx == -1 {
+ continue
+ }
+
+ refName := strings.TrimSpace(lines[i][idx+3:])
+
+ switch {
+ case strings.HasPrefix(lines[i], " * [new tag]"): // new tag
+ results = append(results, &mirrorSyncResult{
+ refName: git.RefNameFromTag(refName),
+ oldCommitID: gitShortEmptySha,
+ })
+ case strings.HasPrefix(lines[i], " * [new branch]"): // new branch
+ refName = strings.TrimPrefix(refName, remoteName+"/")
+ results = append(results, &mirrorSyncResult{
+ refName: git.RefNameFromBranch(refName),
+ oldCommitID: gitShortEmptySha,
+ })
+ case strings.HasPrefix(lines[i], " - "): // Delete reference
+ isTag := !strings.HasPrefix(refName, remoteName+"/")
+ var refFullName git.RefName
+ if isTag {
+ refFullName = git.RefNameFromTag(refName)
+ } else {
+ refFullName = git.RefNameFromBranch(strings.TrimPrefix(refName, remoteName+"/"))
+ }
+ results = append(results, &mirrorSyncResult{
+ refName: refFullName,
+ newCommitID: gitShortEmptySha,
+ })
+ case strings.HasPrefix(lines[i], " + "): // Force update
+ if idx := strings.Index(refName, " "); idx > -1 {
+ refName = refName[:idx]
+ }
+ delimIdx := strings.Index(lines[i][3:], " ")
+ if delimIdx == -1 {
+ log.Error("SHA delimiter not found: %q", lines[i])
+ continue
+ }
+ shas := strings.Split(lines[i][3:delimIdx+3], "...")
+ if len(shas) != 2 {
+ log.Error("Expect two SHAs but not what found: %q", lines[i])
+ continue
+ }
+ results = append(results, &mirrorSyncResult{
+ refName: git.RefNameFromBranch(strings.TrimPrefix(refName, remoteName+"/")),
+ oldCommitID: shas[0],
+ newCommitID: shas[1],
+ })
+ case strings.HasPrefix(lines[i], " "): // New commits of a reference
+ delimIdx := strings.Index(lines[i][3:], " ")
+ if delimIdx == -1 {
+ log.Error("SHA delimiter not found: %q", lines[i])
+ continue
+ }
+ shas := strings.Split(lines[i][3:delimIdx+3], "..")
+ if len(shas) != 2 {
+ log.Error("Expect two SHAs but not what found: %q", lines[i])
+ continue
+ }
+ results = append(results, &mirrorSyncResult{
+ refName: git.RefNameFromBranch(strings.TrimPrefix(refName, remoteName+"/")),
+ oldCommitID: shas[0],
+ newCommitID: shas[1],
+ })
+
+ default:
+ log.Warn("parseRemoteUpdateOutput: unexpected update line %q", lines[i])
+ }
+ }
+ return results
+}
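+
+// Parsing sketch (illustrative; not part of this change), assuming the
+// remote is named "origin" and using the sample fetch output above:
+//
+//  results := parseRemoteUpdateOutput("   957a993..a87ba5f  test       -> origin/test", "origin")
+//  // => one result: refName "refs/heads/test",
+//  //    oldCommitID "957a993", newCommitID "a87ba5f"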
+
+func pruneBrokenReferences(ctx context.Context,
+ m *repo_model.Mirror,
+ repoPath string,
+ timeout time.Duration,
+ stdoutBuilder, stderrBuilder *strings.Builder,
+ isWiki bool,
+) error {
+ wiki := ""
+ if isWiki {
+ wiki = "Wiki "
+ }
+
+ stderrBuilder.Reset()
+ stdoutBuilder.Reset()
+ pruneErr := git.NewCommand(ctx, "remote", "prune").AddDynamicArguments(m.GetRemoteName()).
+ SetDescription(fmt.Sprintf("Mirror.runSync %ssPrune references: %s ", wiki, m.Repo.FullName())).
+ Run(&git.RunOpts{
+ Timeout: timeout,
+ Dir: repoPath,
+ Stdout: stdoutBuilder,
+ Stderr: stderrBuilder,
+ })
+ if pruneErr != nil {
+ stdout := stdoutBuilder.String()
+ stderr := stderrBuilder.String()
+
+ // sanitize the output, since it may contain the remote address, which may
+ // contain a password
+ stderrMessage := util.SanitizeCredentialURLs(stderr)
+ stdoutMessage := util.SanitizeCredentialURLs(stdout)
+
+ log.Error("Failed to prune mirror repository %s%-v references:\nStdout: %s\nStderr: %s\nErr: %v", wiki, m.Repo, stdoutMessage, stderrMessage, pruneErr)
+ desc := fmt.Sprintf("Failed to prune mirror repository %s'%s' references: %s", wiki, repoPath, stderrMessage)
+ if err := system_model.CreateRepositoryNotice(desc); err != nil {
+ log.Error("CreateRepositoryNotice: %v", err)
+ }
+ // callers only reattempt the sync when this prune succeeded (pruneErr == nil)
+ }
+ return pruneErr
+}
+
+// runSync fetches the remote and returns the parsed sync results; the boolean is true if the sync finished without error.
+func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bool) {
+ repoPath := m.Repo.RepoPath()
+ wikiPath := m.Repo.WikiPath()
+ timeout := time.Duration(setting.Git.Timeout.Mirror) * time.Second
+
+ log.Trace("SyncMirrors [repo: %-v]: running git remote update...", m.Repo)
+
+ // use fetch instead of remote update because git fetch supports --tags but remote update doesn't
+ cmd := git.NewCommand(ctx, "fetch")
+ if m.EnablePrune {
+ cmd.AddArguments("--prune")
+ }
+ cmd.AddArguments("--tags").AddDynamicArguments(m.GetRemoteName())
+
+ remoteURL, remoteErr := git.GetRemoteURL(ctx, repoPath, m.GetRemoteName())
+ if remoteErr != nil {
+ log.Error("SyncMirrors [repo: %-v]: GetRemoteAddress Error %v", m.Repo, remoteErr)
+ return nil, false
+ }
+
+ envs := proxy.EnvWithProxy(remoteURL.URL)
+
+ stdoutBuilder := strings.Builder{}
+ stderrBuilder := strings.Builder{}
+ if err := cmd.
+ SetDescription(fmt.Sprintf("Mirror.runSync: %s", m.Repo.FullName())).
+ Run(&git.RunOpts{
+ Timeout: timeout,
+ Dir: repoPath,
+ Env: envs,
+ Stdout: &stdoutBuilder,
+ Stderr: &stderrBuilder,
+ }); err != nil {
+ stdout := stdoutBuilder.String()
+ stderr := stderrBuilder.String()
+
+ // sanitize the output, since it may contain the remote address, which may contain a password
+ stderrMessage := util.SanitizeCredentialURLs(stderr)
+ stdoutMessage := util.SanitizeCredentialURLs(stdout)
+
+ // Now check whether the error was caused by a reference that cannot be resolved because it is broken
+ if strings.Contains(stderr, "unable to resolve reference") && strings.Contains(stderr, "reference broken") {
+ log.Warn("SyncMirrors [repo: %-v]: failed to update mirror repository due to broken references:\nStdout: %s\nStderr: %s\nErr: %v\nAttempting Prune", m.Repo, stdoutMessage, stderrMessage, err)
+ err = nil
+
+ // Attempt prune
+ pruneErr := pruneBrokenReferences(ctx, m, repoPath, timeout, &stdoutBuilder, &stderrBuilder, false)
+ if pruneErr == nil {
+ // Successful prune - reattempt mirror
+ stderrBuilder.Reset()
+ stdoutBuilder.Reset()
+ if err = cmd.
+ SetDescription(fmt.Sprintf("Mirror.runSync: %s", m.Repo.FullName())).
+ Run(&git.RunOpts{
+ Timeout: timeout,
+ Dir: repoPath,
+ Stdout: &stdoutBuilder,
+ Stderr: &stderrBuilder,
+ }); err != nil {
+ stdout := stdoutBuilder.String()
+ stderr := stderrBuilder.String()
+
+ // sanitize the output, since it may contain the remote address, which may
+ // contain a password
+ stderrMessage = util.SanitizeCredentialURLs(stderr)
+ stdoutMessage = util.SanitizeCredentialURLs(stdout)
+ }
+ }
+ }
+
+ // If there is still an error (or there always was an error)
+ if err != nil {
+ log.Error("SyncMirrors [repo: %-v]: failed to update mirror repository:\nStdout: %s\nStderr: %s\nErr: %v", m.Repo, stdoutMessage, stderrMessage, err)
+ desc := fmt.Sprintf("Failed to update mirror repository '%s': %s", repoPath, stderrMessage)
+ if err = system_model.CreateRepositoryNotice(desc); err != nil {
+ log.Error("CreateRepositoryNotice: %v", err)
+ }
+ return nil, false
+ }
+ }
+ output := stderrBuilder.String()
+
+ if err := git.WriteCommitGraph(ctx, repoPath); err != nil {
+ log.Error("SyncMirrors [repo: %-v]: %v", m.Repo, err)
+ }
+
+ gitRepo, err := gitrepo.OpenRepository(ctx, m.Repo)
+ if err != nil {
+ log.Error("SyncMirrors [repo: %-v]: failed to OpenRepository: %v", m.Repo, err)
+ return nil, false
+ }
+
+ log.Trace("SyncMirrors [repo: %-v]: syncing branches...", m.Repo)
+ if _, err = repo_module.SyncRepoBranchesWithRepo(ctx, m.Repo, gitRepo, 0); err != nil {
+ log.Error("SyncMirrors [repo: %-v]: failed to synchronize branches: %v", m.Repo, err)
+ }
+
+ log.Trace("SyncMirrors [repo: %-v]: syncing releases with tags...", m.Repo)
+ if err = repo_module.SyncReleasesWithTags(ctx, m.Repo, gitRepo); err != nil {
+ log.Error("SyncMirrors [repo: %-v]: failed to synchronize tags to releases: %v", m.Repo, err)
+ }
+
+ if m.LFS && setting.LFS.StartServer {
+ log.Trace("SyncMirrors [repo: %-v]: syncing LFS objects...", m.Repo)
+ endpoint := lfs.DetermineEndpoint(remoteURL.String(), m.LFSEndpoint)
+ lfsClient := lfs.NewClient(endpoint, nil)
+ if err = repo_module.StoreMissingLfsObjectsInRepository(ctx, m.Repo, gitRepo, lfsClient); err != nil {
+ log.Error("SyncMirrors [repo: %-v]: failed to synchronize LFS objects for repository: %v", m.Repo, err)
+ }
+ }
+ gitRepo.Close()
+
+ log.Trace("SyncMirrors [repo: %-v]: updating size of repository", m.Repo)
+ if err := repo_module.UpdateRepoSize(ctx, m.Repo); err != nil {
+ log.Error("SyncMirrors [repo: %-v]: failed to update size for mirror repository: %v", m.Repo, err)
+ }
+
+ if m.Repo.HasWiki() {
+ log.Trace("SyncMirrors [repo: %-v Wiki]: running git remote update...", m.Repo)
+ stderrBuilder.Reset()
+ stdoutBuilder.Reset()
+ if err := git.NewCommand(ctx, "remote", "update", "--prune").AddDynamicArguments(m.GetRemoteName()).
+ SetDescription(fmt.Sprintf("Mirror.runSync Wiki: %s ", m.Repo.FullName())).
+ Run(&git.RunOpts{
+ Timeout: timeout,
+ Dir: wikiPath,
+ Stdout: &stdoutBuilder,
+ Stderr: &stderrBuilder,
+ }); err != nil {
+ stdout := stdoutBuilder.String()
+ stderr := stderrBuilder.String()
+
+ // sanitize the output, since it may contain the remote address, which may contain a password
+ stderrMessage := util.SanitizeCredentialURLs(stderr)
+ stdoutMessage := util.SanitizeCredentialURLs(stdout)
+
+ // Now check whether the error was caused by a reference that cannot be resolved because it is broken
+ if strings.Contains(stderrMessage, "unable to resolve reference") && strings.Contains(stderrMessage, "reference broken") {
+ log.Warn("SyncMirrors [repo: %-v Wiki]: failed to update mirror wiki repository due to broken references:\nStdout: %s\nStderr: %s\nErr: %v\nAttempting Prune", m.Repo, stdoutMessage, stderrMessage, err)
+ err = nil
+
+ // Attempt prune
+ pruneErr := pruneBrokenReferences(ctx, m, repoPath, timeout, &stdoutBuilder, &stderrBuilder, true)
+ if pruneErr == nil {
+ // Successful prune - reattempt mirror
+ stderrBuilder.Reset()
+ stdoutBuilder.Reset()
+
+ if err = git.NewCommand(ctx, "remote", "update", "--prune").AddDynamicArguments(m.GetRemoteName()).
+ SetDescription(fmt.Sprintf("Mirror.runSync Wiki: %s ", m.Repo.FullName())).
+ Run(&git.RunOpts{
+ Timeout: timeout,
+ Dir: wikiPath,
+ Stdout: &stdoutBuilder,
+ Stderr: &stderrBuilder,
+ }); err != nil {
+ stdout := stdoutBuilder.String()
+ stderr := stderrBuilder.String()
+ stderrMessage = util.SanitizeCredentialURLs(stderr)
+ stdoutMessage = util.SanitizeCredentialURLs(stdout)
+ }
+ }
+ }
+
+ // If there is still an error (or there always was an error)
+ if err != nil {
+ log.Error("SyncMirrors [repo: %-v Wiki]: failed to update mirror repository wiki:\nStdout: %s\nStderr: %s\nErr: %v", m.Repo, stdoutMessage, stderrMessage, err)
+ desc := fmt.Sprintf("Failed to update mirror repository wiki '%s': %s", wikiPath, stderrMessage)
+ if err = system_model.CreateRepositoryNotice(desc); err != nil {
+ log.Error("CreateRepositoryNotice: %v", err)
+ }
+ return nil, false
+ }
+
+ if err := git.WriteCommitGraph(ctx, wikiPath); err != nil {
+ log.Error("SyncMirrors [repo: %-v]: %v", m.Repo, err)
+ }
+ }
+ log.Trace("SyncMirrors [repo: %-v Wiki]: git remote update complete", m.Repo)
+ }
+
+ log.Trace("SyncMirrors [repo: %-v]: invalidating mirror branch caches...", m.Repo)
+ branches, _, err := gitrepo.GetBranchesByPath(ctx, m.Repo, 0, 0)
+ if err != nil {
+ log.Error("SyncMirrors [repo: %-v]: failed to GetBranches: %v", m.Repo, err)
+ return nil, false
+ }
+
+ for _, branch := range branches {
+ cache.Remove(m.Repo.GetCommitsCountCacheKey(branch.Name, true))
+ }
+
+ m.UpdatedUnix = timeutil.TimeStampNow()
+ return parseRemoteUpdateOutput(output, m.GetRemoteName()), true
+}
+
+// SyncPullMirror starts the sync of the pull mirror and schedules the next run.
+func SyncPullMirror(ctx context.Context, repoID int64) bool {
+ log.Trace("SyncMirrors [repo_id: %v]", repoID)
+ defer func() {
+ err := recover()
+ if err == nil {
+ return
+ }
+ // There was a panic whilst syncMirrors...
+ log.Error("PANIC whilst SyncMirrors[repo_id: %d] Panic: %v\nStacktrace: %s", repoID, err, log.Stack(2))
+ }()
+
+ m, err := repo_model.GetMirrorByRepoID(ctx, repoID)
+ if err != nil {
+ log.Error("SyncMirrors [repo_id: %v]: unable to GetMirrorByRepoID: %v", repoID, err)
+ return false
+ }
+ _ = m.GetRepository(ctx) // force load repository of mirror
+
+ ctx, _, finished := process.GetManager().AddContext(ctx, fmt.Sprintf("Syncing Mirror %s/%s", m.Repo.OwnerName, m.Repo.Name))
+ defer finished()
+
+ log.Trace("SyncMirrors [repo: %-v]: Running Sync", m.Repo)
+ results, ok := runSync(ctx, m)
+ if !ok {
+ if err = repo_model.TouchMirror(ctx, m); err != nil {
+ log.Error("SyncMirrors [repo: %-v]: failed to TouchMirror: %v", m.Repo, err)
+ }
+ return false
+ }
+
+ log.Trace("SyncMirrors [repo: %-v]: Scheduling next update", m.Repo)
+ m.ScheduleNextUpdate()
+ if err = repo_model.UpdateMirror(ctx, m); err != nil {
+ log.Error("SyncMirrors [repo: %-v]: failed to UpdateMirror with next update date: %v", m.Repo, err)
+ return false
+ }
+
+ gitRepo, err := gitrepo.OpenRepository(ctx, m.Repo)
+ if err != nil {
+ log.Error("SyncMirrors [repo: %-v]: unable to OpenRepository: %v", m.Repo, err)
+ return false
+ }
+ defer gitRepo.Close()
+
+ log.Trace("SyncMirrors [repo: %-v]: %d branches updated", m.Repo, len(results))
+ if len(results) > 0 {
+ if ok := checkAndUpdateEmptyRepository(ctx, m, results); !ok {
+ log.Error("SyncMirrors [repo: %-v]: checkAndUpdateEmptyRepository: %v", m.Repo, err)
+ return false
+ }
+ }
+
+ for _, result := range results {
+ // Discard GitHub pull requests, i.e. refs/pull/*
+ if result.refName.IsPull() {
+ continue
+ }
+
+ // Create reference
+ if result.oldCommitID == gitShortEmptySha {
+ commitID, err := gitRepo.GetRefCommitID(result.refName.String())
+ if err != nil {
+ log.Error("SyncMirrors [repo: %-v]: unable to GetRefCommitID [ref_name: %s]: %v", m.Repo, result.refName, err)
+ continue
+ }
+ objectFormat := git.ObjectFormatFromName(m.Repo.ObjectFormatName)
+ notify_service.SyncPushCommits(ctx, m.Repo.MustOwner(ctx), m.Repo, &repo_module.PushUpdateOptions{
+ RefFullName: result.refName,
+ OldCommitID: objectFormat.EmptyObjectID().String(),
+ NewCommitID: commitID,
+ }, repo_module.NewPushCommits())
+ notify_service.SyncCreateRef(ctx, m.Repo.MustOwner(ctx), m.Repo, result.refName, commitID)
+ continue
+ }
+
+ // Delete reference
+ if result.newCommitID == gitShortEmptySha {
+ notify_service.SyncDeleteRef(ctx, m.Repo.MustOwner(ctx), m.Repo, result.refName)
+ continue
+ }
+
+ // Push commits
+ oldCommitID, err := git.GetFullCommitID(gitRepo.Ctx, gitRepo.Path, result.oldCommitID)
+ if err != nil {
+ log.Error("SyncMirrors [repo: %-v]: unable to get GetFullCommitID[%s]: %v", m.Repo, result.oldCommitID, err)
+ continue
+ }
+ newCommitID, err := git.GetFullCommitID(gitRepo.Ctx, gitRepo.Path, result.newCommitID)
+ if err != nil {
+ log.Error("SyncMirrors [repo: %-v]: unable to get GetFullCommitID [%s]: %v", m.Repo, result.newCommitID, err)
+ continue
+ }
+ commits, err := gitRepo.CommitsBetweenIDs(newCommitID, oldCommitID)
+ if err != nil {
+ log.Error("SyncMirrors [repo: %-v]: unable to get CommitsBetweenIDs [new_commit_id: %s, old_commit_id: %s]: %v", m.Repo, newCommitID, oldCommitID, err)
+ continue
+ }
+
+ theCommits := repo_module.GitToPushCommits(commits)
+ if len(theCommits.Commits) > setting.UI.FeedMaxCommitNum {
+ theCommits.Commits = theCommits.Commits[:setting.UI.FeedMaxCommitNum]
+ }
+
+ newCommit, err := gitRepo.GetCommit(newCommitID)
+ if err != nil {
+ log.Error("SyncMirrors [repo: %-v]: unable to get commit %s: %v", m.Repo, newCommitID, err)
+ continue
+ }
+
+ theCommits.HeadCommit = repo_module.CommitToPushCommit(newCommit)
+ theCommits.CompareURL = m.Repo.ComposeCompareURL(oldCommitID, newCommitID)
+
+ notify_service.SyncPushCommits(ctx, m.Repo.MustOwner(ctx), m.Repo, &repo_module.PushUpdateOptions{
+ RefFullName: result.refName,
+ OldCommitID: oldCommitID,
+ NewCommitID: newCommitID,
+ }, theCommits)
+ }
+ log.Trace("SyncMirrors [repo: %-v]: done notifying updated branches/tags - now updating last commit time", m.Repo)
+
+ isEmpty, err := gitRepo.IsEmpty()
+ if err != nil {
+ log.Error("SyncMirrors [repo: %-v]: unable to check empty git repo: %v", m.Repo, err)
+ return false
+ }
+ if !isEmpty {
+ // Get latest commit date and update to current repository updated time
+ commitDate, err := git.GetLatestCommitTime(ctx, m.Repo.RepoPath())
+ if err != nil {
+ log.Error("SyncMirrors [repo: %-v]: unable to GetLatestCommitDate: %v", m.Repo, err)
+ return false
+ }
+
+ if err = repo_model.UpdateRepositoryUpdatedTime(ctx, m.RepoID, commitDate); err != nil {
+ log.Error("SyncMirrors [repo: %-v]: unable to update repository 'updated_unix': %v", m.Repo, err)
+ return false
+ }
+ }
+
+ log.Trace("SyncMirrors [repo: %-v]: Successfully updated", m.Repo)
+
+ return true
+}
+
+func checkAndUpdateEmptyRepository(ctx context.Context, m *repo_model.Mirror, results []*mirrorSyncResult) bool {
+ if !m.Repo.IsEmpty {
+ return true
+ }
+
+ hasDefault := false
+ hasMaster := false
+ hasMain := false
+ defaultBranchName := m.Repo.DefaultBranch
+ if len(defaultBranchName) == 0 {
+ defaultBranchName = setting.Repository.DefaultBranch
+ }
+ firstName := ""
+ for _, result := range results {
+ if !result.refName.IsBranch() {
+ continue
+ }
+
+ name := result.refName.BranchName()
+ if len(firstName) == 0 {
+ firstName = name
+ }
+
+ hasDefault = hasDefault || name == defaultBranchName
+ hasMaster = hasMaster || name == "master"
+ hasMain = hasMain || name == "main"
+ }
+
+ if len(firstName) > 0 {
+ if hasDefault {
+ m.Repo.DefaultBranch = defaultBranchName
+ } else if hasMaster {
+ m.Repo.DefaultBranch = "master"
+ } else if hasMain {
+ m.Repo.DefaultBranch = "main"
+ } else {
+ m.Repo.DefaultBranch = firstName
+ }
+ // Update the git repository default branch
+ if err := gitrepo.SetDefaultBranch(ctx, m.Repo, m.Repo.DefaultBranch); err != nil {
+ if !git.IsErrUnsupportedVersion(err) {
+ log.Error("Failed to update default branch of underlying git repository %-v. Error: %v", m.Repo, err)
+ desc := fmt.Sprintf("Failed to update default branch of underlying git repository '%s': %v", m.Repo.RepoPath(), err)
+ if err = system_model.CreateRepositoryNotice(desc); err != nil {
+ log.Error("CreateRepositoryNotice: %v", err)
+ }
+ return false
+ }
+ }
+ m.Repo.IsEmpty = false
+ // Update the is_empty and default_branch columns
+ if err := repo_model.UpdateRepositoryCols(ctx, m.Repo, "default_branch", "is_empty"); err != nil {
+ log.Error("Failed to update default branch of repository %-v. Error: %v", m.Repo, err)
+ desc := fmt.Sprintf("Failed to update default branch of repository '%s': %v", m.Repo.RepoPath(), err)
+ if err = system_model.CreateRepositoryNotice(desc); err != nil {
+ log.Error("CreateRepositoryNotice: %v", err)
+ }
+ return false
+ }
+ }
+ return true
+}
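The default-branch selection above applies a fixed priority: the mirror's configured default if the sync produced it, then "master", then "main", then the first branch seen. A minimal standalone sketch of the same rule (pickDefaultBranch is a hypothetical helper, not part of this patch):

	func pickDefaultBranch(configured string, branches []string) string {
		has := func(name string) bool {
			for _, b := range branches {
				if b == name {
					return true
				}
			}
			return false
		}
		switch {
		case len(branches) == 0:
			return configured // nothing synced; keep the configured name
		case has(configured):
			return configured
		case has("master"):
			return "master"
		case has("main"):
			return "main"
		default:
			return branches[0] // fall back to the first synced branch
		}
	}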
diff --git a/services/mirror/mirror_push.go b/services/mirror/mirror_push.go
new file mode 100644
index 0000000..3a9644c
--- /dev/null
+++ b/services/mirror/mirror_push.go
@@ -0,0 +1,313 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package mirror
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "io"
+ "os"
+ "regexp"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/process"
+ "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+)
+
+var stripExitStatus = regexp.MustCompile(`exit status \d+ - `)
+
+// AddPushMirrorRemote registers the push mirror remote.
+var AddPushMirrorRemote = addPushMirrorRemote
+
+func addPushMirrorRemote(ctx context.Context, m *repo_model.PushMirror, addr string) error {
+ addRemoteAndConfig := func(addr, path string) error {
+ cmd := git.NewCommand(ctx, "remote", "add", "--mirror=push").AddDynamicArguments(m.RemoteName, addr)
+ if strings.Contains(addr, "://") && strings.Contains(addr, "@") {
+ cmd.SetDescription(fmt.Sprintf("remote add %s --mirror=push %s [repo_path: %s]", m.RemoteName, util.SanitizeCredentialURLs(addr), path))
+ } else {
+ cmd.SetDescription(fmt.Sprintf("remote add %s --mirror=push %s [repo_path: %s]", m.RemoteName, addr, path))
+ }
+ if _, _, err := cmd.RunStdString(&git.RunOpts{Dir: path}); err != nil {
+ return err
+ }
+ if _, _, err := git.NewCommand(ctx, "config", "--add").AddDynamicArguments("remote."+m.RemoteName+".push", "+refs/heads/*:refs/heads/*").RunStdString(&git.RunOpts{Dir: path}); err != nil {
+ return err
+ }
+ if _, _, err := git.NewCommand(ctx, "config", "--add").AddDynamicArguments("remote."+m.RemoteName+".push", "+refs/tags/*:refs/tags/*").RunStdString(&git.RunOpts{Dir: path}); err != nil {
+ return err
+ }
+ return nil
+ }
+
+ if err := addRemoteAndConfig(addr, m.Repo.RepoPath()); err != nil {
+ return err
+ }
+
+ if m.Repo.HasWiki() {
+ wikiRemoteURL := repository.WikiRemoteURL(ctx, addr)
+ if len(wikiRemoteURL) > 0 {
+ if err := addRemoteAndConfig(wikiRemoteURL, m.Repo.WikiPath()); err != nil {
+ return err
+ }
+ }
+ }
+
+ return nil
+}
+
+// RemovePushMirrorRemote removes the push mirror remote.
+func RemovePushMirrorRemote(ctx context.Context, m *repo_model.PushMirror) error {
+ cmd := git.NewCommand(ctx, "remote", "rm").AddDynamicArguments(m.RemoteName)
+ _ = m.GetRepository(ctx)
+
+ if _, _, err := cmd.RunStdString(&git.RunOpts{Dir: m.Repo.RepoPath()}); err != nil {
+ return err
+ }
+
+ if m.Repo.HasWiki() {
+ if _, _, err := cmd.RunStdString(&git.RunOpts{Dir: m.Repo.WikiPath()}); err != nil {
+ // The wiki remote may not exist
+ log.Warn("Wiki Remote[%d] could not be removed: %v", m.ID, err)
+ }
+ }
+
+ return nil
+}
+
+// SyncPushMirror starts the sync of the push mirror and schedules the next run.
+func SyncPushMirror(ctx context.Context, mirrorID int64) bool {
+ log.Trace("SyncPushMirror [mirror: %d]", mirrorID)
+ defer func() {
+ err := recover()
+ if err == nil {
+ return
+ }
+ // There was a panic whilst syncPushMirror...
+ log.Error("PANIC whilst syncPushMirror[%d] Panic: %v\nStacktrace: %s", mirrorID, err, log.Stack(2))
+ }()
+
+ // TODO: Handle "!exist" better
+ m, exist, err := db.GetByID[repo_model.PushMirror](ctx, mirrorID)
+ if err != nil || !exist {
+ log.Error("GetPushMirrorByID [%d]: %v", mirrorID, err)
+ return false
+ }
+
+ _ = m.GetRepository(ctx)
+
+ m.LastError = ""
+
+ ctx, _, finished := process.GetManager().AddContext(ctx, fmt.Sprintf("Syncing PushMirror %s/%s to %s", m.Repo.OwnerName, m.Repo.Name, m.RemoteName))
+ defer finished()
+
+ log.Trace("SyncPushMirror [mirror: %d][repo: %-v]: Running Sync", m.ID, m.Repo)
+ err = runPushSync(ctx, m)
+ if err != nil {
+ log.Error("SyncPushMirror [mirror: %d][repo: %-v]: %v", m.ID, m.Repo, err)
+ m.LastError = stripExitStatus.ReplaceAllLiteralString(err.Error(), "")
+ }
+
+ m.LastUpdateUnix = timeutil.TimeStampNow()
+
+ if err := repo_model.UpdatePushMirror(ctx, m); err != nil {
+ log.Error("UpdatePushMirror [%d]: %v", m.ID, err)
+
+ return false
+ }
+
+ log.Trace("SyncPushMirror [mirror: %d][repo: %-v]: Finished", m.ID, m.Repo)
+
+ return err == nil
+}
+
+func runPushSync(ctx context.Context, m *repo_model.PushMirror) error {
+ timeout := time.Duration(setting.Git.Timeout.Mirror) * time.Second
+
+ performPush := func(repo *repo_model.Repository, isWiki bool) error {
+ path := repo.RepoPath()
+ if isWiki {
+ path = repo.WikiPath()
+ }
+ remoteURL, err := git.GetRemoteURL(ctx, path, m.RemoteName)
+ if err != nil {
+ log.Error("GetRemoteAddress(%s) Error %v", path, err)
+ return errors.New("Unexpected error")
+ }
+
+ if setting.LFS.StartServer {
+ log.Trace("SyncMirrors [repo: %-v]: syncing LFS objects...", m.Repo)
+
+ var gitRepo *git.Repository
+ if isWiki {
+ gitRepo, err = gitrepo.OpenWikiRepository(ctx, repo)
+ } else {
+ gitRepo, err = gitrepo.OpenRepository(ctx, repo)
+ }
+ if err != nil {
+ log.Error("OpenRepository: %v", err)
+ return errors.New("Unexpected error")
+ }
+ defer gitRepo.Close()
+
+ endpoint := lfs.DetermineEndpoint(remoteURL.String(), "")
+ lfsClient := lfs.NewClient(endpoint, nil)
+ if err := pushAllLFSObjects(ctx, gitRepo, lfsClient); err != nil {
+ return util.SanitizeErrorCredentialURLs(err)
+ }
+ }
+
+ log.Trace("Pushing %s mirror[%d] remote %s", path, m.ID, m.RemoteName)
+
+ // OpenSSH isn't very intuitive when you want to specify a specific keypair.
+ // Therefore, we need to create a temporary file that stores the private key, so that OpenSSH can use it.
+ // We delete the temporary file afterwards.
+ privateKeyPath := ""
+ if m.PublicKey != "" {
+ f, err := os.CreateTemp(os.TempDir(), m.RemoteName)
+ if err != nil {
+ log.Error("os.CreateTemp: %v", err)
+ return errors.New("unexpected error")
+ }
+
+ defer func() {
+ f.Close()
+ if err := os.Remove(f.Name()); err != nil {
+ log.Error("os.Remove: %v", err)
+ }
+ }()
+
+ privateKey, err := m.Privatekey()
+ if err != nil {
+ log.Error("Privatekey: %v", err)
+ return errors.New("unexpected error")
+ }
+
+ if _, err := f.Write(privateKey); err != nil {
+ log.Error("f.Write: %v", err)
+ return errors.New("unexpected error")
+ }
+
+ privateKeyPath = f.Name()
+ }
+ if err := git.Push(ctx, path, git.PushOptions{
+ Remote: m.RemoteName,
+ Force: true,
+ Mirror: true,
+ Timeout: timeout,
+ PrivateKeyPath: privateKeyPath,
+ }); err != nil {
+ log.Error("Error pushing %s mirror[%d] remote %s: %v", path, m.ID, m.RemoteName, err)
+
+ return util.SanitizeErrorCredentialURLs(err)
+ }
+
+ return nil
+ }
+
+ err := performPush(m.Repo, false)
+ if err != nil {
+ return err
+ }
+
+ if m.Repo.HasWiki() {
+ _, err := git.GetRemoteAddress(ctx, m.Repo.WikiPath(), m.RemoteName)
+ if err == nil {
+ err := performPush(m.Repo, true)
+ if err != nil {
+ return err
+ }
+ } else {
+ log.Trace("Skipping wiki: No remote configured")
+ }
+ }
+
+ return nil
+}
+
+func pushAllLFSObjects(ctx context.Context, gitRepo *git.Repository, lfsClient lfs.Client) error {
+ contentStore := lfs.NewContentStore()
+
+ pointerChan := make(chan lfs.PointerBlob)
+ errChan := make(chan error, 1)
+ go lfs.SearchPointerBlobs(ctx, gitRepo, pointerChan, errChan)
+
+ uploadObjects := func(pointers []lfs.Pointer) error {
+ err := lfsClient.Upload(ctx, pointers, func(p lfs.Pointer, objectError error) (io.ReadCloser, error) {
+ if objectError != nil {
+ return nil, objectError
+ }
+
+ content, err := contentStore.Get(p)
+ if err != nil {
+ log.Error("Error reading LFS object %v: %v", p, err)
+ }
+ return content, err
+ })
+ if err != nil {
+ select {
+ case <-ctx.Done():
+ return nil
+ default:
+ }
+ }
+ return err
+ }
+
+ var batch []lfs.Pointer
+ for pointerBlob := range pointerChan {
+ exists, err := contentStore.Exists(pointerBlob.Pointer)
+ if err != nil {
+ log.Error("Error checking if LFS object %v exists: %v", pointerBlob.Pointer, err)
+ return err
+ }
+ if !exists {
+ log.Trace("Skipping missing LFS object %v", pointerBlob.Pointer)
+ continue
+ }
+
+ batch = append(batch, pointerBlob.Pointer)
+ if len(batch) >= lfsClient.BatchSize() {
+ if err := uploadObjects(batch); err != nil {
+ return err
+ }
+ batch = nil
+ }
+ }
+ if len(batch) > 0 {
+ if err := uploadObjects(batch); err != nil {
+ return err
+ }
+ }
+
+ err, has := <-errChan
+ if has {
+ log.Error("Error enumerating LFS objects for repository: %v", err)
+ return err
+ }
+
+ return nil
+}
+
+func syncPushMirrorWithSyncOnCommit(ctx context.Context, repoID int64) {
+ pushMirrors, err := repo_model.GetPushMirrorsSyncedOnCommit(ctx, repoID)
+ if err != nil {
+ log.Error("repo_model.GetPushMirrorsSyncedOnCommit failed: %v", err)
+ return
+ }
+
+ for _, mirror := range pushMirrors {
+ AddPushMirrorToQueue(mirror.ID)
+ }
+}
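pushAllLFSObjects above drains a channel of pointer blobs, batching them up to lfsClient.BatchSize() per upload and flushing the remainder once the channel closes. The same consume/batch/flush pattern in isolation (a minimal sketch; consumeInBatches and process are illustrative, not part of this patch):

	func consumeInBatches[T any](ch <-chan T, size int, process func([]T) error) error {
		var batch []T
		for item := range ch {
			batch = append(batch, item)
			if len(batch) >= size { // flush a full batch
				if err := process(batch); err != nil {
					return err
				}
				batch = nil
			}
		}
		if len(batch) > 0 { // flush the remainder after the channel closes
			return process(batch)
		}
		return nil
	}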
diff --git a/services/mirror/mirror_test.go b/services/mirror/mirror_test.go
new file mode 100644
index 0000000..8ad524b
--- /dev/null
+++ b/services/mirror/mirror_test.go
@@ -0,0 +1,46 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package mirror
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func Test_parseRemoteUpdateOutput(t *testing.T) {
+ output := `
+ * [new tag] v0.1.8 -> v0.1.8
+ * [new branch] master -> origin/master
+ - [deleted] (none) -> origin/test1
+ - [deleted] (none) -> tag1
+ + f895a1e...957a993 test2 -> origin/test2 (forced update)
+ 957a993..a87ba5f test3 -> origin/test3
+`
+ results := parseRemoteUpdateOutput(output, "origin")
+ assert.Len(t, results, 6)
+ assert.EqualValues(t, "refs/tags/v0.1.8", results[0].refName.String())
+ assert.EqualValues(t, gitShortEmptySha, results[0].oldCommitID)
+ assert.EqualValues(t, "", results[0].newCommitID)
+
+ assert.EqualValues(t, "refs/heads/master", results[1].refName.String())
+ assert.EqualValues(t, gitShortEmptySha, results[1].oldCommitID)
+ assert.EqualValues(t, "", results[1].newCommitID)
+
+ assert.EqualValues(t, "refs/heads/test1", results[2].refName.String())
+ assert.EqualValues(t, "", results[2].oldCommitID)
+ assert.EqualValues(t, gitShortEmptySha, results[2].newCommitID)
+
+ assert.EqualValues(t, "refs/tags/tag1", results[3].refName.String())
+ assert.EqualValues(t, "", results[3].oldCommitID)
+ assert.EqualValues(t, gitShortEmptySha, results[3].newCommitID)
+
+ assert.EqualValues(t, "refs/heads/test2", results[4].refName.String())
+ assert.EqualValues(t, "f895a1e", results[4].oldCommitID)
+ assert.EqualValues(t, "957a993", results[4].newCommitID)
+
+ assert.EqualValues(t, "refs/heads/test3", results[5].refName.String())
+ assert.EqualValues(t, "957a993", results[5].oldCommitID)
+ assert.EqualValues(t, "a87ba5f", results[5].newCommitID)
+}
diff --git a/services/mirror/notifier.go b/services/mirror/notifier.go
new file mode 100644
index 0000000..93d9044
--- /dev/null
+++ b/services/mirror/notifier.go
@@ -0,0 +1,31 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package mirror
+
+import (
+ "context"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/repository"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+func init() {
+ notify_service.RegisterNotifier(&mirrorNotifier{})
+}
+
+type mirrorNotifier struct {
+ notify_service.NullNotifier
+}
+
+var _ notify_service.Notifier = &mirrorNotifier{}
+
+func (m *mirrorNotifier) PushCommits(ctx context.Context, _ *user_model.User, repo *repo_model.Repository, _ *repository.PushUpdateOptions, _ *repository.PushCommits) {
+ syncPushMirrorWithSyncOnCommit(ctx, repo.ID)
+}
+
+func (m *mirrorNotifier) SyncPushCommits(ctx context.Context, _ *user_model.User, repo *repo_model.Repository, _ *repository.PushUpdateOptions, _ *repository.PushCommits) {
+ syncPushMirrorWithSyncOnCommit(ctx, repo.ID)
+}
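mirrorNotifier demonstrates the intended extension pattern: embed notify_service.NullNotifier so only the events of interest need overriding, and register the notifier from init(). A minimal sketch of another notifier built the same way (logNotifier and its package are hypothetical):

	package mynotifier

	import (
		"context"

		repo_model "code.gitea.io/gitea/models/repo"
		user_model "code.gitea.io/gitea/models/user"
		"code.gitea.io/gitea/modules/log"
		"code.gitea.io/gitea/modules/repository"
		notify_service "code.gitea.io/gitea/services/notify"
	)

	type logNotifier struct {
		notify_service.NullNotifier
	}

	func init() {
		notify_service.RegisterNotifier(&logNotifier{})
	}

	// PushCommits overrides a single event; every other Notifier method
	// falls through to the embedded NullNotifier no-ops.
	func (n *logNotifier) PushCommits(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, _ *repository.PushUpdateOptions, commits *repository.PushCommits) {
		log.Info("push to %s by %s: %d commits", repo.FullName(), pusher.Name, len(commits.Commits))
	}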
diff --git a/services/mirror/queue.go b/services/mirror/queue.go
new file mode 100644
index 0000000..0d9a624
--- /dev/null
+++ b/services/mirror/queue.go
@@ -0,0 +1,70 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package mirror
+
+import (
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/queue"
+ "code.gitea.io/gitea/modules/setting"
+)
+
+var mirrorQueue *queue.WorkerPoolQueue[*SyncRequest]
+
+// SyncType type of sync request
+type SyncType int
+
+const (
+ // PullMirrorType for pull mirrors
+ PullMirrorType SyncType = iota
+ // PushMirrorType for push mirrors
+ PushMirrorType
+)
+
+// SyncRequest for the mirror queue
+type SyncRequest struct {
+ Type SyncType
+ ReferenceID int64 // RepoID for pull mirror, MirrorID for push mirror
+}
+
+// StartSyncMirrors starts a goroutine to sync the mirrors
+func StartSyncMirrors(queueHandle func(data ...*SyncRequest) []*SyncRequest) {
+ if !setting.Mirror.Enabled {
+ return
+ }
+ mirrorQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "mirror", queueHandle)
+ if mirrorQueue == nil {
+ log.Fatal("Unable to create mirror queue")
+ }
+ go graceful.GetManager().RunWithCancel(mirrorQueue)
+}
+
+// AddPullMirrorToQueue adds repoID to mirror queue
+func AddPullMirrorToQueue(repoID int64) {
+ addMirrorToQueue(PullMirrorType, repoID)
+}
+
+// AddPushMirrorToQueue adds the push mirror to the queue
+func AddPushMirrorToQueue(mirrorID int64) {
+ addMirrorToQueue(PushMirrorType, mirrorID)
+}
+
+func addMirrorToQueue(syncType SyncType, referenceID int64) {
+ if !setting.Mirror.Enabled {
+ return
+ }
+ go func() {
+ if err := PushToQueue(syncType, referenceID); err != nil {
+ log.Error("Unable to push sync request for to the queue for pull mirror repo[%d]. Error: %v", referenceID, err)
+ }
+ }()
+}
+
+// PushToQueue adds the sync request to the queue
+func PushToQueue(mirrorType SyncType, referenceID int64) error {
+ return mirrorQueue.Push(&SyncRequest{
+ Type: mirrorType,
+ ReferenceID: referenceID,
+ })
+}
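StartSyncMirrors leaves dispatch to the caller-supplied queueHandle. A minimal sketch of such a handler (SyncPullMirror stands in for the pull-mirror entry point, which lies outside this excerpt; only SyncPushMirror appears above):

	func queueHandle(items ...*SyncRequest) []*SyncRequest {
		ctx := graceful.GetManager().ShutdownContext()
		for _, req := range items {
			switch req.Type {
			case PullMirrorType:
				_ = SyncPullMirror(ctx, req.ReferenceID) // assumed pull-mirror sync function
			case PushMirrorType:
				_ = SyncPushMirror(ctx, req.ReferenceID)
			}
		}
		return nil // nothing to requeue
	}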
diff --git a/services/notify/notifier.go b/services/notify/notifier.go
new file mode 100644
index 0000000..3230a5e
--- /dev/null
+++ b/services/notify/notifier.go
@@ -0,0 +1,79 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package notify
+
+import (
+ "context"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ packages_model "code.gitea.io/gitea/models/packages"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/repository"
+)
+
+// Notifier defines an interface for a notification receiver
+type Notifier interface {
+ Run()
+
+ AdoptRepository(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository)
+ CreateRepository(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository)
+ MigrateRepository(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository)
+ DeleteRepository(ctx context.Context, doer *user_model.User, repo *repo_model.Repository)
+ ForkRepository(ctx context.Context, doer *user_model.User, oldRepo, repo *repo_model.Repository)
+ RenameRepository(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, oldRepoName string)
+ TransferRepository(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, oldOwnerName string)
+ RepoPendingTransfer(ctx context.Context, doer, newOwner *user_model.User, repo *repo_model.Repository)
+
+ NewIssue(ctx context.Context, issue *issues_model.Issue, mentions []*user_model.User)
+ IssueChangeStatus(ctx context.Context, doer *user_model.User, commitID string, issue *issues_model.Issue, actionComment *issues_model.Comment, closeOrReopen bool)
+ DeleteIssue(ctx context.Context, doer *user_model.User, issue *issues_model.Issue)
+ IssueChangeMilestone(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldMilestoneID int64)
+ IssueChangeAssignee(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, assignee *user_model.User, removed bool, comment *issues_model.Comment)
+ PullRequestReviewRequest(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, reviewer *user_model.User, isRequest bool, comment *issues_model.Comment)
+ IssueChangeContent(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldContent string)
+ IssueClearLabels(ctx context.Context, doer *user_model.User, issue *issues_model.Issue)
+ IssueChangeTitle(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldTitle string)
+ IssueChangeRef(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldRef string)
+ IssueChangeLabels(ctx context.Context, doer *user_model.User, issue *issues_model.Issue,
+ addedLabels, removedLabels []*issues_model.Label)
+
+ NewPullRequest(ctx context.Context, pr *issues_model.PullRequest, mentions []*user_model.User)
+ MergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest)
+ AutoMergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest)
+ PullRequestSynchronized(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest)
+ PullRequestReview(ctx context.Context, pr *issues_model.PullRequest, review *issues_model.Review, comment *issues_model.Comment, mentions []*user_model.User)
+ PullRequestCodeComment(ctx context.Context, pr *issues_model.PullRequest, comment *issues_model.Comment, mentions []*user_model.User)
+ PullRequestChangeTargetBranch(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest, oldBranch string)
+ PullRequestPushCommits(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest, comment *issues_model.Comment)
+ PullReviewDismiss(ctx context.Context, doer *user_model.User, review *issues_model.Review, comment *issues_model.Comment)
+
+ CreateIssueComment(ctx context.Context, doer *user_model.User, repo *repo_model.Repository,
+ issue *issues_model.Issue, comment *issues_model.Comment, mentions []*user_model.User)
+ UpdateComment(ctx context.Context, doer *user_model.User, c *issues_model.Comment, oldContent string)
+ DeleteComment(ctx context.Context, doer *user_model.User, c *issues_model.Comment)
+
+ NewWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, page, comment string)
+ EditWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, page, comment string)
+ DeleteWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, page string)
+
+ NewUserSignUp(ctx context.Context, newUser *user_model.User)
+
+ NewRelease(ctx context.Context, rel *repo_model.Release)
+ UpdateRelease(ctx context.Context, doer *user_model.User, rel *repo_model.Release)
+ DeleteRelease(ctx context.Context, doer *user_model.User, rel *repo_model.Release)
+
+ PushCommits(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, opts *repository.PushUpdateOptions, commits *repository.PushCommits)
+ CreateRef(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, refFullName git.RefName, refID string)
+ DeleteRef(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, refFullName git.RefName)
+ SyncPushCommits(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, opts *repository.PushUpdateOptions, commits *repository.PushCommits)
+ SyncCreateRef(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, refFullName git.RefName, refID string)
+ SyncDeleteRef(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, refFullName git.RefName)
+
+ PackageCreate(ctx context.Context, doer *user_model.User, pd *packages_model.PackageDescriptor)
+ PackageDelete(ctx context.Context, doer *user_model.User, pd *packages_model.PackageDescriptor)
+
+ ChangeDefaultBranch(ctx context.Context, repo *repo_model.Repository)
+}
diff --git a/services/notify/notify.go b/services/notify/notify.go
new file mode 100644
index 0000000..5ed6364
--- /dev/null
+++ b/services/notify/notify.go
@@ -0,0 +1,376 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package notify
+
+import (
+ "context"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ packages_model "code.gitea.io/gitea/models/packages"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/repository"
+)
+
+var notifiers []Notifier
+
+// RegisterNotifier registers a notifier to receive notify messages
+func RegisterNotifier(notifier Notifier) {
+ go notifier.Run()
+ notifiers = append(notifiers, notifier)
+}
+
+// NewWikiPage notifies creating new wiki pages to notifiers
+func NewWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, page, comment string) {
+ for _, notifier := range notifiers {
+ notifier.NewWikiPage(ctx, doer, repo, page, comment)
+ }
+}
+
+// EditWikiPage notifies editing or renaming wiki pages to notifiers
+func EditWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, page, comment string) {
+ for _, notifier := range notifiers {
+ notifier.EditWikiPage(ctx, doer, repo, page, comment)
+ }
+}
+
+// DeleteWikiPage notifies deleting wiki pages to notifiers
+func DeleteWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, page string) {
+ for _, notifier := range notifiers {
+ notifier.DeleteWikiPage(ctx, doer, repo, page)
+ }
+}
+
+// CreateIssueComment notifies issue comment related message to notifiers
+func CreateIssueComment(ctx context.Context, doer *user_model.User, repo *repo_model.Repository,
+ issue *issues_model.Issue, comment *issues_model.Comment, mentions []*user_model.User,
+) {
+ for _, notifier := range notifiers {
+ notifier.CreateIssueComment(ctx, doer, repo, issue, comment, mentions)
+ }
+}
+
+// NewIssue notifies new issue to notifiers
+func NewIssue(ctx context.Context, issue *issues_model.Issue, mentions []*user_model.User) {
+ for _, notifier := range notifiers {
+ notifier.NewIssue(ctx, issue, mentions)
+ }
+}
+
+// IssueChangeStatus notifies close or reopen issue to notifiers
+func IssueChangeStatus(ctx context.Context, doer *user_model.User, commitID string, issue *issues_model.Issue, actionComment *issues_model.Comment, closeOrReopen bool) {
+ for _, notifier := range notifiers {
+ notifier.IssueChangeStatus(ctx, doer, commitID, issue, actionComment, closeOrReopen)
+ }
+}
+
+// DeleteIssue notifies when an issue is deleted
+func DeleteIssue(ctx context.Context, doer *user_model.User, issue *issues_model.Issue) {
+ for _, notifier := range notifiers {
+ notifier.DeleteIssue(ctx, doer, issue)
+ }
+}
+
+// MergePullRequest notifies merge pull request to notifiers
+func MergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
+ for _, notifier := range notifiers {
+ notifier.MergePullRequest(ctx, doer, pr)
+ }
+}
+
+// AutoMergePullRequest notifies auto merging a pull request to notifiers
+func AutoMergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
+ for _, notifier := range notifiers {
+ notifier.AutoMergePullRequest(ctx, doer, pr)
+ }
+}
+
+// NewPullRequest notifies new pull request to notifiers
+func NewPullRequest(ctx context.Context, pr *issues_model.PullRequest, mentions []*user_model.User) {
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("LoadIssue failed: %v", err)
+ return
+ }
+ if err := pr.Issue.LoadPoster(ctx); err != nil {
+ log.Error("LoadPoster failed: %v", err)
+ return
+ }
+ for _, notifier := range notifiers {
+ notifier.NewPullRequest(ctx, pr, mentions)
+ }
+}
+
+// PullRequestSynchronized notifies a synchronized pull request
+func PullRequestSynchronized(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
+ for _, notifier := range notifiers {
+ notifier.PullRequestSynchronized(ctx, doer, pr)
+ }
+}
+
+// PullRequestReview notifies new pull request review
+func PullRequestReview(ctx context.Context, pr *issues_model.PullRequest, review *issues_model.Review, comment *issues_model.Comment, mentions []*user_model.User) {
+ if err := review.LoadReviewer(ctx); err != nil {
+ log.Error("LoadReviewer failed: %v", err)
+ return
+ }
+ for _, notifier := range notifiers {
+ notifier.PullRequestReview(ctx, pr, review, comment, mentions)
+ }
+}
+
+// PullRequestCodeComment notifies new pull request code comment
+func PullRequestCodeComment(ctx context.Context, pr *issues_model.PullRequest, comment *issues_model.Comment, mentions []*user_model.User) {
+ if err := comment.LoadPoster(ctx); err != nil {
+ log.Error("LoadPoster: %v", err)
+ return
+ }
+ for _, notifier := range notifiers {
+ notifier.PullRequestCodeComment(ctx, pr, comment, mentions)
+ }
+}
+
+// PullRequestChangeTargetBranch notifies when a pull request's target branch was changed
+func PullRequestChangeTargetBranch(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest, oldBranch string) {
+ for _, notifier := range notifiers {
+ notifier.PullRequestChangeTargetBranch(ctx, doer, pr, oldBranch)
+ }
+}
+
+// PullRequestPushCommits notifies when commits are pushed to a pull request's head branch
+func PullRequestPushCommits(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest, comment *issues_model.Comment) {
+ for _, notifier := range notifiers {
+ notifier.PullRequestPushCommits(ctx, doer, pr, comment)
+ }
+}
+
+// PullReviewDismiss notifies when a review was dismissed by repo admin
+func PullReviewDismiss(ctx context.Context, doer *user_model.User, review *issues_model.Review, comment *issues_model.Comment) {
+ for _, notifier := range notifiers {
+ notifier.PullReviewDismiss(ctx, doer, review, comment)
+ }
+}
+
+// UpdateComment notifies update comment to notifiers
+func UpdateComment(ctx context.Context, doer *user_model.User, c *issues_model.Comment, oldContent string) {
+ for _, notifier := range notifiers {
+ notifier.UpdateComment(ctx, doer, c, oldContent)
+ }
+}
+
+// DeleteComment notifies delete comment to notifiers
+func DeleteComment(ctx context.Context, doer *user_model.User, c *issues_model.Comment) {
+ for _, notifier := range notifiers {
+ notifier.DeleteComment(ctx, doer, c)
+ }
+}
+
+// NewRelease notifies new release to notifiers
+func NewRelease(ctx context.Context, rel *repo_model.Release) {
+ if err := rel.LoadAttributes(ctx); err != nil {
+ log.Error("LoadPublisher: %v", err)
+ return
+ }
+ for _, notifier := range notifiers {
+ notifier.NewRelease(ctx, rel)
+ }
+}
+
+// UpdateRelease notifies update release to notifiers
+func UpdateRelease(ctx context.Context, doer *user_model.User, rel *repo_model.Release) {
+ for _, notifier := range notifiers {
+ notifier.UpdateRelease(ctx, doer, rel)
+ }
+}
+
+// DeleteRelease notifies delete release to notifiers
+func DeleteRelease(ctx context.Context, doer *user_model.User, rel *repo_model.Release) {
+ for _, notifier := range notifiers {
+ notifier.DeleteRelease(ctx, doer, rel)
+ }
+}
+
+// IssueChangeMilestone notifies change milestone to notifiers
+func IssueChangeMilestone(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldMilestoneID int64) {
+ for _, notifier := range notifiers {
+ notifier.IssueChangeMilestone(ctx, doer, issue, oldMilestoneID)
+ }
+}
+
+// IssueChangeContent notifies change content to notifiers
+func IssueChangeContent(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldContent string) {
+ for _, notifier := range notifiers {
+ notifier.IssueChangeContent(ctx, doer, issue, oldContent)
+ }
+}
+
+// IssueChangeAssignee notifies change assignee to notifiers
+func IssueChangeAssignee(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, assignee *user_model.User, removed bool, comment *issues_model.Comment) {
+ for _, notifier := range notifiers {
+ notifier.IssueChangeAssignee(ctx, doer, issue, assignee, removed, comment)
+ }
+}
+
+// PullRequestReviewRequest notifies a review request change
+func PullRequestReviewRequest(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, reviewer *user_model.User, isRequest bool, comment *issues_model.Comment) {
+ for _, notifier := range notifiers {
+ notifier.PullRequestReviewRequest(ctx, doer, issue, reviewer, isRequest, comment)
+ }
+}
+
+// IssueClearLabels notifies clear labels to notifiers
+func IssueClearLabels(ctx context.Context, doer *user_model.User, issue *issues_model.Issue) {
+ for _, notifier := range notifiers {
+ notifier.IssueClearLabels(ctx, doer, issue)
+ }
+}
+
+// IssueChangeTitle notifies change title to notifiers
+func IssueChangeTitle(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldTitle string) {
+ for _, notifier := range notifiers {
+ notifier.IssueChangeTitle(ctx, doer, issue, oldTitle)
+ }
+}
+
+// IssueChangeRef notifies change reference to notifiers
+func IssueChangeRef(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldRef string) {
+ for _, notifier := range notifiers {
+ notifier.IssueChangeRef(ctx, doer, issue, oldRef)
+ }
+}
+
+// IssueChangeLabels notifies change labels to notifiers
+func IssueChangeLabels(ctx context.Context, doer *user_model.User, issue *issues_model.Issue,
+ addedLabels, removedLabels []*issues_model.Label,
+) {
+ for _, notifier := range notifiers {
+ notifier.IssueChangeLabels(ctx, doer, issue, addedLabels, removedLabels)
+ }
+}
+
+// CreateRepository notifies create repository to notifiers
+func CreateRepository(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository) {
+ for _, notifier := range notifiers {
+ notifier.CreateRepository(ctx, doer, u, repo)
+ }
+}
+
+// AdoptRepository notifies the adoption of a repository to notifiers
+func AdoptRepository(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository) {
+ for _, notifier := range notifiers {
+ notifier.AdoptRepository(ctx, doer, u, repo)
+ }
+}
+
+// MigrateRepository notifies migrating a repository to notifiers
+func MigrateRepository(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository) {
+ for _, notifier := range notifiers {
+ notifier.MigrateRepository(ctx, doer, u, repo)
+ }
+}
+
+// TransferRepository notifies transferring a repository to notifiers
+func TransferRepository(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, oldOwnerName string) {
+ for _, notifier := range notifiers {
+ notifier.TransferRepository(ctx, doer, repo, oldOwnerName)
+ }
+}
+
+// DeleteRepository notifies delete repository to notifiers
+func DeleteRepository(ctx context.Context, doer *user_model.User, repo *repo_model.Repository) {
+ for _, notifier := range notifiers {
+ notifier.DeleteRepository(ctx, doer, repo)
+ }
+}
+
+// ForkRepository notifies fork repository to notifiers
+func ForkRepository(ctx context.Context, doer *user_model.User, oldRepo, repo *repo_model.Repository) {
+ for _, notifier := range notifiers {
+ notifier.ForkRepository(ctx, doer, oldRepo, repo)
+ }
+}
+
+// RenameRepository notifies repository renamed
+func RenameRepository(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, oldName string) {
+ for _, notifier := range notifiers {
+ notifier.RenameRepository(ctx, doer, repo, oldName)
+ }
+}
+
+// PushCommits notifies commits pushed to notifiers
+func PushCommits(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, opts *repository.PushUpdateOptions, commits *repository.PushCommits) {
+ for _, notifier := range notifiers {
+ notifier.PushCommits(ctx, pusher, repo, opts, commits)
+ }
+}
+
+// CreateRef notifies branch or tag creation to notifiers
+func CreateRef(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, refFullName git.RefName, refID string) {
+ for _, notifier := range notifiers {
+ notifier.CreateRef(ctx, pusher, repo, refFullName, refID)
+ }
+}
+
+// DeleteRef notifies branch or tag deletion to notifiers
+func DeleteRef(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, refFullName git.RefName) {
+ for _, notifier := range notifiers {
+ notifier.DeleteRef(ctx, pusher, repo, refFullName)
+ }
+}
+
+// SyncPushCommits notifies commits pushed to notifiers
+func SyncPushCommits(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, opts *repository.PushUpdateOptions, commits *repository.PushCommits) {
+ for _, notifier := range notifiers {
+ notifier.SyncPushCommits(ctx, pusher, repo, opts, commits)
+ }
+}
+
+// SyncCreateRef notifies branch or tag creation to notifiers
+func SyncCreateRef(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, refFullName git.RefName, refID string) {
+ for _, notifier := range notifiers {
+ notifier.SyncCreateRef(ctx, pusher, repo, refFullName, refID)
+ }
+}
+
+// SyncDeleteRef notifies branch or tag deletion to notifiers
+func SyncDeleteRef(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, refFullName git.RefName) {
+ for _, notifier := range notifiers {
+ notifier.SyncDeleteRef(ctx, pusher, repo, refFullName)
+ }
+}
+
+// RepoPendingTransfer notifies creation of pending transfer to notifiers
+func RepoPendingTransfer(ctx context.Context, doer, newOwner *user_model.User, repo *repo_model.Repository) {
+ for _, notifier := range notifiers {
+ notifier.RepoPendingTransfer(ctx, doer, newOwner, repo)
+ }
+}
+
+// NewUserSignUp notifies about a newly signed up user to notifiers
+func NewUserSignUp(ctx context.Context, newUser *user_model.User) {
+ for _, notifier := range notifiers {
+ notifier.NewUserSignUp(ctx, newUser)
+ }
+}
+
+// PackageCreate notifies creation of a package to notifiers
+func PackageCreate(ctx context.Context, doer *user_model.User, pd *packages_model.PackageDescriptor) {
+ for _, notifier := range notifiers {
+ notifier.PackageCreate(ctx, doer, pd)
+ }
+}
+
+// PackageDelete notifies deletion of a package to notifiers
+func PackageDelete(ctx context.Context, doer *user_model.User, pd *packages_model.PackageDescriptor) {
+ for _, notifier := range notifiers {
+ notifier.PackageDelete(ctx, doer, pd)
+ }
+}
+
+// ChangeDefaultBranch notifies change default branch to notifiers
+func ChangeDefaultBranch(ctx context.Context, repo *repo_model.Repository) {
+ for _, notifier := range notifiers {
+ notifier.ChangeDefaultBranch(ctx, repo)
+ }
+}
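RegisterNotifier appends to the package-level notifiers slice and immediately starts the notifier's Run method in its own goroutine, so a notifier that wants asynchronous delivery can buffer events internally while the fan-out functions above stay synchronous. A minimal sketch of that contract from inside this package (queuedNotifier is illustrative):

	type queuedNotifier struct {
		NullNotifier
		events chan string
	}

	// Run drains the buffer asynchronously; RegisterNotifier starts it.
	func (n *queuedNotifier) Run() {
		for ev := range n.events {
			log.Info("notify event: %s", ev)
		}
	}

	func (n *queuedNotifier) ChangeDefaultBranch(ctx context.Context, repo *repo_model.Repository) {
		n.events <- repo.FullName() // non-blocking for callers while the buffer has room
	}

	// Registration: RegisterNotifier(&queuedNotifier{events: make(chan string, 64)})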
diff --git a/services/notify/null.go b/services/notify/null.go
new file mode 100644
index 0000000..894d118
--- /dev/null
+++ b/services/notify/null.go
@@ -0,0 +1,213 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package notify
+
+import (
+ "context"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ packages_model "code.gitea.io/gitea/models/packages"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/repository"
+)
+
+// NullNotifier implements a blank notifier
+type NullNotifier struct{}
+
+var _ Notifier = &NullNotifier{}
+
+// Run places a place holder function
+func (*NullNotifier) Run() {
+}
+
+// CreateIssueComment places a place holder function
+func (*NullNotifier) CreateIssueComment(ctx context.Context, doer *user_model.User, repo *repo_model.Repository,
+ issue *issues_model.Issue, comment *issues_model.Comment, mentions []*user_model.User) {
+}
+
+// NewIssue places a place holder function
+func (*NullNotifier) NewIssue(ctx context.Context, issue *issues_model.Issue, mentions []*user_model.User) {
+}
+
+// IssueChangeStatus places a place holder function
+func (*NullNotifier) IssueChangeStatus(ctx context.Context, doer *user_model.User, commitID string, issue *issues_model.Issue, actionComment *issues_model.Comment, isClosed bool) {
+}
+
+// DeleteIssue notifies when an issue is deleted
+func (*NullNotifier) DeleteIssue(ctx context.Context, doer *user_model.User, issue *issues_model.Issue) {
+}
+
+// NewPullRequest places a place holder function
+func (*NullNotifier) NewPullRequest(ctx context.Context, pr *issues_model.PullRequest, mentions []*user_model.User) {
+}
+
+// PullRequestReview places a place holder function
+func (*NullNotifier) PullRequestReview(ctx context.Context, pr *issues_model.PullRequest, r *issues_model.Review, comment *issues_model.Comment, mentions []*user_model.User) {
+}
+
+// PullRequestCodeComment places a place holder function
+func (*NullNotifier) PullRequestCodeComment(ctx context.Context, pr *issues_model.PullRequest, comment *issues_model.Comment, mentions []*user_model.User) {
+}
+
+// MergePullRequest places a place holder function
+func (*NullNotifier) MergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
+}
+
+// AutoMergePullRequest places a place holder function
+func (*NullNotifier) AutoMergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
+}
+
+// PullRequestSynchronized places a place holder function
+func (*NullNotifier) PullRequestSynchronized(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
+}
+
+// PullRequestChangeTargetBranch places a place holder function
+func (*NullNotifier) PullRequestChangeTargetBranch(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest, oldBranch string) {
+}
+
+// PullRequestPushCommits notifies when commits are pushed to a pull request's head branch
+func (*NullNotifier) PullRequestPushCommits(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest, comment *issues_model.Comment) {
+}
+
+// PullReviewDismiss notifies when a review was dismissed by repo admin
+func (*NullNotifier) PullReviewDismiss(ctx context.Context, doer *user_model.User, review *issues_model.Review, comment *issues_model.Comment) {
+}
+
+// UpdateComment places a place holder function
+func (*NullNotifier) UpdateComment(ctx context.Context, doer *user_model.User, c *issues_model.Comment, oldContent string) {
+}
+
+// DeleteComment places a place holder function
+func (*NullNotifier) DeleteComment(ctx context.Context, doer *user_model.User, c *issues_model.Comment) {
+}
+
+// NewWikiPage places a place holder function
+func (*NullNotifier) NewWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, page, comment string) {
+}
+
+// EditWikiPage places a place holder function
+func (*NullNotifier) EditWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, page, comment string) {
+}
+
+// DeleteWikiPage places a place holder function
+func (*NullNotifier) DeleteWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, page string) {
+}
+
+// NewRelease places a place holder function
+func (*NullNotifier) NewRelease(ctx context.Context, rel *repo_model.Release) {
+}
+
+// UpdateRelease places a place holder function
+func (*NullNotifier) UpdateRelease(ctx context.Context, doer *user_model.User, rel *repo_model.Release) {
+}
+
+// DeleteRelease places a place holder function
+func (*NullNotifier) DeleteRelease(ctx context.Context, doer *user_model.User, rel *repo_model.Release) {
+}
+
+// IssueChangeMilestone places a place holder function
+func (*NullNotifier) IssueChangeMilestone(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldMilestoneID int64) {
+}
+
+// IssueChangeContent places a place holder function
+func (*NullNotifier) IssueChangeContent(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldContent string) {
+}
+
+// IssueChangeAssignee places a place holder function
+func (*NullNotifier) IssueChangeAssignee(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, assignee *user_model.User, removed bool, comment *issues_model.Comment) {
+}
+
+// PullRequestReviewRequest places a place holder function
+func (*NullNotifier) PullRequestReviewRequest(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, reviewer *user_model.User, isRequest bool, comment *issues_model.Comment) {
+}
+
+// IssueClearLabels places a place holder function
+func (*NullNotifier) IssueClearLabels(ctx context.Context, doer *user_model.User, issue *issues_model.Issue) {
+}
+
+// IssueChangeTitle places a place holder function
+func (*NullNotifier) IssueChangeTitle(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldTitle string) {
+}
+
+// IssueChangeRef places a place holder function
+func (*NullNotifier) IssueChangeRef(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldRef string) {
+}
+
+// IssueChangeLabels places a place holder function
+func (*NullNotifier) IssueChangeLabels(ctx context.Context, doer *user_model.User, issue *issues_model.Issue,
+ addedLabels, removedLabels []*issues_model.Label) {
+}
+
+// CreateRepository places a place holder function
+func (*NullNotifier) CreateRepository(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository) {
+}
+
+// AdoptRepository places a place holder function
+func (*NullNotifier) AdoptRepository(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository) {
+}
+
+// DeleteRepository places a place holder function
+func (*NullNotifier) DeleteRepository(ctx context.Context, doer *user_model.User, repo *repo_model.Repository) {
+}
+
+// ForkRepository places a place holder function
+func (*NullNotifier) ForkRepository(ctx context.Context, doer *user_model.User, oldRepo, repo *repo_model.Repository) {
+}
+
+// MigrateRepository places a place holder function
+func (*NullNotifier) MigrateRepository(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository) {
+}
+
+// PushCommits places a place holder function
+func (*NullNotifier) PushCommits(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, opts *repository.PushUpdateOptions, commits *repository.PushCommits) {
+}
+
+// CreateRef places a place holder function
+func (*NullNotifier) CreateRef(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, refFullName git.RefName, refID string) {
+}
+
+// DeleteRef places a place holder function
+func (*NullNotifier) DeleteRef(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, refFullName git.RefName) {
+}
+
+// RenameRepository places a place holder function
+func (*NullNotifier) RenameRepository(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, oldRepoName string) {
+}
+
+// TransferRepository places a place holder function
+func (*NullNotifier) TransferRepository(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, oldOwnerName string) {
+}
+
+// SyncPushCommits places a place holder function
+func (*NullNotifier) SyncPushCommits(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, opts *repository.PushUpdateOptions, commits *repository.PushCommits) {
+}
+
+// SyncCreateRef places a place holder function
+func (*NullNotifier) SyncCreateRef(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, refFullName git.RefName, refID string) {
+}
+
+// SyncDeleteRef places a place holder function
+func (*NullNotifier) SyncDeleteRef(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, refFullName git.RefName) {
+}
+
+// RepoPendingTransfer places a place holder function
+func (*NullNotifier) RepoPendingTransfer(ctx context.Context, doer, newOwner *user_model.User, repo *repo_model.Repository) {
+}
+
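+// NewUserSignUp places a place holder function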
+func (*NullNotifier) NewUserSignUp(ctx context.Context, newUser *user_model.User) {
+}
+
+// PackageCreate places a place holder function
+func (*NullNotifier) PackageCreate(ctx context.Context, doer *user_model.User, pd *packages_model.PackageDescriptor) {
+}
+
+// PackageDelete places a place holder function
+func (*NullNotifier) PackageDelete(ctx context.Context, doer *user_model.User, pd *packages_model.PackageDescriptor) {
+}
+
+// ChangeDefaultBranch places a place holder function
+func (*NullNotifier) ChangeDefaultBranch(ctx context.Context, repo *repo_model.Repository) {
+}
diff --git a/services/org/org.go b/services/org/org.go
new file mode 100644
index 0000000..dca7794
--- /dev/null
+++ b/services/org/org.go
@@ -0,0 +1,76 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package org
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/db"
+ org_model "code.gitea.io/gitea/models/organization"
+ packages_model "code.gitea.io/gitea/models/packages"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/util"
+ repo_service "code.gitea.io/gitea/services/repository"
+)
+
+// DeleteOrganization completely and permanently deletes everything related to the organization.
+func DeleteOrganization(ctx context.Context, org *org_model.Organization, purge bool) error {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ if purge {
+ err := repo_service.DeleteOwnerRepositoriesDirectly(ctx, org.AsUser())
+ if err != nil {
+ return err
+ }
+ }
+
+ // Check ownership of repository.
+ count, err := repo_model.CountRepositories(ctx, repo_model.CountRepositoryOptions{OwnerID: org.ID})
+ if err != nil {
+ return fmt.Errorf("GetRepositoryCount: %w", err)
+ } else if count > 0 {
+ return models.ErrUserOwnRepos{UID: org.ID}
+ }
+
+ // Check ownership of packages.
+ if ownsPackages, err := packages_model.HasOwnerPackages(ctx, org.ID); err != nil {
+ return fmt.Errorf("HasOwnerPackages: %w", err)
+ } else if ownsPackages {
+ return models.ErrUserOwnPackages{UID: org.ID}
+ }
+
+ if err := org_model.DeleteOrganization(ctx, org); err != nil {
+ return fmt.Errorf("DeleteOrganization: %w", err)
+ }
+
+ if err := committer.Commit(); err != nil {
+ return err
+ }
+
+ // FIXME: system notice
+ // Note: there are some things that cannot be rolled back,
+ // so just keep error logs of those operations.
+ path := user_model.UserPath(org.Name)
+
+ if err := util.RemoveAll(path); err != nil {
+ return fmt.Errorf("failed to RemoveAll %s: %w", path, err)
+ }
+
+ if len(org.Avatar) > 0 {
+ avatarPath := org.CustomAvatarRelativePath()
+ if err := storage.Avatars.Delete(avatarPath); err != nil {
+ return fmt.Errorf("failed to remove %s: %w", avatarPath, err)
+ }
+ }
+
+ return nil
+}
diff --git a/services/org/org_test.go b/services/org/org_test.go
new file mode 100644
index 0000000..0735843
--- /dev/null
+++ b/services/org/org_test.go
@@ -0,0 +1,39 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package org
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/organization"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
+
+func TestDeleteOrganization(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ org := unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 6})
+ require.NoError(t, DeleteOrganization(db.DefaultContext, org, false))
+ unittest.AssertNotExistsBean(t, &organization.Organization{ID: 6})
+ unittest.AssertNotExistsBean(t, &organization.OrgUser{OrgID: 6})
+ unittest.AssertNotExistsBean(t, &organization.Team{OrgID: 6})
+
+ org = unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 3})
+ err := DeleteOrganization(db.DefaultContext, org, false)
+ require.Error(t, err)
+ assert.True(t, models.IsErrUserOwnRepos(err))
+
+ user := unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 5})
+ require.Error(t, DeleteOrganization(db.DefaultContext, user, false))
+ unittest.CheckConsistencyFor(t, &user_model.User{}, &organization.Team{})
+}
diff --git a/services/org/repo.go b/services/org/repo.go
new file mode 100644
index 0000000..78a829e
--- /dev/null
+++ b/services/org/repo.go
@@ -0,0 +1,27 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package org
+
+import (
+ "context"
+ "errors"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/organization"
+ repo_model "code.gitea.io/gitea/models/repo"
+)
+
+// TeamAddRepository adds a new repository to a team of an organization.
+func TeamAddRepository(ctx context.Context, t *organization.Team, repo *repo_model.Repository) (err error) {
+ if repo.OwnerID != t.OrgID {
+ return errors.New("repository does not belong to organization")
+ } else if organization.HasTeamRepo(ctx, t.OrgID, t.ID, repo.ID) {
+ return nil
+ }
+
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ return models.AddRepository(ctx, t, repo)
+ })
+}
diff --git a/services/org/repo_test.go b/services/org/repo_test.go
new file mode 100644
index 0000000..2ddb8f9
--- /dev/null
+++ b/services/org/repo_test.go
@@ -0,0 +1,34 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package org
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/organization"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestTeam_AddRepository(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ testSuccess := func(teamID, repoID int64) {
+ team := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: teamID})
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: repoID})
+ require.NoError(t, TeamAddRepository(db.DefaultContext, team, repo))
+ unittest.AssertExistsAndLoadBean(t, &organization.TeamRepo{TeamID: teamID, RepoID: repoID})
+ unittest.CheckConsistencyFor(t, &organization.Team{ID: teamID}, &repo_model.Repository{ID: repoID})
+ }
+ testSuccess(2, 3)
+ testSuccess(2, 5)
+
+ team := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: 1})
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ require.Error(t, TeamAddRepository(db.DefaultContext, team, repo))
+ unittest.CheckConsistencyFor(t, &organization.Team{ID: 1}, &repo_model.Repository{ID: 1})
+}
diff --git a/services/org/team_invite.go b/services/org/team_invite.go
new file mode 100644
index 0000000..3f28044
--- /dev/null
+++ b/services/org/team_invite.go
@@ -0,0 +1,22 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package org
+
+import (
+ "context"
+
+ org_model "code.gitea.io/gitea/models/organization"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/services/mailer"
+)
+
+// CreateTeamInvite creates a persistent invite in the DB and mails it
+func CreateTeamInvite(ctx context.Context, inviter *user_model.User, team *org_model.Team, uname string) error {
+ invite, err := org_model.CreateTeamInvite(ctx, inviter, team, uname)
+ if err != nil {
+ return err
+ }
+
+ return mailer.MailTeamInvite(ctx, inviter, team, invite)
+}
diff --git a/services/packages/alpine/repository.go b/services/packages/alpine/repository.go
new file mode 100644
index 0000000..92f475b
--- /dev/null
+++ b/services/packages/alpine/repository.go
@@ -0,0 +1,337 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package alpine
+
+import (
+ "archive/tar"
+ "bytes"
+ "compress/gzip"
+ "context"
+ "crypto"
+ "crypto/rand"
+ "crypto/rsa"
+ "crypto/sha1"
+ "crypto/x509"
+ "encoding/hex"
+ "encoding/pem"
+ "errors"
+ "fmt"
+ "io"
+ "strings"
+
+ packages_model "code.gitea.io/gitea/models/packages"
+ alpine_model "code.gitea.io/gitea/models/packages/alpine"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/json"
+ packages_module "code.gitea.io/gitea/modules/packages"
+ alpine_module "code.gitea.io/gitea/modules/packages/alpine"
+ "code.gitea.io/gitea/modules/util"
+ packages_service "code.gitea.io/gitea/services/packages"
+)
+
+const (
+ IndexFilename = "APKINDEX"
+ IndexArchiveFilename = IndexFilename + ".tar.gz"
+)
+
+// GetOrCreateRepositoryVersion gets or creates the internal repository package
+// The Alpine registry needs multiple index files which are stored in this package.
+func GetOrCreateRepositoryVersion(ctx context.Context, ownerID int64) (*packages_model.PackageVersion, error) {
+ return packages_service.GetOrCreateInternalPackageVersion(ctx, ownerID, packages_model.TypeAlpine, alpine_module.RepositoryPackage, alpine_module.RepositoryVersion)
+}
+
+// GetOrCreateKeyPair gets or creates the RSA keys used to sign repository files
+func GetOrCreateKeyPair(ctx context.Context, ownerID int64) (string, string, error) {
+ priv, err := user_model.GetSetting(ctx, ownerID, alpine_module.SettingKeyPrivate)
+ if err != nil && !errors.Is(err, util.ErrNotExist) {
+ return "", "", err
+ }
+
+ pub, err := user_model.GetSetting(ctx, ownerID, alpine_module.SettingKeyPublic)
+ if err != nil && !errors.Is(err, util.ErrNotExist) {
+ return "", "", err
+ }
+
+ if priv == "" || pub == "" {
+ priv, pub, err = util.GenerateKeyPair(4096)
+ if err != nil {
+ return "", "", err
+ }
+
+ if err := user_model.SetUserSetting(ctx, ownerID, alpine_module.SettingKeyPrivate, priv); err != nil {
+ return "", "", err
+ }
+
+ if err := user_model.SetUserSetting(ctx, ownerID, alpine_module.SettingKeyPublic, pub); err != nil {
+ return "", "", err
+ }
+ }
+
+ return priv, pub, nil
+}
+
+// BuildAllRepositoryFiles (re)builds all repository files for every available distribution, component and architecture
+func BuildAllRepositoryFiles(ctx context.Context, ownerID int64) error {
+ pv, err := GetOrCreateRepositoryVersion(ctx, ownerID)
+ if err != nil {
+ return err
+ }
+
+ // 1. Delete all existing repository files
+ pfs, err := packages_model.GetFilesByVersionID(ctx, pv.ID)
+ if err != nil {
+ return err
+ }
+
+ for _, pf := range pfs {
+ if err := packages_service.DeletePackageFile(ctx, pf); err != nil {
+ return err
+ }
+ }
+
+ // 2. (Re)Build repository files for existing packages
+ branches, err := alpine_model.GetBranches(ctx, ownerID)
+ if err != nil {
+ return err
+ }
+ for _, branch := range branches {
+ repositories, err := alpine_model.GetRepositories(ctx, ownerID, branch)
+ if err != nil {
+ return err
+ }
+ for _, repository := range repositories {
+ architectures, err := alpine_model.GetArchitectures(ctx, ownerID, repository)
+ if err != nil {
+ return err
+ }
+ for _, architecture := range architectures {
+ if err := buildPackagesIndex(ctx, ownerID, pv, branch, repository, architecture); err != nil {
+ return fmt.Errorf("failed to build repository files [%s/%s/%s]: %w", branch, repository, architecture, err)
+ }
+ }
+ }
+ }
+
+ return nil
+}
+
+// BuildSpecificRepositoryFiles builds index files for the repository
+func BuildSpecificRepositoryFiles(ctx context.Context, ownerID int64, branch, repository, architecture string) error {
+ pv, err := GetOrCreateRepositoryVersion(ctx, ownerID)
+ if err != nil {
+ return err
+ }
+
+ return buildPackagesIndex(ctx, ownerID, pv, branch, repository, architecture)
+}
+
+type packageData struct {
+ Package *packages_model.Package
+ Version *packages_model.PackageVersion
+ Blob *packages_model.PackageBlob
+ VersionMetadata *alpine_module.VersionMetadata
+ FileMetadata *alpine_module.FileMetadata
+}
+
+type packageCache = map[*packages_model.PackageFile]*packageData
+
+// https://wiki.alpinelinux.org/wiki/Apk_spec#APKINDEX_Format
+func buildPackagesIndex(ctx context.Context, ownerID int64, repoVersion *packages_model.PackageVersion, branch, repository, architecture string) error {
+ pfs, _, err := packages_model.SearchFiles(ctx, &packages_model.PackageFileSearchOptions{
+ OwnerID: ownerID,
+ PackageType: packages_model.TypeAlpine,
+ Query: "%.apk",
+ Properties: map[string]string{
+ alpine_module.PropertyBranch: branch,
+ alpine_module.PropertyRepository: repository,
+ alpine_module.PropertyArchitecture: architecture,
+ },
+ })
+ if err != nil {
+ return err
+ }
+
+ // Delete the package indices if there are no packages
+ if len(pfs) == 0 {
+ pf, err := packages_model.GetFileForVersionByName(ctx, repoVersion.ID, IndexArchiveFilename, fmt.Sprintf("%s|%s|%s", branch, repository, architecture))
+ if err != nil && !errors.Is(err, util.ErrNotExist) {
+ return err
+ } else if pf == nil {
+ return nil
+ }
+
+ return packages_service.DeletePackageFile(ctx, pf)
+ }
+
+ // Cache data needed for all repository files
+ cache := make(packageCache)
+ for _, pf := range pfs {
+ pv, err := packages_model.GetVersionByID(ctx, pf.VersionID)
+ if err != nil {
+ return err
+ }
+ p, err := packages_model.GetPackageByID(ctx, pv.PackageID)
+ if err != nil {
+ return err
+ }
+ pb, err := packages_model.GetBlobByID(ctx, pf.BlobID)
+ if err != nil {
+ return err
+ }
+ pps, err := packages_model.GetPropertiesByName(ctx, packages_model.PropertyTypeFile, pf.ID, alpine_module.PropertyMetadata)
+ if err != nil {
+ return err
+ }
+
+ pd := &packageData{
+ Package: p,
+ Version: pv,
+ Blob: pb,
+ }
+
+ if err := json.Unmarshal([]byte(pv.MetadataJSON), &pd.VersionMetadata); err != nil {
+ return err
+ }
+ if len(pps) > 0 {
+ if err := json.Unmarshal([]byte(pps[0].Value), &pd.FileMetadata); err != nil {
+ return err
+ }
+ }
+
+ cache[pf] = pd
+ }
+
+ var buf bytes.Buffer
+ for _, pf := range pfs {
+ pd := cache[pf]
+
+ fmt.Fprintf(&buf, "C:%s\n", pd.FileMetadata.Checksum)
+ fmt.Fprintf(&buf, "P:%s\n", pd.Package.Name)
+ fmt.Fprintf(&buf, "V:%s\n", pd.Version.Version)
+ fmt.Fprintf(&buf, "A:%s\n", pd.FileMetadata.Architecture)
+ if pd.VersionMetadata.Description != "" {
+ fmt.Fprintf(&buf, "T:%s\n", pd.VersionMetadata.Description)
+ }
+ if pd.VersionMetadata.ProjectURL != "" {
+ fmt.Fprintf(&buf, "U:%s\n", pd.VersionMetadata.ProjectURL)
+ }
+ if pd.VersionMetadata.License != "" {
+ fmt.Fprintf(&buf, "L:%s\n", pd.VersionMetadata.License)
+ }
+ fmt.Fprintf(&buf, "S:%d\n", pd.Blob.Size)
+ fmt.Fprintf(&buf, "I:%d\n", pd.FileMetadata.Size)
+ fmt.Fprintf(&buf, "o:%s\n", pd.FileMetadata.Origin)
+ fmt.Fprintf(&buf, "m:%s\n", pd.VersionMetadata.Maintainer)
+ fmt.Fprintf(&buf, "t:%d\n", pd.FileMetadata.BuildDate)
+ if pd.FileMetadata.CommitHash != "" {
+ fmt.Fprintf(&buf, "c:%s\n", pd.FileMetadata.CommitHash)
+ }
+ if len(pd.FileMetadata.Dependencies) > 0 {
+ fmt.Fprintf(&buf, "D:%s\n", strings.Join(pd.FileMetadata.Dependencies, " "))
+ }
+ if len(pd.FileMetadata.Provides) > 0 {
+ fmt.Fprintf(&buf, "p:%s\n", strings.Join(pd.FileMetadata.Provides, " "))
+ }
+ if pd.FileMetadata.InstallIf != "" {
+ fmt.Fprintf(&buf, "i:%s\n", pd.FileMetadata.InstallIf)
+ }
+ if pd.FileMetadata.ProviderPriority > 0 {
+ fmt.Fprintf(&buf, "k:%d\n", pd.FileMetadata.ProviderPriority)
+ }
+ fmt.Fprint(&buf, "\n")
+ }
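+
+ // Illustrative APKINDEX stanza as written by the loop above (values are
+ // made up; the C: checksum is apk's "Q1" + base64(SHA-1) encoding):
+ //
+ //  C:Q1nU6vLp8RxkFyHgXbZ0...=
+ //  P:gitea
+ //  V:1.22.0-r0
+ //  A:x86_64
+ //  S:12345
+ //  I:45678
+ //  o:gitea
+ //  m:John Doe <john@example.com>
+ //  t:1712345678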
+
+ unsignedIndexContent, _ := packages_module.NewHashedBuffer()
+ defer unsignedIndexContent.Close()
+
+ h := sha1.New()
+
+ if err := writeGzipStream(io.MultiWriter(unsignedIndexContent, h), IndexFilename, buf.Bytes(), true); err != nil {
+ return err
+ }
+
+ priv, _, err := GetOrCreateKeyPair(ctx, ownerID)
+ if err != nil {
+ return err
+ }
+
+ privPem, _ := pem.Decode([]byte(priv))
+ if privPem == nil {
+ return fmt.Errorf("failed to decode private key pem")
+ }
+
+ privKey, err := x509.ParsePKCS1PrivateKey(privPem.Bytes)
+ if err != nil {
+ return err
+ }
+
+ sign, err := rsa.SignPKCS1v15(rand.Reader, privKey, crypto.SHA1, h.Sum(nil))
+ if err != nil {
+ return err
+ }
+
+ owner, err := user_model.GetUserByID(ctx, ownerID)
+ if err != nil {
+ return err
+ }
+
+ fingerprint, err := util.CreatePublicKeyFingerprint(&privKey.PublicKey)
+ if err != nil {
+ return err
+ }
+
+ signedIndexContent, _ := packages_module.NewHashedBuffer()
+ defer signedIndexContent.Close()
+
+ if err := writeGzipStream(
+ signedIndexContent,
+ fmt.Sprintf(".SIGN.RSA.%s@%s.rsa.pub", owner.LowerName, hex.EncodeToString(fingerprint)),
+ sign,
+ false,
+ ); err != nil {
+ return err
+ }
+
+ if _, err := io.Copy(signedIndexContent, unsignedIndexContent); err != nil {
+ return err
+ }
+
+ _, err = packages_service.AddFileToPackageVersionInternal(
+ ctx,
+ repoVersion,
+ &packages_service.PackageFileCreationInfo{
+ PackageFileInfo: packages_service.PackageFileInfo{
+ Filename: IndexArchiveFilename,
+ CompositeKey: fmt.Sprintf("%s|%s|%s", branch, repository, architecture),
+ },
+ Creator: user_model.NewGhostUser(),
+ Data: signedIndexContent,
+ IsLead: false,
+ OverwriteExisting: true,
+ },
+ )
+ return err
+}
+
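+// writeGzipStream writes a gzip-compressed tar stream containing a single
+// file. addTarEnd controls whether the tar end-of-archive marker is written:
+// the signature segment of a signed index is written without it, so the
+// resulting APKINDEX.tar.gz is a plain concatenation of two gzip streams
+// (signature + index) as apk expects.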
+func writeGzipStream(w io.Writer, filename string, content []byte, addTarEnd bool) error {
+ zw := gzip.NewWriter(w)
+ defer zw.Close()
+
+ tw := tar.NewWriter(zw)
+ if addTarEnd {
+ defer tw.Close()
+ }
+ hdr := &tar.Header{
+ Name: filename,
+ Mode: 0o600,
+ Size: int64(len(content)),
+ }
+ if err := tw.WriteHeader(hdr); err != nil {
+ return err
+ }
+ if _, err := tw.Write(content); err != nil {
+ return err
+ }
+ return nil
+}
diff --git a/services/packages/arch/repository.go b/services/packages/arch/repository.go
new file mode 100644
index 0000000..763a0a2
--- /dev/null
+++ b/services/packages/arch/repository.go
@@ -0,0 +1,368 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package arch
+
+import (
+ "archive/tar"
+ "compress/gzip"
+ "context"
+ "errors"
+ "fmt"
+ "io"
+ "net/url"
+ "os"
+ "path/filepath"
+ "sort"
+ "strings"
+
+ packages_model "code.gitea.io/gitea/models/packages"
+ user_model "code.gitea.io/gitea/models/user"
+ packages_module "code.gitea.io/gitea/modules/packages"
+ arch_module "code.gitea.io/gitea/modules/packages/arch"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/sync"
+ "code.gitea.io/gitea/modules/util"
+ packages_service "code.gitea.io/gitea/services/packages"
+
+ "github.com/ProtonMail/go-crypto/openpgp"
+ "github.com/ProtonMail/go-crypto/openpgp/armor"
+ "github.com/ProtonMail/go-crypto/openpgp/packet"
+)
+
+var locker = sync.NewExclusivePool()
+
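+// GetOrCreateRepositoryVersion gets or creates the internal repository package.
+// The Arch registry stores the generated pacman database files in this package.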
+func GetOrCreateRepositoryVersion(ctx context.Context, ownerID int64) (*packages_model.PackageVersion, error) {
+ return packages_service.GetOrCreateInternalPackageVersion(ctx, ownerID, packages_model.TypeArch, arch_module.RepositoryPackage, arch_module.RepositoryVersion)
+}
+
+func BuildAllRepositoryFiles(ctx context.Context, ownerID int64) error {
+ pv, err := GetOrCreateRepositoryVersion(ctx, ownerID)
+ if err != nil {
+ return err
+ }
+ // rebuild the pacman database for every architecture that has an existing db file
+ pfs, err := packages_model.GetFilesByVersionID(ctx, pv.ID)
+ if err != nil {
+ return err
+ }
+ for _, pf := range pfs {
+ if strings.HasSuffix(pf.Name, ".db") {
+ arch := strings.TrimSuffix(pf.Name, ".db")
+ if err := BuildPacmanDB(ctx, ownerID, pf.CompositeKey, arch); err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+}
+
+func BuildCustomRepositoryFiles(ctx context.Context, ownerID int64, disco string) error {
+ pv, err := GetOrCreateRepositoryVersion(ctx, ownerID)
+ if err != nil {
+ return err
+ }
+ // rebuild the pacman database for every db file that belongs to the given group
+ pfs, err := packages_model.GetFilesByVersionID(ctx, pv.ID)
+ if err != nil {
+ return err
+ }
+ for _, pf := range pfs {
+ if strings.HasSuffix(pf.Name, ".db") && pf.CompositeKey == disco {
+ arch := strings.TrimSuffix(strings.TrimPrefix(pf.Name, fmt.Sprintf("%s-", pf.CompositeKey)), ".db")
+ if err := BuildPacmanDB(ctx, ownerID, pf.CompositeKey, arch); err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+}
+
+func NewFileSign(ctx context.Context, ownerID int64, input io.Reader) (*packages_module.HashedBuffer, error) {
+ // If no signature is specified, it will be generated by Gitea.
+ priv, _, err := GetOrCreateKeyPair(ctx, ownerID)
+ if err != nil {
+ return nil, err
+ }
+ block, err := armor.Decode(strings.NewReader(priv))
+ if err != nil {
+ return nil, err
+ }
+ e, err := openpgp.ReadEntity(packet.NewReader(block.Body))
+ if err != nil {
+ return nil, err
+ }
+ pkgSig, err := packages_module.NewHashedBuffer()
+ if err != nil {
+ return nil, err
+ }
+ defer pkgSig.Close()
+ if err := openpgp.DetachSign(pkgSig, e, input, nil); err != nil {
+ return nil, err
+ }
+ return pkgSig, nil
+}
+
+// BuildPacmanDB (re)builds the pacman database and its signature for the given group and architecture
+func BuildPacmanDB(ctx context.Context, ownerID int64, group, arch string) error {
+ key := fmt.Sprintf("pkg_%d_arch_db_%s", ownerID, group)
+ locker.CheckIn(key)
+ defer locker.CheckOut(key)
+ pv, err := GetOrCreateRepositoryVersion(ctx, ownerID)
+ if err != nil {
+ return err
+ }
+ // remove old db files
+ pfs, err := packages_model.GetFilesByVersionID(ctx, pv.ID)
+ if err != nil {
+ return err
+ }
+ for _, pf := range pfs {
+ if pf.CompositeKey == group && pf.Name == fmt.Sprintf("%s.db", arch) {
+ // remove group and arch
+ if err := packages_service.DeletePackageFile(ctx, pf); err != nil {
+ return err
+ }
+ }
+ }
+
+ db, err := createDB(ctx, ownerID, group, arch)
+ if errors.Is(err, io.EOF) {
+ return nil
+ } else if err != nil {
+ return err
+ }
+ defer db.Close()
+ // sign the new database file
+ _, err = db.Seek(0, io.SeekStart)
+ if err != nil {
+ return err
+ }
+ sig, err := NewFileSign(ctx, ownerID, db)
+ if err != nil {
+ return err
+ }
+ defer sig.Close()
+ _, err = db.Seek(0, io.SeekStart)
+ if err != nil {
+ return err
+ }
+ for name, data := range map[string]*packages_module.HashedBuffer{
+ fmt.Sprintf("%s.db", arch): db,
+ fmt.Sprintf("%s.db.sig", arch): sig,
+ } {
+ _, err = packages_service.AddFileToPackageVersionInternal(ctx, pv, &packages_service.PackageFileCreationInfo{
+ PackageFileInfo: packages_service.PackageFileInfo{
+ Filename: name,
+ CompositeKey: group,
+ },
+ Creator: user_model.NewGhostUser(),
+ Data: data,
+ IsLead: false,
+ OverwriteExisting: true,
+ })
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func createDB(ctx context.Context, ownerID int64, group, arch string) (*packages_module.HashedBuffer, error) {
+ pkgs, err := packages_model.GetPackagesByType(ctx, ownerID, packages_model.TypeArch)
+ if err != nil {
+ return nil, err
+ }
+ if len(pkgs) == 0 {
+ return nil, io.EOF
+ }
+ db, err := packages_module.NewHashedBuffer()
+ if err != nil {
+ return nil, err
+ }
+ defer db.Close()
+ gw := gzip.NewWriter(db)
+ defer gw.Close()
+ tw := tar.NewWriter(gw)
+ defer tw.Close()
+ count := 0
+ for _, pkg := range pkgs {
+ versions, err := packages_model.GetVersionsByPackageName(
+ ctx, ownerID, packages_model.TypeArch, pkg.Name,
+ )
+ if err != nil {
+ return nil, err
+ }
+ sort.Slice(versions, func(i, j int) bool {
+ return versions[i].CreatedUnix > versions[j].CreatedUnix
+ })
+
+ for _, ver := range versions {
+ files, err := packages_model.GetFilesByVersionID(ctx, ver.ID)
+ if err != nil {
+ return nil, err
+ }
+ var pf *packages_model.PackageFile
+ for _, file := range files {
+ ext := filepath.Ext(file.Name)
+ if file.CompositeKey == group && ext != "" && ext != ".db" && ext != ".sig" {
+ if pf == nil && strings.HasSuffix(file.Name, fmt.Sprintf("any.pkg.tar%s", ext)) {
+ pf = file
+ }
+ if strings.HasSuffix(file.Name, fmt.Sprintf("%s.pkg.tar%s", arch, ext)) {
+ pf = file
+ break
+ }
+ }
+ }
+ if pf == nil {
+ // no matching package file exists for this version
+ continue
+ }
+ pps, err := packages_model.GetPropertiesByName(
+ ctx, packages_model.PropertyTypeFile, pf.ID, arch_module.PropertyDescription,
+ )
+ if err != nil {
+ return nil, err
+ }
+ if len(pps) >= 1 {
+ meta := []byte(pps[0].Value)
+ header := &tar.Header{
+ Name: pkg.Name + "-" + ver.Version + "/desc",
+ Size: int64(len(meta)),
+ Mode: int64(os.ModePerm),
+ }
+ if err = tw.WriteHeader(header); err != nil {
+ return nil, err
+ }
+ if _, err := tw.Write(meta); err != nil {
+ return nil, err
+ }
+ count++
+ break
+ }
+ }
+ }
+ if count == 0 {
+ return nil, io.EOF
+ }
+ return db, nil
+}
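+
+// The database built above is a gzip-compressed tar archive containing one
+// "<name>-<version>/desc" entry per package, which is the layout pacman
+// expects of a repository database. Illustrative listing (hypothetical
+// package names):
+//
+//  mypkg-1.0.0-1/desc
+//  otherpkg-2.3.4-2/desc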
+
+// GetPackageFile returns the content of the package file matching the
+// provided filename and group, and updates the download counter.
+func GetPackageFile(ctx context.Context, group, file string, ownerID int64) (io.ReadSeekCloser, *url.URL, *packages_model.PackageFile, error) {
+ fileSplit := strings.Split(file, "-")
+ if len(fileSplit) <= 3 {
+ return nil, nil, nil, errors.New("invalid file format, need <name>-<version>-<release>-<arch>.pkg.<archive>")
+ }
+ var (
+ pkgName = strings.Join(fileSplit[0:len(fileSplit)-3], "-")
+ pkgVer = fileSplit[len(fileSplit)-3] + "-" + fileSplit[len(fileSplit)-2]
+ )
+ version, err := packages_model.GetVersionByNameAndVersion(ctx, ownerID, packages_model.TypeArch, pkgName, pkgVer)
+ if err != nil {
+ return nil, nil, nil, err
+ }
+
+ pkgFile, err := packages_model.GetFileForVersionByName(ctx, version.ID, file, group)
+ if err != nil {
+ return nil, nil, nil, err
+ }
+
+ return packages_service.GetPackageFileStream(ctx, pkgFile)
+}
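+
+// Illustrative parse of the filename split above (hypothetical package):
+// "mypkg-1.0.0-1-x86_64.pkg.tar.zst" yields pkgName "mypkg" and pkgVer
+// "1.0.0-1". Names containing dashes work because only the last three
+// dash-separated fields are treated as version, release and architecture.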
+
+func GetPackageDBFile(ctx context.Context, ownerID int64, group, arch string, sigFile bool) (io.ReadSeekCloser, *url.URL, *packages_model.PackageFile, error) {
+ pv, err := GetOrCreateRepositoryVersion(ctx, ownerID)
+ if err != nil {
+ return nil, nil, nil, err
+ }
+ fileName := fmt.Sprintf("%s.db", arch)
+ if sigFile {
+ fileName = fmt.Sprintf("%s.db.sig", arch)
+ }
+ file, err := packages_model.GetFileForVersionByName(ctx, pv.ID, fileName, group)
+ // fall back to the "any" architecture db
+ if errors.Is(err, util.ErrNotExist) && arch != "any" {
+ fileName = "any.db"
+ if sigFile {
+ fileName = "any.db.sig"
+ }
+ file, err = packages_model.GetFileForVersionByName(ctx, pv.ID, fileName, group)
+ }
+ if err != nil {
+ return nil, nil, nil, err
+ }
+ return packages_service.GetPackageFileStream(ctx, file)
+}
+
+// GetOrCreateKeyPair gets or creates the PGP keys used to sign repository metadata files
+func GetOrCreateKeyPair(ctx context.Context, ownerID int64) (string, string, error) {
+ priv, err := user_model.GetSetting(ctx, ownerID, arch_module.SettingKeyPrivate)
+ if err != nil && !errors.Is(err, util.ErrNotExist) {
+ return "", "", err
+ }
+
+ pub, err := user_model.GetSetting(ctx, ownerID, arch_module.SettingKeyPublic)
+ if err != nil && !errors.Is(err, util.ErrNotExist) {
+ return "", "", err
+ }
+
+ if priv == "" || pub == "" {
+ user, err := user_model.GetUserByID(ctx, ownerID)
+ if err != nil && !errors.Is(err, util.ErrNotExist) {
+ return "", "", err
+ }
+
+ priv, pub, err = generateKeypair(user.Name)
+ if err != nil {
+ return "", "", err
+ }
+
+ if err := user_model.SetUserSetting(ctx, ownerID, arch_module.SettingKeyPrivate, priv); err != nil {
+ return "", "", err
+ }
+
+ if err := user_model.SetUserSetting(ctx, ownerID, arch_module.SettingKeyPublic, pub); err != nil {
+ return "", "", err
+ }
+ }
+
+ return priv, pub, nil
+}
+
+func generateKeypair(owner string) (string, string, error) {
+ e, err := openpgp.NewEntity(
+ owner,
+ "Arch Package signature only",
+ fmt.Sprintf("%s@noreply.%s", owner, setting.Packages.RegistryHost), &packet.Config{
+ RSABits: 4096,
+ })
+ if err != nil {
+ return "", "", err
+ }
+
+ var priv strings.Builder
+ var pub strings.Builder
+
+ w, err := armor.Encode(&priv, openpgp.PrivateKeyType, nil)
+ if err != nil {
+ return "", "", err
+ }
+ if err := e.SerializePrivate(w, nil); err != nil {
+ return "", "", err
+ }
+ w.Close()
+
+ w, err = armor.Encode(&pub, openpgp.PublicKeyType, nil)
+ if err != nil {
+ return "", "", err
+ }
+ if err := e.Serialize(w); err != nil {
+ return "", "", err
+ }
+ w.Close()
+
+ return priv.String(), pub.String(), nil
+}
diff --git a/services/packages/auth.go b/services/packages/auth.go
new file mode 100644
index 0000000..c5bf5af
--- /dev/null
+++ b/services/packages/auth.go
@@ -0,0 +1,75 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package packages
+
+import (
+ "fmt"
+ "net/http"
+ "strings"
+ "time"
+
+ auth_model "code.gitea.io/gitea/models/auth"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/golang-jwt/jwt/v5"
+)
+
+type packageClaims struct {
+ jwt.RegisteredClaims
+ UserID int64
+ Scope auth_model.AccessTokenScope
+}
+
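+// CreateAuthorizationToken creates a short-lived (24 hours) JWT carrying the
+// user id and the access token scope, signed with the general token signing
+// secret.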
+func CreateAuthorizationToken(u *user_model.User, scope auth_model.AccessTokenScope) (string, error) {
+ now := time.Now()
+
+ claims := packageClaims{
+ RegisteredClaims: jwt.RegisteredClaims{
+ ExpiresAt: jwt.NewNumericDate(now.Add(24 * time.Hour)),
+ NotBefore: jwt.NewNumericDate(now),
+ },
+ UserID: u.ID,
+ Scope: scope,
+ }
+ token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
+
+ tokenString, err := token.SignedString(setting.GetGeneralTokenSigningSecret())
+ if err != nil {
+ return "", err
+ }
+
+ return tokenString, nil
+}
+
+func ParseAuthorizationToken(req *http.Request) (int64, auth_model.AccessTokenScope, error) {
+ h := req.Header.Get("Authorization")
+ if h == "" {
+ return 0, "", nil
+ }
+
+ parts := strings.SplitN(h, " ", 2)
+ if len(parts) != 2 {
+ log.Error("split token failed: %s", h)
+ return 0, "", fmt.Errorf("split token failed")
+ }
+
+ token, err := jwt.ParseWithClaims(parts[1], &packageClaims{}, func(t *jwt.Token) (any, error) {
+ if _, ok := t.Method.(*jwt.SigningMethodHMAC); !ok {
+ return nil, fmt.Errorf("unexpected signing method: %v", t.Header["alg"])
+ }
+ return setting.GetGeneralTokenSigningSecret(), nil
+ })
+ if err != nil {
+ return 0, "", err
+ }
+
+ c, ok := token.Claims.(*packageClaims)
+ if !token.Valid || !ok {
+ return 0, "", fmt.Errorf("invalid token claim")
+ }
+
+ return c.UserID, c.Scope, nil
+}
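+
+// Illustrative round trip (assumed usage; "Bearer" is only an example scheme,
+// the split above accepts any single-word prefix):
+//
+//  token, _ := CreateAuthorizationToken(user, auth_model.AccessTokenScopeReadPackage)
+//  req.Header.Set("Authorization", "Bearer "+token)
+//  userID, scope, _ := ParseAuthorizationToken(req)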
diff --git a/services/packages/cargo/index.go b/services/packages/cargo/index.go
new file mode 100644
index 0000000..59823cd
--- /dev/null
+++ b/services/packages/cargo/index.go
@@ -0,0 +1,315 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cargo
+
+import (
+ "bytes"
+ "context"
+ "errors"
+ "fmt"
+ "io"
+ "path"
+ "strconv"
+ "time"
+
+ packages_model "code.gitea.io/gitea/models/packages"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/json"
+ cargo_module "code.gitea.io/gitea/modules/packages/cargo"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ repo_service "code.gitea.io/gitea/services/repository"
+ files_service "code.gitea.io/gitea/services/repository/files"
+)
+
+const (
+ IndexRepositoryName = "_cargo-index"
+ ConfigFileName = "config.json"
+)
+
+// https://doc.rust-lang.org/cargo/reference/registries.html#index-format
+
+func BuildPackagePath(name string) string {
+ switch len(name) {
+ case 0:
+ panic("Cargo package name can not be empty")
+ case 1:
+ return path.Join("1", name)
+ case 2:
+ return path.Join("2", name)
+ case 3:
+ return path.Join("3", string(name[0]), name)
+ default:
+ return path.Join(name[0:2], name[2:4], name)
+ }
+}
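+
+// Examples of the layout produced above, matching the Cargo index format:
+// "a" -> "1/a", "ab" -> "2/ab", "abc" -> "3/a/abc", "serde" -> "se/rd/serde".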
+
+func InitializeIndexRepository(ctx context.Context, doer, owner *user_model.User) error {
+ repo, err := getOrCreateIndexRepository(ctx, doer, owner)
+ if err != nil {
+ return err
+ }
+
+ if err := createOrUpdateConfigFile(ctx, repo, doer, owner); err != nil {
+ return fmt.Errorf("createOrUpdateConfigFile: %w", err)
+ }
+
+ return nil
+}
+
+func RebuildIndex(ctx context.Context, doer, owner *user_model.User) error {
+ repo, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner.Name, IndexRepositoryName)
+ if err != nil {
+ return fmt.Errorf("GetRepositoryByOwnerAndName: %w", err)
+ }
+
+ ps, err := packages_model.GetPackagesByType(ctx, owner.ID, packages_model.TypeCargo)
+ if err != nil {
+ return fmt.Errorf("GetPackagesByType: %w", err)
+ }
+
+ return alterRepositoryContent(
+ ctx,
+ doer,
+ repo,
+ "Rebuild Cargo Index",
+ func(t *files_service.TemporaryUploadRepository) error {
+ // Remove all existing content but the Cargo config
+ files, err := t.LsFiles()
+ if err != nil {
+ return err
+ }
+ for i, file := range files {
+ if file == ConfigFileName {
+ files[i] = files[len(files)-1]
+ files = files[:len(files)-1]
+ break
+ }
+ }
+ if err := t.RemoveFilesFromIndex(files...); err != nil {
+ return err
+ }
+
+ // Add all packages
+ for _, p := range ps {
+ if err := addOrUpdatePackageIndex(ctx, t, p); err != nil {
+ return err
+ }
+ }
+
+ return nil
+ },
+ )
+}
+
+func UpdatePackageIndexIfExists(ctx context.Context, doer, owner *user_model.User, packageID int64) error {
+ // We do not want to force the creation of the repo here:
+ // the Cargo HTTP index does not rely on the repo itself,
+ // so if the repo does not exist, we just do nothing.
+ repo, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner.Name, IndexRepositoryName)
+ if err != nil {
+ if errors.Is(err, util.ErrNotExist) {
+ return nil
+ }
+ return fmt.Errorf("GetRepositoryByOwnerAndName: %w", err)
+ }
+
+ p, err := packages_model.GetPackageByID(ctx, packageID)
+ if err != nil {
+ return fmt.Errorf("GetPackageByID[%d]: %w", packageID, err)
+ }
+
+ return alterRepositoryContent(
+ ctx,
+ doer,
+ repo,
+ "Update "+p.Name,
+ func(t *files_service.TemporaryUploadRepository) error {
+ return addOrUpdatePackageIndex(ctx, t, p)
+ },
+ )
+}
+
+type IndexVersionEntry struct {
+ Name string `json:"name"`
+ Version string `json:"vers"`
+ Dependencies []*cargo_module.Dependency `json:"deps"`
+ FileChecksum string `json:"cksum"`
+ Features map[string][]string `json:"features"`
+ Yanked bool `json:"yanked"`
+ Links string `json:"links,omitempty"`
+}
+
+func BuildPackageIndex(ctx context.Context, p *packages_model.Package) (*bytes.Buffer, error) {
+ pvs, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{
+ PackageID: p.ID,
+ Sort: packages_model.SortVersionAsc,
+ })
+ if err != nil {
+ return nil, fmt.Errorf("SearchVersions[%s]: %w", p.Name, err)
+ }
+ if len(pvs) == 0 {
+ return nil, nil
+ }
+
+ pds, err := packages_model.GetPackageDescriptors(ctx, pvs)
+ if err != nil {
+ return nil, fmt.Errorf("GetPackageDescriptors[%s]: %w", p.Name, err)
+ }
+
+ var b bytes.Buffer
+ for _, pd := range pds {
+ metadata := pd.Metadata.(*cargo_module.Metadata)
+
+ dependencies := metadata.Dependencies
+ if dependencies == nil {
+ dependencies = make([]*cargo_module.Dependency, 0)
+ }
+
+ features := metadata.Features
+ if features == nil {
+ features = make(map[string][]string)
+ }
+
+ yanked, _ := strconv.ParseBool(pd.VersionProperties.GetByName(cargo_module.PropertyYanked))
+ entry, err := json.Marshal(&IndexVersionEntry{
+ Name: pd.Package.Name,
+ Version: pd.Version.Version,
+ Dependencies: dependencies,
+ FileChecksum: pd.Files[0].Blob.HashSHA256,
+ Features: features,
+ Yanked: yanked,
+ Links: metadata.Links,
+ })
+ if err != nil {
+ return nil, err
+ }
+
+ b.Write(entry)
+ b.WriteString("\n")
+ }
+
+ return &b, nil
+}
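+
+// Each line written above is one JSON object per version, e.g. (illustrative
+// values):
+//
+//  {"name":"mycrate","vers":"1.0.0","deps":[],"cksum":"<sha256>","features":{},"yanked":false}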
+
+func addOrUpdatePackageIndex(ctx context.Context, t *files_service.TemporaryUploadRepository, p *packages_model.Package) error {
+ b, err := BuildPackageIndex(ctx, p)
+ if err != nil {
+ return err
+ }
+ if b == nil {
+ return nil
+ }
+
+ return writeObjectToIndex(t, BuildPackagePath(p.LowerName), b)
+}
+
+func getOrCreateIndexRepository(ctx context.Context, doer, owner *user_model.User) (*repo_model.Repository, error) {
+ repo, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner.Name, IndexRepositoryName)
+ if err != nil {
+ if errors.Is(err, util.ErrNotExist) {
+ repo, err = repo_service.CreateRepositoryDirectly(ctx, doer, owner, repo_service.CreateRepoOptions{
+ Name: IndexRepositoryName,
+ })
+ if err != nil {
+ return nil, fmt.Errorf("CreateRepository: %w", err)
+ }
+ } else {
+ return nil, fmt.Errorf("GetRepositoryByOwnerAndName: %w", err)
+ }
+ }
+
+ return repo, nil
+}
+
+type Config struct {
+ DownloadURL string `json:"dl"`
+ APIURL string `json:"api"`
+ AuthRequired bool `json:"auth-required"`
+}
+
+func BuildConfig(owner *user_model.User, isPrivate bool) *Config {
+ return &Config{
+ DownloadURL: setting.AppURL + "api/packages/" + owner.Name + "/cargo/api/v1/crates",
+ APIURL: setting.AppURL + "api/packages/" + owner.Name + "/cargo",
+ AuthRequired: isPrivate,
+ }
+}
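+
+// Illustrative config.json as served from the index (assuming an instance at
+// https://gitea.example.com and owner "alice"):
+//
+//  {
+//    "dl": "https://gitea.example.com/api/packages/alice/cargo/api/v1/crates",
+//    "api": "https://gitea.example.com/api/packages/alice/cargo",
+//    "auth-required": false
+//  }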
+
+func createOrUpdateConfigFile(ctx context.Context, repo *repo_model.Repository, doer, owner *user_model.User) error {
+ return alterRepositoryContent(
+ ctx,
+ doer,
+ repo,
+ "Initialize Cargo Config",
+ func(t *files_service.TemporaryUploadRepository) error {
+ var b bytes.Buffer
+ err := json.NewEncoder(&b).Encode(BuildConfig(owner, setting.Service.RequireSignInView || owner.Visibility != structs.VisibleTypePublic || repo.IsPrivate))
+ if err != nil {
+ return err
+ }
+
+ return writeObjectToIndex(t, ConfigFileName, &b)
+ },
+ )
+}
+
+// alterRepositoryContent is a shorter version of CreateOrUpdateRepoFile which allows performing multiple actions on a git repository
+func alterRepositoryContent(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, commitMessage string, fn func(*files_service.TemporaryUploadRepository) error) error {
+ t, err := files_service.NewTemporaryUploadRepository(ctx, repo)
+ if err != nil {
+ return err
+ }
+ defer t.Close()
+
+ var lastCommitID string
+ if err := t.Clone(repo.DefaultBranch, true); err != nil {
+ if !git.IsErrBranchNotExist(err) || !repo.IsEmpty {
+ return err
+ }
+ if err := t.Init(repo.ObjectFormatName); err != nil {
+ return err
+ }
+ } else {
+ if err := t.SetDefaultIndex(); err != nil {
+ return err
+ }
+
+ commit, err := t.GetBranchCommit(repo.DefaultBranch)
+ if err != nil {
+ return err
+ }
+
+ lastCommitID = commit.ID.String()
+ }
+
+ if err := fn(t); err != nil {
+ return err
+ }
+
+ treeHash, err := t.WriteTree()
+ if err != nil {
+ return err
+ }
+
+ now := time.Now()
+ commitHash, err := t.CommitTreeWithDate(lastCommitID, doer, doer, treeHash, commitMessage, false, now, now)
+ if err != nil {
+ return err
+ }
+
+ return t.Push(doer, commitHash, repo.DefaultBranch)
+}
+
+func writeObjectToIndex(t *files_service.TemporaryUploadRepository, path string, r io.Reader) error {
+ hash, err := t.HashObject(r)
+ if err != nil {
+ return err
+ }
+
+ return t.AddObjectToIndex("100644", hash, path)
+}
diff --git a/services/packages/cleanup/cleanup.go b/services/packages/cleanup/cleanup.go
new file mode 100644
index 0000000..ab419a9
--- /dev/null
+++ b/services/packages/cleanup/cleanup.go
@@ -0,0 +1,198 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package container
+
+import (
+ "context"
+ "fmt"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ packages_model "code.gitea.io/gitea/models/packages"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ packages_module "code.gitea.io/gitea/modules/packages"
+ packages_service "code.gitea.io/gitea/services/packages"
+ alpine_service "code.gitea.io/gitea/services/packages/alpine"
+ arch_service "code.gitea.io/gitea/services/packages/arch"
+ cargo_service "code.gitea.io/gitea/services/packages/cargo"
+ container_service "code.gitea.io/gitea/services/packages/container"
+ debian_service "code.gitea.io/gitea/services/packages/debian"
+ rpm_service "code.gitea.io/gitea/services/packages/rpm"
+)
+
+// CleanupTask executes the cleanup rules and then removes expired package data
+func CleanupTask(ctx context.Context, olderThan time.Duration) error {
+ if err := ExecuteCleanupRules(ctx); err != nil {
+ return err
+ }
+
+ return CleanupExpiredData(ctx, olderThan)
+}
+
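+// ExecuteCleanupRules runs every enabled cleanup rule inside one transaction.
+// For each package the newest KeepCount versions are always kept (implemented
+// by the paginator offset below); each remaining version is kept if it matches
+// the keep pattern or is newer than RemoveDays, and is deleted otherwise,
+// unless a remove pattern is set and does not match it.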
+func ExecuteCleanupRules(outerCtx context.Context) error {
+ ctx, committer, err := db.TxContext(outerCtx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ err = packages_model.IterateEnabledCleanupRules(ctx, func(ctx context.Context, pcr *packages_model.PackageCleanupRule) error {
+ select {
+ case <-outerCtx.Done():
+ return db.ErrCancelledf("While processing package cleanup rules")
+ default:
+ }
+
+ if err := pcr.CompiledPattern(); err != nil {
+ return fmt.Errorf("CleanupRule [%d]: CompilePattern failed: %w", pcr.ID, err)
+ }
+
+ olderThan := time.Now().AddDate(0, 0, -pcr.RemoveDays)
+
+ packages, err := packages_model.GetPackagesByType(ctx, pcr.OwnerID, pcr.Type)
+ if err != nil {
+ return fmt.Errorf("CleanupRule [%d]: GetPackagesByType failed: %w", pcr.ID, err)
+ }
+
+ anyVersionDeleted := false
+ for _, p := range packages {
+ pvs, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{
+ PackageID: p.ID,
+ IsInternal: optional.Some(false),
+ Sort: packages_model.SortCreatedDesc,
+ Paginator: db.NewAbsoluteListOptions(pcr.KeepCount, 200),
+ })
+ if err != nil {
+ return fmt.Errorf("CleanupRule [%d]: SearchVersions failed: %w", pcr.ID, err)
+ }
+ versionDeleted := false
+ for _, pv := range pvs {
+ if pcr.Type == packages_model.TypeContainer {
+ if skip, err := container_service.ShouldBeSkipped(ctx, pcr, p, pv); err != nil {
+ return fmt.Errorf("CleanupRule [%d]: container.ShouldBeSkipped failed: %w", pcr.ID, err)
+ } else if skip {
+ log.Debug("Rule[%d]: keep '%s/%s' (container)", pcr.ID, p.Name, pv.Version)
+ continue
+ }
+ }
+
+ toMatch := pv.LowerVersion
+ if pcr.MatchFullName {
+ toMatch = p.LowerName + "/" + pv.LowerVersion
+ }
+
+ if pcr.KeepPatternMatcher != nil && pcr.KeepPatternMatcher.MatchString(toMatch) {
+ log.Debug("Rule[%d]: keep '%s/%s' (keep pattern)", pcr.ID, p.Name, pv.Version)
+ continue
+ }
+ if pv.CreatedUnix.AsLocalTime().After(olderThan) {
+ log.Debug("Rule[%d]: keep '%s/%s' (remove days)", pcr.ID, p.Name, pv.Version)
+ continue
+ }
+ if pcr.RemovePatternMatcher != nil && !pcr.RemovePatternMatcher.MatchString(toMatch) {
+ log.Debug("Rule[%d]: keep '%s/%s' (remove pattern)", pcr.ID, p.Name, pv.Version)
+ continue
+ }
+
+ log.Debug("Rule[%d]: remove '%s/%s'", pcr.ID, p.Name, pv.Version)
+
+ if err := packages_service.DeletePackageVersionAndReferences(ctx, pv); err != nil {
+ return fmt.Errorf("CleanupRule [%d]: DeletePackageVersionAndReferences failed: %w", pcr.ID, err)
+ }
+
+ versionDeleted = true
+ anyVersionDeleted = true
+ }
+
+ if versionDeleted {
+ if pcr.Type == packages_model.TypeCargo {
+ owner, err := user_model.GetUserByID(ctx, pcr.OwnerID)
+ if err != nil {
+ return fmt.Errorf("GetUserByID failed: %w", err)
+ }
+ if err := cargo_service.UpdatePackageIndexIfExists(ctx, owner, owner, p.ID); err != nil {
+ return fmt.Errorf("CleanupRule [%d]: cargo.UpdatePackageIndexIfExists failed: %w", pcr.ID, err)
+ }
+ }
+ }
+ }
+
+ if anyVersionDeleted {
+ if pcr.Type == packages_model.TypeDebian {
+ if err := debian_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil {
+ return fmt.Errorf("CleanupRule [%d]: debian.BuildAllRepositoryFiles failed: %w", pcr.ID, err)
+ }
+ } else if pcr.Type == packages_model.TypeAlpine {
+ if err := alpine_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil {
+ return fmt.Errorf("CleanupRule [%d]: alpine.BuildAllRepositoryFiles failed: %w", pcr.ID, err)
+ }
+ } else if pcr.Type == packages_model.TypeRpm {
+ if err := rpm_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil {
+ return fmt.Errorf("CleanupRule [%d]: rpm.BuildAllRepositoryFiles failed: %w", pcr.ID, err)
+ }
+ } else if pcr.Type == packages_model.TypeArch {
+ if err := arch_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil {
+ return fmt.Errorf("CleanupRule [%d]: arch.BuildAllRepositoryFiles failed: %w", pcr.ID, err)
+ }
+ }
+ }
+ return nil
+ })
+ if err != nil {
+ return err
+ }
+
+ return committer.Commit()
+}
+
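+// CleanupExpiredData removes expired container data, unreferenced packages
+// and expired unreferenced blobs. The blob files are removed from the content
+// store only after the database transaction has been committed.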
+func CleanupExpiredData(outerCtx context.Context, olderThan time.Duration) error {
+ ctx, committer, err := db.TxContext(outerCtx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ if err := container_service.Cleanup(ctx, olderThan); err != nil {
+ return err
+ }
+
+ pIDs, err := packages_model.FindUnreferencedPackages(ctx)
+ if err != nil {
+ return err
+ }
+ for _, pID := range pIDs {
+ if err := packages_model.DeleteAllProperties(ctx, packages_model.PropertyTypePackage, pID); err != nil {
+ return err
+ }
+ if err := packages_model.DeletePackageByID(ctx, pID); err != nil {
+ return err
+ }
+ }
+
+ pbs, err := packages_model.FindExpiredUnreferencedBlobs(ctx, olderThan)
+ if err != nil {
+ return err
+ }
+
+ for _, pb := range pbs {
+ if err := packages_model.DeleteBlobByID(ctx, pb.ID); err != nil {
+ return err
+ }
+ }
+
+ if err := committer.Commit(); err != nil {
+ return err
+ }
+
+ contentStore := packages_module.NewContentStore()
+ for _, pb := range pbs {
+ if err := contentStore.Delete(packages_module.BlobHash256Key(pb.HashSHA256)); err != nil {
+ log.Error("Error deleting package blob [%v]: %v", pb.ID, err)
+ }
+ }
+
+ return nil
+}
diff --git a/services/packages/cleanup/cleanup_sha256_test.go b/services/packages/cleanup/cleanup_sha256_test.go
new file mode 100644
index 0000000..6d7cc47
--- /dev/null
+++ b/services/packages/cleanup/cleanup_sha256_test.go
@@ -0,0 +1,116 @@
+// Copyright 2024 The Forgejo Authors.
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package container
+
+import (
+ "testing"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/packages"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ container_module "code.gitea.io/gitea/modules/packages/container"
+ "code.gitea.io/gitea/modules/test"
+ "code.gitea.io/gitea/modules/timeutil"
+ container_service "code.gitea.io/gitea/services/packages/container"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestCleanupSHA256(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ defer test.MockVariableValue(&container_service.SHA256BatchSize, 1)()
+
+ ctx := db.DefaultContext
+
+ createContainer := func(t *testing.T, name, version, digest string, created timeutil.TimeStamp) {
+ t.Helper()
+
+ ownerID := int64(2001)
+
+ p := packages.Package{
+ OwnerID: ownerID,
+ LowerName: name,
+ Type: packages.TypeContainer,
+ }
+ _, err := db.GetEngine(ctx).Insert(&p)
+ // package_version").Where("version = ?", multiTag).Update(&packages_model.PackageVersion{MetadataJSON: `corrupted "manifests":[{ bad`})
+ require.NoError(t, err)
+
+ var metadata string
+ if digest != "" {
+ m := container_module.Metadata{
+ Manifests: []*container_module.Manifest{
+ {
+ Digest: digest,
+ },
+ },
+ }
+ mt, err := json.Marshal(m)
+ require.NoError(t, err)
+ metadata = string(mt)
+ }
+ v := packages.PackageVersion{
+ PackageID: p.ID,
+ LowerVersion: version,
+ MetadataJSON: metadata,
+ CreatedUnix: created,
+ }
+ _, err = db.GetEngine(ctx).NoAutoTime().Insert(&v)
+ require.NoError(t, err)
+ }
+
+ cleanupAndCheckLogs := func(t *testing.T, olderThan time.Duration, expected ...string) {
+ t.Helper()
+ logChecker, cleanup := test.NewLogChecker(log.DEFAULT, log.TRACE)
+ logChecker.Filter(expected...)
+ logChecker.StopMark(container_service.SHA256LogFinish)
+ defer cleanup()
+
+ require.NoError(t, CleanupExpiredData(ctx, olderThan))
+
+ logFiltered, logStopped := logChecker.Check(5 * time.Second)
+ assert.True(t, logStopped)
+ filtered := make([]bool, 0, len(expected))
+ for range expected {
+ filtered = append(filtered, true)
+ }
+ assert.EqualValues(t, filtered, logFiltered, expected)
+ }
+
+ ancient := 1 * time.Hour
+
+ t.Run("no packages, cleanup nothing", func(t *testing.T) {
+ cleanupAndCheckLogs(t, ancient, "Nothing to cleanup")
+ })
+
+ orphan := "orphan"
+ createdLongAgo := timeutil.TimeStamp(time.Now().Add(-(ancient * 2)).Unix())
+ createdRecently := timeutil.TimeStamp(time.Now().Add(-(ancient / 2)).Unix())
+
+ t.Run("an orphaned package created a long time ago is removed", func(t *testing.T) {
+ createContainer(t, orphan, "sha256:"+orphan, "", createdLongAgo)
+ cleanupAndCheckLogs(t, ancient, "Removing 1 entries from `package_version`")
+ cleanupAndCheckLogs(t, ancient, "Nothing to cleanup")
+ })
+
+ t.Run("a newly created orphaned package is not cleaned up", func(t *testing.T) {
+ createContainer(t, orphan, "sha256:"+orphan, "", createdRecently)
+ cleanupAndCheckLogs(t, ancient, "1 out of 1 container image(s) are not deleted because they were created less than")
+ cleanupAndCheckLogs(t, 0, "Removing 1 entries from `package_version`")
+ cleanupAndCheckLogs(t, 0, "Nothing to cleanup")
+ })
+
+ t.Run("a referenced package is not removed", func(t *testing.T) {
+ referenced := "referenced"
+ digest := "sha256:" + referenced
+ createContainer(t, referenced, digest, "", createdRecently)
+ index := "index"
+ createContainer(t, index, index, digest, createdRecently)
+ cleanupAndCheckLogs(t, ancient, "Nothing to cleanup")
+ })
+}
diff --git a/services/packages/cleanup/main_test.go b/services/packages/cleanup/main_test.go
new file mode 100644
index 0000000..ded3d76
--- /dev/null
+++ b/services/packages/cleanup/main_test.go
@@ -0,0 +1,14 @@
+// Copyright 2024 The Forgejo Authors.
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package container
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
diff --git a/services/packages/container/blob_uploader.go b/services/packages/container/blob_uploader.go
new file mode 100644
index 0000000..bae2e2d
--- /dev/null
+++ b/services/packages/container/blob_uploader.go
@@ -0,0 +1,133 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package container
+
+import (
+ "context"
+ "errors"
+ "io"
+ "os"
+
+ packages_model "code.gitea.io/gitea/models/packages"
+ packages_module "code.gitea.io/gitea/modules/packages"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+)
+
+var (
+ // errWriteAfterRead occurs if Write is called after a read operation
+ errWriteAfterRead = errors.New("write is unsupported after a read operation")
+ // errOffsetMissmatch occurs if the file offset differs from the one stored in the model
+ errOffsetMissmatch = errors.New("offset mismatch between file and model")
+)
+
+// BlobUploader handles chunked blob uploads
+type BlobUploader struct {
+ *packages_model.PackageBlobUpload
+ *packages_module.MultiHasher
+ file *os.File
+ reading bool
+}
+
+func buildFilePath(id string) string {
+ return util.FilePathJoinAbs(setting.Packages.ChunkedUploadPath, id)
+}
+
+// NewBlobUploader creates a new blob uploader for the given id
+func NewBlobUploader(ctx context.Context, id string) (*BlobUploader, error) {
+ model, err := packages_model.GetBlobUploadByID(ctx, id)
+ if err != nil {
+ return nil, err
+ }
+
+ hash := packages_module.NewMultiHasher()
+ if len(model.HashStateBytes) != 0 {
+ if err := hash.UnmarshalBinary(model.HashStateBytes); err != nil {
+ return nil, err
+ }
+ }
+
+ f, err := os.OpenFile(buildFilePath(model.ID), os.O_RDWR|os.O_CREATE, 0o666)
+ if err != nil {
+ return nil, err
+ }
+
+ return &BlobUploader{
+ model,
+ hash,
+ f,
+ false,
+ }, nil
+}
+
+// Close implements io.Closer
+func (u *BlobUploader) Close() error {
+ return u.file.Close()
+}
+
+// Append appends a chunk of data and updates the model
+func (u *BlobUploader) Append(ctx context.Context, r io.Reader) error {
+ if u.reading {
+ return errWriteAfterRead
+ }
+
+ offset, err := u.file.Seek(0, io.SeekEnd)
+ if err != nil {
+ return err
+ }
+ if offset != u.BytesReceived {
+ return errOffsetMissmatch
+ }
+
+ n, err := io.Copy(io.MultiWriter(u.file, u.MultiHasher), r)
+ if err != nil {
+ return err
+ }
+
+ // fast path if nothing was written
+ if n == 0 {
+ return nil
+ }
+
+ u.BytesReceived += n
+
+ u.HashStateBytes, err = u.MultiHasher.MarshalBinary()
+ if err != nil {
+ return err
+ }
+
+ return packages_model.UpdateBlobUpload(ctx, u.PackageBlobUpload)
+}
+
+func (u *BlobUploader) Size() int64 {
+ return u.BytesReceived
+}
+
+// Read implements io.Reader
+func (u *BlobUploader) Read(p []byte) (int, error) {
+ if !u.reading {
+ _, err := u.file.Seek(0, io.SeekStart)
+ if err != nil {
+ return 0, err
+ }
+
+ u.reading = true
+ }
+
+ return u.file.Read(p)
+}
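+
+// Illustrative lifecycle of a chunked upload (assumed usage, not part of this
+// change):
+//
+//  u, _ := NewBlobUploader(ctx, uploadID) // restores hash state and file
+//  _ = u.Append(ctx, chunk)               // once per received chunk
+//  _, _ = io.Copy(dst, u)                 // reading switches to read mode...
+//  _ = u.Close()                          // ...after which writes fail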
+
+// RemoveBlobUploadByID deletes the data and the model of a blob upload
+func RemoveBlobUploadByID(ctx context.Context, id string) error {
+ if err := packages_model.DeleteBlobUploadByID(ctx, id); err != nil {
+ return err
+ }
+
+ err := os.Remove(buildFilePath(id))
+ if err != nil && !os.IsNotExist(err) {
+ return err
+ }
+
+ return nil
+}
diff --git a/services/packages/container/cleanup.go b/services/packages/container/cleanup.go
new file mode 100644
index 0000000..b5563c6
--- /dev/null
+++ b/services/packages/container/cleanup.go
@@ -0,0 +1,111 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package container
+
+import (
+ "context"
+ "time"
+
+ packages_model "code.gitea.io/gitea/models/packages"
+ container_model "code.gitea.io/gitea/models/packages/container"
+ "code.gitea.io/gitea/modules/optional"
+ container_module "code.gitea.io/gitea/modules/packages/container"
+ packages_service "code.gitea.io/gitea/services/packages"
+
+ digest "github.com/opencontainers/go-digest"
+)
+
+// Cleanup removes expired container data
+func Cleanup(ctx context.Context, olderThan time.Duration) error {
+ if err := cleanupExpiredBlobUploads(ctx, olderThan); err != nil {
+ return err
+ }
+ if err := CleanupSHA256(ctx, olderThan); err != nil {
+ return err
+ }
+ return cleanupExpiredUploadedBlobs(ctx, olderThan)
+}
+
+// cleanupExpiredBlobUploads removes expired blob uploads
+func cleanupExpiredBlobUploads(ctx context.Context, olderThan time.Duration) error {
+ pbus, err := packages_model.FindExpiredBlobUploads(ctx, olderThan)
+ if err != nil {
+ return err
+ }
+
+ for _, pbu := range pbus {
+ if err := RemoveBlobUploadByID(ctx, pbu.ID); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// cleanupExpiredUploadedBlobs removes expired uploaded blobs not referenced by a manifest
+func cleanupExpiredUploadedBlobs(ctx context.Context, olderThan time.Duration) error {
+ pfs, err := container_model.SearchExpiredUploadedBlobs(ctx, olderThan)
+ if err != nil {
+ return err
+ }
+
+ for _, pf := range pfs {
+ if err := packages_service.DeletePackageFile(ctx, pf); err != nil {
+ return err
+ }
+ }
+
+ pvs, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{
+ Type: packages_model.TypeContainer,
+ Version: packages_model.SearchValue{
+ ExactMatch: true,
+ Value: container_model.UploadVersion,
+ },
+ IsInternal: optional.Some(true),
+ HasFiles: optional.Some(false),
+ })
+ if err != nil {
+ return err
+ }
+
+ for _, pv := range pvs {
+ if err := packages_model.DeleteAllProperties(ctx, packages_model.PropertyTypeVersion, pv.ID); err != nil {
+ return err
+ }
+
+ if err := packages_model.DeleteVersionByID(ctx, pv.ID); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
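+// ShouldBeSkipped tells whether the cleanup rule must keep the given container
+// version: the "latest" tag is always kept, and a digest version is kept as
+// long as another manifest still references it.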
+func ShouldBeSkipped(ctx context.Context, pcr *packages_model.PackageCleanupRule, p *packages_model.Package, pv *packages_model.PackageVersion) (bool, error) {
+ // Always skip the "latest" tag
+ if pv.LowerVersion == "latest" {
+ return true, nil
+ }
+
+ // Check if the version is a digest (or untagged)
+ if digest.Digest(pv.LowerVersion).Validate() == nil {
+ // Check if there is another manifest referencing this version
+ has, err := packages_model.ExistVersion(ctx, &packages_model.PackageSearchOptions{
+ PackageID: p.ID,
+ Properties: map[string]string{
+ container_module.PropertyManifestReference: pv.LowerVersion,
+ },
+ })
+ if err != nil {
+ return false, err
+ }
+
+ // Skip it if the version is referenced
+ if has {
+ return true, nil
+ }
+ }
+
+ return false, nil
+}
diff --git a/services/packages/container/cleanup_sha256.go b/services/packages/container/cleanup_sha256.go
new file mode 100644
index 0000000..16afc74
--- /dev/null
+++ b/services/packages/container/cleanup_sha256.go
@@ -0,0 +1,158 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package container
+
+import (
+ "context"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/packages"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ container_module "code.gitea.io/gitea/modules/packages/container"
+ "code.gitea.io/gitea/modules/timeutil"
+)
+
+var (
+ SHA256BatchSize = 500
+ SHA256Log = "cleanup of dangling images with a sha256:* version"
+ SHA256LogStart = "Starting " + SHA256Log
+ SHA256LogFinish = "Finished " + SHA256Log
+)
+
+func CleanupSHA256(ctx context.Context, olderThan time.Duration) error {
+ log.Info(SHA256LogStart)
+ err := cleanupSHA256(ctx, olderThan)
+ log.Info(SHA256LogFinish)
+ return err
+}
+
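+// cleanupSHA256 deletes container versions with a sha256:* version that no
+// index manifest references anymore. References are read from the version
+// metadata, e.g. (illustrative digest):
+//
+//  {"manifests":[{"digest":"sha256:9f86d081884c7d65..."}]}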
+func cleanupSHA256(outerCtx context.Context, olderThan time.Duration) error {
+ ctx, committer, err := db.TxContext(outerCtx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ foundAtLeastOneSHA256 := false
+ type packageVersion struct {
+ id int64
+ created timeutil.TimeStamp
+ }
+ shaToPackageVersion := make(map[string]packageVersion, 100)
+ knownSHA := make(map[string]any, 100)
+
+ // compute before making the inventory to not race against ongoing
+ // image creations
+ old := timeutil.TimeStamp(time.Now().Add(-olderThan).Unix())
+
+ log.Debug("Look for all package_version.version that start with sha256:")
+
+ // Iterate over all container versions in ascending order and store
+ // in shaToPackageVersion all versions with a sha256: prefix. If an index
+ // manifest is found, the sha256: digests it references are removed
+ // from shaToPackageVersion. If a sha256: digest found in an index
+ // manifest is not already in shaToPackageVersion, it is stored in
+ // knownSHA to be dealt with later.
+ //
+ // Although it is theoretically possible that a sha256: is uploaded
+ // after the index manifest that references it, this is not the
+ // normal order of operations. First the sha256: version is uploaded
+ // and then the index manifest. When the iteration completes,
+ // knownSHA will therefore be empty most of the time and
+ // shaToPackageVersion will only contain unreferenced sha256: versions.
+ if err := db.GetEngine(ctx).
+ Select("`package_version`.`id`, `package_version`.`created_unix`, `package_version`.`lower_version`, `package_version`.`metadata_json`").
+ Join("INNER", "`package`", "`package`.`id` = `package_version`.`package_id`").
+ Where("`package`.`type` = ?", packages.TypeContainer).
+ OrderBy("`package_version`.`id` ASC").
+ Iterate(new(packages.PackageVersion), func(_ int, bean any) error {
+ v := bean.(*packages.PackageVersion)
+ if strings.HasPrefix(v.LowerVersion, "sha256:") {
+ shaToPackageVersion[v.LowerVersion] = packageVersion{id: v.ID, created: v.CreatedUnix}
+ foundAtLeastOneSHA256 = true
+ } else if strings.Contains(v.MetadataJSON, `"manifests":[{`) {
+ var metadata container_module.Metadata
+ if err := json.Unmarshal([]byte(v.MetadataJSON), &metadata); err != nil {
+ log.Error("package_version.id = %d package_version.metadata_json %s is not a JSON string containing valid metadata. It was ignored but it is an inconsistency in the database that should be looked at. %v", v.ID, v.MetadataJSON, err)
+ return nil
+ }
+ for _, manifest := range metadata.Manifests {
+ if _, ok := shaToPackageVersion[manifest.Digest]; ok {
+ delete(shaToPackageVersion, manifest.Digest)
+ } else {
+ knownSHA[manifest.Digest] = true
+ }
+ }
+ }
+ return nil
+ }); err != nil {
+ return err
+ }
+
+ for sha := range knownSHA {
+ delete(shaToPackageVersion, sha)
+ }
+
+ if len(shaToPackageVersion) == 0 {
+ if foundAtLeastOneSHA256 {
+ log.Debug("All container images with a version matching sha256:* are referenced by an index manifest")
+ } else {
+ log.Debug("There are no container images with a version matching sha256:*")
+ }
+ log.Info("Nothing to cleanup")
+ return nil
+ }
+
+ found := len(shaToPackageVersion)
+
+ log.Warn("%d container image(s) with a version matching sha256:* are not referenced by an index manifest", found)
+
+ log.Debug("Deleting unreferenced image versions from `package_version`, `package_file` and `package_property` (%d at a time)", SHA256BatchSize)
+
+ packageVersionIDs := make([]int64, 0, SHA256BatchSize)
+ tooYoung := 0
+ for _, p := range shaToPackageVersion {
+ if p.created < old {
+ packageVersionIDs = append(packageVersionIDs, p.id)
+ } else {
+ tooYoung++
+ }
+ }
+
+ if tooYoung > 0 {
+ log.Warn("%d out of %d container image(s) are not deleted because they were created less than %v ago", tooYoung, found, olderThan)
+ }
+
+ for len(packageVersionIDs) > 0 {
+ upper := min(len(packageVersionIDs), SHA256BatchSize)
+ versionIDs := packageVersionIDs[0:upper]
+
+ var packageFileIDs []int64
+ if err := db.GetEngine(ctx).Select("id").Table("package_file").In("version_id", versionIDs).Find(&packageFileIDs); err != nil {
+ return err
+ }
+ log.Info("Removing %d entries from `package_file` and `package_property`", len(packageFileIDs))
+ if _, err := db.GetEngine(ctx).In("id", packageFileIDs).Delete(&packages.PackageFile{}); err != nil {
+ return err
+ }
+ if _, err := db.GetEngine(ctx).In("ref_id", packageFileIDs).And("ref_type = ?", packages.PropertyTypeFile).Delete(&packages.PackageProperty{}); err != nil {
+ return err
+ }
+
+ log.Info("Removing %d entries from `package_version` and `package_property`", upper)
+ if _, err := db.GetEngine(ctx).In("id", versionIDs).Delete(&packages.PackageVersion{}); err != nil {
+ return err
+ }
+ if _, err := db.GetEngine(ctx).In("ref_id", versionIDs).And("ref_type = ?", packages.PropertyTypeVersion).Delete(&packages.PackageProperty{}); err != nil {
+ return err
+ }
+
+ packageVersionIDs = packageVersionIDs[upper:]
+ }
+
+ return committer.Commit()
+}
diff --git a/services/packages/container/common.go b/services/packages/container/common.go
new file mode 100644
index 0000000..5a14ed5
--- /dev/null
+++ b/services/packages/container/common.go
@@ -0,0 +1,35 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package container
+
+import (
+ "context"
+ "strings"
+
+ packages_model "code.gitea.io/gitea/models/packages"
+ user_model "code.gitea.io/gitea/models/user"
+ container_module "code.gitea.io/gitea/modules/packages/container"
+)
+
+// UpdateRepositoryNames updates the repository name property for all packages of the specified owner
+func UpdateRepositoryNames(ctx context.Context, owner *user_model.User, newOwnerName string) error {
+ ps, err := packages_model.GetPackagesByType(ctx, owner.ID, packages_model.TypeContainer)
+ if err != nil {
+ return err
+ }
+
+ newOwnerName = strings.ToLower(newOwnerName)
+
+ for _, p := range ps {
+ if err := packages_model.DeletePropertyByName(ctx, packages_model.PropertyTypePackage, p.ID, container_module.PropertyRepository); err != nil {
+ return err
+ }
+
+ if _, err := packages_model.InsertProperty(ctx, packages_model.PropertyTypePackage, p.ID, container_module.PropertyRepository, newOwnerName+"/"+p.LowerName); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
diff --git a/services/packages/debian/repository.go b/services/packages/debian/repository.go
new file mode 100644
index 0000000..e400f1e
--- /dev/null
+++ b/services/packages/debian/repository.go
@@ -0,0 +1,413 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package debian
+
+import (
+ "bytes"
+ "compress/gzip"
+ "context"
+ "errors"
+ "fmt"
+ "io"
+ "sort"
+ "strings"
+ "time"
+
+ packages_model "code.gitea.io/gitea/models/packages"
+ debian_model "code.gitea.io/gitea/models/packages/debian"
+ user_model "code.gitea.io/gitea/models/user"
+ packages_module "code.gitea.io/gitea/modules/packages"
+ debian_module "code.gitea.io/gitea/modules/packages/debian"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ packages_service "code.gitea.io/gitea/services/packages"
+
+ "github.com/ProtonMail/go-crypto/openpgp"
+ "github.com/ProtonMail/go-crypto/openpgp/armor"
+ "github.com/ProtonMail/go-crypto/openpgp/clearsign"
+ "github.com/ProtonMail/go-crypto/openpgp/packet"
+ "github.com/ulikunitz/xz"
+)
+
+// GetOrCreateRepositoryVersion gets or creates the internal repository package
+// The Debian registry needs multiple index files which are stored in this package.
+func GetOrCreateRepositoryVersion(ctx context.Context, ownerID int64) (*packages_model.PackageVersion, error) {
+ return packages_service.GetOrCreateInternalPackageVersion(ctx, ownerID, packages_model.TypeDebian, debian_module.RepositoryPackage, debian_module.RepositoryVersion)
+}
+
+// GetOrCreateKeyPair gets or creates the PGP keys used to sign repository files
+func GetOrCreateKeyPair(ctx context.Context, ownerID int64) (string, string, error) {
+ priv, err := user_model.GetSetting(ctx, ownerID, debian_module.SettingKeyPrivate)
+ if err != nil && !errors.Is(err, util.ErrNotExist) {
+ return "", "", err
+ }
+
+ pub, err := user_model.GetSetting(ctx, ownerID, debian_module.SettingKeyPublic)
+ if err != nil && !errors.Is(err, util.ErrNotExist) {
+ return "", "", err
+ }
+
+ if priv == "" || pub == "" {
+ priv, pub, err = generateKeypair()
+ if err != nil {
+ return "", "", err
+ }
+
+ if err := user_model.SetUserSetting(ctx, ownerID, debian_module.SettingKeyPrivate, priv); err != nil {
+ return "", "", err
+ }
+
+ if err := user_model.SetUserSetting(ctx, ownerID, debian_module.SettingKeyPublic, pub); err != nil {
+ return "", "", err
+ }
+ }
+
+ return priv, pub, nil
+}
+
+func generateKeypair() (string, string, error) {
+ e, err := openpgp.NewEntity("", "Debian Registry", "", nil)
+ if err != nil {
+ return "", "", err
+ }
+
+ var priv strings.Builder
+ var pub strings.Builder
+
+ w, err := armor.Encode(&priv, openpgp.PrivateKeyType, nil)
+ if err != nil {
+ return "", "", err
+ }
+ if err := e.SerializePrivate(w, nil); err != nil {
+ return "", "", err
+ }
+ w.Close()
+
+ w, err = armor.Encode(&pub, openpgp.PublicKeyType, nil)
+ if err != nil {
+ return "", "", err
+ }
+ if err := e.Serialize(w); err != nil {
+ return "", "", err
+ }
+ w.Close()
+
+ return priv.String(), pub.String(), nil
+}
+
+// BuildAllRepositoryFiles (re)builds all repository files for every available distribution, component and architecture
+func BuildAllRepositoryFiles(ctx context.Context, ownerID int64) error {
+ pv, err := GetOrCreateRepositoryVersion(ctx, ownerID)
+ if err != nil {
+ return err
+ }
+
+ // 1. Delete all existing repository files
+ pfs, err := packages_model.GetFilesByVersionID(ctx, pv.ID)
+ if err != nil {
+ return err
+ }
+
+ for _, pf := range pfs {
+ if err := packages_service.DeletePackageFile(ctx, pf); err != nil {
+ return err
+ }
+ }
+
+ // 2. (Re)Build repository files for existing packages
+ distributions, err := debian_model.GetDistributions(ctx, ownerID)
+ if err != nil {
+ return err
+ }
+ for _, distribution := range distributions {
+ components, err := debian_model.GetComponents(ctx, ownerID, distribution)
+ if err != nil {
+ return err
+ }
+ architectures, err := debian_model.GetArchitectures(ctx, ownerID, distribution)
+ if err != nil {
+ return err
+ }
+
+ for _, component := range components {
+ for _, architecture := range architectures {
+ if err := buildRepositoryFiles(ctx, ownerID, pv, distribution, component, architecture); err != nil {
+ return fmt.Errorf("failed to build repository files [%s/%s/%s]: %w", distribution, component, architecture, err)
+ }
+ }
+ }
+ }
+
+ return nil
+}
+
+// BuildSpecificRepositoryFiles builds the index files for a specific distribution, component and architecture
+func BuildSpecificRepositoryFiles(ctx context.Context, ownerID int64, distribution, component, architecture string) error {
+ pv, err := GetOrCreateRepositoryVersion(ctx, ownerID)
+ if err != nil {
+ return err
+ }
+
+ return buildRepositoryFiles(ctx, ownerID, pv, distribution, component, architecture)
+}
+
+func buildRepositoryFiles(ctx context.Context, ownerID int64, repoVersion *packages_model.PackageVersion, distribution, component, architecture string) error {
+ if err := buildPackagesIndices(ctx, ownerID, repoVersion, distribution, component, architecture); err != nil {
+ return err
+ }
+
+ return buildReleaseFiles(ctx, ownerID, repoVersion, distribution)
+}
+
+// https://wiki.debian.org/DebianRepository/Format#A.22Packages.22_Indices
+func buildPackagesIndices(ctx context.Context, ownerID int64, repoVersion *packages_model.PackageVersion, distribution, component, architecture string) error {
+ opts := &debian_model.PackageSearchOptions{
+ OwnerID: ownerID,
+ Distribution: distribution,
+ Component: component,
+ Architecture: architecture,
+ }
+
+ // Delete the package indices if there are no packages
+ if has, err := debian_model.ExistPackages(ctx, opts); err != nil {
+ return err
+ } else if !has {
+ key := fmt.Sprintf("%s|%s|%s", distribution, component, architecture)
+ for _, filename := range []string{"Packages", "Packages.gz", "Packages.xz"} {
+ pf, err := packages_model.GetFileForVersionByName(ctx, repoVersion.ID, filename, key)
+ if err != nil && !errors.Is(err, util.ErrNotExist) {
+ return err
+ } else if pf == nil {
+ continue
+ }
+
+ if err := packages_service.DeletePackageFile(ctx, pf); err != nil {
+ return err
+ }
+ }
+
+ return nil
+ }
+
+ packagesContent, _ := packages_module.NewHashedBuffer()
+ defer packagesContent.Close()
+
+ packagesGzipContent, _ := packages_module.NewHashedBuffer()
+ defer packagesGzipContent.Close()
+
+ gzw := gzip.NewWriter(packagesGzipContent)
+
+ packagesXzContent, _ := packages_module.NewHashedBuffer()
+ defer packagesXzContent.Close()
+
+ xzw, _ := xz.NewWriter(packagesXzContent)
+
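+ // Write the index once; MultiWriter fans it out to the plain Packages buffer
+ // and, via gzw and xzw, to the gzip- and xz-compressed buffers in a single pass.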
+ w := io.MultiWriter(packagesContent, gzw, xzw)
+
+ addSeparator := false
+ if err := debian_model.SearchPackages(ctx, opts, func(pfd *packages_model.PackageFileDescriptor) {
+ if addSeparator {
+ fmt.Fprintln(w)
+ }
+ addSeparator = true
+
+ fmt.Fprintf(w, "%s\n", strings.TrimSpace(pfd.Properties.GetByName(debian_module.PropertyControl)))
+
+ fmt.Fprintf(w, "Filename: pool/%s/%s/%s\n", distribution, component, pfd.File.Name)
+ fmt.Fprintf(w, "Size: %d\n", pfd.Blob.Size)
+ fmt.Fprintf(w, "MD5sum: %s\n", pfd.Blob.HashMD5)
+ fmt.Fprintf(w, "SHA1: %s\n", pfd.Blob.HashSHA1)
+ fmt.Fprintf(w, "SHA256: %s\n", pfd.Blob.HashSHA256)
+ fmt.Fprintf(w, "SHA512: %s\n", pfd.Blob.HashSHA512)
+ }); err != nil {
+ return err
+ }
+
+ gzw.Close()
+ xzw.Close()
+
+ for _, file := range []struct {
+ Name string
+ Data packages_module.HashedSizeReader
+ }{
+ {"Packages", packagesContent},
+ {"Packages.gz", packagesGzipContent},
+ {"Packages.xz", packagesXzContent},
+ } {
+ _, err := packages_service.AddFileToPackageVersionInternal(
+ ctx,
+ repoVersion,
+ &packages_service.PackageFileCreationInfo{
+ PackageFileInfo: packages_service.PackageFileInfo{
+ Filename: file.Name,
+ CompositeKey: fmt.Sprintf("%s|%s|%s", distribution, component, architecture),
+ },
+ Creator: user_model.NewGhostUser(),
+ Data: file.Data,
+ IsLead: false,
+ OverwriteExisting: true,
+ Properties: map[string]string{
+ debian_module.PropertyRepositoryIncludeInRelease: "",
+ debian_module.PropertyDistribution: distribution,
+ debian_module.PropertyComponent: component,
+ debian_module.PropertyArchitecture: architecture,
+ },
+ },
+ )
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// https://wiki.debian.org/DebianRepository/Format#A.22Release.22_files
+func buildReleaseFiles(ctx context.Context, ownerID int64, repoVersion *packages_model.PackageVersion, distribution string) error {
+ pfs, _, err := packages_model.SearchFiles(ctx, &packages_model.PackageFileSearchOptions{
+ VersionID: repoVersion.ID,
+ Properties: map[string]string{
+ debian_module.PropertyRepositoryIncludeInRelease: "",
+ debian_module.PropertyDistribution: distribution,
+ },
+ })
+ if err != nil {
+ return err
+ }
+
+ // Delete the release files if there are no packages
+ if len(pfs) == 0 {
+ for _, filename := range []string{"Release", "Release.gpg", "InRelease"} {
+ pf, err := packages_model.GetFileForVersionByName(ctx, repoVersion.ID, filename, distribution)
+ if err != nil && !errors.Is(err, util.ErrNotExist) {
+ return err
+ } else if pf == nil {
+ continue
+ }
+
+ if err := packages_service.DeletePackageFile(ctx, pf); err != nil {
+ return err
+ }
+ }
+
+ return nil
+ }
+
+ components, err := debian_model.GetComponents(ctx, ownerID, distribution)
+ if err != nil {
+ return err
+ }
+
+ sort.Strings(components)
+
+ architectures, err := debian_model.GetArchitectures(ctx, ownerID, distribution)
+ if err != nil {
+ return err
+ }
+
+ sort.Strings(architectures)
+
+ priv, _, err := GetOrCreateKeyPair(ctx, ownerID)
+ if err != nil {
+ return err
+ }
+
+ block, err := armor.Decode(strings.NewReader(priv))
+ if err != nil {
+ return err
+ }
+
+ e, err := openpgp.ReadEntity(packet.NewReader(block.Body))
+ if err != nil {
+ return err
+ }
+
+ inReleaseContent, _ := packages_module.NewHashedBuffer()
+ defer inReleaseContent.Close()
+
+ sw, err := clearsign.Encode(inReleaseContent, e.PrivateKey, nil)
+ if err != nil {
+ return err
+ }
+
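+ // buf keeps an unsigned copy of the release data: sw produces the clearsigned
+ // InRelease, while buf is reused below for the plain Release file and for the
+ // detached Release.gpg signature.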
+ var buf bytes.Buffer
+
+ w := io.MultiWriter(sw, &buf)
+
+ fmt.Fprintf(w, "Origin: %s\n", setting.AppName)
+ fmt.Fprintf(w, "Label: %s\n", setting.AppName)
+ fmt.Fprintf(w, "Suite: %s\n", distribution)
+ fmt.Fprintf(w, "Codename: %s\n", distribution)
+ fmt.Fprintf(w, "Components: %s\n", strings.Join(components, " "))
+ fmt.Fprintf(w, "Architectures: %s\n", strings.Join(architectures, " "))
+ fmt.Fprintf(w, "Date: %s\n", time.Now().UTC().Format(time.RFC1123))
+ fmt.Fprint(w, "Acquire-By-Hash: yes\n")
+
+ pfds, err := packages_model.GetPackageFileDescriptors(ctx, pfs)
+ if err != nil {
+ return err
+ }
+
+ var md5, sha1, sha256, sha512 strings.Builder
+ for _, pfd := range pfds {
+ path := fmt.Sprintf("%s/binary-%s/%s", pfd.Properties.GetByName(debian_module.PropertyComponent), pfd.Properties.GetByName(debian_module.PropertyArchitecture), pfd.File.Name)
+ fmt.Fprintf(&md5, " %s %d %s\n", pfd.Blob.HashMD5, pfd.Blob.Size, path)
+ fmt.Fprintf(&sha1, " %s %d %s\n", pfd.Blob.HashSHA1, pfd.Blob.Size, path)
+ fmt.Fprintf(&sha256, " %s %d %s\n", pfd.Blob.HashSHA256, pfd.Blob.Size, path)
+ fmt.Fprintf(&sha512, " %s %d %s\n", pfd.Blob.HashSHA512, pfd.Blob.Size, path)
+ }
+
+ fmt.Fprintln(w, "MD5Sum:")
+ fmt.Fprint(w, md5.String())
+ fmt.Fprintln(w, "SHA1:")
+ fmt.Fprint(w, sha1.String())
+ fmt.Fprintln(w, "SHA256:")
+ fmt.Fprint(w, sha256.String())
+ fmt.Fprintln(w, "SHA512:")
+ fmt.Fprint(w, sha512.String())
+
+ sw.Close()
+
+ releaseGpgContent, _ := packages_module.NewHashedBuffer()
+ defer releaseGpgContent.Close()
+
+ if err := openpgp.ArmoredDetachSign(releaseGpgContent, e, bytes.NewReader(buf.Bytes()), nil); err != nil {
+ return err
+ }
+
+ releaseContent, _ := packages_module.CreateHashedBufferFromReader(&buf)
+ defer releaseContent.Close()
+
+ for _, file := range []struct {
+ Name string
+ Data packages_module.HashedSizeReader
+ }{
+ {"Release", releaseContent},
+ {"Release.gpg", releaseGpgContent},
+ {"InRelease", inReleaseContent},
+ } {
+ _, err = packages_service.AddFileToPackageVersionInternal(
+ ctx,
+ repoVersion,
+ &packages_service.PackageFileCreationInfo{
+ PackageFileInfo: packages_service.PackageFileInfo{
+ Filename: file.Name,
+ CompositeKey: distribution,
+ },
+ Creator: user_model.NewGhostUser(),
+ Data: file.Data,
+ IsLead: false,
+ OverwriteExisting: true,
+ Properties: map[string]string{
+ debian_module.PropertyDistribution: distribution,
+ },
+ },
+ )
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
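For reference, a minimal sketch (not part of the diff) of how a consumer could verify the clearsigned InRelease produced above. The helper name verifyInRelease and the example package are illustrative; pub is assumed to hold the armored public key returned by GetOrCreateKeyPair:

package example

import (
	"bytes"
	"errors"
	"strings"

	"github.com/ProtonMail/go-crypto/openpgp"
	"github.com/ProtonMail/go-crypto/openpgp/clearsign"
)

// verifyInRelease checks the clearsigned InRelease against the repository's
// armored public key. Sketch only; error handling is abbreviated.
func verifyInRelease(pub string, inRelease []byte) error {
	keyring, err := openpgp.ReadArmoredKeyRing(strings.NewReader(pub))
	if err != nil {
		return err
	}
	block, _ := clearsign.Decode(inRelease) // block.Plaintext is the Release content
	if block == nil {
		return errors.New("no clearsigned block found")
	}
	_, err = openpgp.CheckDetachedSignature(keyring, bytes.NewReader(block.Bytes), block.ArmoredSignature.Body, nil)
	return err
}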
diff --git a/services/packages/packages.go b/services/packages/packages.go
new file mode 100644
index 0000000..a5b8450
--- /dev/null
+++ b/services/packages/packages.go
@@ -0,0 +1,665 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package packages
+
+import (
+ "context"
+ "encoding/hex"
+ "errors"
+ "fmt"
+ "io"
+ "net/url"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ packages_model "code.gitea.io/gitea/models/packages"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ packages_module "code.gitea.io/gitea/modules/packages"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+var (
+ ErrQuotaTypeSize = errors.New("maximum allowed package type size exceeded")
+ ErrQuotaTotalSize = errors.New("maximum allowed package storage quota exceeded")
+ ErrQuotaTotalCount = errors.New("maximum allowed package count exceeded")
+)
+
+// PackageInfo describes a package
+type PackageInfo struct {
+ Owner *user_model.User
+ PackageType packages_model.Type
+ Name string
+ Version string
+}
+
+// PackageCreationInfo describes a package to create
+type PackageCreationInfo struct {
+ PackageInfo
+ SemverCompatible bool
+ Creator *user_model.User
+ Metadata any
+ PackageProperties map[string]string
+ VersionProperties map[string]string
+}
+
+// PackageFileInfo describes a package file
+type PackageFileInfo struct {
+ Filename string
+ CompositeKey string
+}
+
+// PackageFileCreationInfo describes a package file to create
+type PackageFileCreationInfo struct {
+ PackageFileInfo
+ Creator *user_model.User
+ Data packages_module.HashedSizeReader
+ IsLead bool
+ Properties map[string]string
+ OverwriteExisting bool
+}
+
+// CreatePackageAndAddFile creates a package with a file. If the same package exists already, ErrDuplicatePackageVersion is returned
+func CreatePackageAndAddFile(ctx context.Context, pvci *PackageCreationInfo, pfci *PackageFileCreationInfo) (*packages_model.PackageVersion, *packages_model.PackageFile, error) {
+ return createPackageAndAddFile(ctx, pvci, pfci, false)
+}
+
+// CreatePackageOrAddFileToExisting creates a package with a file or adds the file if the package exists already
+func CreatePackageOrAddFileToExisting(ctx context.Context, pvci *PackageCreationInfo, pfci *PackageFileCreationInfo) (*packages_model.PackageVersion, *packages_model.PackageFile, error) {
+ return createPackageAndAddFile(ctx, pvci, pfci, true)
+}
+
+func createPackageAndAddFile(ctx context.Context, pvci *PackageCreationInfo, pfci *PackageFileCreationInfo, allowDuplicate bool) (*packages_model.PackageVersion, *packages_model.PackageFile, error) {
+ dbCtx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return nil, nil, err
+ }
+ defer committer.Close()
+
+ pv, created, err := createPackageAndVersion(dbCtx, pvci, allowDuplicate)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ pf, pb, blobCreated, err := addFileToPackageVersion(dbCtx, pv, &pvci.PackageInfo, pfci)
+ removeBlob := false
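+ // If the blob was newly written to the content store but the transaction
+ // fails or cannot be committed, remove the orphaned blob again on the way out.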
+ defer func() {
+ if blobCreated && removeBlob {
+ contentStore := packages_module.NewContentStore()
+ if err := contentStore.Delete(packages_module.BlobHash256Key(pb.HashSHA256)); err != nil {
+ log.Error("Error deleting package blob from content store: %v", err)
+ }
+ }
+ }()
+ if err != nil {
+ removeBlob = true
+ return nil, nil, err
+ }
+
+ if err := committer.Commit(); err != nil {
+ removeBlob = true
+ return nil, nil, err
+ }
+
+ if created {
+ pd, err := packages_model.GetPackageDescriptor(ctx, pv)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ notify_service.PackageCreate(ctx, pvci.Creator, pd)
+ }
+
+ return pv, pf, nil
+}
+
+func createPackageAndVersion(ctx context.Context, pvci *PackageCreationInfo, allowDuplicate bool) (*packages_model.PackageVersion, bool, error) {
+ log.Trace("Creating package: %v, %v, %v, %s, %s, %+v, %+v, %v", pvci.Creator.ID, pvci.Owner.ID, pvci.PackageType, pvci.Name, pvci.Version, pvci.PackageProperties, pvci.VersionProperties, allowDuplicate)
+
+ packageCreated := true
+ p := &packages_model.Package{
+ OwnerID: pvci.Owner.ID,
+ Type: pvci.PackageType,
+ Name: pvci.Name,
+ LowerName: strings.ToLower(pvci.Name),
+ SemverCompatible: pvci.SemverCompatible,
+ }
+ var err error
+ if p, err = packages_model.TryInsertPackage(ctx, p); err != nil {
+ if err == packages_model.ErrDuplicatePackage {
+ packageCreated = false
+ } else {
+ log.Error("Error inserting package: %v", err)
+ return nil, false, err
+ }
+ }
+
+ if packageCreated {
+ for name, value := range pvci.PackageProperties {
+ if _, err := packages_model.InsertProperty(ctx, packages_model.PropertyTypePackage, p.ID, name, value); err != nil {
+ log.Error("Error setting package property: %v", err)
+ return nil, false, err
+ }
+ }
+ }
+
+ metadataJSON, err := json.Marshal(pvci.Metadata)
+ if err != nil {
+ return nil, false, err
+ }
+
+ versionCreated := true
+ pv := &packages_model.PackageVersion{
+ PackageID: p.ID,
+ CreatorID: pvci.Creator.ID,
+ Version: pvci.Version,
+ LowerVersion: strings.ToLower(pvci.Version),
+ MetadataJSON: string(metadataJSON),
+ }
+ if pv, err = packages_model.GetOrInsertVersion(ctx, pv); err != nil {
+ if err == packages_model.ErrDuplicatePackageVersion {
+ versionCreated = false
+ } else {
+ log.Error("Error inserting package: %v", err)
+ return nil, false, err
+ }
+
+ if !allowDuplicate {
+ // no need to log an error
+ return nil, false, err
+ }
+ }
+
+ if versionCreated {
+ if err := CheckCountQuotaExceeded(ctx, pvci.Creator, pvci.Owner); err != nil {
+ return nil, false, err
+ }
+
+ for name, value := range pvci.VersionProperties {
+ if _, err := packages_model.InsertProperty(ctx, packages_model.PropertyTypeVersion, pv.ID, name, value); err != nil {
+ log.Error("Error setting package version property: %v", err)
+ return nil, false, err
+ }
+ }
+ }
+
+ return pv, versionCreated, nil
+}
+
+// AddFileToExistingPackage adds a file to an existing package. If the package does not exist, ErrPackageNotExist is returned
+func AddFileToExistingPackage(ctx context.Context, pvi *PackageInfo, pfci *PackageFileCreationInfo) (*packages_model.PackageFile, error) {
+ return addFileToPackageWrapper(ctx, func(ctx context.Context) (*packages_model.PackageFile, *packages_model.PackageBlob, bool, error) {
+ pv, err := packages_model.GetVersionByNameAndVersion(ctx, pvi.Owner.ID, pvi.PackageType, pvi.Name, pvi.Version)
+ if err != nil {
+ return nil, nil, false, err
+ }
+
+ return addFileToPackageVersion(ctx, pv, pvi, pfci)
+ })
+}
+
+// AddFileToPackageVersionInternal adds a file to the package
+// This method skips quota checks and should only be used for system-managed packages.
+func AddFileToPackageVersionInternal(ctx context.Context, pv *packages_model.PackageVersion, pfci *PackageFileCreationInfo) (*packages_model.PackageFile, error) {
+ return addFileToPackageWrapper(ctx, func(ctx context.Context) (*packages_model.PackageFile, *packages_model.PackageBlob, bool, error) {
+ return addFileToPackageVersionUnchecked(ctx, pv, pfci)
+ })
+}
+
+func addFileToPackageWrapper(ctx context.Context, fn func(ctx context.Context) (*packages_model.PackageFile, *packages_model.PackageBlob, bool, error)) (*packages_model.PackageFile, error) {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return nil, err
+ }
+ defer committer.Close()
+
+ pf, pb, blobCreated, err := fn(ctx)
+ removeBlob := false
+ defer func() {
+ if removeBlob {
+ contentStore := packages_module.NewContentStore()
+ if err := contentStore.Delete(packages_module.BlobHash256Key(pb.HashSHA256)); err != nil {
+ log.Error("Error deleting package blob from content store: %v", err)
+ }
+ }
+ }()
+ if err != nil {
+ removeBlob = blobCreated
+ return nil, err
+ }
+
+ if err := committer.Commit(); err != nil {
+ removeBlob = blobCreated
+ return nil, err
+ }
+
+ return pf, nil
+}
+
+// NewPackageBlob creates a package blob instance
+func NewPackageBlob(hsr packages_module.HashedSizeReader) *packages_model.PackageBlob {
+ hashMD5, hashSHA1, hashSHA256, hashSHA512 := hsr.Sums()
+
+ return &packages_model.PackageBlob{
+ Size: hsr.Size(),
+ HashMD5: hex.EncodeToString(hashMD5),
+ HashSHA1: hex.EncodeToString(hashSHA1),
+ HashSHA256: hex.EncodeToString(hashSHA256),
+ HashSHA512: hex.EncodeToString(hashSHA512),
+ }
+}
+
+func addFileToPackageVersion(ctx context.Context, pv *packages_model.PackageVersion, pvi *PackageInfo, pfci *PackageFileCreationInfo) (*packages_model.PackageFile, *packages_model.PackageBlob, bool, error) {
+ if err := CheckSizeQuotaExceeded(ctx, pfci.Creator, pvi.Owner, pvi.PackageType, pfci.Data.Size()); err != nil {
+ return nil, nil, false, err
+ }
+
+ return addFileToPackageVersionUnchecked(ctx, pv, pfci)
+}
+
+func addFileToPackageVersionUnchecked(ctx context.Context, pv *packages_model.PackageVersion, pfci *PackageFileCreationInfo) (*packages_model.PackageFile, *packages_model.PackageBlob, bool, error) {
+ log.Trace("Adding package file: %v, %s", pv.ID, pfci.Filename)
+
+ pb, exists, err := packages_model.GetOrInsertBlob(ctx, NewPackageBlob(pfci.Data))
+ if err != nil {
+ log.Error("Error inserting package blob: %v", err)
+ return nil, nil, false, err
+ }
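+ // exists means a blob with the same SHA-256 is already in the database, so the
+ // content store copy can be reused; otherwise the new content is saved now.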
+ if !exists {
+ contentStore := packages_module.NewContentStore()
+ if err := contentStore.Save(packages_module.BlobHash256Key(pb.HashSHA256), pfci.Data, pfci.Data.Size()); err != nil {
+ log.Error("Error saving package blob in content store: %v", err)
+ return nil, nil, false, err
+ }
+ }
+
+ if pfci.OverwriteExisting {
+ pf, err := packages_model.GetFileForVersionByName(ctx, pv.ID, pfci.Filename, pfci.CompositeKey)
+ if err != nil && err != packages_model.ErrPackageFileNotExist {
+ return nil, pb, !exists, err
+ }
+ if pf != nil {
+ // Short circuit if blob is the same
+ if pf.BlobID == pb.ID {
+ return pf, pb, !exists, nil
+ }
+
+ if err := packages_model.DeleteAllProperties(ctx, packages_model.PropertyTypeFile, pf.ID); err != nil {
+ return nil, pb, !exists, err
+ }
+ if err := packages_model.DeleteFileByID(ctx, pf.ID); err != nil {
+ return nil, pb, !exists, err
+ }
+ }
+ }
+
+ pf := &packages_model.PackageFile{
+ VersionID: pv.ID,
+ BlobID: pb.ID,
+ Name: pfci.Filename,
+ LowerName: strings.ToLower(pfci.Filename),
+ CompositeKey: pfci.CompositeKey,
+ IsLead: pfci.IsLead,
+ }
+ if pf, err = packages_model.TryInsertFile(ctx, pf); err != nil {
+ if err != packages_model.ErrDuplicatePackageFile {
+ log.Error("Error inserting package file: %v", err)
+ }
+ return nil, pb, !exists, err
+ }
+
+ for name, value := range pfci.Properties {
+ if _, err := packages_model.InsertProperty(ctx, packages_model.PropertyTypeFile, pf.ID, name, value); err != nil {
+ log.Error("Error setting package file property: %v", err)
+ return pf, pb, !exists, err
+ }
+ }
+
+ return pf, pb, !exists, nil
+}
+
+// CheckCountQuotaExceeded checks if the owner already holds more than the allowed number of package versions
+// The check is skipped if the doer is an admin.
+func CheckCountQuotaExceeded(ctx context.Context, doer, owner *user_model.User) error {
+ if doer.IsAdmin {
+ return nil
+ }
+
+ if setting.Packages.LimitTotalOwnerCount > -1 {
+ totalCount, err := packages_model.CountVersions(ctx, &packages_model.PackageSearchOptions{
+ OwnerID: owner.ID,
+ IsInternal: optional.Some(false),
+ })
+ if err != nil {
+ log.Error("CountVersions failed: %v", err)
+ return err
+ }
+ if totalCount > setting.Packages.LimitTotalOwnerCount {
+ return ErrQuotaTotalCount
+ }
+ }
+
+ return nil
+}
+
+// CheckSizeQuotaExceeded checks if the upload size is bigger than the allowed size
+// The check is skipped if the doer is an admin.
+func CheckSizeQuotaExceeded(ctx context.Context, doer, owner *user_model.User, packageType packages_model.Type, uploadSize int64) error {
+ if doer.IsAdmin {
+ return nil
+ }
+
+ var typeSpecificSize int64
+ switch packageType {
+ case packages_model.TypeAlpine:
+ typeSpecificSize = setting.Packages.LimitSizeAlpine
+ case packages_model.TypeArch:
+ typeSpecificSize = setting.Packages.LimitSizeArch
+ case packages_model.TypeCargo:
+ typeSpecificSize = setting.Packages.LimitSizeCargo
+ case packages_model.TypeChef:
+ typeSpecificSize = setting.Packages.LimitSizeChef
+ case packages_model.TypeComposer:
+ typeSpecificSize = setting.Packages.LimitSizeComposer
+ case packages_model.TypeConan:
+ typeSpecificSize = setting.Packages.LimitSizeConan
+ case packages_model.TypeConda:
+ typeSpecificSize = setting.Packages.LimitSizeConda
+ case packages_model.TypeContainer:
+ typeSpecificSize = setting.Packages.LimitSizeContainer
+ case packages_model.TypeCran:
+ typeSpecificSize = setting.Packages.LimitSizeCran
+ case packages_model.TypeDebian:
+ typeSpecificSize = setting.Packages.LimitSizeDebian
+ case packages_model.TypeGeneric:
+ typeSpecificSize = setting.Packages.LimitSizeGeneric
+ case packages_model.TypeGo:
+ typeSpecificSize = setting.Packages.LimitSizeGo
+ case packages_model.TypeHelm:
+ typeSpecificSize = setting.Packages.LimitSizeHelm
+ case packages_model.TypeMaven:
+ typeSpecificSize = setting.Packages.LimitSizeMaven
+ case packages_model.TypeNpm:
+ typeSpecificSize = setting.Packages.LimitSizeNpm
+ case packages_model.TypeNuGet:
+ typeSpecificSize = setting.Packages.LimitSizeNuGet
+ case packages_model.TypePub:
+ typeSpecificSize = setting.Packages.LimitSizePub
+ case packages_model.TypePyPI:
+ typeSpecificSize = setting.Packages.LimitSizePyPI
+ case packages_model.TypeRpm:
+ typeSpecificSize = setting.Packages.LimitSizeRpm
+ case packages_model.TypeRubyGems:
+ typeSpecificSize = setting.Packages.LimitSizeRubyGems
+ case packages_model.TypeSwift:
+ typeSpecificSize = setting.Packages.LimitSizeSwift
+ case packages_model.TypeVagrant:
+ typeSpecificSize = setting.Packages.LimitSizeVagrant
+ }
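+ // limits below 0 disable the type-specific size check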
+ if typeSpecificSize > -1 && typeSpecificSize < uploadSize {
+ return ErrQuotaTypeSize
+ }
+
+ if setting.Packages.LimitTotalOwnerSize > -1 {
+ totalSize, err := packages_model.CalculateFileSize(ctx, &packages_model.PackageFileSearchOptions{
+ OwnerID: owner.ID,
+ })
+ if err != nil {
+ log.Error("CalculateFileSize failed: %v", err)
+ return err
+ }
+ if totalSize+uploadSize > setting.Packages.LimitTotalOwnerSize {
+ return ErrQuotaTotalSize
+ }
+ }
+
+ return nil
+}
+
+// GetOrCreateInternalPackageVersion gets or creates an internal package
+// Some package types need such internal packages for housekeeping.
+func GetOrCreateInternalPackageVersion(ctx context.Context, ownerID int64, packageType packages_model.Type, name, version string) (*packages_model.PackageVersion, error) {
+ var pv *packages_model.PackageVersion
+
+ return pv, db.WithTx(ctx, func(ctx context.Context) error {
+ p := &packages_model.Package{
+ OwnerID: ownerID,
+ Type: packageType,
+ Name: name,
+ LowerName: name,
+ IsInternal: true,
+ }
+ var err error
+ if p, err = packages_model.TryInsertPackage(ctx, p); err != nil {
+ if err != packages_model.ErrDuplicatePackage {
+ log.Error("Error inserting package: %v", err)
+ return err
+ }
+ }
+
+ pv = &packages_model.PackageVersion{
+ PackageID: p.ID,
+ CreatorID: ownerID,
+ Version: version,
+ LowerVersion: version,
+ IsInternal: true,
+ MetadataJSON: "null",
+ }
+ if pv, err = packages_model.GetOrInsertVersion(ctx, pv); err != nil {
+ if err != packages_model.ErrDuplicatePackageVersion {
+ log.Error("Error inserting package version: %v", err)
+ return err
+ }
+ }
+
+ return nil
+ })
+}
+
+// RemovePackageVersionByNameAndVersion deletes a package version and all associated files
+func RemovePackageVersionByNameAndVersion(ctx context.Context, doer *user_model.User, pvi *PackageInfo) error {
+ pv, err := packages_model.GetVersionByNameAndVersion(ctx, pvi.Owner.ID, pvi.PackageType, pvi.Name, pvi.Version)
+ if err != nil {
+ return err
+ }
+
+ return RemovePackageVersion(ctx, doer, pv)
+}
+
+// RemovePackageVersion deletes the package version and all associated files
+func RemovePackageVersion(ctx context.Context, doer *user_model.User, pv *packages_model.PackageVersion) error {
+ dbCtx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ pd, err := packages_model.GetPackageDescriptor(dbCtx, pv)
+ if err != nil {
+ return err
+ }
+
+ log.Trace("Deleting package: %v", pv.ID)
+
+ if err := DeletePackageVersionAndReferences(dbCtx, pv); err != nil {
+ return err
+ }
+
+ if err := committer.Commit(); err != nil {
+ return err
+ }
+
+ notify_service.PackageDelete(ctx, doer, pd)
+
+ return nil
+}
+
+// RemovePackageFileAndVersionIfUnreferenced deletes the package file and, if no files reference the version afterwards, the version as well
+func RemovePackageFileAndVersionIfUnreferenced(ctx context.Context, doer *user_model.User, pf *packages_model.PackageFile) error {
+ var pd *packages_model.PackageDescriptor
+
+ if err := db.WithTx(ctx, func(ctx context.Context) error {
+ if err := DeletePackageFile(ctx, pf); err != nil {
+ return err
+ }
+
+ has, err := packages_model.HasVersionFileReferences(ctx, pf.VersionID)
+ if err != nil {
+ return err
+ }
+ if !has {
+ pv, err := packages_model.GetVersionByID(ctx, pf.VersionID)
+ if err != nil {
+ return err
+ }
+
+ pd, err = packages_model.GetPackageDescriptor(ctx, pv)
+ if err != nil {
+ return err
+ }
+
+ if err := DeletePackageVersionAndReferences(ctx, pv); err != nil {
+ return err
+ }
+ }
+
+ return nil
+ }); err != nil {
+ return err
+ }
+
+ if pd != nil {
+ notify_service.PackageDelete(ctx, doer, pd)
+ }
+
+ return nil
+}
+
+// DeletePackageVersionAndReferences deletes the package version and its properties and files
+func DeletePackageVersionAndReferences(ctx context.Context, pv *packages_model.PackageVersion) error {
+ if err := packages_model.DeleteAllProperties(ctx, packages_model.PropertyTypeVersion, pv.ID); err != nil {
+ return err
+ }
+
+ pfs, err := packages_model.GetFilesByVersionID(ctx, pv.ID)
+ if err != nil {
+ return err
+ }
+
+ for _, pf := range pfs {
+ if err := DeletePackageFile(ctx, pf); err != nil {
+ return err
+ }
+ }
+
+ return packages_model.DeleteVersionByID(ctx, pv.ID)
+}
+
+// DeletePackageFile deletes the package file and its properties
+func DeletePackageFile(ctx context.Context, pf *packages_model.PackageFile) error {
+ if err := packages_model.DeleteAllProperties(ctx, packages_model.PropertyTypeFile, pf.ID); err != nil {
+ return err
+ }
+ return packages_model.DeleteFileByID(ctx, pf.ID)
+}
+
+// GetFileStreamByPackageNameAndVersion returns the content of the specific package file
+func GetFileStreamByPackageNameAndVersion(ctx context.Context, pvi *PackageInfo, pfi *PackageFileInfo) (io.ReadSeekCloser, *url.URL, *packages_model.PackageFile, error) {
+ log.Trace("Getting package file stream: %v, %v, %s, %s, %s, %s", pvi.Owner.ID, pvi.PackageType, pvi.Name, pvi.Version, pfi.Filename, pfi.CompositeKey)
+
+ pv, err := packages_model.GetVersionByNameAndVersion(ctx, pvi.Owner.ID, pvi.PackageType, pvi.Name, pvi.Version)
+ if err != nil {
+ if err == packages_model.ErrPackageNotExist {
+ return nil, nil, nil, err
+ }
+ log.Error("Error getting package: %v", err)
+ return nil, nil, nil, err
+ }
+
+ return GetFileStreamByPackageVersion(ctx, pv, pfi)
+}
+
+// GetFileStreamByPackageVersion returns the content of the specific package file
+func GetFileStreamByPackageVersion(ctx context.Context, pv *packages_model.PackageVersion, pfi *PackageFileInfo) (io.ReadSeekCloser, *url.URL, *packages_model.PackageFile, error) {
+ pf, err := packages_model.GetFileForVersionByName(ctx, pv.ID, pfi.Filename, pfi.CompositeKey)
+ if err != nil {
+ return nil, nil, nil, err
+ }
+
+ return GetPackageFileStream(ctx, pf)
+}
+
+// GetPackageFileStream returns the content of the specific package file
+func GetPackageFileStream(ctx context.Context, pf *packages_model.PackageFile) (io.ReadSeekCloser, *url.URL, *packages_model.PackageFile, error) {
+ pb, err := packages_model.GetBlobByID(ctx, pf.BlobID)
+ if err != nil {
+ return nil, nil, nil, err
+ }
+
+ return GetPackageBlobStream(ctx, pf, pb)
+}
+
+// GetPackageBlobStream returns the content of the specific package blob
+// If the storage supports direct serving and it's enabled, only the direct serving url is returned.
+func GetPackageBlobStream(ctx context.Context, pf *packages_model.PackageFile, pb *packages_model.PackageBlob) (io.ReadSeekCloser, *url.URL, *packages_model.PackageFile, error) {
+ key := packages_module.BlobHash256Key(pb.HashSHA256)
+
+ cs := packages_module.NewContentStore()
+
+ var s io.ReadSeekCloser
+ var u *url.URL
+ var err error
+
+ if cs.ShouldServeDirect() {
+ u, err = cs.GetServeDirectURL(key, pf.Name)
+ if err != nil && !errors.Is(err, storage.ErrURLNotSupported) {
+ log.Error("Error getting serve direct url: %v", err)
+ }
+ }
+ if u == nil {
+ s, err = cs.Get(key)
+ }
+
+ if err == nil {
+ if pf.IsLead {
+ if err := packages_model.IncrementDownloadCounter(ctx, pf.VersionID); err != nil {
+ log.Error("Error incrementing download counter: %v", err)
+ }
+ }
+ }
+ return s, u, pf, err
+}
+
+// RemoveAllPackages deletes all package versions owned by the given user
+func RemoveAllPackages(ctx context.Context, userID int64) (int, error) {
+ count := 0
+ for {
+ pkgVersions, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{
+ Paginator: &db.ListOptions{
+ PageSize: repo_model.RepositoryListDefaultPageSize,
+ Page: 1,
+ },
+ OwnerID: userID,
+ IsInternal: optional.None[bool](),
+ })
+ if err != nil {
+ return count, fmt.Errorf("GetOwnedPackages[%d]: %w", userID, err)
+ }
+ if len(pkgVersions) == 0 {
+ break
+ }
+ for _, pv := range pkgVersions {
+ if err := DeletePackageVersionAndReferences(ctx, pv); err != nil {
+ return count, fmt.Errorf("unable to delete package %d:%s[%d]. Error: %w", pv.PackageID, pv.Version, pv.ID, err)
+ }
+ count++
+ }
+ }
+ return count, nil
+}
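As a usage illustration (not part of the diff), a caller uploading a generic package through this service would wrap the upload stream in a HashedBuffer and pass the creation infos defined above. The helper uploadGeneric and its parameters are hypothetical:

package example

import (
	"context"
	"io"

	packages_model "code.gitea.io/gitea/models/packages"
	user_model "code.gitea.io/gitea/models/user"
	packages_module "code.gitea.io/gitea/modules/packages"
	packages_service "code.gitea.io/gitea/services/packages"
)

// uploadGeneric stores a single-file generic package; sketch only.
func uploadGeneric(ctx context.Context, owner, doer *user_model.User, name, version, filename string, upload io.Reader) error {
	buf, err := packages_module.CreateHashedBufferFromReader(upload)
	if err != nil {
		return err
	}
	defer buf.Close()

	_, _, err = packages_service.CreatePackageAndAddFile(ctx,
		&packages_service.PackageCreationInfo{
			PackageInfo: packages_service.PackageInfo{
				Owner:       owner,
				PackageType: packages_model.TypeGeneric,
				Name:        name,
				Version:     version,
			},
			Creator: doer,
		},
		&packages_service.PackageFileCreationInfo{
			PackageFileInfo: packages_service.PackageFileInfo{Filename: filename},
			Creator:         doer,
			Data:            buf,
			IsLead:          true,
		})
	return err // packages_model.ErrDuplicatePackageVersion if the version already exists
}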
diff --git a/services/packages/rpm/repository.go b/services/packages/rpm/repository.go
new file mode 100644
index 0000000..2cea042
--- /dev/null
+++ b/services/packages/rpm/repository.go
@@ -0,0 +1,674 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package rpm
+
+import (
+ "bytes"
+ "compress/gzip"
+ "context"
+ "crypto/sha256"
+ "encoding/hex"
+ "encoding/xml"
+ "errors"
+ "fmt"
+ "io"
+ "strings"
+ "time"
+
+ packages_model "code.gitea.io/gitea/models/packages"
+ rpm_model "code.gitea.io/gitea/models/packages/rpm"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ packages_module "code.gitea.io/gitea/modules/packages"
+ rpm_module "code.gitea.io/gitea/modules/packages/rpm"
+ "code.gitea.io/gitea/modules/util"
+ packages_service "code.gitea.io/gitea/services/packages"
+
+ "github.com/ProtonMail/go-crypto/openpgp"
+ "github.com/ProtonMail/go-crypto/openpgp/armor"
+ "github.com/ProtonMail/go-crypto/openpgp/packet"
+ "github.com/sassoftware/go-rpmutils"
+)
+
+// GetOrCreateRepositoryVersion gets or creates the internal repository package
+// The RPM registry needs multiple metadata files which are stored in this package.
+func GetOrCreateRepositoryVersion(ctx context.Context, ownerID int64) (*packages_model.PackageVersion, error) {
+ return packages_service.GetOrCreateInternalPackageVersion(ctx, ownerID, packages_model.TypeRpm, rpm_module.RepositoryPackage, rpm_module.RepositoryVersion)
+}
+
+// GetOrCreateKeyPair gets or creates the PGP keys used to sign repository metadata files
+func GetOrCreateKeyPair(ctx context.Context, ownerID int64) (string, string, error) {
+ priv, err := user_model.GetSetting(ctx, ownerID, rpm_module.SettingKeyPrivate)
+ if err != nil && !errors.Is(err, util.ErrNotExist) {
+ return "", "", err
+ }
+
+ pub, err := user_model.GetSetting(ctx, ownerID, rpm_module.SettingKeyPublic)
+ if err != nil && !errors.Is(err, util.ErrNotExist) {
+ return "", "", err
+ }
+
+ if priv == "" || pub == "" {
+ priv, pub, err = generateKeypair()
+ if err != nil {
+ return "", "", err
+ }
+
+ if err := user_model.SetUserSetting(ctx, ownerID, rpm_module.SettingKeyPrivate, priv); err != nil {
+ return "", "", err
+ }
+
+ if err := user_model.SetUserSetting(ctx, ownerID, rpm_module.SettingKeyPublic, pub); err != nil {
+ return "", "", err
+ }
+ }
+
+ return priv, pub, nil
+}
+
+func generateKeypair() (string, string, error) {
+ e, err := openpgp.NewEntity("", "RPM Registry", "", nil)
+ if err != nil {
+ return "", "", err
+ }
+
+ var priv strings.Builder
+ var pub strings.Builder
+
+ w, err := armor.Encode(&priv, openpgp.PrivateKeyType, nil)
+ if err != nil {
+ return "", "", err
+ }
+ if err := e.SerializePrivate(w, nil); err != nil {
+ return "", "", err
+ }
+ w.Close()
+
+ w, err = armor.Encode(&pub, openpgp.PublicKeyType, nil)
+ if err != nil {
+ return "", "", err
+ }
+ if err := e.Serialize(w); err != nil {
+ return "", "", err
+ }
+ w.Close()
+
+ return priv.String(), pub.String(), nil
+}
+
+// BuildAllRepositoryFiles (re)builds all repository files for every available group
+func BuildAllRepositoryFiles(ctx context.Context, ownerID int64) error {
+ pv, err := GetOrCreateRepositoryVersion(ctx, ownerID)
+ if err != nil {
+ return err
+ }
+
+ // 1. Delete all existing repository files
+ pfs, err := packages_model.GetFilesByVersionID(ctx, pv.ID)
+ if err != nil {
+ return err
+ }
+
+ for _, pf := range pfs {
+ if err := packages_service.DeletePackageFile(ctx, pf); err != nil {
+ return err
+ }
+ }
+
+ // 2. (Re)Build repository files for existing packages
+ groups, err := rpm_model.GetGroups(ctx, ownerID)
+ if err != nil {
+ return err
+ }
+ for _, group := range groups {
+ if err := BuildSpecificRepositoryFiles(ctx, ownerID, group); err != nil {
+ return fmt.Errorf("failed to build repository files [%s]: %w", group, err)
+ }
+ }
+
+ return nil
+}
+
+type repoChecksum struct {
+ Value string `xml:",chardata"`
+ Type string `xml:"type,attr"`
+}
+
+type repoLocation struct {
+ Href string `xml:"href,attr"`
+}
+
+type repoData struct {
+ Type string `xml:"type,attr"`
+ Checksum repoChecksum `xml:"checksum"`
+ OpenChecksum repoChecksum `xml:"open-checksum"`
+ Location repoLocation `xml:"location"`
+ Timestamp int64 `xml:"timestamp"`
+ Size int64 `xml:"size"`
+ OpenSize int64 `xml:"open-size"`
+}
+
+type packageData struct {
+ Package *packages_model.Package
+ Version *packages_model.PackageVersion
+ Blob *packages_model.PackageBlob
+ VersionMetadata *rpm_module.VersionMetadata
+ FileMetadata *rpm_module.FileMetadata
+}
+
+type packageCache = map[*packages_model.PackageFile]*packageData
+
+// BuildSpecificRepositoryFiles builds metadata files for the repository
+func BuildSpecificRepositoryFiles(ctx context.Context, ownerID int64, group string) error {
+ pv, err := GetOrCreateRepositoryVersion(ctx, ownerID)
+ if err != nil {
+ return err
+ }
+
+ pfs, _, err := packages_model.SearchFiles(ctx, &packages_model.PackageFileSearchOptions{
+ OwnerID: ownerID,
+ PackageType: packages_model.TypeRpm,
+ Query: "%.rpm",
+ CompositeKey: group,
+ })
+ if err != nil {
+ return err
+ }
+
+ // Delete the repository files if there are no packages
+ if len(pfs) == 0 {
+ pfs, err := packages_model.GetFilesByVersionID(ctx, pv.ID)
+ if err != nil {
+ return err
+ }
+ for _, pf := range pfs {
+ if err := packages_service.DeletePackageFile(ctx, pf); err != nil {
+ return err
+ }
+ }
+
+ return nil
+ }
+
+ // Cache data needed for all repository files
+ cache := make(packageCache)
+ for _, pf := range pfs {
+ pv, err := packages_model.GetVersionByID(ctx, pf.VersionID)
+ if err != nil {
+ return err
+ }
+ p, err := packages_model.GetPackageByID(ctx, pv.PackageID)
+ if err != nil {
+ return err
+ }
+ pb, err := packages_model.GetBlobByID(ctx, pf.BlobID)
+ if err != nil {
+ return err
+ }
+ pps, err := packages_model.GetPropertiesByName(ctx, packages_model.PropertyTypeFile, pf.ID, rpm_module.PropertyMetadata)
+ if err != nil {
+ return err
+ }
+
+ pd := &packageData{
+ Package: p,
+ Version: pv,
+ Blob: pb,
+ }
+
+ if err := json.Unmarshal([]byte(pv.MetadataJSON), &pd.VersionMetadata); err != nil {
+ return err
+ }
+ if len(pps) > 0 {
+ if err := json.Unmarshal([]byte(pps[0].Value), &pd.FileMetadata); err != nil {
+ return err
+ }
+ }
+
+ cache[pf] = pd
+ }
+
+ primary, err := buildPrimary(ctx, pv, pfs, cache, group)
+ if err != nil {
+ return err
+ }
+ filelists, err := buildFilelists(ctx, pv, pfs, cache, group)
+ if err != nil {
+ return err
+ }
+ other, err := buildOther(ctx, pv, pfs, cache, group)
+ if err != nil {
+ return err
+ }
+
+ return buildRepomd(
+ ctx,
+ pv,
+ ownerID,
+ []*repoData{
+ primary,
+ filelists,
+ other,
+ },
+ group,
+ )
+}
+
+// https://docs.pulpproject.org/en/2.19/plugins/pulp_rpm/tech-reference/rpm.html#repomd-xml
+func buildRepomd(ctx context.Context, pv *packages_model.PackageVersion, ownerID int64, data []*repoData, group string) error {
+ type Repomd struct {
+ XMLName xml.Name `xml:"repomd"`
+ Xmlns string `xml:"xmlns,attr"`
+ XmlnsRpm string `xml:"xmlns:rpm,attr"`
+ Data []*repoData `xml:"data"`
+ }
+
+ var buf bytes.Buffer
+ buf.WriteString(xml.Header)
+ if err := xml.NewEncoder(&buf).Encode(&Repomd{
+ Xmlns: "http://linux.duke.edu/metadata/repo",
+ XmlnsRpm: "http://linux.duke.edu/metadata/rpm",
+ Data: data,
+ }); err != nil {
+ return err
+ }
+
+ priv, _, err := GetOrCreateKeyPair(ctx, ownerID)
+ if err != nil {
+ return err
+ }
+
+ block, err := armor.Decode(strings.NewReader(priv))
+ if err != nil {
+ return err
+ }
+
+ e, err := openpgp.ReadEntity(packet.NewReader(block.Body))
+ if err != nil {
+ return err
+ }
+
+ repomdAscContent, _ := packages_module.NewHashedBuffer()
+ defer repomdAscContent.Close()
+
+ if err := openpgp.ArmoredDetachSign(repomdAscContent, e, bytes.NewReader(buf.Bytes()), nil); err != nil {
+ return err
+ }
+
+ repomdContent, _ := packages_module.CreateHashedBufferFromReader(&buf)
+ defer repomdContent.Close()
+
+ for _, file := range []struct {
+ Name string
+ Data packages_module.HashedSizeReader
+ }{
+ {"repomd.xml", repomdContent},
+ {"repomd.xml.asc", repomdAscContent},
+ } {
+ _, err = packages_service.AddFileToPackageVersionInternal(
+ ctx,
+ pv,
+ &packages_service.PackageFileCreationInfo{
+ PackageFileInfo: packages_service.PackageFileInfo{
+ Filename: file.Name,
+ CompositeKey: group,
+ },
+ Creator: user_model.NewGhostUser(),
+ Data: file.Data,
+ IsLead: false,
+ OverwriteExisting: true,
+ },
+ )
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// https://docs.pulpproject.org/en/2.19/plugins/pulp_rpm/tech-reference/rpm.html#primary-xml
+func buildPrimary(ctx context.Context, pv *packages_model.PackageVersion, pfs []*packages_model.PackageFile, c packageCache, group string) (*repoData, error) {
+ type Version struct {
+ Epoch string `xml:"epoch,attr"`
+ Version string `xml:"ver,attr"`
+ Release string `xml:"rel,attr"`
+ }
+
+ type Checksum struct {
+ Checksum string `xml:",chardata"`
+ Type string `xml:"type,attr"`
+ Pkgid string `xml:"pkgid,attr"`
+ }
+
+ type Times struct {
+ File uint64 `xml:"file,attr"`
+ Build uint64 `xml:"build,attr"`
+ }
+
+ type Sizes struct {
+ Package int64 `xml:"package,attr"`
+ Installed uint64 `xml:"installed,attr"`
+ Archive uint64 `xml:"archive,attr"`
+ }
+
+ type Location struct {
+ Href string `xml:"href,attr"`
+ }
+
+ type EntryList struct {
+ Entries []*rpm_module.Entry `xml:"rpm:entry"`
+ }
+
+ type Format struct {
+ License string `xml:"rpm:license"`
+ Vendor string `xml:"rpm:vendor"`
+ Group string `xml:"rpm:group"`
+ Buildhost string `xml:"rpm:buildhost"`
+ Sourcerpm string `xml:"rpm:sourcerpm"`
+ Provides EntryList `xml:"rpm:provides"`
+ Requires EntryList `xml:"rpm:requires"`
+ Conflicts EntryList `xml:"rpm:conflicts"`
+ Obsoletes EntryList `xml:"rpm:obsoletes"`
+ Files []*rpm_module.File `xml:"file"`
+ }
+
+ type Package struct {
+ XMLName xml.Name `xml:"package"`
+ Type string `xml:"type,attr"`
+ Name string `xml:"name"`
+ Architecture string `xml:"arch"`
+ Version Version `xml:"version"`
+ Checksum Checksum `xml:"checksum"`
+ Summary string `xml:"summary"`
+ Description string `xml:"description"`
+ Packager string `xml:"packager"`
+ URL string `xml:"url"`
+ Time Times `xml:"time"`
+ Size Sizes `xml:"size"`
+ Location Location `xml:"location"`
+ Format Format `xml:"format"`
+ }
+
+ type Metadata struct {
+ XMLName xml.Name `xml:"metadata"`
+ Xmlns string `xml:"xmlns,attr"`
+ XmlnsRpm string `xml:"xmlns:rpm,attr"`
+ PackageCount int `xml:"packages,attr"`
+ Packages []*Package `xml:"package"`
+ }
+
+ packages := make([]*Package, 0, len(pfs))
+ for _, pf := range pfs {
+ pd := c[pf]
+
+ files := make([]*rpm_module.File, 0, 3)
+ for _, f := range pd.FileMetadata.Files {
+ if f.IsExecutable {
+ files = append(files, f)
+ }
+ }
+ packageVersion := fmt.Sprintf("%s-%s", pd.FileMetadata.Version, pd.FileMetadata.Release)
+ packages = append(packages, &Package{
+ Type: "rpm",
+ Name: pd.Package.Name,
+ Architecture: pd.FileMetadata.Architecture,
+ Version: Version{
+ Epoch: pd.FileMetadata.Epoch,
+ Version: pd.FileMetadata.Version,
+ Release: pd.FileMetadata.Release,
+ },
+ Checksum: Checksum{
+ Type: "sha256",
+ Checksum: pd.Blob.HashSHA256,
+ Pkgid: "YES",
+ },
+ Summary: pd.VersionMetadata.Summary,
+ Description: pd.VersionMetadata.Description,
+ Packager: pd.FileMetadata.Packager,
+ URL: pd.VersionMetadata.ProjectURL,
+ Time: Times{
+ File: pd.FileMetadata.FileTime,
+ Build: pd.FileMetadata.BuildTime,
+ },
+ Size: Sizes{
+ Package: pd.Blob.Size,
+ Installed: pd.FileMetadata.InstalledSize,
+ Archive: pd.FileMetadata.ArchiveSize,
+ },
+ Location: Location{
+ Href: fmt.Sprintf("package/%s/%s/%s/%s-%s.%s.rpm", pd.Package.Name, packageVersion, pd.FileMetadata.Architecture, pd.Package.Name, packageVersion, pd.FileMetadata.Architecture),
+ },
+ Format: Format{
+ License: pd.VersionMetadata.License,
+ Vendor: pd.FileMetadata.Vendor,
+ Group: pd.FileMetadata.Group,
+ Buildhost: pd.FileMetadata.BuildHost,
+ Sourcerpm: pd.FileMetadata.SourceRpm,
+ Provides: EntryList{
+ Entries: pd.FileMetadata.Provides,
+ },
+ Requires: EntryList{
+ Entries: pd.FileMetadata.Requires,
+ },
+ Conflicts: EntryList{
+ Entries: pd.FileMetadata.Conflicts,
+ },
+ Obsoletes: EntryList{
+ Entries: pd.FileMetadata.Obsoletes,
+ },
+ Files: files,
+ },
+ })
+ }
+
+ return addDataAsFileToRepo(ctx, pv, "primary", &Metadata{
+ Xmlns: "http://linux.duke.edu/metadata/common",
+ XmlnsRpm: "http://linux.duke.edu/metadata/rpm",
+ PackageCount: len(pfs),
+ Packages: packages,
+ }, group)
+}
+
+// https://docs.pulpproject.org/en/2.19/plugins/pulp_rpm/tech-reference/rpm.html#filelists-xml
+func buildFilelists(ctx context.Context, pv *packages_model.PackageVersion, pfs []*packages_model.PackageFile, c packageCache, group string) (*repoData, error) { //nolint:dupl
+ type Version struct {
+ Epoch string `xml:"epoch,attr"`
+ Version string `xml:"ver,attr"`
+ Release string `xml:"rel,attr"`
+ }
+
+ type Package struct {
+ Pkgid string `xml:"pkgid,attr"`
+ Name string `xml:"name,attr"`
+ Architecture string `xml:"arch,attr"`
+ Version Version `xml:"version"`
+ Files []*rpm_module.File `xml:"file"`
+ }
+
+ type Filelists struct {
+ XMLName xml.Name `xml:"filelists"`
+ Xmlns string `xml:"xmlns,attr"`
+ PackageCount int `xml:"packages,attr"`
+ Packages []*Package `xml:"package"`
+ }
+
+ packages := make([]*Package, 0, len(pfs))
+ for _, pf := range pfs {
+ pd := c[pf]
+
+ packages = append(packages, &Package{
+ Pkgid: pd.Blob.HashSHA256,
+ Name: pd.Package.Name,
+ Architecture: pd.FileMetadata.Architecture,
+ Version: Version{
+ Epoch: pd.FileMetadata.Epoch,
+ Version: pd.FileMetadata.Version,
+ Release: pd.FileMetadata.Release,
+ },
+ Files: pd.FileMetadata.Files,
+ })
+ }
+
+ return addDataAsFileToRepo(ctx, pv, "filelists", &Filelists{
+ Xmlns: "http://linux.duke.edu/metadata/filelists",
+ PackageCount: len(pfs),
+ Packages: packages,
+ }, group)
+}
+
+// https://docs.pulpproject.org/en/2.19/plugins/pulp_rpm/tech-reference/rpm.html#other-xml
+func buildOther(ctx context.Context, pv *packages_model.PackageVersion, pfs []*packages_model.PackageFile, c packageCache, group string) (*repoData, error) { //nolint:dupl
+ type Version struct {
+ Epoch string `xml:"epoch,attr"`
+ Version string `xml:"ver,attr"`
+ Release string `xml:"rel,attr"`
+ }
+
+ type Package struct {
+ Pkgid string `xml:"pkgid,attr"`
+ Name string `xml:"name,attr"`
+ Architecture string `xml:"arch,attr"`
+ Version Version `xml:"version"`
+ Changelogs []*rpm_module.Changelog `xml:"changelog"`
+ }
+
+ type Otherdata struct {
+ XMLName xml.Name `xml:"otherdata"`
+ Xmlns string `xml:"xmlns,attr"`
+ PackageCount int `xml:"packages,attr"`
+ Packages []*Package `xml:"package"`
+ }
+
+ packages := make([]*Package, 0, len(pfs))
+ for _, pf := range pfs {
+ pd := c[pf]
+
+ packages = append(packages, &Package{
+ Pkgid: pd.Blob.HashSHA256,
+ Name: pd.Package.Name,
+ Architecture: pd.FileMetadata.Architecture,
+ Version: Version{
+ Epoch: pd.FileMetadata.Epoch,
+ Version: pd.FileMetadata.Version,
+ Release: pd.FileMetadata.Release,
+ },
+ Changelogs: pd.FileMetadata.Changelogs,
+ })
+ }
+
+ return addDataAsFileToRepo(ctx, pv, "other", &Otherdata{
+ Xmlns: "http://linux.duke.edu/metadata/other",
+ PackageCount: len(pfs),
+ Packages: packages,
+ }, group)
+}
+
+// writtenCounter counts all written bytes
+type writtenCounter struct {
+ written int64
+}
+
+func (wc *writtenCounter) Write(buf []byte) (int, error) {
+ n := len(buf)
+
+ wc.written += int64(n)
+
+ return n, nil
+}
+
+func (wc *writtenCounter) Written() int64 {
+ return wc.written
+}
+
+func addDataAsFileToRepo(ctx context.Context, pv *packages_model.PackageVersion, filetype string, obj any, group string) (*repoData, error) {
+ content, _ := packages_module.NewHashedBuffer()
+ defer content.Close()
+
+ gzw := gzip.NewWriter(content)
+ wc := &writtenCounter{}
+ h := sha256.New()
+
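+ // Everything written to w is simultaneously gzip-compressed into the stored
+ // buffer, counted by wc and hashed by h, so the "open" (uncompressed) size
+ // and checksum for repomd.xml are computed in a single pass.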
+ w := io.MultiWriter(gzw, wc, h)
+ _, _ = w.Write([]byte(xml.Header))
+
+ if err := xml.NewEncoder(w).Encode(obj); err != nil {
+ return nil, err
+ }
+
+ if err := gzw.Close(); err != nil {
+ return nil, err
+ }
+
+ filename := filetype + ".xml.gz"
+
+ _, err := packages_service.AddFileToPackageVersionInternal(
+ ctx,
+ pv,
+ &packages_service.PackageFileCreationInfo{
+ PackageFileInfo: packages_service.PackageFileInfo{
+ Filename: filename,
+ CompositeKey: group,
+ },
+ Creator: user_model.NewGhostUser(),
+ Data: content,
+ IsLead: false,
+ OverwriteExisting: true,
+ },
+ )
+ if err != nil {
+ return nil, err
+ }
+
+ _, _, hashSHA256, _ := content.Sums()
+
+ return &repoData{
+ Type: filetype,
+ Checksum: repoChecksum{
+ Type: "sha256",
+ Value: hex.EncodeToString(hashSHA256),
+ },
+ OpenChecksum: repoChecksum{
+ Type: "sha256",
+ Value: hex.EncodeToString(h.Sum(nil)),
+ },
+ Location: repoLocation{
+ Href: "repodata/" + filename,
+ },
+ Timestamp: time.Now().Unix(),
+ Size: content.Size(),
+ OpenSize: wc.Written(),
+ }, nil
+}
+
+func NewSignedRPMBuffer(rpm *packages_module.HashedBuffer, privateKey string) (*packages_module.HashedBuffer, error) {
+ keyring, err := openpgp.ReadArmoredKeyRing(bytes.NewReader([]byte(privateKey)))
+ if err != nil {
+ // failed to parse key
+ return nil, err
+ }
+ entity := keyring[0]
+ h, err := rpmutils.SignRpmStream(rpm, entity.PrivateKey, nil)
+ if err != nil {
+ // error signing rpm
+ return nil, err
+ }
+ signBlob, err := h.DumpSignatureHeader(false)
+ if err != nil {
+ // error writing sig header
+ return nil, err
+ }
+ if len(signBlob)%8 != 0 {
+ // the signature header must be padded to a multiple of 8 bytes
+ return nil, fmt.Errorf("incorrect signature header padding: got %d bytes, expected a multiple of 8", len(signBlob))
+ }
+
+ // move fp to sign end
+ if _, err := rpm.Seek(int64(h.OriginalSignatureHeaderSize()), io.SeekStart); err != nil {
+ return nil, err
+ }
+ // create signed rpm buf
+ return packages_module.CreateHashedBufferFromReader(io.MultiReader(bytes.NewReader(signBlob), rpm))
+}
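A sketch of the expected call site when an RPM is uploaded (only the signing step is shown; the helper signUpload, its package and the surrounding upload handler are assumptions):

package example

import (
	"context"

	packages_module "code.gitea.io/gitea/modules/packages"
	rpm_service "code.gitea.io/gitea/services/packages/rpm"
)

// signUpload re-signs an uploaded RPM with the owner's repository key.
// buf is the uploaded RPM as a HashedBuffer; the returned buffer replaces it.
func signUpload(ctx context.Context, ownerID int64, buf *packages_module.HashedBuffer) (*packages_module.HashedBuffer, error) {
	priv, _, err := rpm_service.GetOrCreateKeyPair(ctx, ownerID)
	if err != nil {
		return nil, err
	}
	return rpm_service.NewSignedRPMBuffer(buf, priv)
}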
diff --git a/services/pull/check.go b/services/pull/check.go
new file mode 100644
index 0000000..2d91ed0
--- /dev/null
+++ b/services/pull/check.go
@@ -0,0 +1,404 @@
+// Copyright 2019 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "strconv"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/process"
+ "code.gitea.io/gitea/modules/queue"
+ "code.gitea.io/gitea/modules/timeutil"
+ asymkey_service "code.gitea.io/gitea/services/asymkey"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+// prPatchCheckerQueue represents a queue to handle update pull request tests
+var prPatchCheckerQueue *queue.WorkerPoolQueue[string]
+
+var (
+ ErrIsClosed = errors.New("pull is closed")
+ ErrUserNotAllowedToMerge = models.ErrDisallowedToMerge{}
+ ErrHasMerged = errors.New("has already been merged")
+ ErrIsWorkInProgress = errors.New("work in progress PRs cannot be merged")
+ ErrIsChecking = errors.New("cannot merge while conflict checking is in progress")
+ ErrNotMergeableState = errors.New("not in mergeable state")
+ ErrDependenciesLeft = errors.New("is blocked by an open dependency")
+)
+
+// AddToTaskQueue adds the pull request to the patch-testing task queue.
+func AddToTaskQueue(ctx context.Context, pr *issues_model.PullRequest) {
+ pr.Status = issues_model.PullRequestStatusChecking
+ err := pr.UpdateColsIfNotMerged(ctx, "status")
+ if err != nil {
+ log.Error("AddToTaskQueue(%-v).UpdateCols.(add to queue): %v", pr, err)
+ return
+ }
+ log.Trace("Adding %-v to the test pull requests queue", pr)
+ err = prPatchCheckerQueue.Push(strconv.FormatInt(pr.ID, 10))
+ if err != nil && err != queue.ErrAlreadyInQueue {
+ log.Error("Error adding %-v to the test pull requests queue: %v", pr, err)
+ }
+}
+
+type MergeCheckType int
+
+const (
+ MergeCheckTypeGeneral MergeCheckType = iota // general merge checks for "merge", "rebase", "squash", etc
+ MergeCheckTypeManually // Manually Merged button (mark a PR as merged manually)
+ MergeCheckTypeAuto // Auto Merge (Scheduled Merge) After Checks Succeed
+)
+
+// CheckPullMergeable checks if the pull request is mergeable based on all conditions (branch protection, merge options, ...)
+func CheckPullMergeable(stdCtx context.Context, doer *user_model.User, perm *access_model.Permission, pr *issues_model.PullRequest, mergeCheckType MergeCheckType, adminSkipProtectionCheck bool) error {
+ return db.WithTx(stdCtx, func(ctx context.Context) error {
+ if pr.HasMerged {
+ return ErrHasMerged
+ }
+
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("Unable to load issue[%d] for %-v: %v", pr.IssueID, pr, err)
+ return err
+ } else if pr.Issue.IsClosed {
+ return ErrIsClosed
+ }
+
+ if allowedMerge, err := IsUserAllowedToMerge(ctx, pr, *perm, doer); err != nil {
+ log.Error("Error whilst checking if %-v is allowed to merge %-v: %v", doer, pr, err)
+ return err
+ } else if !allowedMerge {
+ return ErrUserNotAllowedToMerge
+ }
+
+ if mergeCheckType == MergeCheckTypeManually {
+ // if doer is doing "manually merge" (mark as merged manually), do not check anything
+ return nil
+ }
+
+ if pr.IsWorkInProgress(ctx) {
+ return ErrIsWorkInProgress
+ }
+
+ if !pr.CanAutoMerge() && !pr.IsEmpty() {
+ return ErrNotMergeableState
+ }
+
+ if pr.IsChecking() {
+ return ErrIsChecking
+ }
+
+ if pb, err := CheckPullBranchProtections(ctx, pr, false); err != nil {
+ if !models.IsErrDisallowedToMerge(err) {
+ log.Error("Error whilst checking pull branch protection for %-v: %v", pr, err)
+ return err
+ }
+
+ // Now the branch protection check failed, check whether the failure could be skipped (skip by setting err = nil)
+
+ // * when doing Auto Merge (Scheduled Merge After Checks Succeed), skip the branch protection check
+ if mergeCheckType == MergeCheckTypeAuto {
+ err = nil
+ }
+
+ // * if the doer is admin, they could skip the branch protection check,
+ // if that's allowed by the protected branch rule.
+ if adminSkipProtectionCheck {
+ if doer.IsAdmin {
+ err = nil // instance admin can skip the check, so clear the error
+ } else if !pb.ApplyToAdmins {
+ if isRepoAdmin, errCheckAdmin := access_model.IsUserRepoAdmin(ctx, pr.BaseRepo, doer); errCheckAdmin != nil {
+ log.Error("Unable to check if %-v is a repo admin in %-v: %v", doer, pr.BaseRepo, errCheckAdmin)
+ return errCheckAdmin
+ } else if isRepoAdmin {
+ err = nil // repo admin can skip the check, so clear the error
+ }
+ }
+ }
+
+ // If there is still a branch protection check error, return it
+ if err != nil {
+ return err
+ }
+ }
+
+ if _, err := isSignedIfRequired(ctx, pr, doer); err != nil {
+ return err
+ }
+
+ if noDeps, err := issues_model.IssueNoDependenciesLeft(ctx, pr.Issue); err != nil {
+ return err
+ } else if !noDeps {
+ return ErrDependenciesLeft
+ }
+
+ return nil
+ })
+}
+
+// isSignedIfRequired checks whether the merge would be signed when the branch protection requires signed commits
+func isSignedIfRequired(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User) (bool, error) {
+ pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
+ if err != nil {
+ return false, err
+ }
+
+ if pb == nil || !pb.RequireSignedCommits {
+ return true, nil
+ }
+
+ sign, _, _, err := asymkey_service.SignMerge(ctx, pr, doer, pr.BaseRepo.RepoPath(), pr.BaseBranch, pr.GetGitRefName())
+
+ return sign, err
+}
+
+// checkAndUpdateStatus checks whether the pull request can leave the checking status,
+// and sets it to either conflicted or mergeable.
+func checkAndUpdateStatus(ctx context.Context, pr *issues_model.PullRequest) {
+ // If status has not been changed to conflict by testPatch then we are mergeable
+ if pr.Status == issues_model.PullRequestStatusChecking {
+ pr.Status = issues_model.PullRequestStatusMergeable
+ }
+
+ // Make sure there is no waiting test to process before leaving the checking status.
+ has, err := prPatchCheckerQueue.Has(strconv.FormatInt(pr.ID, 10))
+ if err != nil {
+ log.Error("Unable to check if the queue is waiting to reprocess %-v. Error: %v", pr, err)
+ }
+
+ if has {
+ log.Trace("Not updating status for %-v as it is due to be rechecked", pr)
+ return
+ }
+
+ if err := pr.UpdateColsIfNotMerged(ctx, "merge_base", "status", "conflicted_files", "changed_protected_files"); err != nil {
+ log.Error("Update[%-v]: %v", pr, err)
+ }
+}
+
+// getMergeCommit checks if a pull request has been merged
+// Returns the git.Commit of the pull request if merged
+func getMergeCommit(ctx context.Context, pr *issues_model.PullRequest) (*git.Commit, error) {
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ return nil, fmt.Errorf("unable to load base repo for %s: %w", pr, err)
+ }
+
+ prHeadRef := pr.GetGitRefName()
+
+ // Check if the pull request is merged into BaseBranch
+ if _, _, err := git.NewCommand(ctx, "merge-base", "--is-ancestor").
+ AddDynamicArguments(prHeadRef, pr.BaseBranch).
+ RunStdString(&git.RunOpts{Dir: pr.BaseRepo.RepoPath()}); err != nil {
+ if strings.Contains(err.Error(), "exit status 1") {
+ // prHeadRef is not an ancestor of the base branch
+ return nil, nil
+ }
+ // Errors are signaled by a non-zero status that is not 1
+ return nil, fmt.Errorf("%-v git merge-base --is-ancestor: %w", pr, err)
+ }
+
+ // If merge-base successfully exits then prHeadRef is an ancestor of pr.BaseBranch
+
+ // Find the head commit id
+ prHeadCommitID, err := git.GetFullCommitID(ctx, pr.BaseRepo.RepoPath(), prHeadRef)
+ if err != nil {
+ return nil, fmt.Errorf("GetFullCommitID(%s) in %s: %w", prHeadRef, pr.BaseRepo.FullName(), err)
+ }
+
+ gitRepo, err := gitrepo.OpenRepository(ctx, pr.BaseRepo)
+ if err != nil {
+ return nil, fmt.Errorf("%-v OpenRepository: %w", pr.BaseRepo, err)
+ }
+ defer gitRepo.Close()
+
+ objectFormat := git.ObjectFormatFromName(pr.BaseRepo.ObjectFormatName)
+
+ // Get the commit from BaseBranch where the pull request got merged
+ mergeCommit, _, err := git.NewCommand(ctx, "rev-list", "--ancestry-path", "--merges", "--reverse").
+ AddDynamicArguments(prHeadCommitID + ".." + pr.BaseBranch).
+ RunStdString(&git.RunOpts{Dir: pr.BaseRepo.RepoPath()})
+ if err != nil {
+ return nil, fmt.Errorf("git rev-list --ancestry-path --merges --reverse: %w", err)
+ } else if len(mergeCommit) < objectFormat.FullLength() {
+ // PR was maybe fast-forwarded, so just use last commit of PR
+ mergeCommit = prHeadCommitID
+ }
+ mergeCommit = strings.TrimSpace(mergeCommit)
+
+ commit, err := gitRepo.GetCommit(mergeCommit)
+ if err != nil {
+ return nil, fmt.Errorf("GetMergeCommit[%s]: %w", mergeCommit, err)
+ }
+
+ return commit, nil
+}
+
+// manuallyMerged checks if a pull request was merged outside of Gitea and,
+// if so, marks the pull request as manually merged
+func manuallyMerged(ctx context.Context, pr *issues_model.PullRequest) bool {
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ log.Error("%-v LoadBaseRepo: %v", pr, err)
+ return false
+ }
+
+ if unit, err := pr.BaseRepo.GetUnit(ctx, unit.TypePullRequests); err == nil {
+ config := unit.PullRequestsConfig()
+ if !config.AutodetectManualMerge {
+ return false
+ }
+ } else {
+ log.Error("%-v BaseRepo.GetUnit(unit.TypePullRequests): %v", pr, err)
+ return false
+ }
+
+ commit, err := getMergeCommit(ctx, pr)
+ if err != nil {
+ log.Error("%-v getMergeCommit: %v", pr, err)
+ return false
+ }
+
+ if commit == nil {
+ // no merge commit found
+ return false
+ }
+
+ pr.MergedCommitID = commit.ID.String()
+ pr.MergedUnix = timeutil.TimeStamp(commit.Author.When.Unix())
+ pr.Status = issues_model.PullRequestStatusManuallyMerged
+ merger, _ := user_model.GetUserByEmail(ctx, commit.Author.Email)
+
+ // When the commit author is unknown set the BaseRepo owner as merger
+ if merger == nil {
+ if pr.BaseRepo.Owner == nil {
+ if err = pr.BaseRepo.LoadOwner(ctx); err != nil {
+ log.Error("%-v BaseRepo.LoadOwner: %v", pr, err)
+ return false
+ }
+ }
+ merger = pr.BaseRepo.Owner
+ }
+ pr.Merger = merger
+ pr.MergerID = merger.ID
+
+ if merged, err := pr.SetMerged(ctx); err != nil {
+ log.Error("%-v setMerged : %v", pr, err)
+ return false
+ } else if !merged {
+ return false
+ }
+
+ notify_service.MergePullRequest(ctx, merger, pr)
+
+ log.Info("manuallyMerged[%-v]: Marked as manually merged into %s/%s by commit id: %s", pr, pr.BaseRepo.Name, pr.BaseBranch, commit.ID.String())
+ return true
+}
+
+// InitializePullRequests checks and tests untested patches of pull requests.
+func InitializePullRequests(ctx context.Context) {
+ prs, err := issues_model.GetPullRequestIDsByCheckStatus(ctx, issues_model.PullRequestStatusChecking)
+ if err != nil {
+ log.Error("Find Checking PRs: %v", err)
+ return
+ }
+ for _, prID := range prs {
+ select {
+ case <-ctx.Done():
+ return
+ default:
+ log.Trace("Adding PR[%d] to the pull requests patch checking queue", prID)
+ if err := prPatchCheckerQueue.Push(strconv.FormatInt(prID, 10)); err != nil {
+ log.Error("Error adding PR[%d] to the pull requests patch checking queue %v", prID, err)
+ }
+ }
+ }
+}
+
+// handler processes the queued pull request IDs and tests each PR
+func handler(items ...string) []string {
+ for _, s := range items {
+ id, _ := strconv.ParseInt(s, 10, 64)
+ testPR(id)
+ }
+ return nil
+}
+
+func testPR(id int64) {
+ pullWorkingPool.CheckIn(fmt.Sprint(id))
+ defer pullWorkingPool.CheckOut(fmt.Sprint(id))
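+	// The working pool ensures that checks for the same pull request ID are
+	// never run concurrently.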
+ ctx, _, finished := process.GetManager().AddContext(graceful.GetManager().HammerContext(), fmt.Sprintf("Test PR[%d] from patch checking queue", id))
+ defer finished()
+
+ pr, err := issues_model.GetPullRequestByID(ctx, id)
+ if err != nil {
+ log.Error("Unable to GetPullRequestByID[%d] for testPR: %v", id, err)
+ return
+ }
+
+ log.Trace("Testing %-v", pr)
+ defer func() {
+ log.Trace("Done testing %-v (status: %s)", pr, pr.Status)
+ }()
+
+ if pr.HasMerged {
+ log.Trace("%-v is already merged (status: %s, merge commit: %s)", pr, pr.Status, pr.MergedCommitID)
+ return
+ }
+
+ if manuallyMerged(ctx, pr) {
+ log.Trace("%-v is manually merged (status: %s, merge commit: %s)", pr, pr.Status, pr.MergedCommitID)
+ return
+ }
+
+ if err := TestPatch(pr); err != nil {
+ log.Error("testPatch[%-v]: %v", pr, err)
+ pr.Status = issues_model.PullRequestStatusError
+ if err := pr.UpdateCols(ctx, "status"); err != nil {
+ log.Error("update pr [%-v] status to PullRequestStatusError failed: %v", pr, err)
+ }
+ return
+ }
+ checkAndUpdateStatus(ctx, pr)
+}
+
+// CheckPRsForBaseBranch checks all pull requests that target the given base branch
+func CheckPRsForBaseBranch(ctx context.Context, baseRepo *repo_model.Repository, baseBranchName string) error {
+ prs, err := issues_model.GetUnmergedPullRequestsByBaseInfo(ctx, baseRepo.ID, baseBranchName)
+ if err != nil {
+ return err
+ }
+
+ for _, pr := range prs {
+ AddToTaskQueue(ctx, pr)
+ }
+
+ return nil
+}
+
+// Init runs the task queue to test all the checking status pull requests
+func Init() error {
+ prPatchCheckerQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "pr_patch_checker", handler)
+
+ if prPatchCheckerQueue == nil {
+ return fmt.Errorf("unable to create pr_patch_checker queue")
+ }
+
+ go graceful.GetManager().RunWithCancel(prPatchCheckerQueue)
+ go graceful.GetManager().RunWithShutdownContext(InitializePullRequests)
+ return nil
+}
diff --git a/services/pull/check_test.go b/services/pull/check_test.go
new file mode 100644
index 0000000..b99cf01
--- /dev/null
+++ b/services/pull/check_test.go
@@ -0,0 +1,70 @@
+// Copyright 2019 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "context"
+ "strconv"
+ "testing"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/queue"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestPullRequest_AddToTaskQueue(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ idChan := make(chan int64, 10)
+ testHandler := func(items ...string) []string {
+ for _, s := range items {
+ id, _ := strconv.ParseInt(s, 10, 64)
+ idChan <- id
+ }
+ return nil
+ }
+
+ cfg, err := setting.GetQueueSettings(setting.CfgProvider, "pr_patch_checker")
+ require.NoError(t, err)
+ prPatchCheckerQueue, err = queue.NewWorkerPoolQueueWithContext(context.Background(), "pr_patch_checker", cfg, testHandler, true)
+ require.NoError(t, err)
+
+ pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2})
+ AddToTaskQueue(db.DefaultContext, pr)
+
+ assert.Eventually(t, func() bool {
+ pr = unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2})
+ return pr.Status == issues_model.PullRequestStatusChecking
+ }, 1*time.Second, 100*time.Millisecond)
+
+ has, err := prPatchCheckerQueue.Has(strconv.FormatInt(pr.ID, 10))
+ assert.True(t, has)
+ require.NoError(t, err)
+
+ go prPatchCheckerQueue.Run()
+
+ select {
+ case id := <-idChan:
+ assert.EqualValues(t, pr.ID, id)
+ case <-time.After(time.Second):
+		assert.FailNow(t, "Timeout: nothing was added to prPatchCheckerQueue")
+ }
+
+ has, err = prPatchCheckerQueue.Has(strconv.FormatInt(pr.ID, 10))
+ assert.False(t, has)
+ require.NoError(t, err)
+
+ pr = unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2})
+ assert.Equal(t, issues_model.PullRequestStatusChecking, pr.Status)
+
+ prPatchCheckerQueue.ShutdownWait(5 * time.Second)
+ prPatchCheckerQueue = nil
+}
diff --git a/services/pull/comment.go b/services/pull/comment.go
new file mode 100644
index 0000000..53587d4
--- /dev/null
+++ b/services/pull/comment.go
@@ -0,0 +1,94 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "context"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/json"
+)
+
+// getCommitIDsFromRepo gets the commit IDs in the repo between oldCommitID and newCommitID.
+// isForcePush will be true if oldCommit isn't on the branch.
+// Commits already on baseBranch are skipped.
+func getCommitIDsFromRepo(ctx context.Context, repo *repo_model.Repository, oldCommitID, newCommitID, baseBranch string) (commitIDs []string, isForcePush bool, err error) {
+ gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, repo)
+ if err != nil {
+ return nil, false, err
+ }
+ defer closer.Close()
+
+ oldCommit, err := gitRepo.GetCommit(oldCommitID)
+ if err != nil {
+ return nil, false, err
+ }
+
+ newCommit, err := gitRepo.GetCommit(newCommitID)
+ if err != nil {
+ return nil, false, err
+ }
+
+ isForcePush, err = newCommit.IsForcePush(oldCommitID)
+ if err != nil {
+ return nil, false, err
+ }
+
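+	// For a force push only the old and new head commits are recorded,
+	// since the intermediate history has been rewritten.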
+ if isForcePush {
+ commitIDs = make([]string, 2)
+ commitIDs[0] = oldCommitID
+ commitIDs[1] = newCommitID
+
+ return commitIDs, isForcePush, err
+ }
+
+ // Find commits between new and old commit excluding base branch commits
+ commits, err := gitRepo.CommitsBetweenNotBase(newCommit, oldCommit, baseBranch)
+ if err != nil {
+ return nil, false, err
+ }
+
+ commitIDs = make([]string, 0, len(commits))
+ for i := len(commits) - 1; i >= 0; i-- {
+ commitIDs = append(commitIDs, commits[i].ID.String())
+ }
+
+ return commitIDs, isForcePush, err
+}
+
+// CreatePushPullComment creates a comment on the pull request recording the commits pushed to its head
+func CreatePushPullComment(ctx context.Context, pusher *user_model.User, pr *issues_model.PullRequest, oldCommitID, newCommitID string) (comment *issues_model.Comment, err error) {
+ if pr.HasMerged || oldCommitID == "" || newCommitID == "" {
+ return nil, nil
+ }
+
+ ops := &issues_model.CreateCommentOptions{
+ Type: issues_model.CommentTypePullRequestPush,
+ Doer: pusher,
+ Repo: pr.BaseRepo,
+ }
+
+ var data issues_model.PushActionContent
+
+ data.CommitIDs, data.IsForcePush, err = getCommitIDsFromRepo(ctx, pr.BaseRepo, oldCommitID, newCommitID, pr.BaseBranch)
+ if err != nil {
+ return nil, err
+ }
+
+ ops.Issue = pr.Issue
+
+ dataJSON, err := json.Marshal(data)
+ if err != nil {
+ return nil, err
+ }
+
+ ops.Content = string(dataJSON)
+
+ comment, err = issues_model.CreateComment(ctx, ops)
+
+ return comment, err
+}
diff --git a/services/pull/commit_status.go b/services/pull/commit_status.go
new file mode 100644
index 0000000..0d4763a
--- /dev/null
+++ b/services/pull/commit_status.go
@@ -0,0 +1,171 @@
+// Copyright 2019 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "context"
+ "errors"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/structs"
+
+ "github.com/gobwas/glob"
+)
+
+// MergeRequiredContextsCommitStatus returns a commit status state for given required contexts
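+// Each required context is treated as a glob pattern, e.g. "Build*" matches
+// the status contexts "Build 1" and "Build 2".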
+func MergeRequiredContextsCommitStatus(commitStatuses []*git_model.CommitStatus, requiredContexts []string) structs.CommitStatusState {
+ // matchedCount is the number of `CommitStatus.Context` that match any context of `requiredContexts`
+ matchedCount := 0
+ returnedStatus := structs.CommitStatusSuccess
+
+ if len(requiredContexts) > 0 {
+ requiredContextsGlob := make(map[string]glob.Glob, len(requiredContexts))
+ for _, ctx := range requiredContexts {
+ if gp, err := glob.Compile(ctx); err != nil {
+ log.Error("glob.Compile %s failed. Error: %v", ctx, err)
+ } else {
+ requiredContextsGlob[ctx] = gp
+ }
+ }
+
+ for _, gp := range requiredContextsGlob {
+ var targetStatus structs.CommitStatusState
+ for _, commitStatus := range commitStatuses {
+ if gp.Match(commitStatus.Context) {
+ targetStatus = commitStatus.State
+ matchedCount++
+ break
+ }
+ }
+
+			// If a required context matched no commit status, the overall state is pending
+ if targetStatus == "" {
+ if structs.CommitStatusPending.NoBetterThan(returnedStatus) {
+ returnedStatus = structs.CommitStatusPending
+ }
+ break
+ }
+
+ if targetStatus.NoBetterThan(returnedStatus) {
+ returnedStatus = targetStatus
+ }
+ }
+ }
+
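+	// If no required context matched any status, fall back to the combined
+	// state of all commit statuses.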
+ if matchedCount == 0 && returnedStatus == structs.CommitStatusSuccess {
+ status := git_model.CalcCommitStatus(commitStatuses)
+ if status != nil {
+ return status.State
+ }
+ return ""
+ }
+
+ return returnedStatus
+}
+
+// IsCommitStatusContextSuccess returns true if all required status check contexts succeed.
+func IsCommitStatusContextSuccess(commitStatuses []*git_model.CommitStatus, requiredContexts []string) bool {
+	// If no specific context is required, require that the combined commit status is a success
+ if len(requiredContexts) == 0 {
+ status := git_model.CalcCommitStatus(commitStatuses)
+ if status == nil || status.State != structs.CommitStatusSuccess {
+ return false
+ }
+ return true
+ }
+
+ for _, ctx := range requiredContexts {
+ var found bool
+ for _, commitStatus := range commitStatuses {
+ if commitStatus.Context == ctx {
+ if commitStatus.State != structs.CommitStatusSuccess {
+ return false
+ }
+
+ found = true
+ break
+ }
+ }
+ if !found {
+ return false
+ }
+ }
+ return true
+}
+
+// IsPullCommitStatusPass returns whether all required status checks pass
+func IsPullCommitStatusPass(ctx context.Context, pr *issues_model.PullRequest) (bool, error) {
+ pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
+ if err != nil {
+ return false, fmt.Errorf("GetFirstMatchProtectedBranchRule: %w", err)
+ }
+ if pb == nil || !pb.EnableStatusCheck {
+ return true, nil
+ }
+
+ state, err := GetPullRequestCommitStatusState(ctx, pr)
+ if err != nil {
+ return false, err
+ }
+ return state.IsSuccess(), nil
+}
+
+// GetPullRequestCommitStatusState returns pull request merged commit status state
+func GetPullRequestCommitStatusState(ctx context.Context, pr *issues_model.PullRequest) (structs.CommitStatusState, error) {
+ // Ensure HeadRepo is loaded
+ if err := pr.LoadHeadRepo(ctx); err != nil {
+ return "", fmt.Errorf("LoadHeadRepo: %w", err)
+ }
+
+ // check if all required status checks are successful
+ headGitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, pr.HeadRepo)
+ if err != nil {
+ return "", fmt.Errorf("RepositoryFromContextOrOpen: %w", err)
+ }
+ defer closer.Close()
+
+ if pr.Flow == issues_model.PullRequestFlowGithub && !headGitRepo.IsBranchExist(pr.HeadBranch) {
+ return "", errors.New("head branch does not exist, can not merge")
+ }
+ if pr.Flow == issues_model.PullRequestFlowAGit && !git.IsReferenceExist(ctx, headGitRepo.Path, pr.GetGitRefName()) {
+ return "", errors.New("head branch does not exist, can not merge")
+ }
+
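+	// Resolve the head commit: GitHub-flow PRs use a real branch, while AGit-flow
+	// PRs use a special ref, so the commit ID is looked up accordingly.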
+ var sha string
+ if pr.Flow == issues_model.PullRequestFlowGithub {
+ sha, err = headGitRepo.GetBranchCommitID(pr.HeadBranch)
+ } else {
+ sha, err = headGitRepo.GetRefCommitID(pr.GetGitRefName())
+ }
+ if err != nil {
+ return "", err
+ }
+
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ return "", fmt.Errorf("LoadBaseRepo: %w", err)
+ }
+
+ commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, pr.BaseRepo.ID, sha, db.ListOptionsAll)
+ if err != nil {
+ return "", fmt.Errorf("GetLatestCommitStatus: %w", err)
+ }
+
+ pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
+ if err != nil {
+ return "", fmt.Errorf("GetFirstMatchProtectedBranchRule: %w", err)
+ }
+ var requiredContexts []string
+ if pb != nil {
+ requiredContexts = pb.StatusCheckContexts
+ }
+
+ return MergeRequiredContextsCommitStatus(commitStatuses, requiredContexts), nil
+}
diff --git a/services/pull/commit_status_test.go b/services/pull/commit_status_test.go
new file mode 100644
index 0000000..592acdd
--- /dev/null
+++ b/services/pull/commit_status_test.go
@@ -0,0 +1,65 @@
+// Copyright 2024 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "testing"
+
+ git_model "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/modules/structs"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestMergeRequiredContextsCommitStatus(t *testing.T) {
+ testCases := [][]*git_model.CommitStatus{
+ {
+ {Context: "Build 1", State: structs.CommitStatusSuccess},
+ {Context: "Build 2", State: structs.CommitStatusSuccess},
+ {Context: "Build 3", State: structs.CommitStatusSuccess},
+ },
+ {
+ {Context: "Build 1", State: structs.CommitStatusSuccess},
+ {Context: "Build 2", State: structs.CommitStatusSuccess},
+ {Context: "Build 2t", State: structs.CommitStatusPending},
+ },
+ {
+ {Context: "Build 1", State: structs.CommitStatusSuccess},
+ {Context: "Build 2", State: structs.CommitStatusSuccess},
+ {Context: "Build 2t", State: structs.CommitStatusFailure},
+ },
+ {
+ {Context: "Build 1", State: structs.CommitStatusSuccess},
+ {Context: "Build 2", State: structs.CommitStatusSuccess},
+ {Context: "Build 2t", State: structs.CommitStatusSuccess},
+ },
+ {
+ {Context: "Build 1", State: structs.CommitStatusSuccess},
+ {Context: "Build 2", State: structs.CommitStatusSuccess},
+ {Context: "Build 2t", State: structs.CommitStatusSuccess},
+ },
+ }
+ testCasesRequiredContexts := [][]string{
+ {"Build*"},
+ {"Build*", "Build 2t*"},
+ {"Build*", "Build 2t*"},
+ {"Build*", "Build 2t*", "Build 3*"},
+ {"Build*", "Build *", "Build 2t*", "Build 1*"},
+ }
+
+ testCasesExpected := []structs.CommitStatusState{
+ structs.CommitStatusSuccess,
+ structs.CommitStatusPending,
+ structs.CommitStatusFailure,
+ structs.CommitStatusPending,
+ structs.CommitStatusSuccess,
+ }
+
+ for i, commitStatuses := range testCases {
+ if MergeRequiredContextsCommitStatus(commitStatuses, testCasesRequiredContexts[i]) != testCasesExpected[i] {
+ assert.Fail(t, "Test case failed", "Test case %d failed", i+1)
+ }
+ }
+}
diff --git a/services/pull/edits.go b/services/pull/edits.go
new file mode 100644
index 0000000..c7550dc
--- /dev/null
+++ b/services/pull/edits.go
@@ -0,0 +1,40 @@
+// Copyright 2022 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "context"
+ "errors"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ unit_model "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+)
+
+var ErrUserHasNoPermissionForAction = errors.New("user not allowed to do this action")
+
+// SetAllowEdits allow edits from maintainers to PRs
+func SetAllowEdits(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest, allow bool) error {
+ if doer == nil || !pr.Issue.IsPoster(doer.ID) {
+ return ErrUserHasNoPermissionForAction
+ }
+
+ if err := pr.LoadHeadRepo(ctx); err != nil {
+ return err
+ }
+
+ permission, err := access_model.GetUserRepoPermission(ctx, pr.HeadRepo, doer)
+ if err != nil {
+ return err
+ }
+
+ if !permission.CanWrite(unit_model.TypeCode) {
+ return ErrUserHasNoPermissionForAction
+ }
+
+ pr.AllowMaintainerEdit = allow
+ return issues_model.UpdateAllowEdits(ctx, pr)
+}
diff --git a/services/pull/lfs.go b/services/pull/lfs.go
new file mode 100644
index 0000000..ed03583
--- /dev/null
+++ b/services/pull/lfs.go
@@ -0,0 +1,135 @@
+// Copyright 2019 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "bufio"
+ "context"
+ "io"
+ "strconv"
+ "sync"
+
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/git/pipeline"
+ "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/log"
+)
+
+// LFSPush pushes the LFS objects referred to in new commits in the head repository into the base repository
+func LFSPush(ctx context.Context, tmpBasePath, mergeHeadSHA, mergeBaseSHA string, pr *issues_model.PullRequest) error {
+ // Now we have to implement git lfs push
+ // git rev-list --objects --filter=blob:limit=1k HEAD --not base
+ // pass blob shas in to git cat-file --batch-check (possibly unnecessary)
+ // ensure only blobs and <=1k size then pass in to git cat-file --batch
+ // to read each sha and check each as a pointer
+ // Then if they are lfs -> add them to the baseRepo
+ revListReader, revListWriter := io.Pipe()
+ shasToCheckReader, shasToCheckWriter := io.Pipe()
+ catFileCheckReader, catFileCheckWriter := io.Pipe()
+ shasToBatchReader, shasToBatchWriter := io.Pipe()
+ catFileBatchReader, catFileBatchWriter := io.Pipe()
+ errChan := make(chan error, 1)
+ wg := sync.WaitGroup{}
+ wg.Add(6)
+ // Create the go-routines in reverse order.
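+	// The goroutines form a pipeline connected by in-memory pipes:
+	// rev-list -> BlobsFromRevListObjects -> cat-file --batch-check
+	//   -> BlobsLessThan1024FromCatFileBatchCheck -> cat-file --batch
+	//   -> createLFSMetaObjectsFromCatFileBatch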
+
+	// 6. Take the output of cat-file --batch and check each blob in turn
+	// to see if it is a pointer to a file in the LFS store associated with
+	// the head repo, adding it to the base repo if so
+ go createLFSMetaObjectsFromCatFileBatch(db.DefaultContext, catFileBatchReader, &wg, pr)
+
+ // 5. Take the shas of the blobs and batch read them
+ go pipeline.CatFileBatch(ctx, shasToBatchReader, catFileBatchWriter, &wg, tmpBasePath)
+
+ // 4. From the provided objects restrict to blobs <=1k
+ go pipeline.BlobsLessThan1024FromCatFileBatchCheck(catFileCheckReader, shasToBatchWriter, &wg)
+
+ // 3. Run batch-check on the objects retrieved from rev-list
+ go pipeline.CatFileBatchCheck(ctx, shasToCheckReader, catFileCheckWriter, &wg, tmpBasePath)
+
+ // 2. Check each object retrieved rejecting those without names as they will be commits or trees
+ go pipeline.BlobsFromRevListObjects(revListReader, shasToCheckWriter, &wg)
+
+ // 1. Run rev-list objects from mergeHead to mergeBase
+ go pipeline.RevListObjects(ctx, revListWriter, &wg, tmpBasePath, mergeHeadSHA, mergeBaseSHA, errChan)
+
+ wg.Wait()
+ select {
+ case err, has := <-errChan:
+ if has {
+ return err
+ }
+ default:
+ }
+ return nil
+}
+
+func createLFSMetaObjectsFromCatFileBatch(ctx context.Context, catFileBatchReader *io.PipeReader, wg *sync.WaitGroup, pr *issues_model.PullRequest) {
+ defer wg.Done()
+ defer catFileBatchReader.Close()
+
+ contentStore := lfs.NewContentStore()
+
+ bufferedReader := bufio.NewReader(catFileBatchReader)
+ buf := make([]byte, 1025)
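+	// Each `git cat-file --batch` record has the form
+	//   <sha> SP <type> SP <size> LF
+	//   <contents> LF
+	// and the reads below consume those fields in order.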
+ for {
+ // File descriptor line: sha
+ _, err := bufferedReader.ReadString(' ')
+ if err != nil {
+ _ = catFileBatchReader.CloseWithError(err)
+ break
+ }
+ // Throw away the blob
+ if _, err := bufferedReader.ReadString(' '); err != nil {
+ _ = catFileBatchReader.CloseWithError(err)
+ break
+ }
+ sizeStr, err := bufferedReader.ReadString('\n')
+ if err != nil {
+ _ = catFileBatchReader.CloseWithError(err)
+ break
+ }
+ size, err := strconv.Atoi(sizeStr[:len(sizeStr)-1])
+ if err != nil {
+ _ = catFileBatchReader.CloseWithError(err)
+ break
+ }
+ pointerBuf := buf[:size+1]
+ if _, err := io.ReadFull(bufferedReader, pointerBuf); err != nil {
+ _ = catFileBatchReader.CloseWithError(err)
+ break
+ }
+ pointerBuf = pointerBuf[:size]
+ // Now we need to check if the pointerBuf is an LFS pointer
+ pointer, _ := lfs.ReadPointerFromBuffer(pointerBuf)
+ if !pointer.IsValid() {
+ continue
+ }
+
+ exist, _ := contentStore.Exists(pointer)
+ if !exist {
+ continue
+ }
+
+ // Then we need to check that this pointer is in the db
+ if _, err := git_model.GetLFSMetaObjectByOid(ctx, pr.HeadRepoID, pointer.Oid); err != nil {
+ if err == git_model.ErrLFSObjectNotExist {
+ log.Warn("During merge of: %d in %-v, there is a pointer to LFS Oid: %s which although present in the LFS store is not associated with the head repo %-v", pr.Index, pr.BaseRepo, pointer.Oid, pr.HeadRepo)
+ continue
+ }
+ _ = catFileBatchReader.CloseWithError(err)
+ break
+ }
+ // OK we have a pointer that is associated with the head repo
+ // and is actually a file in the LFS
+ // Therefore it should be associated with the base repo
+ if _, err := git_model.NewLFSMetaObject(ctx, pr.BaseRepoID, pointer); err != nil {
+ _ = catFileBatchReader.CloseWithError(err)
+ break
+ }
+ }
+}
diff --git a/services/pull/main_test.go b/services/pull/main_test.go
new file mode 100644
index 0000000..efbb63a
--- /dev/null
+++ b/services/pull/main_test.go
@@ -0,0 +1,17 @@
+// Copyright 2019 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+
+ _ "code.gitea.io/gitea/models/actions"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
diff --git a/services/pull/merge.go b/services/pull/merge.go
new file mode 100644
index 0000000..a1585e6
--- /dev/null
+++ b/services/pull/merge.go
@@ -0,0 +1,562 @@
+// Copyright 2019 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "context"
+ "fmt"
+ "net/url"
+ "os"
+ "path/filepath"
+ "regexp"
+ "strconv"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/cache"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/references"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+ issue_service "code.gitea.io/gitea/services/issue"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+// getMergeMessage composes the message used when merging a pull request.
+func getMergeMessage(ctx context.Context, baseGitRepo *git.Repository, pr *issues_model.PullRequest, mergeStyle repo_model.MergeStyle, extraVars map[string]string) (message, body string, err error) {
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ return "", "", err
+ }
+ if err := pr.LoadHeadRepo(ctx); err != nil {
+ return "", "", err
+ }
+ if err := pr.LoadIssue(ctx); err != nil {
+ return "", "", err
+ }
+ if err := pr.Issue.LoadPoster(ctx); err != nil {
+ return "", "", err
+ }
+ if err := pr.Issue.LoadRepo(ctx); err != nil {
+ return "", "", err
+ }
+
+ isExternalTracker := pr.BaseRepo.UnitEnabled(ctx, unit.TypeExternalTracker)
+ issueReference := "#"
+ if isExternalTracker {
+ issueReference = "!"
+ }
+
+ issueURL, err := url.JoinPath(setting.AppURL, pr.Issue.Link())
+ if err != nil {
+ return "", "", err
+ }
+ reviewedOn := fmt.Sprintf("Reviewed-on: %s", issueURL)
+ reviewedBy := pr.GetApprovers(ctx)
+
+ if mergeStyle != "" {
+ commit, err := baseGitRepo.GetBranchCommit(pr.BaseRepo.DefaultBranch)
+ if err != nil {
+ return "", "", err
+ }
+
+ templateFilepathForgejo := fmt.Sprintf(".forgejo/default_merge_message/%s_TEMPLATE.md", strings.ToUpper(string(mergeStyle)))
+ templateFilepathGitea := fmt.Sprintf(".gitea/default_merge_message/%s_TEMPLATE.md", strings.ToUpper(string(mergeStyle)))
+
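+		// The .forgejo template takes precedence; the .gitea path is only consulted
+		// when the Forgejo one does not exist in the default branch.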
+ templateContent, err := commit.GetFileContent(templateFilepathForgejo, setting.Repository.PullRequest.DefaultMergeMessageSize)
+ if _, ok := err.(git.ErrNotExist); ok {
+ templateContent, err = commit.GetFileContent(templateFilepathGitea, setting.Repository.PullRequest.DefaultMergeMessageSize)
+ }
+ if err != nil {
+ if !git.IsErrNotExist(err) {
+ return "", "", err
+ }
+ } else {
+ vars := map[string]string{
+ "BaseRepoOwnerName": pr.BaseRepo.OwnerName,
+ "BaseRepoName": pr.BaseRepo.Name,
+ "BaseBranch": pr.BaseBranch,
+ "HeadRepoOwnerName": "",
+ "HeadRepoName": "",
+ "HeadBranch": pr.HeadBranch,
+ "PullRequestTitle": pr.Issue.Title,
+ "PullRequestDescription": pr.Issue.Content,
+ "PullRequestPosterName": pr.Issue.Poster.Name,
+ "PullRequestIndex": strconv.FormatInt(pr.Index, 10),
+ "PullRequestReference": fmt.Sprintf("%s%d", issueReference, pr.Index),
+ "ReviewedOn": reviewedOn,
+ "ReviewedBy": reviewedBy,
+ }
+ if pr.HeadRepo != nil {
+ vars["HeadRepoOwnerName"] = pr.HeadRepo.OwnerName
+ vars["HeadRepoName"] = pr.HeadRepo.Name
+ }
+ for extraKey, extraValue := range extraVars {
+ vars[extraKey] = extraValue
+ }
+ refs, err := pr.ResolveCrossReferences(ctx)
+ if err == nil {
+ closeIssueIndexes := make([]string, 0, len(refs))
+ closeWord := "close"
+ if len(setting.Repository.PullRequest.CloseKeywords) > 0 {
+ closeWord = setting.Repository.PullRequest.CloseKeywords[0]
+ }
+ for _, ref := range refs {
+ if ref.RefAction == references.XRefActionCloses {
+ if err := ref.LoadIssue(ctx); err != nil {
+ return "", "", err
+ }
+ closeIssueIndexes = append(closeIssueIndexes, fmt.Sprintf("%s %s%d", closeWord, issueReference, ref.Issue.Index))
+ }
+ }
+ if len(closeIssueIndexes) > 0 {
+ vars["ClosingIssues"] = strings.Join(closeIssueIndexes, ", ")
+ } else {
+ vars["ClosingIssues"] = ""
+ }
+ }
+ message, body = expandDefaultMergeMessage(templateContent, vars)
+ return message, body, nil
+ }
+ }
+
+ if mergeStyle == repo_model.MergeStyleRebase {
+ // for fast-forward rebase, do not amend the last commit if there is no template
+ return "", "", nil
+ }
+
+ body = fmt.Sprintf("%s\n%s", reviewedOn, reviewedBy)
+
+	// Squash merge has a different commit message format from other styles.
+ if mergeStyle == repo_model.MergeStyleSquash {
+ return fmt.Sprintf("%s (%s%d)", pr.Issue.Title, issueReference, pr.Issue.Index), body, nil
+ }
+
+ if pr.BaseRepoID == pr.HeadRepoID {
+ return fmt.Sprintf("Merge pull request '%s' (%s%d) from %s into %s", pr.Issue.Title, issueReference, pr.Issue.Index, pr.HeadBranch, pr.BaseBranch), body, nil
+ }
+
+ if pr.HeadRepo == nil {
+ return fmt.Sprintf("Merge pull request '%s' (%s%d) from <deleted>:%s into %s", pr.Issue.Title, issueReference, pr.Issue.Index, pr.HeadBranch, pr.BaseBranch), body, nil
+ }
+
+ return fmt.Sprintf("Merge pull request '%s' (%s%d) from %s:%s into %s", pr.Issue.Title, issueReference, pr.Issue.Index, pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseBranch), body, nil
+}
+
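+// expandDefaultMergeMessage splits the template into a subject line and a body
+// and substitutes ${Var} references via os.Expand; e.g. with
+// vars["PullRequestTitle"] = "Fix login" the template line
+// "Merge: ${PullRequestTitle}" expands to "Merge: Fix login".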
+func expandDefaultMergeMessage(template string, vars map[string]string) (message, body string) {
+ message = strings.TrimSpace(template)
+ if splits := strings.SplitN(message, "\n", 2); len(splits) == 2 {
+ message = splits[0]
+ body = strings.TrimSpace(splits[1])
+ }
+ mapping := func(s string) string { return vars[s] }
+ return os.Expand(message, mapping), os.Expand(body, mapping)
+}
+
+// GetDefaultMergeMessage returns the default message used when merging a pull request
+func GetDefaultMergeMessage(ctx context.Context, baseGitRepo *git.Repository, pr *issues_model.PullRequest, mergeStyle repo_model.MergeStyle) (message, body string, err error) {
+ return getMergeMessage(ctx, baseGitRepo, pr, mergeStyle, nil)
+}
+
+// Merge merges a pull request into the base repository.
+// The caller should check that the PR is ready to be merged (reviews and status checks).
+func Merge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, baseGitRepo *git.Repository, mergeStyle repo_model.MergeStyle, expectedHeadCommitID, message string, wasAutoMerged bool) error {
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ log.Error("Unable to load base repo: %v", err)
+ return fmt.Errorf("unable to load base repo: %w", err)
+ } else if err := pr.LoadHeadRepo(ctx); err != nil {
+ log.Error("Unable to load head repo: %v", err)
+ return fmt.Errorf("unable to load head repo: %w", err)
+ }
+
+ pullWorkingPool.CheckIn(fmt.Sprint(pr.ID))
+ defer pullWorkingPool.CheckOut(fmt.Sprint(pr.ID))
+
+ prUnit, err := pr.BaseRepo.GetUnit(ctx, unit.TypePullRequests)
+ if err != nil {
+ log.Error("pr.BaseRepo.GetUnit(unit.TypePullRequests): %v", err)
+ return err
+ }
+ prConfig := prUnit.PullRequestsConfig()
+
+ // Check if merge style is correct and allowed
+ if !prConfig.IsMergeStyleAllowed(mergeStyle) {
+ return models.ErrInvalidMergeStyle{ID: pr.BaseRepo.ID, Style: mergeStyle}
+ }
+
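+	// Once this merge attempt finishes, queue the open pull requests on this
+	// base branch for retesting.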
+ defer func() {
+ AddTestPullRequestTask(ctx, doer, pr.BaseRepo.ID, pr.BaseBranch, false, "", "", 0)
+ }()
+
+ _, err = doMergeAndPush(ctx, pr, doer, mergeStyle, expectedHeadCommitID, message, repo_module.PushTriggerPRMergeToBase)
+ if err != nil {
+ return err
+ }
+
+ // reload pull request because it has been updated by post receive hook
+ pr, err = issues_model.GetPullRequestByID(ctx, pr.ID)
+ if err != nil {
+ return err
+ }
+
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("LoadIssue %-v: %v", pr, err)
+ }
+
+ if err := pr.Issue.LoadRepo(ctx); err != nil {
+ log.Error("pr.Issue.LoadRepo %-v: %v", pr, err)
+ }
+ if err := pr.Issue.Repo.LoadOwner(ctx); err != nil {
+ log.Error("LoadOwner for %-v: %v", pr, err)
+ }
+
+ if wasAutoMerged {
+ notify_service.AutoMergePullRequest(ctx, doer, pr)
+ } else {
+ notify_service.MergePullRequest(ctx, doer, pr)
+ }
+
+ // Reset cached commit count
+ cache.Remove(pr.Issue.Repo.GetCommitsCountCacheKey(pr.BaseBranch, true))
+
+ return handleCloseCrossReferences(ctx, pr, doer)
+}
+
+func handleCloseCrossReferences(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User) error {
+ // Resolve cross references
+ refs, err := pr.ResolveCrossReferences(ctx)
+ if err != nil {
+ log.Error("ResolveCrossReferences: %v", err)
+ return nil
+ }
+
+ for _, ref := range refs {
+ if err = ref.LoadIssue(ctx); err != nil {
+ return err
+ }
+ if err = ref.Issue.LoadRepo(ctx); err != nil {
+ return err
+ }
+ isClosed := ref.RefAction == references.XRefActionCloses
+ if isClosed != ref.Issue.IsClosed {
+ if err = issue_service.ChangeStatus(ctx, ref.Issue, doer, pr.MergedCommitID, isClosed); err != nil {
+ // Allow ErrDependenciesLeft
+ if !issues_model.IsErrDependenciesLeft(err) {
+ return err
+ }
+ }
+ }
+ }
+ return nil
+}
+
+// doMergeAndPush performs the merge operation without changing any pull request information in the database and pushes the result up to the base repository
+func doMergeAndPush(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, mergeStyle repo_model.MergeStyle, expectedHeadCommitID, message string, pushTrigger repo_module.PushTrigger) (string, error) { //nolint:unparam
+ // Clone base repo.
+ mergeCtx, cancel, err := createTemporaryRepoForMerge(ctx, pr, doer, expectedHeadCommitID)
+ if err != nil {
+ return "", err
+ }
+ defer cancel()
+
+ // Merge commits.
+ switch mergeStyle {
+ case repo_model.MergeStyleMerge:
+ if err := doMergeStyleMerge(mergeCtx, message); err != nil {
+ return "", err
+ }
+ case repo_model.MergeStyleRebase, repo_model.MergeStyleRebaseMerge:
+ if err := doMergeStyleRebase(mergeCtx, mergeStyle, message); err != nil {
+ return "", err
+ }
+ case repo_model.MergeStyleSquash:
+ if err := doMergeStyleSquash(mergeCtx, message); err != nil {
+ return "", err
+ }
+ case repo_model.MergeStyleFastForwardOnly:
+ if err := doMergeStyleFastForwardOnly(mergeCtx); err != nil {
+ return "", err
+ }
+ default:
+ return "", models.ErrInvalidMergeStyle{ID: pr.BaseRepo.ID, Style: mergeStyle}
+ }
+
+	// Cache the SHAs of the current HEAD, the original base branch, and the resulting merge commit
+ mergeHeadSHA, err := git.GetFullCommitID(ctx, mergeCtx.tmpBasePath, "HEAD")
+ if err != nil {
+ return "", fmt.Errorf("Failed to get full commit id for HEAD: %w", err)
+ }
+ mergeBaseSHA, err := git.GetFullCommitID(ctx, mergeCtx.tmpBasePath, "original_"+baseBranch)
+ if err != nil {
+ return "", fmt.Errorf("Failed to get full commit id for origin/%s: %w", pr.BaseBranch, err)
+ }
+ mergeCommitID, err := git.GetFullCommitID(ctx, mergeCtx.tmpBasePath, baseBranch)
+ if err != nil {
+ return "", fmt.Errorf("Failed to get full commit id for the new merge: %w", err)
+ }
+
+	// It is debatable whether this should run before or after the push.
+	// In the interests of data safety, a failure to push to the LFS store should
+	// prevent the merge, since the merge can always be rerun.
+ if setting.LFS.StartServer {
+ if err := LFSPush(ctx, mergeCtx.tmpBasePath, mergeHeadSHA, mergeBaseSHA, pr); err != nil {
+ return "", err
+ }
+ }
+
+ var headUser *user_model.User
+ err = pr.HeadRepo.LoadOwner(ctx)
+ if err != nil {
+ if !user_model.IsErrUserNotExist(err) {
+ log.Error("Can't find user: %d for head repository in %-v: %v", pr.HeadRepo.OwnerID, pr, err)
+ return "", err
+ }
+ log.Warn("Can't find user: %d for head repository in %-v - defaulting to doer: %s - %v", pr.HeadRepo.OwnerID, pr, doer.Name, err)
+ headUser = doer
+ } else {
+ headUser = pr.HeadRepo.Owner
+ }
+
+ mergeCtx.env = repo_module.FullPushingEnvironment(
+ headUser,
+ doer,
+ pr.BaseRepo,
+ pr.BaseRepo.Name,
+ pr.ID,
+ )
+
+ mergeCtx.env = append(mergeCtx.env, repo_module.EnvPushTrigger+"="+string(pushTrigger))
+ pushCmd := git.NewCommand(ctx, "push", "origin").AddDynamicArguments(baseBranch + ":" + git.BranchPrefix + pr.BaseBranch)
+
+	// Push back to upstream.
+	// This causes an API call to "/api/internal/hook/post-receive/...";
+	// for a merge, all database transactions and operations should happen in that
+	// hook rather than here, to prevent deadlocks.
+ if err := pushCmd.Run(mergeCtx.RunOpts()); err != nil {
+ if strings.Contains(mergeCtx.errbuf.String(), "non-fast-forward") {
+ return "", &git.ErrPushOutOfDate{
+ StdOut: mergeCtx.outbuf.String(),
+ StdErr: mergeCtx.errbuf.String(),
+ Err: err,
+ }
+ } else if strings.Contains(mergeCtx.errbuf.String(), "! [remote rejected]") {
+ err := &git.ErrPushRejected{
+ StdOut: mergeCtx.outbuf.String(),
+ StdErr: mergeCtx.errbuf.String(),
+ Err: err,
+ }
+ err.GenerateMessage()
+ return "", err
+ }
+ return "", fmt.Errorf("git push: %s", mergeCtx.errbuf.String())
+ }
+ mergeCtx.outbuf.Reset()
+ mergeCtx.errbuf.Reset()
+
+ return mergeCommitID, nil
+}
+
+func commitAndSignNoAuthor(ctx *mergeContext, message string) error {
+ cmdCommit := git.NewCommand(ctx, "commit").AddOptionFormat("--message=%s", message)
+ if ctx.signKeyID == "" {
+ cmdCommit.AddArguments("--no-gpg-sign")
+ } else {
+ cmdCommit.AddOptionFormat("-S%s", ctx.signKeyID)
+ }
+ if err := cmdCommit.Run(ctx.RunOpts()); err != nil {
+ log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ return fmt.Errorf("git commit %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ }
+ return nil
+}
+
+func runMergeCommand(ctx *mergeContext, mergeStyle repo_model.MergeStyle, cmd *git.Command) error {
+ if err := cmd.Run(ctx.RunOpts()); err != nil {
+ // Merge will leave a MERGE_HEAD file in the .git folder if there is a conflict
+ if _, statErr := os.Stat(filepath.Join(ctx.tmpBasePath, ".git", "MERGE_HEAD")); statErr == nil {
+ // We have a merge conflict error
+ log.Debug("MergeConflict %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ return models.ErrMergeConflicts{
+ Style: mergeStyle,
+ StdOut: ctx.outbuf.String(),
+ StdErr: ctx.errbuf.String(),
+ Err: err,
+ }
+ } else if strings.Contains(ctx.errbuf.String(), "refusing to merge unrelated histories") {
+ log.Debug("MergeUnrelatedHistories %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ return models.ErrMergeUnrelatedHistories{
+ Style: mergeStyle,
+ StdOut: ctx.outbuf.String(),
+ StdErr: ctx.errbuf.String(),
+ Err: err,
+ }
+ } else if mergeStyle == repo_model.MergeStyleFastForwardOnly && strings.Contains(ctx.errbuf.String(), "Not possible to fast-forward, aborting") {
+ log.Debug("MergeDivergingFastForwardOnly %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ return models.ErrMergeDivergingFastForwardOnly{
+ StdOut: ctx.outbuf.String(),
+ StdErr: ctx.errbuf.String(),
+ Err: err,
+ }
+ }
+ log.Error("git merge %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ return fmt.Errorf("git merge %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ }
+ ctx.outbuf.Reset()
+ ctx.errbuf.Reset()
+
+ return nil
+}
+
+var escapedSymbols = regexp.MustCompile(`([*[?! \\])`)
+
+// IsUserAllowedToMerge check if user is allowed to merge PR with given permissions and branch protections
+func IsUserAllowedToMerge(ctx context.Context, pr *issues_model.PullRequest, p access_model.Permission, user *user_model.User) (bool, error) {
+ if user == nil {
+ return false, nil
+ }
+
+ pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
+ if err != nil {
+ return false, err
+ }
+
+ if (p.CanWrite(unit.TypeCode) && pb == nil) || (pb != nil && git_model.IsUserMergeWhitelisted(ctx, pb, user.ID, p)) {
+ return true, nil
+ }
+
+ return false, nil
+}
+
+// CheckPullBranchProtections checks whether the PR is ready to be merged (reviews and status checks).
+// Returns the protected branch rule when `ErrDisallowedToMerge` is returned as error.
+func CheckPullBranchProtections(ctx context.Context, pr *issues_model.PullRequest, skipProtectedFilesCheck bool) (protectedBranchRule *git_model.ProtectedBranch, err error) {
+ if err = pr.LoadBaseRepo(ctx); err != nil {
+ return nil, fmt.Errorf("LoadBaseRepo: %w", err)
+ }
+
+ pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
+ if err != nil {
+ return nil, fmt.Errorf("LoadProtectedBranch: %v", err)
+ }
+ if pb == nil {
+ return nil, nil
+ }
+
+ isPass, err := IsPullCommitStatusPass(ctx, pr)
+ if err != nil {
+ return nil, err
+ }
+ if !isPass {
+ return pb, models.ErrDisallowedToMerge{
+ Reason: "Not all required status checks successful",
+ }
+ }
+
+ if !issues_model.HasEnoughApprovals(ctx, pb, pr) {
+ return pb, models.ErrDisallowedToMerge{
+ Reason: "Does not have enough approvals",
+ }
+ }
+ if issues_model.MergeBlockedByRejectedReview(ctx, pb, pr) {
+ return pb, models.ErrDisallowedToMerge{
+ Reason: "There are requested changes",
+ }
+ }
+ if issues_model.MergeBlockedByOfficialReviewRequests(ctx, pb, pr) {
+ return pb, models.ErrDisallowedToMerge{
+ Reason: "There are official review requests",
+ }
+ }
+
+ if issues_model.MergeBlockedByOutdatedBranch(pb, pr) {
+ return pb, models.ErrDisallowedToMerge{
+ Reason: "The head branch is behind the base branch",
+ }
+ }
+
+ if skipProtectedFilesCheck {
+ return nil, nil
+ }
+
+ if pb.MergeBlockedByProtectedFiles(pr.ChangedProtectedFiles) {
+ return pb, models.ErrDisallowedToMerge{
+ Reason: "Changed protected files",
+ }
+ }
+
+ return nil, nil
+}
+
+// MergedManually marks the pull request as manually merged
+func MergedManually(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, baseGitRepo *git.Repository, commitID string) error {
+ pullWorkingPool.CheckIn(fmt.Sprint(pr.ID))
+ defer pullWorkingPool.CheckOut(fmt.Sprint(pr.ID))
+
+ if err := db.WithTx(ctx, func(ctx context.Context) error {
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ return err
+ }
+ prUnit, err := pr.BaseRepo.GetUnit(ctx, unit.TypePullRequests)
+ if err != nil {
+ return err
+ }
+ prConfig := prUnit.PullRequestsConfig()
+
+ // Check if merge style is correct and allowed
+ if !prConfig.IsMergeStyleAllowed(repo_model.MergeStyleManuallyMerged) {
+ return models.ErrInvalidMergeStyle{ID: pr.BaseRepo.ID, Style: repo_model.MergeStyleManuallyMerged}
+ }
+
+ objectFormat := git.ObjectFormatFromName(pr.BaseRepo.ObjectFormatName)
+ if len(commitID) != objectFormat.FullLength() {
+ return fmt.Errorf("Wrong commit ID")
+ }
+
+ commit, err := baseGitRepo.GetCommit(commitID)
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ return fmt.Errorf("Wrong commit ID")
+ }
+ return err
+ }
+ commitID = commit.ID.String()
+
+ ok, err := baseGitRepo.IsCommitInBranch(commitID, pr.BaseBranch)
+ if err != nil {
+ return err
+ }
+ if !ok {
+ return fmt.Errorf("Wrong commit ID")
+ }
+
+ pr.MergedCommitID = commitID
+ pr.MergedUnix = timeutil.TimeStamp(commit.Author.When.Unix())
+ pr.Status = issues_model.PullRequestStatusManuallyMerged
+ pr.Merger = doer
+ pr.MergerID = doer.ID
+
+ var merged bool
+ if merged, err = pr.SetMerged(ctx); err != nil {
+ return err
+ } else if !merged {
+ return fmt.Errorf("SetMerged failed")
+ }
+ return nil
+ }); err != nil {
+ return err
+ }
+
+ notify_service.MergePullRequest(baseGitRepo.Ctx, doer, pr)
+ log.Info("manuallyMerged[%d]: Marked as manually merged into %s/%s by commit id: %s", pr.ID, pr.BaseRepo.Name, pr.BaseBranch, commitID)
+
+ return handleCloseCrossReferences(ctx, pr, doer)
+}
diff --git a/services/pull/merge_ff_only.go b/services/pull/merge_ff_only.go
new file mode 100644
index 0000000..f57c732
--- /dev/null
+++ b/services/pull/merge_ff_only.go
@@ -0,0 +1,21 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+)
+
+// doMergeStyleFastForwardOnly merges the tracking branch into the current HEAD - which is assumed to be the staging branch (equal to the pr.BaseBranch)
+func doMergeStyleFastForwardOnly(ctx *mergeContext) error {
+ cmd := git.NewCommand(ctx, "merge", "--ff-only").AddDynamicArguments(trackingBranch)
+ if err := runMergeCommand(ctx, repo_model.MergeStyleFastForwardOnly, cmd); err != nil {
+ log.Error("%-v Unable to merge tracking into base: %v", ctx.pr, err)
+ return err
+ }
+
+ return nil
+}
diff --git a/services/pull/merge_merge.go b/services/pull/merge_merge.go
new file mode 100644
index 0000000..bf56c07
--- /dev/null
+++ b/services/pull/merge_merge.go
@@ -0,0 +1,25 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+)
+
+// doMergeStyleMerge merges the tracking branch into the current HEAD - which is assumed to be the staging branch (equal to the pr.BaseBranch)
+func doMergeStyleMerge(ctx *mergeContext, message string) error {
+ cmd := git.NewCommand(ctx, "merge", "--no-ff", "--no-commit").AddDynamicArguments(trackingBranch)
+ if err := runMergeCommand(ctx, repo_model.MergeStyleMerge, cmd); err != nil {
+ log.Error("%-v Unable to merge tracking into base: %v", ctx.pr, err)
+ return err
+ }
+
+ if err := commitAndSignNoAuthor(ctx, message); err != nil {
+ log.Error("%-v Unable to make final commit: %v", ctx.pr, err)
+ return err
+ }
+ return nil
+}
diff --git a/services/pull/merge_prepare.go b/services/pull/merge_prepare.go
new file mode 100644
index 0000000..88f6c03
--- /dev/null
+++ b/services/pull/merge_prepare.go
@@ -0,0 +1,288 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "bufio"
+ "bytes"
+ "context"
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ asymkey_service "code.gitea.io/gitea/services/asymkey"
+)
+
+type mergeContext struct {
+ *prContext
+ doer *user_model.User
+ sig *git.Signature
+ committer *git.Signature
+ signKeyID string // empty for no-sign, non-empty to sign
+ env []string
+}
+
+func (ctx *mergeContext) RunOpts() *git.RunOpts {
+ ctx.outbuf.Reset()
+ ctx.errbuf.Reset()
+ return &git.RunOpts{
+ Env: ctx.env,
+ Dir: ctx.tmpBasePath,
+ Stdout: ctx.outbuf,
+ Stderr: ctx.errbuf,
+ }
+}
+
+func createTemporaryRepoForMerge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, expectedHeadCommitID string) (mergeCtx *mergeContext, cancel context.CancelFunc, err error) {
+ // Clone base repo.
+ prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr)
+ if err != nil {
+ log.Error("createTemporaryRepoForPR: %v", err)
+ return nil, cancel, err
+ }
+
+ mergeCtx = &mergeContext{
+ prContext: prCtx,
+ doer: doer,
+ }
+
+ if expectedHeadCommitID != "" {
+ trackingCommitID, _, err := git.NewCommand(ctx, "show-ref", "--hash").AddDynamicArguments(git.BranchPrefix + trackingBranch).RunStdString(&git.RunOpts{Dir: mergeCtx.tmpBasePath})
+ if err != nil {
+ defer cancel()
+ log.Error("failed to get sha of head branch in %-v: show-ref[%s] --hash refs/heads/tracking: %v", mergeCtx.pr, mergeCtx.tmpBasePath, err)
+ return nil, nil, fmt.Errorf("unable to get sha of head branch in %v %w", pr, err)
+ }
+ if strings.TrimSpace(trackingCommitID) != expectedHeadCommitID {
+ defer cancel()
+ return nil, nil, models.ErrSHADoesNotMatch{
+ GivenSHA: expectedHeadCommitID,
+ CurrentSHA: trackingCommitID,
+ }
+ }
+ }
+
+ mergeCtx.outbuf.Reset()
+ mergeCtx.errbuf.Reset()
+ if err := prepareTemporaryRepoForMerge(mergeCtx); err != nil {
+ defer cancel()
+ return nil, nil, err
+ }
+
+ mergeCtx.sig = doer.NewGitSig()
+ mergeCtx.committer = mergeCtx.sig
+
+ // Determine if we should sign
+ sign, keyID, signer, _ := asymkey_service.SignMerge(ctx, mergeCtx.pr, mergeCtx.doer, mergeCtx.tmpBasePath, "HEAD", trackingBranch)
+ if sign {
+ mergeCtx.signKeyID = keyID
+ if pr.BaseRepo.GetTrustModel() == repo_model.CommitterTrustModel || pr.BaseRepo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel {
+ mergeCtx.committer = signer
+ }
+ }
+
+ commitTimeStr := time.Now().Format(time.RFC3339)
+
+ // Because this may call hooks we should pass in the environment
+ mergeCtx.env = append(os.Environ(),
+ "GIT_AUTHOR_NAME="+mergeCtx.sig.Name,
+ "GIT_AUTHOR_EMAIL="+mergeCtx.sig.Email,
+ "GIT_AUTHOR_DATE="+commitTimeStr,
+ "GIT_COMMITTER_NAME="+mergeCtx.committer.Name,
+ "GIT_COMMITTER_EMAIL="+mergeCtx.committer.Email,
+ "GIT_COMMITTER_DATE="+commitTimeStr,
+ )
+
+ return mergeCtx, cancel, nil
+}
+
+// prepareTemporaryRepoForMerge takes a repository that has been created using createTemporaryRepoForPR
+// and sets up sparse-checkout and other merge-related settings
+func prepareTemporaryRepoForMerge(ctx *mergeContext) error {
+ infoPath := filepath.Join(ctx.tmpBasePath, ".git", "info")
+ if err := os.MkdirAll(infoPath, 0o700); err != nil {
+ log.Error("%-v Unable to create .git/info in %s: %v", ctx.pr, ctx.tmpBasePath, err)
+ return fmt.Errorf("Unable to create .git/info in tmpBasePath: %w", err)
+ }
+
+ // Enable sparse-checkout
+ // Here we use the .git/info/sparse-checkout file as described in the git documentation
+ sparseCheckoutListFile, err := os.OpenFile(filepath.Join(infoPath, "sparse-checkout"), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o600)
+ if err != nil {
+ log.Error("%-v Unable to write .git/info/sparse-checkout file in %s: %v", ctx.pr, ctx.tmpBasePath, err)
+ return fmt.Errorf("Unable to write .git/info/sparse-checkout file in tmpBasePath: %w", err)
+ }
+ defer sparseCheckoutListFile.Close() // we will close it earlier but we need to ensure it is closed if there is an error
+
+ if err := getDiffTree(ctx, ctx.tmpBasePath, baseBranch, trackingBranch, sparseCheckoutListFile); err != nil {
+ log.Error("%-v getDiffTree(%s, %s, %s): %v", ctx.pr, ctx.tmpBasePath, baseBranch, trackingBranch, err)
+ return fmt.Errorf("getDiffTree: %w", err)
+ }
+
+ if err := sparseCheckoutListFile.Close(); err != nil {
+ log.Error("%-v Unable to close .git/info/sparse-checkout file in %s: %v", ctx.pr, ctx.tmpBasePath, err)
+ return fmt.Errorf("Unable to close .git/info/sparse-checkout file in tmpBasePath: %w", err)
+ }
+
+ setConfig := func(key, value string) error {
+ if err := git.NewCommand(ctx, "config", "--local").AddDynamicArguments(key, value).
+ Run(ctx.RunOpts()); err != nil {
+ log.Error("git config [%s -> %q]: %v\n%s\n%s", key, value, err, ctx.outbuf.String(), ctx.errbuf.String())
+ return fmt.Errorf("git config [%s -> %q]: %w\n%s\n%s", key, value, err, ctx.outbuf.String(), ctx.errbuf.String())
+ }
+ ctx.outbuf.Reset()
+ ctx.errbuf.Reset()
+
+ return nil
+ }
+
+ // Switch off LFS process (set required, clean and smudge here also)
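+	// so that LFS pointers are checked out verbatim instead of being smudged
+	// into their full contents in the temporary repository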
+ if err := setConfig("filter.lfs.process", ""); err != nil {
+ return err
+ }
+
+ if err := setConfig("filter.lfs.required", "false"); err != nil {
+ return err
+ }
+
+ if err := setConfig("filter.lfs.clean", ""); err != nil {
+ return err
+ }
+
+ if err := setConfig("filter.lfs.smudge", ""); err != nil {
+ return err
+ }
+
+ if err := setConfig("core.sparseCheckout", "true"); err != nil {
+ return err
+ }
+
+ // Read base branch index
+ if err := git.NewCommand(ctx, "read-tree", "HEAD").
+ Run(ctx.RunOpts()); err != nil {
+ log.Error("git read-tree HEAD: %v\n%s\n%s", err, ctx.outbuf.String(), ctx.errbuf.String())
+ return fmt.Errorf("Unable to read base branch in to the index: %w\n%s\n%s", err, ctx.outbuf.String(), ctx.errbuf.String())
+ }
+ ctx.outbuf.Reset()
+ ctx.errbuf.Reset()
+
+ return nil
+}
+
+// getDiffTree returns a string containing all the files that were changed between headBranch and baseBranch
+// the filenames are escaped so as to fit the format required for .git/info/sparse-checkout
+func getDiffTree(ctx context.Context, repoPath, baseBranch, headBranch string, out io.Writer) error {
+ diffOutReader, diffOutWriter, err := os.Pipe()
+ if err != nil {
+ log.Error("Unable to create os.Pipe for %s", repoPath)
+ return err
+ }
+ defer func() {
+ _ = diffOutReader.Close()
+ _ = diffOutWriter.Close()
+ }()
+
+ scanNullTerminatedStrings := func(data []byte, atEOF bool) (advance int, token []byte, err error) {
+ if atEOF && len(data) == 0 {
+ return 0, nil, nil
+ }
+ if i := bytes.IndexByte(data, '\x00'); i >= 0 {
+ return i + 1, data[0:i], nil
+ }
+ if atEOF {
+ return len(data), data, nil
+ }
+ return 0, nil, nil
+ }
+
+ err = git.NewCommand(ctx, "diff-tree", "--no-commit-id", "--name-only", "-r", "-r", "-z", "--root").AddDynamicArguments(baseBranch, headBranch).
+ Run(&git.RunOpts{
+ Dir: repoPath,
+ Stdout: diffOutWriter,
+ PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error {
+ // Close the writer end of the pipe to begin processing
+ _ = diffOutWriter.Close()
+ defer func() {
+ // Close the reader on return to terminate the git command if necessary
+ _ = diffOutReader.Close()
+ }()
+
+ // Now scan the output from the command
+ scanner := bufio.NewScanner(diffOutReader)
+ scanner.Split(scanNullTerminatedStrings)
+ for scanner.Scan() {
+ filepath := scanner.Text()
+ // escape '*', '?', '[', spaces and '!' prefix
+ filepath = escapedSymbols.ReplaceAllString(filepath, `\$1`)
+				// not necessary to escape a leading '#' because the first character is always '/'
+ fmt.Fprintf(out, "/%s\n", filepath)
+ }
+ return scanner.Err()
+ },
+ })
+ return err
+}
+
+// rebaseTrackingOnToBase checks out the tracking branch as the staging branch and rebases it onto the base branch;
+// if there is a conflict it returns a models.ErrRebaseConflicts
+func rebaseTrackingOnToBase(ctx *mergeContext, mergeStyle repo_model.MergeStyle) error {
+ // Checkout head branch
+ if err := git.NewCommand(ctx, "checkout", "-b").AddDynamicArguments(stagingBranch, trackingBranch).
+ Run(ctx.RunOpts()); err != nil {
+ return fmt.Errorf("unable to git checkout tracking as staging in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ }
+ ctx.outbuf.Reset()
+ ctx.errbuf.Reset()
+
+ // Rebase before merging
+ if err := git.NewCommand(ctx, "rebase").AddDynamicArguments(baseBranch).
+ Run(ctx.RunOpts()); err != nil {
+ // Rebase will leave a REBASE_HEAD file in .git if there is a conflict
+ if _, statErr := os.Stat(filepath.Join(ctx.tmpBasePath, ".git", "REBASE_HEAD")); statErr == nil {
+ var commitSha string
+ ok := false
+ failingCommitPaths := []string{
+ filepath.Join(ctx.tmpBasePath, ".git", "rebase-apply", "original-commit"), // Git < 2.26
+ filepath.Join(ctx.tmpBasePath, ".git", "rebase-merge", "stopped-sha"), // Git >= 2.26
+ }
+ for _, failingCommitPath := range failingCommitPaths {
+ if _, statErr := os.Stat(failingCommitPath); statErr == nil {
+ commitShaBytes, readErr := os.ReadFile(failingCommitPath)
+ if readErr != nil {
+ // Abandon this attempt to handle the error
+ return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ }
+ commitSha = strings.TrimSpace(string(commitShaBytes))
+ ok = true
+ break
+ }
+ }
+ if !ok {
+ log.Error("Unable to determine failing commit sha for failing rebase in temp repo for %-v. Cannot cast as models.ErrRebaseConflicts.", ctx.pr)
+ return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ }
+ log.Debug("Conflict when rebasing staging on to base in %-v at %s: %v\n%s\n%s", ctx.pr, commitSha, err, ctx.outbuf.String(), ctx.errbuf.String())
+ return models.ErrRebaseConflicts{
+ CommitSHA: commitSha,
+ Style: mergeStyle,
+ StdOut: ctx.outbuf.String(),
+ StdErr: ctx.errbuf.String(),
+ Err: err,
+ }
+ }
+ return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ }
+ ctx.outbuf.Reset()
+ ctx.errbuf.Reset()
+ return nil
+}
diff --git a/services/pull/merge_rebase.go b/services/pull/merge_rebase.go
new file mode 100644
index 0000000..ecf3762
--- /dev/null
+++ b/services/pull/merge_rebase.go
@@ -0,0 +1,121 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "fmt"
+ "strings"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+)
+
+// getRebaseAmendMessage composes the message to amend commits in rebase merge of a pull request.
+func getRebaseAmendMessage(ctx *mergeContext, baseGitRepo *git.Repository) (message string, err error) {
+ // Get existing commit message.
+ commitMessage, _, err := git.NewCommand(ctx, "show", "--format=%B", "-s").RunStdString(&git.RunOpts{Dir: ctx.tmpBasePath})
+ if err != nil {
+ return "", err
+ }
+
+ commitTitle, commitBody, _ := strings.Cut(commitMessage, "\n")
+ extraVars := map[string]string{"CommitTitle": strings.TrimSpace(commitTitle), "CommitBody": strings.TrimSpace(commitBody)}
+
+ message, body, err := getMergeMessage(ctx, baseGitRepo, ctx.pr, repo_model.MergeStyleRebase, extraVars)
+ if err != nil || message == "" {
+ return "", err
+ }
+
+ if len(body) > 0 {
+ message = message + "\n\n" + body
+ }
+ return message, err
+}
+
+// Perform rebase merge without merge commit.
+func doMergeRebaseFastForward(ctx *mergeContext) error {
+ baseHeadSHA, err := git.GetFullCommitID(ctx, ctx.tmpBasePath, "HEAD")
+ if err != nil {
+ return fmt.Errorf("Failed to get full commit id for HEAD: %w", err)
+ }
+
+ cmd := git.NewCommand(ctx, "merge", "--ff-only").AddDynamicArguments(stagingBranch)
+ if err := runMergeCommand(ctx, repo_model.MergeStyleRebase, cmd); err != nil {
+ log.Error("Unable to merge staging into base: %v", err)
+ return err
+ }
+
+ // Check if anything actually changed before we amend the message, fast forward can skip commits.
+ newMergeHeadSHA, err := git.GetFullCommitID(ctx, ctx.tmpBasePath, "HEAD")
+ if err != nil {
+ return fmt.Errorf("Failed to get full commit id for HEAD: %w", err)
+ }
+ if baseHeadSHA == newMergeHeadSHA {
+ return nil
+ }
+
+ // Original repo to read template from.
+ baseGitRepo, err := gitrepo.OpenRepository(ctx, ctx.pr.BaseRepo)
+ if err != nil {
+ log.Error("Unable to get Git repo for rebase: %v", err)
+ return err
+ }
+ defer baseGitRepo.Close()
+
+ // Amend last commit message based on template, if one exists
+ newMessage, err := getRebaseAmendMessage(ctx, baseGitRepo)
+ if err != nil {
+ log.Error("Unable to get commit message for amend: %v", err)
+ return err
+ }
+
+ if newMessage != "" {
+ if err := git.NewCommand(ctx, "commit", "--amend").AddOptionFormat("--message=%s", newMessage).Run(&git.RunOpts{Dir: ctx.tmpBasePath}); err != nil {
+ log.Error("Unable to amend commit message: %v", err)
+ return err
+ }
+ }
+
+ return nil
+}
+
+// Perform rebase merge with merge commit.
+func doMergeRebaseMergeCommit(ctx *mergeContext, message string) error {
+ cmd := git.NewCommand(ctx, "merge").AddArguments("--no-ff", "--no-commit").AddDynamicArguments(stagingBranch)
+
+ if err := runMergeCommand(ctx, repo_model.MergeStyleRebaseMerge, cmd); err != nil {
+ log.Error("Unable to merge staging into base: %v", err)
+ return err
+ }
+ if err := commitAndSignNoAuthor(ctx, message); err != nil {
+ log.Error("Unable to make final commit: %v", err)
+ return err
+ }
+
+ return nil
+}
+
+// doMergeStyleRebase rebases the tracking branch onto the base branch as the current HEAD, with or without a merge commit to the original PR branch
+func doMergeStyleRebase(ctx *mergeContext, mergeStyle repo_model.MergeStyle, message string) error {
+ if err := rebaseTrackingOnToBase(ctx, mergeStyle); err != nil {
+ return err
+ }
+
+ // Checkout base branch again
+ if err := git.NewCommand(ctx, "checkout").AddDynamicArguments(baseBranch).
+ Run(ctx.RunOpts()); err != nil {
+ log.Error("git checkout base prior to merge post staging rebase %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ return fmt.Errorf("git checkout base prior to merge post staging rebase %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ }
+ ctx.outbuf.Reset()
+ ctx.errbuf.Reset()
+
+ if mergeStyle == repo_model.MergeStyleRebase {
+ return doMergeRebaseFastForward(ctx)
+ }
+
+ return doMergeRebaseMergeCommit(ctx, message)
+}
diff --git a/services/pull/merge_squash.go b/services/pull/merge_squash.go
new file mode 100644
index 0000000..197d810
--- /dev/null
+++ b/services/pull/merge_squash.go
@@ -0,0 +1,86 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "fmt"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+)
+
+// getAuthorSignatureSquash gets a commit author signature for squash commits
+func getAuthorSignatureSquash(ctx *mergeContext) (*git.Signature, error) {
+ if err := ctx.pr.Issue.LoadPoster(ctx); err != nil {
+ log.Error("%-v Issue[%d].LoadPoster: %v", ctx.pr, ctx.pr.Issue.ID, err)
+ return nil, err
+ }
+
+ // Try to get a signature from the same user in one of the commits, as the
+ // poster email might be private or commits might have a different signature
+ // than the primary email address of the poster.
+ gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpenPath(ctx, ctx.tmpBasePath)
+ if err != nil {
+ log.Error("%-v Unable to open base repository: %v", ctx.pr, err)
+ return nil, err
+ }
+ defer closer.Close()
+
+ commits, err := gitRepo.CommitsBetweenIDs(trackingBranch, "HEAD")
+ if err != nil {
+ log.Error("%-v Unable to get commits between: %s %s: %v", ctx.pr, "HEAD", trackingBranch, err)
+ return nil, err
+ }
+
+ uniqueEmails := make(container.Set[string])
+ for _, commit := range commits {
+ if commit.Author != nil && uniqueEmails.Add(commit.Author.Email) {
+ commitUser, _ := user_model.GetUserByEmail(ctx, commit.Author.Email)
+ if commitUser != nil && commitUser.ID == ctx.pr.Issue.Poster.ID {
+ return commit.Author, nil
+ }
+ }
+ }
+
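+ // No commit authored by the poster was found - fall back to the poster's profile signature.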
+ return ctx.pr.Issue.Poster.NewGitSig(), nil
+}
+
+// doMergeStyleSquash squashes the tracking branch on the current HEAD (=base)
+func doMergeStyleSquash(ctx *mergeContext, message string) error {
+ sig, err := getAuthorSignatureSquash(ctx)
+ if err != nil {
+ return fmt.Errorf("getAuthorSignatureSquash: %w", err)
+ }
+
+ cmdMerge := git.NewCommand(ctx, "merge", "--squash").AddDynamicArguments(trackingBranch)
+ if err := runMergeCommand(ctx, repo_model.MergeStyleSquash, cmdMerge); err != nil {
+ log.Error("%-v Unable to merge --squash tracking into base: %v", ctx.pr, err)
+ return err
+ }
+
+ if setting.Repository.PullRequest.AddCoCommitterTrailers && ctx.committer.String() != sig.String() {
+ // add trailer
+ message += fmt.Sprintf("\nCo-authored-by: %s\nCo-committed-by: %s\n", sig.String(), sig.String())
+ }
+ cmdCommit := git.NewCommand(ctx, "commit").
+ AddOptionFormat("--author='%s <%s>'", sig.Name, sig.Email).
+ AddOptionFormat("--message=%s", message)
+ if ctx.signKeyID == "" {
+ cmdCommit.AddArguments("--no-gpg-sign")
+ } else {
+ cmdCommit.AddOptionFormat("-S%s", ctx.signKeyID)
+ }
+ if err := cmdCommit.Run(ctx.RunOpts()); err != nil {
+ log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ return fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", ctx.pr.HeadRepo.FullName(), ctx.pr.HeadBranch, ctx.pr.BaseRepo.FullName(), ctx.pr.BaseBranch, err, ctx.outbuf.String(), ctx.errbuf.String())
+ }
+ ctx.outbuf.Reset()
+ ctx.errbuf.Reset()
+ return nil
+}
diff --git a/services/pull/merge_test.go b/services/pull/merge_test.go
new file mode 100644
index 0000000..6df6f55
--- /dev/null
+++ b/services/pull/merge_test.go
@@ -0,0 +1,67 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func Test_expandDefaultMergeMessage(t *testing.T) {
+ type args struct {
+ template string
+ vars map[string]string
+ }
+ tests := []struct {
+ name string
+ args args
+ want string
+ wantBody string
+ }{
+ {
+ name: "single line",
+ args: args{
+ template: "Merge ${PullRequestTitle}",
+ vars: map[string]string{
+ "PullRequestTitle": "PullRequestTitle",
+ "PullRequestDescription": "Pull\nRequest\nDescription\n",
+ },
+ },
+ want: "Merge PullRequestTitle",
+ wantBody: "",
+ },
+ {
+ name: "multiple lines",
+ args: args{
+ template: "Merge ${PullRequestTitle}\nDescription:\n\n${PullRequestDescription}\n",
+ vars: map[string]string{
+ "PullRequestTitle": "PullRequestTitle",
+ "PullRequestDescription": "Pull\nRequest\nDescription\n",
+ },
+ },
+ want: "Merge PullRequestTitle",
+ wantBody: "Description:\n\nPull\nRequest\nDescription\n",
+ },
+ {
+ name: "leading newlines",
+ args: args{
+ template: "\n\n\nMerge ${PullRequestTitle}\n\n\nDescription:\n\n${PullRequestDescription}\n",
+ vars: map[string]string{
+ "PullRequestTitle": "PullRequestTitle",
+ "PullRequestDescription": "Pull\nRequest\nDescription\n",
+ },
+ },
+ want: "Merge PullRequestTitle",
+ wantBody: "Description:\n\nPull\nRequest\nDescription\n",
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got, got1 := expandDefaultMergeMessage(tt.args.template, tt.args.vars)
+ assert.Equalf(t, tt.want, got, "expandDefaultMergeMessage(%v, %v)", tt.args.template, tt.args.vars)
+ assert.Equalf(t, tt.wantBody, got1, "expandDefaultMergeMessage(%v, %v)", tt.args.template, tt.args.vars)
+ })
+ }
+}
diff --git a/services/pull/patch.go b/services/pull/patch.go
new file mode 100644
index 0000000..e90b4bd
--- /dev/null
+++ b/services/pull/patch.go
@@ -0,0 +1,582 @@
+// Copyright 2019 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "bufio"
+ "context"
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/process"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+
+ "github.com/gobwas/glob"
+)
+
+// DownloadDiffOrPatch will write the patch for the pr to the writer
+func DownloadDiffOrPatch(ctx context.Context, pr *issues_model.PullRequest, w io.Writer, patch, binary bool) error {
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ log.Error("Unable to load base repository ID %d for pr #%d [%d]", pr.BaseRepoID, pr.Index, pr.ID)
+ return err
+ }
+
+ gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, pr.BaseRepo)
+ if err != nil {
+ return fmt.Errorf("OpenRepository: %w", err)
+ }
+ defer closer.Close()
+
+ if err := gitRepo.GetDiffOrPatch(pr.MergeBase, pr.GetGitRefName(), w, patch, binary); err != nil {
+ log.Error("Unable to get patch file from %s to %s in %s Error: %v", pr.MergeBase, pr.HeadBranch, pr.BaseRepo.FullName(), err)
+ return fmt.Errorf("Unable to get patch file from %s to %s in %s Error: %w", pr.MergeBase, pr.HeadBranch, pr.BaseRepo.FullName(), err)
+ }
+ return nil
+}
+
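+// patchErrorSuffices are matched against the tail of `git apply` stderr lines such as
+// "error: readme.md: patch does not apply" (illustrative filename).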
+var patchErrorSuffices = []string{
+ ": already exists in index",
+ ": patch does not apply",
+ ": already exists in working directory",
+ "unrecognized input",
+ ": No such file or directory",
+ ": does not exist in index",
+}
+
+// TestPatch will test whether a simple patch will apply
+func TestPatch(pr *issues_model.PullRequest) error {
+ ctx, _, finished := process.GetManager().AddContext(graceful.GetManager().HammerContext(), fmt.Sprintf("TestPatch: %s", pr))
+ defer finished()
+
+ prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr)
+ if err != nil {
+ if !git_model.IsErrBranchNotExist(err) {
+ log.Error("CreateTemporaryRepoForPR %-v: %v", pr, err)
+ }
+ return err
+ }
+ defer cancel()
+
+ return testPatch(ctx, prCtx, pr)
+}
+
+func testPatch(ctx context.Context, prCtx *prContext, pr *issues_model.PullRequest) error {
+ gitRepo, err := git.OpenRepository(ctx, prCtx.tmpBasePath)
+ if err != nil {
+ return fmt.Errorf("OpenRepository: %w", err)
+ }
+ defer gitRepo.Close()
+
+ // 1. update merge base
+ pr.MergeBase, _, err = git.NewCommand(ctx, "merge-base", "--", "base", "tracking").RunStdString(&git.RunOpts{Dir: prCtx.tmpBasePath})
+ if err != nil {
+ var err2 error
+ pr.MergeBase, err2 = gitRepo.GetRefCommitID(git.BranchPrefix + "base")
+ if err2 != nil {
+ return fmt.Errorf("GetMergeBase: %v and can't find commit ID for base: %w", err, err2)
+ }
+ }
+ pr.MergeBase = strings.TrimSpace(pr.MergeBase)
+ if pr.HeadCommitID, err = gitRepo.GetRefCommitID(git.BranchPrefix + "tracking"); err != nil {
+ return fmt.Errorf("GetBranchCommitID: can't find commit ID for head: %w", err)
+ }
+
+ if pr.HeadCommitID == pr.MergeBase {
+ pr.Status = issues_model.PullRequestStatusAncestor
+ return nil
+ }
+
+ // 2. Check for conflicts
+ if conflicts, err := checkConflicts(ctx, pr, gitRepo, prCtx.tmpBasePath); err != nil || conflicts || pr.Status == issues_model.PullRequestStatusEmpty {
+ return err
+ }
+
+ // 3. Check for protected files changes
+ if err = checkPullFilesProtection(ctx, pr, gitRepo); err != nil {
+ return fmt.Errorf("pr.CheckPullFilesProtection(): %v", err)
+ }
+
+ if len(pr.ChangedProtectedFiles) > 0 {
+ log.Trace("Found %d protected files changed", len(pr.ChangedProtectedFiles))
+ }
+
+ pr.Status = issues_model.PullRequestStatusMergeable
+
+ return nil
+}
+
+type errMergeConflict struct {
+ filename string
+}
+
+func (e *errMergeConflict) Error() string {
+ return fmt.Sprintf("conflict detected at: %s", e.filename)
+}
+
+func attemptMerge(ctx context.Context, file *unmergedFile, tmpBasePath string, filesToRemove *[]string, filesToAdd *[]git.IndexObjectInfo) error {
+ log.Trace("Attempt to merge:\n%v", file)
+
+ switch {
+ case file.stage1 != nil && (file.stage2 == nil || file.stage3 == nil):
+ // 1. Deleted in one or both:
+ //
+ // Conflict <==> the stage1 !SameAs to the undeleted one
+ if (file.stage2 != nil && !file.stage1.SameAs(file.stage2)) || (file.stage3 != nil && !file.stage1.SameAs(file.stage3)) {
+ // Conflict!
+ return &errMergeConflict{file.stage1.path}
+ }
+
+ // Not a genuine conflict and we can simply remove the file from the index
+ *filesToRemove = append(*filesToRemove, file.stage1.path)
+ return nil
+ case file.stage1 == nil && file.stage2 != nil && (file.stage3 == nil || file.stage2.SameAs(file.stage3)):
+ // 2. Added in ours but not in theirs or identical in both
+ //
+ // Not a genuine conflict, just add it to the index
+ *filesToAdd = append(*filesToAdd, git.IndexObjectInfo{Mode: file.stage2.mode, Object: git.MustIDFromString(file.stage2.sha), Filename: file.stage2.path})
+ return nil
+ case file.stage1 == nil && file.stage2 != nil && file.stage3 != nil && file.stage2.sha == file.stage3.sha && file.stage2.mode != file.stage3.mode:
+ // 3. Added in both with the same sha but the modes are different
+ //
+ // Conflict! (Not sure that this can actually happen, but we should handle it)
+ return &errMergeConflict{file.stage2.path}
+ case file.stage1 == nil && file.stage2 == nil && file.stage3 != nil:
+ // 4. Added in theirs but not ours:
+ //
+ // Not a genuine conflict, just add it to the index
+ *filesToAdd = append(*filesToAdd, git.IndexObjectInfo{Mode: file.stage3.mode, Object: git.MustIDFromString(file.stage3.sha), Filename: file.stage3.path})
+ return nil
+ case file.stage1 == nil:
+ // 5. Newly created in both
+ //
+ // Conflict!
+ return &errMergeConflict{file.stage2.path}
+ case file.stage2 != nil && file.stage3 != nil:
+ // 6. Modified in both - we should try to merge in the changes but first:
+ //
+ if file.stage2.mode == "120000" || file.stage3.mode == "120000" {
+ // 6a. Conflicting symbolic link change
+ return &errMergeConflict{file.stage2.path}
+ }
+ if file.stage2.mode == "160000" || file.stage3.mode == "160000" {
+ // 6b. Conflicting submodule change
+ return &errMergeConflict{file.stage2.path}
+ }
+ if file.stage2.mode != file.stage3.mode {
+ // 6c. Conflicting mode change
+ return &errMergeConflict{file.stage2.path}
+ }
+
+ // Need to get the objects from the object db to attempt to merge
+ root, _, err := git.NewCommand(ctx, "unpack-file").AddDynamicArguments(file.stage1.sha).RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ if err != nil {
+ return fmt.Errorf("unable to get root object: %s at path: %s for merging. Error: %w", file.stage1.sha, file.stage1.path, err)
+ }
+ root = strings.TrimSpace(root)
+ defer func() {
+ _ = util.Remove(filepath.Join(tmpBasePath, root))
+ }()
+
+ base, _, err := git.NewCommand(ctx, "unpack-file").AddDynamicArguments(file.stage2.sha).RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ if err != nil {
+ return fmt.Errorf("unable to get base object: %s at path: %s for merging. Error: %w", file.stage2.sha, file.stage2.path, err)
+ }
+ base = strings.TrimSpace(filepath.Join(tmpBasePath, base))
+ defer func() {
+ _ = util.Remove(base)
+ }()
+ head, _, err := git.NewCommand(ctx, "unpack-file").AddDynamicArguments(file.stage3.sha).RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ if err != nil {
+ return fmt.Errorf("unable to get head object:%s at path: %s for merging. Error: %w", file.stage3.sha, file.stage3.path, err)
+ }
+ head = strings.TrimSpace(head)
+ defer func() {
+ _ = util.Remove(filepath.Join(tmpBasePath, head))
+ }()
+
+ // now git merge-file annoyingly takes a different order to the merge-tree ...
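+ // (git merge-file <current> <base> <other> - here: ours, ancestor, theirs)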
+ _, _, conflictErr := git.NewCommand(ctx, "merge-file").AddDynamicArguments(base, root, head).RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ if conflictErr != nil {
+ return &errMergeConflict{file.stage2.path}
+ }
+
+ // base now contains the merged data
+ hash, _, err := git.NewCommand(ctx, "hash-object", "-w", "--path").AddDynamicArguments(file.stage2.path, base).RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ if err != nil {
+ return err
+ }
+ hash = strings.TrimSpace(hash)
+ *filesToAdd = append(*filesToAdd, git.IndexObjectInfo{Mode: file.stage2.mode, Object: git.MustIDFromString(hash), Filename: file.stage2.path})
+ return nil
+ default:
+ if file.stage1 != nil {
+ return &errMergeConflict{file.stage1.path}
+ } else if file.stage2 != nil {
+ return &errMergeConflict{file.stage2.path}
+ } else if file.stage3 != nil {
+ return &errMergeConflict{file.stage3.path}
+ }
+ }
+ return nil
+}
+
+// AttemptThreeWayMerge will attempt to three way merge using git read-tree and then follow the git merge-one-file algorithm to attempt to resolve basic conflicts
+func AttemptThreeWayMerge(ctx context.Context, gitPath string, gitRepo *git.Repository, base, ours, theirs, description string) (bool, []string, error) {
+ ctx, cancel := context.WithCancel(ctx)
+ defer cancel()
+
+ // First we use read-tree to do a simple three-way merge
+ if _, _, err := git.NewCommand(ctx, "read-tree", "-m").AddDynamicArguments(base, ours, theirs).RunStdString(&git.RunOpts{Dir: gitPath}); err != nil {
+ log.Error("Unable to run read-tree -m! Error: %v", err)
+ return false, nil, fmt.Errorf("unable to run read-tree -m! Error: %w", err)
+ }
+
+ var filesToRemove []string
+ var filesToAdd []git.IndexObjectInfo
+
+ // Then we use git ls-files -u to list the unmerged files and collate the triples in unmergedFiles
+ unmerged := make(chan *unmergedFile)
+ go unmergedFiles(ctx, gitPath, unmerged)
+
+ defer func() {
+ cancel()
+ for range unmerged {
+ // empty the unmerged channel
+ }
+ }()
+
+ numberOfConflicts := 0
+ conflict := false
+ conflictedFiles := make([]string, 0, 5)
+
+ for file := range unmerged {
+ if file == nil {
+ break
+ }
+ if file.err != nil {
+ cancel()
+ return false, nil, file.err
+ }
+
+ // OK, now we have the unmerged file triplet - attempt to merge it
+ if err := attemptMerge(ctx, file, gitPath, &filesToRemove, &filesToAdd); err != nil {
+ if conflictErr, ok := err.(*errMergeConflict); ok {
+ log.Trace("Conflict: %s in %s", conflictErr.filename, description)
+ conflict = true
+ if numberOfConflicts < 10 {
+ conflictedFiles = append(conflictedFiles, conflictErr.filename)
+ }
+ numberOfConflicts++
+ continue
+ }
+ return false, nil, err
+ }
+ }
+
+ // Add and remove files in one command, as this is slow with many files otherwise
+ if err := gitRepo.RemoveFilesFromIndex(filesToRemove...); err != nil {
+ return false, nil, err
+ }
+ if err := gitRepo.AddObjectsToIndex(filesToAdd...); err != nil {
+ return false, nil, err
+ }
+
+ return conflict, conflictedFiles, nil
+}
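+
+// A minimal usage sketch for AttemptThreeWayMerge (hypothetical revision names, for illustration only):
+//
+//	conflict, files, err := AttemptThreeWayMerge(ctx, tmpPath, gitRepo, "mergeBaseSHA", "base", "tracking", "PR #1")
+//	if err == nil && conflict {
+//		log.Trace("conflicted files: %v", files)
+//	}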
+
+func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo *git.Repository, tmpBasePath string) (bool, error) {
+ // 1. checkConflicts is responsible for (re)computing the conflict status - so reset it first
+ pr.ConflictedFiles = nil
+
+ // 2. AttemptThreeWayMerge first - this is much quicker than plain patch to base
+ description := fmt.Sprintf("PR[%d] %s/%s#%d", pr.ID, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, pr.Index)
+ conflict, conflictFiles, err := AttemptThreeWayMerge(ctx,
+ tmpBasePath, gitRepo, pr.MergeBase, "base", "tracking", description)
+ if err != nil {
+ return false, err
+ }
+
+ if !conflict {
+ // No conflicts detected so we need to check if the patch is empty...
+ // a. Write the newly merged tree and check the new tree-hash
+ var treeHash string
+ treeHash, _, err = git.NewCommand(ctx, "write-tree").RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ if err != nil {
+ lsfiles, _, _ := git.NewCommand(ctx, "ls-files", "-u").RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ return false, fmt.Errorf("unable to write unconflicted tree: %w\n`git ls-files -u`:\n%s", err, lsfiles)
+ }
+ treeHash = strings.TrimSpace(treeHash)
+ baseTree, err := gitRepo.GetTree("base")
+ if err != nil {
+ return false, err
+ }
+
+ // b. compare the new tree-hash with the base tree hash
+ if treeHash == baseTree.ID.String() {
+ log.Debug("PullRequest[%d]: Patch is empty - ignoring", pr.ID)
+ pr.Status = issues_model.PullRequestStatusEmpty
+ }
+
+ return false, nil
+ }
+
+ // 3. OK the three-way merge method has detected conflicts
+ // 3a. Are we still testing with git apply? If not, set the conflict status and move on
+ if !setting.Repository.PullRequest.TestConflictingPatchesWithGitApply {
+ pr.Status = issues_model.PullRequestStatusConflict
+ pr.ConflictedFiles = conflictFiles
+
+ log.Trace("Found %d files conflicted: %v", len(pr.ConflictedFiles), pr.ConflictedFiles)
+ return true, nil
+ }
+
+ // 3b. Create a plain patch from head to base
+ tmpPatchFile, err := os.CreateTemp("", "patch")
+ if err != nil {
+ log.Error("Unable to create temporary patch file! Error: %v", err)
+ return false, fmt.Errorf("unable to create temporary patch file! Error: %w", err)
+ }
+ defer func() {
+ _ = util.Remove(tmpPatchFile.Name())
+ }()
+
+ if err := gitRepo.GetDiffBinary(pr.MergeBase, "tracking", tmpPatchFile); err != nil {
+ tmpPatchFile.Close()
+ log.Error("Unable to get patch file from %s to %s in %s Error: %v", pr.MergeBase, pr.HeadBranch, pr.BaseRepo.FullName(), err)
+ return false, fmt.Errorf("unable to get patch file from %s to %s in %s Error: %w", pr.MergeBase, pr.HeadBranch, pr.BaseRepo.FullName(), err)
+ }
+ stat, err := tmpPatchFile.Stat()
+ if err != nil {
+ tmpPatchFile.Close()
+ return false, fmt.Errorf("unable to stat patch file: %w", err)
+ }
+ patchPath := tmpPatchFile.Name()
+ tmpPatchFile.Close()
+
+ // 3c. if the size of that patch is 0 - there can be no conflicts!
+ if stat.Size() == 0 {
+ log.Debug("PullRequest[%d]: Patch is empty - ignoring", pr.ID)
+ pr.Status = issues_model.PullRequestStatusEmpty
+ return false, nil
+ }
+
+ log.Trace("PullRequest[%d].testPatch (patchPath): %s", pr.ID, patchPath)
+
+ // 4. Read the base branch into the index of the temporary repository
+ _, _, err = git.NewCommand(gitRepo.Ctx, "read-tree", "base").RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ if err != nil {
+ return false, fmt.Errorf("git read-tree %s: %w", pr.BaseBranch, err)
+ }
+
+ // 5. Now get the pull request configuration to check if we need to ignore whitespace
+ prUnit, err := pr.BaseRepo.GetUnit(ctx, unit.TypePullRequests)
+ if err != nil {
+ return false, err
+ }
+ prConfig := prUnit.PullRequestsConfig()
+
+ // 6. Prepare the arguments to apply the patch against the index
+ cmdApply := git.NewCommand(gitRepo.Ctx, "apply", "--check", "--cached")
+ if prConfig.IgnoreWhitespaceConflicts {
+ cmdApply.AddArguments("--ignore-whitespace")
+ }
+ is3way := false
+ if git.CheckGitVersionAtLeast("2.32.0") == nil {
+ cmdApply.AddArguments("--3way")
+ is3way = true
+ }
+ cmdApply.AddDynamicArguments(patchPath)
+
+ // 7. Prep the pipe:
+ // - Here we could do the equivalent of:
+ // `git apply --check --cached patch_file > conflicts`
+ // Then iterate through the conflicts. However, that means storing all the conflicts
+ // in memory - which is very wasteful.
+ // - alternatively we can do the equivalent of:
+ // `git apply --check ... | grep ...`
+ // meaning we don't store all of the conflicts unnecessarily.
+ stderrReader, stderrWriter, err := os.Pipe()
+ if err != nil {
+ log.Error("Unable to open stderr pipe: %v", err)
+ return false, fmt.Errorf("unable to open stderr pipe: %w", err)
+ }
+ defer func() {
+ _ = stderrReader.Close()
+ _ = stderrWriter.Close()
+ }()
+
+ // 8. Run the check command
+ conflict = false
+ err = cmdApply.Run(&git.RunOpts{
+ Dir: tmpBasePath,
+ Stderr: stderrWriter,
+ PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error {
+ // Close the writer end of the pipe to begin processing
+ _ = stderrWriter.Close()
+ defer func() {
+ // Close the reader on return to terminate the git command if necessary
+ _ = stderrReader.Close()
+ }()
+
+ const prefix = "error: patch failed:"
+ const errorPrefix = "error: "
+ const threewayFailed = "Failed to perform three-way merge..."
+ const appliedPatchPrefix = "Applied patch to '"
+ const withConflicts = "' with conflicts."
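+ // Illustrative stderr lines these constants match:
+ //   "error: patch failed: README.md:5"
+ //   "Applied patch to 'README.md' with conflicts."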
+
+ conflicts := make(container.Set[string])
+
+ // Now scan the output from the command
+ scanner := bufio.NewScanner(stderrReader)
+ for scanner.Scan() {
+ line := scanner.Text()
+ log.Trace("PullRequest[%d].testPatch: stderr: %s", pr.ID, line)
+ if strings.HasPrefix(line, prefix) {
+ conflict = true
+ filepath := strings.TrimSpace(strings.Split(line[len(prefix):], ":")[0])
+ conflicts.Add(filepath)
+ } else if is3way && line == threewayFailed {
+ conflict = true
+ } else if strings.HasPrefix(line, errorPrefix) {
+ conflict = true
+ for _, suffix := range patchErrorSuffices {
+ if strings.HasSuffix(line, suffix) {
+ filepath := strings.TrimSpace(strings.TrimSuffix(line[len(errorPrefix):], suffix))
+ if filepath != "" {
+ conflicts.Add(filepath)
+ }
+ break
+ }
+ }
+ } else if is3way && strings.HasPrefix(line, appliedPatchPrefix) && strings.HasSuffix(line, withConflicts) {
+ conflict = true
+ filepath := strings.TrimPrefix(strings.TrimSuffix(line, withConflicts), appliedPatchPrefix)
+ if filepath != "" {
+ conflicts.Add(filepath)
+ }
+ }
+ // only list 10 conflicted files
+ if len(conflicts) >= 10 {
+ break
+ }
+ }
+
+ if len(conflicts) > 0 {
+ pr.ConflictedFiles = make([]string, 0, len(conflicts))
+ for key := range conflicts {
+ pr.ConflictedFiles = append(pr.ConflictedFiles, key)
+ }
+ }
+
+ return nil
+ },
+ })
+
+ // 9. Check whether any conflicted files were found; "err" could be non-nil here, so we should ignore it if conflicts were found.
+ // Note: with 3-way merge enabled, git apply does not return an error merely because conflicts were found, so the error and the conflict state must be checked independently.
+ if len(pr.ConflictedFiles) > 0 {
+ if conflict {
+ pr.Status = issues_model.PullRequestStatusConflict
+ log.Trace("Found %d files conflicted: %v", len(pr.ConflictedFiles), pr.ConflictedFiles)
+
+ return true, nil
+ }
+ } else if err != nil {
+ return false, fmt.Errorf("git apply --check: %w", err)
+ }
+ return false, nil
+}
+
+// CheckFileProtection checks whether the changes between oldCommitID and newCommitID touch any protected file patterns
+func CheckFileProtection(repo *git.Repository, oldCommitID, newCommitID string, patterns []glob.Glob, limit int, env []string) ([]string, error) {
+ if len(patterns) == 0 {
+ return nil, nil
+ }
+ affectedFiles, err := git.GetAffectedFiles(repo, oldCommitID, newCommitID, env)
+ if err != nil {
+ return nil, err
+ }
+ changedProtectedFiles := make([]string, 0, limit)
+ for _, affectedFile := range affectedFiles {
+ lpath := strings.ToLower(affectedFile)
+ for _, pat := range patterns {
+ if pat.Match(lpath) {
+ changedProtectedFiles = append(changedProtectedFiles, lpath)
+ break
+ }
+ }
+ if len(changedProtectedFiles) >= limit {
+ break
+ }
+ }
+ if len(changedProtectedFiles) > 0 {
+ err = models.ErrFilePathProtected{
+ Path: changedProtectedFiles[0],
+ }
+ }
+ return changedProtectedFiles, err
+}
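+
+// A minimal usage sketch (hypothetical pattern and commit IDs, assuming the patterns were compiled with gobwas/glob):
+//
+//	patterns := []glob.Glob{glob.MustCompile("docs/**")}
+//	changed, err := CheckFileProtection(gitRepo, "oldSHA", "newSHA", patterns, 10, os.Environ())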
+
+// CheckUnprotectedFiles checks if the commits between oldCommitID and newCommitID only touch unprotected files
+func CheckUnprotectedFiles(repo *git.Repository, oldCommitID, newCommitID string, patterns []glob.Glob, env []string) (bool, error) {
+ if len(patterns) == 0 {
+ return false, nil
+ }
+ affectedFiles, err := git.GetAffectedFiles(repo, oldCommitID, newCommitID, env)
+ if err != nil {
+ return false, err
+ }
+ for _, affectedFile := range affectedFiles {
+ lpath := strings.ToLower(affectedFile)
+ unprotected := false
+ for _, pat := range patterns {
+ if pat.Match(lpath) {
+ unprotected = true
+ break
+ }
+ }
+ if !unprotected {
+ return false, nil
+ }
+ }
+ return true, nil
+}
+
+// checkPullFilesProtection checks if the PR changed protected files and saves the results
+func checkPullFilesProtection(ctx context.Context, pr *issues_model.PullRequest, gitRepo *git.Repository) error {
+ if pr.Status == issues_model.PullRequestStatusEmpty {
+ pr.ChangedProtectedFiles = nil
+ return nil
+ }
+
+ pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
+ if err != nil {
+ return err
+ }
+
+ if pb == nil {
+ pr.ChangedProtectedFiles = nil
+ return nil
+ }
+
+ pr.ChangedProtectedFiles, err = CheckFileProtection(gitRepo, pr.MergeBase, "tracking", pb.GetProtectedFilePatterns(), 10, os.Environ())
+ if err != nil && !models.IsErrFilePathProtected(err) {
+ return err
+ }
+ return nil
+}
diff --git a/services/pull/patch_unmerged.go b/services/pull/patch_unmerged.go
new file mode 100644
index 0000000..c60c48d
--- /dev/null
+++ b/services/pull/patch_unmerged.go
@@ -0,0 +1,203 @@
+// Copyright 2021 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "bufio"
+ "context"
+ "fmt"
+ "io"
+ "os"
+ "strconv"
+ "strings"
+
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+)
+
+// lsFileLine is a quadruplet struct (plus error) representing a partially parsed line from ls-files
+type lsFileLine struct {
+ mode string
+ sha string
+ stage int
+ path string
+ err error
+}
+
+// SameAs checks if two lsFileLines are referring to the same path, sha and mode (ignoring stage)
+func (line *lsFileLine) SameAs(other *lsFileLine) bool {
+ if line == nil || other == nil {
+ return false
+ }
+
+ if line.err != nil || other.err != nil {
+ return false
+ }
+
+ return line.mode == other.mode &&
+ line.sha == other.sha &&
+ line.path == other.path
+}
+
+// String provides a string representation for logging
+func (line *lsFileLine) String() string {
+ if line == nil {
+ return "<nil>"
+ }
+ if line.err != nil {
+ return fmt.Sprintf("%d %s %s %s %v", line.stage, line.mode, line.path, line.sha, line.err)
+ }
+ return fmt.Sprintf("%d %s %s %s", line.stage, line.mode, line.path, line.sha)
+}
+
+// readUnmergedLsFileLines calls git ls-files -u -z and parses the lines into mode-sha-stage-path quadruplets,
+// pushing them to the provided channel and closing it at the end
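+//
+// An example (NUL-terminated) input line looks like (illustrative sha and path):
+//
+//	100644 2e65efe2a145dda7ee51d1741299f848e5bf752e 1	conflicted.txt
+//
+// i.e. "<mode> <sha> <stage>\t<path>".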
+func readUnmergedLsFileLines(ctx context.Context, tmpBasePath string, outputChan chan *lsFileLine) {
+ defer func() {
+ // Always close the outputChan at the end of this function
+ close(outputChan)
+ }()
+
+ lsFilesReader, lsFilesWriter, err := os.Pipe()
+ if err != nil {
+ log.Error("Unable to open stderr pipe: %v", err)
+ outputChan <- &lsFileLine{err: fmt.Errorf("unable to open stderr pipe: %w", err)}
+ return
+ }
+ defer func() {
+ _ = lsFilesWriter.Close()
+ _ = lsFilesReader.Close()
+ }()
+
+ stderr := &strings.Builder{}
+ err = git.NewCommand(ctx, "ls-files", "-u", "-z").
+ Run(&git.RunOpts{
+ Dir: tmpBasePath,
+ Stdout: lsFilesWriter,
+ Stderr: stderr,
+ PipelineFunc: func(_ context.Context, _ context.CancelFunc) error {
+ _ = lsFilesWriter.Close()
+ defer func() {
+ _ = lsFilesReader.Close()
+ }()
+ bufferedReader := bufio.NewReader(lsFilesReader)
+
+ for {
+ line, err := bufferedReader.ReadString('\000')
+ if err != nil {
+ if err == io.EOF {
+ return nil
+ }
+ return err
+ }
+ toemit := &lsFileLine{}
+
+ split := strings.SplitN(line, " ", 3)
+ if len(split) < 3 {
+ return fmt.Errorf("malformed line: %s", line)
+ }
+ toemit.mode = split[0]
+ toemit.sha = split[1]
+
+ if len(split[2]) < 4 {
+ return fmt.Errorf("malformed line: %s", line)
+ }
+
+ toemit.stage, err = strconv.Atoi(split[2][0:1])
+ if err != nil {
+ return fmt.Errorf("malformed line: %s", line)
+ }
+
+ toemit.path = split[2][2 : len(split[2])-1]
+ outputChan <- toemit
+ }
+ },
+ })
+ if err != nil {
+ outputChan <- &lsFileLine{err: fmt.Errorf("git ls-files -u -z: %w", git.ConcatenateError(err, stderr.String()))}
+ }
+}
+
+// unmergedFile is a triple (plus error) of lsFileLines split into stages 1, 2 & 3.
+type unmergedFile struct {
+ stage1 *lsFileLine
+ stage2 *lsFileLine
+ stage3 *lsFileLine
+ err error
+}
+
+// String provides a string representation of an unmerged file for logging
+func (u *unmergedFile) String() string {
+ if u == nil {
+ return "<nil>"
+ }
+ if u.err != nil {
+ return fmt.Sprintf("error: %v\n%v\n%v\n%v", u.err, u.stage1, u.stage2, u.stage3)
+ }
+ return fmt.Sprintf("%v\n%v\n%v", u.stage1, u.stage2, u.stage3)
+}
+
+// unmergedFiles will collate the output from readUnmergedLsFileLines into file triplets and send them
+// to the provided channel, closing it at the end.
+func unmergedFiles(ctx context.Context, tmpBasePath string, unmerged chan *unmergedFile) {
+ defer func() {
+ // Always close the channel
+ close(unmerged)
+ }()
+
+ ctx, cancel := context.WithCancel(ctx)
+ lsFileLineChan := make(chan *lsFileLine, 10) // give lsFileLineChan a buffer
+ go readUnmergedLsFileLines(ctx, tmpBasePath, lsFileLineChan)
+ defer func() {
+ cancel()
+ for range lsFileLineChan {
+ // empty channel
+ }
+ }()
+
+ next := &unmergedFile{}
+ for line := range lsFileLineChan {
+ log.Trace("Got line: %v Current State:\n%v", line, next)
+ if line.err != nil {
+ log.Error("Unable to run ls-files -u -z! Error: %v", line.err)
+ unmerged <- &unmergedFile{err: fmt.Errorf("unable to run ls-files -u -z! Error: %w", line.err)}
+ return
+ }
+
+ // stages are always emitted in the order 1, 2, 3, but any of them may be missing
+ switch line.stage {
+ case 0:
+ // Should not happen, as stage 0 represents a successfully merged file - we will tolerate and ignore it though
+ case 1:
+ if next.stage1 != nil || next.stage2 != nil || next.stage3 != nil {
+ // Emit the previously collected unmerged file (stage1, stage2, stage3) before starting a new one
+ unmerged <- next
+ }
+ next = &unmergedFile{stage1: line}
+ case 2:
+ if next.stage3 != nil || next.stage2 != nil || (next.stage1 != nil && next.stage1.path != line.path) {
+ // Emit the previously collected unmerged file (stage1, stage2, stage3) before starting a new one
+ unmerged <- next
+ next = &unmergedFile{}
+ }
+ next.stage2 = line
+ case 3:
+ if next.stage3 != nil || (next.stage1 != nil && next.stage1.path != line.path) || (next.stage2 != nil && next.stage2.path != line.path) {
+ // Emit the previously collected unmerged file (stage1, stage2, stage3) before starting a new one
+ unmerged <- next
+ next = &unmergedFile{}
+ }
+ next.stage3 = line
+ default:
+ log.Error("Unexpected stage %d for path %s in run ls-files -u -z!", line.stage, line.path)
+ unmerged <- &unmergedFile{err: fmt.Errorf("unexpected stage %d for path %s in git ls-files -u -z", line.stage, line.path)}
+ return
+ }
+ }
+ // Emit the final collected unmerged file (stage1, stage2, stage3), if any
+ if next.stage1 != nil || next.stage2 != nil || next.stage3 != nil {
+ unmerged <- next
+ }
+}
diff --git a/services/pull/pull.go b/services/pull/pull.go
new file mode 100644
index 0000000..6af7d8b
--- /dev/null
+++ b/services/pull/pull.go
@@ -0,0 +1,1032 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "io"
+ "os"
+ "regexp"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/process"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/sync"
+ "code.gitea.io/gitea/modules/util"
+ gitea_context "code.gitea.io/gitea/services/context"
+ issue_service "code.gitea.io/gitea/services/issue"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+// TODO: use clustered lock (unique queue? or *abuse* cache)
+var pullWorkingPool = sync.NewExclusivePool()
+
+// NewPullRequest creates a new pull request with labels for the repository.
+func NewPullRequest(ctx context.Context, repo *repo_model.Repository, issue *issues_model.Issue, labelIDs []int64, uuids []string, pr *issues_model.PullRequest, assigneeIDs []int64) error {
+ // Check if the doer is not blocked by the repository's owner.
+ if user_model.IsBlocked(ctx, repo.OwnerID, issue.PosterID) {
+ return user_model.ErrBlockedByUser
+ }
+
+ prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr)
+ if err != nil {
+ if !git_model.IsErrBranchNotExist(err) {
+ log.Error("CreateTemporaryRepoForPR %-v: %v", pr, err)
+ }
+ return err
+ }
+ defer cancel()
+
+ if err := testPatch(ctx, prCtx, pr); err != nil {
+ return err
+ }
+
+ divergence, err := git.GetDivergingCommits(ctx, prCtx.tmpBasePath, baseBranch, trackingBranch)
+ if err != nil {
+ return err
+ }
+ pr.CommitsAhead = divergence.Ahead
+ pr.CommitsBehind = divergence.Behind
+
+ assigneeCommentMap := make(map[int64]*issues_model.Comment)
+
+ // add the initial push of commits as a comment
+ baseGitRepo, err := gitrepo.OpenRepository(ctx, pr.BaseRepo)
+ if err != nil {
+ return err
+ }
+ defer baseGitRepo.Close()
+
+ var reviewNotifers []*issue_service.ReviewRequestNotifier
+ if err := db.WithTx(ctx, func(ctx context.Context) error {
+ if err := issues_model.NewPullRequest(ctx, repo, issue, labelIDs, uuids, pr); err != nil {
+ return err
+ }
+
+ for _, assigneeID := range assigneeIDs {
+ comment, err := issue_service.AddAssigneeIfNotAssigned(ctx, issue, issue.Poster, assigneeID, false)
+ if err != nil {
+ return err
+ }
+ assigneeCommentMap[assigneeID] = comment
+ }
+
+ pr.Issue = issue
+ issue.PullRequest = pr
+
+ if pr.Flow == issues_model.PullRequestFlowGithub {
+ err = PushToBaseRepo(ctx, pr)
+ } else {
+ err = UpdateRef(ctx, pr)
+ }
+ if err != nil {
+ return err
+ }
+
+ compareInfo, err := baseGitRepo.GetCompareInfo(pr.BaseRepo.RepoPath(),
+ git.BranchPrefix+pr.BaseBranch, pr.GetGitRefName(), false, false)
+ if err != nil {
+ return err
+ }
+ if len(compareInfo.Commits) == 0 {
+ return nil
+ }
+
+ data := issues_model.PushActionContent{IsForcePush: false}
+ data.CommitIDs = make([]string, 0, len(compareInfo.Commits))
+ for i := len(compareInfo.Commits) - 1; i >= 0; i-- {
+ data.CommitIDs = append(data.CommitIDs, compareInfo.Commits[i].ID.String())
+ }
+
+ dataJSON, err := json.Marshal(data)
+ if err != nil {
+ return err
+ }
+
+ ops := &issues_model.CreateCommentOptions{
+ Type: issues_model.CommentTypePullRequestPush,
+ Doer: issue.Poster,
+ Repo: repo,
+ Issue: pr.Issue,
+ IsForcePush: false,
+ Content: string(dataJSON),
+ }
+
+ if _, err = issues_model.CreateComment(ctx, ops); err != nil {
+ return err
+ }
+
+ if !pr.IsWorkInProgress(ctx) {
+ reviewNotifers, err = issue_service.PullRequestCodeOwnersReview(ctx, issue, pr)
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+ }); err != nil {
+ // cleanup: this will only remove the reference, the real commits will be cleaned up by the next GC
+ if err1 := baseGitRepo.RemoveReference(pr.GetGitRefName()); err1 != nil {
+ log.Error("RemoveReference: %v", err1)
+ }
+ return err
+ }
+ baseGitRepo.Close() // close immediately, as the notifications below will open the repository again
+
+ issue_service.ReviewRequestNotify(ctx, issue, issue.Poster, reviewNotifers)
+
+ mentions, err := issues_model.FindAndUpdateIssueMentions(ctx, issue, issue.Poster, issue.Content)
+ if err != nil {
+ return err
+ }
+ notify_service.NewPullRequest(ctx, pr, mentions)
+ if len(issue.Labels) > 0 {
+ notify_service.IssueChangeLabels(ctx, issue.Poster, issue, issue.Labels, nil)
+ }
+ if issue.Milestone != nil {
+ notify_service.IssueChangeMilestone(ctx, issue.Poster, issue, 0)
+ }
+ for _, assigneeID := range assigneeIDs {
+ assignee, err := user_model.GetUserByID(ctx, assigneeID)
+ if err != nil {
+ return err
+ }
+ notify_service.IssueChangeAssignee(ctx, issue.Poster, issue, assignee, false, assigneeCommentMap[assigneeID])
+ }
+
+ return nil
+}
+
+// ChangeTargetBranch changes the target branch of this pull request, as the given user.
+func ChangeTargetBranch(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, targetBranch string) (err error) {
+ pullWorkingPool.CheckIn(fmt.Sprint(pr.ID))
+ defer pullWorkingPool.CheckOut(fmt.Sprint(pr.ID))
+
+ // Current target branch is already the same
+ if pr.BaseBranch == targetBranch {
+ return nil
+ }
+
+ if pr.Issue.IsClosed {
+ return issues_model.ErrIssueIsClosed{
+ ID: pr.Issue.ID,
+ RepoID: pr.Issue.RepoID,
+ Index: pr.Issue.Index,
+ }
+ }
+
+ if pr.HasMerged {
+ return models.ErrPullRequestHasMerged{
+ ID: pr.ID,
+ IssueID: pr.Index,
+ HeadRepoID: pr.HeadRepoID,
+ BaseRepoID: pr.BaseRepoID,
+ HeadBranch: pr.HeadBranch,
+ BaseBranch: pr.BaseBranch,
+ }
+ }
+
+ // Check if branches are equal
+ branchesEqual, err := IsHeadEqualWithBranch(ctx, pr, targetBranch)
+ if err != nil {
+ return err
+ }
+ if branchesEqual {
+ return git_model.ErrBranchesEqual{
+ HeadBranchName: pr.HeadBranch,
+ BaseBranchName: targetBranch,
+ }
+ }
+
+ // Check if pull request for the new target branch already exists
+ existingPr, err := issues_model.GetUnmergedPullRequest(ctx, pr.HeadRepoID, pr.BaseRepoID, pr.HeadBranch, targetBranch, issues_model.PullRequestFlowGithub)
+ if existingPr != nil {
+ return issues_model.ErrPullRequestAlreadyExists{
+ ID: existingPr.ID,
+ IssueID: existingPr.Index,
+ HeadRepoID: existingPr.HeadRepoID,
+ BaseRepoID: existingPr.BaseRepoID,
+ HeadBranch: existingPr.HeadBranch,
+ BaseBranch: existingPr.BaseBranch,
+ }
+ }
+ if err != nil && !issues_model.IsErrPullRequestNotExist(err) {
+ return err
+ }
+
+ // Set new target branch
+ oldBranch := pr.BaseBranch
+ pr.BaseBranch = targetBranch
+
+ // Refresh patch
+ if err := TestPatch(pr); err != nil {
+ return err
+ }
+
+ // Update target branch, PR diff and status
+ // This is the same as checkAndUpdateStatus in check service, but also updates base_branch
+ if pr.Status == issues_model.PullRequestStatusChecking {
+ pr.Status = issues_model.PullRequestStatusMergeable
+ }
+
+ // Update Commit Divergence
+ divergence, err := GetDiverging(ctx, pr)
+ if err != nil {
+ return err
+ }
+ pr.CommitsAhead = divergence.Ahead
+ pr.CommitsBehind = divergence.Behind
+
+ if err := pr.UpdateColsIfNotMerged(ctx, "merge_base", "status", "conflicted_files", "changed_protected_files", "base_branch", "commits_ahead", "commits_behind"); err != nil {
+ return err
+ }
+
+ // Create comment
+ options := &issues_model.CreateCommentOptions{
+ Type: issues_model.CommentTypeChangeTargetBranch,
+ Doer: doer,
+ Repo: pr.Issue.Repo,
+ Issue: pr.Issue,
+ OldRef: oldBranch,
+ NewRef: targetBranch,
+ }
+ if _, err = issues_model.CreateComment(ctx, options); err != nil {
+ return fmt.Errorf("CreateChangeTargetBranchComment: %w", err)
+ }
+
+ return nil
+}
+
+func checkForInvalidation(ctx context.Context, requests issues_model.PullRequestList, repoID int64, doer *user_model.User, branch string) error {
+ repo, err := repo_model.GetRepositoryByID(ctx, repoID)
+ if err != nil {
+ return fmt.Errorf("GetRepositoryByIDCtx: %w", err)
+ }
+ gitRepo, err := gitrepo.OpenRepository(ctx, repo)
+ if err != nil {
+ return fmt.Errorf("gitrepo.OpenRepository: %w", err)
+ }
+ go func() {
+ // FIXME: graceful: We need to tell the manager we're doing something...
+ err := InvalidateCodeComments(ctx, requests, doer, gitRepo, branch)
+ if err != nil {
+ log.Error("PullRequestList.InvalidateCodeComments: %v", err)
+ }
+ gitRepo.Close()
+ }()
+ return nil
+}
+
+// AddTestPullRequestTask adds new test tasks for the given head/base repository and head/base branch,
+// and generates a new patch for testing as needed.
+func AddTestPullRequestTask(ctx context.Context, doer *user_model.User, repoID int64, branch string, isSync bool, oldCommitID, newCommitID string, timeNano int64) {
+ description := fmt.Sprintf("AddTestPullRequestTask [head_repo_id: %d, head_branch: %s]: only pull requests created before nano time %d will be considered", repoID, branch, timeNano)
+ log.Trace(description)
+ go graceful.GetManager().RunWithShutdownContext(func(shutdownCtx context.Context) {
+ // make it a process to allow for cancellation (especially during integration tests where no global shutdown happens)
+ ctx, _, finished := process.GetManager().AddContext(shutdownCtx, description)
+ defer finished()
+ // There is no sensible way to shut this down ":-("
+ // If you don't let it run all the way then you will lose data
+ // TODO: graceful: TestPullRequest needs to become a queue!
+
+ TestPullRequest(ctx, doer, repoID, timeNano, branch, isSync, oldCommitID, newCommitID)
+ })
+}
+
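+// TestPullRequest checks the unmerged pull requests that use the given branch as head or base:
+// head PRs are pushed to the base repository and queued for retesting, while PRs targeting the
+// branch get their commit divergence updated and are queued as well.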
+func TestPullRequest(ctx context.Context, doer *user_model.User, repoID, olderThan int64, branch string, isSync bool, oldCommitID, newCommitID string) {
+ // Only consider PR that are older than olderThan, which is the time at
+ // which the newCommitID was added to repoID.
+ //
+ // * commit C is pushed
+ // * the git hook queues AddTestPullRequestTask for processing and returns with success
+ // * TestPullRequest is not called yet
+ // * a pull request P with commit C as the head is created
+ // * TestPullRequest runs and ignores P because it was created after the commit was received
+ //
+ // In other words, a PR must not be updated based on events that happened before it existed
+ prs, err := issues_model.GetUnmergedPullRequestsByHeadInfoMax(ctx, repoID, olderThan, branch)
+ if err != nil {
+ log.Error("Find pull requests [head_repo_id: %d, head_branch: %s]: %v", repoID, branch, err)
+ return
+ }
+
+ for _, pr := range prs {
+ log.Trace("Updating PR[id=%d,index=%d]: composing new test task", pr.ID, pr.Index)
+ if pr.Flow == issues_model.PullRequestFlowGithub {
+ if err := PushToBaseRepo(ctx, pr); err != nil {
+ log.Error("PushToBaseRepo: %v", err)
+ continue
+ }
+ } else {
+ continue
+ }
+
+ AddToTaskQueue(ctx, pr)
+ comment, err := CreatePushPullComment(ctx, doer, pr, oldCommitID, newCommitID)
+ if err == nil && comment != nil {
+ notify_service.PullRequestPushCommits(ctx, doer, pr, comment)
+ }
+ }
+
+ if isSync {
+ requests := issues_model.PullRequestList(prs)
+ if err = requests.LoadAttributes(ctx); err != nil {
+ log.Error("PullRequestList.LoadAttributes: %v", err)
+ }
+ if invalidationErr := checkForInvalidation(ctx, requests, repoID, doer, branch); invalidationErr != nil {
+ log.Error("checkForInvalidation: %v", invalidationErr)
+ }
+ if err == nil {
+ for _, pr := range prs {
+ ValidatePullRequest(ctx, pr, newCommitID, oldCommitID, doer)
+ notify_service.PullRequestSynchronized(ctx, doer, pr)
+ }
+ }
+ }
+
+ log.Trace("TestPullRequest [base_repo_id: %d, base_branch: %s]: finding pull requests", repoID, branch)
+ prs, err = issues_model.GetUnmergedPullRequestsByBaseInfo(ctx, repoID, branch)
+ if err != nil {
+ log.Error("Find pull requests [base_repo_id: %d, base_branch: %s]: %v", repoID, branch, err)
+ return
+ }
+ for _, pr := range prs {
+ divergence, err := GetDiverging(ctx, pr)
+ if err != nil {
+ if git_model.IsErrBranchNotExist(err) && !git.IsBranchExist(ctx, pr.HeadRepo.RepoPath(), pr.HeadBranch) {
+ log.Warn("Cannot test PR %s/%d: head_branch %s no longer exists", pr.BaseRepo.Name, pr.IssueID, pr.HeadBranch)
+ } else {
+ log.Error("GetDiverging: %v", err)
+ }
+ } else {
+ err = pr.UpdateCommitDivergence(ctx, divergence.Ahead, divergence.Behind)
+ if err != nil {
+ log.Error("UpdateCommitDivergence: %v", err)
+ }
+ }
+ AddToTaskQueue(ctx, pr)
+ }
+}
+
+// ValidatePullRequest marks old reviews as stale if the diff to the merge base has changed,
+// dismisses all approval reviews if the protected branch rule requires it,
+// and updates the commit divergence.
+func ValidatePullRequest(ctx context.Context, pr *issues_model.PullRequest, newCommitID, oldCommitID string, doer *user_model.User) {
+ objectFormat := git.ObjectFormatFromName(pr.BaseRepo.ObjectFormatName)
+ if newCommitID != "" && newCommitID != objectFormat.EmptyObjectID().String() {
+ changed, err := checkIfPRContentChanged(ctx, pr, oldCommitID, newCommitID)
+ if err != nil {
+ log.Error("checkIfPRContentChanged: %v", err)
+ }
+ if changed {
+ if err := issues_model.MarkReviewsAsStale(ctx, pr.IssueID); err != nil {
+ log.Error("MarkReviewsAsStale: %v", err)
+ }
+
+ pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
+ if err != nil {
+ log.Error("GetFirstMatchProtectedBranchRule: %v", err)
+ }
+ if pb != nil && pb.DismissStaleApprovals {
+ if err := DismissApprovalReviews(ctx, doer, pr); err != nil {
+ log.Error("DismissApprovalReviews: %v", err)
+ }
+ }
+ }
+ if err := issues_model.MarkReviewsAsNotStale(ctx, pr.IssueID, newCommitID); err != nil {
+ log.Error("MarkReviewsAsNotStale: %v", err)
+ }
+ divergence, err := GetDiverging(ctx, pr)
+ if err != nil {
+ log.Error("GetDiverging: %v", err)
+ } else {
+ err = pr.UpdateCommitDivergence(ctx, divergence.Ahead, divergence.Behind)
+ if err != nil {
+ log.Error("UpdateCommitDivergence: %v", err)
+ }
+ }
+ }
+}
+
+// checkIfPRContentChanged checks whether the diff to the target branch has been changed by a push
+// A commit can be considered to leave the PR untouched if the patch/diff with its merge base is unchanged
+func checkIfPRContentChanged(ctx context.Context, pr *issues_model.PullRequest, oldCommitID, newCommitID string) (hasChanged bool, err error) {
+ prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr)
+ if err != nil {
+ log.Error("CreateTemporaryRepoForPR %-v: %v", pr, err)
+ return false, err
+ }
+ defer cancel()
+
+ tmpRepo, err := git.OpenRepository(ctx, prCtx.tmpBasePath)
+ if err != nil {
+ return false, fmt.Errorf("OpenRepository: %w", err)
+ }
+ defer tmpRepo.Close()
+
+ // Find the merge-base
+ _, base, err := tmpRepo.GetMergeBase("", "base", "tracking")
+ if err != nil {
+ return false, fmt.Errorf("GetMergeBase: %w", err)
+ }
+
+ cmd := git.NewCommand(ctx, "diff", "--name-only", "-z").AddDynamicArguments(newCommitID, oldCommitID, base)
+ stdoutReader, stdoutWriter, err := os.Pipe()
+ if err != nil {
+ return false, fmt.Errorf("unable to open pipe for to run diff: %w", err)
+ }
+
+ stderr := new(bytes.Buffer)
+ if err := cmd.Run(&git.RunOpts{
+ Dir: prCtx.tmpBasePath,
+ Stdout: stdoutWriter,
+ Stderr: stderr,
+ PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error {
+ _ = stdoutWriter.Close()
+ defer func() {
+ _ = stdoutReader.Close()
+ }()
+ return util.IsEmptyReader(stdoutReader)
+ },
+ }); err != nil {
+ if err == util.ErrNotEmpty {
+ return true, nil
+ }
+ err = git.ConcatenateError(err, stderr.String())
+
+ log.Error("Unable to run diff on %s %s %s in tempRepo for PR[%d]%s/%s...%s/%s: Error: %v",
+ newCommitID, oldCommitID, base,
+ pr.ID, pr.BaseRepo.FullName(), pr.BaseBranch, pr.HeadRepo.FullName(), pr.HeadBranch,
+ err)
+
+ return false, fmt.Errorf("Unable to run git diff --name-only -z %s %s %s: %w", newCommitID, oldCommitID, base, err)
+ }
+
+ return false, nil
+}
+
+// PushToBaseRepo pushes commits from branches of head repository to
+// corresponding branches of base repository.
+// FIXME: Only push branches that have actually been updated?
+func PushToBaseRepo(ctx context.Context, pr *issues_model.PullRequest) (err error) {
+ return pushToBaseRepoHelper(ctx, pr, "")
+}
+
+func pushToBaseRepoHelper(ctx context.Context, pr *issues_model.PullRequest, prefixHeadBranch string) (err error) {
+ log.Trace("PushToBaseRepo[%d]: pushing commits to base repo '%s'", pr.BaseRepoID, pr.GetGitRefName())
+
+ if err := pr.LoadHeadRepo(ctx); err != nil {
+ log.Error("Unable to load head repository for PR[%d] Error: %v", pr.ID, err)
+ return err
+ }
+ headRepoPath := pr.HeadRepo.RepoPath()
+
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ log.Error("Unable to load base repository for PR[%d] Error: %v", pr.ID, err)
+ return err
+ }
+ baseRepoPath := pr.BaseRepo.RepoPath()
+
+ if err = pr.LoadIssue(ctx); err != nil {
+ return fmt.Errorf("unable to load issue %d for pr %d: %w", pr.IssueID, pr.ID, err)
+ }
+ if err = pr.Issue.LoadPoster(ctx); err != nil {
+ return fmt.Errorf("unable to load poster %d for pr %d: %w", pr.Issue.PosterID, pr.ID, err)
+ }
+
+ gitRefName := pr.GetGitRefName()
+
+ if err := git.Push(ctx, headRepoPath, git.PushOptions{
+ Remote: baseRepoPath,
+ Branch: prefixHeadBranch + pr.HeadBranch + ":" + gitRefName,
+ Force: true,
+ // Use InternalPushingEnvironment here because we know that pre-receive and post-receive do not run on a refs/pulls/...
+ Env: repo_module.InternalPushingEnvironment(pr.Issue.Poster, pr.BaseRepo),
+ }); err != nil {
+ if git.IsErrPushOutOfDate(err) {
+ // This should not happen as we're using force!
+ log.Error("Unable to push PR head for %s#%d (%-v:%s) due to ErrPushOfDate: %v", pr.BaseRepo.FullName(), pr.Index, pr.BaseRepo, gitRefName, err)
+ return err
+ } else if git.IsErrPushRejected(err) {
+ rejectErr := err.(*git.ErrPushRejected)
+ log.Info("Unable to push PR head for %s#%d (%-v:%s) due to rejection:\nStdout: %s\nStderr: %s\nError: %v", pr.BaseRepo.FullName(), pr.Index, pr.BaseRepo, gitRefName, rejectErr.StdOut, rejectErr.StdErr, rejectErr.Err)
+ return err
+ } else if git.IsErrMoreThanOne(err) {
+ if prefixHeadBranch != "" {
+ log.Info("Can't push with %s%s", prefixHeadBranch, pr.HeadBranch)
+ return err
+ }
+ log.Info("Retrying to push with %s%s", git.BranchPrefix, pr.HeadBranch)
+ err = pushToBaseRepoHelper(ctx, pr, git.BranchPrefix)
+ return err
+ }
+ log.Error("Unable to push PR head for %s#%d (%-v:%s) due to Error: %v", pr.BaseRepo.FullName(), pr.Index, pr.BaseRepo, gitRefName, err)
+ return fmt.Errorf("Push: %s:%s %s:%s %w", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), gitRefName, err)
+ }
+
+ return nil
+}
+
+// UpdateRef updates refs/pull/<id>/head directly for an agit flow pull request
+func UpdateRef(ctx context.Context, pr *issues_model.PullRequest) (err error) {
+ log.Trace("UpdateRef[%d]: upgate pull request ref in base repo '%s'", pr.ID, pr.GetGitRefName())
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ log.Error("Unable to load base repository for PR[%d] Error: %v", pr.ID, err)
+ return err
+ }
+
+ _, _, err = git.NewCommand(ctx, "update-ref").AddDynamicArguments(pr.GetGitRefName(), pr.HeadCommitID).RunStdString(&git.RunOpts{Dir: pr.BaseRepo.RepoPath()})
+ if err != nil {
+ log.Error("Unable to update ref in base repository for PR[%d] Error: %v", pr.ID, err)
+ }
+
+ return err
+}
+
+type errlist []error
+
+func (errs errlist) Error() string {
+ if len(errs) > 0 {
+ var buf strings.Builder
+ for i, err := range errs {
+ if i > 0 {
+ buf.WriteString(", ")
+ }
+ buf.WriteString(err.Error())
+ }
+ return buf.String()
+ }
+ return ""
+}
+
+// RetargetChildrenOnMerge retargets child pull requests on merge, if possible
+func RetargetChildrenOnMerge(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) error {
+ if setting.Repository.PullRequest.RetargetChildrenOnMerge && pr.BaseRepoID == pr.HeadRepoID {
+ return RetargetBranchPulls(ctx, doer, pr.HeadRepoID, pr.HeadBranch, pr.BaseBranch)
+ }
+ return nil
+}
+
+// RetargetBranchPulls changes the target branch for all pull requests whose base branch is the given branch
+// Both branch and targetBranch must be in the same repo (for security reasons)
+func RetargetBranchPulls(ctx context.Context, doer *user_model.User, repoID int64, branch, targetBranch string) error {
+ prs, err := issues_model.GetUnmergedPullRequestsByBaseInfo(ctx, repoID, branch)
+ if err != nil {
+ return err
+ }
+
+ if err := issues_model.PullRequestList(prs).LoadAttributes(ctx); err != nil {
+ return err
+ }
+
+ var errs errlist
+ for _, pr := range prs {
+ if err = pr.Issue.LoadRepo(ctx); err != nil {
+ errs = append(errs, err)
+ } else if err = ChangeTargetBranch(ctx, pr, doer, targetBranch); err != nil &&
+ !issues_model.IsErrIssueIsClosed(err) && !models.IsErrPullRequestHasMerged(err) &&
+ !issues_model.IsErrPullRequestAlreadyExists(err) {
+ errs = append(errs, err)
+ }
+ }
+
+ if len(errs) > 0 {
+ return errs
+ }
+ return nil
+}
+
+// CloseBranchPulls closes all the pull requests whose head branch is the given branch
+func CloseBranchPulls(ctx context.Context, doer *user_model.User, repoID int64, branch string) error {
+ prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(ctx, repoID, branch)
+ if err != nil {
+ return err
+ }
+
+ prs2, err := issues_model.GetUnmergedPullRequestsByBaseInfo(ctx, repoID, branch)
+ if err != nil {
+ return err
+ }
+
+ prs = append(prs, prs2...)
+ if err := issues_model.PullRequestList(prs).LoadAttributes(ctx); err != nil {
+ return err
+ }
+
+ var errs errlist
+ for _, pr := range prs {
+ if err = issue_service.ChangeStatus(ctx, pr.Issue, doer, "", true); err != nil && !issues_model.IsErrPullWasClosed(err) && !issues_model.IsErrDependenciesLeft(err) {
+ errs = append(errs, err)
+ }
+ }
+ if len(errs) > 0 {
+ return errs
+ }
+ return nil
+}
+
+// CloseRepoBranchesPulls closes all pull requests whose head branches are in the given repository, but only if their base repository is a different one
+func CloseRepoBranchesPulls(ctx context.Context, doer *user_model.User, repo *repo_model.Repository) error {
+ branches, _, err := gitrepo.GetBranchesByPath(ctx, repo, 0, 0)
+ if err != nil {
+ return err
+ }
+
+ var errs errlist
+ for _, branch := range branches {
+ prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(ctx, repo.ID, branch.Name)
+ if err != nil {
+ return err
+ }
+
+ if err = issues_model.PullRequestList(prs).LoadAttributes(ctx); err != nil {
+ return err
+ }
+
+ for _, pr := range prs {
+ // If the base repository for this pr is this repository there is no need to close it
+ // as it is going to be deleted anyway
+ if pr.BaseRepoID == repo.ID {
+ continue
+ }
+ if err = issue_service.ChangeStatus(ctx, pr.Issue, doer, "", true); err != nil && !issues_model.IsErrPullWasClosed(err) {
+ errs = append(errs, err)
+ }
+ }
+ }
+
+ if len(errs) > 0 {
+ return errs
+ }
+ return nil
+}
+
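+// commitMessageTrailersPattern matches a trailer block at the end of a commit message, e.g. (illustrative):
+//
+//	Reviewed-by: Alice <alice@example.com>
+//	Signed-off-by: Bob <bob@example.com>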
+var commitMessageTrailersPattern = regexp.MustCompile(`(?:^|\n\n)(?:[\w-]+[ \t]*:[^\n]+\n*(?:[ \t]+[^\n]+\n*)*)+$`)
+
+// GetSquashMergeCommitMessages returns the commit messages between head and merge base (if there is one)
+func GetSquashMergeCommitMessages(ctx context.Context, pr *issues_model.PullRequest) string {
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("Cannot load issue %d for PR id %d: Error: %v", pr.IssueID, pr.ID, err)
+ return ""
+ }
+
+ if err := pr.Issue.LoadPoster(ctx); err != nil {
+ log.Error("Cannot load poster %d for pr id %d, index %d Error: %v", pr.Issue.PosterID, pr.ID, pr.Index, err)
+ return ""
+ }
+
+ if pr.HeadRepo == nil {
+ var err error
+ pr.HeadRepo, err = repo_model.GetRepositoryByID(ctx, pr.HeadRepoID)
+ if err != nil {
+ log.Error("GetRepositoryByIdCtx[%d]: %v", pr.HeadRepoID, err)
+ return ""
+ }
+ }
+
+ gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, pr.HeadRepo)
+ if err != nil {
+ log.Error("Unable to open head repository: Error: %v", err)
+ return ""
+ }
+ defer closer.Close()
+
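+ // For GitHub flow the head commit is resolved from the head branch; for other flows it is resolved from the PR's git ref.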
+ var headCommit *git.Commit
+ if pr.Flow == issues_model.PullRequestFlowGithub {
+ headCommit, err = gitRepo.GetBranchCommit(pr.HeadBranch)
+ } else {
+ pr.HeadCommitID, err = gitRepo.GetRefCommitID(pr.GetGitRefName())
+ if err != nil {
+ log.Error("Unable to get head commit: %s Error: %v", pr.GetGitRefName(), err)
+ return ""
+ }
+ headCommit, err = gitRepo.GetCommit(pr.HeadCommitID)
+ }
+ if err != nil {
+ log.Error("Unable to get head commit: %s Error: %v", pr.HeadBranch, err)
+ return ""
+ }
+
+ mergeBase, err := gitRepo.GetCommit(pr.MergeBase)
+ if err != nil {
+ log.Error("Unable to get merge base commit: %s Error: %v", pr.MergeBase, err)
+ return ""
+ }
+
+ limit := setting.Repository.PullRequest.DefaultMergeMessageCommitsLimit
+
+ commits, err := gitRepo.CommitsBetweenLimit(headCommit, mergeBase, limit, 0)
+ if err != nil {
+ log.Error("Unable to get commits between: %s %s Error: %v", pr.HeadBranch, pr.MergeBase, err)
+ return ""
+ }
+
+ posterSig := pr.Issue.Poster.NewGitSig().String()
+
+ uniqueAuthors := make(container.Set[string])
+ authors := make([]string, 0, len(commits))
+ stringBuilder := strings.Builder{}
+
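+ // When commit messages are not populated, the PR description becomes the squash message body,
+ // followed by a blank line unless the description already ends in a trailer section.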
+ if !setting.Repository.PullRequest.PopulateSquashCommentWithCommitMessages {
+ message := strings.TrimSpace(pr.Issue.Content)
+ stringBuilder.WriteString(message)
+ if stringBuilder.Len() > 0 {
+ stringBuilder.WriteRune('\n')
+ if !commitMessageTrailersPattern.MatchString(message) {
+ stringBuilder.WriteRune('\n')
+ }
+ }
+ }
+
+ // commits list is in reverse chronological order
+ first := true
+ for i := len(commits) - 1; i >= 0; i-- {
+ commit := commits[i]
+
+ if setting.Repository.PullRequest.PopulateSquashCommentWithCommitMessages {
+ maxSize := setting.Repository.PullRequest.DefaultMergeMessageSize
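+ // A negative maxSize means no limit; otherwise the message is truncated with an ellipsis once the limit is reached.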
+ if maxSize < 0 || stringBuilder.Len() < maxSize {
+ var toWrite []byte
+ if first {
+ first = false
+ toWrite = []byte(strings.TrimPrefix(commit.CommitMessage, pr.Issue.Title))
+ } else {
+ toWrite = []byte(commit.CommitMessage)
+ }
+
+ if len(toWrite) > maxSize-stringBuilder.Len() && maxSize > -1 {
+ toWrite = append(toWrite[:maxSize-stringBuilder.Len()], "..."...)
+ }
+ if _, err := stringBuilder.Write(toWrite); err != nil {
+ log.Error("Unable to write commit message Error: %v", err)
+ return ""
+ }
+
+ if _, err := stringBuilder.WriteRune('\n'); err != nil {
+ log.Error("Unable to write commit message Error: %v", err)
+ return ""
+ }
+ }
+ }
+
+ authorString := commit.Author.String()
+ if uniqueAuthors.Add(authorString) && authorString != posterSig {
+ // Compare the user account as well to avoid adding the same author multiple
+ // times when email addresses are private or multiple emails are used.
+ commitUser, _ := user_model.GetUserByEmail(ctx, commit.Author.Email)
+ if commitUser == nil || commitUser.ID != pr.Issue.Poster.ID {
+ authors = append(authors, authorString)
+ }
+ }
+ }
+
+ // Collect the remaining authors beyond the commit limit, if configured
+ if limit >= 0 && setting.Repository.PullRequest.DefaultMergeMessageAllAuthors {
+ skip := limit
+ limit = 30
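+ // Page through the remaining commits in batches, collecting any authors missed by the initial limited query.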
+ for {
+ commits, err := gitRepo.CommitsBetweenLimit(headCommit, mergeBase, limit, skip)
+ if err != nil {
+ log.Error("Unable to get commits between: %s %s Error: %v", pr.HeadBranch, pr.MergeBase, err)
+ return ""
+ }
+ if len(commits) == 0 {
+ break
+ }
+ for _, commit := range commits {
+ authorString := commit.Author.String()
+ if uniqueAuthors.Add(authorString) && authorString != posterSig {
+ commitUser, _ := user_model.GetUserByEmail(ctx, commit.Author.Email)
+ if commitUser == nil || commitUser.ID != pr.Issue.Poster.ID {
+ authors = append(authors, authorString)
+ }
+ }
+ }
+ skip += limit
+ }
+ }
+
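+ // Append a Co-authored-by trailer for every distinct author other than the poster.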
+ for _, author := range authors {
+ if _, err := stringBuilder.WriteString("Co-authored-by: "); err != nil {
+ log.Error("Unable to write to string builder Error: %v", err)
+ return ""
+ }
+ if _, err := stringBuilder.WriteString(author); err != nil {
+ log.Error("Unable to write to string builder Error: %v", err)
+ return ""
+ }
+ if _, err := stringBuilder.WriteRune('\n'); err != nil {
+ log.Error("Unable to write to string builder Error: %v", err)
+ return ""
+ }
+ }
+
+ return stringBuilder.String()
+}
+
+// GetIssuesLastCommitStatus returns a map of pull request ID to the most recent commit's latest status
+func GetIssuesLastCommitStatus(ctx context.Context, issues issues_model.IssueList) (map[int64]*git_model.CommitStatus, error) {
+ _, lastStatus, err := GetIssuesAllCommitStatus(ctx, issues)
+ return lastStatus, err
+}
+
+// GetIssuesAllCommitStatus returns a map of pull request ID to a list of all statuses for the most recent commit, as well as a map of pull request ID to only the commit's latest status
+func GetIssuesAllCommitStatus(ctx context.Context, issues issues_model.IssueList) (map[int64][]*git_model.CommitStatus, map[int64]*git_model.CommitStatus, error) {
+ if err := issues.LoadPullRequests(ctx); err != nil {
+ return nil, nil, err
+ }
+ if _, err := issues.LoadRepositories(ctx); err != nil {
+ return nil, nil, err
+ }
+
+ var (
+ gitRepos = make(map[int64]*git.Repository)
+ res = make(map[int64][]*git_model.CommitStatus)
+ lastRes = make(map[int64]*git_model.CommitStatus)
+ err error
+ )
+ defer func() {
+ for _, gitRepo := range gitRepos {
+ gitRepo.Close()
+ }
+ }()
+
+ for _, issue := range issues {
+ if !issue.IsPull {
+ continue
+ }
+ gitRepo, ok := gitRepos[issue.RepoID]
+ if !ok {
+ gitRepo, err = gitrepo.OpenRepository(ctx, issue.Repo)
+ if err != nil {
+ log.Error("Cannot open git repository %-v for issue #%d[%d]. Error: %v", issue.Repo, issue.Index, issue.ID, err)
+ continue
+ }
+ gitRepos[issue.RepoID] = gitRepo
+ }
+
+ statuses, lastStatus, err := getAllCommitStatus(ctx, gitRepo, issue.PullRequest)
+ if err != nil {
+ log.Error("getAllCommitStatus: can't get commit statuses of pull [%d]: %v", issue.PullRequest.ID, err)
+ continue
+ }
+ res[issue.PullRequest.ID] = statuses
+ lastRes[issue.PullRequest.ID] = lastStatus
+ }
+ return res, lastRes, nil
+}
+
+// getAllCommitStatus gets the PR's commit statuses.
+func getAllCommitStatus(ctx context.Context, gitRepo *git.Repository, pr *issues_model.PullRequest) (statuses []*git_model.CommitStatus, lastStatus *git_model.CommitStatus, err error) {
+ sha, shaErr := gitRepo.GetRefCommitID(pr.GetGitRefName())
+ if shaErr != nil {
+ return nil, nil, shaErr
+ }
+
+ statuses, _, err = git_model.GetLatestCommitStatus(ctx, pr.BaseRepo.ID, sha, db.ListOptionsAll)
+ lastStatus = git_model.CalcCommitStatus(statuses)
+ return statuses, lastStatus, err
+}
+
+// IsHeadEqualWithBranch returns whether the commits of branchName are available in the pull request head
+func IsHeadEqualWithBranch(ctx context.Context, pr *issues_model.PullRequest, branchName string) (bool, error) {
+ var err error
+ if err = pr.LoadBaseRepo(ctx); err != nil {
+ return false, err
+ }
+ baseGitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, pr.BaseRepo)
+ if err != nil {
+ return false, err
+ }
+ defer closer.Close()
+
+ baseCommit, err := baseGitRepo.GetBranchCommit(branchName)
+ if err != nil {
+ return false, err
+ }
+
+ if err = pr.LoadHeadRepo(ctx); err != nil {
+ return false, err
+ }
+ var headGitRepo *git.Repository
+ if pr.HeadRepoID == pr.BaseRepoID {
+ headGitRepo = baseGitRepo
+ } else {
+ var closer io.Closer
+
+ headGitRepo, closer, err = gitrepo.RepositoryFromContextOrOpen(ctx, pr.HeadRepo)
+ if err != nil {
+ return false, err
+ }
+ defer closer.Close()
+ }
+
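+ // GitHub-flow PRs resolve the head commit from the head repository's branch; other flows read the head ref stored in the base repository.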
+ var headCommit *git.Commit
+ if pr.Flow == issues_model.PullRequestFlowGithub {
+ headCommit, err = headGitRepo.GetBranchCommit(pr.HeadBranch)
+ if err != nil {
+ return false, err
+ }
+ } else {
+ pr.HeadCommitID, err = baseGitRepo.GetRefCommitID(pr.GetGitRefName())
+ if err != nil {
+ return false, err
+ }
+ if headCommit, err = baseGitRepo.GetCommit(pr.HeadCommitID); err != nil {
+ return false, err
+ }
+ }
+ return baseCommit.HasPreviousCommit(headCommit.ID)
+}
+
+type CommitInfo struct {
+ Summary string `json:"summary"`
+ CommitterOrAuthorName string `json:"committer_or_author_name"`
+ ID string `json:"id"`
+ ShortSha string `json:"short_sha"`
+ Time string `json:"time"`
+}
+
+// GetPullCommits returns all commits on given pull request and the last review commit sha
+// Attention: The last review commit sha must be from the latest review whose commit id is not empty.
+// So the type of the latest review cannot be "ReviewTypeRequest".
+func GetPullCommits(ctx *gitea_context.Context, issue *issues_model.Issue) ([]CommitInfo, string, error) {
+ pull := issue.PullRequest
+
+ baseGitRepo := ctx.Repo.GitRepo
+
+ if err := pull.LoadBaseRepo(ctx); err != nil {
+ return nil, "", err
+ }
+ baseBranch := pull.BaseBranch
+ if pull.HasMerged {
+ baseBranch = pull.MergeBase
+ }
+ prInfo, err := baseGitRepo.GetCompareInfo(pull.BaseRepo.RepoPath(), baseBranch, pull.GetGitRefName(), true, false)
+ if err != nil {
+ return nil, "", err
+ }
+
+ commits := make([]CommitInfo, 0, len(prInfo.Commits))
+
+ for _, commit := range prInfo.Commits {
+ var committerOrAuthorName string
+ var commitTime time.Time
+ if commit.Author != nil {
+ committerOrAuthorName = commit.Author.Name
+ commitTime = commit.Author.When
+ } else {
+ committerOrAuthorName = commit.Committer.Name
+ commitTime = commit.Committer.When
+ }
+
+ commits = append(commits, CommitInfo{
+ Summary: commit.Summary(),
+ CommitterOrAuthorName: committerOrAuthorName,
+ ID: commit.ID.String(),
+ ShortSha: base.ShortSha(commit.ID.String()),
+ Time: commitTime.Format(time.RFC3339),
+ })
+ }
+
+ var lastReviewCommitID string
+ if ctx.IsSigned {
+ // get last review of current user and store information in context (if available)
+ lastreview, err := issues_model.FindLatestReviews(ctx, issues_model.FindReviewOptions{
+ IssueID: issue.ID,
+ ReviewerID: ctx.Doer.ID,
+ Types: []issues_model.ReviewType{
+ issues_model.ReviewTypeApprove,
+ issues_model.ReviewTypeComment,
+ issues_model.ReviewTypeReject,
+ },
+ })
+
+ if err != nil && !issues_model.IsErrReviewNotExist(err) {
+ return nil, "", err
+ }
+ if len(lastreview) > 0 {
+ lastReviewCommitID = lastreview[0].CommitID
+ }
+ }
+
+ return commits, lastReviewCommitID, nil
+}
diff --git a/services/pull/pull_test.go b/services/pull/pull_test.go
new file mode 100644
index 0000000..c51619e
--- /dev/null
+++ b/services/pull/pull_test.go
@@ -0,0 +1,94 @@
+// Copyright 2019 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+// TODO TestPullRequest_PushToBaseRepo
+
+func TestPullRequest_CommitMessageTrailersPattern(t *testing.T) {
+ // Not a valid trailer section
+ assert.False(t, commitMessageTrailersPattern.MatchString(""))
+ assert.False(t, commitMessageTrailersPattern.MatchString("No trailer."))
+ assert.False(t, commitMessageTrailersPattern.MatchString("Signed-off-by: Bob <bob@example.com>\nNot a trailer due to following text."))
+ assert.False(t, commitMessageTrailersPattern.MatchString("Message body not correctly separated from trailer section by empty line.\nSigned-off-by: Bob <bob@example.com>"))
+ // Valid trailer section
+ assert.True(t, commitMessageTrailersPattern.MatchString("Signed-off-by: Bob <bob@example.com>"))
+ assert.True(t, commitMessageTrailersPattern.MatchString("Signed-off-by: Bob <bob@example.com>\nOther-Trailer: Value"))
+ assert.True(t, commitMessageTrailersPattern.MatchString("Message body correctly separated from trailer section by empty line.\n\nSigned-off-by: Bob <bob@example.com>"))
+ assert.True(t, commitMessageTrailersPattern.MatchString("Multiple trailers.\n\nSigned-off-by: Bob <bob@example.com>\nOther-Trailer: Value"))
+ assert.True(t, commitMessageTrailersPattern.MatchString("Newline after trailer section.\n\nSigned-off-by: Bob <bob@example.com>\n"))
+ assert.True(t, commitMessageTrailersPattern.MatchString("No space after colon is accepted.\n\nSigned-off-by:Bob <bob@example.com>"))
+ assert.True(t, commitMessageTrailersPattern.MatchString("Additional whitespace is accepted.\n\nSigned-off-by \t : \tBob <bob@example.com> "))
+ assert.True(t, commitMessageTrailersPattern.MatchString("Folded value.\n\nFolded-trailer: This is\n a folded\n trailer value\nOther-Trailer: Value"))
+}
+
+func TestPullRequest_GetDefaultMergeMessage_InternalTracker(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2})
+
+ require.NoError(t, pr.LoadBaseRepo(db.DefaultContext))
+ gitRepo, err := gitrepo.OpenRepository(git.DefaultContext, pr.BaseRepo)
+ require.NoError(t, err)
+ defer gitRepo.Close()
+
+ mergeMessage, body, err := GetDefaultMergeMessage(db.DefaultContext, gitRepo, pr, "")
+ require.NoError(t, err)
+ assert.Equal(t, "Merge pull request 'issue3' (#3) from branch2 into master", mergeMessage)
+ assert.Equal(t, "Reviewed-on: https://try.gitea.io/user2/repo1/pulls/3\n", body)
+
+ pr.BaseRepoID = 1
+ pr.HeadRepoID = 2
+ mergeMessage, _, err = GetDefaultMergeMessage(db.DefaultContext, gitRepo, pr, "")
+ require.NoError(t, err)
+ assert.Equal(t, "Merge pull request 'issue3' (#3) from user2/repo1:branch2 into master", mergeMessage)
+}
+
+func TestPullRequest_GetDefaultMergeMessage_ExternalTracker(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ externalTracker := repo_model.RepoUnit{
+ Type: unit.TypeExternalTracker,
+ Config: &repo_model.ExternalTrackerConfig{
+ ExternalTrackerFormat: "https://someurl.com/{user}/{repo}/{issue}",
+ },
+ }
+ baseRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ baseRepo.Units = []*repo_model.RepoUnit{&externalTracker}
+
+ pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2, BaseRepo: baseRepo})
+
+ require.NoError(t, pr.LoadBaseRepo(db.DefaultContext))
+ gitRepo, err := gitrepo.OpenRepository(git.DefaultContext, pr.BaseRepo)
+ require.NoError(t, err)
+ defer gitRepo.Close()
+
+ mergeMessage, _, err := GetDefaultMergeMessage(db.DefaultContext, gitRepo, pr, "")
+ require.NoError(t, err)
+
+ assert.Equal(t, "Merge pull request 'issue3' (!3) from branch2 into master", mergeMessage)
+
+ pr.BaseRepoID = 1
+ pr.HeadRepoID = 2
+ pr.BaseRepo = nil
+ pr.HeadRepo = nil
+ mergeMessage, _, err = GetDefaultMergeMessage(db.DefaultContext, gitRepo, pr, "")
+ require.NoError(t, err)
+
+ assert.Equal(t, "Merge pull request 'issue3' (#3) from user2/repo2:branch2 into master", mergeMessage)
+}
diff --git a/services/pull/review.go b/services/pull/review.go
new file mode 100644
index 0000000..927c431
--- /dev/null
+++ b/services/pull/review.go
@@ -0,0 +1,465 @@
+// Copyright 2019 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "regexp"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+var notEnoughLines = regexp.MustCompile(`fatal: file .* has only \d+ lines?`)
+
+// ErrDismissRequestOnClosedPR represents an error when a user tries to dismiss a review associated with a closed or merged PR.
+type ErrDismissRequestOnClosedPR struct{}
+
+// IsErrDismissRequestOnClosedPR checks if an error is an ErrDismissRequestOnClosedPR.
+func IsErrDismissRequestOnClosedPR(err error) bool {
+ _, ok := err.(ErrDismissRequestOnClosedPR)
+ return ok
+}
+
+func (err ErrDismissRequestOnClosedPR) Error() string {
+ return "can't dismiss a review associated to a closed or merged PR"
+}
+
+func (err ErrDismissRequestOnClosedPR) Unwrap() error {
+ return util.ErrPermissionDenied
+}
+
+// checkInvalidation checks whether the line a code comment refers to was changed by another commit.
+// If the line was changed, the comment is invalidated.
+func checkInvalidation(ctx context.Context, c *issues_model.Comment, repo *git.Repository, branch string) error {
+ // FIXME differentiate between previous and proposed line
+ commit, err := repo.LineBlame(branch, repo.Path, c.TreePath, uint(c.UnsignedLine()))
+ if err != nil && (strings.Contains(err.Error(), "fatal: no such path") || notEnoughLines.MatchString(err.Error())) {
+ c.Invalidated = true
+ return issues_model.UpdateCommentInvalidate(ctx, c)
+ }
+ if err != nil {
+ return err
+ }
+ if c.CommitSHA != "" && c.CommitSHA != commit.ID.String() {
+ c.Invalidated = true
+ return issues_model.UpdateCommentInvalidate(ctx, c)
+ }
+ return nil
+}
+
+// InvalidateCodeComments looks up the PRs' code comments and invalidates those affected by the change
+func InvalidateCodeComments(ctx context.Context, prs issues_model.PullRequestList, doer *user_model.User, repo *git.Repository, branch string) error {
+ if len(prs) == 0 {
+ return nil
+ }
+ issueIDs := prs.GetIssueIDs()
+
+ codeComments, err := db.Find[issues_model.Comment](ctx, issues_model.FindCommentsOptions{
+ ListOptions: db.ListOptionsAll,
+ Type: issues_model.CommentTypeCode,
+ Invalidated: optional.Some(false),
+ IssueIDs: issueIDs,
+ })
+ if err != nil {
+ return fmt.Errorf("find code comments: %v", err)
+ }
+ for _, comment := range codeComments {
+ if err := checkInvalidation(ctx, comment, repo, branch); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// CreateCodeComment creates a comment on the code line
+func CreateCodeComment(ctx context.Context, doer *user_model.User, gitRepo *git.Repository, issue *issues_model.Issue, line int64, content, treePath string, pendingReview bool, replyReviewID int64, latestCommitID string, attachments []string) (*issues_model.Comment, error) {
+ var (
+ existsReview bool
+ err error
+ )
+
+ // CreateCodeComment() is used for:
+ // - Single comments
+ // - Comments that are part of a review
+ // - Comments that reply to an existing review
+
+ if !pendingReview && replyReviewID != 0 {
+ // It's not part of a review; maybe a reply to a review comment or a single comment.
+ // Check if there are reviews for that line already; if there are, this is a reply
+ if existsReview, err = issues_model.ReviewExists(ctx, issue, treePath, line); err != nil {
+ return nil, err
+ }
+ }
+
+ // Comments that are replies don't require a review header to show up in the issue view
+ if !pendingReview && existsReview {
+ if err = issue.LoadRepo(ctx); err != nil {
+ return nil, err
+ }
+
+ comment, err := CreateCodeCommentKnownReviewID(ctx,
+ doer,
+ issue.Repo,
+ issue,
+ content,
+ treePath,
+ line,
+ replyReviewID,
+ attachments,
+ )
+ if err != nil {
+ return nil, err
+ }
+
+ mentions, err := issues_model.FindAndUpdateIssueMentions(ctx, issue, doer, comment.Content)
+ if err != nil {
+ return nil, err
+ }
+
+ notify_service.CreateIssueComment(ctx, doer, issue.Repo, issue, comment, mentions)
+
+ return comment, nil
+ }
+
+ review, err := issues_model.GetCurrentReview(ctx, doer, issue)
+ if err != nil {
+ if !issues_model.IsErrReviewNotExist(err) {
+ return nil, err
+ }
+
+ if review, err = issues_model.CreateReview(ctx, issues_model.CreateReviewOptions{
+ Type: issues_model.ReviewTypePending,
+ Reviewer: doer,
+ Issue: issue,
+ Official: false,
+ CommitID: latestCommitID,
+ }); err != nil {
+ return nil, err
+ }
+ }
+
+ comment, err := CreateCodeCommentKnownReviewID(ctx,
+ doer,
+ issue.Repo,
+ issue,
+ content,
+ treePath,
+ line,
+ review.ID,
+ attachments,
+ )
+ if err != nil {
+ return nil, err
+ }
+
+ if !pendingReview && !existsReview {
+ // Submit the review we've just created so the comment shows up in the issue view
+ if _, _, err = SubmitReview(ctx, doer, gitRepo, issue, issues_model.ReviewTypeComment, "", latestCommitID, nil); err != nil {
+ return nil, err
+ }
+ }
+
+ // NOTICE: if it's a pending review, the notifications will not be fired until the user submits the review.
+
+ return comment, nil
+}
+
+// CreateCodeCommentKnownReviewID creates a plain code comment at the specified line / path
+func CreateCodeCommentKnownReviewID(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, issue *issues_model.Issue, content, treePath string, line, reviewID int64, attachments []string) (*issues_model.Comment, error) {
+ var commitID, patch string
+ if err := issue.LoadPullRequest(ctx); err != nil {
+ return nil, fmt.Errorf("LoadPullRequest: %w", err)
+ }
+ pr := issue.PullRequest
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ return nil, fmt.Errorf("LoadBaseRepo: %w", err)
+ }
+ gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, pr.BaseRepo)
+ if err != nil {
+ return nil, fmt.Errorf("RepositoryFromContextOrOpen: %w", err)
+ }
+ defer closer.Close()
+
+ invalidated := false
+ head := pr.GetGitRefName()
+ if line > 0 {
+ if reviewID != 0 {
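+ // Reuse the commit SHA, patch and invalidation state from the first existing
+ // comment on this review at the same line and path, if there is one.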
+ first, err := issues_model.FindComments(ctx, &issues_model.FindCommentsOptions{
+ ReviewID: reviewID,
+ Line: line,
+ TreePath: treePath,
+ Type: issues_model.CommentTypeCode,
+ ListOptions: db.ListOptions{
+ PageSize: 1,
+ Page: 1,
+ },
+ })
+ if err == nil && len(first) > 0 {
+ commitID = first[0].CommitSHA
+ invalidated = first[0].Invalidated
+ patch = first[0].Patch
+ } else if err != nil && !issues_model.IsErrCommentNotExist(err) {
+ return nil, fmt.Errorf("Find first comment for %d line %d path %s. Error: %w", reviewID, line, treePath, err)
+ } else {
+ review, err := issues_model.GetReviewByID(ctx, reviewID)
+ if err == nil && len(review.CommitID) > 0 {
+ head = review.CommitID
+ } else if err != nil && !issues_model.IsErrReviewNotExist(err) {
+ return nil, fmt.Errorf("GetReviewByID %d. Error: %w", reviewID, err)
+ }
+ }
+ }
+
+ if len(commitID) == 0 {
+ // FIXME validate treePath
+ // Get latest commit referencing the commented line
+ // No need to get the commit for base branch changes
+ commit, err := gitRepo.LineBlame(head, gitRepo.Path, treePath, uint(line))
+ if err == nil {
+ commitID = commit.ID.String()
+ } else if !(strings.Contains(err.Error(), "exit status 128 - fatal: no such path") || notEnoughLines.MatchString(err.Error())) {
+ return nil, fmt.Errorf("LineBlame[%s, %s, %s, %d]: %w", pr.GetGitRefName(), gitRepo.Path, treePath, line, err)
+ }
+ }
+ }
+
+ // Only fetch diff if comment is review comment
+ if len(patch) == 0 && reviewID != 0 {
+ headCommitID, err := gitRepo.GetRefCommitID(pr.GetGitRefName())
+ if err != nil {
+ return nil, fmt.Errorf("GetRefCommitID[%s]: %w", pr.GetGitRefName(), err)
+ }
+ if len(commitID) == 0 {
+ commitID = headCommitID
+ }
+ reader, writer := io.Pipe()
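+ // Stream the raw file diff through a pipe and cut out the hunk surrounding the commented line.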
+ defer func() {
+ _ = reader.Close()
+ _ = writer.Close()
+ }()
+ go func() {
+ if err := git.GetRepoRawDiffForFile(gitRepo, pr.MergeBase, headCommitID, git.RawDiffNormal, treePath, writer); err != nil {
+ _ = writer.CloseWithError(fmt.Errorf("GetRawDiffForLine[%s, %s, %s, %s]: %w", gitRepo.Path, pr.MergeBase, headCommitID, treePath, err))
+ return
+ }
+ _ = writer.Close()
+ }()
+
+ patch, err = git.CutDiffAroundLine(reader, int64((&issues_model.Comment{Line: line}).UnsignedLine()), line < 0, setting.UI.CodeCommentLines)
+ if err != nil {
+ log.Error("Error whilst generating patch: %v", err)
+ return nil, err
+ }
+ }
+ return issues_model.CreateComment(ctx, &issues_model.CreateCommentOptions{
+ Type: issues_model.CommentTypeCode,
+ Doer: doer,
+ Repo: repo,
+ Issue: issue,
+ Content: content,
+ LineNum: line,
+ TreePath: treePath,
+ CommitSHA: commitID,
+ ReviewID: reviewID,
+ Patch: patch,
+ Invalidated: invalidated,
+ Attachments: attachments,
+ })
+}
+
+// SubmitReview creates a review out of the existing pending review or creates a new one if no pending review exists
+func SubmitReview(ctx context.Context, doer *user_model.User, gitRepo *git.Repository, issue *issues_model.Issue, reviewType issues_model.ReviewType, content, commitID string, attachmentUUIDs []string) (*issues_model.Review, *issues_model.Comment, error) {
+ if err := issue.LoadPullRequest(ctx); err != nil {
+ return nil, nil, err
+ }
+
+ pr := issue.PullRequest
+ var stale bool
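+ // Only approvals and rejections can be stale; they are marked stale when the
+ // PR content changed since the reviewed commit.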
+ if reviewType != issues_model.ReviewTypeApprove && reviewType != issues_model.ReviewTypeReject {
+ stale = false
+ } else {
+ headCommitID, err := gitRepo.GetRefCommitID(pr.GetGitRefName())
+ if err != nil {
+ return nil, nil, err
+ }
+
+ if headCommitID == commitID {
+ stale = false
+ } else {
+ stale, err = checkIfPRContentChanged(ctx, pr, commitID, headCommitID)
+ if err != nil {
+ return nil, nil, err
+ }
+ }
+ }
+
+ review, comm, err := issues_model.SubmitReview(ctx, doer, issue, reviewType, content, commitID, stale, attachmentUUIDs)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ mentions, err := issues_model.FindAndUpdateIssueMentions(ctx, issue, doer, comm.Content)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ notify_service.PullRequestReview(ctx, pr, review, comm, mentions)
+
+ for _, lines := range review.CodeComments {
+ for _, comments := range lines {
+ for _, codeComment := range comments {
+ mentions, err := issues_model.FindAndUpdateIssueMentions(ctx, issue, doer, codeComment.Content)
+ if err != nil {
+ return nil, nil, err
+ }
+ notify_service.PullRequestCodeComment(ctx, pr, codeComment, mentions)
+ }
+ }
+ }
+
+ return review, comm, nil
+}
+
+// DismissApprovalReviews dismisses all approval reviews because of new commits
+func DismissApprovalReviews(ctx context.Context, doer *user_model.User, pull *issues_model.PullRequest) error {
+ reviews, err := issues_model.FindReviews(ctx, issues_model.FindReviewOptions{
+ ListOptions: db.ListOptionsAll,
+ IssueID: pull.IssueID,
+ Types: []issues_model.ReviewType{issues_model.ReviewTypeApprove},
+ Dismissed: optional.Some(false),
+ })
+ if err != nil {
+ return err
+ }
+
+ if err := reviews.LoadIssues(ctx); err != nil {
+ return err
+ }
+
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ for _, review := range reviews {
+ if err := issues_model.DismissReview(ctx, review, true); err != nil {
+ return err
+ }
+
+ comment, err := issues_model.CreateComment(ctx, &issues_model.CreateCommentOptions{
+ Doer: doer,
+ Content: "New commits pushed, approval review dismissed automatically according to repository settings",
+ Type: issues_model.CommentTypeDismissReview,
+ ReviewID: review.ID,
+ Issue: review.Issue,
+ Repo: review.Issue.Repo,
+ })
+ if err != nil {
+ return err
+ }
+
+ comment.Review = review
+ comment.Poster = doer
+ comment.Issue = review.Issue
+
+ notify_service.PullReviewDismiss(ctx, doer, review, comment)
+ }
+ return nil
+ })
+}
+
+// DismissReview dismisses a stale review by a repo admin
+func DismissReview(ctx context.Context, reviewID, repoID int64, message string, doer *user_model.User, isDismiss, dismissPriors bool) (comment *issues_model.Comment, err error) {
+ review, err := issues_model.GetReviewByID(ctx, reviewID)
+ if err != nil {
+ return nil, err
+ }
+
+ if review.Type != issues_model.ReviewTypeApprove && review.Type != issues_model.ReviewTypeReject {
+ return nil, fmt.Errorf("not need to dismiss this review because it's type is not Approve or change request")
+ }
+
+ // load data for notify
+ if err := review.LoadAttributes(ctx); err != nil {
+ return nil, err
+ }
+
+ // Check if the review's repoID is the one we're currently expecting.
+ if review.Issue.RepoID != repoID {
+ return nil, fmt.Errorf("reviews's repository is not the same as the one we expect")
+ }
+
+ issue := review.Issue
+
+ if issue.IsClosed {
+ return nil, ErrDismissRequestOnClosedPR{}
+ }
+
+ if issue.IsPull {
+ if err := issue.LoadPullRequest(ctx); err != nil {
+ return nil, err
+ }
+ if issue.PullRequest.HasMerged {
+ return nil, ErrDismissRequestOnClosedPR{}
+ }
+ }
+
+ if err := issues_model.DismissReview(ctx, review, isDismiss); err != nil {
+ return nil, err
+ }
+
+ if dismissPriors {
+ reviews, err := issues_model.FindReviews(ctx, issues_model.FindReviewOptions{
+ IssueID: review.IssueID,
+ ReviewerID: review.ReviewerID,
+ Dismissed: optional.Some(false),
+ })
+ if err != nil {
+ return nil, err
+ }
+ for _, oldReview := range reviews {
+ if err = issues_model.DismissReview(ctx, oldReview, true); err != nil {
+ return nil, err
+ }
+ }
+ }
+
+ if !isDismiss {
+ return nil, nil
+ }
+
+ if err := review.Issue.LoadAttributes(ctx); err != nil {
+ return nil, err
+ }
+
+ comment, err = issues_model.CreateComment(ctx, &issues_model.CreateCommentOptions{
+ Doer: doer,
+ Content: message,
+ Type: issues_model.CommentTypeDismissReview,
+ ReviewID: review.ID,
+ Issue: review.Issue,
+ Repo: review.Issue.Repo,
+ })
+ if err != nil {
+ return nil, err
+ }
+
+ comment.Review = review
+ comment.Poster = doer
+ comment.Issue = review.Issue
+
+ notify_service.PullReviewDismiss(ctx, doer, review, comment)
+
+ return comment, nil
+}
diff --git a/services/pull/review_test.go b/services/pull/review_test.go
new file mode 100644
index 0000000..4cb3ad0
--- /dev/null
+++ b/services/pull/review_test.go
@@ -0,0 +1,49 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull_test
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ pull_service "code.gitea.io/gitea/services/pull"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestDismissReview(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ pull := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{})
+ require.NoError(t, pull.LoadIssue(db.DefaultContext))
+ issue := pull.Issue
+ require.NoError(t, issue.LoadRepo(db.DefaultContext))
+ reviewer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+ review, err := issues_model.CreateReview(db.DefaultContext, issues_model.CreateReviewOptions{
+ Issue: issue,
+ Reviewer: reviewer,
+ Type: issues_model.ReviewTypeReject,
+ })
+
+ require.NoError(t, err)
+ issue.IsClosed = true
+ pull.HasMerged = false
+ require.NoError(t, issues_model.UpdateIssueCols(db.DefaultContext, issue, "is_closed"))
+ require.NoError(t, pull.UpdateCols(db.DefaultContext, "has_merged"))
+ _, err = pull_service.DismissReview(db.DefaultContext, review.ID, issue.RepoID, "", &user_model.User{}, false, false)
+ require.Error(t, err)
+ assert.True(t, pull_service.IsErrDismissRequestOnClosedPR(err))
+
+ pull.HasMerged = true
+ pull.Issue.IsClosed = false
+ require.NoError(t, issues_model.UpdateIssueCols(db.DefaultContext, issue, "is_closed"))
+ require.NoError(t, pull.UpdateCols(db.DefaultContext, "has_merged"))
+ _, err = pull_service.DismissReview(db.DefaultContext, review.ID, issue.RepoID, "", &user_model.User{}, false, false)
+ require.Error(t, err)
+ assert.True(t, pull_service.IsErrDismissRequestOnClosedPR(err))
+}
diff --git a/services/pull/temp_repo.go b/services/pull/temp_repo.go
new file mode 100644
index 0000000..36bdbde
--- /dev/null
+++ b/services/pull/temp_repo.go
@@ -0,0 +1,196 @@
+// Copyright 2019 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ repo_module "code.gitea.io/gitea/modules/repository"
+)
+
+// Temporary repos created here use standard branch names to help simplify
+// merging code
+const (
+ baseBranch = "base" // equivalent to pr.BaseBranch
+ trackingBranch = "tracking" // equivalent to pr.HeadBranch
+ stagingBranch = "staging" // this is used for a working branch
+)
+
+type prContext struct {
+ context.Context
+ tmpBasePath string
+ pr *issues_model.PullRequest
+ outbuf *strings.Builder // we keep these around to help reduce needless buffer recreation,
+ errbuf *strings.Builder // any use should be preceded by a Reset, and preferably followed by one after use
+}
+
+func (ctx *prContext) RunOpts() *git.RunOpts {
+ ctx.outbuf.Reset()
+ ctx.errbuf.Reset()
+ return &git.RunOpts{
+ Dir: ctx.tmpBasePath,
+ Stdout: ctx.outbuf,
+ Stderr: ctx.errbuf,
+ }
+}
+
+// createTemporaryRepoForPR creates a temporary repo with "base" for pr.BaseBranch and "tracking" for pr.HeadBranch
+// it also creates a second base branch called "original_base"
+func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest) (prCtx *prContext, cancel context.CancelFunc, err error) {
+ if err := pr.LoadHeadRepo(ctx); err != nil {
+ log.Error("%-v LoadHeadRepo: %v", pr, err)
+ return nil, nil, fmt.Errorf("%v LoadHeadRepo: %w", pr, err)
+ } else if pr.HeadRepo == nil {
+ log.Error("%-v HeadRepo %d does not exist", pr, pr.HeadRepoID)
+ return nil, nil, &repo_model.ErrRepoNotExist{
+ ID: pr.HeadRepoID,
+ }
+ } else if err := pr.LoadBaseRepo(ctx); err != nil {
+ log.Error("%-v LoadBaseRepo: %v", pr, err)
+ return nil, nil, fmt.Errorf("%v LoadBaseRepo: %w", pr, err)
+ } else if pr.BaseRepo == nil {
+ log.Error("%-v BaseRepo %d does not exist", pr, pr.BaseRepoID)
+ return nil, nil, &repo_model.ErrRepoNotExist{
+ ID: pr.BaseRepoID,
+ }
+ } else if err := pr.HeadRepo.LoadOwner(ctx); err != nil {
+ log.Error("%-v HeadRepo.LoadOwner: %v", pr, err)
+ return nil, nil, fmt.Errorf("%v HeadRepo.LoadOwner: %w", pr, err)
+ } else if err := pr.BaseRepo.LoadOwner(ctx); err != nil {
+ log.Error("%-v BaseRepo.LoadOwner: %v", pr, err)
+ return nil, nil, fmt.Errorf("%v BaseRepo.LoadOwner: %w", pr, err)
+ }
+
+ // Clone base repo.
+ tmpBasePath, err := repo_module.CreateTemporaryPath("pull")
+ if err != nil {
+ log.Error("CreateTemporaryPath[%-v]: %v", pr, err)
+ return nil, nil, err
+ }
+ prCtx = &prContext{
+ Context: ctx,
+ tmpBasePath: tmpBasePath,
+ pr: pr,
+ outbuf: &strings.Builder{},
+ errbuf: &strings.Builder{},
+ }
+ cancel = func() {
+ if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
+ log.Error("Error whilst removing removing temporary repo for %-v: %v", pr, err)
+ }
+ }
+
+ baseRepoPath := pr.BaseRepo.RepoPath()
+ headRepoPath := pr.HeadRepo.RepoPath()
+
+ if err := git.InitRepository(ctx, tmpBasePath, false, pr.BaseRepo.ObjectFormatName); err != nil {
+ log.Error("Unable to init tmpBasePath for %-v: %v", pr, err)
+ cancel()
+ return nil, nil, err
+ }
+
+ remoteRepoName := "head_repo"
+ baseBranch := "base"
+
+ fetchArgs := git.TrustedCmdArgs{"--no-tags"}
+ if git.CheckGitVersionAtLeast("2.25.0") == nil {
+ // Writing the commit graph can be slow and is not needed here
+ fetchArgs = append(fetchArgs, "--no-write-commit-graph")
+ }
+
+ // addCacheRepo registers the objects of cacheRepoPath as git alternates of repoPath
+ addCacheRepo := func(repoPath, cacheRepoPath string) error {
+ p := filepath.Join(repoPath, ".git", "objects", "info", "alternates")
+ f, err := os.OpenFile(p, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0o600)
+ if err != nil {
+ log.Error("Could not create .git/objects/info/alternates file in %s: %v", repoPath, err)
+ return err
+ }
+ defer f.Close()
+ data := filepath.Join(cacheRepoPath, "objects")
+ if _, err := fmt.Fprintln(f, data); err != nil {
+ log.Error("Could not write to .git/objects/info/alternates file in %s: %v", repoPath, err)
+ return err
+ }
+ return nil
+ }
+
+ // Set up the base repository: add its objects as alternates and register it as the origin remote.
+ if err := addCacheRepo(tmpBasePath, baseRepoPath); err != nil {
+ log.Error("%-v Unable to add base repository to temporary repo [%s -> %s]: %v", pr, pr.BaseRepo.FullName(), tmpBasePath, err)
+ cancel()
+ return nil, nil, fmt.Errorf("Unable to add base repository to temporary repo [%s -> tmpBasePath]: %w", pr.BaseRepo.FullName(), err)
+ }
+
+ if err := git.NewCommand(ctx, "remote", "add", "-t").AddDynamicArguments(pr.BaseBranch).AddArguments("-m").AddDynamicArguments(pr.BaseBranch).AddDynamicArguments("origin", baseRepoPath).
+ Run(prCtx.RunOpts()); err != nil {
+ log.Error("%-v Unable to add base repository as origin [%s -> %s]: %v\n%s\n%s", pr, pr.BaseRepo.FullName(), tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ cancel()
+ return nil, nil, fmt.Errorf("Unable to add base repository as origin [%s -> tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ }
+
+ if err := git.NewCommand(ctx, "fetch", "origin").AddArguments(fetchArgs...).AddDashesAndList(pr.BaseBranch+":"+baseBranch, pr.BaseBranch+":original_"+baseBranch).
+ Run(prCtx.RunOpts()); err != nil {
+ log.Error("%-v Unable to fetch origin base branch [%s:%s -> base, original_base in %s]: %v:\n%s\n%s", pr, pr.BaseRepo.FullName(), pr.BaseBranch, tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ cancel()
+ return nil, nil, fmt.Errorf("Unable to fetch origin base branch [%s:%s -> base, original_base in tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), pr.BaseBranch, err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ }
+
+ if err := git.NewCommand(ctx, "symbolic-ref").AddDynamicArguments("HEAD", git.BranchPrefix+baseBranch).
+ Run(prCtx.RunOpts()); err != nil {
+ log.Error("%-v Unable to set HEAD as base branch in [%s]: %v\n%s\n%s", pr, tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ cancel()
+ return nil, nil, fmt.Errorf("Unable to set HEAD as base branch in tmpBasePath: %w\n%s\n%s", err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ }
+
+ if err := addCacheRepo(tmpBasePath, headRepoPath); err != nil {
+ log.Error("%-v Unable to add head repository to temporary repo [%s -> %s]: %v", pr, pr.HeadRepo.FullName(), tmpBasePath, err)
+ cancel()
+ return nil, nil, fmt.Errorf("Unable to add head base repository to temporary repo [%s -> tmpBasePath]: %w", pr.HeadRepo.FullName(), err)
+ }
+
+ if err := git.NewCommand(ctx, "remote", "add").AddDynamicArguments(remoteRepoName, headRepoPath).
+ Run(prCtx.RunOpts()); err != nil {
+ log.Error("%-v Unable to add head repository as head_repo [%s -> %s]: %v\n%s\n%s", pr, pr.HeadRepo.FullName(), tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ cancel()
+ return nil, nil, fmt.Errorf("Unable to add head repository as head_repo [%s -> tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ }
+
+ trackingBranch := "tracking"
+ objectFormat := git.ObjectFormatFromName(pr.BaseRepo.ObjectFormatName)
+ // Fetch head branch
+ var headBranch string
+ if pr.Flow == issues_model.PullRequestFlowGithub {
+ headBranch = git.BranchPrefix + pr.HeadBranch
+ } else if len(pr.HeadCommitID) == objectFormat.FullLength() { // for a not-yet-created pull request
+ headBranch = pr.HeadCommitID
+ } else {
+ headBranch = pr.GetGitRefName()
+ }
+ if err := git.NewCommand(ctx, "fetch").AddArguments(fetchArgs...).AddDynamicArguments(remoteRepoName, headBranch+":"+trackingBranch).
+ Run(prCtx.RunOpts()); err != nil {
+ cancel()
+ if !git.IsBranchExist(ctx, pr.HeadRepo.RepoPath(), pr.HeadBranch) {
+ return nil, nil, git_model.ErrBranchNotExist{
+ BranchName: pr.HeadBranch,
+ }
+ }
+ log.Error("%-v Unable to fetch head_repo head branch [%s:%s -> tracking in %s]: %v:\n%s\n%s", pr, pr.HeadRepo.FullName(), pr.HeadBranch, tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ return nil, nil, fmt.Errorf("Unable to fetch head_repo head branch [%s:%s -> tracking in tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), headBranch, err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ }
+ prCtx.outbuf.Reset()
+ prCtx.errbuf.Reset()
+
+ return prCtx, cancel, nil
+}
diff --git a/services/pull/update.go b/services/pull/update.go
new file mode 100644
index 0000000..dbc1b71
--- /dev/null
+++ b/services/pull/update.go
@@ -0,0 +1,180 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "context"
+ "fmt"
+
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/repository"
+)
+
+// Update updates pull request with base branch.
+func Update(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, message string, rebase bool) error {
+ if pr.Flow == issues_model.PullRequestFlowAGit {
+ // TODO: update of agit flow pull request's head branch is unsupported
+ return fmt.Errorf("update of agit flow pull request's head branch is unsupported")
+ }
+
+ pullWorkingPool.CheckIn(fmt.Sprint(pr.ID))
+ defer pullWorkingPool.CheckOut(fmt.Sprint(pr.ID))
+
+ diffCount, err := GetDiverging(ctx, pr)
+ if err != nil {
+ return err
+ } else if diffCount.Behind == 0 {
+ return fmt.Errorf("HeadBranch of PR %d is up to date", pr.Index)
+ }
+
+ if rebase {
+ defer func() {
+ AddTestPullRequestTask(ctx, doer, pr.BaseRepo.ID, pr.BaseBranch, false, "", "", 0)
+ }()
+
+ return updateHeadByRebaseOnToBase(ctx, pr, doer)
+ }
+
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ log.Error("unable to load BaseRepo for %-v during update-by-merge: %v", pr, err)
+ return fmt.Errorf("unable to load BaseRepo for PR[%d] during update-by-merge: %w", pr.ID, err)
+ }
+ if err := pr.LoadHeadRepo(ctx); err != nil {
+ log.Error("unable to load HeadRepo for PR %-v during update-by-merge: %v", pr, err)
+ return fmt.Errorf("unable to load HeadRepo for PR[%d] during update-by-merge: %w", pr.ID, err)
+ }
+ if pr.HeadRepo == nil {
+ // LoadHeadRepo will swallow ErrRepoNotExist so if pr.HeadRepo is still nil recreate the error
+ err := repo_model.ErrRepoNotExist{
+ ID: pr.HeadRepoID,
+ }
+ log.Error("unable to load HeadRepo for PR %-v during update-by-merge: %v", pr, err)
+ return fmt.Errorf("unable to load HeadRepo for PR[%d] during update-by-merge: %w", pr.ID, err)
+ }
+
+ // use merge functions but switch repos and branches
+ reversePR := &issues_model.PullRequest{
+ ID: pr.ID,
+
+ HeadRepoID: pr.BaseRepoID,
+ HeadRepo: pr.BaseRepo,
+ HeadBranch: pr.BaseBranch,
+
+ BaseRepoID: pr.HeadRepoID,
+ BaseRepo: pr.HeadRepo,
+ BaseBranch: pr.HeadBranch,
+ }
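+ // Merging this reverse PR pushes the base branch's commits into the PR's head branch.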
+
+ _, err = doMergeAndPush(ctx, reversePR, doer, repo_model.MergeStyleMerge, "", message, repository.PushTriggerPRUpdateWithBase)
+
+ defer func() {
+ AddTestPullRequestTask(ctx, doer, reversePR.HeadRepo.ID, reversePR.HeadBranch, false, "", "", 0)
+ }()
+
+ return err
+}
+
+// IsUserAllowedToUpdate check if user is allowed to update PR with given permissions and branch protections
+func IsUserAllowedToUpdate(ctx context.Context, pull *issues_model.PullRequest, user *user_model.User) (mergeAllowed, rebaseAllowed bool, err error) {
+ if pull.Flow == issues_model.PullRequestFlowAGit {
+ return false, false, nil
+ }
+
+ if user == nil {
+ return false, false, nil
+ }
+ headRepoPerm, err := access_model.GetUserRepoPermission(ctx, pull.HeadRepo, user)
+ if err != nil {
+ if repo_model.IsErrUnitTypeNotExist(err) {
+ return false, false, nil
+ }
+ return false, false, err
+ }
+
+ if err := pull.LoadBaseRepo(ctx); err != nil {
+ return false, false, err
+ }
+
+ pr := &issues_model.PullRequest{
+ HeadRepoID: pull.BaseRepoID,
+ HeadRepo: pull.BaseRepo,
+ BaseRepoID: pull.HeadRepoID,
+ BaseRepo: pull.HeadRepo,
+ HeadBranch: pull.BaseBranch,
+ BaseBranch: pull.HeadBranch,
+ }
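+ // The update pushes base into head, so merge permission is checked against
+ // this reversed pull request.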
+
+ pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
+ if err != nil {
+ return false, false, err
+ }
+
+ // rebase is not possible on a protected branch because it requires a force push
+ if pb == nil {
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ return false, false, err
+ }
+ prUnit, err := pr.BaseRepo.GetUnit(ctx, unit.TypePullRequests)
+ if err != nil {
+ if repo_model.IsErrUnitTypeNotExist(err) {
+ return false, false, nil
+ }
+ log.Error("pr.BaseRepo.GetUnit(unit.TypePullRequests): %v", err)
+ return false, false, err
+ }
+ rebaseAllowed = prUnit.PullRequestsConfig().AllowRebaseUpdate
+ }
+
+ // The update function needs push permission
+ if pb != nil {
+ pb.Repo = pull.BaseRepo
+ if !pb.CanUserPush(ctx, user) {
+ return false, false, nil
+ }
+ }
+
+ baseRepoPerm, err := access_model.GetUserRepoPermission(ctx, pull.BaseRepo, user)
+ if err != nil {
+ return false, false, err
+ }
+
+ mergeAllowed, err = IsUserAllowedToMerge(ctx, pr, headRepoPerm, user)
+ if err != nil {
+ return false, false, err
+ }
+
+ if pull.AllowMaintainerEdit {
+ mergeAllowedMaintainer, err := IsUserAllowedToMerge(ctx, pr, baseRepoPerm, user)
+ if err != nil {
+ return false, false, err
+ }
+
+ mergeAllowed = mergeAllowed || mergeAllowedMaintainer
+ }
+
+ return mergeAllowed, rebaseAllowed, nil
+}
+
+// GetDiverging determines how many commits a PR is ahead or behind the PR base branch
+func GetDiverging(ctx context.Context, pr *issues_model.PullRequest) (*git.DivergeObject, error) {
+ log.Trace("GetDiverging[%-v]: compare commits", pr)
+ prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr)
+ if err != nil {
+ if !git_model.IsErrBranchNotExist(err) {
+ log.Error("CreateTemporaryRepoForPR %-v: %v", pr, err)
+ }
+ return nil, err
+ }
+ defer cancel()
+
+ diff, err := git.GetDivergingCommits(ctx, prCtx.tmpBasePath, baseBranch, trackingBranch)
+ return &diff, err
+}
diff --git a/services/pull/update_rebase.go b/services/pull/update_rebase.go
new file mode 100644
index 0000000..3e2a7be
--- /dev/null
+++ b/services/pull/update_rebase.go
@@ -0,0 +1,107 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+)
+
+// updateHeadByRebaseOnToBase handles updating a PR's head branch by rebasing it on the PR's current base branch
+func updateHeadByRebaseOnToBase(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User) error {
+ // "Clone" base repo and add the cache headers for the head repo and branch
+ mergeCtx, cancel, err := createTemporaryRepoForMerge(ctx, pr, doer, "")
+ if err != nil {
+ return err
+ }
+ defer cancel()
+
+ // Determine the old merge-base before the rebase - we use this for LFS push later on
+ oldMergeBase, _, _ := git.NewCommand(ctx, "merge-base").AddDashesAndList(baseBranch, trackingBranch).RunStdString(&git.RunOpts{Dir: mergeCtx.tmpBasePath})
+ oldMergeBase = strings.TrimSpace(oldMergeBase)
+
+ // Rebase the tracking branch on to the base as the staging branch
+ if err := rebaseTrackingOnToBase(mergeCtx, repo_model.MergeStyleRebaseUpdate); err != nil {
+ return err
+ }
+
+ if setting.LFS.StartServer {
+ // Now we need to ensure that the head repository contains any LFS objects between the new base and the old mergebase
+ // It's questionable about where this should go - either after or before the push
+ // I think in the interests of data safety - failures to push to the lfs should prevent
+ // the push as you can always re-rebase.
+ if err := LFSPush(ctx, mergeCtx.tmpBasePath, baseBranch, oldMergeBase, &issues_model.PullRequest{
+ HeadRepoID: pr.BaseRepoID,
+ BaseRepoID: pr.HeadRepoID,
+ }); err != nil {
+ log.Error("Unable to push lfs objects between %s and %s up to head branch in %-v: %v", baseBranch, oldMergeBase, pr, err)
+ return err
+ }
+ }
+
+ // Now determine who the pushing author should be
+ var headUser *user_model.User
+ if err := pr.HeadRepo.LoadOwner(ctx); err != nil {
+ if !user_model.IsErrUserNotExist(err) {
+ log.Error("Can't find user: %d for head repository in %-v - %v", pr.HeadRepo.OwnerID, pr, err)
+ return err
+ }
+ log.Error("Can't find user: %d for head repository in %-v - defaulting to doer: %-v - %v", pr.HeadRepo.OwnerID, pr, doer, err)
+ headUser = doer
+ } else {
+ headUser = pr.HeadRepo.Owner
+ }
+
+ pushCmd := git.NewCommand(ctx, "push", "-f", "head_repo").
+ AddDynamicArguments(stagingBranch + ":" + git.BranchPrefix + pr.HeadBranch)
+
+ // Push back to the head repository.
+ // TODO: this causes an API call to "/api/internal/hook/post-receive/...",
+ // which prevents us from doing the whole merge in one DB transaction
+ mergeCtx.outbuf.Reset()
+ mergeCtx.errbuf.Reset()
+
+ if err := pushCmd.Run(&git.RunOpts{
+ Env: repo_module.FullPushingEnvironment(
+ headUser,
+ doer,
+ pr.HeadRepo,
+ pr.HeadRepo.Name,
+ pr.ID,
+ ),
+ Dir: mergeCtx.tmpBasePath,
+ Stdout: mergeCtx.outbuf,
+ Stderr: mergeCtx.errbuf,
+ }); err != nil {
+ if strings.Contains(mergeCtx.errbuf.String(), "non-fast-forward") {
+ return &git.ErrPushOutOfDate{
+ StdOut: mergeCtx.outbuf.String(),
+ StdErr: mergeCtx.errbuf.String(),
+ Err: err,
+ }
+ } else if strings.Contains(mergeCtx.errbuf.String(), "! [remote rejected]") {
+ err := &git.ErrPushRejected{
+ StdOut: mergeCtx.outbuf.String(),
+ StdErr: mergeCtx.errbuf.String(),
+ Err: err,
+ }
+ err.GenerateMessage()
+ return err
+ }
+ return fmt.Errorf("git push: %s", mergeCtx.errbuf.String())
+ }
+ mergeCtx.outbuf.Reset()
+ mergeCtx.errbuf.Reset()
+
+ return nil
+}
diff --git a/services/release/release.go b/services/release/release.go
new file mode 100644
index 0000000..99851ed
--- /dev/null
+++ b/services/release/release.go
@@ -0,0 +1,470 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package release
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/attachment"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+type AttachmentChange struct {
+ Action string // "add", "delete", "update
+ Type string // "attachment", "external"
+ UUID string
+ Name string
+ ExternalURL string
+}
+
+func createTag(ctx context.Context, gitRepo *git.Repository, rel *repo_model.Release, msg string) (bool, error) {
+ err := rel.LoadAttributes(ctx)
+ if err != nil {
+ return false, err
+ }
+
+ err = rel.Repo.MustNotBeArchived()
+ if err != nil {
+ return false, err
+ }
+
+ var created bool
+ // Only actually create the tag when publishing.
+ if !rel.IsDraft {
+ if !gitRepo.IsTagExist(rel.TagName) {
+ if err := rel.LoadAttributes(ctx); err != nil {
+ log.Error("LoadAttributes: %v", err)
+ return false, err
+ }
+
+ protectedTags, err := git_model.GetProtectedTags(ctx, rel.Repo.ID)
+ if err != nil {
+ return false, fmt.Errorf("GetProtectedTags: %w", err)
+ }
+
+ // Trim '--' prefix to prevent command line argument vulnerability.
+ rel.TagName = strings.TrimPrefix(rel.TagName, "--")
+ isAllowed, err := git_model.IsUserAllowedToControlTag(ctx, protectedTags, rel.TagName, rel.PublisherID)
+ if err != nil {
+ return false, err
+ }
+ if !isAllowed {
+ return false, models.ErrProtectedTagName{
+ TagName: rel.TagName,
+ }
+ }
+
+ commit, err := gitRepo.GetCommit(rel.Target)
+ if err != nil {
+ return false, err
+ }
+
+ if len(msg) > 0 {
+ if err = gitRepo.CreateAnnotatedTag(rel.TagName, msg, commit.ID.String()); err != nil {
+ if strings.Contains(err.Error(), "is not a valid tag name") {
+ return false, models.ErrInvalidTagName{
+ TagName: rel.TagName,
+ }
+ }
+ return false, err
+ }
+ } else if err = gitRepo.CreateTag(rel.TagName, commit.ID.String()); err != nil {
+ if strings.Contains(err.Error(), "is not a valid tag name") {
+ return false, models.ErrInvalidTagName{
+ TagName: rel.TagName,
+ }
+ }
+ return false, err
+ }
+ created = true
+ rel.LowerTagName = strings.ToLower(rel.TagName)
+
+ objectFormat := git.ObjectFormatFromName(rel.Repo.ObjectFormatName)
+ commits := repository.NewPushCommits()
+ commits.HeadCommit = repository.CommitToPushCommit(commit)
+ commits.CompareURL = rel.Repo.ComposeCompareURL(objectFormat.EmptyObjectID().String(), commit.ID.String())
+
+ refFullName := git.RefNameFromTag(rel.TagName)
+ notify_service.PushCommits(
+ ctx, rel.Publisher, rel.Repo,
+ &repository.PushUpdateOptions{
+ RefFullName: refFullName,
+ OldCommitID: objectFormat.EmptyObjectID().String(),
+ NewCommitID: commit.ID.String(),
+ }, commits)
+ notify_service.CreateRef(ctx, rel.Publisher, rel.Repo, refFullName, commit.ID.String())
+ rel.CreatedUnix = timeutil.TimeStampNow()
+ }
+ commit, err := gitRepo.GetTagCommit(rel.TagName)
+ if err != nil {
+ return false, fmt.Errorf("GetTagCommit: %w", err)
+ }
+
+ rel.Sha1 = commit.ID.String()
+ rel.NumCommits, err = commit.CommitsCount()
+ if err != nil {
+ return false, fmt.Errorf("CommitsCount: %w", err)
+ }
+
+ if rel.PublisherID <= 0 {
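+ // Fall back to the tag commit's author as the publisher when none was recorded.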
+ u, err := user_model.GetUserByEmail(ctx, commit.Author.Email)
+ if err == nil {
+ rel.PublisherID = u.ID
+ }
+ }
+ } else {
+ rel.CreatedUnix = timeutil.TimeStampNow()
+ }
+ return created, nil
+}
+
+// CreateRelease creates a new release of repository.
+func CreateRelease(gitRepo *git.Repository, rel *repo_model.Release, msg string, attachmentChanges []*AttachmentChange) error {
+ has, err := repo_model.IsReleaseExist(gitRepo.Ctx, rel.RepoID, rel.TagName)
+ if err != nil {
+ return err
+ } else if has {
+ return repo_model.ErrReleaseAlreadyExist{
+ TagName: rel.TagName,
+ }
+ }
+
+ if _, err = createTag(gitRepo.Ctx, gitRepo, rel, msg); err != nil {
+ return err
+ }
+
+ rel.LowerTagName = strings.ToLower(rel.TagName)
+ if err = db.Insert(gitRepo.Ctx, rel); err != nil {
+ return err
+ }
+
+ addAttachmentUUIDs := make(container.Set[string])
+
+ for _, attachmentChange := range attachmentChanges {
+ if attachmentChange.Action != "add" {
+ return fmt.Errorf("can only create new attachments when creating release")
+ }
+ switch attachmentChange.Type {
+ case "attachment":
+ if attachmentChange.UUID == "" {
+ return fmt.Errorf("new attachment should have a uuid")
+ }
+ addAttachmentUUIDs.Add(attachmentChange.UUID)
+ case "external":
+ if attachmentChange.Name == "" || attachmentChange.ExternalURL == "" {
+ return fmt.Errorf("new external attachment should have a name and external url")
+ }
+
+ _, err = attachment.NewExternalAttachment(gitRepo.Ctx, &repo_model.Attachment{
+ Name: attachmentChange.Name,
+ UploaderID: rel.PublisherID,
+ RepoID: rel.RepoID,
+ ReleaseID: rel.ID,
+ ExternalURL: attachmentChange.ExternalURL,
+ })
+ if err != nil {
+ return err
+ }
+ default:
+ if attachmentChange.Type == "" {
+ return fmt.Errorf("missing attachment type")
+ }
+ return fmt.Errorf("unknown attachment type: '%q'", attachmentChange.Type)
+ }
+ }
+
+ if err = repo_model.AddReleaseAttachments(gitRepo.Ctx, rel.ID, addAttachmentUUIDs.Values()); err != nil {
+ return err
+ }
+
+ if !rel.IsDraft {
+ notify_service.NewRelease(gitRepo.Ctx, rel)
+ }
+
+ return nil
+}
+
+// CreateNewTag creates a new repository tag
+func CreateNewTag(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, commit, tagName, msg string) error {
+ has, err := repo_model.IsReleaseExist(ctx, repo.ID, tagName)
+ if err != nil {
+ return err
+ } else if has {
+ return models.ErrTagAlreadyExists{
+ TagName: tagName,
+ }
+ }
+
+ gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, repo)
+ if err != nil {
+ return err
+ }
+ defer closer.Close()
+
+ rel := &repo_model.Release{
+ RepoID: repo.ID,
+ Repo: repo,
+ PublisherID: doer.ID,
+ Publisher: doer,
+ TagName: tagName,
+ Target: commit,
+ IsDraft: false,
+ IsPrerelease: false,
+ IsTag: true,
+ }
+
+ if _, err = createTag(ctx, gitRepo, rel, msg); err != nil {
+ return err
+ }
+
+ return db.Insert(ctx, rel)
+}
+
+// UpdateRelease updates a release's information and attachments, and creates the
+// tag if the release is not a draft and the tag does not yet exist.
+// attachmentChanges describes the attachment operations to apply: "add" entries
+// are attached to the release (external attachments are created first), "update"
+// entries rename or re-point existing attachments, and "delete" entries remove
+// them from the release and from storage.
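+//
+// An illustrative call (uuidA and uuidB are hypothetical) that renames one
+// attachment and deletes another:
+//
+//	err := UpdateRelease(ctx, doer, gitRepo, rel, false, []*AttachmentChange{
+//		{Action: "update", UUID: uuidA, Name: "new-name.txt"},
+//		{Action: "delete", UUID: uuidB},
+//	})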
+func UpdateRelease(ctx context.Context, doer *user_model.User, gitRepo *git.Repository, rel *repo_model.Release, createdFromTag bool, attachmentChanges []*AttachmentChange,
+) error {
+ if rel.ID == 0 {
+		return errors.New("UpdateRelease only accepts an existing release")
+ }
+ isCreated, err := createTag(gitRepo.Ctx, gitRepo, rel, "")
+ if err != nil {
+ return err
+ }
+ rel.LowerTagName = strings.ToLower(rel.TagName)
+
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ if err = repo_model.UpdateRelease(ctx, rel); err != nil {
+ return err
+ }
+
+ addAttachmentUUIDs := make(container.Set[string])
+ delAttachmentUUIDs := make(container.Set[string])
+ updateAttachmentUUIDs := make(container.Set[string])
+ updateAttachments := make(container.Set[*AttachmentChange])
+
+ for _, attachmentChange := range attachmentChanges {
+ switch attachmentChange.Action {
+ case "add":
+ switch attachmentChange.Type {
+ case "attachment":
+ if attachmentChange.UUID == "" {
+					return fmt.Errorf("new attachment should have a uuid (%s)", attachmentChange.Name)
+ }
+ addAttachmentUUIDs.Add(attachmentChange.UUID)
+ case "external":
+ if attachmentChange.Name == "" || attachmentChange.ExternalURL == "" {
+ return fmt.Errorf("new external attachment should have a name and external url")
+ }
+ _, err := attachment.NewExternalAttachment(ctx, &repo_model.Attachment{
+ Name: attachmentChange.Name,
+ UploaderID: doer.ID,
+ RepoID: rel.RepoID,
+ ReleaseID: rel.ID,
+ ExternalURL: attachmentChange.ExternalURL,
+ })
+ if err != nil {
+ return err
+ }
+ default:
+ if attachmentChange.Type == "" {
+ return fmt.Errorf("missing attachment type")
+ }
+ return fmt.Errorf("unknown attachment type: %q", attachmentChange.Type)
+ }
+ case "delete":
+ if attachmentChange.UUID == "" {
+ return fmt.Errorf("attachment deletion should have a uuid")
+ }
+ delAttachmentUUIDs.Add(attachmentChange.UUID)
+ case "update":
+ updateAttachmentUUIDs.Add(attachmentChange.UUID)
+ updateAttachments.Add(attachmentChange)
+ default:
+ if attachmentChange.Action == "" {
+ return fmt.Errorf("missing attachment action")
+ }
+ return fmt.Errorf("unknown attachment action: %q", attachmentChange.Action)
+ }
+ }
+
+ if err = repo_model.AddReleaseAttachments(ctx, rel.ID, addAttachmentUUIDs.Values()); err != nil {
+ return fmt.Errorf("AddReleaseAttachments: %w", err)
+ }
+
+ deletedUUIDs := make(container.Set[string])
+ if len(delAttachmentUUIDs) > 0 {
+ // Check attachments
+ attachments, err := repo_model.GetAttachmentsByUUIDs(ctx, delAttachmentUUIDs.Values())
+ if err != nil {
+ return fmt.Errorf("GetAttachmentsByUUIDs [uuids: %v]: %w", delAttachmentUUIDs, err)
+ }
+ for _, attach := range attachments {
+ if attach.ReleaseID != rel.ID {
+ return util.SilentWrap{
+ Message: "delete attachment of release permission denied",
+ Err: util.ErrPermissionDenied,
+ }
+ }
+ deletedUUIDs.Add(attach.UUID)
+ }
+
+ if _, err := repo_model.DeleteAttachments(ctx, attachments, true); err != nil {
+ return fmt.Errorf("DeleteAttachments [uuids: %v]: %w", delAttachmentUUIDs, err)
+ }
+ }
+
+ if len(updateAttachmentUUIDs) > 0 {
+ // Check attachments
+ attachments, err := repo_model.GetAttachmentsByUUIDs(ctx, updateAttachmentUUIDs.Values())
+ if err != nil {
+ return fmt.Errorf("GetAttachmentsByUUIDs [uuids: %v]: %w", updateAttachmentUUIDs, err)
+ }
+ for _, attach := range attachments {
+ if attach.ReleaseID != rel.ID {
+ return util.SilentWrap{
+ Message: "update attachment of release permission denied",
+ Err: util.ErrPermissionDenied,
+ }
+ }
+ }
+ }
+
+ for attachmentChange := range updateAttachments {
+ if !deletedUUIDs.Contains(attachmentChange.UUID) {
+ if err = repo_model.UpdateAttachmentByUUID(ctx, &repo_model.Attachment{
+ UUID: attachmentChange.UUID,
+ Name: attachmentChange.Name,
+ ExternalURL: attachmentChange.ExternalURL,
+ }, "name", "external_url"); err != nil {
+ return err
+ }
+ }
+ }
+
+ if err := committer.Commit(); err != nil {
+ return err
+ }
+
+ for _, uuid := range delAttachmentUUIDs.Values() {
+ if err := storage.Attachments.Delete(repo_model.AttachmentRelativePath(uuid)); err != nil {
+			// Even if deleting the file failed, the attachment has already been removed
+			// from the database, so we only log the error instead of returning it.
+			// Users will have to delete these attachment files manually, or we need a way
+			// to synchronize the database attachment table with the attachment storage.
+ log.Error("delete attachment[uuid: %s] failed: %v", uuid, err)
+ }
+ }
+
+ if !rel.IsDraft {
+ if createdFromTag || isCreated {
+ notify_service.NewRelease(gitRepo.Ctx, rel)
+ return nil
+ }
+ notify_service.UpdateRelease(gitRepo.Ctx, doer, rel)
+ }
+ return nil
+}
+
+// DeleteReleaseByID deletes a release and its corresponding Git tag by the given ID.
+func DeleteReleaseByID(ctx context.Context, repo *repo_model.Repository, rel *repo_model.Release, doer *user_model.User, delTag bool) error {
+ if delTag {
+ protectedTags, err := git_model.GetProtectedTags(ctx, rel.RepoID)
+ if err != nil {
+ return fmt.Errorf("GetProtectedTags: %w", err)
+ }
+ isAllowed, err := git_model.IsUserAllowedToControlTag(ctx, protectedTags, rel.TagName, rel.PublisherID)
+ if err != nil {
+ return err
+ }
+ if !isAllowed {
+ return models.ErrProtectedTagName{
+ TagName: rel.TagName,
+ }
+ }
+
+ err = repo_model.DeleteArchiveDownloadCountForRelease(ctx, rel.ID)
+ if err != nil {
+ return err
+ }
+
+ if stdout, _, err := git.NewCommand(ctx, "tag", "-d").AddDashesAndList(rel.TagName).
+ SetDescription(fmt.Sprintf("DeleteReleaseByID (git tag -d): %d", rel.ID)).
+ RunStdString(&git.RunOpts{Dir: repo.RepoPath()}); err != nil && !strings.Contains(err.Error(), "not found") {
+ log.Error("DeleteReleaseByID (git tag -d): %d in %v Failed:\nStdout: %s\nError: %v", rel.ID, repo, stdout, err)
+ return fmt.Errorf("git tag -d: %w", err)
+ }
+
+ refName := git.RefNameFromTag(rel.TagName)
+ objectFormat := git.ObjectFormatFromName(repo.ObjectFormatName)
+ notify_service.PushCommits(
+ ctx, doer, repo,
+ &repository.PushUpdateOptions{
+ RefFullName: refName,
+ OldCommitID: rel.Sha1,
+ NewCommitID: objectFormat.EmptyObjectID().String(),
+ }, repository.NewPushCommits())
+ notify_service.DeleteRef(ctx, doer, repo, refName)
+
+ if _, err := db.DeleteByID[repo_model.Release](ctx, rel.ID); err != nil {
+ return fmt.Errorf("DeleteReleaseByID: %w", err)
+ }
+ } else {
+ rel.IsTag = true
+
+ if err := repo_model.UpdateRelease(ctx, rel); err != nil {
+ return fmt.Errorf("Update: %w", err)
+ }
+ }
+
+ rel.Repo = repo
+ if err := rel.LoadAttributes(ctx); err != nil {
+ return fmt.Errorf("LoadAttributes: %w", err)
+ }
+
+ if err := repo_model.DeleteAttachmentsByRelease(ctx, rel.ID); err != nil {
+ return fmt.Errorf("DeleteAttachments: %w", err)
+ }
+
+ for i := range rel.Attachments {
+ attachment := rel.Attachments[i]
+ if err := storage.Attachments.Delete(attachment.RelativePath()); err != nil {
+ log.Error("Delete attachment %s of release %s failed: %v", attachment.UUID, rel.ID, err)
+ }
+ }
+
+ if !rel.IsDraft {
+ notify_service.DeleteRelease(ctx, doer, rel)
+ }
+ return nil
+}
+
+// Init starts the release service
+func Init() error {
+ return initTagSyncQueue(graceful.GetManager().ShutdownContext())
+}
diff --git a/services/release/release_test.go b/services/release/release_test.go
new file mode 100644
index 0000000..026bba8
--- /dev/null
+++ b/services/release/release_test.go
@@ -0,0 +1,475 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package release
+
+import (
+ "strings"
+ "testing"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/services/attachment"
+
+ _ "code.gitea.io/gitea/models/actions"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
+
+func TestRelease_Create(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+
+ gitRepo, err := gitrepo.OpenRepository(git.DefaultContext, repo)
+ require.NoError(t, err)
+ defer gitRepo.Close()
+
+ require.NoError(t, CreateRelease(gitRepo, &repo_model.Release{
+ RepoID: repo.ID,
+ Repo: repo,
+ PublisherID: user.ID,
+ Publisher: user,
+ TagName: "v0.1",
+ Target: "master",
+ Title: "v0.1 is released",
+ Note: "v0.1 is released",
+ IsDraft: false,
+ IsPrerelease: false,
+ IsTag: false,
+ }, "", []*AttachmentChange{}))
+
+ require.NoError(t, CreateRelease(gitRepo, &repo_model.Release{
+ RepoID: repo.ID,
+ Repo: repo,
+ PublisherID: user.ID,
+ Publisher: user,
+ TagName: "v0.1.1",
+ Target: "65f1bf27bc3bf70f64657658635e66094edbcb4d",
+ Title: "v0.1.1 is released",
+ Note: "v0.1.1 is released",
+ IsDraft: false,
+ IsPrerelease: false,
+ IsTag: false,
+ }, "", []*AttachmentChange{}))
+
+ require.NoError(t, CreateRelease(gitRepo, &repo_model.Release{
+ RepoID: repo.ID,
+ Repo: repo,
+ PublisherID: user.ID,
+ Publisher: user,
+ TagName: "v0.1.2",
+ Target: "65f1bf2",
+ Title: "v0.1.2 is released",
+ Note: "v0.1.2 is released",
+ IsDraft: false,
+ IsPrerelease: false,
+ IsTag: false,
+ }, "", []*AttachmentChange{}))
+
+ require.NoError(t, CreateRelease(gitRepo, &repo_model.Release{
+ RepoID: repo.ID,
+ Repo: repo,
+ PublisherID: user.ID,
+ Publisher: user,
+ TagName: "v0.1.3",
+ Target: "65f1bf2",
+ Title: "v0.1.3 is released",
+ Note: "v0.1.3 is released",
+ IsDraft: true,
+ IsPrerelease: false,
+ IsTag: false,
+ }, "", []*AttachmentChange{}))
+
+ require.NoError(t, CreateRelease(gitRepo, &repo_model.Release{
+ RepoID: repo.ID,
+ Repo: repo,
+ PublisherID: user.ID,
+ Publisher: user,
+ TagName: "v0.1.4",
+ Target: "65f1bf2",
+ Title: "v0.1.4 is released",
+ Note: "v0.1.4 is released",
+ IsDraft: false,
+ IsPrerelease: true,
+ IsTag: false,
+ }, "", []*AttachmentChange{}))
+
+	testPayload := "testtest"
+
+ attach, err := attachment.NewAttachment(db.DefaultContext, &repo_model.Attachment{
+ RepoID: repo.ID,
+ UploaderID: user.ID,
+ Name: "test.txt",
+	}, strings.NewReader(testPayload), int64(len(testPayload)))
+ require.NoError(t, err)
+
+ release := repo_model.Release{
+ RepoID: repo.ID,
+ Repo: repo,
+ PublisherID: user.ID,
+ Publisher: user,
+ TagName: "v0.1.5",
+ Target: "65f1bf2",
+ Title: "v0.1.5 is released",
+ Note: "v0.1.5 is released",
+ IsDraft: false,
+ IsPrerelease: false,
+ IsTag: true,
+ }
+ require.NoError(t, CreateRelease(gitRepo, &release, "test", []*AttachmentChange{
+ {
+ Action: "add",
+ Type: "attachment",
+ UUID: attach.UUID,
+ },
+ }))
+ assert.NoError(t, repo_model.GetReleaseAttachments(db.DefaultContext, &release))
+ assert.Len(t, release.Attachments, 1)
+ assert.EqualValues(t, attach.UUID, release.Attachments[0].UUID)
+ assert.EqualValues(t, attach.Name, release.Attachments[0].Name)
+ assert.EqualValues(t, attach.ExternalURL, release.Attachments[0].ExternalURL)
+
+ release = repo_model.Release{
+ RepoID: repo.ID,
+ Repo: repo,
+ PublisherID: user.ID,
+ Publisher: user,
+ TagName: "v0.1.6",
+ Target: "65f1bf2",
+ Title: "v0.1.6 is released",
+ Note: "v0.1.6 is released",
+ IsDraft: false,
+ IsPrerelease: false,
+ IsTag: true,
+ }
+ assert.NoError(t, CreateRelease(gitRepo, &release, "", []*AttachmentChange{
+ {
+ Action: "add",
+ Type: "external",
+ Name: "test",
+ ExternalURL: "https://forgejo.org/",
+ },
+ }))
+ assert.NoError(t, repo_model.GetReleaseAttachments(db.DefaultContext, &release))
+ assert.Len(t, release.Attachments, 1)
+ assert.EqualValues(t, "test", release.Attachments[0].Name)
+ assert.EqualValues(t, "https://forgejo.org/", release.Attachments[0].ExternalURL)
+
+ release = repo_model.Release{
+ RepoID: repo.ID,
+ Repo: repo,
+ PublisherID: user.ID,
+ Publisher: user,
+ TagName: "v0.1.7",
+ Target: "65f1bf2",
+ Title: "v0.1.7 is released",
+ Note: "v0.1.7 is released",
+ IsDraft: false,
+ IsPrerelease: false,
+ IsTag: true,
+ }
+	assert.Error(t, CreateRelease(gitRepo, &release, "", []*AttachmentChange{
+ {
+ Action: "add",
+ Type: "external",
+ Name: "Click me",
+ // Invalid URL (API URL of current instance), this should result in an error
+ ExternalURL: "https://try.gitea.io/api/v1/user/follow",
+ },
+ }))
+}
+
+func TestRelease_Update(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+
+ gitRepo, err := gitrepo.OpenRepository(git.DefaultContext, repo)
+ require.NoError(t, err)
+ defer gitRepo.Close()
+
+ // Test a changed release
+ require.NoError(t, CreateRelease(gitRepo, &repo_model.Release{
+ RepoID: repo.ID,
+ Repo: repo,
+ PublisherID: user.ID,
+ Publisher: user,
+ TagName: "v1.1.1",
+ Target: "master",
+ Title: "v1.1.1 is released",
+ Note: "v1.1.1 is released",
+ IsDraft: false,
+ IsPrerelease: false,
+ IsTag: false,
+ }, "", []*AttachmentChange{}))
+ release, err := repo_model.GetRelease(db.DefaultContext, repo.ID, "v1.1.1")
+ require.NoError(t, err)
+ releaseCreatedUnix := release.CreatedUnix
+ time.Sleep(2 * time.Second) // sleep 2 seconds to ensure a different timestamp
+ release.Note = "Changed note"
+ require.NoError(t, UpdateRelease(db.DefaultContext, user, gitRepo, release, false, []*AttachmentChange{}))
+ release, err = repo_model.GetReleaseByID(db.DefaultContext, release.ID)
+ require.NoError(t, err)
+ assert.Equal(t, int64(releaseCreatedUnix), int64(release.CreatedUnix))
+
+ // Test a changed draft
+ require.NoError(t, CreateRelease(gitRepo, &repo_model.Release{
+ RepoID: repo.ID,
+ Repo: repo,
+ PublisherID: user.ID,
+ Publisher: user,
+ TagName: "v1.2.1",
+ Target: "65f1bf2",
+ Title: "v1.2.1 is draft",
+ Note: "v1.2.1 is draft",
+ IsDraft: true,
+ IsPrerelease: false,
+ IsTag: false,
+ }, "", []*AttachmentChange{}))
+ release, err = repo_model.GetRelease(db.DefaultContext, repo.ID, "v1.2.1")
+ require.NoError(t, err)
+ releaseCreatedUnix = release.CreatedUnix
+ time.Sleep(2 * time.Second) // sleep 2 seconds to ensure a different timestamp
+ release.Title = "Changed title"
+ require.NoError(t, UpdateRelease(db.DefaultContext, user, gitRepo, release, false, []*AttachmentChange{}))
+ release, err = repo_model.GetReleaseByID(db.DefaultContext, release.ID)
+ require.NoError(t, err)
+ assert.Less(t, int64(releaseCreatedUnix), int64(release.CreatedUnix))
+
+ // Test a changed pre-release
+ require.NoError(t, CreateRelease(gitRepo, &repo_model.Release{
+ RepoID: repo.ID,
+ Repo: repo,
+ PublisherID: user.ID,
+ Publisher: user,
+ TagName: "v1.3.1",
+ Target: "65f1bf2",
+ Title: "v1.3.1 is pre-released",
+ Note: "v1.3.1 is pre-released",
+ IsDraft: false,
+ IsPrerelease: true,
+ IsTag: false,
+ }, "", []*AttachmentChange{}))
+ release, err = repo_model.GetRelease(db.DefaultContext, repo.ID, "v1.3.1")
+ require.NoError(t, err)
+ releaseCreatedUnix = release.CreatedUnix
+ time.Sleep(2 * time.Second) // sleep 2 seconds to ensure a different timestamp
+ release.Title = "Changed title"
+ release.Note = "Changed note"
+ require.NoError(t, UpdateRelease(db.DefaultContext, user, gitRepo, release, false, []*AttachmentChange{}))
+ release, err = repo_model.GetReleaseByID(db.DefaultContext, release.ID)
+ require.NoError(t, err)
+ assert.Equal(t, int64(releaseCreatedUnix), int64(release.CreatedUnix))
+
+ // Test create release
+ release = &repo_model.Release{
+ RepoID: repo.ID,
+ Repo: repo,
+ PublisherID: user.ID,
+ Publisher: user,
+ TagName: "v1.1.2",
+ Target: "master",
+ Title: "v1.1.2 is released",
+ Note: "v1.1.2 is released",
+ IsDraft: true,
+ IsPrerelease: false,
+ IsTag: false,
+ }
+ require.NoError(t, CreateRelease(gitRepo, release, "", []*AttachmentChange{}))
+ assert.Positive(t, release.ID)
+
+ release.IsDraft = false
+ tagName := release.TagName
+
+ require.NoError(t, UpdateRelease(db.DefaultContext, user, gitRepo, release, false, []*AttachmentChange{}))
+ release, err = repo_model.GetReleaseByID(db.DefaultContext, release.ID)
+ require.NoError(t, err)
+ assert.Equal(t, tagName, release.TagName)
+
+ // Add new attachments
+ samplePayload := "testtest"
+ attach, err := attachment.NewAttachment(db.DefaultContext, &repo_model.Attachment{
+ RepoID: repo.ID,
+ UploaderID: user.ID,
+ Name: "test.txt",
+ }, strings.NewReader(samplePayload), int64(len([]byte(samplePayload))))
+ require.NoError(t, err)
+
+ require.NoError(t, UpdateRelease(db.DefaultContext, user, gitRepo, release, false, []*AttachmentChange{
+ {
+ Action: "add",
+ Type: "attachment",
+ UUID: attach.UUID,
+ },
+ }))
+ require.NoError(t, repo_model.GetReleaseAttachments(db.DefaultContext, release))
+ assert.Len(t, release.Attachments, 1)
+ assert.EqualValues(t, attach.UUID, release.Attachments[0].UUID)
+ assert.EqualValues(t, release.ID, release.Attachments[0].ReleaseID)
+ assert.EqualValues(t, attach.Name, release.Attachments[0].Name)
+ assert.EqualValues(t, attach.ExternalURL, release.Attachments[0].ExternalURL)
+
+ // update the attachment name
+ require.NoError(t, UpdateRelease(db.DefaultContext, user, gitRepo, release, false, []*AttachmentChange{
+ {
+ Action: "update",
+ Name: "test2.txt",
+ UUID: attach.UUID,
+ },
+ }))
+ release.Attachments = nil
+ require.NoError(t, repo_model.GetReleaseAttachments(db.DefaultContext, release))
+ assert.Len(t, release.Attachments, 1)
+ assert.EqualValues(t, attach.UUID, release.Attachments[0].UUID)
+ assert.EqualValues(t, release.ID, release.Attachments[0].ReleaseID)
+ assert.EqualValues(t, "test2.txt", release.Attachments[0].Name)
+ assert.EqualValues(t, attach.ExternalURL, release.Attachments[0].ExternalURL)
+
+ // delete the attachment
+ require.NoError(t, UpdateRelease(db.DefaultContext, user, gitRepo, release, false, []*AttachmentChange{
+ {
+ Action: "delete",
+ UUID: attach.UUID,
+ },
+ }))
+ release.Attachments = nil
+ assert.NoError(t, repo_model.GetReleaseAttachments(db.DefaultContext, release))
+ assert.Empty(t, release.Attachments)
+
+ // Add new external attachment
+ assert.NoError(t, UpdateRelease(db.DefaultContext, user, gitRepo, release, false, []*AttachmentChange{
+ {
+ Action: "add",
+ Type: "external",
+ Name: "test",
+ ExternalURL: "https://forgejo.org/",
+ },
+ }))
+ assert.NoError(t, repo_model.GetReleaseAttachments(db.DefaultContext, release))
+ assert.Len(t, release.Attachments, 1)
+ assert.EqualValues(t, release.ID, release.Attachments[0].ReleaseID)
+ assert.EqualValues(t, "test", release.Attachments[0].Name)
+ assert.EqualValues(t, "https://forgejo.org/", release.Attachments[0].ExternalURL)
+ externalAttachmentUUID := release.Attachments[0].UUID
+
+ // update the attachment name
+ assert.NoError(t, UpdateRelease(db.DefaultContext, user, gitRepo, release, false, []*AttachmentChange{
+ {
+ Action: "update",
+ Name: "test2",
+ UUID: externalAttachmentUUID,
+ ExternalURL: "https://about.gitea.com/",
+ },
+ }))
+ release.Attachments = nil
+ assert.NoError(t, repo_model.GetReleaseAttachments(db.DefaultContext, release))
+ assert.Len(t, release.Attachments, 1)
+ assert.EqualValues(t, externalAttachmentUUID, release.Attachments[0].UUID)
+ assert.EqualValues(t, release.ID, release.Attachments[0].ReleaseID)
+ assert.EqualValues(t, "test2", release.Attachments[0].Name)
+ assert.EqualValues(t, "https://about.gitea.com/", release.Attachments[0].ExternalURL)
+}
+
+func TestRelease_createTag(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+
+ gitRepo, err := gitrepo.OpenRepository(git.DefaultContext, repo)
+ require.NoError(t, err)
+ defer gitRepo.Close()
+
+ // Test a changed release
+ release := &repo_model.Release{
+ RepoID: repo.ID,
+ Repo: repo,
+ PublisherID: user.ID,
+ Publisher: user,
+ TagName: "v2.1.1",
+ Target: "master",
+ Title: "v2.1.1 is released",
+ Note: "v2.1.1 is released",
+ IsDraft: false,
+ IsPrerelease: false,
+ IsTag: false,
+ }
+ _, err = createTag(db.DefaultContext, gitRepo, release, "")
+ require.NoError(t, err)
+ assert.NotEmpty(t, release.CreatedUnix)
+ releaseCreatedUnix := release.CreatedUnix
+ time.Sleep(2 * time.Second) // sleep 2 seconds to ensure a different timestamp
+ release.Note = "Changed note"
+ _, err = createTag(db.DefaultContext, gitRepo, release, "")
+ require.NoError(t, err)
+ assert.Equal(t, int64(releaseCreatedUnix), int64(release.CreatedUnix))
+
+ // Test a changed draft
+ release = &repo_model.Release{
+ RepoID: repo.ID,
+ Repo: repo,
+ PublisherID: user.ID,
+ Publisher: user,
+ TagName: "v2.2.1",
+ Target: "65f1bf2",
+ Title: "v2.2.1 is draft",
+ Note: "v2.2.1 is draft",
+ IsDraft: true,
+ IsPrerelease: false,
+ IsTag: false,
+ }
+ _, err = createTag(db.DefaultContext, gitRepo, release, "")
+ require.NoError(t, err)
+ releaseCreatedUnix = release.CreatedUnix
+ time.Sleep(2 * time.Second) // sleep 2 seconds to ensure a different timestamp
+ release.Title = "Changed title"
+ _, err = createTag(db.DefaultContext, gitRepo, release, "")
+ require.NoError(t, err)
+ assert.Less(t, int64(releaseCreatedUnix), int64(release.CreatedUnix))
+
+ // Test a changed pre-release
+ release = &repo_model.Release{
+ RepoID: repo.ID,
+ Repo: repo,
+ PublisherID: user.ID,
+ Publisher: user,
+ TagName: "v2.3.1",
+ Target: "65f1bf2",
+ Title: "v2.3.1 is pre-released",
+ Note: "v2.3.1 is pre-released",
+ IsDraft: false,
+ IsPrerelease: true,
+ IsTag: false,
+ }
+ _, err = createTag(db.DefaultContext, gitRepo, release, "")
+ require.NoError(t, err)
+ releaseCreatedUnix = release.CreatedUnix
+ time.Sleep(2 * time.Second) // sleep 2 seconds to ensure a different timestamp
+ release.Title = "Changed title"
+ release.Note = "Changed note"
+ _, err = createTag(db.DefaultContext, gitRepo, release, "")
+ require.NoError(t, err)
+ assert.Equal(t, int64(releaseCreatedUnix), int64(release.CreatedUnix))
+}
+
+func TestCreateNewTag(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+
+ require.NoError(t, CreateNewTag(git.DefaultContext, user, repo, "master", "v2.0",
+ "v2.0 is released \n\n BUGFIX: .... \n\n 123"))
+}
diff --git a/services/release/tag.go b/services/release/tag.go
new file mode 100644
index 0000000..dae2b70
--- /dev/null
+++ b/services/release/tag.go
@@ -0,0 +1,61 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package release
+
+import (
+ "context"
+ "errors"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/queue"
+ repo_module "code.gitea.io/gitea/modules/repository"
+
+ "xorm.io/builder"
+)
+
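+// TagSyncOptions identifies a repository whose Git tags should be synchronized
+// into the database.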
+type TagSyncOptions struct {
+ RepoID int64
+}
+
+// tagSyncQueue represents a queue to handle tag sync jobs.
+var tagSyncQueue *queue.WorkerPoolQueue[*TagSyncOptions]
+
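+// handlerTagSync processes queued tag sync requests; failures are logged and
+// never requeued (the handler always returns nil).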
+func handlerTagSync(items ...*TagSyncOptions) []*TagSyncOptions {
+ for _, opts := range items {
+ err := repo_module.SyncRepoTags(graceful.GetManager().ShutdownContext(), opts.RepoID)
+ if err != nil {
+ log.Error("syncRepoTags [%d] failed: %v", opts.RepoID, err)
+ }
+ }
+ return nil
+}
+
+func addRepoToTagSyncQueue(repoID int64) error {
+ return tagSyncQueue.Push(&TagSyncOptions{
+ RepoID: repoID,
+ })
+}
+
+func initTagSyncQueue(ctx context.Context) error {
+ tagSyncQueue = queue.CreateUniqueQueue(ctx, "tag_sync", handlerTagSync)
+ if tagSyncQueue == nil {
+ return errors.New("unable to create tag_sync queue")
+ }
+ go graceful.GetManager().RunWithCancel(tagSyncQueue)
+
+ return nil
+}
+
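+// AddAllRepoTagsToSyncQueue enqueues a tag sync request for every non-empty
+// repository.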
+func AddAllRepoTagsToSyncQueue(ctx context.Context) error {
+ if err := db.Iterate(ctx, builder.Eq{"is_empty": false}, func(ctx context.Context, repo *repo_model.Repository) error {
+ return addRepoToTagSyncQueue(repo.ID)
+ }); err != nil {
+		return fmt.Errorf("run sync all tags failed: %w", err)
+ }
+ return nil
+}
diff --git a/services/remote/promote.go b/services/remote/promote.go
new file mode 100644
index 0000000..eb41ace
--- /dev/null
+++ b/services/remote/promote.go
@@ -0,0 +1,133 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// SPDX-License-Identifier: MIT
+
+package remote
+
+import (
+ "context"
+
+ auth_model "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/services/auth/source/oauth2"
+ remote_source "code.gitea.io/gitea/services/auth/source/remote"
+)
+
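+// Reason describes the outcome of an attempt to promote a remote user.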
+type Reason int
+
+const (
+ ReasonNoMatch Reason = iota
+ ReasonNotAuth2
+ ReasonBadAuth2
+ ReasonLoginNameNotExists
+ ReasonNotRemote
+ ReasonEmailIsSet
+ ReasonNoSource
+ ReasonSourceWrongType
+ ReasonCanPromote
+ ReasonPromoted
+ ReasonUpdateFail
+ ReasonErrorLoginName
+ ReasonErrorGetSource
+)
+
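+// NewReason logs message at the given level and returns reason, allowing call
+// sites to log and produce their return value in a single expression.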
+func NewReason(level log.Level, reason Reason, message string, args ...any) Reason {
+ log.Log(1, level, message, args...)
+ return reason
+}
+
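+// getUsersByLoginName returns all remote users (type UserTypeRemoteUser with
+// login type Remote) whose login name matches name.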
+func getUsersByLoginName(ctx context.Context, name string) ([]*user_model.User, error) {
+ if len(name) == 0 {
+ return nil, user_model.ErrUserNotExist{Name: name}
+ }
+
+ users := make([]*user_model.User, 0, 5)
+
+ return users, db.GetEngine(ctx).
+ Table("user").
+ Where("login_name = ? AND login_type = ? AND type = ?", name, auth_model.Remote, user_model.UserTypeRemoteUser).
+ Find(&users)
+}
+
+// The remote user has:
+//
+// Type UserTypeRemoteUser
+// LoginType Remote
+// LoginName set to the unique identifier of the originating authentication source
+// LoginSource set to the Remote source that can be matched against an OAuth2 source
+//
+// If the source from which an authentication happens is OAuth2, an existing
+// remote user will be promoted to an OAuth2 user provided:
+//
+// user.LoginName is the same as goth.UserID (argument loginName)
+// user.LoginSource has a MatchingSource equal to the name of the OAuth2 provider
+//
+// Once promoted, the user will be logged in without further interaction from the
+// user and will own all repositories, issues, etc. associated with it.
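+//
+// A sketch of the expected call site (assumed, not part of this patch), with
+// gothUser obtained from the OAuth2 callback:
+//
+//	promoted, reason, err := MaybePromoteRemoteUser(ctx, authSource, gothUser.UserID, gothUser.Email)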
+func MaybePromoteRemoteUser(ctx context.Context, source *auth_model.Source, loginName, email string) (promoted bool, reason Reason, err error) {
+ user, reason, err := getRemoteUserToPromote(ctx, source, loginName, email)
+ if err != nil || user == nil {
+ return false, reason, err
+ }
+ promote := &user_model.User{
+ ID: user.ID,
+ Type: user_model.UserTypeIndividual,
+ Email: email,
+ LoginSource: source.ID,
+ LoginType: source.Type,
+ }
+ reason = NewReason(log.DEBUG, ReasonPromoted, "promote user %v: LoginName %v => %v, LoginSource %v => %v, LoginType %v => %v, Email %v => %v", user.ID, user.LoginName, promote.LoginName, user.LoginSource, promote.LoginSource, user.LoginType, promote.LoginType, user.Email, promote.Email)
+ if err := user_model.UpdateUserCols(ctx, promote, "type", "email", "login_source", "login_type"); err != nil {
+ return false, ReasonUpdateFail, err
+ }
+ return true, reason, nil
+}
+
+func getRemoteUserToPromote(ctx context.Context, source *auth_model.Source, loginName, email string) (*user_model.User, Reason, error) { //nolint:unparam
+ if !source.IsOAuth2() {
+ return nil, NewReason(log.DEBUG, ReasonNotAuth2, "source %v is not OAuth2", source), nil
+ }
+ oauth2Source, ok := source.Cfg.(*oauth2.Source)
+ if !ok {
+ return nil, NewReason(log.ERROR, ReasonBadAuth2, "source claims to be OAuth2 but is not"), nil
+ }
+
+ users, err := getUsersByLoginName(ctx, loginName)
+ if err != nil {
+		return nil, NewReason(log.ERROR, ReasonErrorLoginName, "getUsersByLoginName('%s') %v", loginName, err), err
+ }
+ if len(users) == 0 {
+ return nil, NewReason(log.ERROR, ReasonLoginNameNotExists, "no user with LoginType UserTypeRemoteUser and LoginName '%s'", loginName), nil
+ }
+
+ reason := ReasonNoSource
+ for _, u := range users {
+ userSource, err := auth_model.GetSourceByID(ctx, u.LoginSource)
+ if err != nil {
+ if auth_model.IsErrSourceNotExist(err) {
+ reason = NewReason(log.DEBUG, ReasonNoSource, "source id = %v for user %v not found %v", u.LoginSource, u.ID, err)
+ continue
+ }
+ return nil, NewReason(log.ERROR, ReasonErrorGetSource, "GetSourceByID('%s') %v", u.LoginSource, err), err
+ }
+ if u.Email != "" {
+ reason = NewReason(log.DEBUG, ReasonEmailIsSet, "the user email is already set to '%s'", u.Email)
+ continue
+ }
+ remoteSource, ok := userSource.Cfg.(*remote_source.Source)
+ if !ok {
+ reason = NewReason(log.DEBUG, ReasonSourceWrongType, "expected a remote source but got %T %v", userSource, userSource)
+ continue
+ }
+
+ if oauth2Source.Provider != remoteSource.MatchingSource {
+ reason = NewReason(log.DEBUG, ReasonNoMatch, "skip OAuth2 source %s because it is different from %s which is the expected match for the remote source %s", oauth2Source.Provider, remoteSource.MatchingSource, remoteSource.URL)
+ continue
+ }
+
+ return u, ReasonCanPromote, nil
+ }
+
+ return nil, reason, nil
+}
diff --git a/services/repository/adopt.go b/services/repository/adopt.go
new file mode 100644
index 0000000..3d6fe71
--- /dev/null
+++ b/services/repository/adopt.go
@@ -0,0 +1,370 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "path"
+ "path/filepath"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ notify_service "code.gitea.io/gitea/services/notify"
+
+ "github.com/gobwas/glob"
+)
+
+// AdoptRepository adopts pre-existing repository files for the user/organization.
+func AdoptRepository(ctx context.Context, doer, u *user_model.User, opts CreateRepoOptions) (*repo_model.Repository, error) {
+ if !doer.IsAdmin && !u.CanCreateRepo() {
+ return nil, repo_model.ErrReachLimitOfRepo{
+ Limit: u.MaxRepoCreation,
+ }
+ }
+
+ repo := &repo_model.Repository{
+ OwnerID: u.ID,
+ Owner: u,
+ OwnerName: u.Name,
+ Name: opts.Name,
+ LowerName: strings.ToLower(opts.Name),
+ Description: opts.Description,
+ OriginalURL: opts.OriginalURL,
+ OriginalServiceType: opts.GitServiceType,
+ IsPrivate: opts.IsPrivate,
+ IsFsckEnabled: !opts.IsMirror,
+ CloseIssuesViaCommitInAnyBranch: setting.Repository.DefaultCloseIssuesViaCommitsInAnyBranch,
+ Status: opts.Status,
+ IsEmpty: !opts.AutoInit,
+ }
+
+ if err := db.WithTx(ctx, func(ctx context.Context) error {
+ repoPath := repo_model.RepoPath(u.Name, repo.Name)
+ isExist, err := util.IsExist(repoPath)
+ if err != nil {
+ log.Error("Unable to check if %s exists. Error: %v", repoPath, err)
+ return err
+ }
+ if !isExist {
+ return repo_model.ErrRepoNotExist{
+ OwnerName: u.Name,
+ Name: repo.Name,
+ }
+ }
+
+ if err := repo_module.CreateRepositoryByExample(ctx, doer, u, repo, true, false); err != nil {
+ return err
+ }
+
+		// Re-fetch the repository from the database before updating it (otherwise
+		// it would override changes that were made earlier via SQL)
+ if repo, err = repo_model.GetRepositoryByID(ctx, repo.ID); err != nil {
+ return fmt.Errorf("getRepositoryByID: %w", err)
+ }
+
+ if err := adoptRepository(ctx, repoPath, repo, opts.DefaultBranch); err != nil {
+ return fmt.Errorf("adoptRepository: %w", err)
+ }
+
+ if err := repo_module.CheckDaemonExportOK(ctx, repo); err != nil {
+ return fmt.Errorf("checkDaemonExportOK: %w", err)
+ }
+
+ // Initialize Issue Labels if selected
+ if len(opts.IssueLabels) > 0 {
+ if err := repo_module.InitializeLabels(ctx, repo.ID, opts.IssueLabels, false); err != nil {
+ return fmt.Errorf("InitializeLabels: %w", err)
+ }
+ }
+
+ if stdout, _, err := git.NewCommand(ctx, "update-server-info").
+ SetDescription(fmt.Sprintf("CreateRepository(git update-server-info): %s", repoPath)).
+ RunStdString(&git.RunOpts{Dir: repoPath}); err != nil {
+ log.Error("CreateRepository(git update-server-info) in %v: Stdout: %s\nError: %v", repo, stdout, err)
+ return fmt.Errorf("CreateRepository(git update-server-info): %w", err)
+ }
+ return nil
+ }); err != nil {
+ return nil, err
+ }
+
+ notify_service.AdoptRepository(ctx, doer, u, repo)
+
+ return repo, nil
+}
+
+func adoptRepository(ctx context.Context, repoPath string, repo *repo_model.Repository, defaultBranch string) (err error) {
+ isExist, err := util.IsExist(repoPath)
+ if err != nil {
+ log.Error("Unable to check if %s exists. Error: %v", repoPath, err)
+ return err
+ }
+ if !isExist {
+		return fmt.Errorf("adoptRepository: path does not exist: %s", repoPath)
+ }
+
+ if err := repo_module.CreateDelegateHooks(repoPath); err != nil {
+ return fmt.Errorf("createDelegateHooks: %w", err)
+ }
+
+ repo.IsEmpty = false
+
+ if len(defaultBranch) > 0 {
+ repo.DefaultBranch = defaultBranch
+
+ if err = gitrepo.SetDefaultBranch(ctx, repo, repo.DefaultBranch); err != nil {
+ return fmt.Errorf("setDefaultBranch: %w", err)
+ }
+ } else {
+ repo.DefaultBranch, err = gitrepo.GetDefaultBranch(ctx, repo)
+ if err != nil {
+ repo.DefaultBranch = setting.Repository.DefaultBranch
+ if err = gitrepo.SetDefaultBranch(ctx, repo, repo.DefaultBranch); err != nil {
+ return fmt.Errorf("setDefaultBranch: %w", err)
+ }
+ }
+ }
+
+	// Don't bother looking up this repo in the context, it won't be there
+ gitRepo, err := gitrepo.OpenRepository(ctx, repo)
+ if err != nil {
+ return fmt.Errorf("openRepository: %w", err)
+ }
+ defer gitRepo.Close()
+
+ if _, err = repo_module.SyncRepoBranchesWithRepo(ctx, repo, gitRepo, 0); err != nil {
+ return fmt.Errorf("SyncRepoBranchesWithRepo: %w", err)
+ }
+
+ if err = repo_module.SyncReleasesWithTags(ctx, repo, gitRepo); err != nil {
+ return fmt.Errorf("SyncReleasesWithTags: %w", err)
+ }
+
+ branches, _ := git_model.FindBranchNames(ctx, git_model.FindBranchOptions{
+ RepoID: repo.ID,
+ ListOptions: db.ListOptionsAll,
+ IsDeletedBranch: optional.Some(false),
+ })
+
+ found := false
+ hasDefault := false
+ hasMaster := false
+ hasMain := false
+ for _, branch := range branches {
+ if branch == repo.DefaultBranch {
+ found = true
+ break
+ } else if branch == setting.Repository.DefaultBranch {
+ hasDefault = true
+ } else if branch == "master" {
+ hasMaster = true
+ } else if branch == "main" {
+ hasMain = true
+ }
+ }
+ if !found {
+ if hasDefault {
+ repo.DefaultBranch = setting.Repository.DefaultBranch
+ } else if hasMaster {
+ repo.DefaultBranch = "master"
+ } else if hasMain {
+ repo.DefaultBranch = "main"
+ } else if len(branches) > 0 {
+ repo.DefaultBranch = branches[0]
+ } else {
+ repo.IsEmpty = true
+ repo.DefaultBranch = setting.Repository.DefaultBranch
+ }
+
+ if err = gitrepo.SetDefaultBranch(ctx, repo, repo.DefaultBranch); err != nil {
+ return fmt.Errorf("setDefaultBranch: %w", err)
+ }
+ }
+ if err = repo_module.UpdateRepository(ctx, repo, false); err != nil {
+ return fmt.Errorf("updateRepository: %w", err)
+ }
+
+ return nil
+}
+
+// DeleteUnadoptedRepository deletes unadopted repository files from the filesystem
+func DeleteUnadoptedRepository(ctx context.Context, doer, u *user_model.User, repoName string) error {
+ if err := repo_model.IsUsableRepoName(repoName); err != nil {
+ return err
+ }
+
+ repoPath := repo_model.RepoPath(u.Name, repoName)
+ isExist, err := util.IsExist(repoPath)
+ if err != nil {
+ log.Error("Unable to check if %s exists. Error: %v", repoPath, err)
+ return err
+ }
+ if !isExist {
+ return repo_model.ErrRepoNotExist{
+ OwnerName: u.Name,
+ Name: repoName,
+ }
+ }
+
+ if exist, err := repo_model.IsRepositoryModelExist(ctx, u, repoName); err != nil {
+ return err
+ } else if exist {
+ return repo_model.ErrRepoAlreadyExist{
+ Uname: u.Name,
+ Name: repoName,
+ }
+ }
+
+ return util.RemoveAll(repoPath)
+}
+
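+// unadoptedRepositories collects the names of unadopted repositories whose
+// overall index falls inside the requested page window [start, end).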
+type unadoptedRepositories struct {
+ repositories []string
+ index int
+ start int
+ end int
+}
+
+func (unadopted *unadoptedRepositories) add(repository string) {
+ if unadopted.index >= unadopted.start && unadopted.index < unadopted.end {
+ unadopted.repositories = append(unadopted.repositories, repository)
+ }
+ unadopted.index++
+}
+
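+// checkUnadoptedRepositories looks up repoNamesToCheck for userName in the
+// database and records every name that has no repository model as unadopted.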
+func checkUnadoptedRepositories(ctx context.Context, userName string, repoNamesToCheck []string, unadopted *unadoptedRepositories) error {
+ if len(repoNamesToCheck) == 0 {
+ return nil
+ }
+ ctxUser, err := user_model.GetUserByName(ctx, userName)
+ if err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ log.Debug("Missing user: %s", userName)
+ return nil
+ }
+ return err
+ }
+ repos, _, err := repo_model.GetUserRepositories(ctx, &repo_model.SearchRepoOptions{
+ Actor: ctxUser,
+ Private: true,
+ ListOptions: db.ListOptions{
+ Page: 1,
+ PageSize: len(repoNamesToCheck),
+ }, LowerNames: repoNamesToCheck,
+ })
+ if err != nil {
+ return err
+ }
+ if len(repos) == len(repoNamesToCheck) {
+ return nil
+ }
+ repoNames := make(container.Set[string], len(repos))
+ for _, repo := range repos {
+ repoNames.Add(repo.LowerName)
+ }
+ for _, repoName := range repoNamesToCheck {
+ if !repoNames.Contains(repoName) {
+ unadopted.add(path.Join(userName, repoName)) // These are not used as filepaths - but as reponames - therefore use path.Join not filepath.Join
+ }
+ }
+ return nil
+}
+
+// ListUnadoptedRepositories lists all the unadopted repositories that match the provided query
+func ListUnadoptedRepositories(ctx context.Context, query string, opts *db.ListOptions) ([]string, int, error) {
+ globUser, _ := glob.Compile("*")
+ globRepo, _ := glob.Compile("*")
+
+ qsplit := strings.SplitN(query, "/", 2)
+ if len(qsplit) > 0 && len(query) > 0 {
+ var err error
+ globUser, err = glob.Compile(qsplit[0])
+ if err != nil {
+ log.Info("Invalid glob expression '%s' (skipped): %v", qsplit[0], err)
+ }
+ if len(qsplit) > 1 {
+ globRepo, err = glob.Compile(qsplit[1])
+ if err != nil {
+ log.Info("Invalid glob expression '%s' (skipped): %v", qsplit[1], err)
+ }
+ }
+ }
+ var repoNamesToCheck []string
+
+ start := (opts.Page - 1) * opts.PageSize
+ unadopted := &unadoptedRepositories{
+ repositories: make([]string, 0, opts.PageSize),
+ start: start,
+ end: start + opts.PageSize,
+ index: 0,
+ }
+
+ var userName string
+
+ // We're going to iterate by pagesize.
+ root := filepath.Clean(setting.RepoRootPath)
+ if err := filepath.WalkDir(root, func(path string, d os.DirEntry, err error) error {
+ if err != nil {
+ return err
+ }
+ if !d.IsDir() || path == root {
+ return nil
+ }
+
+ name := d.Name()
+
+ if !strings.ContainsRune(path[len(root)+1:], filepath.Separator) {
+ // Got a new user
+ if err = checkUnadoptedRepositories(ctx, userName, repoNamesToCheck, unadopted); err != nil {
+ return err
+ }
+ repoNamesToCheck = repoNamesToCheck[:0]
+
+ if !globUser.Match(name) {
+ return filepath.SkipDir
+ }
+
+ userName = name
+ return nil
+ }
+
+ if !strings.HasSuffix(name, ".git") {
+ return filepath.SkipDir
+ }
+ name = name[:len(name)-4]
+ if repo_model.IsUsableRepoName(name) != nil || strings.ToLower(name) != name || !globRepo.Match(name) {
+ return filepath.SkipDir
+ }
+
+ repoNamesToCheck = append(repoNamesToCheck, name)
+ if len(repoNamesToCheck) >= setting.Database.IterateBufferSize {
+ if err = checkUnadoptedRepositories(ctx, userName, repoNamesToCheck, unadopted); err != nil {
+ return err
+ }
+ repoNamesToCheck = repoNamesToCheck[:0]
+ }
+ return filepath.SkipDir
+ }); err != nil {
+ return nil, 0, err
+ }
+
+ if err := checkUnadoptedRepositories(ctx, userName, repoNamesToCheck, unadopted); err != nil {
+ return nil, 0, err
+ }
+
+ return unadopted.repositories, unadopted.index, nil
+}
diff --git a/services/repository/adopt_test.go b/services/repository/adopt_test.go
new file mode 100644
index 0000000..71fb1fc
--- /dev/null
+++ b/services/repository/adopt_test.go
@@ -0,0 +1,115 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "os"
+ "path"
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestCheckUnadoptedRepositories_Add(t *testing.T) {
+ start := 10
+ end := 20
+ unadopted := &unadoptedRepositories{
+ start: start,
+ end: end,
+ index: 0,
+ }
+
+ total := 30
+ for i := 0; i < total; i++ {
+ unadopted.add("something")
+ }
+
+ assert.Equal(t, total, unadopted.index)
+ assert.Len(t, unadopted.repositories, end-start)
+}
+
+func TestCheckUnadoptedRepositories(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ //
+	// Non-existent user
+ //
+ unadopted := &unadoptedRepositories{start: 0, end: 100}
+ err := checkUnadoptedRepositories(db.DefaultContext, "notauser", []string{"repo"}, unadopted)
+ require.NoError(t, err)
+ assert.Empty(t, unadopted.repositories)
+ //
+ // Unadopted repository is returned
+ // Existing (adopted) repository is not returned
+ //
+ userName := "user2"
+ repoName := "repo2"
+ unadoptedRepoName := "unadopted"
+ unadopted = &unadoptedRepositories{start: 0, end: 100}
+ err = checkUnadoptedRepositories(db.DefaultContext, userName, []string{repoName, unadoptedRepoName}, unadopted)
+ require.NoError(t, err)
+ assert.Equal(t, []string{path.Join(userName, unadoptedRepoName)}, unadopted.repositories)
+ //
+ // Existing (adopted) repository is not returned
+ //
+ unadopted = &unadoptedRepositories{start: 0, end: 100}
+ err = checkUnadoptedRepositories(db.DefaultContext, userName, []string{repoName}, unadopted)
+ require.NoError(t, err)
+ assert.Empty(t, unadopted.repositories)
+ assert.Equal(t, 0, unadopted.index)
+}
+
+func TestListUnadoptedRepositories_ListOptions(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ username := "user2"
+ unadoptedList := []string{path.Join(username, "unadopted1"), path.Join(username, "unadopted2")}
+ for _, unadopted := range unadoptedList {
+ _ = os.Mkdir(path.Join(setting.RepoRootPath, unadopted+".git"), 0o755)
+ }
+
+ opts := db.ListOptions{Page: 1, PageSize: 1}
+ repoNames, count, err := ListUnadoptedRepositories(db.DefaultContext, "", &opts)
+ require.NoError(t, err)
+ assert.Equal(t, 2, count)
+ assert.Equal(t, unadoptedList[0], repoNames[0])
+
+ opts = db.ListOptions{Page: 2, PageSize: 1}
+ repoNames, count, err = ListUnadoptedRepositories(db.DefaultContext, "", &opts)
+ require.NoError(t, err)
+ assert.Equal(t, 2, count)
+ assert.Equal(t, unadoptedList[1], repoNames[0])
+}
+
+func TestAdoptRepository(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ username := "user2"
+
+ unadopted := "unadopted"
+ require.NoError(t, unittest.CopyDir(
+ "../../modules/git/tests/repos/repo1_bare",
+ path.Join(setting.RepoRootPath, username, unadopted+".git"),
+ ))
+
+ opts := db.ListOptions{Page: 1, PageSize: 1}
+ repoNames, _, err := ListUnadoptedRepositories(db.DefaultContext, "", &opts)
+ require.NoError(t, err)
+ require.Contains(t, repoNames, path.Join(username, unadopted))
+
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+ owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ repo, err := AdoptRepository(db.DefaultContext, doer, owner, CreateRepoOptions{
+ Name: unadopted,
+ Description: "description",
+ IsPrivate: false,
+ AutoInit: true,
+ })
+ require.NoError(t, err)
+ assert.Equal(t, git.Sha1ObjectFormat.Name(), repo.ObjectFormatName)
+}
diff --git a/services/repository/archiver/archiver.go b/services/repository/archiver/archiver.go
new file mode 100644
index 0000000..c74712b
--- /dev/null
+++ b/services/repository/archiver/archiver.go
@@ -0,0 +1,377 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package archiver
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "io"
+ "os"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/process"
+ "code.gitea.io/gitea/modules/queue"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+)
+
+// ArchiveRequest defines the parameters of an archive request, which notably
+// includes the specific repository being archived as well as the commit, the
+// name by which it was requested, and the kind of archive being requested.
+// This is entirely opaque to external entities, though, and mostly used as a
+// handle elsewhere.
+type ArchiveRequest struct {
+ RepoID int64
+ refName string
+ Type git.ArchiveType
+ CommitID string
+ ReleaseID int64
+}
+
+// ErrUnknownArchiveFormat request archive format is not supported
+type ErrUnknownArchiveFormat struct {
+ RequestFormat string
+}
+
+// Error implements error
+func (err ErrUnknownArchiveFormat) Error() string {
+ return fmt.Sprintf("unknown format: %s", err.RequestFormat)
+}
+
+// Is implements error
+func (ErrUnknownArchiveFormat) Is(err error) bool {
+ _, ok := err.(ErrUnknownArchiveFormat)
+ return ok
+}
+
+// RepoRefNotFoundError is returned when a requested reference (commit, tag) was not found.
+type RepoRefNotFoundError struct {
+ RefName string
+}
+
+// Error implements error.
+func (e RepoRefNotFoundError) Error() string {
+ return fmt.Sprintf("unrecognized repository reference: %s", e.RefName)
+}
+
+func (e RepoRefNotFoundError) Is(err error) bool {
+ _, ok := err.(RepoRefNotFoundError)
+ return ok
+}
+
+// NewRequest creates an archival request, based on the URI. The
+// resulting ArchiveRequest is suitable for being passed to ArchiveRepository()
+// if it's determined that the request still needs to be satisfied.
+func NewRequest(ctx context.Context, repoID int64, repo *git.Repository, uri string) (*ArchiveRequest, error) {
+ r := &ArchiveRequest{
+ RepoID: repoID,
+ }
+
+ var ext string
+ switch {
+ case strings.HasSuffix(uri, ".zip"):
+ ext = ".zip"
+ r.Type = git.ZIP
+ case strings.HasSuffix(uri, ".tar.gz"):
+ ext = ".tar.gz"
+ r.Type = git.TARGZ
+ case strings.HasSuffix(uri, ".bundle"):
+ ext = ".bundle"
+ r.Type = git.BUNDLE
+ default:
+ return nil, ErrUnknownArchiveFormat{RequestFormat: uri}
+ }
+
+ r.refName = strings.TrimSuffix(uri, ext)
+
+ // Get corresponding commit.
+ commitID, err := repo.ConvertToGitID(r.refName)
+ if err != nil {
+ return nil, RepoRefNotFoundError{RefName: r.refName}
+ }
+
+ r.CommitID = commitID.String()
+
+ release, err := repo_model.GetRelease(ctx, repoID, r.refName)
+ if err != nil {
+ if !repo_model.IsErrReleaseNotExist(err) {
+ return nil, err
+ }
+ }
+ if release != nil {
+ r.ReleaseID = release.ID
+ }
+
+ return r, nil
+}
+
+// GetArchiveName returns the name of the archive, based on the ref used by the
+// caller to create this request.
+func (aReq *ArchiveRequest) GetArchiveName() string {
+ return strings.ReplaceAll(aReq.refName, "/", "-") + "." + aReq.Type.String()
+}
+
+// Await awaits the completion of an ArchiveRequest. If the archive has
+// already been prepared the method returns immediately. Otherwise an archiver
+// process will be started and its completion awaited. On success the returned
+// RepoArchiver may be used to download the archive. Note that even if the
+// context is cancelled/times out a started archiver will still continue to run
+// in the background.
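+//
+// Illustrative use, assuming aReq was built via NewRequest:
+//
+//	repoArchiver, err := aReq.Await(ctx)
+//	if err == nil {
+//		// the archive can now be read from storage.RepoArchives at repoArchiver.RelativePath()
+//	}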
+func (aReq *ArchiveRequest) Await(ctx context.Context) (*repo_model.RepoArchiver, error) {
+ archiver, err := repo_model.GetRepoArchiver(ctx, aReq.RepoID, aReq.Type, aReq.CommitID)
+ if err != nil {
+ return nil, fmt.Errorf("models.GetRepoArchiver: %w", err)
+ }
+
+ if archiver != nil {
+ archiver.ReleaseID = aReq.ReleaseID
+ }
+
+ if archiver != nil && archiver.Status == repo_model.ArchiverReady {
+ // Archive already generated, we're done.
+ return archiver, nil
+ }
+
+ if err := StartArchive(aReq); err != nil {
+ return nil, fmt.Errorf("archiver.StartArchive: %w", err)
+ }
+
+ poll := time.NewTicker(time.Second * 1)
+ defer poll.Stop()
+
+ for {
+ select {
+ case <-graceful.GetManager().HammerContext().Done():
+ // System stopped.
+ return nil, graceful.GetManager().HammerContext().Err()
+ case <-ctx.Done():
+ return nil, ctx.Err()
+ case <-poll.C:
+ archiver, err = repo_model.GetRepoArchiver(ctx, aReq.RepoID, aReq.Type, aReq.CommitID)
+ if err != nil {
+ return nil, fmt.Errorf("repo_model.GetRepoArchiver: %w", err)
+ }
+ if archiver != nil && archiver.Status == repo_model.ArchiverReady {
+ archiver.ReleaseID = aReq.ReleaseID
+ return archiver, nil
+ }
+ }
+ }
+}
+
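+// doArchive generates the archive synchronously: inside a DB transaction it
+// inserts an ArchiverGenerating record if needed, streams the archive (or
+// bundle) from git through an io.Pipe into storage.RepoArchives, and finally
+// marks the archiver ready.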
+func doArchive(ctx context.Context, r *ArchiveRequest) (*repo_model.RepoArchiver, error) {
+ txCtx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return nil, err
+ }
+ defer committer.Close()
+ ctx, _, finished := process.GetManager().AddContext(txCtx, fmt.Sprintf("ArchiveRequest[%d]: %s", r.RepoID, r.GetArchiveName()))
+ defer finished()
+
+ archiver, err := repo_model.GetRepoArchiver(ctx, r.RepoID, r.Type, r.CommitID)
+ if err != nil {
+ return nil, err
+ }
+
+ if archiver != nil {
+		// FIXME: If another process is generating it, we consider it not ready and
+		// just return. Alternatively, we should wait until the archive is generated.
+ if archiver.Status == repo_model.ArchiverGenerating {
+ return nil, nil
+ }
+ } else {
+ archiver = &repo_model.RepoArchiver{
+ RepoID: r.RepoID,
+ Type: r.Type,
+ CommitID: r.CommitID,
+ Status: repo_model.ArchiverGenerating,
+ }
+ if err := db.Insert(ctx, archiver); err != nil {
+ return nil, err
+ }
+ }
+
+ rPath := archiver.RelativePath()
+ _, err = storage.RepoArchives.Stat(rPath)
+ if err == nil {
+ if archiver.Status == repo_model.ArchiverGenerating {
+ archiver.Status = repo_model.ArchiverReady
+ if err = repo_model.UpdateRepoArchiverStatus(ctx, archiver); err != nil {
+ return nil, err
+ }
+ }
+ return archiver, committer.Commit()
+ }
+
+ if !errors.Is(err, os.ErrNotExist) {
+ return nil, fmt.Errorf("unable to stat archive: %w", err)
+ }
+
+ rd, w := io.Pipe()
+ defer func() {
+ w.Close()
+ rd.Close()
+ }()
+	done := make(chan error, 1) // buffered so the goroutine below can always finish, even if nothing reads the result
+ repo, err := repo_model.GetRepositoryByID(ctx, archiver.RepoID)
+ if err != nil {
+		return nil, fmt.Errorf("repo_model.GetRepositoryByID failed: %w", err)
+ }
+
+ gitRepo, err := gitrepo.OpenRepository(ctx, repo)
+ if err != nil {
+ return nil, err
+ }
+ defer gitRepo.Close()
+
+ go func(done chan error, w *io.PipeWriter, archiver *repo_model.RepoArchiver, gitRepo *git.Repository) {
+ defer func() {
+ if r := recover(); r != nil {
+ done <- fmt.Errorf("%v", r)
+ }
+ }()
+
+ if archiver.Type == git.BUNDLE {
+ err = gitRepo.CreateBundle(
+ ctx,
+ archiver.CommitID,
+ w,
+ )
+ } else {
+ err = gitRepo.CreateArchive(
+ ctx,
+ archiver.Type,
+ w,
+ setting.Repository.PrefixArchiveFiles,
+ archiver.CommitID,
+ )
+ }
+ _ = w.CloseWithError(err)
+ done <- err
+ }(done, w, archiver, gitRepo)
+
+ // TODO: add lfs data to zip
+ // TODO: add submodule data to zip
+
+ if _, err := storage.RepoArchives.Save(rPath, rd, -1); err != nil {
+ return nil, fmt.Errorf("unable to write archive: %w", err)
+ }
+
+ err = <-done
+ if err != nil {
+ return nil, err
+ }
+
+ if archiver.Status == repo_model.ArchiverGenerating {
+ archiver.Status = repo_model.ArchiverReady
+ if err = repo_model.UpdateRepoArchiverStatus(ctx, archiver); err != nil {
+ return nil, err
+ }
+ }
+
+ return archiver, committer.Commit()
+}
+
+// ArchiveRepository satisfies the ArchiveRequest being passed in. Generating
+// an archive may take a while to complete. If the archive already exists,
+// ArchiveRepository will not regenerate it. In all cases, the caller should
+// examine the returned *repo_model.RepoArchiver for the completion status.
+func ArchiveRepository(ctx context.Context, request *ArchiveRequest) (*repo_model.RepoArchiver, error) {
+ return doArchive(ctx, request)
+}
+
+var archiverQueue *queue.WorkerPoolQueue[*ArchiveRequest]
+
+// Init initializes archiver
+func Init(ctx context.Context) error {
+ handler := func(items ...*ArchiveRequest) []*ArchiveRequest {
+ for _, archiveReq := range items {
+ log.Trace("ArchiverData Process: %#v", archiveReq)
+ if _, err := doArchive(ctx, archiveReq); err != nil {
+ log.Error("Archive %v failed: %v", archiveReq, err)
+ }
+ }
+ return nil
+ }
+
+ archiverQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "repo-archive", handler)
+ if archiverQueue == nil {
+ return errors.New("unable to create repo-archive queue")
+ }
+ go graceful.GetManager().RunWithCancel(archiverQueue)
+
+ return nil
+}
+
+// StartArchive pushes the archive request to the queue if it is not already queued
+func StartArchive(request *ArchiveRequest) error {
+ has, err := archiverQueue.Has(request)
+ if err != nil {
+ return err
+ }
+ if has {
+ return nil
+ }
+ return archiverQueue.Push(request)
+}
+
+func deleteOldRepoArchiver(ctx context.Context, archiver *repo_model.RepoArchiver) error {
+ if _, err := db.DeleteByID[repo_model.RepoArchiver](ctx, archiver.ID); err != nil {
+ return err
+ }
+ p := archiver.RelativePath()
+ if err := storage.RepoArchives.Delete(p); err != nil {
+ log.Error("delete repo archive file failed: %v", err)
+ }
+ return nil
+}
+
+// DeleteOldRepositoryArchives deletes old repository archives.
+func DeleteOldRepositoryArchives(ctx context.Context, olderThan time.Duration) error {
+ log.Trace("Doing: ArchiveCleanup")
+
+ for {
+ archivers, err := db.Find[repo_model.RepoArchiver](ctx, repo_model.FindRepoArchiversOption{
+ ListOptions: db.ListOptions{
+ PageSize: 100,
+ Page: 1,
+ },
+ OlderThan: olderThan,
+ })
+ if err != nil {
+ log.Trace("Error: ArchiveClean: %v", err)
+ return err
+ }
+
+ for _, archiver := range archivers {
+ if err := deleteOldRepoArchiver(ctx, archiver); err != nil {
+ return err
+ }
+ }
+ if len(archivers) < 100 {
+ break
+ }
+ }
+
+ log.Trace("Finished: ArchiveCleanup")
+ return nil
+}
+
+// DeleteRepositoryArchives deletes all repositories' archives.
+func DeleteRepositoryArchives(ctx context.Context) error {
+ if err := repo_model.DeleteAllRepoArchives(ctx); err != nil {
+ return err
+ }
+ return storage.Clean(storage.RepoArchives)
+}
diff --git a/services/repository/archiver/archiver_test.go b/services/repository/archiver/archiver_test.go
new file mode 100644
index 0000000..9f822a3
--- /dev/null
+++ b/services/repository/archiver/archiver_test.go
@@ -0,0 +1,134 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package archiver
+
+import (
+ "testing"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/services/contexttest"
+
+ _ "code.gitea.io/gitea/models/actions"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
+
+func TestArchive_Basic(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ ctx, _ := contexttest.MockContext(t, "user27/repo49")
+ firstCommit, secondCommit := "51f84af23134", "aacbdfe9e1c4"
+
+ contexttest.LoadRepo(t, ctx, 49)
+ contexttest.LoadGitRepo(t, ctx)
+ defer ctx.Repo.GitRepo.Close()
+
+ bogusReq, err := NewRequest(ctx, ctx.Repo.Repository.ID, ctx.Repo.GitRepo, firstCommit+".zip")
+ require.NoError(t, err)
+ assert.NotNil(t, bogusReq)
+ assert.EqualValues(t, firstCommit+".zip", bogusReq.GetArchiveName())
+
+ // Check a series of bogus requests.
+ // Step 1, valid commit with a bad extension.
+ bogusReq, err = NewRequest(ctx, ctx.Repo.Repository.ID, ctx.Repo.GitRepo, firstCommit+".dilbert")
+ require.Error(t, err)
+ assert.Nil(t, bogusReq)
+
+ // Step 2, missing commit.
+ bogusReq, err = NewRequest(ctx, ctx.Repo.Repository.ID, ctx.Repo.GitRepo, "dbffff.zip")
+ require.Error(t, err)
+ assert.Nil(t, bogusReq)
+
+ // Step 3, doesn't look like branch/tag/commit.
+ bogusReq, err = NewRequest(ctx, ctx.Repo.Repository.ID, ctx.Repo.GitRepo, "db.zip")
+ require.Error(t, err)
+ assert.Nil(t, bogusReq)
+
+ bogusReq, err = NewRequest(ctx, ctx.Repo.Repository.ID, ctx.Repo.GitRepo, "master.zip")
+ require.NoError(t, err)
+ assert.NotNil(t, bogusReq)
+ assert.EqualValues(t, "master.zip", bogusReq.GetArchiveName())
+
+ bogusReq, err = NewRequest(ctx, ctx.Repo.Repository.ID, ctx.Repo.GitRepo, "test/archive.zip")
+ require.NoError(t, err)
+ assert.NotNil(t, bogusReq)
+ assert.EqualValues(t, "test-archive.zip", bogusReq.GetArchiveName())
+
+ // Now two valid requests, firstCommit with valid extensions.
+ zipReq, err := NewRequest(ctx, ctx.Repo.Repository.ID, ctx.Repo.GitRepo, firstCommit+".zip")
+ require.NoError(t, err)
+ assert.NotNil(t, zipReq)
+
+ tgzReq, err := NewRequest(ctx, ctx.Repo.Repository.ID, ctx.Repo.GitRepo, firstCommit+".tar.gz")
+ require.NoError(t, err)
+ assert.NotNil(t, tgzReq)
+
+ secondReq, err := NewRequest(ctx, ctx.Repo.Repository.ID, ctx.Repo.GitRepo, secondCommit+".zip")
+ require.NoError(t, err)
+ assert.NotNil(t, secondReq)
+
+ inFlight := make([]*ArchiveRequest, 3)
+ inFlight[0] = zipReq
+ inFlight[1] = tgzReq
+ inFlight[2] = secondReq
+
+ ArchiveRepository(db.DefaultContext, zipReq)
+ ArchiveRepository(db.DefaultContext, tgzReq)
+ ArchiveRepository(db.DefaultContext, secondReq)
+
+ // Make sure sending an unprocessed request through doesn't affect the queue
+ // count.
+ ArchiveRepository(db.DefaultContext, zipReq)
+
+ // Sleep two seconds to make sure the queue doesn't change.
+ time.Sleep(2 * time.Second)
+
+ zipReq2, err := NewRequest(ctx, ctx.Repo.Repository.ID, ctx.Repo.GitRepo, firstCommit+".zip")
+ require.NoError(t, err)
+ // This zipReq should match what's sitting in the queue, as we haven't
+ // let it release yet. From the consumer's point of view, this looks like
+ // a long-running archive task.
+ assert.Equal(t, zipReq, zipReq2)
+
+ // We still have the other three stalled at completion, waiting to remove
+ // from archiveInProgress. Try to submit this new one before its
+ // predecessor has cleared out of the queue.
+ ArchiveRepository(db.DefaultContext, zipReq2)
+
+ // Now we'll submit a request and TimedWaitForCompletion twice, before and
+ // after we release it. We should trigger both the timeout and non-timeout
+ // cases.
+ timedReq, err := NewRequest(ctx, ctx.Repo.Repository.ID, ctx.Repo.GitRepo, secondCommit+".tar.gz")
+ require.NoError(t, err)
+ assert.NotNil(t, timedReq)
+ ArchiveRepository(db.DefaultContext, timedReq)
+
+ zipReq2, err = NewRequest(ctx, ctx.Repo.Repository.ID, ctx.Repo.GitRepo, firstCommit+".zip")
+ require.NoError(t, err)
+ // Now, we're guaranteed to have released the original zipReq from the queue.
+ // Ensure that we don't get handed back the released entry somehow, but they
+ // should remain functionally equivalent in all fields. The exception here
+ // is zipReq.cchan, which will be non-nil because it's a completed request.
+ // It's fine to go ahead and set it to nil now.
+
+ assert.Equal(t, zipReq, zipReq2)
+ assert.NotSame(t, zipReq, zipReq2)
+
+ // Same commit, different compression formats should have different names.
+ // Ideally, the extension would match what we originally requested.
+ assert.NotEqual(t, zipReq.GetArchiveName(), tgzReq.GetArchiveName())
+ assert.NotEqual(t, zipReq.GetArchiveName(), secondReq.GetArchiveName())
+}
+
+func TestErrUnknownArchiveFormat(t *testing.T) {
+ err := ErrUnknownArchiveFormat{RequestFormat: "master"}
+ assert.ErrorIs(t, err, ErrUnknownArchiveFormat{})
+}
diff --git a/services/repository/avatar.go b/services/repository/avatar.go
new file mode 100644
index 0000000..38c2621
--- /dev/null
+++ b/services/repository/avatar.go
@@ -0,0 +1,116 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "strconv"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/avatar"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/storage"
+)
+
+// UploadAvatar saves custom avatar for repository.
+// FIXME: split uploads to different subdirs in case we have a massive number of repos.
+func UploadAvatar(ctx context.Context, repo *repo_model.Repository, data []byte) error {
+ avatarData, err := avatar.ProcessAvatarImage(data)
+ if err != nil {
+ return err
+ }
+
+ newAvatar := avatar.HashAvatar(repo.ID, data)
+ if repo.Avatar == newAvatar { // upload the same picture
+ return nil
+ }
+
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ oldAvatarPath := repo.CustomAvatarRelativePath()
+
+ // Users can upload the same image to other repos, so the avatar file is prefixed with the repo ID.
+ // Then, when a repo is removed, only its own avatar file is removed.
+ repo.Avatar = newAvatar
+ if err := repo_model.UpdateRepositoryCols(ctx, repo, "avatar"); err != nil {
+ return fmt.Errorf("UploadAvatar: Update repository avatar: %w", err)
+ }
+
+ if err := storage.SaveFrom(storage.RepoAvatars, repo.CustomAvatarRelativePath(), func(w io.Writer) error {
+ _, err := w.Write(avatarData)
+ return err
+ }); err != nil {
+ return fmt.Errorf("UploadAvatar %s failed: Failed to remove old repo avatar %s: %w", repo.RepoPath(), newAvatar, err)
+ }
+
+ if len(oldAvatarPath) > 0 {
+ if err := storage.RepoAvatars.Delete(oldAvatarPath); err != nil {
+ return fmt.Errorf("UploadAvatar: Failed to remove old repo avatar %s: %w", oldAvatarPath, err)
+ }
+ }
+
+ return committer.Commit()
+}
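+
+// Illustrative usage sketch (not part of this change): uploading avatar bytes
+// read from a hypothetical multipart form file; only the call shape is taken
+// from this file.
+//
+//	data, err := io.ReadAll(formFile)
+//	if err != nil {
+//		return err
+//	}
+//	return UploadAvatar(ctx, repo, data)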
+
+// DeleteAvatar deletes the repo's custom avatar.
+func DeleteAvatar(ctx context.Context, repo *repo_model.Repository) error {
+ // Avatar does not exist
+ if len(repo.Avatar) == 0 {
+ return nil
+ }
+
+ avatarPath := repo.CustomAvatarRelativePath()
+ log.Trace("DeleteAvatar[%d]: %s", repo.ID, avatarPath)
+
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ repo.Avatar = ""
+ if err := repo_model.UpdateRepositoryCols(ctx, repo, "avatar"); err != nil {
+ return fmt.Errorf("DeleteAvatar: Update repository avatar: %w", err)
+ }
+
+ if err := storage.RepoAvatars.Delete(avatarPath); err != nil {
+ return fmt.Errorf("DeleteAvatar: Failed to remove %s: %w", avatarPath, err)
+ }
+
+ return committer.Commit()
+}
+
+// RemoveRandomAvatars removes the randomly generated avatars that were created for repositories
+func RemoveRandomAvatars(ctx context.Context) error {
+ return db.Iterate(ctx, nil, func(ctx context.Context, repository *repo_model.Repository) error {
+ select {
+ case <-ctx.Done():
+ return db.ErrCancelledf("before random avatars removed for %s", repository.FullName())
+ default:
+ }
+ stringifiedID := strconv.FormatInt(repository.ID, 10)
+ if repository.Avatar == stringifiedID {
+ return DeleteAvatar(ctx, repository)
+ }
+ return nil
+ })
+}
+
+// generateAvatar generates the avatar from a template repository
+func generateAvatar(ctx context.Context, templateRepo, generateRepo *repo_model.Repository) error {
+ generateRepo.Avatar = strings.Replace(templateRepo.Avatar, strconv.FormatInt(templateRepo.ID, 10), strconv.FormatInt(generateRepo.ID, 10), 1)
+ if _, err := storage.Copy(storage.RepoAvatars, generateRepo.CustomAvatarRelativePath(), storage.RepoAvatars, templateRepo.CustomAvatarRelativePath()); err != nil {
+ return err
+ }
+
+ return repo_model.UpdateRepositoryCols(ctx, generateRepo, "avatar")
+}
diff --git a/services/repository/avatar_test.go b/services/repository/avatar_test.go
new file mode 100644
index 0000000..f0fe991
--- /dev/null
+++ b/services/repository/avatar_test.go
@@ -0,0 +1,64 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "bytes"
+ "image"
+ "image/png"
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/avatar"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestUploadAvatar(t *testing.T) {
+ // Generate image
+ myImage := image.NewRGBA(image.Rect(0, 0, 1, 1))
+ var buff bytes.Buffer
+ png.Encode(&buff, myImage)
+
+ require.NoError(t, unittest.PrepareTestDatabase())
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 10})
+
+ err := UploadAvatar(db.DefaultContext, repo, buff.Bytes())
+ require.NoError(t, err)
+ assert.Equal(t, avatar.HashAvatar(10, buff.Bytes()), repo.Avatar)
+}
+
+func TestUploadBigAvatar(t *testing.T) {
+ // Generate BIG image
+ myImage := image.NewRGBA(image.Rect(0, 0, 5000, 1))
+ var buff bytes.Buffer
+ png.Encode(&buff, myImage)
+
+ require.NoError(t, unittest.PrepareTestDatabase())
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 10})
+
+ err := UploadAvatar(db.DefaultContext, repo, buff.Bytes())
+ require.Error(t, err)
+}
+
+func TestDeleteAvatar(t *testing.T) {
+ // Generate image
+ myImage := image.NewRGBA(image.Rect(0, 0, 1, 1))
+ var buff bytes.Buffer
+ png.Encode(&buff, myImage)
+
+ require.NoError(t, unittest.PrepareTestDatabase())
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 10})
+
+ err := UploadAvatar(db.DefaultContext, repo, buff.Bytes())
+ require.NoError(t, err)
+
+ err = DeleteAvatar(db.DefaultContext, repo)
+ require.NoError(t, err)
+
+ assert.Equal(t, "", repo.Avatar)
+}
diff --git a/services/repository/branch.go b/services/repository/branch.go
new file mode 100644
index 0000000..7d92053
--- /dev/null
+++ b/services/repository/branch.go
@@ -0,0 +1,604 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/queue"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ notify_service "code.gitea.io/gitea/services/notify"
+ pull_service "code.gitea.io/gitea/services/pull"
+ files_service "code.gitea.io/gitea/services/repository/files"
+
+ "xorm.io/builder"
+)
+
+// CreateNewBranch creates a new repository branch
+func CreateNewBranch(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, gitRepo *git.Repository, oldBranchName, branchName string) (err error) {
+ branch, err := git_model.GetBranch(ctx, repo.ID, oldBranchName)
+ if err != nil {
+ return err
+ }
+
+ return CreateNewBranchFromCommit(ctx, doer, repo, gitRepo, branch.CommitID, branchName)
+}
+
+// Branch contains the branch information
+type Branch struct {
+ DBBranch *git_model.Branch
+ IsProtected bool
+ IsIncluded bool
+ CommitsAhead int
+ CommitsBehind int
+ LatestPullRequest *issues_model.PullRequest
+ MergeMovedOn bool
+}
+
+// LoadBranches loads branches from the repository limited by page & pageSize.
+func LoadBranches(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, isDeletedBranch optional.Option[bool], keyword string, page, pageSize int) (*Branch, []*Branch, int64, error) {
+ defaultDBBranch, err := git_model.GetBranch(ctx, repo.ID, repo.DefaultBranch)
+ if err != nil {
+ return nil, nil, 0, err
+ }
+
+ branchOpts := git_model.FindBranchOptions{
+ RepoID: repo.ID,
+ IsDeletedBranch: isDeletedBranch,
+ ListOptions: db.ListOptions{
+ Page: page,
+ PageSize: pageSize,
+ },
+ Keyword: keyword,
+ ExcludeBranchNames: []string{repo.DefaultBranch},
+ }
+
+ dbBranches, totalNumOfBranches, err := db.FindAndCount[git_model.Branch](ctx, branchOpts)
+ if err != nil {
+ return nil, nil, 0, err
+ }
+
+ if err := git_model.BranchList(dbBranches).LoadDeletedBy(ctx); err != nil {
+ return nil, nil, 0, err
+ }
+ if err := git_model.BranchList(dbBranches).LoadPusher(ctx); err != nil {
+ return nil, nil, 0, err
+ }
+
+ rules, err := git_model.FindRepoProtectedBranchRules(ctx, repo.ID)
+ if err != nil {
+ return nil, nil, 0, err
+ }
+
+ repoIDToRepo := map[int64]*repo_model.Repository{}
+ repoIDToRepo[repo.ID] = repo
+
+ repoIDToGitRepo := map[int64]*git.Repository{}
+ repoIDToGitRepo[repo.ID] = gitRepo
+
+ branches := make([]*Branch, 0, len(dbBranches))
+ for i := range dbBranches {
+ branch, err := loadOneBranch(ctx, repo, dbBranches[i], &rules, repoIDToRepo, repoIDToGitRepo)
+ if err != nil {
+ log.Error("loadOneBranch() on repo #%d, branch '%s' failed: %v", repo.ID, dbBranches[i].Name, err)
+
+ // TODO: Ideally, we would only do this if the branch doesn't exist
+ // anymore. That is not practical to check here currently, so we do
+ // this for all kinds of errors.
+ totalNumOfBranches--
+ continue
+ }
+
+ branches = append(branches, branch)
+ }
+
+ // Always add the default branch
+ log.Debug("loadOneBranch: load default: '%s'", defaultDBBranch.Name)
+ defaultBranch, err := loadOneBranch(ctx, repo, defaultDBBranch, &rules, repoIDToRepo, repoIDToGitRepo)
+ if err != nil {
+ return nil, nil, 0, fmt.Errorf("loadOneBranch: %v", err)
+ }
+
+ return defaultBranch, branches, totalNumOfBranches, nil
+}
+
+func loadOneBranch(ctx context.Context, repo *repo_model.Repository, dbBranch *git_model.Branch, protectedBranches *git_model.ProtectedBranchRules,
+ repoIDToRepo map[int64]*repo_model.Repository,
+ repoIDToGitRepo map[int64]*git.Repository,
+) (*Branch, error) {
+ log.Trace("loadOneBranch: '%s'", dbBranch.Name)
+
+ branchName := dbBranch.Name
+ p := protectedBranches.GetFirstMatched(branchName)
+ isProtected := p != nil
+
+ var divergence *git.DivergeObject
+
+ // it's not the default branch
+ if repo.DefaultBranch != dbBranch.Name && !dbBranch.IsDeleted {
+ var err error
+ divergence, err = files_service.CountDivergingCommits(ctx, repo, git.BranchPrefix+branchName)
+ if err != nil {
+ return nil, fmt.Errorf("CountDivergingCommits: %v", err)
+ }
+ }
+
+ if divergence == nil {
+ // tolerate the error that we cannot get divergence
+ divergence = &git.DivergeObject{Ahead: -1, Behind: -1}
+ }
+
+ pr, err := issues_model.GetLatestPullRequestByHeadInfo(ctx, repo.ID, branchName)
+ if err != nil {
+ return nil, fmt.Errorf("GetLatestPullRequestByHeadInfo: %v", err)
+ }
+ headCommit := dbBranch.CommitID
+
+ mergeMovedOn := false
+ if pr != nil {
+ pr.HeadRepo = repo
+ if err := pr.LoadIssue(ctx); err != nil {
+ return nil, fmt.Errorf("LoadIssue: %v", err)
+ }
+ if repo, ok := repoIDToRepo[pr.BaseRepoID]; ok {
+ pr.BaseRepo = repo
+ } else if err := pr.LoadBaseRepo(ctx); err != nil {
+ return nil, fmt.Errorf("LoadBaseRepo: %v", err)
+ } else {
+ repoIDToRepo[pr.BaseRepoID] = pr.BaseRepo
+ }
+ pr.Issue.Repo = pr.BaseRepo
+
+ if pr.HasMerged {
+ baseGitRepo, ok := repoIDToGitRepo[pr.BaseRepoID]
+ if !ok {
+ baseGitRepo, err = gitrepo.OpenRepository(ctx, pr.BaseRepo)
+ if err != nil {
+ return nil, fmt.Errorf("OpenRepository: %v", err)
+ }
+ defer baseGitRepo.Close()
+ repoIDToGitRepo[pr.BaseRepoID] = baseGitRepo
+ }
+ pullCommit, err := baseGitRepo.GetRefCommitID(pr.GetGitRefName())
+ if err != nil && !git.IsErrNotExist(err) {
+ return nil, fmt.Errorf("GetBranchCommitID: %v", err)
+ }
+ if err == nil && headCommit != pullCommit {
+ // the head has moved on from the merge - we shouldn't delete
+ mergeMovedOn = true
+ }
+ }
+ }
+
+ isIncluded := divergence.Ahead == 0 && repo.DefaultBranch != branchName
+ return &Branch{
+ DBBranch: dbBranch,
+ IsProtected: isProtected,
+ IsIncluded: isIncluded,
+ CommitsAhead: divergence.Ahead,
+ CommitsBehind: divergence.Behind,
+ LatestPullRequest: pr,
+ MergeMovedOn: mergeMovedOn,
+ }, nil
+}
+
+// checkBranchName validates branch name with existing repository branches
+func checkBranchName(ctx context.Context, repo *repo_model.Repository, name string) error {
+ _, err := gitrepo.WalkReferences(ctx, repo, func(_, refName string) error {
+ branchRefName := strings.TrimPrefix(refName, git.BranchPrefix)
+ switch {
+ case branchRefName == name:
+ return git_model.ErrBranchAlreadyExists{
+ BranchName: name,
+ }
+ // If branchRefName is like a/b but we want to create a branch named a, then we have a conflict
+ case strings.HasPrefix(branchRefName, name+"/"):
+ return git_model.ErrBranchNameConflict{
+ BranchName: branchRefName,
+ }
+ // Conversely, if branchRefName is like a but we want to create a branch named a/b, then we also have a conflict
+ case strings.HasPrefix(name, branchRefName+"/"):
+ return git_model.ErrBranchNameConflict{
+ BranchName: branchRefName,
+ }
+ case refName == git.TagPrefix+name:
+ return models.ErrTagAlreadyExists{
+ TagName: name,
+ }
+ }
+ return nil
+ })
+
+ return err
+}
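+
+// For clarity, the rules above resolve as follows (hypothetical names):
+//
+//	existing branch "a",   creating "a"   -> ErrBranchAlreadyExists
+//	existing branch "a/b", creating "a"   -> ErrBranchNameConflict
+//	existing branch "a",   creating "a/b" -> ErrBranchNameConflict
+//	existing tag "a",      creating "a"   -> ErrTagAlreadyExists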
+
+// SyncBranchesToDB syncs the branch information in the database.
+// It will check whether the branches of the repository have never been synced before.
+// If so, it will sync all branches of the repository.
+// Otherwise, it will sync the branches that need to be updated.
+func SyncBranchesToDB(ctx context.Context, repoID, pusherID int64, branchNames, commitIDs []string, getCommit func(commitID string) (*git.Commit, error)) error {
+ // Some design decisions make the code look strange but exist for performance optimization purposes:
+ // 1. Sync branches in a batch to reduce the number of DB queries.
+ // 2. Lazy load commit information since it may be not necessary.
+ // 3. Exit early if synced all branches of git repo when there's no branch in DB.
+ // 4. Check the branches in DB if they are already synced.
+ //
+ // If the user pushes many branches at once, the Git hook will call the internal API in batches, rather than all at once.
+ // See https://github.com/go-gitea/gitea/blob/cb52b17f92e2d2293f7c003649743464492bca48/cmd/hook.go#L27
+ // For the first batch, it will hit optimization 3.
+ // For other batches, it will hit optimization 4.
+
+ if len(branchNames) != len(commitIDs) {
+ return fmt.Errorf("branchNames and commitIDs length not match")
+ }
+
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ branches, err := git_model.GetBranches(ctx, repoID, branchNames)
+ if err != nil {
+ return fmt.Errorf("git_model.GetBranches: %v", err)
+ }
+
+ if len(branches) == 0 {
+ // if the user hasn't visited the UI but pushes directly to a branch after upgrading from 1.20 to 1.21,
+ // we cannot simply insert the branch; we need to check whether any branches exist yet
+ hasBranch, err := db.Exist[git_model.Branch](ctx, git_model.FindBranchOptions{
+ RepoID: repoID,
+ IsDeletedBranch: optional.Some(false),
+ }.ToConds())
+ if err != nil {
+ return err
+ }
+ if !hasBranch {
+ if _, err = repo_module.SyncRepoBranches(ctx, repoID, pusherID); err != nil {
+ return fmt.Errorf("repo_module.SyncRepoBranches %d failed: %v", repoID, err)
+ }
+ return nil
+ }
+ }
+
+ branchMap := make(map[string]*git_model.Branch, len(branches))
+ for _, branch := range branches {
+ branchMap[branch.Name] = branch
+ }
+
+ newBranches := make([]*git_model.Branch, 0, len(branchNames))
+
+ for i, branchName := range branchNames {
+ commitID := commitIDs[i]
+ branch, exist := branchMap[branchName]
+ if exist && branch.CommitID == commitID && !branch.IsDeleted {
+ continue
+ }
+
+ commit, err := getCommit(commitID)
+ if err != nil {
+ return fmt.Errorf("get commit of %s failed: %v", branchName, err)
+ }
+
+ if exist {
+ if _, err := git_model.UpdateBranch(ctx, repoID, pusherID, branchName, commit); err != nil {
+ return fmt.Errorf("git_model.UpdateBranch %d:%s failed: %v", repoID, branchName, err)
+ }
+ continue
+ }
+
+ // if the database has branches but not this one, it is a new branch
+ newBranches = append(newBranches, &git_model.Branch{
+ RepoID: repoID,
+ Name: branchName,
+ CommitID: commit.ID.String(),
+ CommitMessage: commit.Summary(),
+ PusherID: pusherID,
+ CommitTime: timeutil.TimeStamp(commit.Committer.When.Unix()),
+ })
+ }
+
+ if len(newBranches) > 0 {
+ return db.Insert(ctx, newBranches)
+ }
+ return nil
+ })
+}
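+
+// Illustrative caller sketch (not part of this change): a push hook would pass
+// the pushed refs together with a commit loader backed by the open Git
+// repository; variable names are hypothetical.
+//
+//	err := SyncBranchesToDB(ctx, repo.ID, pusher.ID, branchNames, commitIDs,
+//		func(commitID string) (*git.Commit, error) {
+//			return gitRepo.GetCommit(commitID)
+//		})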
+
+// CreateNewBranchFromCommit creates a new repository branch starting from the given commit
+func CreateNewBranchFromCommit(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, gitRepo *git.Repository, commitID, branchName string) (err error) {
+ err = repo.MustNotBeArchived()
+ if err != nil {
+ return err
+ }
+
+ // Check if branch name can be used
+ if err := checkBranchName(ctx, repo, branchName); err != nil {
+ return err
+ }
+
+ if err := git.Push(ctx, repo.RepoPath(), git.PushOptions{
+ Remote: repo.RepoPath(),
+ Branch: fmt.Sprintf("%s:%s%s", commitID, git.BranchPrefix, branchName),
+ Env: repo_module.PushingEnvironment(doer, repo),
+ }); err != nil {
+ if git.IsErrPushOutOfDate(err) || git.IsErrPushRejected(err) {
+ return err
+ }
+ return fmt.Errorf("push: %w", err)
+ }
+ return nil
+}
+
+// RenameBranch renames a branch
+func RenameBranch(ctx context.Context, repo *repo_model.Repository, doer *user_model.User, gitRepo *git.Repository, from, to string) (string, error) {
+ err := repo.MustNotBeArchived()
+ if err != nil {
+ return "", err
+ }
+
+ if from == to {
+ return "target_exist", nil
+ }
+
+ if gitRepo.IsBranchExist(to) {
+ return "target_exist", nil
+ }
+
+ if !gitRepo.IsBranchExist(from) {
+ return "from_not_exist", nil
+ }
+
+ if err := git_model.RenameBranch(ctx, repo, from, to, func(ctx context.Context, isDefault bool) error {
+ err2 := gitRepo.RenameBranch(from, to)
+ if err2 != nil {
+ return err2
+ }
+
+ if isDefault {
+ // if default branch changed, we need to delete all schedules and cron jobs
+ if err := actions_model.DeleteScheduleTaskByRepo(ctx, repo.ID); err != nil {
+ log.Error("DeleteCronTaskByRepo: %v", err)
+ }
+ // cancel running cron jobs of this repository and delete old schedules
+ if err := actions_model.CancelPreviousJobs(
+ ctx,
+ repo.ID,
+ from,
+ "",
+ webhook_module.HookEventSchedule,
+ ); err != nil {
+ log.Error("CancelPreviousJobs: %v", err)
+ }
+
+ err2 = gitrepo.SetDefaultBranch(ctx, repo, to)
+ if err2 != nil {
+ return err2
+ }
+ }
+
+ return nil
+ }); err != nil {
+ return "", err
+ }
+ refNameTo := git.RefNameFromBranch(to)
+ refID, err := gitRepo.GetRefCommitID(refNameTo.String())
+ if err != nil {
+ return "", err
+ }
+
+ notify_service.DeleteRef(ctx, doer, repo, git.RefNameFromBranch(from))
+ notify_service.CreateRef(ctx, doer, repo, refNameTo, refID)
+
+ return "", nil
+}
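+
+// Illustrative handling sketch (not part of this change): the first return
+// value is a user-facing problem key rather than an error, so callers are
+// expected to switch on it.
+//
+//	msg, err := RenameBranch(ctx, repo, doer, gitRepo, "old-name", "new-name")
+//	if err != nil {
+//		return err
+//	}
+//	switch msg {
+//	case "target_exist":
+//		// report that a branch named "new-name" already exists
+//	case "from_not_exist":
+//		// report that branch "old-name" does not exist
+//	}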
+
+// enumerates all branch-related errors
+var (
+ ErrBranchIsDefault = errors.New("branch is default")
+)
+
+// DeleteBranch deletes a branch
+func DeleteBranch(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, gitRepo *git.Repository, branchName string) error {
+ err := repo.MustNotBeArchived()
+ if err != nil {
+ return err
+ }
+
+ if branchName == repo.DefaultBranch {
+ return ErrBranchIsDefault
+ }
+
+ isProtected, err := git_model.IsBranchProtected(ctx, repo.ID, branchName)
+ if err != nil {
+ return err
+ }
+ if isProtected {
+ return git_model.ErrBranchIsProtected
+ }
+
+ rawBranch, err := git_model.GetBranch(ctx, repo.ID, branchName)
+ if err != nil && !git_model.IsErrBranchNotExist(err) {
+ return fmt.Errorf("GetBranch: %v", err)
+ }
+
+ // database branch record not exist or it's a deleted branch
+ notExist := git_model.IsErrBranchNotExist(err) || rawBranch.IsDeleted
+
+ commit, err := gitRepo.GetBranchCommit(branchName)
+ if err != nil {
+ return err
+ }
+
+ if err := db.WithTx(ctx, func(ctx context.Context) error {
+ if !notExist {
+ if err := git_model.AddDeletedBranch(ctx, repo.ID, branchName, doer.ID); err != nil {
+ return err
+ }
+ }
+
+ return gitRepo.DeleteBranch(branchName, git.DeleteBranchOptions{
+ Force: true,
+ })
+ }); err != nil {
+ return err
+ }
+
+ objectFormat := git.ObjectFormatFromName(repo.ObjectFormatName)
+
+ // Don't return error below this
+ if err := PushUpdate(
+ &repo_module.PushUpdateOptions{
+ RefFullName: git.RefNameFromBranch(branchName),
+ OldCommitID: commit.ID.String(),
+ NewCommitID: objectFormat.EmptyObjectID().String(),
+ PusherID: doer.ID,
+ PusherName: doer.Name,
+ RepoUserName: repo.OwnerName,
+ RepoName: repo.Name,
+ }); err != nil {
+ log.Error("Update: %v", err)
+ }
+
+ return nil
+}
+
+// DeleteBranchAfterMerge deletes the head branch of a pull request after the PR has been merged.
+func DeleteBranchAfterMerge(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest, headRepo *git.Repository) error {
+ // Don't clean up when other PRs use this branch as their head branch.
+ exist, err := issues_model.HasUnmergedPullRequestsByHeadInfo(ctx, pr.HeadRepoID, pr.HeadBranch)
+ if err != nil {
+ return err
+ }
+ if exist {
+ return nil
+ }
+
+ // Ensure the doer has write permissions to the head repository of the branch it wants to delete.
+ perm, err := access.GetUserRepoPermission(ctx, pr.HeadRepo, doer)
+ if err != nil {
+ return err
+ }
+ if !perm.CanWrite(unit.TypeCode) {
+ return util.NewPermissionDeniedErrorf("Must have write permission to the head repository")
+ }
+
+ if err := pull_service.RetargetChildrenOnMerge(ctx, doer, pr); err != nil {
+ return err
+ }
+ if err := DeleteBranch(ctx, doer, pr.HeadRepo, headRepo, pr.HeadBranch); err != nil {
+ return err
+ }
+
+ if err := issues_model.AddDeletePRBranchComment(ctx, doer, pr.BaseRepo, pr.Issue.ID, pr.HeadBranch); err != nil {
+ // Do not fail here as branch has already been deleted
+ log.Error("DeleteBranchAfterMerge: %v", err)
+ }
+
+ return nil
+}
+
+// BranchSyncOptions contains the options for a branch sync job.
+type BranchSyncOptions struct {
+ RepoID int64
+}
+
+// branchSyncQueue represents a queue to handle branch sync jobs.
+var branchSyncQueue *queue.WorkerPoolQueue[*BranchSyncOptions]
+
+func handlerBranchSync(items ...*BranchSyncOptions) []*BranchSyncOptions {
+ for _, opts := range items {
+ _, err := repo_module.SyncRepoBranches(graceful.GetManager().ShutdownContext(), opts.RepoID, 0)
+ if err != nil {
+ log.Error("syncRepoBranches [%d] failed: %v", opts.RepoID, err)
+ }
+ }
+ return nil
+}
+
+func addRepoToBranchSyncQueue(repoID int64) error {
+ return branchSyncQueue.Push(&BranchSyncOptions{
+ RepoID: repoID,
+ })
+}
+
+func initBranchSyncQueue(ctx context.Context) error {
+ branchSyncQueue = queue.CreateUniqueQueue(ctx, "branch_sync", handlerBranchSync)
+ if branchSyncQueue == nil {
+ return errors.New("unable to create branch_sync queue")
+ }
+ go graceful.GetManager().RunWithCancel(branchSyncQueue)
+
+ return nil
+}
+
+// AddAllRepoBranchesToSyncQueue queues all non-empty repositories for a branch sync.
+func AddAllRepoBranchesToSyncQueue(ctx context.Context) error {
+ if err := db.Iterate(ctx, builder.Eq{"is_empty": false}, func(ctx context.Context, repo *repo_model.Repository) error {
+ return addRepoToBranchSyncQueue(repo.ID)
+ }); err != nil {
+ return fmt.Errorf("run sync all branches failed: %v", err)
+ }
+ return nil
+}
+
+// SetRepoDefaultBranch sets the default branch of a repository, both in the database and in the Git repository.
+func SetRepoDefaultBranch(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, newBranchName string) error {
+ if repo.DefaultBranch == newBranchName {
+ return nil
+ }
+
+ if !gitRepo.IsBranchExist(newBranchName) {
+ return git_model.ErrBranchNotExist{
+ BranchName: newBranchName,
+ }
+ }
+
+ oldDefaultBranchName := repo.DefaultBranch
+ repo.DefaultBranch = newBranchName
+ if err := db.WithTx(ctx, func(ctx context.Context) error {
+ if err := repo_model.UpdateDefaultBranch(ctx, repo); err != nil {
+ return err
+ }
+
+ if err := actions_model.DeleteScheduleTaskByRepo(ctx, repo.ID); err != nil {
+ log.Error("DeleteCronTaskByRepo: %v", err)
+ }
+ // cancel running cron jobs of this repository and delete old schedules
+ if err := actions_model.CancelPreviousJobs(
+ ctx,
+ repo.ID,
+ oldDefaultBranchName,
+ "",
+ webhook_module.HookEventSchedule,
+ ); err != nil {
+ log.Error("CancelPreviousJobs: %v", err)
+ }
+
+ if err := gitrepo.SetDefaultBranch(ctx, repo, newBranchName); err != nil {
+ if !git.IsErrUnsupportedVersion(err) {
+ return err
+ }
+ }
+ return nil
+ }); err != nil {
+ return err
+ }
+
+ notify_service.ChangeDefaultBranch(ctx, repo)
+
+ return nil
+}
diff --git a/services/repository/cache.go b/services/repository/cache.go
new file mode 100644
index 0000000..b0811a9
--- /dev/null
+++ b/services/repository/cache.go
@@ -0,0 +1,30 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/cache"
+ "code.gitea.io/gitea/modules/git"
+)
+
+// CacheRef caches the last commit information of the given branch or tag
+func CacheRef(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, fullRefName git.RefName) error {
+ commit, err := gitRepo.GetCommit(fullRefName.String())
+ if err != nil {
+ return err
+ }
+
+ if gitRepo.LastCommitCache == nil {
+ commitsCount, err := cache.GetInt64(repo.GetCommitsCountCacheKey(fullRefName.ShortName(), true), commit.CommitsCount)
+ if err != nil {
+ return err
+ }
+ gitRepo.LastCommitCache = git.NewLastCommitCache(commitsCount, repo.FullName(), gitRepo, cache.GetCache())
+ }
+
+ return commit.CacheCommit(ctx)
+}
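+
+// Illustrative usage sketch (not part of this change): warming the last-commit
+// cache for the default branch, using git.RefNameFromBranch as seen elsewhere
+// in this package.
+//
+//	refName := git.RefNameFromBranch(repo.DefaultBranch)
+//	if err := CacheRef(ctx, repo, gitRepo, refName); err != nil {
+//		log.Error("CacheRef: %v", err)
+//	}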
diff --git a/services/repository/check.go b/services/repository/check.go
new file mode 100644
index 0000000..5cdcc14
--- /dev/null
+++ b/services/repository/check.go
@@ -0,0 +1,202 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+ "fmt"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ system_model "code.gitea.io/gitea/models/system"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/util"
+
+ "xorm.io/builder"
+)
+
+// GitFsckRepos calls 'git fsck' to check repository health.
+func GitFsckRepos(ctx context.Context, timeout time.Duration, args git.TrustedCmdArgs) error {
+ log.Trace("Doing: GitFsck")
+
+ if err := db.Iterate(
+ ctx,
+ builder.Expr("id>0 AND is_fsck_enabled=?", true),
+ func(ctx context.Context, repo *repo_model.Repository) error {
+ select {
+ case <-ctx.Done():
+ return db.ErrCancelledf("before fsck of %s", repo.FullName())
+ default:
+ }
+ return GitFsckRepo(ctx, repo, timeout, args)
+ },
+ ); err != nil {
+ log.Trace("Error: GitFsck: %v", err)
+ return err
+ }
+
+ log.Trace("Finished: GitFsck")
+ return nil
+}
+
+// GitFsckRepo calls 'git fsck' to check an individual repository's health.
+func GitFsckRepo(ctx context.Context, repo *repo_model.Repository, timeout time.Duration, args git.TrustedCmdArgs) error {
+ log.Trace("Running health check on repository %-v", repo)
+ repoPath := repo.RepoPath()
+ if err := git.Fsck(ctx, repoPath, timeout, args); err != nil {
+ log.Warn("Failed to health check repository (%-v): %v", repo, err)
+ if err = system_model.CreateRepositoryNotice("Failed to health check repository (%s): %v", repo.FullName(), err); err != nil {
+ log.Error("CreateRepositoryNotice: %v", err)
+ }
+ }
+ return nil
+}
+
+// GitGcRepos calls 'git gc' to remove unnecessary files and optimize the local repository
+func GitGcRepos(ctx context.Context, timeout time.Duration, args git.TrustedCmdArgs) error {
+ log.Trace("Doing: GitGcRepos")
+
+ if err := db.Iterate(
+ ctx,
+ builder.Gt{"id": 0},
+ func(ctx context.Context, repo *repo_model.Repository) error {
+ select {
+ case <-ctx.Done():
+ return db.ErrCancelledf("before GC of %s", repo.FullName())
+ default:
+ }
+ // we can ignore the error here because it will be logged in GitGcRepo
+ _ = GitGcRepo(ctx, repo, timeout, args)
+ return nil
+ },
+ ); err != nil {
+ return err
+ }
+
+ log.Trace("Finished: GitGcRepos")
+ return nil
+}
+
+// GitGcRepo calls 'git gc' to remove unnecessary files and optimize the local repository
+func GitGcRepo(ctx context.Context, repo *repo_model.Repository, timeout time.Duration, args git.TrustedCmdArgs) error {
+ log.Trace("Running git gc on %-v", repo)
+ command := git.NewCommand(ctx, "gc").AddArguments(args...).
+ SetDescription(fmt.Sprintf("Repository Garbage Collection: %s", repo.FullName()))
+ var stdout string
+ var err error
+ stdout, _, err = command.RunStdString(&git.RunOpts{Timeout: timeout, Dir: repo.RepoPath()})
+ if err != nil {
+ log.Error("Repository garbage collection failed for %-v. Stdout: %s\nError: %v", repo, stdout, err)
+ desc := fmt.Sprintf("Repository garbage collection failed for %s. Stdout: %s\nError: %v", repo.RepoPath(), stdout, err)
+ if err := system_model.CreateRepositoryNotice(desc); err != nil {
+ log.Error("CreateRepositoryNotice: %v", err)
+ }
+ return fmt.Errorf("Repository garbage collection failed in repo: %s: Error: %w", repo.FullName(), err)
+ }
+
+ // Now update the size of the repository
+ if err := repo_module.UpdateRepoSize(ctx, repo); err != nil {
+ log.Error("Updating size as part of garbage collection failed for %-v. Stdout: %s\nError: %v", repo, stdout, err)
+ desc := fmt.Sprintf("Updating size as part of garbage collection failed for %s. Stdout: %s\nError: %v", repo.RepoPath(), stdout, err)
+ if err := system_model.CreateRepositoryNotice(desc); err != nil {
+ log.Error("CreateRepositoryNotice: %v", err)
+ }
+ return fmt.Errorf("Updating size as part of garbage collection failed in repo: %s: Error: %w", repo.FullName(), err)
+ }
+
+ return nil
+}
+
+func gatherMissingRepoRecords(ctx context.Context) (repo_model.RepositoryList, error) {
+ repos := make([]*repo_model.Repository, 0, 10)
+ if err := db.Iterate(
+ ctx,
+ builder.Gt{"id": 0},
+ func(ctx context.Context, repo *repo_model.Repository) error {
+ select {
+ case <-ctx.Done():
+ return db.ErrCancelledf("during gathering missing repo records before checking %s", repo.FullName())
+ default:
+ }
+ isDir, err := util.IsDir(repo.RepoPath())
+ if err != nil {
+ return fmt.Errorf("Unable to check dir for %s. %w", repo.FullName(), err)
+ }
+ if !isDir {
+ repos = append(repos, repo)
+ }
+ return nil
+ },
+ ); err != nil {
+ if strings.HasPrefix(err.Error(), "Aborted gathering missing repo") {
+ return nil, err
+ }
+ if err2 := system_model.CreateRepositoryNotice("gatherMissingRepoRecords: %v", err); err2 != nil {
+ log.Error("CreateRepositoryNotice: %v", err2)
+ }
+ return nil, err
+ }
+ return repos, nil
+}
+
+// DeleteMissingRepositories deletes all repository records that lost Git files.
+func DeleteMissingRepositories(ctx context.Context, doer *user_model.User) error {
+ repos, err := gatherMissingRepoRecords(ctx)
+ if err != nil {
+ return err
+ }
+
+ if len(repos) == 0 {
+ return nil
+ }
+
+ for _, repo := range repos {
+ select {
+ case <-ctx.Done():
+ return db.ErrCancelledf("during DeleteMissingRepositories before %s", repo.FullName())
+ default:
+ }
+ log.Trace("Deleting %d/%d...", repo.OwnerID, repo.ID)
+ if err := DeleteRepositoryDirectly(ctx, doer, repo.ID); err != nil {
+ log.Error("Failed to DeleteRepository %-v: Error: %v", repo, err)
+ if err2 := system_model.CreateRepositoryNotice("Failed to DeleteRepository %s [%d]: Error: %v", repo.FullName(), repo.ID, err); err2 != nil {
+ log.Error("CreateRepositoryNotice: %v", err)
+ }
+ }
+ }
+ return nil
+}
+
+// ReinitMissingRepositories reinitializes all repository records that lost Git files.
+func ReinitMissingRepositories(ctx context.Context) error {
+ repos, err := gatherMissingRepoRecords(ctx)
+ if err != nil {
+ return err
+ }
+
+ if len(repos) == 0 {
+ return nil
+ }
+
+ for _, repo := range repos {
+ select {
+ case <-ctx.Done():
+ return db.ErrCancelledf("during ReinitMissingRepositories before %s", repo.FullName())
+ default:
+ }
+ log.Trace("Initializing %d/%d...", repo.OwnerID, repo.ID)
+ if err := git.InitRepository(ctx, repo.RepoPath(), true, repo.ObjectFormatName); err != nil {
+ log.Error("Unable (re)initialize repository %d at %s. Error: %v", repo.ID, repo.RepoPath(), err)
+ if err2 := system_model.CreateRepositoryNotice("InitRepository [%d]: %v", repo.ID, err); err2 != nil {
+ log.Error("CreateRepositoryNotice: %v", err2)
+ }
+ }
+ }
+ return nil
+}
diff --git a/services/repository/collaboration.go b/services/repository/collaboration.go
new file mode 100644
index 0000000..dccc124
--- /dev/null
+++ b/services/repository/collaboration.go
@@ -0,0 +1,52 @@
+// Copyright 2016 The Gogs Authors. All rights reserved.
+// Copyright 2020 The Gitea Authors.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/db"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+)
+
+// DeleteCollaboration removes collaboration relation between the user and repository.
+func DeleteCollaboration(ctx context.Context, repo *repo_model.Repository, uid int64) (err error) {
+ collaboration := &repo_model.Collaboration{
+ RepoID: repo.ID,
+ UserID: uid,
+ }
+
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ if has, err := db.GetEngine(ctx).Delete(collaboration); err != nil {
+ return err
+ } else if has == 0 {
+ return committer.Commit()
+ }
+ if err = access_model.RecalculateAccesses(ctx, repo); err != nil {
+ return err
+ }
+
+ if err = repo_model.WatchRepo(ctx, uid, repo.ID, false); err != nil {
+ return err
+ }
+
+ if err = models.ReconsiderWatches(ctx, repo, uid); err != nil {
+ return err
+ }
+
+ // Unassign the user from any issues they have been assigned to in the repository
+ if err := models.ReconsiderRepoIssuesAssignee(ctx, repo, uid); err != nil {
+ return err
+ }
+
+ return committer.Commit()
+}
diff --git a/services/repository/collaboration_test.go b/services/repository/collaboration_test.go
new file mode 100644
index 0000000..c087018
--- /dev/null
+++ b/services/repository/collaboration_test.go
@@ -0,0 +1,28 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestRepository_DeleteCollaboration(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 4})
+ require.NoError(t, repo.LoadOwner(db.DefaultContext))
+ require.NoError(t, DeleteCollaboration(db.DefaultContext, repo, 4))
+ unittest.AssertNotExistsBean(t, &repo_model.Collaboration{RepoID: repo.ID, UserID: 4})
+
+ require.NoError(t, DeleteCollaboration(db.DefaultContext, repo, 4))
+ unittest.AssertNotExistsBean(t, &repo_model.Collaboration{RepoID: repo.ID, UserID: 4})
+
+ unittest.CheckConsistencyFor(t, &repo_model.Repository{ID: repo.ID})
+}
diff --git a/services/repository/commit.go b/services/repository/commit.go
new file mode 100644
index 0000000..e8c0262
--- /dev/null
+++ b/services/repository/commit.go
@@ -0,0 +1,55 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/modules/util"
+ gitea_ctx "code.gitea.io/gitea/services/context"
+)
+
+type ContainedLinks struct { // TODO: better name?
+ Branches []*namedLink `json:"branches"`
+ Tags []*namedLink `json:"tags"`
+ DefaultBranch string `json:"default_branch"`
+}
+
+type namedLink struct { // TODO: better name?
+ Name string `json:"name"`
+ WebLink string `json:"web_link"`
+}
+
+// LoadBranchesAndTags returns the branches and tags that contain the given commit, with a web link for each
+func LoadBranchesAndTags(ctx context.Context, baseRepo *gitea_ctx.Repository, commitSHA string) (*ContainedLinks, error) {
+ containedTags, err := baseRepo.GitRepo.ListOccurrences(ctx, "tag", commitSHA)
+ if err != nil {
+ return nil, fmt.Errorf("encountered a problem while querying %s: %w", "tags", err)
+ }
+ containedBranches, err := baseRepo.GitRepo.ListOccurrences(ctx, "branch", commitSHA)
+ if err != nil {
+ return nil, fmt.Errorf("encountered a problem while querying %s: %w", "branches", err)
+ }
+
+ result := &ContainedLinks{
+ DefaultBranch: baseRepo.Repository.DefaultBranch,
+ Branches: make([]*namedLink, 0, len(containedBranches)),
+ Tags: make([]*namedLink, 0, len(containedTags)),
+ }
+ for _, tag := range containedTags {
+ // TODO: Use a common method to get the link to a branch/tag instead of hard-coding it here
+ result.Tags = append(result.Tags, &namedLink{
+ Name: tag,
+ WebLink: fmt.Sprintf("%s/src/tag/%s", baseRepo.RepoLink, util.PathEscapeSegments(tag)),
+ })
+ }
+ for _, branch := range containedBranches {
+ result.Branches = append(result.Branches, &namedLink{
+ Name: branch,
+ WebLink: fmt.Sprintf("%s/src/branch/%s", baseRepo.RepoLink, util.PathEscapeSegments(branch)),
+ })
+ }
+ return result, nil
+}
diff --git a/services/repository/commitstatus/commitstatus.go b/services/repository/commitstatus/commitstatus.go
new file mode 100644
index 0000000..5c63020
--- /dev/null
+++ b/services/repository/commitstatus/commitstatus.go
@@ -0,0 +1,202 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package commitstatus
+
+import (
+ "context"
+ "crypto/sha256"
+ "fmt"
+ "slices"
+
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/cache"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/services/automerge"
+)
+
+func getCacheKey(repoID int64, branchName string) string {
+ hashBytes := sha256.Sum256([]byte(fmt.Sprintf("%d:%s", repoID, branchName)))
+ return fmt.Sprintf("commit_status:%x", hashBytes)
+}
+
+type commitStatusCacheValue struct {
+ State string `json:"state"`
+ TargetURL string `json:"target_url"`
+}
+
+func getCommitStatusCache(repoID int64, branchName string) *commitStatusCacheValue {
+ c := cache.GetCache()
+ statusStr, ok := c.Get(getCacheKey(repoID, branchName)).(string)
+ if ok && statusStr != "" {
+ var cv commitStatusCacheValue
+ err := json.Unmarshal([]byte(statusStr), &cv)
+ if err == nil && cv.State != "" {
+ return &cv
+ }
+ if err != nil {
+ log.Warn("getCommitStatusCache: json.Unmarshal failed: %v", err)
+ }
+ }
+ return nil
+}
+
+func updateCommitStatusCache(repoID int64, branchName string, state api.CommitStatusState, targetURL string) error {
+ c := cache.GetCache()
+ bs, err := json.Marshal(commitStatusCacheValue{
+ State: state.String(),
+ TargetURL: targetURL,
+ })
+ if err != nil {
+ log.Warn("updateCommitStatusCache: json.Marshal failed: %v", err)
+ return nil
+ }
+ return c.Put(getCacheKey(repoID, branchName), string(bs), 3*24*60)
+}
+
+func deleteCommitStatusCache(repoID int64, branchName string) error {
+ c := cache.GetCache()
+ return c.Delete(getCacheKey(repoID, branchName))
+}
+
+// CreateCommitStatus creates a new CommitStatus given a bunch of parameters
+// NOTE: All text values will be trimmed of leading and trailing whitespace.
+// Requires: Repo, Creator, SHA
+func CreateCommitStatus(ctx context.Context, repo *repo_model.Repository, creator *user_model.User, sha string, status *git_model.CommitStatus) error {
+ repoPath := repo.RepoPath()
+
+ // confirm that the commit exists
+ gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, repo)
+ if err != nil {
+ return fmt.Errorf("OpenRepository[%s]: %w", repoPath, err)
+ }
+ defer closer.Close()
+
+ objectFormat := git.ObjectFormatFromName(repo.ObjectFormatName)
+
+ commit, err := gitRepo.GetCommit(sha)
+ if err != nil {
+ return fmt.Errorf("GetCommit[%s]: %w", sha, err)
+ }
+ if len(sha) != objectFormat.FullLength() {
+ // use complete commit sha
+ sha = commit.ID.String()
+ }
+
+ if err := db.WithTx(ctx, func(ctx context.Context) error {
+ if err := git_model.NewCommitStatus(ctx, git_model.NewCommitStatusOptions{
+ Repo: repo,
+ Creator: creator,
+ SHA: commit.ID,
+ CommitStatus: status,
+ }); err != nil {
+ return fmt.Errorf("NewCommitStatus[repo_id: %d, user_id: %d, sha: %s]: %w", repo.ID, creator.ID, sha, err)
+ }
+
+ return git_model.UpdateCommitStatusSummary(ctx, repo.ID, commit.ID.String())
+ }); err != nil {
+ return err
+ }
+
+ defaultBranchCommit, err := gitRepo.GetBranchCommit(repo.DefaultBranch)
+ if err != nil {
+ return fmt.Errorf("GetBranchCommit[%s]: %w", repo.DefaultBranch, err)
+ }
+
+ if commit.ID.String() == defaultBranchCommit.ID.String() { // since one commit status was updated, the cached combined commit status is now invalid
+ if err := deleteCommitStatusCache(repo.ID, repo.DefaultBranch); err != nil {
+ log.Error("deleteCommitStatusCache[%d:%s] failed: %v", repo.ID, repo.DefaultBranch, err)
+ }
+ }
+
+ if status.State.IsSuccess() {
+ if err := automerge.StartPRCheckAndAutoMergeBySHA(ctx, sha, repo); err != nil {
+ return fmt.Errorf("MergeScheduledPullRequest[repo_id: %d, user_id: %d, sha: %s]: %w", repo.ID, creator.ID, sha, err)
+ }
+ }
+
+ return nil
+}
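+
+// Illustrative usage sketch (not part of this change): reporting a successful
+// CI run for a commit. The field values are hypothetical; the types match
+// git_model.CommitStatus and the api package as used above.
+//
+//	err := CreateCommitStatus(ctx, repo, doer, sha, &git_model.CommitStatus{
+//		State:       api.CommitStatusSuccess,
+//		TargetURL:   "https://ci.example.com/builds/123",
+//		Context:     "ci/build",
+//		Description: "build succeeded",
+//	})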
+
+// FindReposLastestCommitStatuses loads the latest combined commit status of each repository's default branch, using a cache
+func FindReposLastestCommitStatuses(ctx context.Context, repos []*repo_model.Repository) ([]*git_model.CommitStatus, error) {
+ if len(repos) == 0 {
+ return nil, nil
+ }
+ results := make([]*git_model.CommitStatus, len(repos))
+ for i, repo := range repos {
+ if cv := getCommitStatusCache(repo.ID, repo.DefaultBranch); cv != nil {
+ results[i] = &git_model.CommitStatus{
+ State: api.CommitStatusState(cv.State),
+ TargetURL: cv.TargetURL,
+ }
+ }
+ }
+
+ // collect the latest commit of each repo
+ // at most there are dozens of repos (limited by MaxResponseItems), so it's not a big problem at the moment
+ repoBranchNames := make(map[int64]string, len(repos))
+ for i, repo := range repos {
+ if results[i] == nil {
+ repoBranchNames[repo.ID] = repo.DefaultBranch
+ }
+ }
+
+ repoIDsToLatestCommitSHAs, err := git_model.FindBranchesByRepoAndBranchName(ctx, repoBranchNames)
+ if err != nil {
+ return nil, fmt.Errorf("FindBranchesByRepoAndBranchName: %v", err)
+ }
+
+ var repoSHAs []git_model.RepoSHA
+ for id, sha := range repoIDsToLatestCommitSHAs {
+ repoSHAs = append(repoSHAs, git_model.RepoSHA{RepoID: id, SHA: sha})
+ }
+
+ summaryResults, err := git_model.GetLatestCommitStatusForRepoAndSHAs(ctx, repoSHAs)
+ if err != nil {
+ return nil, fmt.Errorf("GetLatestCommitStatusForRepoAndSHAs: %v", err)
+ }
+
+ for _, summary := range summaryResults {
+ for i, repo := range repos {
+ if repo.ID == summary.RepoID {
+ results[i] = summary
+ _ = slices.DeleteFunc(repoSHAs, func(repoSHA git_model.RepoSHA) bool {
+ return repoSHA.RepoID == repo.ID
+ })
+ if results[i].State != "" {
+ if err := updateCommitStatusCache(repo.ID, repo.DefaultBranch, results[i].State, results[i].TargetURL); err != nil {
+ log.Error("updateCommitStatusCache[%d:%s] failed: %v", repo.ID, repo.DefaultBranch, err)
+ }
+ }
+ break
+ }
+ }
+ }
+
+ // call the database O(1) times to get the commit statuses for all repos
+ repoToItsLatestCommitStatuses, err := git_model.GetLatestCommitStatusForPairs(ctx, repoSHAs)
+ if err != nil {
+ return nil, fmt.Errorf("GetLatestCommitStatusForPairs: %v", err)
+ }
+
+ for i, repo := range repos {
+ if results[i] == nil {
+ results[i] = git_model.CalcCommitStatus(repoToItsLatestCommitStatuses[repo.ID])
+ if results[i] != nil && results[i].State != "" {
+ if err := updateCommitStatusCache(repo.ID, repo.DefaultBranch, results[i].State, results[i].TargetURL); err != nil {
+ log.Error("updateCommitStatusCache[%d:%s] failed: %v", repo.ID, repo.DefaultBranch, err)
+ }
+ }
+ }
+ }
+
+ return results, nil
+}
diff --git a/services/repository/contributors_graph.go b/services/repository/contributors_graph.go
new file mode 100644
index 0000000..4887181
--- /dev/null
+++ b/services/repository/contributors_graph.go
@@ -0,0 +1,321 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "bufio"
+ "context"
+ "errors"
+ "fmt"
+ "os"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+
+ "code.gitea.io/gitea/models/avatars"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+
+ "code.forgejo.org/go-chi/cache"
+)
+
+const contributorStatsCacheKey = "GetContributorStats/%s/%s"
+
+var (
+ ErrAwaitGeneration = errors.New("generation took longer than ")
+ awaitGenerationTime = time.Second * 5
+ generateLock = sync.Map{}
+)
+
+type WeekData struct {
+ Week int64 `json:"week"` // Starting day of the week as Unix timestamp
+ Additions int `json:"additions"` // Number of additions in that week
+ Deletions int `json:"deletions"` // Number of deletions in that week
+ Commits int `json:"commits"` // Number of commits in that week
+}
+
+// ContributorData represents statistical git commit count data
+type ContributorData struct {
+ Name string `json:"name"` // Display name of the contributor
+ Login string `json:"login"` // Login name of the contributor in case it exists
+ AvatarLink string `json:"avatar_link"`
+ HomeLink string `json:"home_link"`
+ TotalCommits int64 `json:"total_commits"`
+ Weeks map[int64]*WeekData `json:"weeks"`
+}
+
+// ExtendedCommitStats contains information for commit stats with author data
+type ExtendedCommitStats struct {
+ Author *api.CommitUser `json:"author"`
+ Stats *api.CommitStats `json:"stats"`
+}
+
+const layout = time.DateOnly
+
+func findLastSundayBeforeDate(dateStr string) (string, error) {
+ date, err := time.Parse(layout, dateStr)
+ if err != nil {
+ return "", err
+ }
+
+ weekday := date.Weekday()
+ daysToSubtract := int(weekday) - int(time.Sunday)
+ if daysToSubtract < 0 {
+ daysToSubtract += 7
+ }
+
+ lastSunday := date.AddDate(0, 0, -daysToSubtract)
+ return lastSunday.Format(layout), nil
+}
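+
+// Worked example (not part of this change): a Wednesday maps to the preceding
+// Sunday, while a Sunday maps to itself.
+//
+//	findLastSundayBeforeDate("2024-01-10") // "2024-01-07" (Wednesday -> Sunday)
+//	findLastSundayBeforeDate("2024-01-07") // "2024-01-07" (already Sunday)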
+
+// GetContributorStats returns contributors stats for git commits for given revision or default branch
+func GetContributorStats(ctx context.Context, cache cache.Cache, repo *repo_model.Repository, revision string) (map[string]*ContributorData, error) {
+ // as GetContributorStats is resource-intensive, we cache the result
+ cacheKey := fmt.Sprintf(contributorStatsCacheKey, repo.FullName(), revision)
+ if !cache.IsExist(cacheKey) {
+ genReady := make(chan struct{})
+
+ // don't start multiple async generations
+ _, run := generateLock.Load(cacheKey)
+ if run {
+ return nil, ErrAwaitGeneration
+ }
+
+ generateLock.Store(cacheKey, struct{}{})
+ // run generation async
+ go generateContributorStats(genReady, cache, cacheKey, repo, revision)
+
+ select {
+ case <-time.After(awaitGenerationTime):
+ return nil, ErrAwaitGeneration
+ case <-genReady:
+ // we got generation ready before timeout
+ break
+ }
+ }
+ // TODO: renew timeout of cache cache.UpdateTimeout(cacheKey, contributorStatsCacheTimeout)
+
+ switch v := cache.Get(cacheKey).(type) {
+ case error:
+ return nil, v
+ case string:
+ var cachedStats map[string]*ContributorData
+ return cachedStats, json.Unmarshal([]byte(v), &cachedStats)
+ default:
+ return nil, fmt.Errorf("unexpected type in cache detected")
+ }
+}
+
+// getExtendedCommitStats returns the list of *ExtendedCommitStats for the given revision
+func getExtendedCommitStats(repo *git.Repository, revision string /*, limit int */) ([]*ExtendedCommitStats, error) {
+ baseCommit, err := repo.GetCommit(revision)
+ if err != nil {
+ return nil, err
+ }
+ stdoutReader, stdoutWriter, err := os.Pipe()
+ if err != nil {
+ return nil, err
+ }
+ defer func() {
+ _ = stdoutReader.Close()
+ _ = stdoutWriter.Close()
+ }()
+
+ gitCmd := git.NewCommand(repo.Ctx, "log", "--shortstat", "--no-merges", "--pretty=format:---%n%aN%n%aE%n%as", "--reverse")
+ // AddOptionFormat("--max-count=%d", limit)
+ gitCmd.AddDynamicArguments(baseCommit.ID.String())
+
+ var extendedCommitStats []*ExtendedCommitStats
+ stderr := new(strings.Builder)
+ err = gitCmd.Run(&git.RunOpts{
+ Dir: repo.Path,
+ Stdout: stdoutWriter,
+ Stderr: stderr,
+ PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error {
+ _ = stdoutWriter.Close()
+ scanner := bufio.NewScanner(stdoutReader)
+
+ for scanner.Scan() {
+ line := strings.TrimSpace(scanner.Text())
+ if line != "---" {
+ continue
+ }
+ scanner.Scan()
+ authorName := strings.TrimSpace(scanner.Text())
+ scanner.Scan()
+ authorEmail := strings.TrimSpace(scanner.Text())
+ scanner.Scan()
+ date := strings.TrimSpace(scanner.Text())
+ scanner.Scan()
+ stats := strings.TrimSpace(scanner.Text())
+ if authorName == "" || authorEmail == "" || date == "" || stats == "" {
+ // FIXME: find a better way to parse the output so that we will handle this properly
+ log.Warn("Something is wrong with git log output, skipping...")
+ log.Warn("authorName: %s, authorEmail: %s, date: %s, stats: %s", authorName, authorEmail, date, stats)
+ continue
+ }
+ // 1 file changed, 1 insertion(+), 1 deletion(-)
+ fields := strings.Split(stats, ",")
+
+ commitStats := api.CommitStats{}
+ for _, field := range fields[1:] {
+ parts := strings.Split(strings.TrimSpace(field), " ")
+ value, contributionType := parts[0], parts[1]
+ amount, _ := strconv.Atoi(value)
+
+ if strings.HasPrefix(contributionType, "insertion") {
+ commitStats.Additions = amount
+ } else {
+ commitStats.Deletions = amount
+ }
+ }
+ commitStats.Total = commitStats.Additions + commitStats.Deletions
+ scanner.Text() // empty line at the end
+
+ res := &ExtendedCommitStats{
+ Author: &api.CommitUser{
+ Identity: api.Identity{
+ Name: authorName,
+ Email: authorEmail,
+ },
+ Date: date,
+ },
+ Stats: &commitStats,
+ }
+ extendedCommitStats = append(extendedCommitStats, res)
+ }
+ _ = stdoutReader.Close()
+ return nil
+ },
+ })
+ if err != nil {
+ return nil, fmt.Errorf("Failed to get ContributorsCommitStats for repository.\nError: %w\nStderr: %s", err, stderr)
+ }
+
+ return extendedCommitStats, nil
+}
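+
+// For reference (not part of this change), each block consumed by the scanner
+// above has the shape produced by the --pretty and --shortstat flags; the
+// values below are hypothetical:
+//
+//	---
+//	Jane Doe
+//	jane@example.com
+//	2024-01-10
+//	 2 files changed, 10 insertions(+), 3 deletions(-)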
+
+func generateContributorStats(genDone chan struct{}, cache cache.Cache, cacheKey string, repo *repo_model.Repository, revision string) {
+ ctx := graceful.GetManager().HammerContext()
+
+ gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, repo)
+ if err != nil {
+ log.Error("OpenRepository[repo=%q]: %v", repo.FullName(), err)
+ return
+ }
+ defer closer.Close()
+
+ if len(revision) == 0 {
+ revision = repo.DefaultBranch
+ }
+ extendedCommitStats, err := getExtendedCommitStats(gitRepo, revision)
+ if err != nil {
+ log.Error("getExtendedCommitStats[repo=%q revision=%q]: %v", repo.FullName(), revision, err)
+ return
+ }
+ if len(extendedCommitStats) == 0 {
+ log.Error("No commit stats were returned [repo=%q revision=%q]", repo.FullName(), revision)
+ return
+ }
+
+ layout := time.DateOnly
+
+ unknownUserAvatarLink := user_model.NewGhostUser().AvatarLinkWithSize(ctx, 0)
+ contributorsCommitStats := make(map[string]*ContributorData)
+ contributorsCommitStats["total"] = &ContributorData{
+ Name: "Total",
+ Weeks: make(map[int64]*WeekData),
+ }
+ total := contributorsCommitStats["total"]
+
+ for _, v := range extendedCommitStats {
+ userEmail := v.Author.Email
+ if len(userEmail) == 0 {
+ continue
+ }
+ u, _ := user_model.GetUserByEmail(ctx, userEmail)
+ if u != nil {
+ // update userEmail with the user's primary email address so
+ // that different mail addresses will be linked to the same account
+ userEmail = u.GetEmail()
+ }
+ // duplicated logic
+ if _, ok := contributorsCommitStats[userEmail]; !ok {
+ if u == nil {
+ avatarLink := avatars.GenerateEmailAvatarFastLink(ctx, userEmail, 0)
+ if avatarLink == "" {
+ avatarLink = unknownUserAvatarLink
+ }
+ contributorsCommitStats[userEmail] = &ContributorData{
+ Name: v.Author.Name,
+ AvatarLink: avatarLink,
+ Weeks: make(map[int64]*WeekData),
+ }
+ } else {
+ contributorsCommitStats[userEmail] = &ContributorData{
+ Name: u.DisplayName(),
+ Login: u.LowerName,
+ AvatarLink: u.AvatarLinkWithSize(ctx, 0),
+ HomeLink: u.HomeLink(),
+ Weeks: make(map[int64]*WeekData),
+ }
+ }
+ }
+ // Update user statistics
+ user := contributorsCommitStats[userEmail]
+ startingOfWeek, _ := findLastSundayBeforeDate(v.Author.Date)
+
+ val, _ := time.Parse(layout, startingOfWeek)
+ week := val.UnixMilli()
+
+ if user.Weeks[week] == nil {
+ user.Weeks[week] = &WeekData{
+ Additions: 0,
+ Deletions: 0,
+ Commits: 0,
+ Week: week,
+ }
+ }
+ if total.Weeks[week] == nil {
+ total.Weeks[week] = &WeekData{
+ Additions: 0,
+ Deletions: 0,
+ Commits: 0,
+ Week: week,
+ }
+ }
+ user.Weeks[week].Additions += v.Stats.Additions
+ user.Weeks[week].Deletions += v.Stats.Deletions
+ user.Weeks[week].Commits++
+ user.TotalCommits++
+
+ // Update overall statistics
+ total.Weeks[week].Additions += v.Stats.Additions
+ total.Weeks[week].Deletions += v.Stats.Deletions
+ total.Weeks[week].Commits++
+ total.TotalCommits++
+ }
+
+ data, err := json.Marshal(contributorsCommitStats)
+ if err != nil {
+ log.Error("json.Marshal[repo=%q revision=%q]: %v", repo.FullName(), revision, err)
+ return
+ }
+
+ // Store the data as a string, to make the data type returned from caches uniform.
+ _ = cache.Put(cacheKey, string(data), setting.CacheService.TTLSeconds())
+ generateLock.Delete(cacheKey)
+ if genDone != nil {
+ genDone <- struct{}{}
+ }
+}
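
Because generateContributorStats caches the stats as a JSON string, a reader needs a type assertion plus an unmarshal on the way out. A minimal sketch, assuming it sits in this package next to ContributorData; the key scheme is illustrative only:

    import (
        "fmt"

        "code.gitea.io/gitea/modules/json"

        "code.forgejo.org/go-chi/cache"
    )

    func getCachedContributorStats(c cache.Cache, cacheKey string) (map[string]*ContributorData, error) {
        v, ok := c.Get(cacheKey).(string) // stored as a string, see the comment above
        if !ok {
            return nil, fmt.Errorf("no cached contributor stats under %q", cacheKey)
        }
        res := make(map[string]*ContributorData)
        if err := json.Unmarshal([]byte(v), &res); err != nil {
            return nil, err
        }
        return res, nil
    }
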
diff --git a/services/repository/contributors_graph_test.go b/services/repository/contributors_graph_test.go
new file mode 100644
index 0000000..8cfe69d
--- /dev/null
+++ b/services/repository/contributors_graph_test.go
@@ -0,0 +1,101 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "slices"
+ "testing"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/test"
+
+ "code.forgejo.org/go-chi/cache"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestRepository_ContributorsGraph(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2})
+ require.NoError(t, repo.LoadOwner(db.DefaultContext))
+ mockCache, err := cache.NewCacher(cache.Options{
+ Adapter: "memory",
+ Interval: 24 * 60,
+ })
+ require.NoError(t, err)
+
+ lc, cleanup := test.NewLogChecker(log.DEFAULT, log.INFO)
+ lc.StopMark(`getExtendedCommitStats[repo="user2/repo2" revision="404ref"]: object does not exist [id: 404ref, rel_path: ]`)
+ defer cleanup()
+
+ generateContributorStats(nil, mockCache, "key", repo, "404ref")
+ assert.False(t, mockCache.IsExist("key"))
+ _, stopped := lc.Check(100 * time.Millisecond)
+ assert.True(t, stopped)
+
+ generateContributorStats(nil, mockCache, "key2", repo, "master")
+ dataString, isData := mockCache.Get("key2").(string)
+ assert.True(t, isData)
+ // Verify that JSON is actually stored in the cache.
+ assert.EqualValues(t, `{"ethantkoenig@gmail.com":{"name":"Ethan Koenig","login":"","avatar_link":"https://secure.gravatar.com/avatar/b42fb195faa8c61b8d88abfefe30e9e3?d=identicon","home_link":"","total_commits":1,"weeks":{"1511654400000":{"week":1511654400000,"additions":3,"deletions":0,"commits":1}}},"jimmy.praet@telenet.be":{"name":"Jimmy Praet","login":"","avatar_link":"https://secure.gravatar.com/avatar/93c49b7c89eb156971d11161c9b52795?d=identicon","home_link":"","total_commits":1,"weeks":{"1624752000000":{"week":1624752000000,"additions":2,"deletions":0,"commits":1}}},"jon@allspice.io":{"name":"Jon","login":"","avatar_link":"https://secure.gravatar.com/avatar/00388ce725e6886f3e07c3733007289b?d=identicon","home_link":"","total_commits":1,"weeks":{"1607817600000":{"week":1607817600000,"additions":10,"deletions":0,"commits":1}}},"total":{"name":"Total","login":"","avatar_link":"","home_link":"","total_commits":3,"weeks":{"1511654400000":{"week":1511654400000,"additions":3,"deletions":0,"commits":1},"1607817600000":{"week":1607817600000,"additions":10,"deletions":0,"commits":1},"1624752000000":{"week":1624752000000,"additions":2,"deletions":0,"commits":1}}}}`, dataString)
+
+ var data map[string]*ContributorData
+ require.NoError(t, json.Unmarshal([]byte(dataString), &data))
+
+ var keys []string
+ for k := range data {
+ keys = append(keys, k)
+ }
+ slices.Sort(keys)
+ assert.EqualValues(t, []string{
+ "ethantkoenig@gmail.com",
+ "jimmy.praet@telenet.be",
+ "jon@allspice.io",
+ "total", // generated summary
+ }, keys)
+
+ assert.EqualValues(t, &ContributorData{
+ Name: "Ethan Koenig",
+ AvatarLink: "https://secure.gravatar.com/avatar/b42fb195faa8c61b8d88abfefe30e9e3?d=identicon",
+ TotalCommits: 1,
+ Weeks: map[int64]*WeekData{
+ 1511654400000: {
+ Week: 1511654400000, // sunday 2017-11-26
+ Additions: 3,
+ Deletions: 0,
+ Commits: 1,
+ },
+ },
+ }, data["ethantkoenig@gmail.com"])
+ assert.EqualValues(t, &ContributorData{
+ Name: "Total",
+ AvatarLink: "",
+ TotalCommits: 3,
+ Weeks: map[int64]*WeekData{
+ 1511654400000: {
+ Week: 1511654400000, // sunday 2017-11-26 (2017-11-26 20:31:18 -0800)
+ Additions: 3,
+ Deletions: 0,
+ Commits: 1,
+ },
+ 1607817600000: {
+ Week: 1607817600000, // sunday 2020-12-13 (2020-12-15 15:23:11 -0500)
+ Additions: 10,
+ Deletions: 0,
+ Commits: 1,
+ },
+ 1624752000000: {
+ Week: 1624752000000, // sunday 2021-06-27 (2021-06-29 21:54:09 +0200)
+ Additions: 2,
+ Deletions: 0,
+ Commits: 1,
+ },
+ },
+ }, data["total"])
+}
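
The week keys asserted above (1511654400000 and friends) are Unix-millisecond timestamps of the Sunday that starts each week. A sketch of the bucketing, standing in for the findLastSundayBeforeDate + time.Parse pair used by generateContributorStats and assuming YYYY-MM-DD input:

    import "time"

    // lastSundayMilli buckets a YYYY-MM-DD date into the Unix-millisecond
    // timestamp of the preceding (or same) Sunday at 00:00 UTC.
    func lastSundayMilli(dateStr string) (int64, error) {
        d, err := time.Parse(time.DateOnly, dateStr)
        if err != nil {
            return 0, err
        }
        d = d.AddDate(0, 0, -int(d.Weekday())) // time.Sunday == 0
        return d.UnixMilli(), nil
    }

    // lastSundayMilli("2017-11-28") == 1511654400000, i.e. Sunday 2017-11-26,
    // matching the week key asserted in the test above.
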
diff --git a/services/repository/create.go b/services/repository/create.go
new file mode 100644
index 0000000..d092d02
--- /dev/null
+++ b/services/repository/create.go
@@ -0,0 +1,318 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/options"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/templates/vars"
+ "code.gitea.io/gitea/modules/util"
+)
+
+// CreateRepoOptions contains the create repository options
+type CreateRepoOptions struct {
+ Name string
+ Description string
+ OriginalURL string
+ GitServiceType api.GitServiceType
+ Gitignores string
+ IssueLabels string
+ License string
+ Readme string
+ DefaultBranch string
+ IsPrivate bool
+ IsMirror bool
+ IsTemplate bool
+ AutoInit bool
+ Status repo_model.RepositoryStatus
+ TrustModel repo_model.TrustModelType
+ MirrorInterval string
+ ObjectFormatName string
+}
+
+func prepareRepoCommit(ctx context.Context, repo *repo_model.Repository, tmpDir, repoPath string, opts CreateRepoOptions) error {
+ commitTimeStr := time.Now().Format(time.RFC3339)
+ authorSig := repo.Owner.NewGitSig()
+
+ // Because this may call hooks, we should pass in the environment
+ env := append(os.Environ(),
+ "GIT_AUTHOR_NAME="+authorSig.Name,
+ "GIT_AUTHOR_EMAIL="+authorSig.Email,
+ "GIT_AUTHOR_DATE="+commitTimeStr,
+ "GIT_COMMITTER_NAME="+authorSig.Name,
+ "GIT_COMMITTER_EMAIL="+authorSig.Email,
+ "GIT_COMMITTER_DATE="+commitTimeStr,
+ )
+
+ // Clone to temporary path and do the init commit.
+ if stdout, _, err := git.NewCommand(ctx, "clone").AddDynamicArguments(repoPath, tmpDir).
+ SetDescription(fmt.Sprintf("prepareRepoCommit (git clone): %s to %s", repoPath, tmpDir)).
+ RunStdString(&git.RunOpts{Dir: "", Env: env}); err != nil {
+ log.Error("Failed to clone from %v into %s: stdout: %s\nError: %v", repo, tmpDir, stdout, err)
+ return fmt.Errorf("git clone: %w", err)
+ }
+
+ // README
+ data, err := options.Readme(opts.Readme)
+ if err != nil {
+ return fmt.Errorf("GetRepoInitFile[%s]: %w", opts.Readme, err)
+ }
+
+ cloneLink := repo.CloneLink()
+ match := map[string]string{
+ "Name": repo.Name,
+ "Description": repo.Description,
+ "CloneURL.SSH": cloneLink.SSH,
+ "CloneURL.HTTPS": cloneLink.HTTPS,
+ "OwnerName": repo.OwnerName,
+ }
+ res, err := vars.Expand(string(data), match)
+ if err != nil {
+ // just log the error and continue rendering with the partially expanded result
+ log.Error("unable to expand template vars for repo README: %s, err: %v", opts.Readme, err)
+ }
+ if err = os.WriteFile(filepath.Join(tmpDir, "README.md"),
+ []byte(res), 0o644); err != nil {
+ return fmt.Errorf("write README.md: %w", err)
+ }
+
+ // .gitignore
+ if len(opts.Gitignores) > 0 {
+ var buf bytes.Buffer
+ names := strings.Split(opts.Gitignores, ",")
+ for _, name := range names {
+ data, err = options.Gitignore(name)
+ if err != nil {
+ return fmt.Errorf("GetRepoInitFile[%s]: %w", name, err)
+ }
+ buf.WriteString("# ---> " + name + "\n")
+ buf.Write(data)
+ buf.WriteString("\n")
+ }
+
+ if buf.Len() > 0 {
+ if err = os.WriteFile(filepath.Join(tmpDir, ".gitignore"), buf.Bytes(), 0o644); err != nil {
+ return fmt.Errorf("write .gitignore: %w", err)
+ }
+ }
+ }
+
+ // LICENSE
+ if len(opts.License) > 0 {
+ data, err = repo_module.GetLicense(opts.License, &repo_module.LicenseValues{
+ Owner: repo.OwnerName,
+ Email: authorSig.Email,
+ Repo: repo.Name,
+ Year: time.Now().Format("2006"),
+ })
+ if err != nil {
+ return fmt.Errorf("getLicense[%s]: %w", opts.License, err)
+ }
+
+ if err = os.WriteFile(filepath.Join(tmpDir, "LICENSE"), data, 0o644); err != nil {
+ return fmt.Errorf("write LICENSE: %w", err)
+ }
+ }
+
+ return nil
+}
+
+// initRepository initializes the repository and, if AutoInit is set, creates README/.gitignore/LICENSE as needed.
+func initRepository(ctx context.Context, repoPath string, u *user_model.User, repo *repo_model.Repository, opts CreateRepoOptions) (err error) {
+ if err = repo_module.CheckInitRepository(ctx, repo.OwnerName, repo.Name, opts.ObjectFormatName); err != nil {
+ return err
+ }
+
+ // Initialize repository according to user's choice.
+ if opts.AutoInit {
+ tmpDir, err := os.MkdirTemp(os.TempDir(), "gitea-"+repo.Name)
+ if err != nil {
+ return fmt.Errorf("Failed to create temp dir for repository %s: %w", repo.RepoPath(), err)
+ }
+ defer func() {
+ if err := util.RemoveAll(tmpDir); err != nil {
+ log.Warn("Unable to remove temporary directory: %s: Error: %v", tmpDir, err)
+ }
+ }()
+
+ if err = prepareRepoCommit(ctx, repo, tmpDir, repoPath, opts); err != nil {
+ return fmt.Errorf("prepareRepoCommit: %w", err)
+ }
+
+ // Apply changes and commit.
+ if err = initRepoCommit(ctx, tmpDir, repo, u, opts.DefaultBranch); err != nil {
+ return fmt.Errorf("initRepoCommit: %w", err)
+ }
+ }
+
+ // Re-fetch the repository from the database before updating it (otherwise it
+ // would override changes that were made earlier with raw SQL)
+ if repo, err = repo_model.GetRepositoryByID(ctx, repo.ID); err != nil {
+ return fmt.Errorf("getRepositoryByID: %w", err)
+ }
+
+ if !opts.AutoInit {
+ repo.IsEmpty = true
+ }
+
+ repo.DefaultBranch = setting.Repository.DefaultBranch
+ repo.WikiBranch = setting.Repository.DefaultBranch
+
+ if len(opts.DefaultBranch) > 0 {
+ repo.DefaultBranch = opts.DefaultBranch
+ if err = gitrepo.SetDefaultBranch(ctx, repo, repo.DefaultBranch); err != nil {
+ return fmt.Errorf("setDefaultBranch: %w", err)
+ }
+
+ if !repo.IsEmpty {
+ if _, err := repo_module.SyncRepoBranches(ctx, repo.ID, u.ID); err != nil {
+ return fmt.Errorf("SyncRepoBranches: %w", err)
+ }
+ }
+ }
+
+ if err = UpdateRepository(ctx, repo, false); err != nil {
+ return fmt.Errorf("updateRepository: %w", err)
+ }
+
+ return nil
+}
+
+// CreateRepositoryDirectly creates a repository for the user/organization.
+func CreateRepositoryDirectly(ctx context.Context, doer, u *user_model.User, opts CreateRepoOptions) (*repo_model.Repository, error) {
+ if !doer.IsAdmin && !u.CanCreateRepo() {
+ return nil, repo_model.ErrReachLimitOfRepo{
+ Limit: u.MaxRepoCreation,
+ }
+ }
+
+ if len(opts.DefaultBranch) == 0 {
+ opts.DefaultBranch = setting.Repository.DefaultBranch
+ }
+
+ // Check if the label template exists
+ if len(opts.IssueLabels) > 0 {
+ if _, err := repo_module.LoadTemplateLabelsByDisplayName(opts.IssueLabels); err != nil {
+ return nil, err
+ }
+ }
+
+ if opts.ObjectFormatName == "" {
+ opts.ObjectFormatName = git.Sha1ObjectFormat.Name()
+ }
+
+ repo := &repo_model.Repository{
+ OwnerID: u.ID,
+ Owner: u,
+ OwnerName: u.Name,
+ Name: opts.Name,
+ LowerName: strings.ToLower(opts.Name),
+ Description: opts.Description,
+ OriginalURL: opts.OriginalURL,
+ OriginalServiceType: opts.GitServiceType,
+ IsPrivate: opts.IsPrivate,
+ IsFsckEnabled: !opts.IsMirror,
+ IsTemplate: opts.IsTemplate,
+ CloseIssuesViaCommitInAnyBranch: setting.Repository.DefaultCloseIssuesViaCommitsInAnyBranch,
+ Status: opts.Status,
+ IsEmpty: !opts.AutoInit,
+ TrustModel: opts.TrustModel,
+ IsMirror: opts.IsMirror,
+ DefaultBranch: opts.DefaultBranch,
+ WikiBranch: setting.Repository.DefaultBranch,
+ ObjectFormatName: opts.ObjectFormatName,
+ }
+
+ var rollbackRepo *repo_model.Repository
+
+ if err := db.WithTx(ctx, func(ctx context.Context) error {
+ if err := repo_module.CreateRepositoryByExample(ctx, doer, u, repo, false, false); err != nil {
+ return err
+ }
+
+ // Mirrors do not need to be initialized.
+ if opts.IsMirror {
+ return nil
+ }
+
+ repoPath := repo_model.RepoPath(u.Name, repo.Name)
+ isExist, err := util.IsExist(repoPath)
+ if err != nil {
+ log.Error("Unable to check if %s exists. Error: %v", repoPath, err)
+ return err
+ }
+ if isExist {
+ // repo already exists - We have two or three options.
+ // 1. We fail stating that the directory exists
+ // 2. We create the db repository to go with this data and adopt the git repo
+ // 3. We delete it and start afresh
+ //
+ // Previously Gitea would just delete and start afresh - this was naughty.
+ // So we will now fail and delegate to other functionality to adopt or delete
+ log.Error("Files already exist in %s and we are not going to adopt or delete.", repoPath)
+ return repo_model.ErrRepoFilesAlreadyExist{
+ Uname: u.Name,
+ Name: repo.Name,
+ }
+ }
+
+ if err = initRepository(ctx, repoPath, doer, repo, opts); err != nil {
+ if err2 := util.RemoveAll(repoPath); err2 != nil {
+ log.Error("initRepository: %v", err)
+ return fmt.Errorf(
+ "delete repo directory %s/%s failed(2): %v", u.Name, repo.Name, err2)
+ }
+ return fmt.Errorf("initRepository: %w", err)
+ }
+
+ // Initialize Issue Labels if selected
+ if len(opts.IssueLabels) > 0 {
+ if err = repo_module.InitializeLabels(ctx, repo.ID, opts.IssueLabels, false); err != nil {
+ rollbackRepo = repo
+ rollbackRepo.OwnerID = u.ID
+ return fmt.Errorf("InitializeLabels: %w", err)
+ }
+ }
+
+ if err := repo_module.CheckDaemonExportOK(ctx, repo); err != nil {
+ return fmt.Errorf("checkDaemonExportOK: %w", err)
+ }
+
+ if stdout, _, err := git.NewCommand(ctx, "update-server-info").
+ SetDescription(fmt.Sprintf("CreateRepository(git update-server-info): %s", repoPath)).
+ RunStdString(&git.RunOpts{Dir: repoPath}); err != nil {
+ log.Error("CreateRepository(git update-server-info) in %v: Stdout: %s\nError: %v", repo, stdout, err)
+ rollbackRepo = repo
+ rollbackRepo.OwnerID = u.ID
+ return fmt.Errorf("CreateRepository(git update-server-info): %w", err)
+ }
+ return nil
+ }); err != nil {
+ if rollbackRepo != nil {
+ if errDelete := DeleteRepositoryDirectly(ctx, doer, rollbackRepo.ID); errDelete != nil {
+ log.Error("Rollback deleteRepository: %v", errDelete)
+ }
+ }
+
+ return nil, err
+ }
+
+ return repo, nil
+}
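
A minimal usage sketch for CreateRepositoryDirectly; the readme template name, branch, and other option values are illustrative, not mandated by this patch:

    func createExample(ctx context.Context, doer *user_model.User) (*repo_model.Repository, error) {
        return CreateRepositoryDirectly(ctx, doer, doer, CreateRepoOptions{
            Name:          "example",
            Description:   "created through the repository service",
            AutoInit:      true,      // seed an initial commit (README, optionally .gitignore/LICENSE)
            Readme:        "Default", // assumed name of a bundled readme template
            DefaultBranch: "main",
            IsPrivate:     true,
        })
    }
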
diff --git a/services/repository/create_test.go b/services/repository/create_test.go
new file mode 100644
index 0000000..9cde285
--- /dev/null
+++ b/services/repository/create_test.go
@@ -0,0 +1,149 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "fmt"
+ "testing"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/organization"
+ "code.gitea.io/gitea/models/perm"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/structs"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestIncludesAllRepositoriesTeams(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ testTeamRepositories := func(teamID int64, repoIds []int64) {
+ team := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: teamID})
+ require.NoError(t, team.LoadRepositories(db.DefaultContext), "%s: GetRepositories", team.Name)
+ assert.Len(t, team.Repos, team.NumRepos, "%s: len repo", team.Name)
+ assert.Len(t, team.Repos, len(repoIds), "%s: repo count", team.Name)
+ for i, rid := range repoIds {
+ if rid > 0 {
+ assert.True(t, HasRepository(db.DefaultContext, team, rid), "%s: HasRepository(%d) %d", team.Name, rid, i)
+ }
+ }
+ }
+
+ // Get an admin user.
+ user, err := user_model.GetUserByID(db.DefaultContext, 1)
+ require.NoError(t, err, "GetUserByID")
+
+ // Create org.
+ org := &organization.Organization{
+ Name: "All_repo",
+ IsActive: true,
+ Type: user_model.UserTypeOrganization,
+ Visibility: structs.VisibleTypePublic,
+ }
+ require.NoError(t, organization.CreateOrganization(db.DefaultContext, org, user), "CreateOrganization")
+
+ // Check Owner team.
+ ownerTeam, err := org.GetOwnerTeam(db.DefaultContext)
+ require.NoError(t, err, "GetOwnerTeam")
+ assert.True(t, ownerTeam.IncludesAllRepositories, "Owner team includes all repositories")
+
+ // Create repos.
+ repoIDs := make([]int64, 0)
+ for i := 0; i < 3; i++ {
+ r, err := CreateRepositoryDirectly(db.DefaultContext, user, org.AsUser(), CreateRepoOptions{Name: fmt.Sprintf("repo-%d", i)})
+ require.NoError(t, err, "CreateRepository %d", i)
+ if r != nil {
+ repoIDs = append(repoIDs, r.ID)
+ }
+ }
+ // Get fresh copy of Owner team after creating repos.
+ ownerTeam, err = org.GetOwnerTeam(db.DefaultContext)
+ require.NoError(t, err, "GetOwnerTeam")
+
+ // Create teams and check repositories.
+ teams := []*organization.Team{
+ ownerTeam,
+ {
+ OrgID: org.ID,
+ Name: "team one",
+ AccessMode: perm.AccessModeRead,
+ IncludesAllRepositories: true,
+ },
+ {
+ OrgID: org.ID,
+ Name: "team 2",
+ AccessMode: perm.AccessModeRead,
+ IncludesAllRepositories: false,
+ },
+ {
+ OrgID: org.ID,
+ Name: "team three",
+ AccessMode: perm.AccessModeWrite,
+ IncludesAllRepositories: true,
+ },
+ {
+ OrgID: org.ID,
+ Name: "team 4",
+ AccessMode: perm.AccessModeWrite,
+ IncludesAllRepositories: false,
+ },
+ }
+ teamRepos := [][]int64{
+ repoIDs,
+ repoIDs,
+ {},
+ repoIDs,
+ {},
+ }
+ for i, team := range teams {
+ if i > 0 { // first team is Owner.
+ require.NoError(t, models.NewTeam(db.DefaultContext, team), "%s: NewTeam", team.Name)
+ }
+ testTeamRepositories(team.ID, teamRepos[i])
+ }
+
+ // Update teams and check repositories.
+ teams[3].IncludesAllRepositories = false
+ teams[4].IncludesAllRepositories = true
+ teamRepos[4] = repoIDs
+ for i, team := range teams {
+ require.NoError(t, models.UpdateTeam(db.DefaultContext, team, false, true), "%s: UpdateTeam", team.Name)
+ testTeamRepositories(team.ID, teamRepos[i])
+ }
+
+ // Create repo and check teams repositories.
+ r, err := CreateRepositoryDirectly(db.DefaultContext, user, org.AsUser(), CreateRepoOptions{Name: "repo-last"})
+ require.NoError(t, err, "CreateRepository last")
+ if r != nil {
+ repoIDs = append(repoIDs, r.ID)
+ }
+ teamRepos[0] = repoIDs
+ teamRepos[1] = repoIDs
+ teamRepos[4] = repoIDs
+ for i, team := range teams {
+ testTeamRepositories(team.ID, teamRepos[i])
+ }
+
+ // Remove repo and check teams repositories.
+ require.NoError(t, DeleteRepositoryDirectly(db.DefaultContext, user, repoIDs[0]), "DeleteRepository")
+ teamRepos[0] = repoIDs[1:]
+ teamRepos[1] = repoIDs[1:]
+ teamRepos[3] = repoIDs[1:3]
+ teamRepos[4] = repoIDs[1:]
+ for i, team := range teams {
+ testTeamRepositories(team.ID, teamRepos[i])
+ }
+
+ // Wipe created items.
+ for i, rid := range repoIDs {
+ if i > 0 { // first repo already deleted.
+ require.NoError(t, DeleteRepositoryDirectly(db.DefaultContext, user, rid), "DeleteRepository %d", i)
+ }
+ }
+ require.NoError(t, organization.DeleteOrganization(db.DefaultContext, org), "DeleteOrganization")
+}
diff --git a/services/repository/delete.go b/services/repository/delete.go
new file mode 100644
index 0000000..6e84194
--- /dev/null
+++ b/services/repository/delete.go
@@ -0,0 +1,471 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models"
+ actions_model "code.gitea.io/gitea/models/actions"
+ activities_model "code.gitea.io/gitea/models/activities"
+ admin_model "code.gitea.io/gitea/models/admin"
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/organization"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ project_model "code.gitea.io/gitea/models/project"
+ repo_model "code.gitea.io/gitea/models/repo"
+ secret_model "code.gitea.io/gitea/models/secret"
+ system_model "code.gitea.io/gitea/models/system"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/models/webhook"
+ actions_module "code.gitea.io/gitea/modules/actions"
+ "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+
+ "xorm.io/builder"
+)
+
+// DeleteRepositoryDirectly deletes a repository for a user or organization.
+// Make sure to close open sessions before calling this function (SQLite will otherwise deadlock).
+func DeleteRepositoryDirectly(ctx context.Context, doer *user_model.User, repoID int64, ignoreOrgTeams ...bool) error {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+ sess := db.GetEngine(ctx)
+
+ repo := &repo_model.Repository{}
+ has, err := sess.ID(repoID).Get(repo)
+ if err != nil {
+ return err
+ } else if !has {
+ return repo_model.ErrRepoNotExist{
+ ID: repoID,
+ OwnerName: "",
+ Name: "",
+ }
+ }
+
+ // Query the action tasks of this repo; they are needed after deletion to remove the logs
+ tasks, err := db.Find[actions_model.ActionTask](ctx, actions_model.FindTaskOptions{RepoID: repoID})
+ if err != nil {
+ return fmt.Errorf("find actions tasks of repo %v: %w", repoID, err)
+ }
+
+ // Query the artifacts of this repo; they are needed after deletion to remove the artifact files from object storage
+ artifacts, err := db.Find[actions_model.ActionArtifact](ctx, actions_model.FindArtifactsOptions{RepoID: repoID})
+ if err != nil {
+ return fmt.Errorf("list actions artifacts of repo %v: %w", repoID, err)
+ }
+
+ // In case the owner is an organization, we have to change repo-specific teams
+ // if ignoreOrgTeams is not true
+ var org *user_model.User
+ if len(ignoreOrgTeams) == 0 || !ignoreOrgTeams[0] {
+ if org, err = user_model.GetUserByID(ctx, repo.OwnerID); err != nil {
+ return err
+ }
+ }
+
+ // Delete Deploy Keys
+ deployKeys, err := db.Find[asymkey_model.DeployKey](ctx, asymkey_model.ListDeployKeysOptions{RepoID: repoID})
+ if err != nil {
+ return fmt.Errorf("listDeployKeys: %w", err)
+ }
+ needRewriteKeysFile := len(deployKeys) > 0
+ for _, dKey := range deployKeys {
+ if err := models.DeleteDeployKey(ctx, doer, dKey.ID); err != nil {
+ return fmt.Errorf("deleteDeployKeys: %w", err)
+ }
+ }
+
+ if cnt, err := sess.ID(repoID).Delete(&repo_model.Repository{}); err != nil {
+ return err
+ } else if cnt != 1 {
+ return repo_model.ErrRepoNotExist{
+ ID: repoID,
+ OwnerName: "",
+ Name: "",
+ }
+ }
+
+ if org != nil && org.IsOrganization() {
+ teams, err := organization.FindOrgTeams(ctx, org.ID)
+ if err != nil {
+ return err
+ }
+ for _, t := range teams {
+ if !organization.HasTeamRepo(ctx, t.OrgID, t.ID, repoID) {
+ continue
+ } else if err = removeRepositoryFromTeam(ctx, t, repo, false); err != nil {
+ return err
+ }
+ }
+ }
+
+ attachments := make([]*repo_model.Attachment, 0, 20)
+ if err = sess.Join("INNER", "`release`", "`release`.id = `attachment`.release_id").
+ Where("`release`.repo_id = ?", repoID).
+ Find(&attachments); err != nil {
+ return err
+ }
+ releaseAttachments := make([]string, 0, len(attachments))
+ for i := 0; i < len(attachments); i++ {
+ releaseAttachments = append(releaseAttachments, attachments[i].RelativePath())
+ }
+
+ if _, err := db.Exec(ctx, "UPDATE `user` SET num_stars=num_stars-1 WHERE id IN (SELECT `uid` FROM `star` WHERE repo_id = ?)", repo.ID); err != nil {
+ return err
+ }
+
+ if setting.Database.Type.IsMySQL() {
+ // mariadb:10 does not use the hook_task KEY when using IN.
+ // https://codeberg.org/forgejo/forgejo/issues/3678
+ //
+ // Version 11 does support it, but is not available in debian yet.
+ // Version 11.4 LTS is not available yet (stable should be released mid 2024 https://mariadb.org/mariadb/all-releases/)
+
+ // SQLite does not support the DELETE *** FROM *** syntax
+ // https://stackoverflow.com/q/24511153/3207406
+
+ // in the meantime, use a dedicated query for mysql...
+ if _, err := db.Exec(ctx, "DELETE `hook_task` FROM `hook_task` INNER JOIN `webhook` ON `webhook`.id = `hook_task`.hook_id WHERE `webhook`.repo_id = ?", repo.ID); err != nil {
+ return err
+ }
+ } else {
+ if _, err := db.GetEngine(ctx).In("hook_id", builder.Select("id").From("webhook").Where(builder.Eq{"webhook.repo_id": repo.ID})).
+ Delete(&webhook.HookTask{}); err != nil {
+ return err
+ }
+ }
+
+ if err := db.DeleteBeans(ctx,
+ &access_model.Access{RepoID: repo.ID},
+ &activities_model.Action{RepoID: repo.ID},
+ &repo_model.Collaboration{RepoID: repoID},
+ &issues_model.Comment{RefRepoID: repoID},
+ &git_model.CommitStatus{RepoID: repoID},
+ &git_model.Branch{RepoID: repoID},
+ &git_model.LFSLock{RepoID: repoID},
+ &repo_model.LanguageStat{RepoID: repoID},
+ &issues_model.Milestone{RepoID: repoID},
+ &repo_model.Mirror{RepoID: repoID},
+ &activities_model.Notification{RepoID: repoID},
+ &git_model.ProtectedBranch{RepoID: repoID},
+ &git_model.ProtectedTag{RepoID: repoID},
+ &repo_model.PushMirror{RepoID: repoID},
+ &repo_model.Release{RepoID: repoID},
+ &repo_model.RepoIndexerStatus{RepoID: repoID},
+ &repo_model.Redirect{RedirectRepoID: repoID},
+ &repo_model.RepoUnit{RepoID: repoID},
+ &repo_model.Star{RepoID: repoID},
+ &admin_model.Task{RepoID: repoID},
+ &repo_model.Watch{RepoID: repoID},
+ &webhook.Webhook{RepoID: repoID},
+ &secret_model.Secret{RepoID: repoID},
+ &actions_model.ActionTaskStep{RepoID: repoID},
+ &actions_model.ActionTask{RepoID: repoID},
+ &actions_model.ActionRunJob{RepoID: repoID},
+ &actions_model.ActionRun{RepoID: repoID},
+ &actions_model.ActionRunner{RepoID: repoID},
+ &actions_model.ActionScheduleSpec{RepoID: repoID},
+ &actions_model.ActionSchedule{RepoID: repoID},
+ &actions_model.ActionArtifact{RepoID: repoID},
+ &repo_model.RepoArchiveDownloadCount{RepoID: repoID},
+ &actions_model.ActionRunnerToken{RepoID: repoID},
+ ); err != nil {
+ return fmt.Errorf("deleteBeans: %w", err)
+ }
+
+ // Delete Labels and related objects
+ if err := issues_model.DeleteLabelsByRepoID(ctx, repoID); err != nil {
+ return err
+ }
+
+ // Delete Pulls and related objects
+ if err := issues_model.DeletePullsByBaseRepoID(ctx, repoID); err != nil {
+ return err
+ }
+
+ // Delete Issues and related objects
+ var attachmentPaths []string
+ if attachmentPaths, err = issues_model.DeleteIssuesByRepoID(ctx, repoID); err != nil {
+ return err
+ }
+
+ // Delete issue index
+ if err := db.DeleteResourceIndex(ctx, "issue_index", repoID); err != nil {
+ return err
+ }
+
+ if repo.IsFork {
+ if _, err := db.Exec(ctx, "UPDATE `repository` SET num_forks=num_forks-1 WHERE id=?", repo.ForkID); err != nil {
+ return fmt.Errorf("decrease fork count: %w", err)
+ }
+ }
+
+ if _, err := db.Exec(ctx, "UPDATE `user` SET num_repos=num_repos-1 WHERE id=?", repo.OwnerID); err != nil {
+ return err
+ }
+
+ if len(repo.Topics) > 0 {
+ if err := repo_model.RemoveTopicsFromRepo(ctx, repo.ID); err != nil {
+ return err
+ }
+ }
+
+ if err := project_model.DeleteProjectByRepoID(ctx, repoID); err != nil {
+ return fmt.Errorf("unable to delete projects for repo[%d]: %w", repoID, err)
+ }
+
+ // Remove LFS objects
+ var lfsObjects []*git_model.LFSMetaObject
+ if err = sess.Where("repository_id=?", repoID).Find(&lfsObjects); err != nil {
+ return err
+ }
+
+ lfsPaths := make([]string, 0, len(lfsObjects))
+ for _, v := range lfsObjects {
+ count, err := db.CountByBean(ctx, &git_model.LFSMetaObject{Pointer: lfs.Pointer{Oid: v.Oid}})
+ if err != nil {
+ return err
+ }
+ if count > 1 {
+ continue
+ }
+
+ lfsPaths = append(lfsPaths, v.RelativePath())
+ }
+
+ if _, err := db.DeleteByBean(ctx, &git_model.LFSMetaObject{RepositoryID: repoID}); err != nil {
+ return err
+ }
+
+ // Remove archives
+ var archives []*repo_model.RepoArchiver
+ if err = sess.Where("repo_id=?", repoID).Find(&archives); err != nil {
+ return err
+ }
+
+ archivePaths := make([]string, 0, len(archives))
+ for _, v := range archives {
+ archivePaths = append(archivePaths, v.RelativePath())
+ }
+
+ if _, err := db.DeleteByBean(ctx, &repo_model.RepoArchiver{RepoID: repoID}); err != nil {
+ return err
+ }
+
+ if repo.NumForks > 0 {
+ if _, err = sess.Exec("UPDATE `repository` SET fork_id=0,is_fork=? WHERE fork_id=?", false, repo.ID); err != nil {
+ log.Error("reset 'fork_id' and 'is_fork': %v", err)
+ }
+ }
+
+ // Get all attachments where both issue_id and release_id are zero
+ var newAttachments []*repo_model.Attachment
+ if err := sess.Where(builder.Eq{
+ "repo_id": repo.ID,
+ "issue_id": 0,
+ "release_id": 0,
+ }).Find(&newAttachments); err != nil {
+ return err
+ }
+
+ newAttachmentPaths := make([]string, 0, len(newAttachments))
+ for _, attach := range newAttachments {
+ newAttachmentPaths = append(newAttachmentPaths, attach.RelativePath())
+ }
+
+ if _, err := sess.Where("repo_id=?", repo.ID).Delete(new(repo_model.Attachment)); err != nil {
+ return err
+ }
+
+ if err = committer.Commit(); err != nil {
+ return err
+ }
+
+ committer.Close()
+
+ if needRewriteKeysFile {
+ if err := asymkey_model.RewriteAllPublicKeys(ctx); err != nil {
+ log.Error("RewriteAllPublicKeys failed: %v", err)
+ }
+ }
+
+ // We should always delete the files after the database transaction has succeeded. If
+ // we deleted the files but the database rolled back, the repository would be broken.
+
+ // Remove repository files.
+ repoPath := repo.RepoPath()
+ system_model.RemoveAllWithNotice(ctx, "Delete repository files", repoPath)
+
+ // Remove wiki files
+ if repo.HasWiki() {
+ system_model.RemoveAllWithNotice(ctx, "Delete repository wiki", repo.WikiPath())
+ }
+
+ // Remove archives
+ for _, archive := range archivePaths {
+ system_model.RemoveStorageWithNotice(ctx, storage.RepoArchives, "Delete repo archive file", archive)
+ }
+
+ // Remove lfs objects
+ for _, lfsObj := range lfsPaths {
+ system_model.RemoveStorageWithNotice(ctx, storage.LFS, "Delete orphaned LFS file", lfsObj)
+ }
+
+ // Remove issue attachment files.
+ for _, attachment := range attachmentPaths {
+ system_model.RemoveStorageWithNotice(ctx, storage.Attachments, "Delete issue attachment", attachment)
+ }
+
+ // Remove release attachment files.
+ for _, releaseAttachment := range releaseAttachments {
+ system_model.RemoveStorageWithNotice(ctx, storage.Attachments, "Delete release attachment", releaseAttachment)
+ }
+
+ // Remove attachment with no issue_id and release_id.
+ for _, newAttachment := range newAttachmentPaths {
+ system_model.RemoveStorageWithNotice(ctx, storage.Attachments, "Delete issue attachment", newAttachment)
+ }
+
+ if len(repo.Avatar) > 0 {
+ if err := storage.RepoAvatars.Delete(repo.CustomAvatarRelativePath()); err != nil {
+ return fmt.Errorf("Failed to remove %s: %w", repo.Avatar, err)
+ }
+ }
+
+ // Finally, delete action logs after the actions have already been deleted to avoid new log files
+ for _, task := range tasks {
+ err := actions_module.RemoveLogs(ctx, task.LogInStorage, task.LogFilename)
+ if err != nil {
+ log.Error("remove log file %q: %v", task.LogFilename, err)
+ // go on
+ }
+ }
+
+ // Delete actions artifacts in object storage after the repo has been deleted
+ for _, art := range artifacts {
+ if err := storage.ActionsArtifacts.Delete(art.StoragePath); err != nil {
+ log.Error("remove artifact file %q: %v", art.StoragePath, err)
+ // go on
+ }
+ }
+
+ return nil
+}
+
+// removeRepositoryFromTeam removes a repository from a team and recalculates access
+// Note: the repository is not removed from a team that includes all repositories (unless the repository itself is being deleted)
+func removeRepositoryFromTeam(ctx context.Context, t *organization.Team, repo *repo_model.Repository, recalculate bool) (err error) {
+ e := db.GetEngine(ctx)
+ if err = organization.RemoveTeamRepo(ctx, t.ID, repo.ID); err != nil {
+ return err
+ }
+
+ t.NumRepos--
+ if _, err = e.ID(t.ID).Cols("num_repos").Update(t); err != nil {
+ return err
+ }
+
+ // No need to recalculate when deleting a repository from an organization.
+ if recalculate {
+ if err = access_model.RecalculateTeamAccesses(ctx, repo, t.ID); err != nil {
+ return err
+ }
+ }
+
+ teamUsers, err := organization.GetTeamUsersByTeamID(ctx, t.ID)
+ if err != nil {
+ return fmt.Errorf("getTeamUsersByTeamID: %w", err)
+ }
+ for _, teamUser := range teamUsers {
+ has, err := access_model.HasAccess(ctx, teamUser.UID, repo)
+ if err != nil {
+ return err
+ } else if has {
+ continue
+ }
+
+ if err = repo_model.WatchRepo(ctx, teamUser.UID, repo.ID, false); err != nil {
+ return err
+ }
+
+ // Remove all IssueWatches a user has subscribed to in the repositories
+ if err := issues_model.RemoveIssueWatchersByRepoID(ctx, teamUser.UID, repo.ID); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// HasRepository returns true if the given repository belongs to the team.
+func HasRepository(ctx context.Context, t *organization.Team, repoID int64) bool {
+ return organization.HasTeamRepo(ctx, t.OrgID, t.ID, repoID)
+}
+
+// RemoveRepositoryFromTeam removes a repository from a team of an organization.
+// If the team includes all repositories, the request is ignored.
+func RemoveRepositoryFromTeam(ctx context.Context, t *organization.Team, repoID int64) error {
+ if !HasRepository(ctx, t, repoID) {
+ return nil
+ }
+
+ if t.IncludesAllRepositories {
+ return nil
+ }
+
+ repo, err := repo_model.GetRepositoryByID(ctx, repoID)
+ if err != nil {
+ return err
+ }
+
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ if err = removeRepositoryFromTeam(ctx, t, repo, true); err != nil {
+ return err
+ }
+
+ return committer.Commit()
+}
+
+// DeleteOwnerRepositoriesDirectly calls DeleteRepositoryDirectly for all repos of the given owner
+func DeleteOwnerRepositoriesDirectly(ctx context.Context, owner *user_model.User) error {
+ for {
+ repos, _, err := repo_model.GetUserRepositories(ctx, &repo_model.SearchRepoOptions{
+ ListOptions: db.ListOptions{
+ PageSize: repo_model.RepositoryListDefaultPageSize,
+ Page: 1,
+ },
+ Private: true,
+ OwnerID: owner.ID,
+ Actor: owner,
+ })
+ if err != nil {
+ return fmt.Errorf("GetUserRepositories: %w", err)
+ }
+ if len(repos) == 0 {
+ break
+ }
+ for _, repo := range repos {
+ if err := DeleteRepositoryDirectly(ctx, owner, repo.ID); err != nil {
+ return fmt.Errorf("unable to delete repository %s for %s[%d]. Error: %w", repo.Name, owner.Name, owner.ID, err)
+ }
+ }
+ }
+ return nil
+}
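
DeleteOwnerRepositoriesDirectly re-requests page 1 on every pass by design: each iteration deletes the batch it just fetched, so the next page 1 is a fresh batch until none remain. A usage sketch, assuming it runs inside a larger owner-cleanup flow not shown in this patch:

    func purgeOwner(ctx context.Context, owner *user_model.User) error {
        if err := DeleteOwnerRepositoriesDirectly(ctx, owner); err != nil {
            return err
        }
        // once no repositories remain, the caller can proceed to delete
        // the user or organization record itself
        return nil
    }
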
diff --git a/services/repository/files/cherry_pick.go b/services/repository/files/cherry_pick.go
new file mode 100644
index 0000000..451a182
--- /dev/null
+++ b/services/repository/files/cherry_pick.go
@@ -0,0 +1,128 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package files
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/services/pull"
+)
+
+// CherryPick cherry-picks or reverts a commit in the given repository
+func CherryPick(ctx context.Context, repo *repo_model.Repository, doer *user_model.User, revert bool, opts *ApplyDiffPatchOptions) (*structs.FileResponse, error) {
+ if err := opts.Validate(ctx, repo, doer); err != nil {
+ return nil, err
+ }
+ message := strings.TrimSpace(opts.Message)
+
+ author, committer := GetAuthorAndCommitterUsers(opts.Author, opts.Committer, doer)
+
+ t, err := NewTemporaryUploadRepository(ctx, repo)
+ if err != nil {
+ log.Error("NewTemporaryUploadRepository failed: %v", err)
+ return nil, err
+ }
+ defer t.Close()
+ if err := t.Clone(opts.OldBranch, false); err != nil {
+ return nil, err
+ }
+ if err := t.SetDefaultIndex(); err != nil {
+ return nil, err
+ }
+ if err := t.RefreshIndex(); err != nil {
+ return nil, err
+ }
+
+ // Get the commit of the original branch
+ commit, err := t.GetBranchCommit(opts.OldBranch)
+ if err != nil {
+ return nil, err // Couldn't get a commit for the branch
+ }
+
+ // Assign LastCommitID in opts if it hasn't been set
+ if opts.LastCommitID == "" {
+ opts.LastCommitID = commit.ID.String()
+ } else {
+ lastCommitID, err := t.gitRepo.ConvertToGitID(opts.LastCommitID)
+ if err != nil {
+ return nil, fmt.Errorf("CherryPick: Invalid last commit ID: %w", err)
+ }
+ opts.LastCommitID = lastCommitID.String()
+ if commit.ID.String() != opts.LastCommitID {
+ return nil, models.ErrCommitIDDoesNotMatch{
+ GivenCommitID: opts.LastCommitID,
+ CurrentCommitID: commit.ID.String(),
+ }
+ }
+ }
+
+ commit, err = t.GetCommit(strings.TrimSpace(opts.Content))
+ if err != nil {
+ return nil, err
+ }
+ parent, err := commit.ParentID(0)
+ if err != nil {
+ parent = git.ObjectFormatFromName(repo.ObjectFormatName).EmptyTree()
+ }
+
+ base, right := parent.String(), commit.ID.String()
+
+ if revert {
+ right, base = base, right
+ }
+
+ description := fmt.Sprintf("CherryPick %s onto %s", right, opts.OldBranch)
+ conflict, _, err := pull.AttemptThreeWayMerge(ctx,
+ t.basePath, t.gitRepo, base, opts.LastCommitID, right, description)
+ if err != nil {
+ return nil, fmt.Errorf("failed to three-way merge %s onto %s: %w", right, opts.OldBranch, err)
+ }
+
+ if conflict {
+ return nil, fmt.Errorf("failed to merge due to conflicts")
+ }
+
+ treeHash, err := t.WriteTree()
+ if err != nil {
+ // likely a nonsensical tree due to merge conflicts...
+ return nil, err
+ }
+
+ // Now commit the tree
+ var commitHash string
+ if opts.Dates != nil {
+ commitHash, err = t.CommitTreeWithDate("HEAD", author, committer, treeHash, message, opts.Signoff, opts.Dates.Author, opts.Dates.Committer)
+ } else {
+ commitHash, err = t.CommitTree("HEAD", author, committer, treeHash, message, opts.Signoff)
+ }
+ if err != nil {
+ return nil, err
+ }
+
+ // Then push this tree to NewBranch
+ if err := t.Push(doer, commitHash, opts.NewBranch); err != nil {
+ return nil, err
+ }
+
+ commit, err = t.GetCommit(commitHash)
+ if err != nil {
+ return nil, err
+ }
+
+ fileCommitResponse, _ := GetFileCommitResponse(repo, commit) // OK if this fails; the response will then be nil
+ verification := GetPayloadCommitVerification(ctx, commit)
+ fileResponse := &structs.FileResponse{
+ Commit: fileCommitResponse,
+ Verification: verification,
+ }
+
+ return fileResponse, nil
+}
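
Note how revert=true simply swaps base and right before the three-way merge, so the inverse of the commit is applied. A usage sketch; the field values are placeholders, and ApplyDiffPatchOptions carries more fields than shown here:

    func cherryPickExample(ctx context.Context, repo *repo_model.Repository, doer *user_model.User) (*structs.FileResponse, error) {
        return CherryPick(ctx, repo, doer, false /* revert */, &ApplyDiffPatchOptions{
            OldBranch: "main",                 // branch to pick onto
            NewBranch: "main",                 // push the result back to the same branch
            Content:   "<commit SHA to pick>", // resolved via t.GetCommit above
            Message:   "Apply fix from feature branch",
        })
    }
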
diff --git a/services/repository/files/commit.go b/services/repository/files/commit.go
new file mode 100644
index 0000000..e0dad29
--- /dev/null
+++ b/services/repository/files/commit.go
@@ -0,0 +1,44 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package files
+
+import (
+ "context"
+
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/structs"
+)
+
+// CountDivergingCommits determines how many commits a branch is ahead or behind the repository's base branch
+func CountDivergingCommits(ctx context.Context, repo *repo_model.Repository, branch string) (*git.DivergeObject, error) {
+ divergence, err := git.GetDivergingCommits(ctx, repo.RepoPath(), repo.DefaultBranch, branch)
+ if err != nil {
+ return nil, err
+ }
+ return &divergence, nil
+}
+
+// GetPayloadCommitVerification returns the verification information of a commit
+func GetPayloadCommitVerification(ctx context.Context, commit *git.Commit) *structs.PayloadCommitVerification {
+ verification := &structs.PayloadCommitVerification{}
+ commitVerification := asymkey_model.ParseCommitWithSignature(ctx, commit)
+ if commit.Signature != nil {
+ verification.Signature = commit.Signature.Signature
+ verification.Payload = commit.Signature.Payload
+ }
+ if commitVerification.SigningUser != nil {
+ verification.Signer = &structs.PayloadUser{
+ Name: commitVerification.SigningUser.Name,
+ Email: commitVerification.SigningUser.Email,
+ }
+ }
+ verification.Verified = commitVerification.Verified
+ verification.Reason = commitVerification.Reason
+ if verification.Reason == "" && !verification.Verified {
+ verification.Reason = "gpg.error.not_signed_commit"
+ }
+ return verification
+}
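
CountDivergingCommits is a thin wrapper; a sketch of how a caller might surface the result, assuming git.DivergeObject exposes Ahead and Behind counts:

    func reportDivergence(ctx context.Context, repo *repo_model.Repository, branch string) (string, error) {
        div, err := CountDivergingCommits(ctx, repo, branch)
        if err != nil {
            return "", err
        }
        // Ahead/Behind are assumed fields of git.DivergeObject
        return fmt.Sprintf("%s is %d ahead, %d behind %s",
            branch, div.Ahead, div.Behind, repo.DefaultBranch), nil
    }
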
diff --git a/services/repository/files/content.go b/services/repository/files/content.go
new file mode 100644
index 0000000..32517e8
--- /dev/null
+++ b/services/repository/files/content.go
@@ -0,0 +1,278 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package files
+
+import (
+ "context"
+ "fmt"
+ "net/url"
+ "path"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+)
+
+// ContentType repo content type
+type ContentType string
+
+// The string representations of different content types
+const (
+ // ContentTypeRegular regular content type (file)
+ ContentTypeRegular ContentType = "file"
+ // ContentTypeDir dir content type (dir)
+ ContentTypeDir ContentType = "dir"
+ // ContentTypeLink link content type (symlink)
+ ContentTypeLink ContentType = "symlink"
+ // ContentTypeSubmodule submodule content type (submodule)
+ ContentTypeSubmodule ContentType = "submodule"
+)
+
+// String gets the string of ContentType
+func (ct *ContentType) String() string {
+ return string(*ct)
+}
+
+// GetContentsOrList gets the metadata of a file's contents (*ContentsResponse) if treePath is not a tree
+// (directory); otherwise it returns a listing of file contents ([]*ContentsResponse). Ref can be a branch, commit or tag
+func GetContentsOrList(ctx context.Context, repo *repo_model.Repository, treePath, ref string) (any, error) {
+ if repo.IsEmpty {
+ return make([]any, 0), nil
+ }
+ if ref == "" {
+ ref = repo.DefaultBranch
+ }
+ origRef := ref
+
+ // Check that the path given in opts.treePath is valid (not a git path)
+ cleanTreePath := CleanUploadFileName(treePath)
+ if cleanTreePath == "" && treePath != "" {
+ return nil, models.ErrFilenameInvalid{
+ Path: treePath,
+ }
+ }
+ treePath = cleanTreePath
+
+ gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, repo)
+ if err != nil {
+ return nil, err
+ }
+ defer closer.Close()
+
+ // Get the commit object for the ref
+ commit, err := gitRepo.GetCommit(ref)
+ if err != nil {
+ return nil, err
+ }
+
+ entry, err := commit.GetTreeEntryByPath(treePath)
+ if err != nil {
+ return nil, err
+ }
+
+ if entry.Type() != "tree" {
+ return GetContents(ctx, repo, treePath, origRef, false)
+ }
+
+ // We are in a directory, so we return a list of FileContentResponse objects
+ var fileList []*api.ContentsResponse
+
+ gitTree, err := commit.SubTree(treePath)
+ if err != nil {
+ return nil, err
+ }
+ entries, err := gitTree.ListEntries()
+ if err != nil {
+ return nil, err
+ }
+ for _, e := range entries {
+ subTreePath := path.Join(treePath, e.Name())
+ fileContentResponse, err := GetContents(ctx, repo, subTreePath, origRef, true)
+ if err != nil {
+ return nil, err
+ }
+ fileList = append(fileList, fileContentResponse)
+ }
+ return fileList, nil
+}
+
+// GetObjectTypeFromTreeEntry checks what kind of content the tree entry points to
+func GetObjectTypeFromTreeEntry(entry *git.TreeEntry) ContentType {
+ switch {
+ case entry.IsDir():
+ return ContentTypeDir
+ case entry.IsSubModule():
+ return ContentTypeSubmodule
+ case entry.IsExecutable(), entry.IsRegular():
+ return ContentTypeRegular
+ case entry.IsLink():
+ return ContentTypeLink
+ default:
+ return ""
+ }
+}
+
+// GetContents gets the metadata of a file's contents. Ref can be a branch, commit or tag
+func GetContents(ctx context.Context, repo *repo_model.Repository, treePath, ref string, forList bool) (*api.ContentsResponse, error) {
+ if ref == "" {
+ ref = repo.DefaultBranch
+ }
+ origRef := ref
+
+ // Check that the path given in opts.treePath is valid (not a git path)
+ cleanTreePath := CleanUploadFileName(treePath)
+ if cleanTreePath == "" && treePath != "" {
+ return nil, models.ErrFilenameInvalid{
+ Path: treePath,
+ }
+ }
+ treePath = cleanTreePath
+
+ gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, repo)
+ if err != nil {
+ return nil, err
+ }
+ defer closer.Close()
+
+ // Get the commit object for the ref
+ commit, err := gitRepo.GetCommit(ref)
+ if err != nil {
+ return nil, err
+ }
+ commitID := commit.ID.String()
+ if len(ref) >= 4 && strings.HasPrefix(commitID, ref) {
+ ref = commit.ID.String()
+ }
+
+ entry, err := commit.GetTreeEntryByPath(treePath)
+ if err != nil {
+ return nil, err
+ }
+
+ refType := gitRepo.GetRefType(ref)
+ if refType == "invalid" {
+ return nil, fmt.Errorf("no commit found for the ref [ref: %s]", ref)
+ }
+
+ selfURL, err := url.Parse(repo.APIURL() + "/contents/" + util.PathEscapeSegments(treePath) + "?ref=" + url.QueryEscape(origRef))
+ if err != nil {
+ return nil, err
+ }
+ selfURLString := selfURL.String()
+
+ err = gitRepo.AddLastCommitCache(repo.GetCommitsCountCacheKey(ref, refType != git.ObjectCommit), repo.FullName(), commitID)
+ if err != nil {
+ return nil, err
+ }
+
+ lastCommit, err := commit.GetCommitByPath(treePath)
+ if err != nil {
+ return nil, err
+ }
+
+ // All content types have these fields populated
+ contentsResponse := &api.ContentsResponse{
+ Name: entry.Name(),
+ Path: treePath,
+ SHA: entry.ID.String(),
+ LastCommitSHA: lastCommit.ID.String(),
+ Size: entry.Size(),
+ URL: &selfURLString,
+ Links: &api.FileLinksResponse{
+ Self: &selfURLString,
+ },
+ }
+
+ // Now populate the rest of the ContentsResponse based on entry type
+ if entry.IsRegular() || entry.IsExecutable() {
+ contentsResponse.Type = string(ContentTypeRegular)
+ if blobResponse, err := GetBlobBySHA(ctx, repo, gitRepo, entry.ID.String()); err != nil {
+ return nil, err
+ } else if !forList {
+ // We don't show the content if we are getting a list of FileContentResponses
+ contentsResponse.Encoding = &blobResponse.Encoding
+ contentsResponse.Content = &blobResponse.Content
+ }
+ } else if entry.IsDir() {
+ contentsResponse.Type = string(ContentTypeDir)
+ } else if entry.IsLink() {
+ contentsResponse.Type = string(ContentTypeLink)
+ // The target of a symlink file is the content of the file
+ targetFromContent, err := entry.Blob().GetBlobContent(1024)
+ if err != nil {
+ return nil, err
+ }
+ contentsResponse.Target = &targetFromContent
+ } else if entry.IsSubModule() {
+ contentsResponse.Type = string(ContentTypeSubmodule)
+ submoduleURL, err := commit.GetSubModule(treePath)
+ if err != nil {
+ return nil, err
+ }
+ if submoduleURL != "" {
+ contentsResponse.SubmoduleGitURL = &submoduleURL
+ }
+ }
+ // Handle links
+ if entry.IsRegular() || entry.IsLink() || entry.IsExecutable() {
+ downloadURL, err := url.Parse(repo.HTMLURL() + "/raw/" + url.PathEscape(string(refType)) + "/" + util.PathEscapeSegments(ref) + "/" + util.PathEscapeSegments(treePath))
+ if err != nil {
+ return nil, err
+ }
+ downloadURLString := downloadURL.String()
+ contentsResponse.DownloadURL = &downloadURLString
+ }
+ if !entry.IsSubModule() {
+ htmlURL, err := url.Parse(repo.HTMLURL() + "/src/" + url.PathEscape(string(refType)) + "/" + util.PathEscapeSegments(ref) + "/" + util.PathEscapeSegments(treePath))
+ if err != nil {
+ return nil, err
+ }
+ htmlURLString := htmlURL.String()
+ contentsResponse.HTMLURL = &htmlURLString
+ contentsResponse.Links.HTMLURL = &htmlURLString
+
+ gitURL, err := url.Parse(repo.APIURL() + "/git/blobs/" + url.PathEscape(entry.ID.String()))
+ if err != nil {
+ return nil, err
+ }
+ gitURLString := gitURL.String()
+ contentsResponse.GitURL = &gitURLString
+ contentsResponse.Links.GitURL = &gitURLString
+ }
+
+ return contentsResponse, nil
+}
+
+// GetBlobBySHA gets the GitBlobResponse of a repository using a SHA hash.
+func GetBlobBySHA(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, sha string) (*api.GitBlobResponse, error) {
+ gitBlob, err := gitRepo.GetBlob(sha)
+ if err != nil {
+ return nil, err
+ }
+ content := ""
+ if gitBlob.Size() <= setting.API.DefaultMaxBlobSize {
+ content, err = gitBlob.GetBlobContentBase64()
+ if err != nil {
+ return nil, err
+ }
+ }
+ return &api.GitBlobResponse{
+ SHA: gitBlob.ID.String(),
+ URL: repo.APIURL() + "/git/blobs/" + url.PathEscape(gitBlob.ID.String()),
+ Size: gitBlob.Size(),
+ Encoding: "base64",
+ Content: content,
+ }, nil
+}
+
+// TryGetContentLanguage tries to get the (linguist) language of the file content
+func TryGetContentLanguage(gitRepo *git.Repository, commitID, treePath string) (string, error) {
+ attribute, err := gitRepo.GitAttributeFirst(commitID, treePath, "linguist-language", "gitlab-language")
+ return attribute.Prefix(), err
+}
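
Regular-file contents come back base64-encoded (via GetBlobBySHA), so a caller checks the type and then decodes. A minimal sketch using encoding/base64; the file path is illustrative:

    func readFileExample(ctx context.Context, repo *repo_model.Repository) ([]byte, error) {
        res, err := GetContents(ctx, repo, "README.md", "", false) // "" falls back to the default branch
        if err != nil {
            return nil, err
        }
        if res.Type != string(ContentTypeRegular) || res.Content == nil {
            return nil, fmt.Errorf("%s is not a regular file", res.Path)
        }
        return base64.StdEncoding.DecodeString(*res.Content)
    }
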
diff --git a/services/repository/files/content_test.go b/services/repository/files/content_test.go
new file mode 100644
index 0000000..c22dcd2
--- /dev/null
+++ b/services/repository/files/content_test.go
@@ -0,0 +1,201 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package files
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/gitrepo"
+ api "code.gitea.io/gitea/modules/structs"
+
+ _ "code.gitea.io/gitea/models/actions"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
+
+func getExpectedReadmeContentsResponse() *api.ContentsResponse {
+ treePath := "README.md"
+ sha := "4b4851ad51df6a7d9f25c979345979eaeb5b349f"
+ encoding := "base64"
+ content := "IyByZXBvMQoKRGVzY3JpcHRpb24gZm9yIHJlcG8x"
+ selfURL := "https://try.gitea.io/api/v1/repos/user2/repo1/contents/" + treePath + "?ref=master"
+ htmlURL := "https://try.gitea.io/user2/repo1/src/branch/master/" + treePath
+ gitURL := "https://try.gitea.io/api/v1/repos/user2/repo1/git/blobs/" + sha
+ downloadURL := "https://try.gitea.io/user2/repo1/raw/branch/master/" + treePath
+ return &api.ContentsResponse{
+ Name: treePath,
+ Path: treePath,
+ SHA: "4b4851ad51df6a7d9f25c979345979eaeb5b349f",
+ LastCommitSHA: "65f1bf27bc3bf70f64657658635e66094edbcb4d",
+ Type: "file",
+ Size: 30,
+ Encoding: &encoding,
+ Content: &content,
+ URL: &selfURL,
+ HTMLURL: &htmlURL,
+ GitURL: &gitURL,
+ DownloadURL: &downloadURL,
+ Links: &api.FileLinksResponse{
+ Self: &selfURL,
+ GitURL: &gitURL,
+ HTMLURL: &htmlURL,
+ },
+ }
+}
+
+func TestGetContents(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+
+ treePath := "README.md"
+ ref := repo.DefaultBranch
+
+ expectedContentsResponse := getExpectedReadmeContentsResponse()
+
+ t.Run("Get README.md contents with GetContents(ctx, )", func(t *testing.T) {
+ fileContentResponse, err := GetContents(db.DefaultContext, repo, treePath, ref, false)
+ assert.EqualValues(t, expectedContentsResponse, fileContentResponse)
+ require.NoError(t, err)
+ })
+
+ t.Run("Get README.md contents with ref as empty string (should then use the repo's default branch) with GetContents(ctx, )", func(t *testing.T) {
+ fileContentResponse, err := GetContents(db.DefaultContext, repo, treePath, "", false)
+ assert.EqualValues(t, expectedContentsResponse, fileContentResponse)
+ require.NoError(t, err)
+ })
+}
+
+func TestGetContentsOrListForDir(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+
+ treePath := "" // root dir
+ ref := repo.DefaultBranch
+
+ readmeContentsResponse := getExpectedReadmeContentsResponse()
+ // because it will be in a list, it doesn't have encoding and content
+ readmeContentsResponse.Encoding = nil
+ readmeContentsResponse.Content = nil
+
+ expectedContentsListResponse := []*api.ContentsResponse{
+ readmeContentsResponse,
+ }
+
+ t.Run("Get root dir contents with GetContentsOrList(ctx, )", func(t *testing.T) {
+ fileContentResponse, err := GetContentsOrList(db.DefaultContext, repo, treePath, ref)
+ assert.EqualValues(t, expectedContentsListResponse, fileContentResponse)
+ require.NoError(t, err)
+ })
+
+ t.Run("Get root dir contents with ref as empty string (should then use the repo's default branch) with GetContentsOrList(ctx, )", func(t *testing.T) {
+ fileContentResponse, err := GetContentsOrList(db.DefaultContext, repo, treePath, "")
+ assert.EqualValues(t, expectedContentsListResponse, fileContentResponse)
+ require.NoError(t, err)
+ })
+}
+
+func TestGetContentsOrListForFile(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+
+ treePath := "README.md"
+ ref := repo.DefaultBranch
+
+ expectedContentsResponse := getExpectedReadmeContentsResponse()
+
+ t.Run("Get README.md contents with GetContentsOrList(ctx, )", func(t *testing.T) {
+ fileContentResponse, err := GetContentsOrList(db.DefaultContext, repo, treePath, ref)
+ assert.EqualValues(t, expectedContentsResponse, fileContentResponse)
+ require.NoError(t, err)
+ })
+
+ t.Run("Get README.md contents with ref as empty string (should then use the repo's default branch) with GetContentsOrList(ctx, )", func(t *testing.T) {
+ fileContentResponse, err := GetContentsOrList(db.DefaultContext, repo, treePath, "")
+ assert.EqualValues(t, expectedContentsResponse, fileContentResponse)
+ require.NoError(t, err)
+ })
+}
+
+func TestGetContentsErrors(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+
+ treePath := "README.md"
+ ref := repo.DefaultBranch
+
+ t.Run("bad treePath", func(t *testing.T) {
+ badTreePath := "bad/tree.md"
+ fileContentResponse, err := GetContents(db.DefaultContext, repo, badTreePath, ref, false)
+ require.EqualError(t, err, "object does not exist [id: , rel_path: bad]")
+ assert.Nil(t, fileContentResponse)
+ })
+
+ t.Run("bad ref", func(t *testing.T) {
+ badRef := "bad_ref"
+ fileContentResponse, err := GetContents(db.DefaultContext, repo, treePath, badRef, false)
+ require.EqualError(t, err, "object does not exist [id: "+badRef+", rel_path: ]")
+ assert.Nil(t, fileContentResponse)
+ })
+}
+
+func TestGetContentsOrListErrors(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+
+ treePath := "README.md"
+ ref := repo.DefaultBranch
+
+ t.Run("bad treePath", func(t *testing.T) {
+ badTreePath := "bad/tree.md"
+ fileContentResponse, err := GetContentsOrList(db.DefaultContext, repo, badTreePath, ref)
+ require.EqualError(t, err, "object does not exist [id: , rel_path: bad]")
+ assert.Nil(t, fileContentResponse)
+ })
+
+ t.Run("bad ref", func(t *testing.T) {
+ badRef := "bad_ref"
+ fileContentResponse, err := GetContentsOrList(db.DefaultContext, repo, treePath, badRef)
+ require.EqualError(t, err, "object does not exist [id: "+badRef+", rel_path: ]")
+ assert.Nil(t, fileContentResponse)
+ })
+}
+
+func TestGetContentsOrListOfEmptyRepos(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 52})
+
+ t.Run("empty repo", func(t *testing.T) {
+ contents, err := GetContentsOrList(db.DefaultContext, repo, "", "")
+ require.NoError(t, err)
+ assert.Empty(t, contents)
+ })
+}
+
+func TestGetBlobBySHA(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+
+ gitRepo, err := gitrepo.OpenRepository(db.DefaultContext, repo)
+ require.NoError(t, err)
+ defer gitRepo.Close()
+
+ gbr, err := GetBlobBySHA(db.DefaultContext, repo, gitRepo, "65f1bf27bc3bf70f64657658635e66094edbcb4d")
+ expectedGBR := &api.GitBlobResponse{
+ Content: "dHJlZSAyYTJmMWQ0NjcwNzI4YTJlMTAwNDllMzQ1YmQ3YTI3NjQ2OGJlYWI2CmF1dGhvciB1c2VyMSA8YWRkcmVzczFAZXhhbXBsZS5jb20+IDE0ODk5NTY0NzkgLTA0MDAKY29tbWl0dGVyIEV0aGFuIEtvZW5pZyA8ZXRoYW50a29lbmlnQGdtYWlsLmNvbT4gMTQ4OTk1NjQ3OSAtMDQwMAoKSW5pdGlhbCBjb21taXQK",
+ Encoding: "base64",
+ URL: "https://try.gitea.io/api/v1/repos/user2/repo1/git/blobs/65f1bf27bc3bf70f64657658635e66094edbcb4d",
+ SHA: "65f1bf27bc3bf70f64657658635e66094edbcb4d",
+ Size: 180,
+ }
+ require.NoError(t, err)
+ assert.Equal(t, expectedGBR, gbr)
+}
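The tests above exercise both shapes of GetContentsOrList's return value: a single *api.ContentsResponse for a file, and a slice of them for a directory listing. A minimal consumer sketch, assuming a hypothetical helper name normalizeContents (not part of this change):

package files

import (
	api "code.gitea.io/gitea/modules/structs"
)

// normalizeContents is a hypothetical helper showing how a caller can handle
// both shapes GetContentsOrList may return: a single *api.ContentsResponse
// for a file, or a []*api.ContentsResponse for a directory listing.
func normalizeContents(contents any) []*api.ContentsResponse {
	switch c := contents.(type) {
	case *api.ContentsResponse:
		return []*api.ContentsResponse{c} // file: wrap the single entry
	case []*api.ContentsResponse:
		return c // directory: already a list
	default:
		return nil // unexpected shape (or nil on error)
	}
}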
diff --git a/services/repository/files/diff.go b/services/repository/files/diff.go
new file mode 100644
index 0000000..bf8b938
--- /dev/null
+++ b/services/repository/files/diff.go
@@ -0,0 +1,42 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package files
+
+import (
+ "context"
+ "strings"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/services/gitdiff"
+)
+
+// GetDiffPreview produces and returns the diff result of a file which has not yet been committed.
+func GetDiffPreview(ctx context.Context, repo *repo_model.Repository, branch, treePath, content string) (*gitdiff.Diff, error) {
+ if branch == "" {
+ branch = repo.DefaultBranch
+ }
+ t, err := NewTemporaryUploadRepository(ctx, repo)
+ if err != nil {
+ return nil, err
+ }
+ defer t.Close()
+ if err := t.Clone(branch, true); err != nil {
+ return nil, err
+ }
+ if err := t.SetDefaultIndex(); err != nil {
+ return nil, err
+ }
+
+ // Add the object to the database
+ objectHash, err := t.HashObject(strings.NewReader(content))
+ if err != nil {
+ return nil, err
+ }
+
+ // Add the object to the index
+ if err := t.AddObjectToIndex("100644", objectHash, treePath); err != nil {
+ return nil, err
+ }
+ return t.DiffIndex()
+}
diff --git a/services/repository/files/diff_test.go b/services/repository/files/diff_test.go
new file mode 100644
index 0000000..95de10e
--- /dev/null
+++ b/services/repository/files/diff_test.go
@@ -0,0 +1,166 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package files
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/services/contexttest"
+ "code.gitea.io/gitea/services/gitdiff"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestGetDiffPreview(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1")
+ ctx.SetParams(":id", "1")
+ contexttest.LoadRepo(t, ctx, 1)
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadGitRepo(t, ctx)
+ defer ctx.Repo.GitRepo.Close()
+
+ branch := ctx.Repo.Repository.DefaultBranch
+ treePath := "README.md"
+ content := "# repo1\n\nDescription for repo1\nthis is a new line"
+
+ expectedDiff := &gitdiff.Diff{
+ TotalAddition: 2,
+ TotalDeletion: 1,
+ Files: []*gitdiff.DiffFile{
+ {
+ Name: "README.md",
+ OldName: "README.md",
+ NameHash: "8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d",
+ Index: 1,
+ Addition: 2,
+ Deletion: 1,
+ Type: 2,
+ IsCreated: false,
+ IsDeleted: false,
+ IsBin: false,
+ IsLFSFile: false,
+ IsRenamed: false,
+ IsSubmodule: false,
+ Sections: []*gitdiff.DiffSection{
+ {
+ FileName: "README.md",
+ Name: "",
+ Lines: []*gitdiff.DiffLine{
+ {
+ LeftIdx: 0,
+ RightIdx: 0,
+ Type: 4,
+ Content: "@@ -1,3 +1,4 @@",
+ Conversations: nil,
+ SectionInfo: &gitdiff.DiffLineSectionInfo{
+ Path: "README.md",
+ LastLeftIdx: 0,
+ LastRightIdx: 0,
+ LeftIdx: 1,
+ RightIdx: 1,
+ LeftHunkSize: 3,
+ RightHunkSize: 4,
+ },
+ },
+ {
+ LeftIdx: 1,
+ RightIdx: 1,
+ Type: 1,
+ Content: " # repo1",
+ Conversations: nil,
+ },
+ {
+ LeftIdx: 2,
+ RightIdx: 2,
+ Type: 1,
+ Content: " ",
+ Conversations: nil,
+ },
+ {
+ LeftIdx: 3,
+ RightIdx: 0,
+ Match: 4,
+ Type: 3,
+ Content: "-Description for repo1",
+ Conversations: nil,
+ },
+ {
+ LeftIdx: 0,
+ RightIdx: 3,
+ Match: 3,
+ Type: 2,
+ Content: "+Description for repo1",
+ Conversations: nil,
+ },
+ {
+ LeftIdx: 0,
+ RightIdx: 4,
+ Match: -1,
+ Type: 2,
+ Content: "+this is a new line",
+ Conversations: nil,
+ },
+ },
+ },
+ },
+ IsIncomplete: false,
+ },
+ },
+ IsIncomplete: false,
+ }
+ expectedDiff.NumFiles = len(expectedDiff.Files)
+
+ t.Run("with given branch", func(t *testing.T) {
+ diff, err := GetDiffPreview(ctx, ctx.Repo.Repository, branch, treePath, content)
+ require.NoError(t, err)
+ expectedBs, err := json.Marshal(expectedDiff)
+ require.NoError(t, err)
+ bs, err := json.Marshal(diff)
+ require.NoError(t, err)
+ assert.EqualValues(t, string(expectedBs), string(bs))
+ })
+
+ t.Run("empty branch, same results", func(t *testing.T) {
+ diff, err := GetDiffPreview(ctx, ctx.Repo.Repository, "", treePath, content)
+ require.NoError(t, err)
+ expectedBs, err := json.Marshal(expectedDiff)
+ require.NoError(t, err)
+ bs, err := json.Marshal(diff)
+ require.NoError(t, err)
+ assert.EqualValues(t, expectedBs, bs)
+ })
+}
+
+func TestGetDiffPreviewErrors(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ branch := repo.DefaultBranch
+ treePath := "README.md"
+ content := "# repo1\n\nDescription for repo1\nthis is a new line"
+
+ t.Run("empty repo", func(t *testing.T) {
+ diff, err := GetDiffPreview(db.DefaultContext, &repo_model.Repository{}, branch, treePath, content)
+ assert.Nil(t, diff)
+ assert.EqualError(t, err, "repository does not exist [id: 0, uid: 0, owner_name: , name: ]")
+ })
+
+ t.Run("bad branch", func(t *testing.T) {
+ badBranch := "bad_branch"
+ diff, err := GetDiffPreview(db.DefaultContext, repo, badBranch, treePath, content)
+ assert.Nil(t, diff)
+ assert.EqualError(t, err, "branch does not exist [name: "+badBranch+"]")
+ })
+
+ t.Run("empty treePath", func(t *testing.T) {
+ diff, err := GetDiffPreview(db.DefaultContext, repo, branch, "", content)
+ assert.Nil(t, diff)
+ assert.EqualError(t, err, "path is invalid [path: ]")
+ })
+}
diff --git a/services/repository/files/file.go b/services/repository/files/file.go
new file mode 100644
index 0000000..852cca0
--- /dev/null
+++ b/services/repository/files/file.go
@@ -0,0 +1,174 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package files
+
+import (
+ "context"
+ "fmt"
+ "net/url"
+ "strings"
+ "time"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+)
+
+// GetFilesResponseFromCommit Constructs a FilesResponse from a Commit object
+func GetFilesResponseFromCommit(ctx context.Context, repo *repo_model.Repository, commit *git.Commit, branch string, treeNames []string) (*api.FilesResponse, error) {
+ files := []*api.ContentsResponse{}
+ for _, file := range treeNames {
+ fileContents, _ := GetContents(ctx, repo, file, branch, false) // ok if fails, then will be nil
+ files = append(files, fileContents)
+ }
+ fileCommitResponse, _ := GetFileCommitResponse(repo, commit) // ok if fails, then will be nil
+ verification := GetPayloadCommitVerification(ctx, commit)
+ filesResponse := &api.FilesResponse{
+ Files: files,
+ Commit: fileCommitResponse,
+ Verification: verification,
+ }
+ return filesResponse, nil
+}
+
+// GetFileResponseFromCommit Constructs a FileResponse from a Commit object
+func GetFileResponseFromCommit(ctx context.Context, repo *repo_model.Repository, commit *git.Commit, branch, treeName string) (*api.FileResponse, error) {
+ fileContents, _ := GetContents(ctx, repo, treeName, branch, false) // ok if fails, then will be nil
+ fileCommitResponse, _ := GetFileCommitResponse(repo, commit) // ok if fails, then will be nil
+ verification := GetPayloadCommitVerification(ctx, commit)
+ fileResponse := &api.FileResponse{
+ Content: fileContents,
+ Commit: fileCommitResponse,
+ Verification: verification,
+ }
+ return fileResponse, nil
+}
+
+// GetFileResponseFromFilesResponse Constructs a FileResponse from the file at the given index of a FilesResponse
+func GetFileResponseFromFilesResponse(filesResponse *api.FilesResponse, index int) *api.FileResponse {
+ content := &api.ContentsResponse{}
+ if len(filesResponse.Files) > index {
+ content = filesResponse.Files[index]
+ }
+ fileResponse := &api.FileResponse{
+ Content: content,
+ Commit: filesResponse.Commit,
+ Verification: filesResponse.Verification,
+ }
+ return fileResponse
+}
+
+// GetFileCommitResponse Constructs a FileCommitResponse from a Commit object
+func GetFileCommitResponse(repo *repo_model.Repository, commit *git.Commit) (*api.FileCommitResponse, error) {
+ if repo == nil {
+ return nil, fmt.Errorf("repo cannot be nil")
+ }
+ if commit == nil {
+ return nil, fmt.Errorf("commit cannot be nil")
+ }
+ commitURL, _ := url.Parse(repo.APIURL() + "/git/commits/" + url.PathEscape(commit.ID.String()))
+ commitTreeURL, _ := url.Parse(repo.APIURL() + "/git/trees/" + url.PathEscape(commit.Tree.ID.String()))
+ parents := make([]*api.CommitMeta, commit.ParentCount())
+ for i := 0; i <= commit.ParentCount(); i++ {
+ if parent, err := commit.Parent(i); err == nil && parent != nil {
+ parentCommitURL, _ := url.Parse(repo.APIURL() + "/git/commits/" + url.PathEscape(parent.ID.String()))
+ parents[i] = &api.CommitMeta{
+ SHA: parent.ID.String(),
+ URL: parentCommitURL.String(),
+ }
+ }
+ }
+ commitHTMLURL, _ := url.Parse(repo.HTMLURL() + "/commit/" + url.PathEscape(commit.ID.String()))
+ fileCommit := &api.FileCommitResponse{
+ CommitMeta: api.CommitMeta{
+ SHA: commit.ID.String(),
+ URL: commitURL.String(),
+ },
+ HTMLURL: commitHTMLURL.String(),
+ Author: &api.CommitUser{
+ Identity: api.Identity{
+ Name: commit.Author.Name,
+ Email: commit.Author.Email,
+ },
+ Date: commit.Author.When.UTC().Format(time.RFC3339),
+ },
+ Committer: &api.CommitUser{
+ Identity: api.Identity{
+ Name: commit.Committer.Name,
+ Email: commit.Committer.Email,
+ },
+ Date: commit.Committer.When.UTC().Format(time.RFC3339),
+ },
+ Message: commit.Message(),
+ Tree: &api.CommitMeta{
+ URL: commitTreeURL.String(),
+ SHA: commit.Tree.ID.String(),
+ },
+ Parents: parents,
+ }
+ return fileCommit, nil
+}
+
+// GetAuthorAndCommitterUsers Gets the author and committer user objects from the IdentityOptions
+func GetAuthorAndCommitterUsers(author, committer *IdentityOptions, doer *user_model.User) (authorUser, committerUser *user_model.User) {
+ // Committer and author are optional. If they are not the doer (not same email address)
+ // then we use bogus User objects for them to store their FullName and Email.
+ // If only one of the two are provided, we set both of them to it.
+ // If neither are provided, both are the doer.
+ if committer != nil && committer.Email != "" {
+ if doer != nil && strings.EqualFold(doer.Email, committer.Email) {
+ committerUser = doer // the committer is the doer, so will use their user object
+ if committer.Name != "" {
+ committerUser.FullName = committer.Name
+ }
+ // Use the provided email and not revert to placeholder mail.
+ committerUser.KeepEmailPrivate = false
+ } else {
+ committerUser = &user_model.User{
+ FullName: committer.Name,
+ Email: committer.Email,
+ }
+ }
+ }
+ if author != nil && author.Email != "" {
+ if doer != nil && strings.EqualFold(doer.Email, author.Email) {
+ authorUser = doer // the author is the doer, so will use their user object
+ if author.Name != "" {
+ authorUser.FullName = author.Name
+ }
+ // Use the provided email and not revert to placeholder mail.
+ authorUser.KeepEmailPrivate = false
+ } else {
+ authorUser = &user_model.User{
+ FullName: author.Name,
+ Email: author.Email,
+ }
+ }
+ }
+ if authorUser == nil {
+ if committerUser != nil {
+ authorUser = committerUser // No valid author was given so use the committer
+ } else if doer != nil {
+ authorUser = doer // No valid author was given and no valid committer so use the doer
+ }
+ }
+ if committerUser == nil {
+ committerUser = authorUser // No valid committer so use the author as the committer (was set to a valid user above)
+ }
+ return authorUser, committerUser
+}
+
+// CleanUploadFileName Trims a filename and returns empty string if it is a .git directory
+func CleanUploadFileName(name string) string {
+ // Rebase the filename
+ name = util.PathJoinRel(name)
+ // Git disallows any filenames to have a .git directory in them.
+ for _, part := range strings.Split(name, "/") {
+ if strings.ToLower(part) == ".git" {
+ return ""
+ }
+ }
+ return name
+}
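The fallback rules spelled out above GetAuthorAndCommitterUsers are easiest to see with concrete values. A sketch under assumed inputs (the users and emails below are illustrative, not fixture data):

package files

import (
	"fmt"

	user_model "code.gitea.io/gitea/models/user"
)

// sketchAuthorCommitterFallback walks the documented fallback rules with
// assumed, illustrative values.
func sketchAuthorCommitterFallback() {
	doer := &user_model.User{Name: "doer", Email: "doer@example.com"}

	// Neither author nor committer given: both resolve to the doer.
	a, c := GetAuthorAndCommitterUsers(nil, nil, doer)
	fmt.Println(a == doer, c == doer) // true true

	// Only a committer given, with an email that is not the doer's: a bogus
	// User carries the identity, and the author falls back to the committer.
	committer := &IdentityOptions{Name: "C", Email: "c@example.com"}
	a, c = GetAuthorAndCommitterUsers(nil, committer, doer)
	fmt.Println(a == c, c.Email) // true c@example.com
}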
diff --git a/services/repository/files/file_test.go b/services/repository/files/file_test.go
new file mode 100644
index 0000000..7c387e2
--- /dev/null
+++ b/services/repository/files/file_test.go
@@ -0,0 +1,115 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package files
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestCleanUploadFileName(t *testing.T) {
+ t.Run("Clean regular file", func(t *testing.T) {
+ name := "this/is/test"
+ cleanName := CleanUploadFileName(name)
+ expectedCleanName := name
+ assert.EqualValues(t, expectedCleanName, cleanName)
+ })
+
+ t.Run("Clean a .git path", func(t *testing.T) {
+ name := "this/is/test/.git"
+ cleanName := CleanUploadFileName(name)
+ expectedCleanName := ""
+ assert.EqualValues(t, expectedCleanName, cleanName)
+ })
+}
+
+func getExpectedFileResponse() *api.FileResponse {
+ treePath := "README.md"
+ sha := "4b4851ad51df6a7d9f25c979345979eaeb5b349f"
+ encoding := "base64"
+ content := "IyByZXBvMQoKRGVzY3JpcHRpb24gZm9yIHJlcG8x"
+ selfURL := setting.AppURL + "api/v1/repos/user2/repo1/contents/" + treePath + "?ref=master"
+ htmlURL := setting.AppURL + "user2/repo1/src/branch/master/" + treePath
+ gitURL := setting.AppURL + "api/v1/repos/user2/repo1/git/blobs/" + sha
+ downloadURL := setting.AppURL + "user2/repo1/raw/branch/master/" + treePath
+ return &api.FileResponse{
+ Content: &api.ContentsResponse{
+ Name: treePath,
+ Path: treePath,
+ SHA: sha,
+ LastCommitSHA: "65f1bf27bc3bf70f64657658635e66094edbcb4d",
+ Type: "file",
+ Size: 30,
+ Encoding: &encoding,
+ Content: &content,
+ URL: &selfURL,
+ HTMLURL: &htmlURL,
+ GitURL: &gitURL,
+ DownloadURL: &downloadURL,
+ Links: &api.FileLinksResponse{
+ Self: &selfURL,
+ GitURL: &gitURL,
+ HTMLURL: &htmlURL,
+ },
+ },
+ Commit: &api.FileCommitResponse{
+ CommitMeta: api.CommitMeta{
+ URL: "https://try.gitea.io/api/v1/repos/user2/repo1/git/commits/65f1bf27bc3bf70f64657658635e66094edbcb4d",
+ SHA: "65f1bf27bc3bf70f64657658635e66094edbcb4d",
+ },
+ HTMLURL: "https://try.gitea.io/user2/repo1/commit/65f1bf27bc3bf70f64657658635e66094edbcb4d",
+ Author: &api.CommitUser{
+ Identity: api.Identity{
+ Name: "user1",
+ Email: "address1@example.com",
+ },
+ Date: "2017-03-19T20:47:59Z",
+ },
+ Committer: &api.CommitUser{
+ Identity: api.Identity{
+ Name: "Ethan Koenig",
+ Email: "ethantkoenig@gmail.com",
+ },
+ Date: "2017-03-19T20:47:59Z",
+ },
+ Parents: []*api.CommitMeta{},
+ Message: "Initial commit\n",
+ Tree: &api.CommitMeta{
+ URL: "https://try.gitea.io/api/v1/repos/user2/repo1/git/trees/2a2f1d4670728a2e10049e345bd7a276468beab6",
+ SHA: "2a2f1d4670728a2e10049e345bd7a276468beab6",
+ },
+ },
+ Verification: &api.PayloadCommitVerification{
+ Verified: false,
+ Reason: "gpg.error.not_signed_commit",
+ Signature: "",
+ Payload: "",
+ },
+ }
+}
+
+func TestGetFileResponseFromCommit(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ branch := repo.DefaultBranch
+ treePath := "README.md"
+ gitRepo, err := gitrepo.OpenRepository(db.DefaultContext, repo)
+ require.NoError(t, err)
+ defer gitRepo.Close()
+ commit, err := gitRepo.GetBranchCommit(branch)
+ require.NoError(t, err)
+ expectedFileResponse := getExpectedFileResponse()
+
+ fileResponse, err := GetFileResponseFromCommit(db.DefaultContext, repo, commit, branch, treePath)
+ require.NoError(t, err)
+ assert.EqualValues(t, expectedFileResponse, fileResponse)
+}
diff --git a/services/repository/files/patch.go b/services/repository/files/patch.go
new file mode 100644
index 0000000..e5f7e2a
--- /dev/null
+++ b/services/repository/files/patch.go
@@ -0,0 +1,199 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package files
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/structs"
+ asymkey_service "code.gitea.io/gitea/services/asymkey"
+)
+
+// ApplyDiffPatchOptions holds the repository diff patch update options
+type ApplyDiffPatchOptions struct {
+ LastCommitID string
+ OldBranch string
+ NewBranch string
+ Message string
+ Content string
+ SHA string
+ Author *IdentityOptions
+ Committer *IdentityOptions
+ Dates *CommitDateOptions
+ Signoff bool
+}
+
+// Validate validates the provided options
+func (opts *ApplyDiffPatchOptions) Validate(ctx context.Context, repo *repo_model.Repository, doer *user_model.User) error {
+ // If no branch name is set, assume the repository's default branch
+ if opts.OldBranch == "" {
+ opts.OldBranch = repo.DefaultBranch
+ }
+ if opts.NewBranch == "" {
+ opts.NewBranch = opts.OldBranch
+ }
+
+ gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, repo)
+ if err != nil {
+ return err
+ }
+ defer closer.Close()
+
+ // oldBranch must exist for this operation
+ if _, err := gitRepo.GetBranch(opts.OldBranch); err != nil {
+ return err
+ }
+ // A NewBranch can be specified for the patch to be applied to.
+ // Check to make sure the branch does not already exist, otherwise we can't proceed.
+ // If we aren't branching to a new branch, make sure user can commit to the given branch
+ if opts.NewBranch != opts.OldBranch {
+ existingBranch, err := gitRepo.GetBranch(opts.NewBranch)
+ if existingBranch != nil {
+ return git_model.ErrBranchAlreadyExists{
+ BranchName: opts.NewBranch,
+ }
+ }
+ if err != nil && !git.IsErrBranchNotExist(err) {
+ return err
+ }
+ } else {
+ protectedBranch, err := git_model.GetFirstMatchProtectedBranchRule(ctx, repo.ID, opts.OldBranch)
+ if err != nil {
+ return err
+ }
+ if protectedBranch != nil {
+ protectedBranch.Repo = repo
+ if !protectedBranch.CanUserPush(ctx, doer) {
+ return models.ErrUserCannotCommit{
+ UserName: doer.LowerName,
+ }
+ }
+ }
+ if protectedBranch != nil && protectedBranch.RequireSignedCommits {
+ _, _, _, err := asymkey_service.SignCRUDAction(ctx, repo.RepoPath(), doer, repo.RepoPath(), opts.OldBranch)
+ if err != nil {
+ if !asymkey_service.IsErrWontSign(err) {
+ return err
+ }
+ return models.ErrUserCannotCommit{
+ UserName: doer.LowerName,
+ }
+ }
+ }
+ }
+ return nil
+}
+
+// ApplyDiffPatch applies a patch to the given repository
+func ApplyDiffPatch(ctx context.Context, repo *repo_model.Repository, doer *user_model.User, opts *ApplyDiffPatchOptions) (*structs.FileResponse, error) {
+ err := repo.MustNotBeArchived()
+ if err != nil {
+ return nil, err
+ }
+
+ if err := opts.Validate(ctx, repo, doer); err != nil {
+ return nil, err
+ }
+
+ message := strings.TrimSpace(opts.Message)
+
+ author, committer := GetAuthorAndCommitterUsers(opts.Author, opts.Committer, doer)
+
+ t, err := NewTemporaryUploadRepository(ctx, repo)
+ if err != nil {
+ log.Error("NewTemporaryUploadRepository failed: %v", err)
+ return nil, err
+ }
+ defer t.Close()
+ if err := t.Clone(opts.OldBranch, true); err != nil {
+ return nil, err
+ }
+ if err := t.SetDefaultIndex(); err != nil {
+ return nil, err
+ }
+
+ // Get the commit of the original branch
+ commit, err := t.GetBranchCommit(opts.OldBranch)
+ if err != nil {
+ return nil, err // Couldn't get a commit for the branch
+ }
+
+ // Assign LastCommitID in opts if it hasn't been set
+ if opts.LastCommitID == "" {
+ opts.LastCommitID = commit.ID.String()
+ } else {
+ lastCommitID, err := t.gitRepo.ConvertToGitID(opts.LastCommitID)
+ if err != nil {
+ return nil, fmt.Errorf("ApplyPatch: Invalid last commit ID: %w", err)
+ }
+ opts.LastCommitID = lastCommitID.String()
+ if commit.ID.String() != opts.LastCommitID {
+ return nil, models.ErrCommitIDDoesNotMatch{
+ GivenCommitID: opts.LastCommitID,
+ CurrentCommitID: commit.ID.String(),
+ }
+ }
+ }
+
+ stdout := &strings.Builder{}
+ stderr := &strings.Builder{}
+
+ cmdApply := git.NewCommand(ctx, "apply", "--index", "--recount", "--cached", "--ignore-whitespace", "--whitespace=fix", "--binary")
+ if git.CheckGitVersionAtLeast("2.32") == nil {
+ cmdApply.AddArguments("-3")
+ }
+
+ if err := cmdApply.Run(&git.RunOpts{
+ Dir: t.basePath,
+ Stdout: stdout,
+ Stderr: stderr,
+ Stdin: strings.NewReader(opts.Content),
+ }); err != nil {
+ return nil, fmt.Errorf("Error: Stdout: %s\nStderr: %s\nErr: %w", stdout.String(), stderr.String(), err)
+ }
+
+ // Now write the tree
+ treeHash, err := t.WriteTree()
+ if err != nil {
+ return nil, err
+ }
+
+ // Now commit the tree
+ var commitHash string
+ if opts.Dates != nil {
+ commitHash, err = t.CommitTreeWithDate("HEAD", author, committer, treeHash, message, opts.Signoff, opts.Dates.Author, opts.Dates.Committer)
+ } else {
+ commitHash, err = t.CommitTree("HEAD", author, committer, treeHash, message, opts.Signoff)
+ }
+ if err != nil {
+ return nil, err
+ }
+
+ // Then push this tree to NewBranch
+ if err := t.Push(doer, commitHash, opts.NewBranch); err != nil {
+ return nil, err
+ }
+
+ commit, err = t.GetCommit(commitHash)
+ if err != nil {
+ return nil, err
+ }
+
+ fileCommitResponse, _ := GetFileCommitResponse(repo, commit) // ok if fails, then will be nil
+ verification := GetPayloadCommitVerification(ctx, commit)
+ fileResponse := &structs.FileResponse{
+ Commit: fileCommitResponse,
+ Verification: verification,
+ }
+
+ return fileResponse, nil
+}
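A hedged usage sketch for ApplyDiffPatch follows; the wrapper name, branch names, and commit message are assumptions for illustration, and the repo, doer, and patch text are taken to come from the caller:

package files

import (
	"context"

	repo_model "code.gitea.io/gitea/models/repo"
	user_model "code.gitea.io/gitea/models/user"
	"code.gitea.io/gitea/modules/structs"
)

// applyPatchSketch shows one plausible call into ApplyDiffPatch: apply a
// unified diff on top of the default branch and commit the result to a new
// branch. The function name and literal values are illustrative only.
func applyPatchSketch(ctx context.Context, repo *repo_model.Repository, doer *user_model.User, patch string) (*structs.FileResponse, error) {
	opts := &ApplyDiffPatchOptions{
		OldBranch: "",             // empty: Validate falls back to repo.DefaultBranch
		NewBranch: "patch-branch", // must not exist yet, or Validate returns ErrBranchAlreadyExists
		Message:   "Apply reviewed patch",
		Content:   patch, // the unified diff fed to `git apply` on stdin
	}
	return ApplyDiffPatch(ctx, repo, doer, opts)
}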
diff --git a/services/repository/files/temp_repo.go b/services/repository/files/temp_repo.go
new file mode 100644
index 0000000..50b936c
--- /dev/null
+++ b/services/repository/files/temp_repo.go
@@ -0,0 +1,406 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package files
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "io"
+ "os"
+ "regexp"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ asymkey_service "code.gitea.io/gitea/services/asymkey"
+ "code.gitea.io/gitea/services/gitdiff"
+)
+
+// TemporaryUploadRepository is a type to wrap our upload repositories as a shallow clone
+type TemporaryUploadRepository struct {
+ ctx context.Context
+ repo *repo_model.Repository
+ gitRepo *git.Repository
+ basePath string
+}
+
+// NewTemporaryUploadRepository creates a new temporary upload repository
+func NewTemporaryUploadRepository(ctx context.Context, repo *repo_model.Repository) (*TemporaryUploadRepository, error) {
+ basePath, err := repo_module.CreateTemporaryPath("upload")
+ if err != nil {
+ return nil, err
+ }
+ t := &TemporaryUploadRepository{ctx: ctx, repo: repo, basePath: basePath}
+ return t, nil
+}
+
+// Close the repository cleaning up all files
+func (t *TemporaryUploadRepository) Close() {
+ defer t.gitRepo.Close()
+ if err := repo_module.RemoveTemporaryPath(t.basePath); err != nil {
+ log.Error("Failed to remove temporary path %s: %v", t.basePath, err)
+ }
+}
+
+// Clone the base repository to our path and set branch as the HEAD
+func (t *TemporaryUploadRepository) Clone(branch string, bare bool) error {
+ cmd := git.NewCommand(t.ctx, "clone", "-s", "-b").AddDynamicArguments(branch, t.repo.RepoPath(), t.basePath)
+ if bare {
+ cmd.AddArguments("--bare")
+ }
+
+ if _, _, err := cmd.RunStdString(nil); err != nil {
+ stderr := err.Error()
+ if matched, _ := regexp.MatchString(".*Remote branch .* not found in upstream origin.*", stderr); matched {
+ return git.ErrBranchNotExist{
+ Name: branch,
+ }
+ } else if matched, _ := regexp.MatchString(".* repository .* does not exist.*", stderr); matched {
+ return repo_model.ErrRepoNotExist{
+ ID: t.repo.ID,
+ UID: t.repo.OwnerID,
+ OwnerName: t.repo.OwnerName,
+ Name: t.repo.Name,
+ }
+ }
+ return fmt.Errorf("Clone: %w %s", err, stderr)
+ }
+ gitRepo, err := git.OpenRepository(t.ctx, t.basePath)
+ if err != nil {
+ return err
+ }
+ t.gitRepo = gitRepo
+ return nil
+}
+
+// Init the repository
+func (t *TemporaryUploadRepository) Init(objectFormatName string) error {
+ if err := git.InitRepository(t.ctx, t.basePath, false, objectFormatName); err != nil {
+ return err
+ }
+ gitRepo, err := git.OpenRepository(t.ctx, t.basePath)
+ if err != nil {
+ return err
+ }
+ t.gitRepo = gitRepo
+ return nil
+}
+
+// SetDefaultIndex sets the git index to our HEAD
+func (t *TemporaryUploadRepository) SetDefaultIndex() error {
+ if _, _, err := git.NewCommand(t.ctx, "read-tree", "HEAD").RunStdString(&git.RunOpts{Dir: t.basePath}); err != nil {
+ return fmt.Errorf("SetDefaultIndex: %w", err)
+ }
+ return nil
+}
+
+// RefreshIndex looks at the current index and checks to see if merges or updates are needed by checking stat() information.
+func (t *TemporaryUploadRepository) RefreshIndex() error {
+ if _, _, err := git.NewCommand(t.ctx, "update-index", "--refresh").RunStdString(&git.RunOpts{Dir: t.basePath}); err != nil {
+ return fmt.Errorf("RefreshIndex: %w", err)
+ }
+ return nil
+}
+
+// LsFiles checks if the given filename arguments are in the index
+func (t *TemporaryUploadRepository) LsFiles(filenames ...string) ([]string, error) {
+ stdOut := new(bytes.Buffer)
+ stdErr := new(bytes.Buffer)
+
+ if err := git.NewCommand(t.ctx, "ls-files", "-z").AddDashesAndList(filenames...).
+ Run(&git.RunOpts{
+ Dir: t.basePath,
+ Stdout: stdOut,
+ Stderr: stdErr,
+ }); err != nil {
+ log.Error("Unable to run git ls-files for temporary repo: %s (%s) Error: %v\nstdout: %s\nstderr: %s", t.repo.FullName(), t.basePath, err, stdOut.String(), stdErr.String())
+ err = fmt.Errorf("Unable to run git ls-files for temporary repo of: %s Error: %w\nstdout: %s\nstderr: %s", t.repo.FullName(), err, stdOut.String(), stdErr.String())
+ return nil, err
+ }
+
+ fileList := make([]string, 0, len(filenames))
+ for _, line := range bytes.Split(stdOut.Bytes(), []byte{'\000'}) {
+ fileList = append(fileList, string(line))
+ }
+
+ return fileList, nil
+}
+
+// RemoveFilesFromIndex removes the given files from the index
+func (t *TemporaryUploadRepository) RemoveFilesFromIndex(filenames ...string) error {
+ objectFormat, err := t.gitRepo.GetObjectFormat()
+ if err != nil {
+ return err
+ }
+
+ stdOut := new(bytes.Buffer)
+ stdErr := new(bytes.Buffer)
+ stdIn := new(bytes.Buffer)
+ for _, file := range filenames {
+ if file != "" {
+ stdIn.WriteString("0 ")
+ stdIn.WriteString(objectFormat.EmptyObjectID().String())
+ stdIn.WriteByte('\t')
+ stdIn.WriteString(file)
+ stdIn.WriteByte('\000')
+ }
+ }
+
+ if err := git.NewCommand(t.ctx, "update-index", "--remove", "-z", "--index-info").
+ Run(&git.RunOpts{
+ Dir: t.basePath,
+ Stdin: stdIn,
+ Stdout: stdOut,
+ Stderr: stdErr,
+ }); err != nil {
+ log.Error("Unable to update-index for temporary repo: %s (%s) Error: %v\nstdout: %s\nstderr: %s", t.repo.FullName(), t.basePath, err, stdOut.String(), stdErr.String())
+ return fmt.Errorf("Unable to update-index for temporary repo: %s Error: %w\nstdout: %s\nstderr: %s", t.repo.FullName(), err, stdOut.String(), stdErr.String())
+ }
+ return nil
+}
+
+// HashObject writes the provided content to the object db and returns its hash
+func (t *TemporaryUploadRepository) HashObject(content io.Reader) (string, error) {
+ stdOut := new(bytes.Buffer)
+ stdErr := new(bytes.Buffer)
+
+ if err := git.NewCommand(t.ctx, "hash-object", "-w", "--stdin").
+ Run(&git.RunOpts{
+ Dir: t.basePath,
+ Stdin: content,
+ Stdout: stdOut,
+ Stderr: stdErr,
+ }); err != nil {
+ log.Error("Unable to hash-object to temporary repo: %s (%s) Error: %v\nstdout: %s\nstderr: %s", t.repo.FullName(), t.basePath, err, stdOut.String(), stdErr.String())
+ return "", fmt.Errorf("Unable to hash-object to temporary repo: %s Error: %w\nstdout: %s\nstderr: %s", t.repo.FullName(), err, stdOut.String(), stdErr.String())
+ }
+
+ return strings.TrimSpace(stdOut.String()), nil
+}
+
+// AddObjectToIndex adds the provided object hash to the index with the provided mode and path
+func (t *TemporaryUploadRepository) AddObjectToIndex(mode, objectHash, objectPath string) error {
+ if _, _, err := git.NewCommand(t.ctx, "update-index", "--add", "--replace", "--cacheinfo").AddDynamicArguments(mode, objectHash, objectPath).RunStdString(&git.RunOpts{Dir: t.basePath}); err != nil {
+ stderr := err.Error()
+ if matched, _ := regexp.MatchString(".*Invalid path '.*", stderr); matched {
+ return models.ErrFilePathInvalid{
+ Message: objectPath,
+ Path: objectPath,
+ }
+ }
+ log.Error("Unable to add object to index: %s %s %s in temporary repo %s(%s) Error: %v", mode, objectHash, objectPath, t.repo.FullName(), t.basePath, err)
+ return fmt.Errorf("Unable to add object to index at %s in temporary repo %s Error: %w", objectPath, t.repo.FullName(), err)
+ }
+ return nil
+}
+
+// WriteTree writes the current index as a tree to the object db and returns its hash
+func (t *TemporaryUploadRepository) WriteTree() (string, error) {
+ stdout, _, err := git.NewCommand(t.ctx, "write-tree").RunStdString(&git.RunOpts{Dir: t.basePath})
+ if err != nil {
+ log.Error("Unable to write tree in temporary repo: %s(%s): Error: %v", t.repo.FullName(), t.basePath, err)
+ return "", fmt.Errorf("Unable to write-tree in temporary repo for: %s Error: %w", t.repo.FullName(), err)
+ }
+ return strings.TrimSpace(stdout), nil
+}
+
+// GetLastCommit gets the last commit ID SHA of the repo
+func (t *TemporaryUploadRepository) GetLastCommit() (string, error) {
+ return t.GetLastCommitByRef("HEAD")
+}
+
+// GetLastCommitByRef gets the last commit ID SHA of the repo by ref
+func (t *TemporaryUploadRepository) GetLastCommitByRef(ref string) (string, error) {
+ if ref == "" {
+ ref = "HEAD"
+ }
+ stdout, _, err := git.NewCommand(t.ctx, "rev-parse").AddDynamicArguments(ref).RunStdString(&git.RunOpts{Dir: t.basePath})
+ if err != nil {
+ log.Error("Unable to get last ref for %s in temporary repo: %s(%s): Error: %v", ref, t.repo.FullName(), t.basePath, err)
+ return "", fmt.Errorf("Unable to rev-parse %s in temporary repo for: %s Error: %w", ref, t.repo.FullName(), err)
+ }
+ return strings.TrimSpace(stdout), nil
+}
+
+// CommitTree creates a commit from a given tree for the user with provided message
+func (t *TemporaryUploadRepository) CommitTree(parent string, author, committer *user_model.User, treeHash, message string, signoff bool) (string, error) {
+ return t.CommitTreeWithDate(parent, author, committer, treeHash, message, signoff, time.Now(), time.Now())
+}
+
+// CommitTreeWithDate creates a commit from a given tree for the user with provided message
+func (t *TemporaryUploadRepository) CommitTreeWithDate(parent string, author, committer *user_model.User, treeHash, message string, signoff bool, authorDate, committerDate time.Time) (string, error) {
+ authorSig := author.NewGitSig()
+ committerSig := committer.NewGitSig()
+
+ // Because this may call hooks we should pass in the environment
+ env := append(os.Environ(),
+ "GIT_AUTHOR_NAME="+authorSig.Name,
+ "GIT_AUTHOR_EMAIL="+authorSig.Email,
+ "GIT_AUTHOR_DATE="+authorDate.Format(time.RFC3339),
+ "GIT_COMMITTER_DATE="+committerDate.Format(time.RFC3339),
+ )
+
+ messageBytes := new(bytes.Buffer)
+ _, _ = messageBytes.WriteString(message)
+ _, _ = messageBytes.WriteString("\n")
+
+ cmdCommitTree := git.NewCommand(t.ctx, "commit-tree").AddDynamicArguments(treeHash)
+ if parent != "" {
+ cmdCommitTree.AddOptionValues("-p", parent)
+ }
+
+ var sign bool
+ var keyID string
+ var signer *git.Signature
+ if parent != "" {
+ sign, keyID, signer, _ = asymkey_service.SignCRUDAction(t.ctx, t.repo.RepoPath(), author, t.basePath, parent)
+ } else {
+ sign, keyID, signer, _ = asymkey_service.SignInitialCommit(t.ctx, t.repo.RepoPath(), author)
+ }
+ if sign {
+ cmdCommitTree.AddOptionFormat("-S%s", keyID)
+ if t.repo.GetTrustModel() == repo_model.CommitterTrustModel || t.repo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel {
+ if committerSig.Name != authorSig.Name || committerSig.Email != authorSig.Email {
+ // Add trailers
+ _, _ = messageBytes.WriteString("\n")
+ _, _ = messageBytes.WriteString("Co-authored-by: ")
+ _, _ = messageBytes.WriteString(committerSig.String())
+ _, _ = messageBytes.WriteString("\n")
+ _, _ = messageBytes.WriteString("Co-committed-by: ")
+ _, _ = messageBytes.WriteString(committerSig.String())
+ _, _ = messageBytes.WriteString("\n")
+ }
+ committerSig = signer
+ }
+ } else {
+ cmdCommitTree.AddArguments("--no-gpg-sign")
+ }
+
+ if signoff {
+ // Signed-off-by
+ _, _ = messageBytes.WriteString("\n")
+ _, _ = messageBytes.WriteString("Signed-off-by: ")
+ _, _ = messageBytes.WriteString(committerSig.String())
+ }
+
+ env = append(env,
+ "GIT_COMMITTER_NAME="+committerSig.Name,
+ "GIT_COMMITTER_EMAIL="+committerSig.Email,
+ )
+
+ stdout := new(bytes.Buffer)
+ stderr := new(bytes.Buffer)
+ if err := cmdCommitTree.
+ Run(&git.RunOpts{
+ Env: env,
+ Dir: t.basePath,
+ Stdin: messageBytes,
+ Stdout: stdout,
+ Stderr: stderr,
+ }); err != nil {
+ log.Error("Unable to commit-tree in temporary repo: %s (%s) Error: %v\nStdout: %s\nStderr: %s",
+ t.repo.FullName(), t.basePath, err, stdout, stderr)
+ return "", fmt.Errorf("Unable to commit-tree in temporary repo: %s Error: %w\nStdout: %s\nStderr: %s",
+ t.repo.FullName(), err, stdout, stderr)
+ }
+ return strings.TrimSpace(stdout.String()), nil
+}
+
+// Push the provided commitHash to the repository branch by the provided user
+func (t *TemporaryUploadRepository) Push(doer *user_model.User, commitHash, branch string) error {
+ // Because this calls hooks we need to pass in the environment
+ env := repo_module.PushingEnvironment(doer, t.repo)
+ if err := git.Push(t.ctx, t.basePath, git.PushOptions{
+ Remote: t.repo.RepoPath(),
+ Branch: strings.TrimSpace(commitHash) + ":" + git.BranchPrefix + strings.TrimSpace(branch),
+ Env: env,
+ }); err != nil {
+ if git.IsErrPushOutOfDate(err) {
+ return err
+ } else if git.IsErrPushRejected(err) {
+ rejectErr := err.(*git.ErrPushRejected)
+ log.Info("Unable to push back to repo from temporary repo due to rejection: %s (%s)\nStdout: %s\nStderr: %s\nError: %v",
+ t.repo.FullName(), t.basePath, rejectErr.StdOut, rejectErr.StdErr, rejectErr.Err)
+ return err
+ }
+ log.Error("Unable to push back to repo from temporary repo: %s (%s)\nError: %v",
+ t.repo.FullName(), t.basePath, err)
+ return fmt.Errorf("Unable to push back to repo from temporary repo: %s (%s) Error: %v",
+ t.repo.FullName(), t.basePath, err)
+ }
+ return nil
+}
+
+// DiffIndex returns a Diff of the current index to the head
+func (t *TemporaryUploadRepository) DiffIndex() (*gitdiff.Diff, error) {
+ stdoutReader, stdoutWriter, err := os.Pipe()
+ if err != nil {
+ log.Error("Unable to open stdout pipe: %v", err)
+ return nil, fmt.Errorf("Unable to open stdout pipe: %w", err)
+ }
+ defer func() {
+ _ = stdoutReader.Close()
+ _ = stdoutWriter.Close()
+ }()
+ stderr := new(bytes.Buffer)
+ var diff *gitdiff.Diff
+ var finalErr error
+
+ if err := git.NewCommand(t.ctx, "diff-index", "--src-prefix=\\a/", "--dst-prefix=\\b/", "--cached", "-p", "HEAD").
+ Run(&git.RunOpts{
+ Timeout: 30 * time.Second,
+ Dir: t.basePath,
+ Stdout: stdoutWriter,
+ Stderr: stderr,
+ PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error {
+ _ = stdoutWriter.Close()
+ diff, finalErr = gitdiff.ParsePatch(t.ctx, setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, stdoutReader, "")
+ if finalErr != nil {
+ log.Error("ParsePatch: %v", finalErr)
+ cancel()
+ }
+ _ = stdoutReader.Close()
+ return finalErr
+ },
+ }); err != nil {
+ if finalErr != nil {
+ log.Error("Unable to ParsePatch in temporary repo %s (%s). Error: %v", t.repo.FullName(), t.basePath, finalErr)
+ return nil, finalErr
+ }
+ log.Error("Unable to run diff-index pipeline in temporary repo %s (%s). Error: %v\nStderr: %s",
+ t.repo.FullName(), t.basePath, err, stderr)
+ return nil, fmt.Errorf("Unable to run diff-index pipeline in temporary repo %s. Error: %w\nStderr: %s",
+ t.repo.FullName(), err, stderr)
+ }
+
+ diff.NumFiles, diff.TotalAddition, diff.TotalDeletion, err = git.GetDiffShortStat(t.ctx, t.basePath, git.TrustedCmdArgs{"--cached"}, "HEAD")
+ if err != nil {
+ return nil, err
+ }
+
+ return diff, nil
+}
+
+// GetBranchCommit Gets the commit object of the given branch
+func (t *TemporaryUploadRepository) GetBranchCommit(branch string) (*git.Commit, error) {
+ if t.gitRepo == nil {
+ return nil, fmt.Errorf("repository has not been cloned")
+ }
+ return t.gitRepo.GetBranchCommit(branch)
+}
+
+// GetCommit Gets the commit object of the given commit ID
+func (t *TemporaryUploadRepository) GetCommit(commitID string) (*git.Commit, error) {
+ if t.gitRepo == nil {
+ return nil, fmt.Errorf("repository has not been cloned")
+ }
+ return t.gitRepo.GetCommit(commitID)
+}
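GetDiffPreview, ApplyDiffPatch, and ChangeRepoFiles all drive TemporaryUploadRepository through the same clone → index → object → tree → commit → push lifecycle. A condensed sketch of that flow, assuming a hypothetical wrapper name and using only the methods defined above:

package files

import (
	"context"
	"strings"

	repo_model "code.gitea.io/gitea/models/repo"
	user_model "code.gitea.io/gitea/models/user"
)

// tempRepoFlowSketch condenses the lifecycle used throughout this package;
// it is a sketch, not a replacement for the real call sites.
func tempRepoFlowSketch(ctx context.Context, repo *repo_model.Repository, doer *user_model.User, branch, treePath, content string) error {
	t, err := NewTemporaryUploadRepository(ctx, repo)
	if err != nil {
		return err
	}
	defer t.Close()

	if err := t.Clone(branch, true); err != nil { // bare clone of the one branch
		return err
	}
	if err := t.SetDefaultIndex(); err != nil { // read-tree HEAD into the index
		return err
	}
	hash, err := t.HashObject(strings.NewReader(content)) // write the blob to the object db
	if err != nil {
		return err
	}
	if err := t.AddObjectToIndex("100644", hash, treePath); err != nil {
		return err
	}
	tree, err := t.WriteTree() // snapshot the index as a tree
	if err != nil {
		return err
	}
	commit, err := t.CommitTree("HEAD", doer, doer, tree, "sketch commit", false)
	if err != nil {
		return err
	}
	return t.Push(doer, commit, branch) // push back through the normal hooks
}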
diff --git a/services/repository/files/temp_repo_test.go b/services/repository/files/temp_repo_test.go
new file mode 100644
index 0000000..e7d85ea
--- /dev/null
+++ b/services/repository/files/temp_repo_test.go
@@ -0,0 +1,28 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package files
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/git"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestRemoveFilesFromIndexSha256(t *testing.T) {
+ if git.CheckGitVersionAtLeast("2.42") != nil {
+ t.Skip("skipping because installed Git version doesn't support SHA256")
+ }
+ unittest.PrepareTestEnv(t)
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+
+ temp, err := NewTemporaryUploadRepository(db.DefaultContext, repo)
+ require.NoError(t, err)
+ require.NoError(t, temp.Init("sha256"))
+ require.NoError(t, temp.RemoveFilesFromIndex("README.md"))
+}
diff --git a/services/repository/files/tree.go b/services/repository/files/tree.go
new file mode 100644
index 0000000..e3a7f3b
--- /dev/null
+++ b/services/repository/files/tree.go
@@ -0,0 +1,101 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package files
+
+import (
+ "context"
+ "fmt"
+ "net/url"
+
+ "code.gitea.io/gitea/models"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+)
+
+// GetTreeBySHA gets the GitTreeResponse of a repository using a sha hash.
+func GetTreeBySHA(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, sha string, page, perPage int, recursive bool) (*api.GitTreeResponse, error) {
+ gitTree, err := gitRepo.GetTree(sha)
+ if err != nil || gitTree == nil {
+ return nil, models.ErrSHANotFound{
+ SHA: sha,
+ }
+ }
+ tree := new(api.GitTreeResponse)
+ tree.SHA = gitTree.ResolvedID.String()
+ tree.URL = repo.APIURL() + "/git/trees/" + url.PathEscape(tree.SHA)
+ var entries git.Entries
+ if recursive {
+ entries, err = gitTree.ListEntriesRecursiveWithSize()
+ } else {
+ entries, err = gitTree.ListEntries()
+ }
+ if err != nil {
+ return nil, err
+ }
+ apiURL := repo.APIURL()
+ apiURLLen := len(apiURL)
+ objectFormat := git.ObjectFormatFromName(repo.ObjectFormatName)
+ hashLen := objectFormat.FullLength()
+
+ const gitBlobsPath = "/git/blobs/"
+ blobURL := make([]byte, apiURLLen+hashLen+len(gitBlobsPath))
+ copy(blobURL, apiURL)
+ copy(blobURL[apiURLLen:], []byte(gitBlobsPath))
+
+ const gitTreePath = "/git/trees/"
+ treeURL := make([]byte, apiURLLen+hashLen+len(gitTreePath))
+ copy(treeURL, apiURL)
+ copy(treeURL[apiURLLen:], []byte(gitTreePath))
+
+ // copyPos is at the start of the hash
+ copyPos := len(treeURL) - hashLen
+
+ if perPage <= 0 || perPage > setting.API.DefaultGitTreesPerPage {
+ perPage = setting.API.DefaultGitTreesPerPage
+ }
+ if page <= 0 {
+ page = 1
+ }
+ tree.Page = page
+ tree.TotalCount = len(entries)
+ rangeStart := perPage * (page - 1)
+ if rangeStart >= len(entries) {
+ return tree, nil
+ }
+ var rangeEnd int
+ if len(entries) > perPage {
+ tree.Truncated = true
+ }
+ if rangeStart+perPage < len(entries) {
+ rangeEnd = rangeStart + perPage
+ } else {
+ rangeEnd = len(entries)
+ }
+ tree.Entries = make([]api.GitEntry, rangeEnd-rangeStart)
+ for e := rangeStart; e < rangeEnd; e++ {
+ i := e - rangeStart
+
+ tree.Entries[i].Path = entries[e].Name()
+ tree.Entries[i].Mode = fmt.Sprintf("%06o", entries[e].Mode())
+ tree.Entries[i].Type = entries[e].Type()
+ tree.Entries[i].Size = entries[e].Size()
+ tree.Entries[i].SHA = entries[e].ID.String()
+
+ if entries[e].IsDir() {
+ copy(treeURL[copyPos:], entries[e].ID.String())
+ tree.Entries[i].URL = string(treeURL)
+ } else if entries[e].IsSubModule() {
+ // In the GitHub REST API (version 2022-11-28), if a tree entry is a submodule
+ // its url is returned as an empty string, so the URL is set to "" here.
+ tree.Entries[i].URL = ""
+ } else {
+ copy(blobURL[copyPos:], entries[e].ID.String())
+ tree.Entries[i].URL = string(blobURL)
+ }
+ }
+ return tree, nil
+}
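The paging window in GetTreeBySHA is plain slice arithmetic once perPage has been clamped to setting.API.DefaultGitTreesPerPage: 25 entries with perPage 10 and page 3 yield the window [20, 25). A small sketch of just that calculation (the helper name is hypothetical):

package files

// treePageWindow mirrors the slice arithmetic in GetTreeBySHA. Hypothetical
// helper for illustration only.
func treePageWindow(total, page, perPage int) (start, end int) {
	if page <= 0 {
		page = 1
	}
	start = perPage * (page - 1)
	if start >= total {
		return total, total // past the end: empty window
	}
	end = start + perPage
	if end > total {
		end = total
	}
	return start, end
}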
diff --git a/services/repository/files/tree_test.go b/services/repository/files/tree_test.go
new file mode 100644
index 0000000..9e5c5c1
--- /dev/null
+++ b/services/repository/files/tree_test.go
@@ -0,0 +1,52 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package files
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/services/contexttest"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestGetTreeBySHA(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1")
+ contexttest.LoadRepo(t, ctx, 1)
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadGitRepo(t, ctx)
+ defer ctx.Repo.GitRepo.Close()
+
+ sha := ctx.Repo.Repository.DefaultBranch
+ page := 1
+ perPage := 10
+ ctx.SetParams(":id", "1")
+ ctx.SetParams(":sha", sha)
+
+ tree, err := GetTreeBySHA(ctx, ctx.Repo.Repository, ctx.Repo.GitRepo, ctx.Params(":sha"), page, perPage, true)
+ require.NoError(t, err)
+ expectedTree := &api.GitTreeResponse{
+ SHA: "65f1bf27bc3bf70f64657658635e66094edbcb4d",
+ URL: "https://try.gitea.io/api/v1/repos/user2/repo1/git/trees/65f1bf27bc3bf70f64657658635e66094edbcb4d",
+ Entries: []api.GitEntry{
+ {
+ Path: "README.md",
+ Mode: "100644",
+ Type: "blob",
+ Size: 30,
+ SHA: "4b4851ad51df6a7d9f25c979345979eaeb5b349f",
+ URL: "https://try.gitea.io/api/v1/repos/user2/repo1/git/blobs/4b4851ad51df6a7d9f25c979345979eaeb5b349f",
+ },
+ },
+ Truncated: false,
+ Page: 1,
+ TotalCount: 1,
+ }
+
+ assert.EqualValues(t, expectedTree, tree)
+}
diff --git a/services/repository/files/update.go b/services/repository/files/update.go
new file mode 100644
index 0000000..d6025b6
--- /dev/null
+++ b/services/repository/files/update.go
@@ -0,0 +1,501 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package files
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "path"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models"
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ asymkey_service "code.gitea.io/gitea/services/asymkey"
+)
+
+// IdentityOptions for a person's identity like an author or committer
+type IdentityOptions struct {
+ Name string
+ Email string
+}
+
+// CommitDateOptions store dates for GIT_AUTHOR_DATE and GIT_COMMITTER_DATE
+type CommitDateOptions struct {
+ Author time.Time
+ Committer time.Time
+}
+
+type ChangeRepoFile struct {
+ Operation string
+ TreePath string
+ FromTreePath string
+ ContentReader io.ReadSeeker
+ SHA string
+ Options *RepoFileOptions
+}
+
+// ChangeRepoFilesOptions holds the repository files update options
+type ChangeRepoFilesOptions struct {
+ LastCommitID string
+ OldBranch string
+ NewBranch string
+ Message string
+ Files []*ChangeRepoFile
+ Author *IdentityOptions
+ Committer *IdentityOptions
+ Dates *CommitDateOptions
+ Signoff bool
+}
+
+type RepoFileOptions struct {
+ treePath string
+ fromTreePath string
+ executable bool
+}
+
+// ChangeRepoFiles adds, updates or removes multiple files in the given repository
+func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *user_model.User, opts *ChangeRepoFilesOptions) (*structs.FilesResponse, error) {
+ err := repo.MustNotBeArchived()
+ if err != nil {
+ return nil, err
+ }
+
+ // If no branch name is set, assume default branch
+ if opts.OldBranch == "" {
+ opts.OldBranch = repo.DefaultBranch
+ }
+ if opts.NewBranch == "" {
+ opts.NewBranch = opts.OldBranch
+ }
+
+ gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, repo)
+ if err != nil {
+ return nil, err
+ }
+ defer closer.Close()
+
+ // oldBranch must exist for this operation
+ if _, err := gitRepo.GetBranch(opts.OldBranch); err != nil && !repo.IsEmpty {
+ return nil, err
+ }
+
+ var treePaths []string
+ for _, file := range opts.Files {
+ // If FromTreePath is not set, set it to the file's TreePath
+ if file.TreePath != "" && file.FromTreePath == "" {
+ file.FromTreePath = file.TreePath
+ }
+
+ // Check that the path given in file.TreePath is valid (not a git path)
+ treePath := CleanUploadFileName(file.TreePath)
+ if treePath == "" {
+ return nil, models.ErrFilenameInvalid{
+ Path: file.TreePath,
+ }
+ }
+ // If there is a fromTreePath (we are copying it), also clean it up
+ fromTreePath := CleanUploadFileName(file.FromTreePath)
+ if fromTreePath == "" && file.FromTreePath != "" {
+ return nil, models.ErrFilenameInvalid{
+ Path: file.FromTreePath,
+ }
+ }
+
+ file.Options = &RepoFileOptions{
+ treePath: treePath,
+ fromTreePath: fromTreePath,
+ executable: false,
+ }
+ treePaths = append(treePaths, treePath)
+ }
+
+ // A NewBranch can be specified for the file to be created/updated in a new branch.
+ // Check to make sure the branch does not already exist, otherwise we can't proceed.
+ // If we aren't branching to a new branch, make sure user can commit to the given branch
+ if opts.NewBranch != opts.OldBranch {
+ existingBranch, err := gitRepo.GetBranch(opts.NewBranch)
+ if existingBranch != nil {
+ return nil, git_model.ErrBranchAlreadyExists{
+ BranchName: opts.NewBranch,
+ }
+ }
+ if err != nil && !git.IsErrBranchNotExist(err) {
+ return nil, err
+ }
+ } else if err := VerifyBranchProtection(ctx, repo, doer, opts.OldBranch, treePaths); err != nil {
+ return nil, err
+ }
+
+ message := strings.TrimSpace(opts.Message)
+
+ author, committer := GetAuthorAndCommitterUsers(opts.Author, opts.Committer, doer)
+
+ t, err := NewTemporaryUploadRepository(ctx, repo)
+ if err != nil {
+ log.Error("NewTemporaryUploadRepository failed: %v", err)
+ return nil, err
+ }
+ defer t.Close()
+ hasOldBranch := true
+ if err := t.Clone(opts.OldBranch, true); err != nil {
+ for _, file := range opts.Files {
+ if file.Operation == "delete" {
+ return nil, err
+ }
+ }
+ if !git.IsErrBranchNotExist(err) || !repo.IsEmpty {
+ return nil, err
+ }
+ if err := t.Init(repo.ObjectFormatName); err != nil {
+ return nil, err
+ }
+ hasOldBranch = false
+ opts.LastCommitID = ""
+ }
+ if hasOldBranch {
+ if err := t.SetDefaultIndex(); err != nil {
+ return nil, err
+ }
+ }
+
+ for _, file := range opts.Files {
+ if file.Operation == "delete" {
+ // Get the files in the index
+ filesInIndex, err := t.LsFiles(file.TreePath)
+ if err != nil {
+ return nil, fmt.Errorf("DeleteRepoFile: %w", err)
+ }
+
+ // Find the file we want to delete in the index
+ inFilelist := false
+ for _, indexFile := range filesInIndex {
+ if indexFile == file.TreePath {
+ inFilelist = true
+ break
+ }
+ }
+ if !inFilelist {
+ return nil, models.ErrRepoFileDoesNotExist{
+ Path: file.TreePath,
+ }
+ }
+ }
+ }
+
+ if hasOldBranch {
+ // Get the commit of the original branch
+ commit, err := t.GetBranchCommit(opts.OldBranch)
+ if err != nil {
+ return nil, err // Couldn't get a commit for the branch
+ }
+
+ // Assign LastCommitID in opts if it hasn't been set
+ if opts.LastCommitID == "" {
+ opts.LastCommitID = commit.ID.String()
+ } else {
+ lastCommitID, err := t.gitRepo.ConvertToGitID(opts.LastCommitID)
+ if err != nil {
+ return nil, fmt.Errorf("ConvertToSHA1: Invalid last commit ID: %w", err)
+ }
+ opts.LastCommitID = lastCommitID.String()
+ }
+
+ for _, file := range opts.Files {
+ if err := handleCheckErrors(file, commit, opts); err != nil {
+ return nil, err
+ }
+ }
+ }
+
+ contentStore := lfs.NewContentStore()
+ for _, file := range opts.Files {
+ switch file.Operation {
+ case "create", "update":
+ if err := CreateOrUpdateFile(ctx, t, file, contentStore, repo.ID, hasOldBranch); err != nil {
+ return nil, err
+ }
+ case "delete":
+ // Remove the file from the index
+ if err := t.RemoveFilesFromIndex(file.TreePath); err != nil {
+ return nil, err
+ }
+ default:
+ return nil, fmt.Errorf("invalid file operation: %s %s, supported operations are create, update, delete", file.Operation, file.Options.treePath)
+ }
+ }
+
+ // Now write the tree
+ treeHash, err := t.WriteTree()
+ if err != nil {
+ return nil, err
+ }
+
+ // Now commit the tree
+ var commitHash string
+ if opts.Dates != nil {
+ commitHash, err = t.CommitTreeWithDate(opts.LastCommitID, author, committer, treeHash, message, opts.Signoff, opts.Dates.Author, opts.Dates.Committer)
+ } else {
+ commitHash, err = t.CommitTree(opts.LastCommitID, author, committer, treeHash, message, opts.Signoff)
+ }
+ if err != nil {
+ return nil, err
+ }
+
+ // Then push this tree to NewBranch
+ if err := t.Push(doer, commitHash, opts.NewBranch); err != nil {
+ log.Error("%T %v", err, err)
+ return nil, err
+ }
+
+ commit, err := t.GetCommit(commitHash)
+ if err != nil {
+ return nil, err
+ }
+
+ filesResponse, err := GetFilesResponseFromCommit(ctx, repo, commit, opts.NewBranch, treePaths)
+ if err != nil {
+ return nil, err
+ }
+
+ if repo.IsEmpty {
+ if isEmpty, err := gitRepo.IsEmpty(); err == nil && !isEmpty {
+ _ = repo_model.UpdateRepositoryCols(ctx, &repo_model.Repository{ID: repo.ID, IsEmpty: false, DefaultBranch: opts.NewBranch}, "is_empty", "default_branch")
+ }
+ }
+
+ return filesResponse, nil
+}
+
+// handleCheckErrors checks for various issues with the given file change for ChangeRepoFiles
+func handleCheckErrors(file *ChangeRepoFile, commit *git.Commit, opts *ChangeRepoFilesOptions) error {
+ if file.Operation == "update" || file.Operation == "delete" {
+ fromEntry, err := commit.GetTreeEntryByPath(file.Options.fromTreePath)
+ if err != nil {
+ return err
+ }
+ if file.SHA != "" {
+ // If a SHA was given and the SHA given doesn't match the SHA of the fromTreePath, throw error
+ if file.SHA != fromEntry.ID.String() {
+ return models.ErrSHADoesNotMatch{
+ Path: file.Options.treePath,
+ GivenSHA: file.SHA,
+ CurrentSHA: fromEntry.ID.String(),
+ }
+ }
+ } else if opts.LastCommitID != "" {
+ // If a lastCommitID was given and it doesn't match the commitID of the head of the branch throw
+ // an error, but only if we aren't creating a new branch.
+ if commit.ID.String() != opts.LastCommitID && opts.OldBranch == opts.NewBranch {
+ if changed, err := commit.FileChangedSinceCommit(file.Options.treePath, opts.LastCommitID); err != nil {
+ return err
+ } else if changed {
+ return models.ErrCommitIDDoesNotMatch{
+ GivenCommitID: opts.LastCommitID,
+ CurrentCommitID: commit.ID.String(),
+ }
+ }
+ // The file wasn't modified, so we are good to delete it
+ }
+ } else {
+ // When updating a file, a lastCommitID or SHA needs to be given to make sure other commits
+ // haven't been made. We throw an error if one wasn't provided.
+ return models.ErrSHAOrCommitIDNotProvided{}
+ }
+ file.Options.executable = fromEntry.IsExecutable()
+ }
+ if file.Operation == "create" || file.Operation == "update" {
+ // For the path where this file will be created/updated, we need to make
+ // sure no parts of the path are existing files or links except for the last
+ // item in the path which is the file name, and that shouldn't exist IF it is
+ // a new file OR is being moved to a new path.
+ treePathParts := strings.Split(file.Options.treePath, "/")
+ subTreePath := ""
+ for index, part := range treePathParts {
+ subTreePath = path.Join(subTreePath, part)
+ entry, err := commit.GetTreeEntryByPath(subTreePath)
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ // Means there is no item with that name, so we're good
+ break
+ }
+ return err
+ }
+ if index < len(treePathParts)-1 {
+ if !entry.IsDir() {
+ return models.ErrFilePathInvalid{
+ Message: fmt.Sprintf("a file exists where you’re trying to create a subdirectory [path: %s]", subTreePath),
+ Path: subTreePath,
+ Name: part,
+ Type: git.EntryModeBlob,
+ }
+ }
+ } else if entry.IsLink() {
+ return models.ErrFilePathInvalid{
+ Message: fmt.Sprintf("a symbolic link exists where you’re trying to create a subdirectory [path: %s]", subTreePath),
+ Path: subTreePath,
+ Name: part,
+ Type: git.EntryModeSymlink,
+ }
+ } else if entry.IsDir() {
+ return models.ErrFilePathInvalid{
+ Message: fmt.Sprintf("a directory exists where you’re trying to create a file [path: %s]", subTreePath),
+ Path: subTreePath,
+ Name: part,
+ Type: git.EntryModeTree,
+ }
+ } else if file.Options.fromTreePath != file.Options.treePath || file.Operation == "create" {
+ // The entry shouldn't exist if we are creating new file or moving to a new path
+ return models.ErrRepoFileAlreadyExists{
+ Path: file.Options.treePath,
+ }
+ }
+ }
+ }
+
+ return nil
+}
+
+// CreateOrUpdateFile handles creating or updating a file for ChangeRepoFiles
+func CreateOrUpdateFile(ctx context.Context, t *TemporaryUploadRepository, file *ChangeRepoFile, contentStore *lfs.ContentStore, repoID int64, hasOldBranch bool) error {
+ // Get the two paths (might be the same if not moving) from the index if they exist
+ filesInIndex, err := t.LsFiles(file.TreePath, file.FromTreePath)
+ if err != nil {
+ return fmt.Errorf("UpdateRepoFile: %w", err)
+ }
+ // If it is a new file (not updating) then the given path shouldn't exist
+ if file.Operation == "create" {
+ for _, indexFile := range filesInIndex {
+ if indexFile == file.TreePath {
+ return models.ErrRepoFileAlreadyExists{
+ Path: file.TreePath,
+ }
+ }
+ }
+ }
+
+ // Remove the old path from the tree
+ if file.Options.fromTreePath != file.Options.treePath && len(filesInIndex) > 0 {
+ for _, indexFile := range filesInIndex {
+ if indexFile == file.Options.fromTreePath {
+ if err := t.RemoveFilesFromIndex(file.FromTreePath); err != nil {
+ return err
+ }
+ }
+ }
+ }
+
+ treeObjectContentReader := file.ContentReader
+ var lfsMetaObject *git_model.LFSMetaObject
+ if setting.LFS.StartServer && hasOldBranch {
+ // GitAttributeFirst returns at most one value, so this cannot yield multiple filter infos
+ filterAttribute, err := t.gitRepo.GitAttributeFirst("", file.Options.treePath, "filter")
+ if err != nil {
+ return err
+ }
+
+ if filterAttribute == "lfs" {
+ // OK so we are supposed to LFS this data!
+ pointer, err := lfs.GeneratePointer(treeObjectContentReader)
+ if err != nil {
+ return err
+ }
+ lfsMetaObject = &git_model.LFSMetaObject{Pointer: pointer, RepositoryID: repoID}
+ treeObjectContentReader = strings.NewReader(pointer.StringContent())
+ }
+ }
+
+ // Add the object to the database
+ objectHash, err := t.HashObject(treeObjectContentReader)
+ if err != nil {
+ return err
+ }
+
+ // Add the object to the index
+ if file.Options.executable {
+ if err := t.AddObjectToIndex("100755", objectHash, file.Options.treePath); err != nil {
+ return err
+ }
+ } else {
+ if err := t.AddObjectToIndex("100644", objectHash, file.Options.treePath); err != nil {
+ return err
+ }
+ }
+
+ if lfsMetaObject != nil {
+ // We have an LFS object - create it
+ lfsMetaObject, err = git_model.NewLFSMetaObject(ctx, lfsMetaObject.RepositoryID, lfsMetaObject.Pointer)
+ if err != nil {
+ return err
+ }
+ exist, err := contentStore.Exists(lfsMetaObject.Pointer)
+ if err != nil {
+ return err
+ }
+ if !exist {
+ _, err := file.ContentReader.Seek(0, io.SeekStart)
+ if err != nil {
+ return err
+ }
+ if err := contentStore.Put(lfsMetaObject.Pointer, file.ContentReader); err != nil {
+ if _, err2 := git_model.RemoveLFSMetaObjectByOid(ctx, repoID, lfsMetaObject.Oid); err2 != nil {
+ return fmt.Errorf("unable to remove failed inserted LFS object %s: %v (Prev Error: %w)", lfsMetaObject.Oid, err2, err)
+ }
+ return err
+ }
+ }
+ }
+
+ return nil
+}
+
+// VerifyBranchProtection verifies the branch protection rules for modifying the given treePaths on the given branch
+func VerifyBranchProtection(ctx context.Context, repo *repo_model.Repository, doer *user_model.User, branchName string, treePaths []string) error {
+ protectedBranch, err := git_model.GetFirstMatchProtectedBranchRule(ctx, repo.ID, branchName)
+ if err != nil {
+ return err
+ }
+ if protectedBranch != nil {
+ protectedBranch.Repo = repo
+ globUnprotected := protectedBranch.GetUnprotectedFilePatterns()
+ globProtected := protectedBranch.GetProtectedFilePatterns()
+ canUserPush := protectedBranch.CanUserPush(ctx, doer)
+ for _, treePath := range treePaths {
+ isUnprotectedFile := false
+ if len(globUnprotected) != 0 {
+ isUnprotectedFile = protectedBranch.IsUnprotectedFile(globUnprotected, treePath)
+ }
+ if !canUserPush && !isUnprotectedFile {
+ return models.ErrUserCannotCommit{
+ UserName: doer.LowerName,
+ }
+ }
+ if protectedBranch.IsProtectedFile(globProtected, treePath) {
+ return models.ErrFilePathProtected{
+ Path: treePath,
+ }
+ }
+ }
+ if protectedBranch.RequireSignedCommits {
+ _, _, _, err := asymkey_service.SignCRUDAction(ctx, repo.RepoPath(), doer, repo.RepoPath(), branchName)
+ if err != nil {
+ if !asymkey_service.IsErrWontSign(err) {
+ return err
+ }
+ return models.ErrUserCannotCommit{
+ UserName: doer.LowerName,
+ }
+ }
+ }
+ }
+ return nil
+}
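+
+// exampleVerifyBranchProtection is an illustrative sketch, not part of this
+// change: callers typically run VerifyBranchProtection before committing and
+// abort early when the rules forbid the change. The branch name and tree
+// paths below are hypothetical.
+func exampleVerifyBranchProtection(ctx context.Context, repo *repo_model.Repository, doer *user_model.User) error {
+ treePaths := []string{"docs/README.md", "main.go"}
+ if err := VerifyBranchProtection(ctx, repo, doer, "main", treePaths); err != nil {
+ // Either the doer cannot push, or one of the paths is protected.
+ return err
+ }
+ // Safe to proceed with committing the file changes.
+ return nil
+}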
diff --git a/services/repository/files/upload.go b/services/repository/files/upload.go
new file mode 100644
index 0000000..1330116
--- /dev/null
+++ b/services/repository/files/upload.go
@@ -0,0 +1,248 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package files
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "path"
+ "strings"
+
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/setting"
+)
+
+// UploadRepoFileOptions contains the uploaded repository file options
+type UploadRepoFileOptions struct {
+ LastCommitID string
+ OldBranch string
+ NewBranch string
+ TreePath string
+ Message string
+ Author *IdentityOptions
+ Committer *IdentityOptions
+ Files []string // In UUID format.
+ Signoff bool
+}
+
+type uploadInfo struct {
+ upload *repo_model.Upload
+ lfsMetaObject *git_model.LFSMetaObject
+}
+
+func cleanUpAfterFailure(ctx context.Context, infos *[]uploadInfo, t *TemporaryUploadRepository, original error) error {
+ for _, info := range *infos {
+ if info.lfsMetaObject == nil {
+ continue
+ }
+ if !info.lfsMetaObject.Existing {
+ if _, err := git_model.RemoveLFSMetaObjectByOid(ctx, t.repo.ID, info.lfsMetaObject.Oid); err != nil {
+ original = fmt.Errorf("%w, %v", original, err) // We wrap the original error - as this is the underlying error that required the fallback
+ }
+ }
+ }
+ return original
+}
+
+// UploadRepoFiles uploads files to the given repository
+func UploadRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *user_model.User, opts *UploadRepoFileOptions) error {
+ if len(opts.Files) == 0 {
+ return nil
+ }
+
+ uploads, err := repo_model.GetUploadsByUUIDs(ctx, opts.Files)
+ if err != nil {
+ return fmt.Errorf("GetUploadsByUUIDs [uuids: %v]: %w", opts.Files, err)
+ }
+
+ names := make([]string, len(uploads))
+ infos := make([]uploadInfo, len(uploads))
+ for i, upload := range uploads {
+ // Check that the file is not LFS locked; GetTreePathLock returns nil if the lock setting is not enabled
+ filepath := path.Join(opts.TreePath, upload.Name)
+ lfsLock, err := git_model.GetTreePathLock(ctx, repo.ID, filepath)
+ if err != nil {
+ return err
+ }
+ if lfsLock != nil && lfsLock.OwnerID != doer.ID {
+ u, err := user_model.GetUserByID(ctx, lfsLock.OwnerID)
+ if err != nil {
+ return err
+ }
+ return git_model.ErrLFSFileLocked{RepoID: repo.ID, Path: filepath, UserName: u.Name}
+ }
+
+ names[i] = upload.Name
+ infos[i] = uploadInfo{upload: upload}
+ }
+
+ t, err := NewTemporaryUploadRepository(ctx, repo)
+ if err != nil {
+ return err
+ }
+ defer t.Close()
+
+ hasOldBranch := true
+ if err = t.Clone(opts.OldBranch, true); err != nil {
+ if !git.IsErrBranchNotExist(err) || !repo.IsEmpty {
+ return err
+ }
+ if err = t.Init(repo.ObjectFormatName); err != nil {
+ return err
+ }
+ hasOldBranch = false
+ opts.LastCommitID = ""
+ }
+ if hasOldBranch {
+ if err = t.SetDefaultIndex(); err != nil {
+ return err
+ }
+ }
+
+ // Copy uploaded files into repository.
+ if err := copyUploadedLFSFilesIntoRepository(infos, t, opts.TreePath); err != nil {
+ return err
+ }
+
+ // Now write the tree
+ treeHash, err := t.WriteTree()
+ if err != nil {
+ return err
+ }
+
+ author, committer := GetAuthorAndCommitterUsers(opts.Author, opts.Committer, doer)
+
+ // Now commit the tree
+ commitHash, err := t.CommitTree(opts.LastCommitID, author, committer, treeHash, opts.Message, opts.Signoff)
+ if err != nil {
+ return err
+ }
+
+ // Now deal with LFS objects
+ for i := range infos {
+ if infos[i].lfsMetaObject == nil {
+ continue
+ }
+ infos[i].lfsMetaObject, err = git_model.NewLFSMetaObject(ctx, infos[i].lfsMetaObject.RepositoryID, infos[i].lfsMetaObject.Pointer)
+ if err != nil {
+ // OK, now we need to clean up
+ return cleanUpAfterFailure(ctx, &infos, t, err)
+ }
+ // Don't move the files yet - we need to ensure that
+ // everything can be inserted first
+ }
+
+ // OK, now we can insert the data into the store - there's no way to clean
+ // up the store: once it's in there, it's in there.
+ contentStore := lfs.NewContentStore()
+ for _, info := range infos {
+ if err := uploadToLFSContentStore(info, contentStore); err != nil {
+ return cleanUpAfterFailure(ctx, &infos, t, err)
+ }
+ }
+
+ // Then push this tree to NewBranch
+ if err := t.Push(doer, commitHash, opts.NewBranch); err != nil {
+ return err
+ }
+
+ return repo_model.DeleteUploads(ctx, uploads...)
+}
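+
+// exampleUploadRepoFiles is an illustrative sketch, not part of this change:
+// it shows how UploadRepoFiles might be driven once uploads have been
+// recorded. The tree path and commit message are hypothetical placeholders;
+// uploadUUIDs are the UUIDs of previously stored repo_model.Upload rows.
+func exampleUploadRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *user_model.User, uploadUUIDs []string) error {
+ return UploadRepoFiles(ctx, repo, doer, &UploadRepoFileOptions{
+ OldBranch: repo.DefaultBranch,
+ NewBranch: repo.DefaultBranch,
+ TreePath: "docs",
+ Message: "Add uploaded documentation",
+ Files: uploadUUIDs,
+ })
+}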
+
+func copyUploadedLFSFilesIntoRepository(infos []uploadInfo, t *TemporaryUploadRepository, treePath string) error {
+ var storeInLFSFunc func(string) (bool, error)
+
+ if setting.LFS.StartServer {
+ checker, err := t.gitRepo.GitAttributeChecker("", "filter")
+ if err != nil {
+ return err
+ }
+ defer checker.Close()
+
+ storeInLFSFunc = func(name string) (bool, error) {
+ attrs, err := checker.CheckPath(name)
+ if err != nil {
+ return false, fmt.Errorf("could not CheckPath(%s): %w", name, err)
+ }
+ return attrs["filter"] == "lfs", nil
+ }
+ }
+
+ // Copy uploaded files into repository.
+ for i, info := range infos {
+ storeInLFS := false
+ if storeInLFSFunc != nil {
+ var err error
+ storeInLFS, err = storeInLFSFunc(info.upload.Name)
+ if err != nil {
+ return err
+ }
+ }
+
+ if err := copyUploadedLFSFileIntoRepository(&infos[i], storeInLFS, t, treePath); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func copyUploadedLFSFileIntoRepository(info *uploadInfo, storeInLFS bool, t *TemporaryUploadRepository, treePath string) error {
+ file, err := os.Open(info.upload.LocalPath())
+ if err != nil {
+ return err
+ }
+ defer file.Close()
+
+ var objectHash string
+ if storeInLFS {
+ // Handle LFS
+ // FIXME: Inefficient! this should probably happen in models.Upload
+ pointer, err := lfs.GeneratePointer(file)
+ if err != nil {
+ return err
+ }
+
+ info.lfsMetaObject = &git_model.LFSMetaObject{Pointer: pointer, RepositoryID: t.repo.ID}
+
+ if objectHash, err = t.HashObject(strings.NewReader(pointer.StringContent())); err != nil {
+ return err
+ }
+ } else if objectHash, err = t.HashObject(file); err != nil {
+ return err
+ }
+
+ // Add the object to the index
+ return t.AddObjectToIndex("100644", objectHash, path.Join(treePath, info.upload.Name))
+}
+
+func uploadToLFSContentStore(info uploadInfo, contentStore *lfs.ContentStore) error {
+ if info.lfsMetaObject == nil {
+ return nil
+ }
+ exist, err := contentStore.Exists(info.lfsMetaObject.Pointer)
+ if err != nil {
+ return err
+ }
+ if !exist {
+ file, err := os.Open(info.upload.LocalPath())
+ if err != nil {
+ return err
+ }
+
+ defer file.Close()
+ // FIXME: Put regenerates the hash and copies the file over.
+ // I guess this strictly ensures the soundness of the store but this is inefficient.
+ if err := contentStore.Put(info.lfsMetaObject.Pointer, file); err != nil {
+ // We can't clean up the content store here; once objects are uploaded,
+ // they stay there.
+ return err
+ }
+ }
+ return nil
+}
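+
+// For orientation (git-lfs spec detail, not introduced by this change): the
+// pointer blob committed to the git tree in place of an LFS-tracked file is a
+// small text file of the form
+//
+//	version https://git-lfs.github.com/spec/v1
+//	oid sha256:<hex digest of the content>
+//	size <content length in bytes>
+//
+// while the actual content lives in the LFS content store keyed by that oid,
+// which is what uploadToLFSContentStore above fills in.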
diff --git a/services/repository/fork.go b/services/repository/fork.go
new file mode 100644
index 0000000..0378f7b
--- /dev/null
+++ b/services/repository/fork.go
@@ -0,0 +1,248 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+ "fmt"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+// ErrForkAlreadyExist represents a "ForkAlreadyExist" kind of error.
+type ErrForkAlreadyExist struct {
+ Uname string
+ RepoName string
+ ForkName string
+}
+
+// IsErrForkAlreadyExist checks if an error is an ErrForkAlreadyExist.
+func IsErrForkAlreadyExist(err error) bool {
+ _, ok := err.(ErrForkAlreadyExist)
+ return ok
+}
+
+func (err ErrForkAlreadyExist) Error() string {
+ return fmt.Sprintf("repository is already forked by user [uname: %s, repo path: %s, fork path: %s]", err.Uname, err.RepoName, err.ForkName)
+}
+
+func (err ErrForkAlreadyExist) Unwrap() error {
+ return util.ErrAlreadyExist
+}
+
+// ForkRepoOptions contains the fork repository options
+type ForkRepoOptions struct {
+ BaseRepo *repo_model.Repository
+ Name string
+ Description string
+ SingleBranch string
+}
+
+// ForkRepositoryIfNotExists creates a fork of a repository if it does not already exist and fails otherwise
+func ForkRepositoryIfNotExists(ctx context.Context, doer, owner *user_model.User, opts ForkRepoOptions) (*repo_model.Repository, error) {
+ // Forking is prohibited if the user has reached the maximum limit of repositories
+ if !doer.IsAdmin && !owner.CanForkRepo() {
+ return nil, repo_model.ErrReachLimitOfRepo{
+ Limit: owner.MaxRepoCreation,
+ }
+ }
+
+ forkedRepo, err := repo_model.GetUserFork(ctx, opts.BaseRepo.ID, owner.ID)
+ if err != nil {
+ return nil, err
+ }
+ if forkedRepo != nil {
+ return nil, ErrForkAlreadyExist{
+ Uname: owner.Name,
+ RepoName: opts.BaseRepo.FullName(),
+ ForkName: forkedRepo.FullName(),
+ }
+ }
+
+ defaultBranch := opts.BaseRepo.DefaultBranch
+ if opts.SingleBranch != "" {
+ defaultBranch = opts.SingleBranch
+ }
+ repo := &repo_model.Repository{
+ OwnerID: owner.ID,
+ Owner: owner,
+ OwnerName: owner.Name,
+ Name: opts.Name,
+ LowerName: strings.ToLower(opts.Name),
+ Description: opts.Description,
+ DefaultBranch: defaultBranch,
+ IsPrivate: opts.BaseRepo.IsPrivate || opts.BaseRepo.Owner.Visibility == structs.VisibleTypePrivate,
+ IsEmpty: opts.BaseRepo.IsEmpty,
+ IsFork: true,
+ ForkID: opts.BaseRepo.ID,
+ ObjectFormatName: opts.BaseRepo.ObjectFormatName,
+ }
+
+ oldRepoPath := opts.BaseRepo.RepoPath()
+
+ needsRollback := false
+ rollbackFn := func() {
+ if !needsRollback {
+ return
+ }
+
+ repoPath := repo_model.RepoPath(owner.Name, repo.Name)
+
+ if exists, _ := util.IsExist(repoPath); !exists {
+ return
+ }
+
+ // As the transaction will have failed, and hence its database changes
+ // rolled back, we only need to delete the related repository on the filesystem
+ if errDelete := util.RemoveAll(repoPath); errDelete != nil {
+ log.Error("Failed to remove fork repo")
+ }
+ }
+
+ needsRollbackInPanic := true
+ defer func() {
+ panicErr := recover()
+ if panicErr == nil {
+ return
+ }
+
+ if needsRollbackInPanic {
+ rollbackFn()
+ }
+ panic(panicErr)
+ }()
+
+ err = db.WithTx(ctx, func(txCtx context.Context) error {
+ if err = repo_module.CreateRepositoryByExample(txCtx, doer, owner, repo, false, true); err != nil {
+ return err
+ }
+
+ if err = repo_model.IncrementRepoForkNum(txCtx, opts.BaseRepo.ID); err != nil {
+ return err
+ }
+
+ // a failure to copy LFS files should not be ignored
+ if err = git_model.CopyLFS(txCtx, repo, opts.BaseRepo); err != nil {
+ return err
+ }
+
+ needsRollback = true
+
+ cloneCmd := git.NewCommand(txCtx, "clone", "--bare")
+ if opts.SingleBranch != "" {
+ cloneCmd.AddArguments("--single-branch", "--branch").AddDynamicArguments(opts.SingleBranch)
+ }
+ repoPath := repo_model.RepoPath(owner.Name, repo.Name)
+ if stdout, _, err := cloneCmd.AddDynamicArguments(oldRepoPath, repoPath).
+ SetDescription(fmt.Sprintf("ForkRepositoryIfNotExists(git clone): %s to %s", opts.BaseRepo.FullName(), repo.FullName())).
+ RunStdBytes(&git.RunOpts{Timeout: 10 * time.Minute}); err != nil {
+ log.Error("Fork Repository (git clone) Failed for %v (from %v):\nStdout: %s\nError: %v", repo, opts.BaseRepo, stdout, err)
+ return fmt.Errorf("git clone: %w", err)
+ }
+
+ if err := repo_module.CheckDaemonExportOK(txCtx, repo); err != nil {
+ return fmt.Errorf("checkDaemonExportOK: %w", err)
+ }
+
+ if stdout, _, err := git.NewCommand(txCtx, "update-server-info").
+ SetDescription(fmt.Sprintf("ForkRepositoryIfNotExists(git update-server-info): %s", repo.FullName())).
+ RunStdString(&git.RunOpts{Dir: repoPath}); err != nil {
+ log.Error("Fork Repository (git update-server-info) failed for %v:\nStdout: %s\nError: %v", repo, stdout, err)
+ return fmt.Errorf("git update-server-info: %w", err)
+ }
+
+ if err = repo_module.CreateDelegateHooks(repoPath); err != nil {
+ return fmt.Errorf("createDelegateHooks: %w", err)
+ }
+
+ gitRepo, err := gitrepo.OpenRepository(txCtx, repo)
+ if err != nil {
+ return fmt.Errorf("OpenRepository: %w", err)
+ }
+ defer gitRepo.Close()
+
+ _, err = repo_module.SyncRepoBranchesWithRepo(txCtx, repo, gitRepo, doer.ID)
+ return err
+ })
+ needsRollbackInPanic = false
+ if err != nil {
+ rollbackFn()
+ return nil, err
+ }
+
+ return repo, nil
+}
+
+// ForkRepositoryAndUpdates forks a repository. On success it updates metadata (size, stats, etc.) and sends a notification.
+func ForkRepositoryAndUpdates(ctx context.Context, doer, owner *user_model.User, opts ForkRepoOptions) (*repo_model.Repository, error) {
+ repo, err := ForkRepositoryIfNotExists(ctx, doer, owner, opts)
+ if err != nil {
+ return nil, err
+ }
+
+ // even if the operations below fail, the failures can be ignored; they will be retried later
+ if err := repo_module.UpdateRepoSize(ctx, repo); err != nil {
+ log.Error("Failed to update size for repository: %v", err)
+ }
+ if err := repo_model.CopyLanguageStat(ctx, opts.BaseRepo, repo); err != nil {
+ log.Error("Copy language stat from oldRepo failed: %v", err)
+ }
+
+ gitRepo, err := gitrepo.OpenRepository(ctx, repo)
+ if err != nil {
+ log.Error("Open created git repository failed: %v", err)
+ } else {
+ defer gitRepo.Close()
+ if err := repo_module.SyncReleasesWithTags(ctx, repo, gitRepo); err != nil {
+ log.Error("Sync releases from git tags failed: %v", err)
+ }
+ }
+
+ notify_service.ForkRepository(ctx, doer, opts.BaseRepo, repo)
+
+ return repo, nil
+}
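+
+// exampleFork is an illustrative sketch, not part of this change: it forks
+// base into owner's namespace reusing the base name, and treats the
+// "already forked" case as a lookup of the existing fork rather than an error.
+func exampleFork(ctx context.Context, doer, owner *user_model.User, base *repo_model.Repository) (*repo_model.Repository, error) {
+ fork, err := ForkRepositoryAndUpdates(ctx, doer, owner, ForkRepoOptions{
+ BaseRepo: base,
+ Name: base.Name,
+ Description: base.Description,
+ })
+ if IsErrForkAlreadyExist(err) {
+ return repo_model.GetUserFork(ctx, base.ID, owner.ID)
+ }
+ return fork, err
+}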
+
+// ConvertForkToNormalRepository converts the provided repo from a fork to a normal repo
+func ConvertForkToNormalRepository(ctx context.Context, repo *repo_model.Repository) error {
+ err := db.WithTx(ctx, func(ctx context.Context) error {
+ repo, err := repo_model.GetRepositoryByID(ctx, repo.ID)
+ if err != nil {
+ return err
+ }
+
+ if !repo.IsFork {
+ return nil
+ }
+
+ if err := repo_model.DecrementRepoForkNum(ctx, repo.ForkID); err != nil {
+ log.Error("Unable to decrement repo fork num for old root repo %d of repository %-v whilst converting from fork. Error: %v", repo.ForkID, repo, err)
+ return err
+ }
+
+ repo.IsFork = false
+ repo.ForkID = 0
+
+ if err := repo_module.UpdateRepository(ctx, repo, false); err != nil {
+ log.Error("Unable to update repository %-v whilst converting from fork. Error: %v", repo, err)
+ return err
+ }
+
+ return nil
+ })
+
+ return err
+}
diff --git a/services/repository/fork_test.go b/services/repository/fork_test.go
new file mode 100644
index 0000000..2e1e72a
--- /dev/null
+++ b/services/repository/fork_test.go
@@ -0,0 +1,49 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "testing"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestForkRepository(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ // user 13 has already forked repo10
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 13})
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 10})
+
+ fork, err := ForkRepositoryAndUpdates(git.DefaultContext, user, user, ForkRepoOptions{
+ BaseRepo: repo,
+ Name: "test",
+ Description: "test",
+ })
+ assert.Nil(t, fork)
+ require.Error(t, err)
+ assert.True(t, IsErrForkAlreadyExist(err))
+
+ // the user has not reached the maximum limit of repositories
+ assert.False(t, repo_model.IsErrReachLimitOfRepo(err))
+
+ // change AllowForkWithoutMaximumLimit to false for the test
+ setting.Repository.AllowForkWithoutMaximumLimit = false
+ // user has reached maximum limit of repositories
+ user.MaxRepoCreation = 0
+ fork2, err := ForkRepositoryAndUpdates(git.DefaultContext, user, user, ForkRepoOptions{
+ BaseRepo: repo,
+ Name: "test",
+ Description: "test",
+ })
+ assert.Nil(t, fork2)
+ assert.True(t, repo_model.IsErrReachLimitOfRepo(err))
+}
diff --git a/services/repository/generate.go b/services/repository/generate.go
new file mode 100644
index 0000000..8bd14ac
--- /dev/null
+++ b/services/repository/generate.go
@@ -0,0 +1,391 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "bufio"
+ "bytes"
+ "context"
+ "fmt"
+ "os"
+ "path"
+ "path/filepath"
+ "regexp"
+ "strconv"
+ "strings"
+ "time"
+
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/util"
+
+ "github.com/gobwas/glob"
+ "github.com/huandu/xstrings"
+)
+
+type transformer struct {
+ Name string
+ Transform func(string) string
+}
+
+type expansion struct {
+ Name string
+ Value string
+ Transformers []transformer
+}
+
+var defaultTransformers = []transformer{
+ {Name: "SNAKE", Transform: xstrings.ToSnakeCase},
+ {Name: "KEBAB", Transform: xstrings.ToKebabCase},
+ // as of xstrings v1.5.0 the CAMEL & PASCAL workarounds are no longer necessary
+ // and can be removed https://codeberg.org/forgejo/forgejo/pulls/4050
+ {Name: "CAMEL", Transform: func(str string) string {
+ return xstrings.FirstRuneToLower(xstrings.ToCamelCase(str))
+ }},
+ {Name: "PASCAL", Transform: xstrings.ToCamelCase},
+ {Name: "LOWER", Transform: strings.ToLower},
+ {Name: "UPPER", Transform: strings.ToUpper},
+ {Name: "TITLE", Transform: util.ToTitleCase},
+}
+
+func generateExpansion(src string, templateRepo, generateRepo *repo_model.Repository, sanitizeFileName bool) string {
+ year, month, day := time.Now().Date()
+ expansions := []expansion{
+ {Name: "YEAR", Value: strconv.Itoa(year), Transformers: nil},
+ {Name: "MONTH", Value: fmt.Sprintf("%02d", int(month)), Transformers: nil},
+ {Name: "MONTH_ENGLISH", Value: month.String(), Transformers: defaultTransformers},
+ {Name: "DAY", Value: fmt.Sprintf("%02d", day), Transformers: nil},
+ {Name: "REPO_NAME", Value: generateRepo.Name, Transformers: defaultTransformers},
+ {Name: "TEMPLATE_NAME", Value: templateRepo.Name, Transformers: defaultTransformers},
+ {Name: "REPO_DESCRIPTION", Value: generateRepo.Description, Transformers: nil},
+ {Name: "TEMPLATE_DESCRIPTION", Value: templateRepo.Description, Transformers: nil},
+ {Name: "REPO_OWNER", Value: generateRepo.OwnerName, Transformers: defaultTransformers},
+ {Name: "TEMPLATE_OWNER", Value: templateRepo.OwnerName, Transformers: defaultTransformers},
+ {Name: "REPO_LINK", Value: generateRepo.Link(), Transformers: nil},
+ {Name: "TEMPLATE_LINK", Value: templateRepo.Link(), Transformers: nil},
+ {Name: "REPO_HTTPS_URL", Value: generateRepo.CloneLink().HTTPS, Transformers: nil},
+ {Name: "TEMPLATE_HTTPS_URL", Value: templateRepo.CloneLink().HTTPS, Transformers: nil},
+ {Name: "REPO_SSH_URL", Value: generateRepo.CloneLink().SSH, Transformers: nil},
+ {Name: "TEMPLATE_SSH_URL", Value: templateRepo.CloneLink().SSH, Transformers: nil},
+ }
+
+ expansionMap := make(map[string]string)
+ for _, e := range expansions {
+ expansionMap[e.Name] = e.Value
+ for _, tr := range e.Transformers {
+ expansionMap[fmt.Sprintf("%s_%s", e.Name, tr.Name)] = tr.Transform(e.Value)
+ }
+ }
+
+ return os.Expand(src, func(key string) string {
+ if expansion, ok := expansionMap[key]; ok {
+ if sanitizeFileName {
+ return fileNameSanitize(expansion)
+ }
+ return expansion
+ }
+ return key
+ })
+}
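+
+// For illustration (hypothetical values, not part of this change): with a
+// template repo named "my-template" and a generated repo named "MyApp", a
+// template line such as
+//
+//	project: ${REPO_NAME_KEBAB} (from ${TEMPLATE_NAME})
+//
+// expands to "project: my-app (from my-template)"; keys without a matching
+// expansion are replaced by the bare key name by the os.Expand callback above.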
+
+// GiteaTemplate holds information about a .gitea/template file
+type GiteaTemplate struct {
+ Path string
+ Content []byte
+
+ globs []glob.Glob
+}
+
+// Globs parses the .gitea/template globs or returns them if they were already parsed
+func (gt *GiteaTemplate) Globs() []glob.Glob {
+ if gt.globs != nil {
+ return gt.globs
+ }
+
+ gt.globs = make([]glob.Glob, 0)
+ scanner := bufio.NewScanner(bytes.NewReader(gt.Content))
+ for scanner.Scan() {
+ line := strings.TrimSpace(scanner.Text())
+ if line == "" || strings.HasPrefix(line, "#") {
+ continue
+ }
+ g, err := glob.Compile(line, '/')
+ if err != nil {
+ log.Info("Invalid glob expression '%s' (skipped): %v", line, err)
+ continue
+ }
+ gt.globs = append(gt.globs, g)
+ }
+ return gt.globs
+}
+
+func checkGiteaTemplate(tmpDir string) (*GiteaTemplate, error) {
+ gtPath := filepath.Join(tmpDir, ".gitea", "template")
+ if _, err := os.Stat(gtPath); os.IsNotExist(err) {
+ return nil, nil
+ } else if err != nil {
+ return nil, err
+ }
+
+ content, err := os.ReadFile(gtPath)
+ if err != nil {
+ return nil, err
+ }
+
+ gt := &GiteaTemplate{
+ Path: gtPath,
+ Content: content,
+ }
+
+ return gt, nil
+}
+
+func generateRepoCommit(ctx context.Context, repo, templateRepo, generateRepo *repo_model.Repository, tmpDir string) error {
+ commitTimeStr := time.Now().Format(time.RFC3339)
+ authorSig := repo.Owner.NewGitSig()
+
+ // Because this may call hooks we should pass in the environment
+ env := append(os.Environ(),
+ "GIT_AUTHOR_NAME="+authorSig.Name,
+ "GIT_AUTHOR_EMAIL="+authorSig.Email,
+ "GIT_AUTHOR_DATE="+commitTimeStr,
+ "GIT_COMMITTER_NAME="+authorSig.Name,
+ "GIT_COMMITTER_EMAIL="+authorSig.Email,
+ "GIT_COMMITTER_DATE="+commitTimeStr,
+ )
+
+ // Clone to temporary path and do the init commit.
+ templateRepoPath := templateRepo.RepoPath()
+ if err := git.Clone(ctx, templateRepoPath, tmpDir, git.CloneRepoOptions{
+ Depth: 1,
+ Branch: templateRepo.DefaultBranch,
+ }); err != nil {
+ return fmt.Errorf("git clone: %w", err)
+ }
+
+ if err := util.RemoveAll(path.Join(tmpDir, ".git")); err != nil {
+ return fmt.Errorf("remove git dir: %w", err)
+ }
+
+ // Variable expansion
+ gt, err := checkGiteaTemplate(tmpDir)
+ if err != nil {
+ return fmt.Errorf("checkGiteaTemplate: %w", err)
+ }
+
+ if gt != nil {
+ if err := util.Remove(gt.Path); err != nil {
+ return fmt.Errorf("remove .giteatemplate: %w", err)
+ }
+
+ // Avoid walking tree if there are no globs
+ if len(gt.Globs()) > 0 {
+ tmpDirSlash := strings.TrimSuffix(filepath.ToSlash(tmpDir), "/") + "/"
+ if err := filepath.WalkDir(tmpDirSlash, func(path string, d os.DirEntry, walkErr error) error {
+ if walkErr != nil {
+ return walkErr
+ }
+
+ if d.IsDir() {
+ return nil
+ }
+
+ base := strings.TrimPrefix(filepath.ToSlash(path), tmpDirSlash)
+ for _, g := range gt.Globs() {
+ if g.Match(base) {
+ content, err := os.ReadFile(path)
+ if err != nil {
+ return err
+ }
+
+ if err := os.WriteFile(path,
+ []byte(generateExpansion(string(content), templateRepo, generateRepo, false)),
+ 0o644); err != nil {
+ return err
+ }
+
+ substPath := filepath.FromSlash(filepath.Join(tmpDirSlash,
+ generateExpansion(base, templateRepo, generateRepo, true)))
+
+ // Create parent subdirectories if needed, or continue silently if they exist
+ if err := os.MkdirAll(filepath.Dir(substPath), 0o755); err != nil {
+ return err
+ }
+
+ // Substitute filename variables
+ if err := os.Rename(path, substPath); err != nil {
+ return err
+ }
+
+ break
+ }
+ }
+ return nil
+ }); err != nil {
+ return err
+ }
+ }
+ }
+
+ if err := git.InitRepository(ctx, tmpDir, false, templateRepo.ObjectFormatName); err != nil {
+ return err
+ }
+
+ repoPath := repo.RepoPath()
+ if stdout, _, err := git.NewCommand(ctx, "remote", "add", "origin").AddDynamicArguments(repoPath).
+ SetDescription(fmt.Sprintf("generateRepoCommit (git remote add): %s to %s", templateRepoPath, tmpDir)).
+ RunStdString(&git.RunOpts{Dir: tmpDir, Env: env}); err != nil {
+ log.Error("Unable to add %v as remote origin to temporary repo to %s: stdout %s\nError: %v", repo, tmpDir, stdout, err)
+ return fmt.Errorf("git remote add: %w", err)
+ }
+
+ // set default branch based on whether it's specified in the newly generated repo or not
+ defaultBranch := repo.DefaultBranch
+ if strings.TrimSpace(defaultBranch) == "" {
+ defaultBranch = templateRepo.DefaultBranch
+ }
+
+ return initRepoCommit(ctx, tmpDir, repo, repo.Owner, defaultBranch)
+}
+
+func generateGitContent(ctx context.Context, repo, templateRepo, generateRepo *repo_model.Repository) (err error) {
+ tmpDir, err := os.MkdirTemp(os.TempDir(), "gitea-"+repo.Name)
+ if err != nil {
+ return fmt.Errorf("Failed to create temp dir for repository %s: %w", repo.RepoPath(), err)
+ }
+
+ defer func() {
+ if err := util.RemoveAll(tmpDir); err != nil {
+ log.Error("RemoveAll: %v", err)
+ }
+ }()
+
+ if err = generateRepoCommit(ctx, repo, templateRepo, generateRepo, tmpDir); err != nil {
+ return fmt.Errorf("generateRepoCommit: %w", err)
+ }
+
+ // re-fetch repo
+ if repo, err = repo_model.GetRepositoryByID(ctx, repo.ID); err != nil {
+ return fmt.Errorf("getRepositoryByID: %w", err)
+ }
+
+ // if there was no default branch supplied when generating the repo, use the default one from the template
+ if strings.TrimSpace(repo.DefaultBranch) == "" {
+ repo.DefaultBranch = templateRepo.DefaultBranch
+ }
+
+ if err = gitrepo.SetDefaultBranch(ctx, repo, repo.DefaultBranch); err != nil {
+ return fmt.Errorf("setDefaultBranch: %w", err)
+ }
+ if err = UpdateRepository(ctx, repo, false); err != nil {
+ return fmt.Errorf("updateRepository: %w", err)
+ }
+
+ return nil
+}
+
+// GenerateGitContent generates git content from a template repository
+func GenerateGitContent(ctx context.Context, templateRepo, generateRepo *repo_model.Repository) error {
+ if err := generateGitContent(ctx, generateRepo, templateRepo, generateRepo); err != nil {
+ return err
+ }
+
+ if err := repo_module.UpdateRepoSize(ctx, generateRepo); err != nil {
+ return fmt.Errorf("failed to update size for repository: %w", err)
+ }
+
+ if err := git_model.CopyLFS(ctx, generateRepo, templateRepo); err != nil {
+ return fmt.Errorf("failed to copy LFS: %w", err)
+ }
+ return nil
+}
+
+// GenerateRepoOptions contains the template units to generate
+type GenerateRepoOptions struct {
+ Name string
+ DefaultBranch string
+ Description string
+ Private bool
+ GitContent bool
+ Topics bool
+ GitHooks bool
+ Webhooks bool
+ Avatar bool
+ IssueLabels bool
+ ProtectedBranch bool
+}
+
+// IsValid checks whether at least one option is chosen for generation
+func (gro GenerateRepoOptions) IsValid() bool {
+ return gro.GitContent || gro.Topics || gro.GitHooks || gro.Webhooks || gro.Avatar ||
+ gro.IssueLabels || gro.ProtectedBranch // or other items as they are added
+}
+
+// generateRepository generates a repository from a template
+func generateRepository(ctx context.Context, doer, owner *user_model.User, templateRepo *repo_model.Repository, opts GenerateRepoOptions) (_ *repo_model.Repository, err error) {
+ generateRepo := &repo_model.Repository{
+ OwnerID: owner.ID,
+ Owner: owner,
+ OwnerName: owner.Name,
+ Name: opts.Name,
+ LowerName: strings.ToLower(opts.Name),
+ Description: opts.Description,
+ DefaultBranch: opts.DefaultBranch,
+ IsPrivate: opts.Private,
+ IsEmpty: !opts.GitContent || templateRepo.IsEmpty,
+ IsFsckEnabled: templateRepo.IsFsckEnabled,
+ TemplateID: templateRepo.ID,
+ TrustModel: templateRepo.TrustModel,
+ ObjectFormatName: templateRepo.ObjectFormatName,
+ }
+
+ if err = repo_module.CreateRepositoryByExample(ctx, doer, owner, generateRepo, false, false); err != nil {
+ return nil, err
+ }
+
+ repoPath := generateRepo.RepoPath()
+ isExist, err := util.IsExist(repoPath)
+ if err != nil {
+ log.Error("Unable to check if %s exists. Error: %v", repoPath, err)
+ return nil, err
+ }
+ if isExist {
+ return nil, repo_model.ErrRepoFilesAlreadyExist{
+ Uname: generateRepo.OwnerName,
+ Name: generateRepo.Name,
+ }
+ }
+
+ if err = repo_module.CheckInitRepository(ctx, owner.Name, generateRepo.Name, generateRepo.ObjectFormatName); err != nil {
+ return generateRepo, err
+ }
+
+ if err = repo_module.CheckDaemonExportOK(ctx, generateRepo); err != nil {
+ return generateRepo, fmt.Errorf("checkDaemonExportOK: %w", err)
+ }
+
+ if stdout, _, err := git.NewCommand(ctx, "update-server-info").
+ SetDescription(fmt.Sprintf("GenerateRepository(git update-server-info): %s", repoPath)).
+ RunStdString(&git.RunOpts{Dir: repoPath}); err != nil {
+ log.Error("GenerateRepository(git update-server-info) in %v: Stdout: %s\nError: %v", generateRepo, stdout, err)
+ return generateRepo, fmt.Errorf("error in GenerateRepository(git update-server-info): %w", err)
+ }
+
+ return generateRepo, nil
+}
+
+var fileNameSanitizeRegexp = regexp.MustCompile(`(?i)\.\.|[<>:\"/\\|?*\x{0000}-\x{001F}]|^(con|prn|aux|nul|com\d|lpt\d)$`)
+
+// fileNameSanitize sanitizes user input into valid OS filenames.
+//
+// Based on https://github.com/sindresorhus/filename-reserved-regex
+// Adds ".." to prevent directory traversal.
+func fileNameSanitize(s string) string {
+ return strings.TrimSpace(fileNameSanitizeRegexp.ReplaceAllString(s, "_"))
+}
diff --git a/services/repository/generate_test.go b/services/repository/generate_test.go
new file mode 100644
index 0000000..b0f97d0
--- /dev/null
+++ b/services/repository/generate_test.go
@@ -0,0 +1,67 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+var giteaTemplate = []byte(`
+# Header
+
+# All .go files
+**.go
+
+# All text files in /text/
+text/*.txt
+
+# All files in modules folders
+**/modules/*
+`)
+
+func TestGiteaTemplate(t *testing.T) {
+ gt := GiteaTemplate{Content: giteaTemplate}
+ assert.Len(t, gt.Globs(), 3)
+
+ tt := []struct {
+ Path string
+ Match bool
+ }{
+ {Path: "main.go", Match: true},
+ {Path: "a/b/c/d/e.go", Match: true},
+ {Path: "main.txt", Match: false},
+ {Path: "a/b.txt", Match: false},
+ {Path: "text/a.txt", Match: true},
+ {Path: "text/b.txt", Match: true},
+ {Path: "text/c.json", Match: false},
+ {Path: "a/b/c/modules/README.md", Match: true},
+ {Path: "a/b/c/modules/d/README.md", Match: false},
+ }
+
+ for _, tc := range tt {
+ t.Run(tc.Path, func(t *testing.T) {
+ match := false
+ for _, g := range gt.Globs() {
+ if g.Match(tc.Path) {
+ match = true
+ break
+ }
+ }
+ assert.Equal(t, tc.Match, match)
+ })
+ }
+}
+
+func TestFileNameSanitize(t *testing.T) {
+ assert.Equal(t, "test_CON", fileNameSanitize("test_CON"))
+ assert.Equal(t, "test CON", fileNameSanitize("test CON "))
+ assert.Equal(t, "__traverse__", fileNameSanitize("../traverse/.."))
+ assert.Equal(t, "http___localhost_3003_user_test.git", fileNameSanitize("http://localhost:3003/user/test.git"))
+ assert.Equal(t, "_", fileNameSanitize("CON"))
+ assert.Equal(t, "_", fileNameSanitize("con"))
+ assert.Equal(t, "_", fileNameSanitize("\u0000"))
+ assert.Equal(t, "目标", fileNameSanitize("目标"))
+}
diff --git a/services/repository/hooks.go b/services/repository/hooks.go
new file mode 100644
index 0000000..97e9e29
--- /dev/null
+++ b/services/repository/hooks.go
@@ -0,0 +1,110 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ repo_module "code.gitea.io/gitea/modules/repository"
+
+ "xorm.io/builder"
+)
+
+// SyncRepositoryHooks rewrites all repositories' pre-receive, update and post-receive hooks
+// to make sure the binary and custom conf path are up-to-date.
+func SyncRepositoryHooks(ctx context.Context) error {
+ log.Trace("Doing: SyncRepositoryHooks")
+
+ if err := db.Iterate(
+ ctx,
+ builder.Gt{"id": 0},
+ func(ctx context.Context, repo *repo_model.Repository) error {
+ select {
+ case <-ctx.Done():
+ return db.ErrCancelledf("before sync repository hooks for %s", repo.FullName())
+ default:
+ }
+
+ if err := repo_module.CreateDelegateHooks(repo.RepoPath()); err != nil {
+ return fmt.Errorf("SyncRepositoryHook: %w", err)
+ }
+ if repo.HasWiki() {
+ if err := repo_module.CreateDelegateHooks(repo.WikiPath()); err != nil {
+ return fmt.Errorf("SyncRepositoryHook: %w", err)
+ }
+ }
+ return nil
+ },
+ ); err != nil {
+ return err
+ }
+
+ log.Trace("Finished: SyncRepositoryHooks")
+ return nil
+}
+
+// GenerateGitHooks generates git hooks from a template repository
+func GenerateGitHooks(ctx context.Context, templateRepo, generateRepo *repo_model.Repository) error {
+ generateGitRepo, err := gitrepo.OpenRepository(ctx, generateRepo)
+ if err != nil {
+ return err
+ }
+ defer generateGitRepo.Close()
+
+ templateGitRepo, err := gitrepo.OpenRepository(ctx, templateRepo)
+ if err != nil {
+ return err
+ }
+ defer templateGitRepo.Close()
+
+ templateHooks, err := templateGitRepo.Hooks()
+ if err != nil {
+ return err
+ }
+
+ for _, templateHook := range templateHooks {
+ generateHook, err := generateGitRepo.GetHook(templateHook.Name())
+ if err != nil {
+ return err
+ }
+
+ generateHook.Content = templateHook.Content
+ if err := generateHook.Update(); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// GenerateWebhooks generates webhooks from a template repository
+func GenerateWebhooks(ctx context.Context, templateRepo, generateRepo *repo_model.Repository) error {
+ templateWebhooks, err := db.Find[webhook.Webhook](ctx, webhook.ListWebhookOptions{RepoID: templateRepo.ID})
+ if err != nil {
+ return err
+ }
+
+ ws := make([]*webhook.Webhook, 0, len(templateWebhooks))
+ for _, templateWebhook := range templateWebhooks {
+ ws = append(ws, &webhook.Webhook{
+ RepoID: generateRepo.ID,
+ URL: templateWebhook.URL,
+ HTTPMethod: templateWebhook.HTTPMethod,
+ ContentType: templateWebhook.ContentType,
+ Secret: templateWebhook.Secret,
+ HookEvent: templateWebhook.HookEvent,
+ IsActive: templateWebhook.IsActive,
+ Type: templateWebhook.Type,
+ OwnerID: templateWebhook.OwnerID,
+ Events: templateWebhook.Events,
+ Meta: templateWebhook.Meta,
+ })
+ }
+ return webhook.CreateWebhooks(ctx, ws)
+}
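+
+// exampleGenerateExtras is an illustrative sketch, not part of this change:
+// after a repository has been generated from a template, optional units such
+// as git hooks and webhooks are copied over piecemeal.
+func exampleGenerateExtras(ctx context.Context, templateRepo, generateRepo *repo_model.Repository) error {
+ if err := GenerateGitHooks(ctx, templateRepo, generateRepo); err != nil {
+ return err
+ }
+ return GenerateWebhooks(ctx, templateRepo, generateRepo)
+}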
diff --git a/services/repository/init.go b/services/repository/init.go
new file mode 100644
index 0000000..817fa4a
--- /dev/null
+++ b/services/repository/init.go
@@ -0,0 +1,83 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "time"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ asymkey_service "code.gitea.io/gitea/services/asymkey"
+)
+
+// initRepoCommit creates the initial commit in the given temporary work directory.
+func initRepoCommit(ctx context.Context, tmpPath string, repo *repo_model.Repository, u *user_model.User, defaultBranch string) (err error) {
+ commitTimeStr := time.Now().Format(time.RFC3339)
+
+ sig := u.NewGitSig()
+ // Because this may call hooks we should pass in the environment
+ env := append(os.Environ(),
+ "GIT_AUTHOR_NAME="+sig.Name,
+ "GIT_AUTHOR_EMAIL="+sig.Email,
+ "GIT_AUTHOR_DATE="+commitTimeStr,
+ "GIT_COMMITTER_DATE="+commitTimeStr,
+ )
+ committerName := sig.Name
+ committerEmail := sig.Email
+
+ if stdout, _, err := git.NewCommand(ctx, "add", "--all").
+ SetDescription(fmt.Sprintf("initRepoCommit (git add): %s", tmpPath)).
+ RunStdString(&git.RunOpts{Dir: tmpPath}); err != nil {
+ log.Error("git add --all failed: Stdout: %s\nError: %v", stdout, err)
+ return fmt.Errorf("git add --all: %w", err)
+ }
+
+ cmd := git.NewCommand(ctx, "commit", "--message=Initial commit").
+ AddOptionFormat("--author='%s <%s>'", sig.Name, sig.Email)
+
+ sign, keyID, signer, _ := asymkey_service.SignInitialCommit(ctx, tmpPath, u)
+ if sign {
+ cmd.AddOptionFormat("-S%s", keyID)
+
+ if repo.GetTrustModel() == repo_model.CommitterTrustModel || repo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel {
+ // need to set the committer to the KeyID owner
+ committerName = signer.Name
+ committerEmail = signer.Email
+ }
+ } else {
+ cmd.AddArguments("--no-gpg-sign")
+ }
+
+ env = append(env,
+ "GIT_COMMITTER_NAME="+committerName,
+ "GIT_COMMITTER_EMAIL="+committerEmail,
+ )
+
+ if stdout, _, err := cmd.
+ SetDescription(fmt.Sprintf("initRepoCommit (git commit): %s", tmpPath)).
+ RunStdString(&git.RunOpts{Dir: tmpPath, Env: env}); err != nil {
+ log.Error("Failed to commit: %v: Stdout: %s\nError: %v", cmd.String(), stdout, err)
+ return fmt.Errorf("git commit: %w", err)
+ }
+
+ if len(defaultBranch) == 0 {
+ defaultBranch = setting.Repository.DefaultBranch
+ }
+
+ if stdout, _, err := git.NewCommand(ctx, "push", "origin").AddDynamicArguments("HEAD:" + defaultBranch).
+ SetDescription(fmt.Sprintf("initRepoCommit (git push): %s", tmpPath)).
+ RunStdString(&git.RunOpts{Dir: tmpPath, Env: repo_module.InternalPushingEnvironment(u, repo)}); err != nil {
+ log.Error("Failed to push back to HEAD: Stdout: %s\nError: %v", stdout, err)
+ return fmt.Errorf("git push: %w", err)
+ }
+
+ return nil
+}
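+
+// For illustration (assumed identities and key ID, not part of this change):
+// with signing enabled, the sequence above is roughly equivalent to
+//
+//	git add --all
+//	git commit --message="Initial commit" --author='Alice <alice@example.com>' -S<keyID>
+//	git push origin HEAD:main
+//
+// where GIT_COMMITTER_NAME/EMAIL are switched to the key owner under the
+// committer trust models, and --no-gpg-sign replaces -S when signing is
+// declined.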
diff --git a/services/repository/lfs.go b/services/repository/lfs.go
new file mode 100644
index 0000000..4cd1110
--- /dev/null
+++ b/services/repository/lfs.go
@@ -0,0 +1,123 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+ "fmt"
+ "time"
+
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+)
+
+// GarbageCollectLFSMetaObjectsOptions provides options for GarbageCollectLFSMetaObjects function
+type GarbageCollectLFSMetaObjectsOptions struct {
+ LogDetail func(format string, v ...any)
+ AutoFix bool
+ OlderThan time.Time
+ UpdatedLessRecentlyThan time.Time
+}
+
+// GarbageCollectLFSMetaObjects garbage collects LFS objects for all repositories
+func GarbageCollectLFSMetaObjects(ctx context.Context, opts GarbageCollectLFSMetaObjectsOptions) error {
+ log.Trace("Doing: GarbageCollectLFSMetaObjects")
+ defer log.Trace("Finished: GarbageCollectLFSMetaObjects")
+
+ if opts.LogDetail == nil {
+ opts.LogDetail = log.Debug
+ }
+
+ if !setting.LFS.StartServer {
+ opts.LogDetail("LFS support is disabled")
+ return nil
+ }
+
+ return git_model.IterateRepositoryIDsWithLFSMetaObjects(ctx, func(ctx context.Context, repoID, count int64) error {
+ repo, err := repo_model.GetRepositoryByID(ctx, repoID)
+ if err != nil {
+ return err
+ }
+
+ return GarbageCollectLFSMetaObjectsForRepo(ctx, repo, opts)
+ })
+}
+
+// GarbageCollectLFSMetaObjectsForRepo garbage collects LFS objects for a specific repository
+func GarbageCollectLFSMetaObjectsForRepo(ctx context.Context, repo *repo_model.Repository, opts GarbageCollectLFSMetaObjectsOptions) error {
+ opts.LogDetail("Checking %s", repo.FullName())
+ total, orphaned, collected, deleted := int64(0), 0, 0, 0
+ defer func() {
+ if orphaned == 0 {
+ opts.LogDetail("Found %d total LFSMetaObjects in %s", total, repo.FullName())
+ } else if !opts.AutoFix {
+ opts.LogDetail("Found %d/%d orphaned LFSMetaObjects in %s", orphaned, total, repo.FullName())
+ } else {
+ opts.LogDetail("Collected %d/%d orphaned/%d total LFSMetaObjects in %s. %d removed from storage.", collected, orphaned, total, repo.FullName(), deleted)
+ }
+ }()
+
+ gitRepo, err := gitrepo.OpenRepository(ctx, repo)
+ if err != nil {
+ log.Error("Unable to open git repository %s: %v", repo.FullName(), err)
+ return err
+ }
+ defer gitRepo.Close()
+
+ store := lfs.NewContentStore()
+ objectFormat := git.ObjectFormatFromName(repo.ObjectFormatName)
+
+ err = git_model.IterateLFSMetaObjectsForRepo(ctx, repo.ID, func(ctx context.Context, metaObject *git_model.LFSMetaObject) error {
+ total++
+ pointerSha := git.ComputeBlobHash(objectFormat, []byte(metaObject.Pointer.StringContent()))
+
+ if gitRepo.IsObjectExist(pointerSha.String()) {
+ return git_model.MarkLFSMetaObject(ctx, metaObject.ID)
+ }
+ orphaned++
+
+ if !opts.AutoFix {
+ return nil
+ }
+ // Non-existent pointer file
+ _, err = git_model.RemoveLFSMetaObjectByOidFn(ctx, repo.ID, metaObject.Oid, func(count int64) error {
+ if count > 0 {
+ return nil
+ }
+
+ if err := store.Delete(metaObject.RelativePath()); err != nil {
+ log.Error("Unable to remove lfs metaobject %s from store: %v", metaObject.Oid, err)
+ }
+ deleted++
+ return nil
+ })
+ if err != nil {
+ return fmt.Errorf("unable to remove meta-object %s in %s: %w", metaObject.Oid, repo.FullName(), err)
+ }
+ collected++
+
+ return nil
+ }, &git_model.IterateLFSMetaObjectsForRepoOptions{
+ // Only attempt to garbage collect lfs meta objects older than a week as the order of git lfs upload
+ // and git object upload is not necessarily guaranteed. It's possible to imagine a situation whereby
+ // an LFS object is uploaded but the git branch is not uploaded immediately, or there are some rapid
+ // changes in new branches that might lead to lfs objects becoming temporarily unassociated with git
+ // objects.
+ //
+ // It is likely that a week is potentially excessive but it should definitely be enough that any
+ // unassociated LFS object is genuinely unassociated.
+ OlderThan: timeutil.TimeStamp(opts.OlderThan.Unix()),
+ UpdatedLessRecentlyThan: timeutil.TimeStamp(opts.UpdatedLessRecentlyThan.Unix()),
+ })
+ if err != nil {
+ return err
+ }
+ return nil
+}
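+
+// exampleLFSGC is an illustrative sketch, not part of this change: a dry run
+// first (AutoFix disabled only reports orphans), then a fixing pass limited
+// to meta objects older than a week, mirroring the rationale documented in
+// the options above.
+func exampleLFSGC(ctx context.Context) error {
+ opts := GarbageCollectLFSMetaObjectsOptions{
+ LogDetail: log.Info,
+ AutoFix: false,
+ OlderThan: time.Now().Add(-7 * 24 * time.Hour),
+ UpdatedLessRecentlyThan: time.Now().Add(-7 * 24 * time.Hour),
+ }
+ if err := GarbageCollectLFSMetaObjects(ctx, opts); err != nil {
+ return err
+ }
+ opts.AutoFix = true // the second pass actually removes orphaned objects
+ return GarbageCollectLFSMetaObjects(ctx, opts)
+}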
diff --git a/services/repository/lfs_test.go b/services/repository/lfs_test.go
new file mode 100644
index 0000000..a0c01df
--- /dev/null
+++ b/services/repository/lfs_test.go
@@ -0,0 +1,75 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository_test
+
+import (
+ "bytes"
+ "context"
+ "testing"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ repo_service "code.gitea.io/gitea/services/repository"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestGarbageCollectLFSMetaObjects(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+
+ setting.LFS.StartServer = true
+ err := storage.Init()
+ require.NoError(t, err)
+
+ repo, err := repo_model.GetRepositoryByOwnerAndName(db.DefaultContext, "user2", "lfs")
+ require.NoError(t, err)
+
+ validLFSObjects, err := db.GetEngine(db.DefaultContext).Count(git_model.LFSMetaObject{RepositoryID: repo.ID})
+ require.NoError(t, err)
+ assert.Greater(t, validLFSObjects, int64(1))
+
+ // add lfs object
+ lfsContent := []byte("gitea1")
+ lfsOid := storeObjectInRepo(t, repo.ID, &lfsContent)
+
+ // gc
+ err = repo_service.GarbageCollectLFSMetaObjects(context.Background(), repo_service.GarbageCollectLFSMetaObjectsOptions{
+ AutoFix: true,
+ OlderThan: time.Now().Add(7 * 24 * time.Hour).Add(5 * 24 * time.Hour),
+ UpdatedLessRecentlyThan: time.Time{}, // ensure that the models/fixtures/lfs_meta_object.yml objects are considered as well
+ LogDetail: t.Logf,
+ })
+ require.NoError(t, err)
+
+ // lfs meta has been deleted
+ _, err = git_model.GetLFSMetaObjectByOid(db.DefaultContext, repo.ID, lfsOid)
+ require.ErrorIs(t, err, git_model.ErrLFSObjectNotExist)
+
+ remainingLFSObjects, err := db.GetEngine(db.DefaultContext).Count(git_model.LFSMetaObject{RepositoryID: repo.ID})
+ require.NoError(t, err)
+ assert.Equal(t, validLFSObjects-1, remainingLFSObjects)
+}
+
+func storeObjectInRepo(t *testing.T, repositoryID int64, content *[]byte) string {
+ pointer, err := lfs.GeneratePointer(bytes.NewReader(*content))
+ require.NoError(t, err)
+
+ _, err = git_model.NewLFSMetaObject(db.DefaultContext, repositoryID, pointer)
+ require.NoError(t, err)
+ contentStore := lfs.NewContentStore()
+ exist, err := contentStore.Exists(pointer)
+ require.NoError(t, err)
+ if !exist {
+ err := contentStore.Put(pointer, bytes.NewReader(*content))
+ require.NoError(t, err)
+ }
+ return pointer.Oid
+}
diff --git a/services/repository/main_test.go b/services/repository/main_test.go
new file mode 100644
index 0000000..7ad1540
--- /dev/null
+++ b/services/repository/main_test.go
@@ -0,0 +1,14 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
diff --git a/services/repository/migrate.go b/services/repository/migrate.go
new file mode 100644
index 0000000..39ced04
--- /dev/null
+++ b/services/repository/migrate.go
@@ -0,0 +1,289 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "net/http"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/organization"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/migration"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+)
+
+// MigrateRepositoryGitData starts migrating git-related data after the migrating repository record has been created
+func MigrateRepositoryGitData(ctx context.Context, u *user_model.User,
+ repo *repo_model.Repository, opts migration.MigrateOptions,
+ httpTransport *http.Transport,
+) (*repo_model.Repository, error) {
+ repoPath := repo_model.RepoPath(u.Name, opts.RepoName)
+
+ if u.IsOrganization() {
+ t, err := organization.OrgFromUser(u).GetOwnerTeam(ctx)
+ if err != nil {
+ return nil, err
+ }
+ repo.NumWatches = t.NumMembers
+ } else {
+ repo.NumWatches = 1
+ }
+
+ migrateTimeout := time.Duration(setting.Git.Timeout.Migrate) * time.Second
+
+ var err error
+ if err = util.RemoveAll(repoPath); err != nil {
+ return repo, fmt.Errorf("Failed to remove %s: %w", repoPath, err)
+ }
+
+ if err = git.Clone(ctx, opts.CloneAddr, repoPath, git.CloneRepoOptions{
+ Mirror: true,
+ Quiet: true,
+ Timeout: migrateTimeout,
+ SkipTLSVerify: setting.Migrations.SkipTLSVerify,
+ }); err != nil {
+ if errors.Is(err, context.DeadlineExceeded) {
+ return repo, fmt.Errorf("Clone timed out. Consider increasing [git.timeout] MIGRATE in app.ini. Underlying Error: %w", err)
+ }
+ return repo, fmt.Errorf("Clone: %w", err)
+ }
+
+ if err := git.WriteCommitGraph(ctx, repoPath); err != nil {
+ return repo, err
+ }
+
+ if opts.Wiki {
+ wikiPath := repo_model.WikiPath(u.Name, opts.RepoName)
+ wikiRemotePath := repo_module.WikiRemoteURL(ctx, opts.CloneAddr)
+ if len(wikiRemotePath) > 0 {
+ if err := util.RemoveAll(wikiPath); err != nil {
+ return repo, fmt.Errorf("Failed to remove %s: %w", wikiPath, err)
+ }
+
+ if err := git.Clone(ctx, wikiRemotePath, wikiPath, git.CloneRepoOptions{
+ Mirror: true,
+ Quiet: true,
+ Timeout: migrateTimeout,
+ SkipTLSVerify: setting.Migrations.SkipTLSVerify,
+ }); err != nil {
+ log.Warn("Clone wiki: %v", err)
+ if err := util.RemoveAll(wikiPath); err != nil {
+ return repo, fmt.Errorf("Failed to remove %s: %w", wikiPath, err)
+ }
+ } else {
+ // Figure out the branch of the wiki we just cloned. We assume
+ // that the default branch is to be used, and we'll use the same
+ // name as the source.
+ gitRepo, err := git.OpenRepository(ctx, wikiPath)
+ if err != nil {
+ log.Warn("Failed to open wiki repository during migration: %v", err)
+ if err := util.RemoveAll(wikiPath); err != nil {
+ return repo, fmt.Errorf("Failed to remove %s: %w", wikiPath, err)
+ }
+ return repo, err
+ }
+ defer gitRepo.Close()
+
+ branch, err := gitrepo.GetDefaultBranch(ctx, repo)
+ if err != nil {
+ log.Warn("Failed to get the default branch of a migrated wiki repo: %v", err)
+ if err := util.RemoveAll(wikiPath); err != nil {
+ return repo, fmt.Errorf("Failed to remove %s: %w", wikiPath, err)
+ }
+
+ return repo, err
+ }
+ repo.WikiBranch = branch
+
+ if err := git.WriteCommitGraph(ctx, wikiPath); err != nil {
+ return repo, err
+ }
+ }
+ }
+ }
+
+ if repo.OwnerID == u.ID {
+ repo.Owner = u
+ }
+
+ if err = repo_module.CheckDaemonExportOK(ctx, repo); err != nil {
+ return repo, fmt.Errorf("checkDaemonExportOK: %w", err)
+ }
+
+ if stdout, _, err := git.NewCommand(ctx, "update-server-info").
+ SetDescription(fmt.Sprintf("MigrateRepositoryGitData(git update-server-info): %s", repoPath)).
+ RunStdString(&git.RunOpts{Dir: repoPath}); err != nil {
+ log.Error("MigrateRepositoryGitData(git update-server-info) in %v: Stdout: %s\nError: %v", repo, stdout, err)
+ return repo, fmt.Errorf("error in MigrateRepositoryGitData(git update-server-info): %w", err)
+ }
+
+ gitRepo, err := git.OpenRepository(ctx, repoPath)
+ if err != nil {
+ return repo, fmt.Errorf("OpenRepository: %w", err)
+ }
+ defer gitRepo.Close()
+
+ repo.IsEmpty, err = gitRepo.IsEmpty()
+ if err != nil {
+ return repo, fmt.Errorf("git.IsEmpty: %w", err)
+ }
+
+ if !repo.IsEmpty {
+ if len(repo.DefaultBranch) == 0 {
+ // Try to get HEAD branch and set it as default branch.
+ headBranch, err := gitRepo.GetHEADBranch()
+ if err != nil {
+ return repo, fmt.Errorf("GetHEADBranch: %w", err)
+ }
+ if headBranch != nil {
+ repo.DefaultBranch = headBranch.Name
+ }
+ }
+
+ if _, err := repo_module.SyncRepoBranchesWithRepo(ctx, repo, gitRepo, u.ID); err != nil {
+ return repo, fmt.Errorf("SyncRepoBranchesWithRepo: %v", err)
+ }
+
+ if !opts.Releases {
+ // note: this will greatly improve release (tag) sync
+ // for pull-mirrors with many tags
+ repo.IsMirror = opts.Mirror
+ if err = repo_module.SyncReleasesWithTags(ctx, repo, gitRepo); err != nil {
+ log.Error("Failed to synchronize tags to releases for repository: %v", err)
+ }
+ }
+
+ if opts.LFS {
+ endpoint := lfs.DetermineEndpoint(opts.CloneAddr, opts.LFSEndpoint)
+ lfsClient := lfs.NewClient(endpoint, httpTransport)
+ if err = repo_module.StoreMissingLfsObjectsInRepository(ctx, repo, gitRepo, lfsClient); err != nil {
+ log.Error("Failed to store missing LFS objects for repository: %v", err)
+ return repo, fmt.Errorf("StoreMissingLfsObjectsInRepository: %w", err)
+ }
+ }
+ }
+
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return nil, err
+ }
+ defer committer.Close()
+
+ if opts.Mirror {
+ remoteAddress, err := util.SanitizeURL(opts.CloneAddr)
+ if err != nil {
+ return repo, err
+ }
+ mirrorModel := repo_model.Mirror{
+ RepoID: repo.ID,
+ Interval: setting.Mirror.DefaultInterval,
+ EnablePrune: true,
+ NextUpdateUnix: timeutil.TimeStampNow().AddDuration(setting.Mirror.DefaultInterval),
+ LFS: opts.LFS,
+ RemoteAddress: remoteAddress,
+ }
+ if opts.LFS {
+ mirrorModel.LFSEndpoint = opts.LFSEndpoint
+ }
+
+ if opts.MirrorInterval != "" {
+ parsedInterval, err := time.ParseDuration(opts.MirrorInterval)
+ if err != nil {
+ log.Error("Failed to set Interval: %v", err)
+ return repo, err
+ }
+ if parsedInterval == 0 {
+ mirrorModel.Interval = 0
+ mirrorModel.NextUpdateUnix = 0
+ } else if parsedInterval < setting.Mirror.MinInterval {
+ err := fmt.Errorf("interval %s is set below Minimum Interval of %s", parsedInterval, setting.Mirror.MinInterval)
+ log.Error("Interval: %s is too frequent", opts.MirrorInterval)
+ return repo, err
+ } else {
+ mirrorModel.Interval = parsedInterval
+ mirrorModel.NextUpdateUnix = timeutil.TimeStampNow().AddDuration(parsedInterval)
+ }
+ }
+
+ if err = repo_model.InsertMirror(ctx, &mirrorModel); err != nil {
+ return repo, fmt.Errorf("InsertOne: %w", err)
+ }
+
+ repo.IsMirror = true
+ if err = UpdateRepository(ctx, repo, false); err != nil {
+ return nil, err
+ }
+
+ // this is necessary to sync local tags from the remote
+ configName := fmt.Sprintf("remote.%s.fetch", mirrorModel.GetRemoteName())
+ if stdout, _, err := git.NewCommand(ctx, "config").
+ AddOptionValues("--add", configName, `+refs/tags/*:refs/tags/*`).
+ RunStdString(&git.RunOpts{Dir: repoPath}); err != nil {
+ log.Error("MigrateRepositoryGitData(git config --add <remote> +refs/tags/*:refs/tags/*) in %v: Stdout: %s\nError: %v", repo, stdout, err)
+ return repo, fmt.Errorf("error in MigrateRepositoryGitData(git config --add <remote> +refs/tags/*:refs/tags/*): %w", err)
+ }
+ } else {
+ if err = repo_module.UpdateRepoSize(ctx, repo); err != nil {
+ log.Error("Failed to update size for repository: %v", err)
+ }
+ if repo, err = CleanUpMigrateInfo(ctx, repo); err != nil {
+ return nil, err
+ }
+ }
+
+ return repo, committer.Commit()
+}
+
+// cleanUpMigrateGitConfig removes mirror info which prevents "push --all".
+// This also removes possible user credentials.
+func cleanUpMigrateGitConfig(ctx context.Context, repoPath string) error {
+ cmd := git.NewCommand(ctx, "remote", "rm", "origin")
+ // tolerate the command failing when the origin remote does not exist
+ _, stderr, err := cmd.RunStdString(&git.RunOpts{
+ Dir: repoPath,
+ })
+ if err != nil && !strings.HasPrefix(stderr, "fatal: No such remote") {
+ return err
+ }
+ return nil
+}
+
+// CleanUpMigrateInfo finishes migrating repository and/or wiki with things that don't need to be done for mirrors.
+func CleanUpMigrateInfo(ctx context.Context, repo *repo_model.Repository) (*repo_model.Repository, error) {
+ repoPath := repo.RepoPath()
+ if err := repo_module.CreateDelegateHooks(repoPath); err != nil {
+ return repo, fmt.Errorf("createDelegateHooks: %w", err)
+ }
+ if repo.HasWiki() {
+ if err := repo_module.CreateDelegateHooks(repo.WikiPath()); err != nil {
+ return repo, fmt.Errorf("createDelegateHooks.(wiki): %w", err)
+ }
+ }
+
+ _, _, err := git.NewCommand(ctx, "remote", "rm", "origin").RunStdString(&git.RunOpts{Dir: repoPath})
+ if err != nil && !strings.HasPrefix(err.Error(), "exit status 128 - fatal: No such remote ") {
+ return repo, fmt.Errorf("CleanUpMigrateInfo: %w", err)
+ }
+
+ if repo.HasWiki() {
+ if err := cleanUpMigrateGitConfig(ctx, repo.WikiPath()); err != nil {
+ return repo, fmt.Errorf("cleanUpMigrateGitConfig (wiki): %w", err)
+ }
+ }
+
+ return repo, UpdateRepository(ctx, repo, false)
+}
diff --git a/services/repository/push.go b/services/repository/push.go
new file mode 100644
index 0000000..afd6308
--- /dev/null
+++ b/services/repository/push.go
@@ -0,0 +1,420 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/cache"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/process"
+ "code.gitea.io/gitea/modules/queue"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+ issue_service "code.gitea.io/gitea/services/issue"
+ notify_service "code.gitea.io/gitea/services/notify"
+ pull_service "code.gitea.io/gitea/services/pull"
+)
+
+// pushQueue is a queue of push updates; its handler also triggers pull request tests
+var pushQueue *queue.WorkerPoolQueue[[]*repo_module.PushUpdateOptions]
+
+// handler processes batches of push update options from the push queue
+func handler(items ...[]*repo_module.PushUpdateOptions) [][]*repo_module.PushUpdateOptions {
+ for _, opts := range items {
+ if err := pushUpdates(opts); err != nil {
+			// Username and repository stay the same between items in opts.
+ pushUpdate := opts[0]
+ log.Error("pushUpdate[%s/%s] failed: %v", pushUpdate.RepoUserName, pushUpdate.RepoName, err)
+ }
+ }
+ return nil
+}
+
+func initPushQueue() error {
+ pushQueue = queue.CreateSimpleQueue(graceful.GetManager().ShutdownContext(), "push_update", handler)
+ if pushQueue == nil {
+ return errors.New("unable to create push_update queue")
+ }
+ go graceful.GetManager().RunWithCancel(pushQueue)
+ return nil
+}
+
+// PushUpdate is an alias of PushUpdates for a single set of push update options
+func PushUpdate(opts *repo_module.PushUpdateOptions) error {
+ return PushUpdates([]*repo_module.PushUpdateOptions{opts})
+}
+
+// PushUpdates adds push updates to the push queue
+func PushUpdates(opts []*repo_module.PushUpdateOptions) error {
+ if len(opts) == 0 {
+ return nil
+ }
+
+ for _, opt := range opts {
+ if opt.IsNewRef() && opt.IsDelRef() {
+			return errors.New("old and new revisions are both NULL")
+ }
+ }
+
+ return pushQueue.Push(opts)
+}
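+
+// A minimal usage sketch: a receive hook could enqueue a single branch update
+// like this, where doer and the commit IDs are placeholders:
+//
+//	_ = PushUpdate(&repo_module.PushUpdateOptions{
+//		PusherID:     doer.ID,
+//		RepoUserName: "user2",
+//		RepoName:     "repo1",
+//		RefFullName:  git.RefName("refs/heads/main"),
+//		OldCommitID:  oldCommitID,
+//		NewCommitID:  newCommitID,
+//	})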
+
+// pushUpdates generates push action history feeds for a push that updates multiple refs
+func pushUpdates(optsList []*repo_module.PushUpdateOptions) error {
+ if len(optsList) == 0 {
+ return nil
+ }
+
+ ctx, _, finished := process.GetManager().AddContext(graceful.GetManager().HammerContext(), fmt.Sprintf("PushUpdates: %s/%s", optsList[0].RepoUserName, optsList[0].RepoName))
+ defer finished()
+
+ repo, err := repo_model.GetRepositoryByOwnerAndName(ctx, optsList[0].RepoUserName, optsList[0].RepoName)
+ if err != nil {
+ return fmt.Errorf("GetRepositoryByOwnerAndName failed: %w", err)
+ }
+
+ gitRepo, err := gitrepo.OpenRepository(ctx, repo)
+ if err != nil {
+ return fmt.Errorf("OpenRepository[%s]: %w", repo.FullName(), err)
+ }
+ defer gitRepo.Close()
+
+ if err = repo_module.UpdateRepoSize(ctx, repo); err != nil {
+		return fmt.Errorf("failed to update size for repository: %w", err)
+ }
+
+ addTags := make([]string, 0, len(optsList))
+ delTags := make([]string, 0, len(optsList))
+ var pusher *user_model.User
+ objectFormat := git.ObjectFormatFromName(repo.ObjectFormatName)
+
+ for _, opts := range optsList {
+ log.Trace("pushUpdates: %-v %s %s %s", repo, opts.OldCommitID, opts.NewCommitID, opts.RefFullName)
+
+ if opts.IsNewRef() && opts.IsDelRef() {
+ return fmt.Errorf("old and new revisions are both %s", objectFormat.EmptyObjectID())
+ }
+ if opts.RefFullName.IsTag() {
+ if pusher == nil || pusher.ID != opts.PusherID {
+ if opts.PusherID == user_model.ActionsUserID {
+ pusher = user_model.NewActionsUser()
+ } else {
+ var err error
+ if pusher, err = user_model.GetUserByID(ctx, opts.PusherID); err != nil {
+ return err
+ }
+ }
+ }
+ tagName := opts.RefFullName.TagName()
+ if opts.IsDelRef() {
+ notify_service.PushCommits(
+ ctx, pusher, repo,
+ &repo_module.PushUpdateOptions{
+ RefFullName: git.RefNameFromTag(tagName),
+ OldCommitID: opts.OldCommitID,
+ NewCommitID: objectFormat.EmptyObjectID().String(),
+ }, repo_module.NewPushCommits())
+
+ delTags = append(delTags, tagName)
+ notify_service.DeleteRef(ctx, pusher, repo, opts.RefFullName)
+ } else { // is new tag
+ newCommit, err := gitRepo.GetCommit(opts.NewCommitID)
+ if err != nil {
+ return fmt.Errorf("gitRepo.GetCommit(%s) in %s/%s[%d]: %w", opts.NewCommitID, repo.OwnerName, repo.Name, repo.ID, err)
+ }
+
+ commits := repo_module.NewPushCommits()
+ commits.HeadCommit = repo_module.CommitToPushCommit(newCommit)
+ commits.CompareURL = repo.ComposeCompareURL(objectFormat.EmptyObjectID().String(), opts.NewCommitID)
+
+ notify_service.PushCommits(
+ ctx, pusher, repo,
+ &repo_module.PushUpdateOptions{
+ RefFullName: opts.RefFullName,
+ OldCommitID: objectFormat.EmptyObjectID().String(),
+ NewCommitID: opts.NewCommitID,
+ }, commits)
+
+ addTags = append(addTags, tagName)
+ notify_service.CreateRef(ctx, pusher, repo, opts.RefFullName, opts.NewCommitID)
+ }
+ } else if opts.RefFullName.IsBranch() {
+ if pusher == nil || pusher.ID != opts.PusherID {
+ if opts.PusherID == user_model.ActionsUserID {
+ pusher = user_model.NewActionsUser()
+ } else {
+ var err error
+ if pusher, err = user_model.GetUserByID(ctx, opts.PusherID); err != nil {
+ return err
+ }
+ }
+ }
+
+ branch := opts.RefFullName.BranchName()
+ if !opts.IsDelRef() {
+ log.Trace("TriggerTask '%s/%s' by %s", repo.Name, branch, pusher.Name)
+ pull_service.AddTestPullRequestTask(ctx, pusher, repo.ID, branch, true, opts.OldCommitID, opts.NewCommitID, opts.TimeNano)
+
+ newCommit, err := gitRepo.GetCommit(opts.NewCommitID)
+ if err != nil {
+ return fmt.Errorf("gitRepo.GetCommit(%s) in %s/%s[%d]: %w", opts.NewCommitID, repo.OwnerName, repo.Name, repo.ID, err)
+ }
+
+ refName := opts.RefName()
+
+ // Push new branch.
+ var l []*git.Commit
+ if opts.IsNewRef() {
+ if repo.IsEmpty { // Change default branch and empty status only if pushed ref is non-empty branch.
+ repo.DefaultBranch = refName
+ repo.IsEmpty = false
+ if repo.DefaultBranch != setting.Repository.DefaultBranch {
+ if err := gitrepo.SetDefaultBranch(ctx, repo, repo.DefaultBranch); err != nil {
+ if !git.IsErrUnsupportedVersion(err) {
+ return err
+ }
+ }
+ }
+						// Update the is_empty and default_branch columns
+ if err := repo_model.UpdateRepositoryCols(ctx, repo, "default_branch", "is_empty"); err != nil {
+ return fmt.Errorf("UpdateRepositoryCols: %w", err)
+ }
+ }
+
+ l, err = newCommit.CommitsBeforeLimit(10)
+ if err != nil {
+ return fmt.Errorf("newCommit.CommitsBeforeLimit: %w", err)
+ }
+ notify_service.CreateRef(ctx, pusher, repo, opts.RefFullName, opts.NewCommitID)
+ } else {
+ l, err = newCommit.CommitsBeforeUntil(opts.OldCommitID)
+ if err != nil {
+ return fmt.Errorf("newCommit.CommitsBeforeUntil: %w", err)
+ }
+
+ isForcePush, err := newCommit.IsForcePush(opts.OldCommitID)
+ if err != nil {
+ log.Error("IsForcePush %s:%s failed: %v", repo.FullName(), branch, err)
+ }
+
+ if isForcePush {
+ log.Trace("Push %s is a force push", opts.NewCommitID)
+
+ cache.Remove(repo.GetCommitsCountCacheKey(opts.RefName(), true))
+ } else {
+						// TODO: increment/update the commit count cache instead of removing it
+ cache.Remove(repo.GetCommitsCountCacheKey(opts.RefName(), true))
+ }
+ }
+
+ commits := repo_module.GitToPushCommits(l)
+ commits.HeadCommit = repo_module.CommitToPushCommit(newCommit)
+
+ if err := issue_service.UpdateIssuesCommit(ctx, pusher, repo, commits.Commits, refName); err != nil {
+ log.Error("updateIssuesCommit: %v", err)
+ }
+
+ oldCommitID := opts.OldCommitID
+ if oldCommitID == objectFormat.EmptyObjectID().String() && len(commits.Commits) > 0 {
+ oldCommit, err := gitRepo.GetCommit(commits.Commits[len(commits.Commits)-1].Sha1)
+ if err != nil && !git.IsErrNotExist(err) {
+ log.Error("unable to GetCommit %s from %-v: %v", oldCommitID, repo, err)
+ }
+ if oldCommit != nil {
+ for i := 0; i < oldCommit.ParentCount(); i++ {
+ commitID, _ := oldCommit.ParentID(i)
+ if !commitID.IsZero() {
+ oldCommitID = commitID.String()
+ break
+ }
+ }
+ }
+ }
+
+ if oldCommitID == objectFormat.EmptyObjectID().String() && repo.DefaultBranch != branch {
+ oldCommitID = repo.DefaultBranch
+ }
+
+ if oldCommitID != objectFormat.EmptyObjectID().String() {
+ commits.CompareURL = repo.ComposeCompareURL(oldCommitID, opts.NewCommitID)
+ } else {
+ commits.CompareURL = ""
+ }
+
+ if len(commits.Commits) > setting.UI.FeedMaxCommitNum {
+ commits.Commits = commits.Commits[:setting.UI.FeedMaxCommitNum]
+ }
+
+ notify_service.PushCommits(ctx, pusher, repo, opts, commits)
+
+				// Cache the ref for big repositories
+ if err := CacheRef(graceful.GetManager().HammerContext(), repo, gitRepo, opts.RefFullName); err != nil {
+					log.Error("CacheRef %s:%s failed: %v", repo.FullName(), branch, err)
+ }
+ } else {
+ notify_service.DeleteRef(ctx, pusher, repo, opts.RefFullName)
+				// close all pull requests that target the deleted branch
+				if err = pull_service.CloseBranchPulls(ctx, pusher, repo.ID, branch); err != nil {
+					log.Error("close related pull requests failed: %v", err)
+ }
+ }
+
+			// Even if the user deletes a branch on a repository they do not watch, they will start watching it.
+			if err = repo_model.WatchIfAuto(ctx, opts.PusherID, repo.ID, true); err != nil {
+				log.Warn("Failed to perform auto watch on user %v for repo %v: %v", opts.PusherID, repo.ID, err)
+ }
+ } else {
+ log.Trace("Non-tag and non-branch commits pushed.")
+ }
+ }
+ if err := PushUpdateAddDeleteTags(ctx, repo, gitRepo, addTags, delTags); err != nil {
+ return fmt.Errorf("PushUpdateAddDeleteTags: %w", err)
+ }
+
+ // Change repository last updated time.
+ if err := repo_model.UpdateRepositoryUpdatedTime(ctx, repo.ID, time.Now()); err != nil {
+ return fmt.Errorf("UpdateRepositoryUpdatedTime: %w", err)
+ }
+
+ return nil
+}
+
+// PushUpdateAddDeleteTags updates the database for the given added and deleted tags
+func PushUpdateAddDeleteTags(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, addTags, delTags []string) error {
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ if err := repo_model.PushUpdateDeleteTagsContext(ctx, repo, delTags); err != nil {
+ return err
+ }
+ return pushUpdateAddTags(ctx, repo, gitRepo, addTags)
+ })
+}
+
+// pushUpdateAddTags creates or updates releases for the given added tags
+func pushUpdateAddTags(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, tags []string) error {
+ if len(tags) == 0 {
+ return nil
+ }
+
+ releases, err := db.Find[repo_model.Release](ctx, repo_model.FindReleasesOptions{
+ RepoID: repo.ID,
+ TagNames: tags,
+ IncludeTags: true,
+ })
+ if err != nil {
+ return fmt.Errorf("db.Find[repo_model.Release]: %w", err)
+ }
+ relMap := make(map[string]*repo_model.Release)
+ for _, rel := range releases {
+ relMap[rel.LowerTagName] = rel
+ }
+
+ lowerTags := make([]string, 0, len(tags))
+ for _, tag := range tags {
+ lowerTags = append(lowerTags, strings.ToLower(tag))
+ }
+
+ newReleases := make([]*repo_model.Release, 0, len(lowerTags)-len(relMap))
+
+ emailToUser := make(map[string]*user_model.User)
+
+ for i, lowerTag := range lowerTags {
+ tag, err := gitRepo.GetTag(tags[i])
+ if err != nil {
+ return fmt.Errorf("GetTag: %w", err)
+ }
+ commit, err := tag.Commit(gitRepo)
+ if err != nil {
+ return fmt.Errorf("Commit: %w", err)
+ }
+
+ sig := tag.Tagger
+ if sig == nil {
+ sig = commit.Author
+ }
+ if sig == nil {
+ sig = commit.Committer
+ }
+ var author *user_model.User
+ createdAt := time.Unix(1, 0)
+
+ if sig != nil {
+ var ok bool
+ author, ok = emailToUser[sig.Email]
+ if !ok {
+ author, err = user_model.GetUserByEmail(ctx, sig.Email)
+ if err != nil && !user_model.IsErrUserNotExist(err) {
+ return fmt.Errorf("GetUserByEmail: %w", err)
+ }
+ if author != nil {
+ emailToUser[sig.Email] = author
+ }
+ }
+ createdAt = sig.When
+ }
+
+ commitsCount, err := commit.CommitsCount()
+ if err != nil {
+ return fmt.Errorf("CommitsCount: %w", err)
+ }
+
+ parts := strings.SplitN(tag.Message, "\n", 2)
+ note := ""
+ if len(parts) > 1 {
+ note = parts[1]
+ }
+
+ if rel, has := relMap[lowerTag]; !has {
+ rel = &repo_model.Release{
+ RepoID: repo.ID,
+ Title: parts[0],
+ TagName: tags[i],
+ LowerTagName: lowerTag,
+ Target: "",
+ Sha1: commit.ID.String(),
+ NumCommits: commitsCount,
+ Note: note,
+ IsDraft: false,
+ IsPrerelease: false,
+ IsTag: true,
+ CreatedUnix: timeutil.TimeStamp(createdAt.Unix()),
+ }
+ if author != nil {
+ rel.PublisherID = author.ID
+ }
+ newReleases = append(newReleases, rel)
+ } else {
+ rel.Title = parts[0]
+ rel.Note = note
+ rel.Sha1 = commit.ID.String()
+ rel.CreatedUnix = timeutil.TimeStamp(createdAt.Unix())
+ rel.NumCommits = commitsCount
+ if rel.IsTag && author != nil {
+ rel.PublisherID = author.ID
+ }
+ if err = repo_model.UpdateRelease(ctx, rel); err != nil {
+ return fmt.Errorf("Update: %w", err)
+ }
+ }
+ }
+
+ if len(newReleases) > 0 {
+ if err = db.Insert(ctx, newReleases); err != nil {
+ return fmt.Errorf("Insert: %w", err)
+ }
+ }
+
+ return nil
+}
diff --git a/services/repository/repository.go b/services/repository/repository.go
new file mode 100644
index 0000000..116e241
--- /dev/null
+++ b/services/repository/repository.go
@@ -0,0 +1,153 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/organization"
+ packages_model "code.gitea.io/gitea/models/packages"
+ repo_model "code.gitea.io/gitea/models/repo"
+ system_model "code.gitea.io/gitea/models/system"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ federation_service "code.gitea.io/gitea/services/federation"
+ notify_service "code.gitea.io/gitea/services/notify"
+ pull_service "code.gitea.io/gitea/services/pull"
+)
+
+// WebSearchRepository represents a repository returned by web search
+type WebSearchRepository struct {
+ Repository *structs.Repository `json:"repository"`
+ LatestCommitStatus *git.CommitStatus `json:"latest_commit_status"`
+ LocaleLatestCommitStatus string `json:"locale_latest_commit_status"`
+}
+
+// WebSearchResults results of a successful web search
+type WebSearchResults struct {
+ OK bool `json:"ok"`
+ Data []*WebSearchRepository `json:"data"`
+}
+
+// CreateRepository creates a repository for the user/organization.
+func CreateRepository(ctx context.Context, doer, owner *user_model.User, opts CreateRepoOptions) (*repo_model.Repository, error) {
+ repo, err := CreateRepositoryDirectly(ctx, doer, owner, opts)
+ if err != nil {
+		// No need to roll back here; that is handled inside CreateRepositoryDirectly.
+ return nil, err
+ }
+
+ notify_service.CreateRepository(ctx, doer, owner, repo)
+
+ return repo, nil
+}
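+
+// A minimal usage sketch; only the CreateRepoOptions fields shown here are assumed:
+//
+//	repo, err := CreateRepository(ctx, doer, owner, CreateRepoOptions{
+//		Name:      "example",
+//		IsPrivate: true,
+//	})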
+
+// DeleteRepository deletes a repository for a user or organization.
+func DeleteRepository(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, notify bool) error {
+ if err := pull_service.CloseRepoBranchesPulls(ctx, doer, repo); err != nil {
+ log.Error("CloseRepoBranchesPulls failed: %v", err)
+ }
+
+ if notify {
+ // If the repo itself has webhooks, we need to trigger them before deleting it...
+ notify_service.DeleteRepository(ctx, doer, repo)
+ }
+
+ if err := DeleteRepositoryDirectly(ctx, doer, repo.ID); err != nil {
+ return err
+ }
+
+ if err := federation_service.DeleteFollowingRepos(ctx, repo.ID); err != nil {
+ return err
+ }
+
+ return packages_model.UnlinkRepositoryFromAllPackages(ctx, repo.ID)
+}
+
+// PushCreateRepo creates a repository when a new repository is pushed to an appropriate namespace
+func PushCreateRepo(ctx context.Context, authUser, owner *user_model.User, repoName string) (*repo_model.Repository, error) {
+ if !authUser.IsAdmin {
+ if owner.IsOrganization() {
+ if ok, err := organization.CanCreateOrgRepo(ctx, owner.ID, authUser.ID); err != nil {
+ return nil, err
+ } else if !ok {
+ return nil, fmt.Errorf("cannot push-create repository for org")
+ }
+ } else if authUser.ID != owner.ID {
+ return nil, fmt.Errorf("cannot push-create repository for another user")
+ }
+ }
+
+ repo, err := CreateRepository(ctx, authUser, owner, CreateRepoOptions{
+ Name: repoName,
+ IsPrivate: setting.Repository.DefaultPushCreatePrivate || setting.Repository.ForcePrivate,
+ })
+ if err != nil {
+ return nil, err
+ }
+
+ return repo, nil
+}
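+
+// A minimal usage sketch, e.g. from a "push to create" code path, where
+// authUser is the authenticated pusher and owner the target namespace:
+//
+//	repo, err := PushCreateRepo(ctx, authUser, owner, "new-repo")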
+
+// Init starts the repository service
+func Init(ctx context.Context) error {
+ if err := repo_module.LoadRepoConfig(); err != nil {
+ return err
+ }
+ system_model.RemoveAllWithNotice(ctx, "Clean up temporary repository uploads", setting.Repository.Upload.TempPath)
+ system_model.RemoveAllWithNotice(ctx, "Clean up temporary repositories", repo_module.LocalCopyPath())
+ if err := initPushQueue(); err != nil {
+ return err
+ }
+ return initBranchSyncQueue(graceful.GetManager().ShutdownContext())
+}
+
+// UpdateRepository updates a repository
+func UpdateRepository(ctx context.Context, repo *repo_model.Repository, visibilityChanged bool) (err error) {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ if err = repo_module.UpdateRepository(ctx, repo, visibilityChanged); err != nil {
+ return fmt.Errorf("updateRepository: %w", err)
+ }
+
+ return committer.Commit()
+}
+
+// LinkedRepository returns the linked repo if any
+func LinkedRepository(ctx context.Context, a *repo_model.Attachment) (*repo_model.Repository, unit.Type, error) {
+ if a.IssueID != 0 {
+ iss, err := issues_model.GetIssueByID(ctx, a.IssueID)
+ if err != nil {
+ return nil, unit.TypeIssues, err
+ }
+ repo, err := repo_model.GetRepositoryByID(ctx, iss.RepoID)
+ unitType := unit.TypeIssues
+ if iss.IsPull {
+ unitType = unit.TypePullRequests
+ }
+ return repo, unitType, err
+ } else if a.ReleaseID != 0 {
+ rel, err := repo_model.GetReleaseByID(ctx, a.ReleaseID)
+ if err != nil {
+ return nil, unit.TypeReleases, err
+ }
+ repo, err := repo_model.GetRepositoryByID(ctx, rel.RepoID)
+ return repo, unit.TypeReleases, err
+ }
+ return nil, -1, nil
+}
diff --git a/services/repository/repository_test.go b/services/repository/repository_test.go
new file mode 100644
index 0000000..a5c0b3e
--- /dev/null
+++ b/services/repository/repository_test.go
@@ -0,0 +1,43 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/models/unittest"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestLinkedRepository(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ testCases := []struct {
+ name string
+ attachID int64
+ expectedRepo *repo_model.Repository
+ expectedUnitType unit.Type
+ }{
+ {"LinkedIssue", 1, &repo_model.Repository{ID: 1}, unit.TypeIssues},
+ {"LinkedComment", 3, &repo_model.Repository{ID: 1}, unit.TypePullRequests},
+ {"LinkedRelease", 9, &repo_model.Repository{ID: 1}, unit.TypeReleases},
+ {"Notlinked", 10, nil, -1},
+ }
+ for _, tc := range testCases {
+ t.Run(tc.name, func(t *testing.T) {
+ attach, err := repo_model.GetAttachmentByID(db.DefaultContext, tc.attachID)
+ require.NoError(t, err)
+ repo, unitType, err := LinkedRepository(db.DefaultContext, attach)
+ require.NoError(t, err)
+ if tc.expectedRepo != nil {
+ assert.Equal(t, tc.expectedRepo.ID, repo.ID)
+ }
+ assert.Equal(t, tc.expectedUnitType, unitType)
+ })
+ }
+}
diff --git a/services/repository/review.go b/services/repository/review.go
new file mode 100644
index 0000000..40513e6
--- /dev/null
+++ b/services/repository/review.go
@@ -0,0 +1,24 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/organization"
+ "code.gitea.io/gitea/models/perm"
+ repo_model "code.gitea.io/gitea/models/repo"
+)
+
+// GetReviewerTeams returns all teams that can be requested to review
+func GetReviewerTeams(ctx context.Context, repo *repo_model.Repository) ([]*organization.Team, error) {
+ if err := repo.LoadOwner(ctx); err != nil {
+ return nil, err
+ }
+ if !repo.Owner.IsOrganization() {
+ return nil, nil
+ }
+
+ return organization.GetTeamsWithAccessToRepo(ctx, repo.OwnerID, repo.ID, perm.AccessModeRead)
+}
diff --git a/services/repository/review_test.go b/services/repository/review_test.go
new file mode 100644
index 0000000..eb1712c
--- /dev/null
+++ b/services/repository/review_test.go
@@ -0,0 +1,29 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestRepoGetReviewerTeams(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ repo2 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2})
+ teams, err := GetReviewerTeams(db.DefaultContext, repo2)
+ require.NoError(t, err)
+ assert.Empty(t, teams)
+
+ repo3 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 3})
+ teams, err = GetReviewerTeams(db.DefaultContext, repo3)
+ require.NoError(t, err)
+ assert.Len(t, teams, 2)
+}
diff --git a/services/repository/setting.go b/services/repository/setting.go
new file mode 100644
index 0000000..33b00cc
--- /dev/null
+++ b/services/repository/setting.go
@@ -0,0 +1,57 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+ "slices"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/log"
+ actions_service "code.gitea.io/gitea/services/actions"
+)
+
+// UpdateRepositoryUnits updates a repository's units
+func UpdateRepositoryUnits(ctx context.Context, repo *repo_model.Repository, units []repo_model.RepoUnit, deleteUnitTypes []unit.Type) (err error) {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+	// Delete existing unit settings before adding them again
+ for _, u := range units {
+ deleteUnitTypes = append(deleteUnitTypes, u.Type)
+ }
+
+ if slices.Contains(deleteUnitTypes, unit.TypeActions) {
+ if err := actions_model.CleanRepoScheduleTasks(ctx, repo, true); err != nil {
+ log.Error("CleanRepoScheduleTasks: %v", err)
+ }
+ }
+
+ for _, u := range units {
+ if u.Type == unit.TypeActions {
+ if err := actions_service.DetectAndHandleSchedules(ctx, repo); err != nil {
+ log.Error("DetectAndHandleSchedules: %v", err)
+ }
+ break
+ }
+ }
+
+ if _, err = db.GetEngine(ctx).Where("repo_id = ?", repo.ID).In("type", deleteUnitTypes).Delete(new(repo_model.RepoUnit)); err != nil {
+ return err
+ }
+
+ if len(units) > 0 {
+ if err = db.Insert(ctx, units); err != nil {
+ return err
+ }
+ }
+
+ return committer.Commit()
+}
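+
+// A minimal usage sketch: enable the wiki unit while removing the issues unit
+// (using repo_model.UnitConfig as the wiki config is an assumption):
+//
+//	units := []repo_model.RepoUnit{
+//		{RepoID: repo.ID, Type: unit.TypeWiki, Config: new(repo_model.UnitConfig)},
+//	}
+//	err := UpdateRepositoryUnits(ctx, repo, units, []unit.Type{unit.TypeIssues})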
diff --git a/services/repository/star.go b/services/repository/star.go
new file mode 100644
index 0000000..505da0f
--- /dev/null
+++ b/services/repository/star.go
@@ -0,0 +1,27 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/federation"
+)
+
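+// StarRepoAndSendLikeActivities stars or unstars the repository for the doer
+// and, when starring with federation enabled, sends federated Like activities.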
+func StarRepoAndSendLikeActivities(ctx context.Context, doer user.User, repoID int64, star bool) error {
+ if err := repo.StarRepo(ctx, doer.ID, repoID, star); err != nil {
+ return err
+ }
+
+ if star && setting.Federation.Enabled {
+ if err := federation.SendLikeActivities(ctx, doer, repoID); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
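+
+// A minimal usage sketch, assuming doer is a loaded user.User value:
+//
+//	if err := StarRepoAndSendLikeActivities(ctx, doer, repo.ID, true); err != nil {
+//		log.Error("star repo: %v", err)
+//	}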
diff --git a/services/repository/template.go b/services/repository/template.go
new file mode 100644
index 0000000..36a680c
--- /dev/null
+++ b/services/repository/template.go
@@ -0,0 +1,135 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+// GenerateIssueLabels generates issue labels from a template repository
+func GenerateIssueLabels(ctx context.Context, templateRepo, generateRepo *repo_model.Repository) error {
+ templateLabels, err := issues_model.GetLabelsByRepoID(ctx, templateRepo.ID, "", db.ListOptions{})
+ if err != nil {
+ return err
+ }
+	// Prevent insert from being called with an empty slice, which would result
+	// in the error "no element on slice when insert".
+ if len(templateLabels) == 0 {
+ return nil
+ }
+
+ newLabels := make([]*issues_model.Label, 0, len(templateLabels))
+ for _, templateLabel := range templateLabels {
+ newLabels = append(newLabels, &issues_model.Label{
+ RepoID: generateRepo.ID,
+ Name: templateLabel.Name,
+ Exclusive: templateLabel.Exclusive,
+ Description: templateLabel.Description,
+ Color: templateLabel.Color,
+ })
+ }
+ return db.Insert(ctx, newLabels)
+}
+
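+// GenerateProtectedBranch generates protected branch rules from a template repository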
+func GenerateProtectedBranch(ctx context.Context, templateRepo, generateRepo *repo_model.Repository) error {
+ templateBranches, err := git_model.FindRepoProtectedBranchRules(ctx, templateRepo.ID)
+ if err != nil {
+ return err
+ }
+	// Prevent insert from being called with an empty slice, which would result
+	// in the error "no element on slice when insert".
+ if len(templateBranches) == 0 {
+ return nil
+ }
+
+ newBranches := make([]*git_model.ProtectedBranch, 0, len(templateBranches))
+ for _, templateBranch := range templateBranches {
+ templateBranch.ID = 0
+ templateBranch.RepoID = generateRepo.ID
+ templateBranch.UpdatedUnix = 0
+ templateBranch.CreatedUnix = 0
+ newBranches = append(newBranches, templateBranch)
+ }
+ return db.Insert(ctx, newBranches)
+}
+
+// GenerateRepository generates a repository from a template
+func GenerateRepository(ctx context.Context, doer, owner *user_model.User, templateRepo *repo_model.Repository, opts GenerateRepoOptions) (_ *repo_model.Repository, err error) {
+ if !doer.IsAdmin && !owner.CanCreateRepo() {
+ return nil, repo_model.ErrReachLimitOfRepo{
+ Limit: owner.MaxRepoCreation,
+ }
+ }
+
+ var generateRepo *repo_model.Repository
+ if err = db.WithTx(ctx, func(ctx context.Context) error {
+ generateRepo, err = generateRepository(ctx, doer, owner, templateRepo, opts)
+ if err != nil {
+ return err
+ }
+
+ // Git Content
+ if opts.GitContent && !templateRepo.IsEmpty {
+ if err = GenerateGitContent(ctx, templateRepo, generateRepo); err != nil {
+ return err
+ }
+ }
+
+ // Topics
+ if opts.Topics {
+ if err = repo_model.GenerateTopics(ctx, templateRepo, generateRepo); err != nil {
+ return err
+ }
+ }
+
+ // Git Hooks
+ if opts.GitHooks {
+ if err = GenerateGitHooks(ctx, templateRepo, generateRepo); err != nil {
+ return err
+ }
+ }
+
+ // Webhooks
+ if opts.Webhooks {
+ if err = GenerateWebhooks(ctx, templateRepo, generateRepo); err != nil {
+ return err
+ }
+ }
+
+ // Avatar
+ if opts.Avatar && len(templateRepo.Avatar) > 0 {
+ if err = generateAvatar(ctx, templateRepo, generateRepo); err != nil {
+ return err
+ }
+ }
+
+ // Issue Labels
+ if opts.IssueLabels {
+ if err = GenerateIssueLabels(ctx, templateRepo, generateRepo); err != nil {
+ return err
+ }
+ }
+
+ if opts.ProtectedBranch {
+ if err = GenerateProtectedBranch(ctx, templateRepo, generateRepo); err != nil {
+ return err
+ }
+ }
+
+ return nil
+ }); err != nil {
+ return nil, err
+ }
+
+ notify_service.CreateRepository(ctx, doer, owner, generateRepo)
+
+ return generateRepo, nil
+}
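+
+// A minimal usage sketch; apart from the toggles checked above, the Name field
+// of GenerateRepoOptions is an assumption:
+//
+//	generated, err := GenerateRepository(ctx, doer, owner, templateRepo, GenerateRepoOptions{
+//		Name:        "from-template",
+//		GitContent:  true,
+//		Topics:      true,
+//		IssueLabels: true,
+//	})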
diff --git a/services/repository/transfer.go b/services/repository/transfer.go
new file mode 100644
index 0000000..467c85e
--- /dev/null
+++ b/services/repository/transfer.go
@@ -0,0 +1,434 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/organization"
+ "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/sync"
+ "code.gitea.io/gitea/modules/util"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+// repoWorkingPool is a working pool used to serialize concurrent changes to the same repository
+// TODO: use clustered lock (unique queue? or *abuse* cache)
+var repoWorkingPool = sync.NewExclusivePool()
+
+// TransferOwnership transfers all corresponding settings from the old owner to the new one.
+func TransferOwnership(ctx context.Context, doer, newOwner *user_model.User, repo *repo_model.Repository, teams []*organization.Team) error {
+ if err := repo.LoadOwner(ctx); err != nil {
+ return err
+ }
+ for _, team := range teams {
+ if newOwner.ID != team.OrgID {
+ return fmt.Errorf("team %d does not belong to organization", team.ID)
+ }
+ }
+
+ oldOwner := repo.Owner
+
+ repoWorkingPool.CheckIn(fmt.Sprint(repo.ID))
+ if err := transferOwnership(ctx, doer, newOwner.Name, repo); err != nil {
+ repoWorkingPool.CheckOut(fmt.Sprint(repo.ID))
+ return err
+ }
+ repoWorkingPool.CheckOut(fmt.Sprint(repo.ID))
+
+ newRepo, err := repo_model.GetRepositoryByID(ctx, repo.ID)
+ if err != nil {
+ return err
+ }
+
+ for _, team := range teams {
+ if err := models.AddRepository(ctx, team, newRepo); err != nil {
+ return err
+ }
+ }
+
+ notify_service.TransferRepository(ctx, doer, repo, oldOwner.Name)
+
+ return nil
+}
+
+// transferOwnership transfers all corresponding repository items from the old owner to the new one.
+func transferOwnership(ctx context.Context, doer *user_model.User, newOwnerName string, repo *repo_model.Repository) (err error) {
+ repoRenamed := false
+ wikiRenamed := false
+ oldOwnerName := doer.Name
+
+ defer func() {
+ if !repoRenamed && !wikiRenamed {
+ return
+ }
+
+ recoverErr := recover()
+ if err == nil && recoverErr == nil {
+ return
+ }
+
+ if repoRenamed {
+ if err := util.Rename(repo_model.RepoPath(newOwnerName, repo.Name), repo_model.RepoPath(oldOwnerName, repo.Name)); err != nil {
+ log.Critical("Unable to move repository %s/%s directory from %s back to correct place %s: %v", oldOwnerName, repo.Name,
+ repo_model.RepoPath(newOwnerName, repo.Name), repo_model.RepoPath(oldOwnerName, repo.Name), err)
+ }
+ }
+
+ if wikiRenamed {
+ if err := util.Rename(repo_model.WikiPath(newOwnerName, repo.Name), repo_model.WikiPath(oldOwnerName, repo.Name)); err != nil {
+ log.Critical("Unable to move wiki for repository %s/%s directory from %s back to correct place %s: %v", oldOwnerName, repo.Name,
+ repo_model.WikiPath(newOwnerName, repo.Name), repo_model.WikiPath(oldOwnerName, repo.Name), err)
+ }
+ }
+
+ if recoverErr != nil {
+ log.Error("Panic within TransferOwnership: %v\n%s", recoverErr, log.Stack(2))
+ panic(recoverErr)
+ }
+ }()
+
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ sess := db.GetEngine(ctx)
+
+ newOwner, err := user_model.GetUserByName(ctx, newOwnerName)
+ if err != nil {
+ return fmt.Errorf("get new owner '%s': %w", newOwnerName, err)
+ }
+ newOwnerName = newOwner.Name // ensure capitalisation matches
+
+ // Check if new owner has repository with same name.
+ if has, err := repo_model.IsRepositoryModelOrDirExist(ctx, newOwner, repo.Name); err != nil {
+ return fmt.Errorf("IsRepositoryExist: %w", err)
+ } else if has {
+ return repo_model.ErrRepoAlreadyExist{
+ Uname: newOwnerName,
+ Name: repo.Name,
+ }
+ }
+
+ oldOwner := repo.Owner
+ oldOwnerName = oldOwner.Name
+
+	// Note: we have to set the value here to make sure the access recalculation
+	// is based on the new owner.
+ repo.OwnerID = newOwner.ID
+ repo.Owner = newOwner
+ repo.OwnerName = newOwner.Name
+
+ // Update repository.
+ if _, err := sess.ID(repo.ID).Update(repo); err != nil {
+ return fmt.Errorf("update owner: %w", err)
+ }
+
+ // Remove redundant collaborators.
+ collaborators, err := repo_model.GetCollaborators(ctx, repo.ID, db.ListOptions{})
+ if err != nil {
+ return fmt.Errorf("getCollaborators: %w", err)
+ }
+
+ // Dummy object.
+ collaboration := &repo_model.Collaboration{RepoID: repo.ID}
+ for _, c := range collaborators {
+ if c.IsGhost() {
+ collaboration.ID = c.Collaboration.ID
+ if _, err := sess.Delete(collaboration); err != nil {
+ return fmt.Errorf("remove collaborator '%d': %w", c.ID, err)
+ }
+ collaboration.ID = 0
+ }
+
+ if c.ID != newOwner.ID {
+ isMember, err := organization.IsOrganizationMember(ctx, newOwner.ID, c.ID)
+ if err != nil {
+ return fmt.Errorf("IsOrgMember: %w", err)
+ } else if !isMember {
+ continue
+ }
+ }
+ collaboration.UserID = c.ID
+ if _, err := sess.Delete(collaboration); err != nil {
+ return fmt.Errorf("remove collaborator '%d': %w", c.ID, err)
+ }
+ collaboration.UserID = 0
+ }
+
+ // Remove old team-repository relations.
+ if oldOwner.IsOrganization() {
+ if err := organization.RemoveOrgRepo(ctx, oldOwner.ID, repo.ID); err != nil {
+ return fmt.Errorf("removeOrgRepo: %w", err)
+ }
+ }
+
+ if newOwner.IsOrganization() {
+ teams, err := organization.FindOrgTeams(ctx, newOwner.ID)
+ if err != nil {
+ return fmt.Errorf("LoadTeams: %w", err)
+ }
+ for _, t := range teams {
+ if t.IncludesAllRepositories {
+ if err := models.AddRepository(ctx, t, repo); err != nil {
+ return fmt.Errorf("AddRepository: %w", err)
+ }
+ }
+ }
+ } else if err := access_model.RecalculateAccesses(ctx, repo); err != nil {
+		// For organizations, RecalculateAccesses is already called inside AddRepository.
+ return fmt.Errorf("recalculateAccesses: %w", err)
+ }
+
+ // Update repository count.
+ if _, err := sess.Exec("UPDATE `user` SET num_repos=num_repos+1 WHERE id=?", newOwner.ID); err != nil {
+ return fmt.Errorf("increase new owner repository count: %w", err)
+ } else if _, err := sess.Exec("UPDATE `user` SET num_repos=num_repos-1 WHERE id=?", oldOwner.ID); err != nil {
+ return fmt.Errorf("decrease old owner repository count: %w", err)
+ }
+
+ if err := repo_model.WatchRepo(ctx, doer.ID, repo.ID, true); err != nil {
+ return fmt.Errorf("watchRepo: %w", err)
+ }
+
+ // Remove watch for organization.
+ if oldOwner.IsOrganization() {
+ if err := repo_model.WatchRepo(ctx, oldOwner.ID, repo.ID, false); err != nil {
+ return fmt.Errorf("watchRepo [false]: %w", err)
+ }
+ }
+
+ // Delete labels that belong to the old organization and comments that added these labels
+ if oldOwner.IsOrganization() {
+ if _, err := sess.Exec(`DELETE FROM issue_label WHERE issue_label.id IN (
+ SELECT il_too.id FROM (
+ SELECT il_too_too.id
+ FROM issue_label AS il_too_too
+ INNER JOIN label ON il_too_too.label_id = label.id
+ INNER JOIN issue on issue.id = il_too_too.issue_id
+ WHERE
+ issue.repo_id = ? AND ((label.org_id = 0 AND issue.repo_id != label.repo_id) OR (label.repo_id = 0 AND label.org_id != ?))
+ ) AS il_too )`, repo.ID, newOwner.ID); err != nil {
+ return fmt.Errorf("Unable to remove old org labels: %w", err)
+ }
+
+ if _, err := sess.Exec(`DELETE FROM comment WHERE comment.id IN (
+ SELECT il_too.id FROM (
+ SELECT com.id
+ FROM comment AS com
+ INNER JOIN label ON com.label_id = label.id
+ INNER JOIN issue ON issue.id = com.issue_id
+ WHERE
+ com.type = ? AND issue.repo_id = ? AND ((label.org_id = 0 AND issue.repo_id != label.repo_id) OR (label.repo_id = 0 AND label.org_id != ?))
+ ) AS il_too)`, issues_model.CommentTypeLabel, repo.ID, newOwner.ID); err != nil {
+ return fmt.Errorf("Unable to remove old org label comments: %w", err)
+ }
+ }
+
+ // Rename remote repository to new path and delete local copy.
+ dir := user_model.UserPath(newOwner.Name)
+
+ if err := os.MkdirAll(dir, os.ModePerm); err != nil {
+ return fmt.Errorf("Failed to create dir %s: %w", dir, err)
+ }
+
+ if err := util.Rename(repo_model.RepoPath(oldOwner.Name, repo.Name), repo_model.RepoPath(newOwner.Name, repo.Name)); err != nil {
+ return fmt.Errorf("rename repository directory: %w", err)
+ }
+ repoRenamed = true
+
+ // Rename remote wiki repository to new path and delete local copy.
+ wikiPath := repo_model.WikiPath(oldOwner.Name, repo.Name)
+
+ if isExist, err := util.IsExist(wikiPath); err != nil {
+ log.Error("Unable to check if %s exists. Error: %v", wikiPath, err)
+ return err
+ } else if isExist {
+ if err := util.Rename(wikiPath, repo_model.WikiPath(newOwner.Name, repo.Name)); err != nil {
+ return fmt.Errorf("rename repository wiki: %w", err)
+ }
+ wikiRenamed = true
+ }
+
+ if err := models.DeleteRepositoryTransfer(ctx, repo.ID); err != nil {
+ return fmt.Errorf("deleteRepositoryTransfer: %w", err)
+ }
+ repo.Status = repo_model.RepositoryReady
+ if err := repo_model.UpdateRepositoryCols(ctx, repo, "status"); err != nil {
+ return err
+ }
+
+ // If there was previously a redirect at this location, remove it.
+ if err := repo_model.DeleteRedirect(ctx, newOwner.ID, repo.Name); err != nil {
+ return fmt.Errorf("delete repo redirect: %w", err)
+ }
+
+ if err := repo_model.NewRedirect(ctx, oldOwner.ID, repo.ID, repo.Name, repo.Name); err != nil {
+ return fmt.Errorf("repo_model.NewRedirect: %w", err)
+ }
+
+ return committer.Commit()
+}
+
+// changeRepositoryName changes all corresponding settings from the old repository name to the new one.
+func changeRepositoryName(ctx context.Context, repo *repo_model.Repository, newRepoName string) (err error) {
+ oldRepoName := repo.Name
+ newRepoName = strings.ToLower(newRepoName)
+ if err = repo_model.IsUsableRepoName(newRepoName); err != nil {
+ return err
+ }
+
+ if err := repo.LoadOwner(ctx); err != nil {
+ return err
+ }
+
+ has, err := repo_model.IsRepositoryModelOrDirExist(ctx, repo.Owner, newRepoName)
+ if err != nil {
+ return fmt.Errorf("IsRepositoryExist: %w", err)
+ } else if has {
+ return repo_model.ErrRepoAlreadyExist{
+ Uname: repo.Owner.Name,
+ Name: newRepoName,
+ }
+ }
+
+ newRepoPath := repo_model.RepoPath(repo.Owner.Name, newRepoName)
+ if err = util.Rename(repo.RepoPath(), newRepoPath); err != nil {
+ return fmt.Errorf("rename repository directory: %w", err)
+ }
+
+ wikiPath := repo.WikiPath()
+ isExist, err := util.IsExist(wikiPath)
+ if err != nil {
+ log.Error("Unable to check if %s exists. Error: %v", wikiPath, err)
+ return err
+ }
+ if isExist {
+ if err = util.Rename(wikiPath, repo_model.WikiPath(repo.Owner.Name, newRepoName)); err != nil {
+ return fmt.Errorf("rename repository wiki: %w", err)
+ }
+ }
+
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ if err := repo_model.NewRedirect(ctx, repo.Owner.ID, repo.ID, oldRepoName, newRepoName); err != nil {
+ return err
+ }
+
+ return committer.Commit()
+}
+
+// ChangeRepositoryName changes all corresponding settings from the old repository name to the new one.
+func ChangeRepositoryName(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, newRepoName string) error {
+ log.Trace("ChangeRepositoryName: %s/%s -> %s", doer.Name, repo.Name, newRepoName)
+
+ oldRepoName := repo.Name
+
+ // Change repository directory name. We must lock the local copy of the
+	// repo so that we can atomically rename the repo path and update the
+ // local copy's origin accordingly.
+
+ repoWorkingPool.CheckIn(fmt.Sprint(repo.ID))
+ if err := changeRepositoryName(ctx, repo, newRepoName); err != nil {
+ repoWorkingPool.CheckOut(fmt.Sprint(repo.ID))
+ return err
+ }
+ repoWorkingPool.CheckOut(fmt.Sprint(repo.ID))
+
+ repo.Name = newRepoName
+ notify_service.RenameRepository(ctx, doer, repo, oldRepoName)
+
+ return nil
+}
+
+// StartRepositoryTransfer transfers a repo from one owner to a new one.
+// It puts the repository into a pending transfer state if the doer cannot create a repo for the new owner.
+func StartRepositoryTransfer(ctx context.Context, doer, newOwner *user_model.User, repo *repo_model.Repository, teams []*organization.Team) error {
+ if user_model.IsBlocked(ctx, newOwner.ID, doer.ID) {
+ return user_model.ErrBlockedByUser
+ }
+
+ if err := models.TestRepositoryReadyForTransfer(repo.Status); err != nil {
+ return err
+ }
+
+	// Admins are always allowed to transfer; users may also transfer a repo back to their own account
+ if doer.IsAdmin || doer.ID == newOwner.ID {
+ return TransferOwnership(ctx, doer, newOwner, repo, teams)
+ }
+
+	// If the new owner is an org and the user can create repos in it, they can transfer directly too
+ if newOwner.IsOrganization() {
+ allowed, err := organization.CanCreateOrgRepo(ctx, newOwner.ID, doer.ID)
+ if err != nil {
+ return err
+ }
+ if allowed {
+ return TransferOwnership(ctx, doer, newOwner, repo, teams)
+ }
+ }
+
+	// If the new owner would not otherwise have sufficient access to the repo, grant read access
+ hasAccess, err := access_model.HasAccess(ctx, newOwner.ID, repo)
+ if err != nil {
+ return err
+ }
+ if !hasAccess {
+ if err := repo_module.AddCollaborator(ctx, repo, newOwner); err != nil {
+ return err
+ }
+ if err := repo_model.ChangeCollaborationAccessMode(ctx, repo, newOwner.ID, perm.AccessModeRead); err != nil {
+ return err
+ }
+ }
+
+	// Mark the repo as pending transfer
+ repo.Status = repo_model.RepositoryPendingTransfer
+ if err := models.CreatePendingRepositoryTransfer(ctx, doer, newOwner, repo.ID, teams); err != nil {
+ return err
+ }
+
+ // notify users who are able to accept / reject transfer
+ notify_service.RepoPendingTransfer(ctx, doer, newOwner, repo)
+
+ return nil
+}
+
+// CancelRepositoryTransfer marks the repository as ready and removes the pending transfer entry,
+// thus cancelling the transfer process.
+func CancelRepositoryTransfer(ctx context.Context, repo *repo_model.Repository) error {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ repo.Status = repo_model.RepositoryReady
+ if err := repo_model.UpdateRepositoryCols(ctx, repo, "status"); err != nil {
+ return err
+ }
+
+ if err := models.DeleteRepositoryTransfer(ctx, repo.ID); err != nil {
+ return err
+ }
+
+ return committer.Commit()
+}
diff --git a/services/repository/transfer_test.go b/services/repository/transfer_test.go
new file mode 100644
index 0000000..cc51a05
--- /dev/null
+++ b/services/repository/transfer_test.go
@@ -0,0 +1,124 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "sync"
+ "testing"
+
+ "code.gitea.io/gitea/models"
+ activities_model "code.gitea.io/gitea/models/activities"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/organization"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/feed"
+ notify_service "code.gitea.io/gitea/services/notify"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+var notifySync sync.Once
+
+func registerNotifier() {
+ notifySync.Do(func() {
+ notify_service.RegisterNotifier(feed.NewNotifier())
+ })
+}
+
+func TestTransferOwnership(t *testing.T) {
+ registerNotifier()
+
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 3})
+ repo.Owner = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: repo.OwnerID})
+ require.NoError(t, TransferOwnership(db.DefaultContext, doer, doer, repo, nil))
+
+ transferredRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 3})
+ assert.EqualValues(t, 2, transferredRepo.OwnerID)
+
+ exist, err := util.IsExist(repo_model.RepoPath("org3", "repo3"))
+ require.NoError(t, err)
+ assert.False(t, exist)
+ exist, err = util.IsExist(repo_model.RepoPath("user2", "repo3"))
+ require.NoError(t, err)
+ assert.True(t, exist)
+ unittest.AssertExistsAndLoadBean(t, &activities_model.Action{
+ OpType: activities_model.ActionTransferRepo,
+ ActUserID: 2,
+ RepoID: 3,
+ Content: "org3/repo3",
+ })
+
+ unittest.CheckConsistencyFor(t, &repo_model.Repository{}, &user_model.User{}, &organization.Team{})
+}
+
+func TestStartRepositoryTransferSetPermission(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3})
+ recipient := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 5})
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 5})
+ repo.Owner = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: repo.OwnerID})
+
+ hasAccess, err := access_model.HasAccess(db.DefaultContext, recipient.ID, repo)
+ require.NoError(t, err)
+ assert.False(t, hasAccess)
+
+ require.NoError(t, StartRepositoryTransfer(db.DefaultContext, doer, recipient, repo, nil))
+
+ hasAccess, err = access_model.HasAccess(db.DefaultContext, recipient.ID, repo)
+ require.NoError(t, err)
+ assert.True(t, hasAccess)
+
+ unittest.CheckConsistencyFor(t, &repo_model.Repository{}, &user_model.User{}, &organization.Team{})
+}
+
+func TestRepositoryTransfer(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3})
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 3})
+
+ transfer, err := models.GetPendingRepositoryTransfer(db.DefaultContext, repo)
+ require.NoError(t, err)
+ assert.NotNil(t, transfer)
+
+ // Cancel transfer
+ require.NoError(t, CancelRepositoryTransfer(db.DefaultContext, repo))
+
+ transfer, err = models.GetPendingRepositoryTransfer(db.DefaultContext, repo)
+ require.Error(t, err)
+ assert.Nil(t, transfer)
+ assert.True(t, models.IsErrNoPendingTransfer(err))
+
+ user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+
+ require.NoError(t, models.CreatePendingRepositoryTransfer(db.DefaultContext, doer, user2, repo.ID, nil))
+
+ transfer, err = models.GetPendingRepositoryTransfer(db.DefaultContext, repo)
+ require.NoError(t, err)
+ require.NoError(t, transfer.LoadAttributes(db.DefaultContext))
+ assert.Equal(t, "user2", transfer.Recipient.Name)
+
+	org6 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 6})
+
+	// Only one transfer can be started at any given time
+ err = models.CreatePendingRepositoryTransfer(db.DefaultContext, doer, org6, repo.ID, nil)
+ require.Error(t, err)
+ assert.True(t, models.IsErrRepoTransferInProgress(err))
+
+ // Unknown user
+ err = models.CreatePendingRepositoryTransfer(db.DefaultContext, doer, &user_model.User{ID: 1000, LowerName: "user1000"}, repo.ID, nil)
+ require.Error(t, err)
+
+ // Cancel transfer
+ require.NoError(t, CancelRepositoryTransfer(db.DefaultContext, repo))
+}
diff --git a/services/secrets/secrets.go b/services/secrets/secrets.go
new file mode 100644
index 0000000..031c474
--- /dev/null
+++ b/services/secrets/secrets.go
@@ -0,0 +1,83 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package secrets
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/db"
+ secret_model "code.gitea.io/gitea/models/secret"
+)
+
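+// CreateOrUpdateSecret creates a new secret or updates an existing one with the
+// given data, returning the secret and whether it was newly created.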
+func CreateOrUpdateSecret(ctx context.Context, ownerID, repoID int64, name, data string) (*secret_model.Secret, bool, error) {
+ if err := ValidateName(name); err != nil {
+ return nil, false, err
+ }
+
+ s, err := db.Find[secret_model.Secret](ctx, secret_model.FindSecretsOptions{
+ OwnerID: ownerID,
+ RepoID: repoID,
+ Name: name,
+ })
+ if err != nil {
+ return nil, false, err
+ }
+
+ if len(s) == 0 {
+ s, err := secret_model.InsertEncryptedSecret(ctx, ownerID, repoID, name, data)
+ if err != nil {
+ return nil, false, err
+ }
+ return s, true, nil
+ }
+
+ if err := secret_model.UpdateSecret(ctx, s[0].ID, data); err != nil {
+ return nil, false, err
+ }
+
+ return s[0], false, nil
+}
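+
+// A minimal usage sketch for a repository-level secret; passing a zero ownerID
+// to scope the secret to the repository mirrors the find options above:
+//
+//	s, created, err := CreateOrUpdateSecret(ctx, 0, repo.ID, "DEPLOY_TOKEN", token)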
+
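+// DeleteSecretByID deletes the secret with the given ID, scoped to the given owner and repository.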
+func DeleteSecretByID(ctx context.Context, ownerID, repoID, secretID int64) error {
+ s, err := db.Find[secret_model.Secret](ctx, secret_model.FindSecretsOptions{
+ OwnerID: ownerID,
+ RepoID: repoID,
+ SecretID: secretID,
+ })
+ if err != nil {
+ return err
+ }
+ if len(s) != 1 {
+ return secret_model.ErrSecretNotFound{}
+ }
+
+ return deleteSecret(ctx, s[0])
+}
+
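+// DeleteSecretByName deletes the secret with the given name, scoped to the given owner and repository.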
+func DeleteSecretByName(ctx context.Context, ownerID, repoID int64, name string) error {
+ if err := ValidateName(name); err != nil {
+ return err
+ }
+
+ s, err := db.Find[secret_model.Secret](ctx, secret_model.FindSecretsOptions{
+ OwnerID: ownerID,
+ RepoID: repoID,
+ Name: name,
+ })
+ if err != nil {
+ return err
+ }
+ if len(s) != 1 {
+ return secret_model.ErrSecretNotFound{}
+ }
+
+ return deleteSecret(ctx, s[0])
+}
+
+func deleteSecret(ctx context.Context, s *secret_model.Secret) error {
+ if _, err := db.DeleteByID[secret_model.Secret](ctx, s.ID); err != nil {
+ return err
+ }
+ return nil
+}
diff --git a/services/secrets/validation.go b/services/secrets/validation.go
new file mode 100644
index 0000000..3db5b96
--- /dev/null
+++ b/services/secrets/validation.go
@@ -0,0 +1,25 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package secrets
+
+import (
+ "regexp"
+
+ "code.gitea.io/gitea/modules/util"
+)
+
+// https://docs.github.com/en/actions/security-guides/encrypted-secrets#naming-your-secrets
+var (
+ namePattern = regexp.MustCompile("(?i)^[A-Z_][A-Z0-9_]*$")
+ forbiddenPrefixPattern = regexp.MustCompile("(?i)^GIT(EA|HUB)_")
+
+ ErrInvalidName = util.NewInvalidArgumentErrorf("invalid secret name")
+)
+
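+// ValidateName returns ErrInvalidName if name does not satisfy the naming rules.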
+func ValidateName(name string) error {
+ if !namePattern.MatchString(name) || forbiddenPrefixPattern.MatchString(name) {
+ return ErrInvalidName
+ }
+ return nil
+}
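+
+// Worked examples of the rules above: "DEPLOY_TOKEN" and "_private" are valid;
+// "1TOKEN" (leading digit), "GITEA_TOKEN" and "github_token" (forbidden
+// prefixes, matched case-insensitively) are rejected.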
diff --git a/services/task/migrate.go b/services/task/migrate.go
new file mode 100644
index 0000000..9cef77a
--- /dev/null
+++ b/services/task/migrate.go
@@ -0,0 +1,154 @@
+// Copyright 2019 Gitea. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package task
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "strings"
+ "time"
+
+ admin_model "code.gitea.io/gitea/models/admin"
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/migration"
+ "code.gitea.io/gitea/modules/process"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/migrations"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+func handleCreateError(owner *user_model.User, err error) error {
+ switch {
+ case repo_model.IsErrReachLimitOfRepo(err):
+ return fmt.Errorf("you have already reached your limit of %d repositories", owner.MaxCreationLimit())
+ case repo_model.IsErrRepoAlreadyExist(err):
+ return errors.New("the repository name is already used")
+ case db.IsErrNameReserved(err):
+ return fmt.Errorf("the repository name '%s' is reserved", err.(db.ErrNameReserved).Name)
+ case db.IsErrNamePatternNotAllowed(err):
+ return fmt.Errorf("the pattern '%s' is not allowed in a repository name", err.(db.ErrNamePatternNotAllowed).Pattern)
+ default:
+ return err
+ }
+}
+
+func runMigrateTask(ctx context.Context, t *admin_model.Task) (err error) {
+ defer func(ctx context.Context) {
+ if e := recover(); e != nil {
+ err = fmt.Errorf("PANIC whilst trying to do migrate task: %v", e)
+ log.Critical("PANIC during runMigrateTask[%d] by DoerID[%d] to RepoID[%d] for OwnerID[%d]: %v\nStacktrace: %v", t.ID, t.DoerID, t.RepoID, t.OwnerID, e, log.Stack(2))
+ }
+ if err == nil {
+ err = admin_model.FinishMigrateTask(ctx, t)
+ if err == nil {
+ notify_service.MigrateRepository(ctx, t.Doer, t.Owner, t.Repo)
+ return
+ }
+
+ log.Error("FinishMigrateTask[%d] by DoerID[%d] to RepoID[%d] for OwnerID[%d] failed: %v", t.ID, t.DoerID, t.RepoID, t.OwnerID, err)
+ }
+
+ log.Error("runMigrateTask[%d] by DoerID[%d] to RepoID[%d] for OwnerID[%d] failed: %v", t.ID, t.DoerID, t.RepoID, t.OwnerID, err)
+
+ t.EndTime = timeutil.TimeStampNow()
+ t.Status = structs.TaskStatusFailed
+ t.Message = err.Error()
+ if err := t.UpdateCols(ctx, "status", "message", "end_time"); err != nil {
+ log.Error("Task UpdateCols failed: %v", err)
+ }
+
+		// Do not delete the repository here, otherwise users won't be able to see the last error
+ }(graceful.GetManager().ShutdownContext()) // even if the parent ctx is canceled, this defer-function still needs to update the task record in database
+
+ if err = t.LoadRepo(ctx); err != nil {
+ return err
+ }
+
+ // if repository is ready, then just finish the task
+ if t.Repo.Status == repo_model.RepositoryReady {
+ return nil
+ }
+
+ if err = t.LoadDoer(ctx); err != nil {
+ return err
+ }
+ if err = t.LoadOwner(ctx); err != nil {
+ return err
+ }
+
+ var opts *migration.MigrateOptions
+ opts, err = t.MigrateConfig()
+ if err != nil {
+ return err
+ }
+
+ opts.MigrateToRepoID = t.RepoID
+
+ pm := process.GetManager()
+ ctx, cancel, finished := pm.AddContext(graceful.GetManager().ShutdownContext(), fmt.Sprintf("MigrateTask: %s/%s", t.Owner.Name, opts.RepoName))
+ defer finished()
+
+ t.StartTime = timeutil.TimeStampNow()
+ t.Status = structs.TaskStatusRunning
+ if err = t.UpdateCols(ctx, "start_time", "status"); err != nil {
+ return err
+ }
+
+	// check whether the task should be canceled; this goroutine is also managed by the process manager
+ go func() {
+ for {
+ select {
+ case <-time.After(2 * time.Second):
+ case <-ctx.Done():
+ return
+ }
+ task, _ := admin_model.GetMigratingTask(ctx, t.RepoID)
+ if task != nil && task.Status != structs.TaskStatusRunning {
+				log.Debug("MigrateTask[%d] by DoerID[%d] to RepoID[%d] for OwnerID[%d] is canceled because its status is no longer 'running'", t.ID, t.DoerID, t.RepoID, t.OwnerID)
+ cancel()
+ return
+ }
+ }
+ }()
+
+ t.Repo, err = migrations.MigrateRepository(ctx, t.Doer, t.Owner.Name, *opts, func(format string, args ...any) {
+ message := admin_model.TranslatableMessage{
+ Format: format,
+ Args: args,
+ }
+ bs, _ := json.Marshal(message)
+ t.Message = string(bs)
+ _ = t.UpdateCols(ctx, "message")
+ })
+
+ if err == nil {
+ log.Trace("Repository migrated [%d]: %s/%s", t.Repo.ID, t.Owner.Name, t.Repo.Name)
+ return nil
+ }
+
+ if repo_model.IsErrRepoAlreadyExist(err) {
+ return errors.New("the repository name is already used")
+ }
+
+	// the error may contain the remote address with credentials, so sanitize it
+ err = util.SanitizeErrorCredentialURLs(err)
+ if strings.Contains(err.Error(), "Authentication failed") ||
+ strings.Contains(err.Error(), "could not read Username") {
+ return fmt.Errorf("authentication failed: %w", err)
+ } else if strings.Contains(err.Error(), "fatal:") {
+ return fmt.Errorf("migration failed: %w", err)
+ }
+
+ // do not be tempted to coalesce this line with the return
+ err = handleCreateError(t.Owner, err)
+ return err
+}
diff --git a/services/task/task.go b/services/task/task.go
new file mode 100644
index 0000000..ac659ac
--- /dev/null
+++ b/services/task/task.go
@@ -0,0 +1,169 @@
+// Copyright 2019 Gitea. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package task
+
+import (
+ "context"
+ "fmt"
+
+ admin_model "code.gitea.io/gitea/models/admin"
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ base "code.gitea.io/gitea/modules/migration"
+ "code.gitea.io/gitea/modules/queue"
+ "code.gitea.io/gitea/modules/secret"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+ repo_service "code.gitea.io/gitea/services/repository"
+)
+
+// taskQueue is a global queue of tasks
+var taskQueue *queue.WorkerPoolQueue[*admin_model.Task]
+
+// Run a task
+func Run(ctx context.Context, t *admin_model.Task) error {
+ switch t.Type {
+ case structs.TaskTypeMigrateRepo:
+ return runMigrateTask(ctx, t)
+ default:
+		return fmt.Errorf("unknown task type: %d", t.Type)
+ }
+}
+
+// Init starts the service that picks up unfinished tasks and runs them.
+func Init() error {
+ taskQueue = queue.CreateSimpleQueue(graceful.GetManager().ShutdownContext(), "task", handler)
+ if taskQueue == nil {
+ return fmt.Errorf("unable to create task queue")
+ }
+ go graceful.GetManager().RunWithCancel(taskQueue)
+ return nil
+}
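+
+// A minimal sketch of how the task service might be wired up at startup
+// (hypothetical call site, for illustration only):
+//
+//	if err := task.Init(); err != nil {
+//		log.Fatal("task.Init: %v", err)
+//	}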
+
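+// handler runs each queued task; failures are logged and the task is not
+// retried automatically.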
+func handler(items ...*admin_model.Task) []*admin_model.Task {
+ for _, task := range items {
+ if err := Run(db.DefaultContext, task); err != nil {
+ log.Error("Run task failed: %v", err)
+ }
+ }
+ return nil
+}
+
+// MigrateRepository creates a migration task for the repository and pushes it to the task queue.
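+//
+// A minimal sketch of a hypothetical caller (the option values here are
+// assumed for illustration):
+//
+//	opts := base.MigrateOptions{
+//		CloneAddr: "https://example.com/org/repo.git",
+//		RepoName:  "repo",
+//	}
+//	if err := task.MigrateRepository(ctx, doer, owner, opts); err != nil {
+//		log.Error("MigrateRepository: %v", err)
+//	}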
+func MigrateRepository(ctx context.Context, doer, u *user_model.User, opts base.MigrateOptions) error {
+ task, err := CreateMigrateTask(ctx, doer, u, opts)
+ if err != nil {
+ return err
+ }
+
+ return taskQueue.Push(task)
+}
+
+// CreateMigrateTask creates a migrate task
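+// It encrypts the clone address and credentials before persisting the task
+// payload, and creates the target repository in the "being migrated" state.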
+func CreateMigrateTask(ctx context.Context, doer, u *user_model.User, opts base.MigrateOptions) (*admin_model.Task, error) {
+ // encrypt credentials for persistence
+ var err error
+ opts.CloneAddrEncrypted, err = secret.EncryptSecret(setting.SecretKey, opts.CloneAddr)
+ if err != nil {
+ return nil, err
+ }
+ opts.CloneAddr = util.SanitizeCredentialURLs(opts.CloneAddr)
+ opts.AuthPasswordEncrypted, err = secret.EncryptSecret(setting.SecretKey, opts.AuthPassword)
+ if err != nil {
+ return nil, err
+ }
+ opts.AuthPassword = ""
+ opts.AuthTokenEncrypted, err = secret.EncryptSecret(setting.SecretKey, opts.AuthToken)
+ if err != nil {
+ return nil, err
+ }
+ opts.AuthToken = ""
+ bs, err := json.Marshal(&opts)
+ if err != nil {
+ return nil, err
+ }
+
+ task := &admin_model.Task{
+ DoerID: doer.ID,
+ OwnerID: u.ID,
+ Type: structs.TaskTypeMigrateRepo,
+ Status: structs.TaskStatusQueued,
+ PayloadContent: string(bs),
+ }
+
+ if err := admin_model.CreateTask(ctx, task); err != nil {
+ return nil, err
+ }
+
+ repo, err := repo_service.CreateRepositoryDirectly(ctx, doer, u, repo_service.CreateRepoOptions{
+ Name: opts.RepoName,
+ Description: opts.Description,
+ OriginalURL: opts.OriginalURL,
+ GitServiceType: opts.GitServiceType,
+ IsPrivate: opts.Private || setting.Repository.ForcePrivate,
+ IsMirror: opts.Mirror,
+ Status: repo_model.RepositoryBeingMigrated,
+ })
+ if err != nil {
+ task.EndTime = timeutil.TimeStampNow()
+ task.Status = structs.TaskStatusFailed
+ err2 := task.UpdateCols(ctx, "end_time", "status")
+ if err2 != nil {
+			log.Error("UpdateCols failed: %v", err2)
+ }
+ return nil, err
+ }
+
+ task.RepoID = repo.ID
+ if err = task.UpdateCols(ctx, "repo_id"); err != nil {
+ return nil, err
+ }
+
+ return task, nil
+}
+
+// RetryMigrateTask requeues a stopped or failed migration task for the given repository.
+func RetryMigrateTask(ctx context.Context, repoID int64) error {
+ migratingTask, err := admin_model.GetMigratingTask(ctx, repoID)
+ if err != nil {
+ log.Error("GetMigratingTask: %v", err)
+ return err
+ }
+ if migratingTask.Status == structs.TaskStatusQueued || migratingTask.Status == structs.TaskStatusRunning {
+ return nil
+ }
+
+	// TODO: remove the storage/database garbage left behind by the failed task
+
+ // Reset task status and messages
+ migratingTask.Status = structs.TaskStatusQueued
+ migratingTask.Message = ""
+ if err = migratingTask.UpdateCols(ctx, "status", "message"); err != nil {
+ log.Error("task.UpdateCols failed: %v", err)
+ return err
+ }
+
+ return taskQueue.Push(migratingTask)
+}
+
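+// SetMigrateTaskMessage updates the stored message of the migrating task for
+// the given repository.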
+func SetMigrateTaskMessage(ctx context.Context, repoID int64, message string) error {
+ migratingTask, err := admin_model.GetMigratingTask(ctx, repoID)
+ if err != nil {
+ log.Error("GetMigratingTask: %v", err)
+ return err
+ }
+
+ migratingTask.Message = message
+ if err = migratingTask.UpdateCols(ctx, "message"); err != nil {
+ log.Error("task.UpdateCols failed: %v", err)
+ return err
+ }
+ return nil
+}
diff --git a/services/uinotification/notify.go b/services/uinotification/notify.go
new file mode 100644
index 0000000..be5f701
--- /dev/null
+++ b/services/uinotification/notify.go
@@ -0,0 +1,261 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package uinotification
+
+import (
+ "context"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/queue"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+type (
+ notificationService struct {
+ notify_service.NullNotifier
+ issueQueue *queue.WorkerPoolQueue[issueNotificationOpts]
+ }
+
+ issueNotificationOpts struct {
+ IssueID int64
+ CommentID int64
+ NotificationAuthorID int64
+		ReceiverID           int64 // 0 means notify all watchers
+ }
+)
+
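+// Init registers the UI notification notifier with the notification service.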
+func Init() error {
+ notify_service.RegisterNotifier(NewNotifier())
+
+ return nil
+}
+
+var _ notify_service.Notifier = &notificationService{}
+
+// NewNotifier creates a new notificationService notifier.
+func NewNotifier() notify_service.Notifier {
+ ns := &notificationService{}
+ ns.issueQueue = queue.CreateSimpleQueue(graceful.GetManager().ShutdownContext(), "notification-service", handler)
+ if ns.issueQueue == nil {
+ log.Fatal("Unable to create notification-service queue")
+ }
+ return ns
+}
+
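+// handler persists the queued issue notification updates to the database.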
+func handler(items ...issueNotificationOpts) []issueNotificationOpts {
+ for _, opts := range items {
+ if err := activities_model.CreateOrUpdateIssueNotifications(db.DefaultContext, opts.IssueID, opts.CommentID, opts.NotificationAuthorID, opts.ReceiverID); err != nil {
+			log.Error("Unable to create issue notification: %v", err)
+ }
+ }
+ return nil
+}
+
+func (ns *notificationService) Run() {
+ go graceful.GetManager().RunWithCancel(ns.issueQueue) // TODO: using "go" here doesn't seem right, just leave it as old code
+}
+
+func (ns *notificationService) CreateIssueComment(ctx context.Context, doer *user_model.User, repo *repo_model.Repository,
+ issue *issues_model.Issue, comment *issues_model.Comment, mentions []*user_model.User,
+) {
+ opts := issueNotificationOpts{
+ IssueID: issue.ID,
+ NotificationAuthorID: doer.ID,
+ }
+ if comment != nil {
+ opts.CommentID = comment.ID
+ }
+ _ = ns.issueQueue.Push(opts)
+ for _, mention := range mentions {
+ opts := issueNotificationOpts{
+ IssueID: issue.ID,
+ NotificationAuthorID: doer.ID,
+ ReceiverID: mention.ID,
+ }
+ if comment != nil {
+ opts.CommentID = comment.ID
+ }
+ _ = ns.issueQueue.Push(opts)
+ }
+}
+
+func (ns *notificationService) NewIssue(ctx context.Context, issue *issues_model.Issue, mentions []*user_model.User) {
+ _ = ns.issueQueue.Push(issueNotificationOpts{
+ IssueID: issue.ID,
+ NotificationAuthorID: issue.Poster.ID,
+ })
+ for _, mention := range mentions {
+ _ = ns.issueQueue.Push(issueNotificationOpts{
+ IssueID: issue.ID,
+ NotificationAuthorID: issue.Poster.ID,
+ ReceiverID: mention.ID,
+ })
+ }
+}
+
+func (ns *notificationService) IssueChangeStatus(ctx context.Context, doer *user_model.User, commitID string, issue *issues_model.Issue, actionComment *issues_model.Comment, isClosed bool) {
+ _ = ns.issueQueue.Push(issueNotificationOpts{
+ IssueID: issue.ID,
+ NotificationAuthorID: doer.ID,
+ CommentID: actionComment.ID,
+ })
+}
+
+func (ns *notificationService) IssueChangeTitle(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldTitle string) {
+ if err := issue.LoadPullRequest(ctx); err != nil {
+ log.Error("issue.LoadPullRequest: %v", err)
+ return
+ }
+ if issue.IsPull && issues_model.HasWorkInProgressPrefix(oldTitle) && !issue.PullRequest.IsWorkInProgress(ctx) {
+ _ = ns.issueQueue.Push(issueNotificationOpts{
+ IssueID: issue.ID,
+ NotificationAuthorID: doer.ID,
+ })
+ }
+}
+
+func (ns *notificationService) MergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
+ _ = ns.issueQueue.Push(issueNotificationOpts{
+ IssueID: pr.Issue.ID,
+ NotificationAuthorID: doer.ID,
+ })
+}
+
+func (ns *notificationService) AutoMergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
+ ns.MergePullRequest(ctx, doer, pr)
+}
+
+func (ns *notificationService) NewPullRequest(ctx context.Context, pr *issues_model.PullRequest, mentions []*user_model.User) {
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("Unable to load issue: %d for pr: %d: Error: %v", pr.IssueID, pr.ID, err)
+ return
+ }
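+	// Collect repository watchers and issue participants, drop the poster,
+	// and add everyone who was explicitly mentioned; each remaining ID gets
+	// its own notification entry.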
+ toNotify := make(container.Set[int64], 32)
+ repoWatchers, err := repo_model.GetRepoWatchersIDs(ctx, pr.Issue.RepoID)
+ if err != nil {
+ log.Error("GetRepoWatchersIDs: %v", err)
+ return
+ }
+ for _, id := range repoWatchers {
+ toNotify.Add(id)
+ }
+ issueParticipants, err := issues_model.GetParticipantsIDsByIssueID(ctx, pr.IssueID)
+ if err != nil {
+ log.Error("GetParticipantsIDsByIssueID: %v", err)
+ return
+ }
+ for _, id := range issueParticipants {
+ toNotify.Add(id)
+ }
+ delete(toNotify, pr.Issue.PosterID)
+ for _, mention := range mentions {
+ toNotify.Add(mention.ID)
+ }
+ for receiverID := range toNotify {
+ _ = ns.issueQueue.Push(issueNotificationOpts{
+ IssueID: pr.Issue.ID,
+ NotificationAuthorID: pr.Issue.PosterID,
+ ReceiverID: receiverID,
+ })
+ }
+}
+
+func (ns *notificationService) PullRequestReview(ctx context.Context, pr *issues_model.PullRequest, r *issues_model.Review, c *issues_model.Comment, mentions []*user_model.User) {
+ opts := issueNotificationOpts{
+ IssueID: pr.Issue.ID,
+ NotificationAuthorID: r.Reviewer.ID,
+ }
+ if c != nil {
+ opts.CommentID = c.ID
+ }
+ _ = ns.issueQueue.Push(opts)
+ for _, mention := range mentions {
+ opts := issueNotificationOpts{
+ IssueID: pr.Issue.ID,
+ NotificationAuthorID: r.Reviewer.ID,
+ ReceiverID: mention.ID,
+ }
+ if c != nil {
+ opts.CommentID = c.ID
+ }
+ _ = ns.issueQueue.Push(opts)
+ }
+}
+
+func (ns *notificationService) PullRequestCodeComment(ctx context.Context, pr *issues_model.PullRequest, c *issues_model.Comment, mentions []*user_model.User) {
+ for _, mention := range mentions {
+ _ = ns.issueQueue.Push(issueNotificationOpts{
+ IssueID: pr.Issue.ID,
+ NotificationAuthorID: c.Poster.ID,
+ CommentID: c.ID,
+ ReceiverID: mention.ID,
+ })
+ }
+}
+
+func (ns *notificationService) PullRequestPushCommits(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest, comment *issues_model.Comment) {
+ opts := issueNotificationOpts{
+ IssueID: pr.IssueID,
+ NotificationAuthorID: doer.ID,
+ CommentID: comment.ID,
+ }
+ _ = ns.issueQueue.Push(opts)
+}
+
+func (ns *notificationService) PullReviewDismiss(ctx context.Context, doer *user_model.User, review *issues_model.Review, comment *issues_model.Comment) {
+ opts := issueNotificationOpts{
+ IssueID: review.IssueID,
+ NotificationAuthorID: doer.ID,
+ CommentID: comment.ID,
+ }
+ _ = ns.issueQueue.Push(opts)
+}
+
+func (ns *notificationService) IssueChangeAssignee(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, assignee *user_model.User, removed bool, comment *issues_model.Comment) {
+ if !removed && doer.ID != assignee.ID {
+ opts := issueNotificationOpts{
+ IssueID: issue.ID,
+ NotificationAuthorID: doer.ID,
+ ReceiverID: assignee.ID,
+ }
+
+ if comment != nil {
+ opts.CommentID = comment.ID
+ }
+
+ _ = ns.issueQueue.Push(opts)
+ }
+}
+
+func (ns *notificationService) PullRequestReviewRequest(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, reviewer *user_model.User, isRequest bool, comment *issues_model.Comment) {
+ if isRequest {
+ opts := issueNotificationOpts{
+ IssueID: issue.ID,
+ NotificationAuthorID: doer.ID,
+ ReceiverID: reviewer.ID,
+ }
+
+ if comment != nil {
+ opts.CommentID = comment.ID
+ }
+
+ _ = ns.issueQueue.Push(opts)
+ }
+}
+
+func (ns *notificationService) RepoPendingTransfer(ctx context.Context, doer, newOwner *user_model.User, repo *repo_model.Repository) {
+ err := db.WithTx(ctx, func(ctx context.Context) error {
+ return activities_model.CreateRepoTransferNotification(ctx, doer, newOwner, repo)
+ })
+ if err != nil {
+ log.Error("CreateRepoTransferNotification: %v", err)
+ }
+}
diff --git a/services/user/TestPurgeUser/public_key.yml b/services/user/TestPurgeUser/public_key.yml
new file mode 100644
index 0000000..75e409a
--- /dev/null
+++ b/services/user/TestPurgeUser/public_key.yml
@@ -0,0 +1,11 @@
+-
+ id: 1001
+ owner_id: 2
+ name: user2@localhost
+ fingerprint: "SHA256:7s+isLFauDv7QSbhAd0Z4OGIYJlQQ4YMtOH9LdjCZL8"
+ content: "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIHAv3EOUcaK918Fk9d7mWuVS7oQamif/PNwqnAf/Z34G user2@localhost"
+ mode: 2
+ type: 3
+ created_unix: 1733363453
+ updated_unix: 1733363453
+ login_source_id: 0
diff --git a/services/user/avatar.go b/services/user/avatar.go
new file mode 100644
index 0000000..3f87466
--- /dev/null
+++ b/services/user/avatar.go
@@ -0,0 +1,73 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "io"
+ "os"
+
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/avatar"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/storage"
+)
+
+// UploadAvatar saves a custom avatar for the user.
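+// The avatar columns are updated inside a transaction and the processed
+// image is written to avatar storage before the transaction commits, so a
+// storage failure rolls the database change back.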
+func UploadAvatar(ctx context.Context, u *user_model.User, data []byte) error {
+ avatarData, err := avatar.ProcessAvatarImage(data)
+ if err != nil {
+ return err
+ }
+
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ u.UseCustomAvatar = true
+ u.Avatar = avatar.HashAvatar(u.ID, data)
+ if err = user_model.UpdateUserCols(ctx, u, "use_custom_avatar", "avatar"); err != nil {
+ return fmt.Errorf("updateUser: %w", err)
+ }
+
+ if err := storage.SaveFrom(storage.Avatars, u.CustomAvatarRelativePath(), func(w io.Writer) error {
+ _, err := w.Write(avatarData)
+ return err
+ }); err != nil {
+		return fmt.Errorf("failed to save avatar %s: %w", u.CustomAvatarRelativePath(), err)
+ }
+
+ return committer.Commit()
+}
+
+// DeleteAvatar deletes the user's custom avatar.
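+// A missing avatar object in storage is tolerated, so the operation stays
+// idempotent when the file was already removed.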
+func DeleteAvatar(ctx context.Context, u *user_model.User) error {
+ aPath := u.CustomAvatarRelativePath()
+ log.Trace("DeleteAvatar[%d]: %s", u.ID, aPath)
+
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ hasAvatar := len(u.Avatar) > 0
+ u.UseCustomAvatar = false
+ u.Avatar = ""
+ if _, err := db.GetEngine(ctx).ID(u.ID).Cols("avatar, use_custom_avatar").Update(u); err != nil {
+ return fmt.Errorf("DeleteAvatar: %w", err)
+ }
+
+ if hasAvatar {
+ if err := storage.Avatars.Delete(aPath); err != nil {
+ if !errors.Is(err, os.ErrNotExist) {
+ return fmt.Errorf("failed to remove %s: %w", aPath, err)
+ }
+ log.Warn("Deleting avatar %s but it doesn't exist", aPath)
+ }
+ }
+
+ return nil
+ })
+}
diff --git a/services/user/avatar_test.go b/services/user/avatar_test.go
new file mode 100644
index 0000000..21fca8d
--- /dev/null
+++ b/services/user/avatar_test.go
@@ -0,0 +1,81 @@
+// Copyright The Forgejo Authors.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+ "bytes"
+ "image"
+ "image/png"
+ "os"
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/test"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+type alreadyDeletedStorage struct {
+ storage.DiscardStorage
+}
+
+func (s alreadyDeletedStorage) Delete(_ string) error {
+ return os.ErrNotExist
+}
+
+func TestUserDeleteAvatar(t *testing.T) {
+ myImage := image.NewRGBA(image.Rect(0, 0, 1, 1))
+ var buff bytes.Buffer
+	require.NoError(t, png.Encode(&buff, myImage))
+
+ t.Run("AtomicStorageFailure", func(t *testing.T) {
+ defer test.MockProtect[storage.ObjectStorage](&storage.Avatars)()
+
+ require.NoError(t, unittest.PrepareTestDatabase())
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+
+ err := UploadAvatar(db.DefaultContext, user, buff.Bytes())
+ require.NoError(t, err)
+ verification := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+ assert.NotEqual(t, "", verification.Avatar)
+
+ // fail to delete ...
+ storage.Avatars = storage.UninitializedStorage
+ err = DeleteAvatar(db.DefaultContext, user)
+ require.Error(t, err)
+
+ // ... the avatar is not removed from the database
+ verification = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+ assert.True(t, verification.UseCustomAvatar)
+
+ // already deleted ...
+ storage.Avatars = alreadyDeletedStorage{}
+ err = DeleteAvatar(db.DefaultContext, user)
+ require.NoError(t, err)
+
+ // ... the avatar is removed from the database
+ verification = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+ assert.Equal(t, "", verification.Avatar)
+ })
+
+ t.Run("Success", func(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+
+ err := UploadAvatar(db.DefaultContext, user, buff.Bytes())
+ require.NoError(t, err)
+ verification := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+ assert.NotEqual(t, "", verification.Avatar)
+
+ err = DeleteAvatar(db.DefaultContext, user)
+ require.NoError(t, err)
+
+ verification = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+ assert.Equal(t, "", verification.Avatar)
+ })
+}
diff --git a/services/user/block.go b/services/user/block.go
new file mode 100644
index 0000000..0b31119
--- /dev/null
+++ b/services/user/block.go
@@ -0,0 +1,95 @@
+// Copyright 2023 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+ "context"
+
+ model "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+
+ "xorm.io/builder"
+)
+
+// BlockUser adds a blocked user entry for userID to block blockID.
+// TODO: Figure out if instance admins should be immune to blocking.
+// TODO: Add more mechanisms, such as removing the blocked user as a
+// collaborator on repositories where the user is an owner.
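+//
+// A minimal usage sketch (the caller names are assumed for illustration):
+//
+//	if err := user_service.BlockUser(ctx, doer.ID, blocked.ID); err != nil {
+//		log.Error("BlockUser: %v", err)
+//	}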
+func BlockUser(ctx context.Context, userID, blockID int64) error {
+ if userID == blockID || user_model.IsBlocked(ctx, userID, blockID) {
+ return nil
+ }
+
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ // Add the blocked user entry.
+ _, err = db.GetEngine(ctx).Insert(&user_model.BlockedUser{UserID: userID, BlockID: blockID})
+ if err != nil {
+ return err
+ }
+
+	// The blocked user stops following the doer.
+ err = user_model.UnfollowUser(ctx, blockID, userID)
+ if err != nil {
+ return err
+ }
+
+	// The doer stops following the blocked user.
+ err = user_model.UnfollowUser(ctx, userID, blockID)
+ if err != nil {
+ return err
+ }
+
+	// Make the blocked user unwatch all repositories owned by the doer.
+ repoIDs, err := repo_model.GetWatchedRepoIDsOwnedBy(ctx, blockID, userID)
+ if err != nil {
+ return err
+ }
+
+ err = repo_model.UnwatchRepos(ctx, blockID, repoIDs)
+ if err != nil {
+ return err
+ }
+
+ // Remove blocked user as collaborator from repositories the user owns as an
+ // individual.
+ collabsID, err := repo_model.GetCollaboratorWithUser(ctx, userID, blockID)
+ if err != nil {
+ return err
+ }
+
+ _, err = db.GetEngine(ctx).In("id", collabsID).Delete(&repo_model.Collaboration{})
+ if err != nil {
+ return err
+ }
+
+	// Remove pending repository transfers, and set the status of those
+	// repositories back to ready.
+ pendingTransfersIDs, err := model.GetPendingTransferIDs(ctx, userID, blockID)
+ if err != nil {
+ return err
+ }
+
+	// Use a subquery instead of a JOIN, because not every database supports
+	// JOIN on an UPDATE query.
+ _, err = db.GetEngine(ctx).Table("repository").
+ In("id", builder.Select("repo_id").From("repo_transfer").Where(builder.In("id", pendingTransfersIDs))).
+ Cols("status").
+ Update(&repo_model.Repository{Status: repo_model.RepositoryReady})
+ if err != nil {
+ return err
+ }
+
+ _, err = db.GetEngine(ctx).In("id", pendingTransfersIDs).Delete(&model.RepoTransfer{})
+ if err != nil {
+ return err
+ }
+
+ return committer.Commit()
+}
diff --git a/services/user/block_test.go b/services/user/block_test.go
new file mode 100644
index 0000000..f9e95ed
--- /dev/null
+++ b/services/user/block_test.go
@@ -0,0 +1,92 @@
+// Copyright 2023 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+ "testing"
+
+ model "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+// TestBlockUser ensures that blocking a user triggers the expected side
+// effects, such as the users unfollowing each other.
+func TestBlockUser(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 5})
+ blockedUser := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+
+ t.Run("Follow", func(t *testing.T) {
+ defer user_model.UnblockUser(db.DefaultContext, doer.ID, blockedUser.ID)
+
+ // Follow each other.
+ require.NoError(t, user_model.FollowUser(db.DefaultContext, doer.ID, blockedUser.ID))
+ require.NoError(t, user_model.FollowUser(db.DefaultContext, blockedUser.ID, doer.ID))
+
+ require.NoError(t, BlockUser(db.DefaultContext, doer.ID, blockedUser.ID))
+
+ // Ensure they aren't following each other anymore.
+ assert.False(t, user_model.IsFollowing(db.DefaultContext, doer.ID, blockedUser.ID))
+ assert.False(t, user_model.IsFollowing(db.DefaultContext, blockedUser.ID, doer.ID))
+ })
+
+ t.Run("Watch", func(t *testing.T) {
+ defer user_model.UnblockUser(db.DefaultContext, doer.ID, blockedUser.ID)
+
+		// The blocked user watches a repository owned by the doer.
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerID: doer.ID})
+ require.NoError(t, repo_model.WatchRepo(db.DefaultContext, blockedUser.ID, repo.ID, true))
+
+ require.NoError(t, BlockUser(db.DefaultContext, doer.ID, blockedUser.ID))
+
+		// Ensure the blocked user is no longer watching the doer's repository.
+ assert.False(t, repo_model.IsWatching(db.DefaultContext, blockedUser.ID, repo.ID))
+ })
+
+ t.Run("Collaboration", func(t *testing.T) {
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 16})
+ blockedUser := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 18})
+ repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 22, OwnerID: doer.ID})
+ repo2 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 21, OwnerID: doer.ID})
+ defer user_model.UnblockUser(db.DefaultContext, doer.ID, blockedUser.ID)
+
+ isBlockedUserCollab := func(repo *repo_model.Repository) bool {
+ isCollaborator, err := repo_model.IsCollaborator(db.DefaultContext, repo.ID, blockedUser.ID)
+ require.NoError(t, err)
+ return isCollaborator
+ }
+
+ assert.True(t, isBlockedUserCollab(repo1))
+ assert.True(t, isBlockedUserCollab(repo2))
+
+ require.NoError(t, BlockUser(db.DefaultContext, doer.ID, blockedUser.ID))
+
+ assert.False(t, isBlockedUserCollab(repo1))
+ assert.False(t, isBlockedUserCollab(repo2))
+ })
+
+ t.Run("Pending transfers", func(t *testing.T) {
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+ blockedUser := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3})
+ defer user_model.UnblockUser(db.DefaultContext, doer.ID, blockedUser.ID)
+
+ unittest.AssertExistsIf(t, true, &repo_model.Repository{ID: 3, OwnerID: blockedUser.ID, Status: repo_model.RepositoryPendingTransfer})
+ unittest.AssertExistsIf(t, true, &model.RepoTransfer{ID: 1, RecipientID: doer.ID, DoerID: blockedUser.ID})
+
+ require.NoError(t, BlockUser(db.DefaultContext, doer.ID, blockedUser.ID))
+
+ unittest.AssertExistsIf(t, false, &model.RepoTransfer{ID: 1, RecipientID: doer.ID, DoerID: blockedUser.ID})
+
+		// Don't use AssertExistsIf, as it doesn't include zero values, such as
+		// `repo_model.RepositoryReady`, in the condition.
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 3, OwnerID: blockedUser.ID})
+ assert.Equal(t, repo_model.RepositoryReady, repo.Status)
+ })
+}
diff --git a/services/user/delete.go b/services/user/delete.go
new file mode 100644
index 0000000..587e3c2
--- /dev/null
+++ b/services/user/delete.go
@@ -0,0 +1,224 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+ "context"
+ "fmt"
+ "time"
+
+ _ "image/jpeg" // Needed for jpeg support
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ activities_model "code.gitea.io/gitea/models/activities"
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ auth_model "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/organization"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ pull_model "code.gitea.io/gitea/models/pull"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+ issue_service "code.gitea.io/gitea/services/issue"
+
+ "xorm.io/builder"
+)
+
+// deleteUser deletes the models associated with a user.
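+// It decrements watch, star and follow counters on the affected repositories
+// and users, removes the user's rows from the related tables (in batches
+// where necessary), and finally deletes the user row itself.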
+func deleteUser(ctx context.Context, u *user_model.User, purge bool) (err error) {
+ e := db.GetEngine(ctx)
+
+ // ***** START: Watch *****
+ watchedRepoIDs, err := db.FindIDs(ctx, "watch", "watch.repo_id",
+ builder.Eq{"watch.user_id": u.ID}.
+ And(builder.Neq{"watch.mode": repo_model.WatchModeDont}))
+ if err != nil {
+ return fmt.Errorf("get all watches: %w", err)
+ }
+ if err = db.DecrByIDs(ctx, watchedRepoIDs, "num_watches", new(repo_model.Repository)); err != nil {
+ return fmt.Errorf("decrease repository num_watches: %w", err)
+ }
+ // ***** END: Watch *****
+
+ // ***** START: Star *****
+ starredRepoIDs, err := db.FindIDs(ctx, "star", "star.repo_id",
+ builder.Eq{"star.uid": u.ID})
+ if err != nil {
+ return fmt.Errorf("get all stars: %w", err)
+ } else if err = db.DecrByIDs(ctx, starredRepoIDs, "num_stars", new(repo_model.Repository)); err != nil {
+ return fmt.Errorf("decrease repository num_stars: %w", err)
+ }
+ // ***** END: Star *****
+
+ // ***** START: Follow *****
+ followeeIDs, err := db.FindIDs(ctx, "follow", "follow.follow_id",
+ builder.Eq{"follow.user_id": u.ID})
+ if err != nil {
+ return fmt.Errorf("get all followees: %w", err)
+ } else if err = db.DecrByIDs(ctx, followeeIDs, "num_followers", new(user_model.User)); err != nil {
+ return fmt.Errorf("decrease user num_followers: %w", err)
+ }
+
+ followerIDs, err := db.FindIDs(ctx, "follow", "follow.user_id",
+ builder.Eq{"follow.follow_id": u.ID})
+ if err != nil {
+ return fmt.Errorf("get all followers: %w", err)
+ } else if err = db.DecrByIDs(ctx, followerIDs, "num_following", new(user_model.User)); err != nil {
+ return fmt.Errorf("decrease user num_following: %w", err)
+ }
+ // ***** END: Follow *****
+
+ if err = db.DeleteBeans(ctx,
+ &auth_model.AccessToken{UID: u.ID},
+ &repo_model.Collaboration{UserID: u.ID},
+ &access_model.Access{UserID: u.ID},
+ &repo_model.Watch{UserID: u.ID},
+ &repo_model.Star{UID: u.ID},
+ &user_model.Follow{UserID: u.ID},
+ &user_model.Follow{FollowID: u.ID},
+ &activities_model.Action{UserID: u.ID},
+ &issues_model.IssueUser{UID: u.ID},
+ &user_model.EmailAddress{UID: u.ID},
+ &user_model.UserOpenID{UID: u.ID},
+ &issues_model.Reaction{UserID: u.ID},
+ &organization.TeamUser{UID: u.ID},
+ &issues_model.Stopwatch{UserID: u.ID},
+ &user_model.Setting{UserID: u.ID},
+ &user_model.UserBadge{UserID: u.ID},
+ &pull_model.AutoMerge{DoerID: u.ID},
+ &pull_model.ReviewState{UserID: u.ID},
+ &user_model.Redirect{RedirectUserID: u.ID},
+ &actions_model.ActionRunner{OwnerID: u.ID},
+ &user_model.BlockedUser{BlockID: u.ID},
+ &user_model.BlockedUser{UserID: u.ID},
+ &actions_model.ActionRunnerToken{OwnerID: u.ID},
+ &auth_model.AuthorizationToken{UID: u.ID},
+ ); err != nil {
+ return fmt.Errorf("deleteBeans: %w", err)
+ }
+
+ if err := auth_model.DeleteOAuth2RelictsByUserID(ctx, u.ID); err != nil {
+ return err
+ }
+
+ if purge || (setting.Service.UserDeleteWithCommentsMaxTime != 0 &&
+ u.CreatedUnix.AsTime().Add(setting.Service.UserDeleteWithCommentsMaxTime).After(time.Now())) {
+ // Delete Comments
+ const batchSize = 50
+ for {
+ comments := make([]*issues_model.Comment, 0, batchSize)
+ if err = e.Where("type=? AND poster_id=?", issues_model.CommentTypeComment, u.ID).Limit(batchSize, 0).Find(&comments); err != nil {
+ return err
+ }
+ if len(comments) == 0 {
+ break
+ }
+
+ for _, comment := range comments {
+ if err = issues_model.DeleteComment(ctx, comment); err != nil {
+ return err
+ }
+ }
+ }
+
+ // Delete Reactions
+ if err = issues_model.DeleteReaction(ctx, &issues_model.ReactionOptions{DoerID: u.ID}); err != nil {
+ return err
+ }
+ }
+
+ // ***** START: Issues *****
+ if purge {
+ const batchSize = 50
+
+ for {
+ issues := make([]*issues_model.Issue, 0, batchSize)
+ if err = e.Where("poster_id=?", u.ID).Limit(batchSize, 0).Find(&issues); err != nil {
+ return err
+ }
+ if len(issues) == 0 {
+ break
+ }
+
+ for _, issue := range issues {
+ // NOTE: Don't open git repositories just to remove the reference data,
+ // `git gc` is able to remove that reference which is run as a cron job
+ // by default. Also use the deleted user as doer to delete the issue.
+ if err = issue_service.DeleteIssue(ctx, u, nil, issue); err != nil {
+ return err
+ }
+ }
+ }
+ }
+ // ***** END: Issues *****
+
+ // ***** START: Branch Protections *****
+ {
+ const batchSize = 50
+ for start := 0; ; start += batchSize {
+ protections := make([]*git_model.ProtectedBranch, 0, batchSize)
+ // @perf: We can't filter on DB side by u.ID, as those IDs are serialized as JSON strings.
+ // We could filter down with `WHERE repo_id IN (reposWithPushPermission(u))`,
+ // though that query will be quite complex and tricky to maintain (compare `getRepoAssignees()`).
+ // Also, as we didn't update branch protections when removing entries from `access` table,
+ // it's safer to iterate all protected branches.
+ if err = e.Limit(batchSize, start).Find(&protections); err != nil {
+ return fmt.Errorf("findProtectedBranches: %w", err)
+ }
+ if len(protections) == 0 {
+ break
+ }
+ for _, p := range protections {
+ if err := git_model.RemoveUserIDFromProtectedBranch(ctx, p, u.ID); err != nil {
+ return err
+ }
+ }
+ }
+ }
+ // ***** END: Branch Protections *****
+
+ // ***** START: PublicKey *****
+ if _, err = db.DeleteByBean(ctx, &asymkey_model.PublicKey{OwnerID: u.ID}); err != nil {
+ return fmt.Errorf("deletePublicKeys: %w", err)
+ }
+ // ***** END: PublicKey *****
+
+ // ***** START: GPGPublicKey *****
+ keys, err := db.Find[asymkey_model.GPGKey](ctx, asymkey_model.FindGPGKeyOptions{
+ OwnerID: u.ID,
+ })
+ if err != nil {
+ return fmt.Errorf("ListGPGKeys: %w", err)
+ }
+ // Delete GPGKeyImport(s).
+ for _, key := range keys {
+ if _, err = db.DeleteByBean(ctx, &asymkey_model.GPGKeyImport{KeyID: key.KeyID}); err != nil {
+ return fmt.Errorf("deleteGPGKeyImports: %w", err)
+ }
+ }
+ if _, err = db.DeleteByBean(ctx, &asymkey_model.GPGKey{OwnerID: u.ID}); err != nil {
+ return fmt.Errorf("deleteGPGKeys: %w", err)
+ }
+ // ***** END: GPGPublicKey *****
+
+ // Clear assignee.
+ if _, err = db.DeleteByBean(ctx, &issues_model.IssueAssignees{AssigneeID: u.ID}); err != nil {
+ return fmt.Errorf("clear assignee: %w", err)
+ }
+
+ // ***** START: ExternalLoginUser *****
+ if err = user_model.RemoveAllAccountLinks(ctx, u); err != nil {
+ return fmt.Errorf("ExternalLoginUser: %w", err)
+ }
+ // ***** END: ExternalLoginUser *****
+
+ if _, err = db.DeleteByID[user_model.User](ctx, u.ID); err != nil {
+ return fmt.Errorf("delete: %w", err)
+ }
+
+ return nil
+}
diff --git a/services/user/email.go b/services/user/email.go
new file mode 100644
index 0000000..e872526
--- /dev/null
+++ b/services/user/email.go
@@ -0,0 +1,232 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+ "context"
+ "errors"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/mailer"
+)
+
+// AdminAddOrSetPrimaryEmailAddress is used by admins to add or set a user's primary email address
+func AdminAddOrSetPrimaryEmailAddress(ctx context.Context, u *user_model.User, emailStr string) error {
+ if strings.EqualFold(u.Email, emailStr) {
+ return nil
+ }
+
+ if err := user_model.ValidateEmailForAdmin(emailStr); err != nil {
+ return err
+ }
+
+ // Check if address exists already
+ email, err := user_model.GetEmailAddressByEmail(ctx, emailStr)
+ if err != nil && !errors.Is(err, util.ErrNotExist) {
+ return err
+ }
+ if email != nil && email.UID != u.ID {
+ return user_model.ErrEmailAlreadyUsed{Email: emailStr}
+ }
+
+ // Update old primary address
+ primary, err := user_model.GetPrimaryEmailAddressOfUser(ctx, u.ID)
+ if err != nil {
+ return err
+ }
+
+ primary.IsPrimary = false
+ if err := user_model.UpdateEmailAddress(ctx, primary); err != nil {
+ return err
+ }
+
+ // Insert new or update existing address
+ if email != nil {
+ email.IsPrimary = true
+ email.IsActivated = true
+ if err := user_model.UpdateEmailAddress(ctx, email); err != nil {
+ return err
+ }
+ } else {
+ email = &user_model.EmailAddress{
+ UID: u.ID,
+ Email: emailStr,
+ IsActivated: true,
+ IsPrimary: true,
+ }
+ if _, err := user_model.InsertEmailAddress(ctx, email); err != nil {
+ return err
+ }
+ }
+
+ u.Email = emailStr
+
+ return user_model.UpdateUserCols(ctx, u, "email")
+}
+
+func ReplacePrimaryEmailAddress(ctx context.Context, u *user_model.User, emailStr string) error {
+ if strings.EqualFold(u.Email, emailStr) {
+ return nil
+ }
+
+ if err := user_model.ValidateEmail(emailStr); err != nil {
+ return err
+ }
+
+ if !u.IsOrganization() {
+ // Check if address exists already
+ email, err := user_model.GetEmailAddressByEmail(ctx, emailStr)
+ if err != nil && !errors.Is(err, util.ErrNotExist) {
+ return err
+ }
+ if email != nil {
+ if email.IsPrimary && email.UID == u.ID {
+ return nil
+ }
+ return user_model.ErrEmailAlreadyUsed{Email: emailStr}
+ }
+
+ // Remove old primary address
+ primary, err := user_model.GetPrimaryEmailAddressOfUser(ctx, u.ID)
+ if err != nil {
+ return err
+ }
+ if _, err := db.DeleteByID[user_model.EmailAddress](ctx, primary.ID); err != nil {
+ return err
+ }
+
+ // Insert new primary address
+ email = &user_model.EmailAddress{
+ UID: u.ID,
+ Email: emailStr,
+ IsActivated: true,
+ IsPrimary: true,
+ }
+ if _, err := user_model.InsertEmailAddress(ctx, email); err != nil {
+ return err
+ }
+ }
+
+ u.Email = emailStr
+
+ return user_model.UpdateUserCols(ctx, u, "email")
+}
+
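+// AddEmailAddresses adds the given addresses to the user; the new addresses
+// start out unactivated when the instance requires e-mail confirmation.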
+func AddEmailAddresses(ctx context.Context, u *user_model.User, emails []string) error {
+ for _, emailStr := range emails {
+ if err := user_model.ValidateEmail(emailStr); err != nil {
+ return err
+ }
+
+ // Check if address exists already
+ email, err := user_model.GetEmailAddressByEmail(ctx, emailStr)
+ if err != nil && !errors.Is(err, util.ErrNotExist) {
+ return err
+ }
+ if email != nil {
+ return user_model.ErrEmailAlreadyUsed{Email: emailStr}
+ }
+
+ // Insert new address
+ email = &user_model.EmailAddress{
+ UID: u.ID,
+ Email: emailStr,
+ IsActivated: !setting.Service.RegisterEmailConfirm,
+ IsPrimary: false,
+ }
+ if _, err := user_model.InsertEmailAddress(ctx, email); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// ReplaceInactivePrimaryEmail replaces the primary email of a given user, even if the primary is not yet activated.
+func ReplaceInactivePrimaryEmail(ctx context.Context, oldEmail string, email *user_model.EmailAddress) error {
+ user := &user_model.User{}
+ has, err := db.GetEngine(ctx).ID(email.UID).Get(user)
+ if err != nil {
+ return err
+ } else if !has {
+ return user_model.ErrUserNotExist{
+ UID: email.UID,
+ }
+ }
+
+ err = AddEmailAddresses(ctx, user, []string{email.Email})
+ if err != nil {
+ return err
+ }
+
+ err = MakeEmailAddressPrimary(ctx, user, email, false)
+ if err != nil {
+ return err
+ }
+
+ return DeleteEmailAddresses(ctx, user, []string{oldEmail})
+}
+
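+// DeleteEmailAddresses removes the given addresses from the user; primary
+// addresses cannot be removed this way.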
+func DeleteEmailAddresses(ctx context.Context, u *user_model.User, emails []string) error {
+ for _, emailStr := range emails {
+ // Check if address exists
+ email, err := user_model.GetEmailAddressOfUser(ctx, emailStr, u.ID)
+ if err != nil {
+ return err
+ }
+ if email.IsPrimary {
+ return user_model.ErrPrimaryEmailCannotDelete{Email: emailStr}
+ }
+
+ // Remove address
+ if _, err := db.DeleteByID[user_model.EmailAddress](ctx, email.ID); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
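+// MakeEmailAddressPrimary promotes newPrimaryEmail to the user's primary
+// address inside a transaction: it updates the user row, demotes the old
+// primary address, and promotes the new one. When notify is true, a
+// notification mail about the change is sent afterwards.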
+func MakeEmailAddressPrimary(ctx context.Context, u *user_model.User, newPrimaryEmail *user_model.EmailAddress, notify bool) error {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+ sess := db.GetEngine(ctx)
+
+ oldPrimaryEmail := u.Email
+
+ // 1. Update user table
+ u.Email = newPrimaryEmail.Email
+ if _, err = sess.ID(u.ID).Cols("email").Update(u); err != nil {
+ return err
+ }
+
+ // 2. Update old primary email
+ if _, err = sess.Where("uid=? AND is_primary=?", u.ID, true).Cols("is_primary").Update(&user_model.EmailAddress{
+ IsPrimary: false,
+ }); err != nil {
+ return err
+ }
+
+	// 3. Update new primary email
+ newPrimaryEmail.IsPrimary = true
+ if _, err = sess.ID(newPrimaryEmail.ID).Cols("is_primary").Update(newPrimaryEmail); err != nil {
+ return err
+ }
+
+ if err := committer.Commit(); err != nil {
+ return err
+ }
+
+ if notify {
+ return mailer.SendPrimaryMailChange(u, oldPrimaryEmail)
+ }
+ return nil
+}
diff --git a/services/user/email_test.go b/services/user/email_test.go
new file mode 100644
index 0000000..86f31a8
--- /dev/null
+++ b/services/user/email_test.go
@@ -0,0 +1,178 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ organization_model "code.gitea.io/gitea/models/organization"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/gobwas/glob"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestAdminAddOrSetPrimaryEmailAddress(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 27})
+
+ emails, err := user_model.GetEmailAddresses(db.DefaultContext, user.ID)
+ require.NoError(t, err)
+ assert.Len(t, emails, 1)
+
+ primary, err := user_model.GetPrimaryEmailAddressOfUser(db.DefaultContext, user.ID)
+ require.NoError(t, err)
+ assert.NotEqual(t, "new-primary@example.com", primary.Email)
+ assert.Equal(t, user.Email, primary.Email)
+
+ require.NoError(t, AdminAddOrSetPrimaryEmailAddress(db.DefaultContext, user, "new-primary@example.com"))
+
+ primary, err = user_model.GetPrimaryEmailAddressOfUser(db.DefaultContext, user.ID)
+ require.NoError(t, err)
+ assert.Equal(t, "new-primary@example.com", primary.Email)
+ assert.Equal(t, user.Email, primary.Email)
+
+ emails, err = user_model.GetEmailAddresses(db.DefaultContext, user.ID)
+ require.NoError(t, err)
+ assert.Len(t, emails, 2)
+
+ setting.Service.EmailDomainAllowList = []glob.Glob{glob.MustCompile("example.org")}
+ defer func() {
+ setting.Service.EmailDomainAllowList = []glob.Glob{}
+ }()
+
+ require.NoError(t, AdminAddOrSetPrimaryEmailAddress(db.DefaultContext, user, "new-primary2@example2.com"))
+
+ primary, err = user_model.GetPrimaryEmailAddressOfUser(db.DefaultContext, user.ID)
+ require.NoError(t, err)
+ assert.Equal(t, "new-primary2@example2.com", primary.Email)
+ assert.Equal(t, user.Email, primary.Email)
+
+ require.NoError(t, AdminAddOrSetPrimaryEmailAddress(db.DefaultContext, user, "user27@example.com"))
+
+ primary, err = user_model.GetPrimaryEmailAddressOfUser(db.DefaultContext, user.ID)
+ require.NoError(t, err)
+ assert.Equal(t, "user27@example.com", primary.Email)
+ assert.Equal(t, user.Email, primary.Email)
+
+ emails, err = user_model.GetEmailAddresses(db.DefaultContext, user.ID)
+ require.NoError(t, err)
+ assert.Len(t, emails, 3)
+}
+
+func TestReplacePrimaryEmailAddress(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ t.Run("User", func(t *testing.T) {
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 13})
+
+ emails, err := user_model.GetEmailAddresses(db.DefaultContext, user.ID)
+ require.NoError(t, err)
+ assert.Len(t, emails, 1)
+
+ primary, err := user_model.GetPrimaryEmailAddressOfUser(db.DefaultContext, user.ID)
+ require.NoError(t, err)
+ assert.NotEqual(t, "primary-13@example.com", primary.Email)
+ assert.Equal(t, user.Email, primary.Email)
+
+ require.NoError(t, ReplacePrimaryEmailAddress(db.DefaultContext, user, "primary-13@example.com"))
+
+ primary, err = user_model.GetPrimaryEmailAddressOfUser(db.DefaultContext, user.ID)
+ require.NoError(t, err)
+ assert.Equal(t, "primary-13@example.com", primary.Email)
+ assert.Equal(t, user.Email, primary.Email)
+
+ emails, err = user_model.GetEmailAddresses(db.DefaultContext, user.ID)
+ require.NoError(t, err)
+ assert.Len(t, emails, 1)
+
+ require.NoError(t, ReplacePrimaryEmailAddress(db.DefaultContext, user, "primary-13@example.com"))
+ })
+
+ t.Run("Organization", func(t *testing.T) {
+ org := unittest.AssertExistsAndLoadBean(t, &organization_model.Organization{ID: 3})
+
+ assert.Equal(t, "org3@example.com", org.Email)
+
+ require.NoError(t, ReplacePrimaryEmailAddress(db.DefaultContext, org.AsUser(), "primary-org@example.com"))
+
+ assert.Equal(t, "primary-org@example.com", org.Email)
+ })
+}
+
+func TestAddEmailAddresses(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+
+ require.Error(t, AddEmailAddresses(db.DefaultContext, user, []string{" invalid email "}))
+
+ emails := []string{"user1234@example.com", "user5678@example.com"}
+
+ require.NoError(t, AddEmailAddresses(db.DefaultContext, user, emails))
+
+ err := AddEmailAddresses(db.DefaultContext, user, emails)
+ require.Error(t, err)
+ assert.True(t, user_model.IsErrEmailAlreadyUsed(err))
+}
+
+func TestReplaceInactivePrimaryEmail(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ email := &user_model.EmailAddress{
+ Email: "user9999999@example.com",
+ UID: 9999999,
+ }
+ err := ReplaceInactivePrimaryEmail(db.DefaultContext, "user10@example.com", email)
+ require.Error(t, err)
+ assert.True(t, user_model.IsErrUserNotExist(err))
+
+ email = &user_model.EmailAddress{
+ Email: "user201@example.com",
+ UID: 10,
+ }
+ err = ReplaceInactivePrimaryEmail(db.DefaultContext, "user10@example.com", email)
+ require.NoError(t, err)
+
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 10})
+ assert.Equal(t, "user201@example.com", user.Email)
+}
+
+func TestDeleteEmailAddresses(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+
+ emails := []string{"user2-2@example.com"}
+
+ err := DeleteEmailAddresses(db.DefaultContext, user, emails)
+ require.NoError(t, err)
+
+ err = DeleteEmailAddresses(db.DefaultContext, user, emails)
+ require.Error(t, err)
+ assert.True(t, user_model.IsErrEmailAddressNotExist(err))
+
+ emails = []string{"user2@example.com"}
+
+ err = DeleteEmailAddresses(db.DefaultContext, user, emails)
+ require.Error(t, err)
+ assert.True(t, user_model.IsErrPrimaryEmailCannotDelete(err))
+}
+
+func TestMakeEmailAddressPrimary(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ newPrimaryEmail := unittest.AssertExistsAndLoadBean(t, &user_model.EmailAddress{ID: 35, UID: user.ID}, "is_primary = false")
+
+ require.NoError(t, MakeEmailAddressPrimary(db.DefaultContext, user, newPrimaryEmail, false))
+
+ unittest.AssertExistsIf(t, true, &user_model.User{ID: 2, Email: newPrimaryEmail.Email})
+ unittest.AssertExistsIf(t, true, &user_model.EmailAddress{ID: 3, UID: user.ID}, "is_primary = false")
+ unittest.AssertExistsIf(t, true, &user_model.EmailAddress{ID: 35, UID: user.ID, IsPrimary: true})
+}
diff --git a/services/user/update.go b/services/user/update.go
new file mode 100644
index 0000000..26c9050
--- /dev/null
+++ b/services/user/update.go
@@ -0,0 +1,233 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models"
+ auth_model "code.gitea.io/gitea/models/auth"
+ user_model "code.gitea.io/gitea/models/user"
+ password_module "code.gitea.io/gitea/modules/auth/password"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/services/mailer"
+)
+
+type UpdateOptions struct {
+ KeepEmailPrivate optional.Option[bool]
+ FullName optional.Option[string]
+ Website optional.Option[string]
+ Location optional.Option[string]
+ Description optional.Option[string]
+ Pronouns optional.Option[string]
+ AllowGitHook optional.Option[bool]
+ AllowImportLocal optional.Option[bool]
+ MaxRepoCreation optional.Option[int]
+ IsRestricted optional.Option[bool]
+ Visibility optional.Option[structs.VisibleType]
+ KeepActivityPrivate optional.Option[bool]
+ Language optional.Option[string]
+ Theme optional.Option[string]
+ DiffViewStyle optional.Option[string]
+ AllowCreateOrganization optional.Option[bool]
+ IsActive optional.Option[bool]
+ IsAdmin optional.Option[bool]
+ EmailNotificationsPreference optional.Option[string]
+ SetLastLogin bool
+ RepoAdminChangeTeamAccess optional.Option[bool]
+ EnableRepoUnitHints optional.Option[bool]
+}
+
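+// UpdateUser applies the given options to the user and persists exactly the
+// columns that were set. A minimal sketch of a hypothetical caller (the
+// user_service alias is assumed for illustration):
+//
+//	opts := &user_service.UpdateOptions{
+//		FullName: optional.Some("Jane Doe"),
+//		Location: optional.Some("Berlin"),
+//	}
+//	if err := user_service.UpdateUser(ctx, u, opts); err != nil {
+//		log.Error("UpdateUser: %v", err)
+//	}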
+func UpdateUser(ctx context.Context, u *user_model.User, opts *UpdateOptions) error {
+ cols := make([]string, 0, 20)
+
+ if opts.KeepEmailPrivate.Has() {
+ u.KeepEmailPrivate = opts.KeepEmailPrivate.Value()
+
+ cols = append(cols, "keep_email_private")
+ }
+
+ if opts.FullName.Has() {
+ u.FullName = opts.FullName.Value()
+
+ cols = append(cols, "full_name")
+ }
+ if opts.Pronouns.Has() {
+ u.Pronouns = opts.Pronouns.Value()
+
+ cols = append(cols, "pronouns")
+ }
+ if opts.Website.Has() {
+ u.Website = opts.Website.Value()
+
+ cols = append(cols, "website")
+ }
+ if opts.Location.Has() {
+ u.Location = opts.Location.Value()
+
+ cols = append(cols, "location")
+ }
+ if opts.Description.Has() {
+ u.Description = opts.Description.Value()
+
+ cols = append(cols, "description")
+ }
+ if opts.Language.Has() {
+ u.Language = opts.Language.Value()
+
+ cols = append(cols, "language")
+ }
+ if opts.Theme.Has() {
+ u.Theme = opts.Theme.Value()
+
+ cols = append(cols, "theme")
+ }
+ if opts.DiffViewStyle.Has() {
+ u.DiffViewStyle = opts.DiffViewStyle.Value()
+
+ cols = append(cols, "diff_view_style")
+ }
+ if opts.EnableRepoUnitHints.Has() {
+ u.EnableRepoUnitHints = opts.EnableRepoUnitHints.Value()
+
+ cols = append(cols, "enable_repo_unit_hints")
+ }
+
+ if opts.AllowGitHook.Has() {
+ u.AllowGitHook = opts.AllowGitHook.Value()
+
+ cols = append(cols, "allow_git_hook")
+ }
+ if opts.AllowImportLocal.Has() {
+ u.AllowImportLocal = opts.AllowImportLocal.Value()
+
+ cols = append(cols, "allow_import_local")
+ }
+
+ if opts.MaxRepoCreation.Has() {
+ u.MaxRepoCreation = opts.MaxRepoCreation.Value()
+
+ cols = append(cols, "max_repo_creation")
+ }
+
+ if opts.IsActive.Has() {
+ u.IsActive = opts.IsActive.Value()
+
+ cols = append(cols, "is_active")
+ }
+ if opts.IsRestricted.Has() {
+ u.IsRestricted = opts.IsRestricted.Value()
+
+ cols = append(cols, "is_restricted")
+ }
+ if opts.IsAdmin.Has() {
+ if !opts.IsAdmin.Value() && user_model.IsLastAdminUser(ctx, u) {
+ return models.ErrDeleteLastAdminUser{UID: u.ID}
+ }
+
+ u.IsAdmin = opts.IsAdmin.Value()
+
+ cols = append(cols, "is_admin")
+ }
+
+ if opts.Visibility.Has() {
+ if !u.IsOrganization() && !setting.Service.AllowedUserVisibilityModesSlice.IsAllowedVisibility(opts.Visibility.Value()) {
+ return fmt.Errorf("visibility mode not allowed: %s", opts.Visibility.Value().String())
+ }
+ u.Visibility = opts.Visibility.Value()
+
+ cols = append(cols, "visibility")
+ }
+ if opts.KeepActivityPrivate.Has() {
+ u.KeepActivityPrivate = opts.KeepActivityPrivate.Value()
+
+ cols = append(cols, "keep_activity_private")
+ }
+
+ if opts.AllowCreateOrganization.Has() {
+ u.AllowCreateOrganization = opts.AllowCreateOrganization.Value()
+
+ cols = append(cols, "allow_create_organization")
+ }
+ if opts.RepoAdminChangeTeamAccess.Has() {
+ u.RepoAdminChangeTeamAccess = opts.RepoAdminChangeTeamAccess.Value()
+
+ cols = append(cols, "repo_admin_change_team_access")
+ }
+
+ if opts.EmailNotificationsPreference.Has() {
+ u.EmailNotificationsPreference = opts.EmailNotificationsPreference.Value()
+
+ cols = append(cols, "email_notifications_preference")
+ }
+
+ if opts.SetLastLogin {
+ u.SetLastLogin()
+
+ cols = append(cols, "last_login_unix")
+ }
+
+ return user_model.UpdateUserCols(ctx, u, cols...)
+}
+
+type UpdateAuthOptions struct {
+ LoginSource optional.Option[int64]
+ LoginName optional.Option[string]
+ Password optional.Option[string]
+ MustChangePassword optional.Option[bool]
+ ProhibitLogin optional.Option[bool]
+}
+
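+// UpdateAuth updates the authentication-related columns of a user. Password
+// changes are checked for minimum length, complexity and known-compromised
+// values; a notification mail is sent after a successful password change.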
+func UpdateAuth(ctx context.Context, u *user_model.User, opts *UpdateAuthOptions) error {
+ if opts.LoginSource.Has() {
+ source, err := auth_model.GetSourceByID(ctx, opts.LoginSource.Value())
+ if err != nil {
+ return err
+ }
+
+ u.LoginType = source.Type
+ u.LoginSource = source.ID
+ }
+ if opts.LoginName.Has() {
+ u.LoginName = opts.LoginName.Value()
+ }
+
+ if opts.Password.Has() && (u.IsLocal() || u.IsOAuth2()) {
+ password := opts.Password.Value()
+
+ if len(password) < setting.MinPasswordLength {
+ return password_module.ErrMinLength
+ }
+ if !password_module.IsComplexEnough(password) {
+ return password_module.ErrComplexity
+ }
+ if err := password_module.IsPwned(ctx, password); err != nil {
+ return err
+ }
+
+ if err := u.SetPassword(password); err != nil {
+ return err
+ }
+ }
+
+ if opts.MustChangePassword.Has() {
+ u.MustChangePassword = opts.MustChangePassword.Value()
+ }
+ if opts.ProhibitLogin.Has() {
+ u.ProhibitLogin = opts.ProhibitLogin.Value()
+ }
+
+ if err := user_model.UpdateUserCols(ctx, u, "login_type", "login_source", "login_name", "passwd", "passwd_hash_algo", "salt", "must_change_password", "prohibit_login"); err != nil {
+ return err
+ }
+
+ if opts.Password.Has() {
+ return mailer.SendPasswordChange(u)
+ }
+
+ return nil
+}
diff --git a/services/user/update_test.go b/services/user/update_test.go
new file mode 100644
index 0000000..11379d4
--- /dev/null
+++ b/services/user/update_test.go
@@ -0,0 +1,121 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ password_module "code.gitea.io/gitea/modules/auth/password"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/structs"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestUpdateUser(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ admin := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+
+ require.Error(t, UpdateUser(db.DefaultContext, admin, &UpdateOptions{
+ IsAdmin: optional.Some(false),
+ }))
+
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 28})
+
+ opts := &UpdateOptions{
+ KeepEmailPrivate: optional.Some(false),
+ FullName: optional.Some("Changed Name"),
+ Website: optional.Some("https://gitea.com/"),
+ Location: optional.Some("location"),
+ Description: optional.Some("description"),
+ AllowGitHook: optional.Some(true),
+ AllowImportLocal: optional.Some(true),
+ MaxRepoCreation: optional.Some(10),
+ IsRestricted: optional.Some(true),
+ IsActive: optional.Some(false),
+ IsAdmin: optional.Some(true),
+ Visibility: optional.Some(structs.VisibleTypePrivate),
+ KeepActivityPrivate: optional.Some(true),
+ Language: optional.Some("lang"),
+ Theme: optional.Some("theme"),
+ DiffViewStyle: optional.Some("split"),
+ AllowCreateOrganization: optional.Some(false),
+ EmailNotificationsPreference: optional.Some("disabled"),
+ SetLastLogin: true,
+ }
+ require.NoError(t, UpdateUser(db.DefaultContext, user, opts))
+
+ assert.Equal(t, opts.KeepEmailPrivate.Value(), user.KeepEmailPrivate)
+ assert.Equal(t, opts.FullName.Value(), user.FullName)
+ assert.Equal(t, opts.Website.Value(), user.Website)
+ assert.Equal(t, opts.Location.Value(), user.Location)
+ assert.Equal(t, opts.Description.Value(), user.Description)
+ assert.Equal(t, opts.AllowGitHook.Value(), user.AllowGitHook)
+ assert.Equal(t, opts.AllowImportLocal.Value(), user.AllowImportLocal)
+ assert.Equal(t, opts.MaxRepoCreation.Value(), user.MaxRepoCreation)
+ assert.Equal(t, opts.IsRestricted.Value(), user.IsRestricted)
+ assert.Equal(t, opts.IsActive.Value(), user.IsActive)
+ assert.Equal(t, opts.IsAdmin.Value(), user.IsAdmin)
+ assert.Equal(t, opts.Visibility.Value(), user.Visibility)
+ assert.Equal(t, opts.KeepActivityPrivate.Value(), user.KeepActivityPrivate)
+ assert.Equal(t, opts.Language.Value(), user.Language)
+ assert.Equal(t, opts.Theme.Value(), user.Theme)
+ assert.Equal(t, opts.DiffViewStyle.Value(), user.DiffViewStyle)
+ assert.Equal(t, opts.AllowCreateOrganization.Value(), user.AllowCreateOrganization)
+ assert.Equal(t, opts.EmailNotificationsPreference.Value(), user.EmailNotificationsPreference)
+
+ user = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 28})
+ assert.Equal(t, opts.KeepEmailPrivate.Value(), user.KeepEmailPrivate)
+ assert.Equal(t, opts.FullName.Value(), user.FullName)
+ assert.Equal(t, opts.Website.Value(), user.Website)
+ assert.Equal(t, opts.Location.Value(), user.Location)
+ assert.Equal(t, opts.Description.Value(), user.Description)
+ assert.Equal(t, opts.AllowGitHook.Value(), user.AllowGitHook)
+ assert.Equal(t, opts.AllowImportLocal.Value(), user.AllowImportLocal)
+ assert.Equal(t, opts.MaxRepoCreation.Value(), user.MaxRepoCreation)
+ assert.Equal(t, opts.IsRestricted.Value(), user.IsRestricted)
+ assert.Equal(t, opts.IsActive.Value(), user.IsActive)
+ assert.Equal(t, opts.IsAdmin.Value(), user.IsAdmin)
+ assert.Equal(t, opts.Visibility.Value(), user.Visibility)
+ assert.Equal(t, opts.KeepActivityPrivate.Value(), user.KeepActivityPrivate)
+ assert.Equal(t, opts.Language.Value(), user.Language)
+ assert.Equal(t, opts.Theme.Value(), user.Theme)
+ assert.Equal(t, opts.DiffViewStyle.Value(), user.DiffViewStyle)
+ assert.Equal(t, opts.AllowCreateOrganization.Value(), user.AllowCreateOrganization)
+ assert.Equal(t, opts.EmailNotificationsPreference.Value(), user.EmailNotificationsPreference)
+}
+
+func TestUpdateAuth(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 28})
+ userCopy := *user
+
+ require.NoError(t, UpdateAuth(db.DefaultContext, user, &UpdateAuthOptions{
+ LoginName: optional.Some("new-login"),
+ }))
+ assert.Equal(t, "new-login", user.LoginName)
+
+ require.NoError(t, UpdateAuth(db.DefaultContext, user, &UpdateAuthOptions{
+ Password: optional.Some("%$DRZUVB576tfzgu"),
+ MustChangePassword: optional.Some(true),
+ }))
+ assert.True(t, user.MustChangePassword)
+ assert.NotEqual(t, userCopy.Passwd, user.Passwd)
+ assert.NotEqual(t, userCopy.Salt, user.Salt)
+
+ require.NoError(t, UpdateAuth(db.DefaultContext, user, &UpdateAuthOptions{
+ ProhibitLogin: optional.Some(true),
+ }))
+ assert.True(t, user.ProhibitLogin)
+
+ require.ErrorIs(t, UpdateAuth(db.DefaultContext, user, &UpdateAuthOptions{
+ Password: optional.Some("aaaa"),
+ }), password_module.ErrMinLength)
+}
diff --git a/services/user/user.go b/services/user/user.go
new file mode 100644
index 0000000..abaeb88
--- /dev/null
+++ b/services/user/user.go
@@ -0,0 +1,332 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models"
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/organization"
+ packages_model "code.gitea.io/gitea/models/packages"
+ repo_model "code.gitea.io/gitea/models/repo"
+ system_model "code.gitea.io/gitea/models/system"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/eventsource"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/agit"
+ org_service "code.gitea.io/gitea/services/org"
+ "code.gitea.io/gitea/services/packages"
+ container_service "code.gitea.io/gitea/services/packages/container"
+ repo_service "code.gitea.io/gitea/services/repository"
+)
+
+// RenameUser renames a user
+func RenameUser(ctx context.Context, u *user_model.User, newUserName string) error {
+ // Non-local users are not allowed to change their username.
+ if !u.IsOrganization() && !u.IsLocal() {
+ return user_model.ErrUserIsNotLocal{
+ UID: u.ID,
+ Name: u.Name,
+ }
+ }
+
+ if newUserName == u.Name {
+ return nil
+ }
+
+ if err := user_model.IsUsableUsername(newUserName); err != nil {
+ return err
+ }
+
+ onlyCapitalization := strings.EqualFold(newUserName, u.Name)
+ oldUserName := u.Name
+
+ if onlyCapitalization {
+ u.Name = newUserName
+ if err := user_model.UpdateUserCols(ctx, u, "name"); err != nil {
+ u.Name = oldUserName
+ return err
+ }
+ return repo_model.UpdateRepositoryOwnerNames(ctx, u.ID, newUserName)
+ }
+
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ isExist, err := user_model.IsUserExist(ctx, u.ID, newUserName)
+ if err != nil {
+ return err
+ }
+ if isExist {
+ return user_model.ErrUserAlreadyExist{
+ Name: newUserName,
+ }
+ }
+
+ if err = repo_model.UpdateRepositoryOwnerName(ctx, oldUserName, newUserName); err != nil {
+ return err
+ }
+
+ if err = user_model.NewUserRedirect(ctx, u.ID, oldUserName, newUserName); err != nil {
+ return err
+ }
+
+ if err := agit.UserNameChanged(ctx, u, newUserName); err != nil {
+ return err
+ }
+ if err := container_service.UpdateRepositoryNames(ctx, u, newUserName); err != nil {
+ return err
+ }
+
+ u.Name = newUserName
+ u.LowerName = strings.ToLower(newUserName)
+ if err := user_model.UpdateUserCols(ctx, u, "name", "lower_name"); err != nil {
+ u.Name = oldUserName
+ u.LowerName = strings.ToLower(oldUserName)
+ return err
+ }
+
+ // Do not fail if directory does not exist
+ if err = util.Rename(user_model.UserPath(oldUserName), user_model.UserPath(newUserName)); err != nil && !os.IsNotExist(err) {
+ u.Name = oldUserName
+ u.LowerName = strings.ToLower(oldUserName)
+ return fmt.Errorf("rename user directory: %w", err)
+ }
+
+ if err = committer.Commit(); err != nil {
+ u.Name = oldUserName
+ u.LowerName = strings.ToLower(oldUserName)
+ if err2 := util.Rename(user_model.UserPath(newUserName), user_model.UserPath(oldUserName)); err2 != nil && !os.IsNotExist(err2) {
+ log.Critical("Unable to rollback directory change during failed username change from: %s to: %s. DB Error: %v. Filesystem Error: %v", oldUserName, newUserName, err, err2)
+ return fmt.Errorf("failed to rollback directory change during failed username change from: %s to: %s. DB Error: %w. Filesystem Error: %v", oldUserName, newUserName, err, err2)
+ }
+ return err
+ }
+ return nil
+}
+
+// DeleteUser completely and permanently deletes everything belonging to a user.
+// Issues, comments and pulls are kept, but shown as authored by a deleted user,
+// unless the user is younger than USER_DELETE_WITH_COMMENTS_MAX_DAYS.
+func DeleteUser(ctx context.Context, u *user_model.User, purge bool) error {
+ if u.IsOrganization() {
+ return fmt.Errorf("%s is an organization not a user", u.Name)
+ }
+
+ if user_model.IsLastAdminUser(ctx, u) {
+ return models.ErrDeleteLastAdminUser{UID: u.ID}
+ }
+
+ hasSSHKey, err := db.GetEngine(ctx).Where("owner_id = ? AND type != ?", u.ID, asymkey_model.KeyTypePrincipal).Table("public_key").Exist()
+ if err != nil {
+ return err
+ }
+
+ hasPrincipalSSHKey, err := db.GetEngine(ctx).Where("owner_id = ? AND type = ?", u.ID, asymkey_model.KeyTypePrincipal).Table("public_key").Exist()
+ if err != nil {
+ return err
+ }
+
+ if purge {
+ // Disable the user first
+ // NOTE: This is deliberately not within a transaction as it must disable the user immediately to prevent any further actions by the user being purged.
+ if err := user_model.UpdateUserCols(ctx, &user_model.User{
+ ID: u.ID,
+ IsActive: false,
+ IsRestricted: true,
+ IsAdmin: false,
+ ProhibitLogin: true,
+ Passwd: "",
+ Salt: "",
+ PasswdHashAlgo: "",
+ MaxRepoCreation: 0,
+ }, "is_active", "is_restricted", "is_admin", "prohibit_login", "max_repo_creation", "passwd", "salt", "passwd_hash_algo"); err != nil {
+ return fmt.Errorf("unable to disable user: %s[%d] prior to purge. UpdateUserCols: %w", u.Name, u.ID, err)
+ }
+
+ // Force any logged in sessions to log out
+ // FIXME: We also need to tell the session manager to log them out too.
+ eventsource.GetManager().SendMessage(u.ID, &eventsource.Event{
+ Name: "logout",
+ })
+
+ // Delete all repos belonging to this user
+ // Now this is not within a transaction because there are internal transactions within the DeleteRepository
+ // BUT: the db will still be consistent even if a number of repos have already been deleted.
+ // And in fact we want to capture any repositories that are being created in other transactions in the meantime
+ //
+ // An alternative option here would be to write a DeleteAllRepositoriesForUserID function which would delete all of the repos,
+ // but such a function would likely get out of date
+ err := repo_service.DeleteOwnerRepositoriesDirectly(ctx, u)
+ if err != nil {
+ return err
+ }
+
+ // Remove from Organizations and delete last owner organizations
+ // Now this is not within a transaction because there are internal transactions within the DeleteOrganization
+ // BUT: the db will still be consistent even if a number of organization memberships and organizations have already been deleted
+ // And in fact we want to capture any organization additions that are being created in other transactions in the meantime
+ //
+ // An alternative option here would be to write a function which would delete all organizations,
+ // but such a function would likely get out of date
+ for {
+ orgs, err := db.Find[organization.Organization](ctx, organization.FindOrgOptions{
+ ListOptions: db.ListOptions{
+ PageSize: repo_model.RepositoryListDefaultPageSize,
+ Page: 1,
+ },
+ UserID: u.ID,
+ IncludePrivate: true,
+ })
+ if err != nil {
+ return fmt.Errorf("unable to find org list for %s[%d]. Error: %w", u.Name, u.ID, err)
+ }
+ if len(orgs) == 0 {
+ break
+ }
+ for _, org := range orgs {
+ if err := models.RemoveOrgUser(ctx, org.ID, u.ID); err != nil {
+ if organization.IsErrLastOrgOwner(err) {
+ err = org_service.DeleteOrganization(ctx, org, true)
+ if err != nil {
+ return fmt.Errorf("unable to delete organization %d: %w", org.ID, err)
+ }
+ }
+ if err != nil {
+ return fmt.Errorf("unable to remove user %s[%d] from org %s[%d]. Error: %w", u.Name, u.ID, org.Name, org.ID, err)
+ }
+ }
+ }
+ }
+
+ // Delete Packages
+ if setting.Packages.Enabled {
+ if _, err := packages.RemoveAllPackages(ctx, u.ID); err != nil {
+ return err
+ }
+ }
+
+ // Delete Federated Users
+ if setting.Federation.Enabled {
+ if err := user_model.DeleteFederatedUser(ctx, u.ID); err != nil {
+ return err
+ }
+ }
+ }
+
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ // Note: A user who owns any repository or belongs to any organization
+ // cannot be deleted. This races with the purge above; however,
+ // consistency requires that we ensure that this is the case
+
+ // Check ownership of repository.
+ count, err := repo_model.CountRepositories(ctx, repo_model.CountRepositoryOptions{OwnerID: u.ID})
+ if err != nil {
+ return fmt.Errorf("GetRepositoryCount: %w", err)
+ } else if count > 0 {
+ return models.ErrUserOwnRepos{UID: u.ID}
+ }
+
+ // Check membership of organization.
+ count, err = organization.GetOrganizationCount(ctx, u)
+ if err != nil {
+ return fmt.Errorf("GetOrganizationCount: %w", err)
+ } else if count > 0 {
+ return models.ErrUserHasOrgs{UID: u.ID}
+ }
+
+ // Check ownership of packages.
+ if ownsPackages, err := packages_model.HasOwnerPackages(ctx, u.ID); err != nil {
+ return fmt.Errorf("HasOwnerPackages: %w", err)
+ } else if ownsPackages {
+ return models.ErrUserOwnPackages{UID: u.ID}
+ }
+
+ if err := deleteUser(ctx, u, purge); err != nil {
+ return fmt.Errorf("DeleteUser: %w", err)
+ }
+
+ if err := committer.Commit(); err != nil {
+ return err
+ }
+ committer.Close()
+
+ if hasSSHKey {
+ if err = asymkey_model.RewriteAllPublicKeys(ctx); err != nil {
+ return err
+ }
+ }
+
+ if hasPrincipalSSHKey {
+ if err = asymkey_model.RewriteAllPrincipalKeys(ctx); err != nil {
+ return err
+ }
+ }
+
+ // Note: Some operations simply cannot be rolled back,
+ // so we only keep error logs for those.
+ path := user_model.UserPath(u.Name)
+ if err := util.RemoveAll(path); err != nil {
+ err = fmt.Errorf("Failed to RemoveAll %s: %w", path, err)
+ _ = system_model.CreateNotice(ctx, system_model.NoticeTask, fmt.Sprintf("delete user '%s': %v", u.Name, err))
+ return err
+ }
+
+ if u.Avatar != "" {
+ avatarPath := u.CustomAvatarRelativePath()
+ if err := storage.Avatars.Delete(avatarPath); err != nil {
+ err = fmt.Errorf("Failed to remove %s: %w", avatarPath, err)
+ _ = system_model.CreateNotice(ctx, system_model.NoticeTask, fmt.Sprintf("delete user '%s': %v", u.Name, err))
+ return err
+ }
+ }
+
+ return nil
+}
+
+// DeleteInactiveUsers deletes all inactive users and email addresses.
+func DeleteInactiveUsers(ctx context.Context, olderThan time.Duration) error {
+ users, err := user_model.GetInactiveUsers(ctx, olderThan)
+ if err != nil {
+ return err
+ }
+
+ // FIXME: should only update authorized_keys file once after all deletions.
+ for _, u := range users {
+ select {
+ case <-ctx.Done():
+ return db.ErrCancelledf("Before delete inactive user %s", u.Name)
+ default:
+ }
+ if err := DeleteUser(ctx, u, false); err != nil {
+ // Ignore users that were set inactive by admin.
+ if models.IsErrUserOwnRepos(err) || models.IsErrUserHasOrgs(err) ||
+ models.IsErrUserOwnPackages(err) || models.IsErrDeleteLastAdminUser(err) {
+ log.Warn("Inactive user %q has repositories, organizations or packages, skipping deletion: %v", u.Name, err)
+ continue
+ }
+ return err
+ }
+ }
+
+ return nil
+}
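DeleteInactiveUsers checks ctx.Done() before each deletion so that a shutdown can interrupt a long batch without leaving a user half-processed. A standalone sketch of that cancellation-aware loop (the name list and delete function are stand-ins, not the Forgejo API):

package main

import (
	"context"
	"fmt"
	"time"
)

// deleteOne stands in for the real per-user deletion.
func deleteOne(name string) error {
	fmt.Println("deleted", name)
	return nil
}

// deleteAll aborts between items as soon as the context is cancelled.
func deleteAll(ctx context.Context, names []string) error {
	for _, n := range names {
		select {
		case <-ctx.Done():
			return ctx.Err() // stop before starting the next deletion
		default:
		}
		if err := deleteOne(n); err != nil {
			return err
		}
	}
	return nil
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()
	_ = deleteAll(ctx, []string{"alice", "bob"})
}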
diff --git a/services/user/user_test.go b/services/user/user_test.go
new file mode 100644
index 0000000..ad5387c
--- /dev/null
+++ b/services/user/user_test.go
@@ -0,0 +1,264 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+ "time"
+
+ "code.gitea.io/gitea/models"
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/organization"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/test"
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
+
+func TestDeleteUser(t *testing.T) {
+ test := func(userID int64) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: userID})
+
+ ownedRepos := make([]*repo_model.Repository, 0, 10)
+ require.NoError(t, db.GetEngine(db.DefaultContext).Find(&ownedRepos, &repo_model.Repository{OwnerID: userID}))
+ if len(ownedRepos) > 0 {
+ err := DeleteUser(db.DefaultContext, user, false)
+ require.Error(t, err)
+ assert.True(t, models.IsErrUserOwnRepos(err))
+ return
+ }
+
+ orgUsers := make([]*organization.OrgUser, 0, 10)
+ require.NoError(t, db.GetEngine(db.DefaultContext).Find(&orgUsers, &organization.OrgUser{UID: userID}))
+ for _, orgUser := range orgUsers {
+ if err := models.RemoveOrgUser(db.DefaultContext, orgUser.OrgID, orgUser.UID); err != nil {
+ assert.True(t, organization.IsErrLastOrgOwner(err))
+ return
+ }
+ }
+ require.NoError(t, DeleteUser(db.DefaultContext, user, false))
+ unittest.AssertNotExistsBean(t, &user_model.User{ID: userID})
+ unittest.CheckConsistencyFor(t, &user_model.User{}, &repo_model.Repository{})
+ }
+ test(2)
+ test(4)
+ test(8)
+ test(11)
+
+ org := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3})
+ require.Error(t, DeleteUser(db.DefaultContext, org, false))
+}
+
+func TestPurgeUser(t *testing.T) {
+ defer unittest.OverrideFixtures(
+ unittest.FixturesOptions{
+ Dir: filepath.Join(setting.AppWorkPath, "models/fixtures/"),
+ Base: setting.AppWorkPath,
+ Dirs: []string{"services/user/TestPurgeUser/"},
+ },
+ )()
+ require.NoError(t, unittest.PrepareTestDatabase())
+ defer test.MockVariableValue(&setting.SSH.RootPath, t.TempDir())()
+ defer test.MockVariableValue(&setting.SSH.CreateAuthorizedKeysFile, true)()
+ defer test.MockVariableValue(&setting.SSH.CreateAuthorizedPrincipalsFile, true)()
+ defer test.MockVariableValue(&setting.SSH.StartBuiltinServer, false)()
+ require.NoError(t, asymkey_model.RewriteAllPublicKeys(db.DefaultContext))
+ require.NoError(t, asymkey_model.RewriteAllPrincipalKeys(db.DefaultContext))
+
+ test := func(userID int64, modifySSHKey bool) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: userID})
+
+ fAuthorizedKeys, err := os.Open(filepath.Join(setting.SSH.RootPath, "authorized_keys"))
+ require.NoError(t, err)
+ authorizedKeysStatBefore, err := fAuthorizedKeys.Stat()
+ require.NoError(t, err)
+ fAuthorizedPrincipals, err := os.Open(filepath.Join(setting.SSH.RootPath, "authorized_principals"))
+ require.NoError(t, err)
+ authorizedPrincipalsBefore, err := fAuthorizedPrincipals.Stat()
+ require.NoError(t, err)
+
+ require.NoError(t, DeleteUser(db.DefaultContext, user, true))
+
+ unittest.AssertNotExistsBean(t, &user_model.User{ID: userID})
+ unittest.CheckConsistencyFor(t, &user_model.User{}, &repo_model.Repository{})
+
+ fAuthorizedKeys, err = os.Open(filepath.Join(setting.SSH.RootPath, "authorized_keys"))
+ require.NoError(t, err)
+ fAuthorizedPrincipals, err = os.Open(filepath.Join(setting.SSH.RootPath, "authorized_principals"))
+ require.NoError(t, err)
+
+ authorizedKeysStatAfter, err := fAuthorizedKeys.Stat()
+ require.NoError(t, err)
+ authorizedPrincipalsAfter, err := fAuthorizedPrincipals.Stat()
+ require.NoError(t, err)
+
+ if modifySSHKey {
+ assert.Greater(t, authorizedKeysStatAfter.ModTime(), authorizedKeysStatBefore.ModTime())
+ assert.Greater(t, authorizedPrincipalsAfter.ModTime(), authorizedPrincipalsBefore.ModTime())
+ } else {
+ assert.Equal(t, authorizedKeysStatAfter.ModTime(), authorizedKeysStatBefore.ModTime())
+ assert.Equal(t, authorizedPrincipalsAfter.ModTime(), authorizedPrincipalsBefore.ModTime())
+ }
+ }
+ test(2, true)
+ test(4, false)
+ test(8, false)
+ test(11, false)
+
+ org := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3})
+ require.Error(t, DeleteUser(db.DefaultContext, org, false))
+}
+
+func TestCreateUser(t *testing.T) {
+ user := &user_model.User{
+ Name: "GiteaBot",
+ Email: "GiteaBot@gitea.io",
+ Passwd: ";p['////..-++']",
+ IsAdmin: false,
+ Theme: setting.UI.DefaultTheme,
+ MustChangePassword: false,
+ }
+
+ require.NoError(t, user_model.CreateUser(db.DefaultContext, user))
+
+ require.NoError(t, DeleteUser(db.DefaultContext, user, false))
+}
+
+func TestRenameUser(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 21})
+
+ t.Run("Non-Local", func(t *testing.T) {
+ u := &user_model.User{
+ Type: user_model.UserTypeIndividual,
+ LoginType: auth.OAuth2,
+ }
+ require.ErrorIs(t, RenameUser(db.DefaultContext, u, "user_rename"), user_model.ErrUserIsNotLocal{})
+ })
+
+ t.Run("Same username", func(t *testing.T) {
+ require.NoError(t, RenameUser(db.DefaultContext, user, user.Name))
+ })
+
+ t.Run("Non usable username", func(t *testing.T) {
+ usernames := []string{"--diff", "aa.png", ".well-known", "search", "aaa.atom"}
+ for _, username := range usernames {
+ t.Run(username, func(t *testing.T) {
+ require.Error(t, user_model.IsUsableUsername(username))
+ require.Error(t, RenameUser(db.DefaultContext, user, username))
+ })
+ }
+ })
+
+ t.Run("Only capitalization", func(t *testing.T) {
+ caps := strings.ToUpper(user.Name)
+ unittest.AssertNotExistsBean(t, &user_model.User{ID: user.ID, Name: caps})
+ unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerID: user.ID, OwnerName: user.Name})
+
+ require.NoError(t, RenameUser(db.DefaultContext, user, caps))
+
+ unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: user.ID, Name: caps})
+ unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerID: user.ID, OwnerName: caps})
+ })
+
+ t.Run("Already exists", func(t *testing.T) {
+ existUser := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+
+ require.ErrorIs(t, RenameUser(db.DefaultContext, user, existUser.Name), user_model.ErrUserAlreadyExist{Name: existUser.Name})
+ require.ErrorIs(t, RenameUser(db.DefaultContext, user, existUser.LowerName), user_model.ErrUserAlreadyExist{Name: existUser.LowerName})
+ newUsername := fmt.Sprintf("uSEr%d", existUser.ID)
+ require.ErrorIs(t, RenameUser(db.DefaultContext, user, newUsername), user_model.ErrUserAlreadyExist{Name: newUsername})
+ })
+
+ t.Run("Normal", func(t *testing.T) {
+ oldUsername := user.Name
+ newUsername := "User_Rename"
+
+ require.NoError(t, RenameUser(db.DefaultContext, user, newUsername))
+ unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: user.ID, Name: newUsername, LowerName: strings.ToLower(newUsername)})
+
+ redirectUID, err := user_model.LookupUserRedirect(db.DefaultContext, oldUsername)
+ require.NoError(t, err)
+ assert.EqualValues(t, user.ID, redirectUID)
+
+ unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerID: user.ID, OwnerName: user.Name})
+ })
+}
+
+func TestCreateUser_Issue5882(t *testing.T) {
+ // Init settings
+ _ = setting.Admin
+
+ passwd := ".//.;1;;//.,-=_"
+
+ tt := []struct {
+ user *user_model.User
+ disableOrgCreation bool
+ }{
+ {&user_model.User{Name: "GiteaBot", Email: "GiteaBot@gitea.io", Passwd: passwd, MustChangePassword: false}, false},
+ {&user_model.User{Name: "GiteaBot2", Email: "GiteaBot2@gitea.io", Passwd: passwd, MustChangePassword: false}, true},
+ }
+
+ setting.Service.DefaultAllowCreateOrganization = true
+
+ for _, v := range tt {
+ setting.Admin.DisableRegularOrgCreation = v.disableOrgCreation
+
+ require.NoError(t, user_model.CreateUser(db.DefaultContext, v.user))
+
+ u, err := user_model.GetUserByEmail(db.DefaultContext, v.user.Email)
+ require.NoError(t, err)
+
+ assert.Equal(t, !u.AllowCreateOrganization, v.disableOrgCreation)
+
+ require.NoError(t, DeleteUser(db.DefaultContext, v.user, false))
+ }
+}
+
+func TestDeleteInactiveUsers(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ // Add an inactive user older than a minute, with an associated email_address record.
+ oldUser := &user_model.User{Name: "OldInactive", LowerName: "oldinactive", Email: "old@example.com", CreatedUnix: timeutil.TimeStampNow().Add(-120)}
+ _, err := db.GetEngine(db.DefaultContext).NoAutoTime().Insert(oldUser)
+ require.NoError(t, err)
+ oldEmail := &user_model.EmailAddress{UID: oldUser.ID, IsPrimary: true, Email: "old@example.com", LowerEmail: "old@example.com"}
+ err = db.Insert(db.DefaultContext, oldEmail)
+ require.NoError(t, err)
+
+ // Add an inactive user that's not older than a minute, with an associated email_address record.
+ newUser := &user_model.User{Name: "NewInactive", LowerName: "newinactive", Email: "new@example.com"}
+ err = db.Insert(db.DefaultContext, newUser)
+ require.NoError(t, err)
+ newEmail := &user_model.EmailAddress{UID: newUser.ID, IsPrimary: true, Email: "new@example.com", LowerEmail: "new@example.com"}
+ err = db.Insert(db.DefaultContext, newEmail)
+ require.NoError(t, err)
+
+ err = DeleteInactiveUsers(db.DefaultContext, time.Minute)
+ require.NoError(t, err)
+
+ // User older than a minute should be deleted along with their email address.
+ unittest.AssertExistsIf(t, false, oldUser)
+ unittest.AssertExistsIf(t, false, oldEmail)
+
+ // User not older than a minute shouldn't be deleted and their email address should still exist.
+ unittest.AssertExistsIf(t, true, newUser)
+ unittest.AssertExistsIf(t, true, newEmail)
+}
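TestPurgeUser temporarily overrides several SSH settings with test.MockVariableValue and restores them through the deferred return value. A minimal sketch of how such a helper can be written — an assumption about its shape, not the actual modules/test implementation:

package main

import "fmt"

var rootPath = "/data/ssh" // stand-in for a package-level setting

// mockVariableValue swaps *v to value and returns a restore function,
// so `defer mockVariableValue(&v, x)()` undoes the override on exit.
func mockVariableValue[T any](v *T, value T) func() {
	old := *v
	*v = value
	return func() { *v = old }
}

func main() {
	defer mockVariableValue(&rootPath, "/tmp/test-ssh")()
	fmt.Println(rootPath) // /tmp/test-ssh until main returns
}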
diff --git a/services/webhook/default.go b/services/webhook/default.go
new file mode 100644
index 0000000..089ff8b
--- /dev/null
+++ b/services/webhook/default.go
@@ -0,0 +1,160 @@
+// Copyright 2024 The Forgejo Authors c/o Codeberg e.V.. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "context"
+ "fmt"
+ "html/template"
+ "net/http"
+ "net/url"
+ "strings"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/svg"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/webhook/shared"
+)
+
+var _ Handler = defaultHandler{}
+
+type defaultHandler struct {
+ forgejo bool
+}
+
+func (dh defaultHandler) Type() webhook_module.HookType {
+ if dh.forgejo {
+ return webhook_module.FORGEJO
+ }
+ return webhook_module.GITEA
+}
+
+func (dh defaultHandler) Icon(size int) template.HTML {
+ if dh.forgejo {
+ // forgejo.svg is not in web_src/svg/, so svg.RenderHTML does not work
+ return shared.ImgIcon("forgejo.svg", size)
+ }
+ return svg.RenderHTML("gitea-gitea", size, "img")
+}
+
+func (defaultHandler) Metadata(*webhook_model.Webhook) any { return nil }
+
+func (defaultHandler) UnmarshalForm(bind func(any)) forms.WebhookForm {
+ var form struct {
+ forms.WebhookCoreForm
+ PayloadURL string `binding:"Required;ValidUrl"`
+ HTTPMethod string `binding:"Required;In(POST,GET)"`
+ ContentType int `binding:"Required"`
+ Secret string
+ }
+ bind(&form)
+
+ contentType := webhook_model.ContentTypeJSON
+ if webhook_model.HookContentType(form.ContentType) == webhook_model.ContentTypeForm {
+ contentType = webhook_model.ContentTypeForm
+ }
+ return forms.WebhookForm{
+ WebhookCoreForm: form.WebhookCoreForm,
+ URL: form.PayloadURL,
+ ContentType: contentType,
+ Secret: form.Secret,
+ HTTPMethod: form.HTTPMethod,
+ Metadata: nil,
+ }
+}
+
+func (defaultHandler) NewRequest(ctx context.Context, w *webhook_model.Webhook, t *webhook_model.HookTask) (req *http.Request, body []byte, err error) {
+ payloadContent := t.PayloadContent
+ if w.Type == webhook_module.GITEA &&
+ (t.EventType == webhook_module.HookEventCreate || t.EventType == webhook_module.HookEventDelete) {
+ // Woodpecker expects the ref to be short on tag creation only
+ // https://github.com/woodpecker-ci/woodpecker/blob/00ccec078cdced80cf309cd4da460a5041d7991a/server/forge/gitea/helper.go#L134
+ // see https://codeberg.org/codeberg/community/issues/1556
+ payloadContent, err = substituteRefShortName(payloadContent)
+ if err != nil {
+ return nil, nil, fmt.Errorf("could not substitute ref: %w", err)
+ }
+ }
+
+ switch w.HTTPMethod {
+ case "":
+ log.Info("HTTP Method for %s webhook %s [ID: %d] is not set, defaulting to POST", w.Type, w.URL, w.ID)
+ fallthrough
+ case http.MethodPost:
+ switch w.ContentType {
+ case webhook_model.ContentTypeJSON:
+ req, err = http.NewRequest("POST", w.URL, strings.NewReader(payloadContent))
+ if err != nil {
+ return nil, nil, err
+ }
+
+ req.Header.Set("Content-Type", "application/json")
+ case webhook_model.ContentTypeForm:
+ forms := url.Values{
+ "payload": []string{payloadContent},
+ }
+
+ req, err = http.NewRequest("POST", w.URL, strings.NewReader(forms.Encode()))
+ if err != nil {
+ return nil, nil, err
+ }
+
+ req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
+ default:
+ return nil, nil, fmt.Errorf("invalid content type: %v", w.ContentType)
+ }
+ case http.MethodGet:
+ u, err := url.Parse(w.URL)
+ if err != nil {
+ return nil, nil, fmt.Errorf("invalid URL: %w", err)
+ }
+ vals := u.Query()
+ vals["payload"] = []string{payloadContent}
+ u.RawQuery = vals.Encode()
+ req, err = http.NewRequest("GET", u.String(), nil)
+ if err != nil {
+ return nil, nil, err
+ }
+ case http.MethodPut:
+ switch w.Type {
+ case webhook_module.MATRIX: // used when t.PayloadVersion == 1
+ txnID, err := getMatrixTxnID([]byte(payloadContent))
+ if err != nil {
+ return nil, nil, err
+ }
+ url := fmt.Sprintf("%s/%s", w.URL, url.PathEscape(txnID))
+ req, err = http.NewRequest("PUT", url, strings.NewReader(payloadContent))
+ if err != nil {
+ return nil, nil, err
+ }
+ default:
+ return nil, nil, fmt.Errorf("invalid http method: %v", w.HTTPMethod)
+ }
+ default:
+ return nil, nil, fmt.Errorf("invalid http method: %v", w.HTTPMethod)
+ }
+
+ body = []byte(payloadContent)
+ return req, body, shared.AddDefaultHeaders(req, []byte(w.Secret), t, body)
+}
+
+func substituteRefShortName(body string) (string, error) {
+ var m map[string]any
+ if err := json.Unmarshal([]byte(body), &m); err != nil {
+ return body, err
+ }
+ ref, ok := m["ref"].(string)
+ if !ok {
+ return body, fmt.Errorf("expected string 'ref', got %T", m["ref"])
+ }
+
+ m["ref"] = git.RefName(ref).ShortName()
+
+ buf, err := json.Marshal(m)
+ return string(buf), err
+}
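substituteRefShortName round-trips the payload through a generic map so that only the ref field is rewritten while everything else is preserved. A standalone demonstration, using encoding/json and a simplified shortName that merely strips the refs/heads/ and refs/tags/ prefixes (the real git.RefName covers more ref kinds):

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// shortName is a simplified stand-in for git.RefName(...).ShortName().
func shortName(ref string) string {
	for _, p := range []string{"refs/heads/", "refs/tags/"} {
		if strings.HasPrefix(ref, p) {
			return strings.TrimPrefix(ref, p)
		}
	}
	return ref
}

func substituteRefShortName(body string) (string, error) {
	var m map[string]any
	if err := json.Unmarshal([]byte(body), &m); err != nil {
		return body, err
	}
	ref, ok := m["ref"].(string)
	if !ok {
		return body, fmt.Errorf("expected string 'ref', got %T", m["ref"])
	}
	m["ref"] = shortName(ref)
	buf, err := json.Marshal(m)
	return string(buf), err
}

func main() {
	out, _ := substituteRefShortName(`{"ref":"refs/tags/v1.0","sha":"abc"}`)
	fmt.Println(out) // {"ref":"v1.0","sha":"abc"}
}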
diff --git a/services/webhook/default_test.go b/services/webhook/default_test.go
new file mode 100644
index 0000000..f3e2848
--- /dev/null
+++ b/services/webhook/default_test.go
@@ -0,0 +1,260 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "context"
+ "testing"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/json"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+
+ jsoniter "github.com/json-iterator/go"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestGiteaPayload(t *testing.T) {
+ dh := defaultHandler{
+ forgejo: false,
+ }
+ hook := &webhook_model.Webhook{
+ RepoID: 3,
+ IsActive: true,
+ Type: webhook_module.GITEA,
+ URL: "https://gitea.example.com/",
+ Meta: ``,
+ HTTPMethod: "POST",
+ ContentType: webhook_model.ContentTypeJSON,
+ }
+
+ // Woodpecker expects the ref to be short on tag creation only
+ // https://github.com/woodpecker-ci/woodpecker/blob/00ccec078cdced80cf309cd4da460a5041d7991a/server/forge/gitea/helper.go#L134
+ // see https://codeberg.org/codeberg/community/issues/1556
+ t.Run("Create", func(t *testing.T) {
+ p := createTestPayload()
+ data, err := p.JSONPayload()
+ require.NoError(t, err)
+
+ task := &webhook_model.HookTask{
+ HookID: hook.ID,
+ EventType: webhook_module.HookEventCreate,
+ PayloadContent: string(data),
+ PayloadVersion: 2,
+ }
+
+ req, reqBody, err := dh.NewRequest(context.Background(), hook, task)
+ require.NoError(t, err)
+ require.NotNil(t, req)
+ require.NotNil(t, reqBody)
+
+ assert.Equal(t, "POST", req.Method)
+ assert.Equal(t, "https://gitea.example.com/", req.URL.String())
+ assert.Equal(t, "sha256=", req.Header.Get("X-Hub-Signature-256"))
+ assert.Equal(t, "application/json", req.Header.Get("Content-Type"))
+ var body struct {
+ Ref string `json:"ref"`
+ }
+ err = json.NewDecoder(req.Body).Decode(&body)
+ require.NoError(t, err)
+ assert.Equal(t, "test", body.Ref) // short ref
+ })
+
+ t.Run("Push", func(t *testing.T) {
+ p := pushTestPayload()
+ data, err := p.JSONPayload()
+ require.NoError(t, err)
+
+ task := &webhook_model.HookTask{
+ HookID: hook.ID,
+ EventType: webhook_module.HookEventPush,
+ PayloadContent: string(data),
+ PayloadVersion: 2,
+ }
+
+ req, reqBody, err := dh.NewRequest(context.Background(), hook, task)
+ require.NoError(t, err)
+ require.NotNil(t, req)
+ require.NotNil(t, reqBody)
+
+ assert.Equal(t, "POST", req.Method)
+ assert.Equal(t, "https://gitea.example.com/", req.URL.String())
+ assert.Equal(t, "sha256=", req.Header.Get("X-Hub-Signature-256"))
+ assert.Equal(t, "application/json", req.Header.Get("Content-Type"))
+ var body struct {
+ Ref string `json:"ref"`
+ }
+ err = json.NewDecoder(req.Body).Decode(&body)
+ require.NoError(t, err)
+ assert.Equal(t, "refs/heads/test", body.Ref) // full ref
+ })
+
+ t.Run("Delete", func(t *testing.T) {
+ p := deleteTestPayload()
+ data, err := p.JSONPayload()
+ require.NoError(t, err)
+
+ task := &webhook_model.HookTask{
+ HookID: hook.ID,
+ EventType: webhook_module.HookEventDelete,
+ PayloadContent: string(data),
+ PayloadVersion: 2,
+ }
+
+ req, reqBody, err := dh.NewRequest(context.Background(), hook, task)
+ require.NoError(t, err)
+ require.NotNil(t, req)
+ require.NotNil(t, reqBody)
+
+ assert.Equal(t, "POST", req.Method)
+ assert.Equal(t, "https://gitea.example.com/", req.URL.String())
+ assert.Equal(t, "sha256=", req.Header.Get("X-Hub-Signature-256"))
+ assert.Equal(t, "application/json", req.Header.Get("Content-Type"))
+ var body struct {
+ Ref string `json:"ref"`
+ }
+ err = json.NewDecoder(req.Body).Decode(&body)
+ require.NoError(t, err)
+ assert.Equal(t, "test", body.Ref) // short ref
+ })
+}
+
+func TestForgejoPayload(t *testing.T) {
+ dh := defaultHandler{
+ forgejo: true,
+ }
+ hook := &webhook_model.Webhook{
+ RepoID: 3,
+ IsActive: true,
+ Type: webhook_module.FORGEJO,
+ URL: "https://forgejo.example.com/",
+ Meta: ``,
+ HTTPMethod: "POST",
+ ContentType: webhook_model.ContentTypeJSON,
+ }
+
+ // always return the full ref for consistency
+ t.Run("Create", func(t *testing.T) {
+ p := createTestPayload()
+ data, err := p.JSONPayload()
+ require.NoError(t, err)
+
+ task := &webhook_model.HookTask{
+ HookID: hook.ID,
+ EventType: webhook_module.HookEventCreate,
+ PayloadContent: string(data),
+ PayloadVersion: 2,
+ }
+
+ req, reqBody, err := dh.NewRequest(context.Background(), hook, task)
+ require.NoError(t, err)
+ require.NotNil(t, req)
+ require.NotNil(t, reqBody)
+
+ assert.Equal(t, "POST", req.Method)
+ assert.Equal(t, "https://forgejo.example.com/", req.URL.String())
+ assert.Equal(t, "sha256=", req.Header.Get("X-Hub-Signature-256"))
+ assert.Equal(t, "application/json", req.Header.Get("Content-Type"))
+ var body struct {
+ Ref string `json:"ref"`
+ }
+ err = json.NewDecoder(req.Body).Decode(&body)
+ require.NoError(t, err)
+ assert.Equal(t, "refs/heads/test", body.Ref) // full ref
+ })
+
+ t.Run("Push", func(t *testing.T) {
+ p := pushTestPayload()
+ data, err := p.JSONPayload()
+ require.NoError(t, err)
+
+ task := &webhook_model.HookTask{
+ HookID: hook.ID,
+ EventType: webhook_module.HookEventPush,
+ PayloadContent: string(data),
+ PayloadVersion: 2,
+ }
+
+ req, reqBody, err := dh.NewRequest(context.Background(), hook, task)
+ require.NoError(t, err)
+ require.NotNil(t, req)
+ require.NotNil(t, reqBody)
+
+ assert.Equal(t, "POST", req.Method)
+ assert.Equal(t, "https://forgejo.example.com/", req.URL.String())
+ assert.Equal(t, "sha256=", req.Header.Get("X-Hub-Signature-256"))
+ assert.Equal(t, "application/json", req.Header.Get("Content-Type"))
+ var body struct {
+ Ref string `json:"ref"`
+ }
+ err = json.NewDecoder(req.Body).Decode(&body)
+ require.NoError(t, err)
+ assert.Equal(t, "refs/heads/test", body.Ref) // full ref
+ })
+
+ t.Run("Delete", func(t *testing.T) {
+ p := deleteTestPayload()
+ data, err := p.JSONPayload()
+ require.NoError(t, err)
+
+ task := &webhook_model.HookTask{
+ HookID: hook.ID,
+ EventType: webhook_module.HookEventDelete,
+ PayloadContent: string(data),
+ PayloadVersion: 2,
+ }
+
+ req, reqBody, err := dh.NewRequest(context.Background(), hook, task)
+ require.NoError(t, err)
+ require.NotNil(t, req)
+ require.NotNil(t, reqBody)
+
+ assert.Equal(t, "POST", req.Method)
+ assert.Equal(t, "https://forgejo.example.com/", req.URL.String())
+ assert.Equal(t, "sha256=", req.Header.Get("X-Hub-Signature-256"))
+ assert.Equal(t, "application/json", req.Header.Get("Content-Type"))
+ var body struct {
+ Ref string `json:"ref"`
+ }
+ err = json.NewDecoder(req.Body).Decode(&body)
+ require.NoError(t, err)
+ assert.Equal(t, "refs/heads/test", body.Ref) // full ref
+ })
+}
+
+func TestOpenProjectPayload(t *testing.T) {
+ t.Run("PullRequest", func(t *testing.T) {
+ p := pullRequestTestPayload()
+ data, err := p.JSONPayload()
+ require.NoError(t, err)
+
+ // adapted from https://github.com/opf/openproject/blob/4c5c45fe995da0060902bc8dd5f1bf704d0b8737/modules/github_integration/lib/open_project/github_integration/services/upsert_pull_request.rb#L56
+ j := jsoniter.Get(data, "pull_request")
+
+ assert.Equal(t, 12, j.Get("id").MustBeValid().ToInt())
+ assert.Equal(t, "user1", j.Get("user", "login").MustBeValid().ToString())
+ assert.Equal(t, 12, j.Get("number").MustBeValid().ToInt())
+ assert.Equal(t, "http://localhost:3000/test/repo/pulls/12", j.Get("html_url").MustBeValid().ToString())
+ assert.Equal(t, jsoniter.NilValue, j.Get("updated_at").ValueType())
+ assert.Equal(t, "", j.Get("state").MustBeValid().ToString())
+ assert.Equal(t, "Fix bug", j.Get("title").MustBeValid().ToString())
+ assert.Equal(t, "fixes bug #2", j.Get("body").MustBeValid().ToString())
+
+ assert.Equal(t, "test/repo", j.Get("base", "repo", "full_name").MustBeValid().ToString())
+ assert.Equal(t, "http://localhost:3000/test/repo", j.Get("base", "repo", "html_url").MustBeValid().ToString())
+
+ assert.False(t, j.Get("draft").MustBeValid().ToBool())
+ assert.Equal(t, jsoniter.NilValue, j.Get("merge_commit_sha").ValueType())
+ assert.False(t, j.Get("merged").MustBeValid().ToBool())
+ assert.Equal(t, jsoniter.NilValue, j.Get("merged_by").ValueType())
+ assert.Equal(t, jsoniter.NilValue, j.Get("merged_at").ValueType())
+ assert.Equal(t, 0, j.Get("comments").MustBeValid().ToInt())
+ assert.Equal(t, 0, j.Get("review_comments").MustBeValid().ToInt())
+ assert.Equal(t, 0, j.Get("additions").MustBeValid().ToInt())
+ assert.Equal(t, 0, j.Get("deletions").MustBeValid().ToInt())
+ assert.Equal(t, 0, j.Get("changed_files").MustBeValid().ToInt())
+ // assert.Equal(t,"labels:", j.Get("labels").map { |values| extract_label_values(values) )
+ })
+}
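The OpenProject test reads nested fields with jsoniter's path-based Get instead of declaring intermediate structs, which keeps the assertions close to the JSON paths the integration consumes. A small self-contained example of the same lookups:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	data := []byte(`{"pull_request":{"id":12,"user":{"login":"user1"},"merged_at":null}}`)

	// Get walks the object by path; MustBeValid panics if the path is missing.
	j := jsoniter.Get(data, "pull_request")
	fmt.Println(j.Get("id").MustBeValid().ToInt())                   // 12
	fmt.Println(j.Get("user", "login").MustBeValid().ToString())     // user1
	fmt.Println(j.Get("merged_at").ValueType() == jsoniter.NilValue) // true
}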
diff --git a/services/webhook/deliver.go b/services/webhook/deliver.go
new file mode 100644
index 0000000..2566814
--- /dev/null
+++ b/services/webhook/deliver.go
@@ -0,0 +1,258 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "context"
+ "crypto/tls"
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "strings"
+ "sync"
+ "time"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/hostmatcher"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/process"
+ "code.gitea.io/gitea/modules/proxy"
+ "code.gitea.io/gitea/modules/queue"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+
+ "github.com/gobwas/glob"
+)
+
+// Deliver creates the [http.Request] (depending on the webhook type), sends it
+// and records the status and response.
+func Deliver(ctx context.Context, t *webhook_model.HookTask) error {
+ w, err := webhook_model.GetWebhookByID(ctx, t.HookID)
+ if err != nil {
+ return err
+ }
+
+ defer func() {
+ err := recover()
+ if err == nil {
+ return
+ }
+ // There was a panic whilst delivering a hook...
+ log.Error("PANIC whilst trying to deliver webhook task[%d] to webhook %s Panic: %v\nStacktrace: %s", t.ID, w.URL, err, log.Stack(2))
+ }()
+
+ t.IsDelivered = true
+
+ handler := GetWebhookHandler(w.Type)
+ if handler == nil {
+ return fmt.Errorf("GetWebhookHandler %q", w.Type)
+ }
+ if t.PayloadVersion == 1 {
+ handler = defaultHandler{true}
+ }
+
+ req, body, err := handler.NewRequest(ctx, w, t)
+ if err != nil {
+ return fmt.Errorf("cannot create http request for webhook %s[%d %s]: %w", w.Type, w.ID, w.URL, err)
+ }
+
+ // Record delivery information.
+ t.RequestInfo = &webhook_model.HookRequest{
+ URL: req.URL.String(),
+ HTTPMethod: req.Method,
+ Headers: map[string]string{},
+ Body: string(body),
+ }
+ for k, vals := range req.Header {
+ t.RequestInfo.Headers[k] = strings.Join(vals, ",")
+ }
+
+ // Add Authorization Header
+ authorization, err := w.HeaderAuthorization()
+ if err != nil {
+ return fmt.Errorf("cannot get Authorization header for webhook %s[%d %s]: %w", w.Type, w.ID, w.URL, err)
+ }
+ if authorization != "" {
+ req.Header.Set("Authorization", authorization)
+ redacted := "******"
+ if strings.HasPrefix(authorization, "Bearer ") {
+ redacted = "Bearer " + redacted
+ } else if strings.HasPrefix(authorization, "Basic ") {
+ redacted = "Basic " + redacted
+ }
+ t.RequestInfo.Headers["Authorization"] = redacted
+ }
+
+ t.ResponseInfo = &webhook_model.HookResponse{
+ Headers: map[string]string{},
+ }
+
+ // OK, we're now ready to attempt to deliver the task; we must double-check
+ // that it has not been delivered in the meantime
+ updated, err := webhook_model.MarkTaskDelivered(ctx, t)
+ if err != nil {
+ log.Error("MarkTaskDelivered[%d]: %v", t.ID, err)
+ return fmt.Errorf("unable to mark task[%d] delivered in the db: %w", t.ID, err)
+ }
+ if !updated {
+ // A delivery of this webhook task has already been attempted, or is currently in progress
+ log.Trace("Webhook Task[%d] already delivered", t.ID)
+ return nil
+ }
+
+ // All code from this point will update the hook task
+ defer func() {
+ t.Delivered = timeutil.TimeStampNanoNow()
+ if t.IsSucceed {
+ log.Trace("Hook delivered: %s", t.UUID)
+ } else if !w.IsActive {
+ log.Trace("Hook delivery skipped as webhook is inactive: %s", t.UUID)
+ } else {
+ log.Trace("Hook delivery failed: %s", t.UUID)
+ }
+
+ if err := webhook_model.UpdateHookTask(ctx, t); err != nil {
+ log.Error("UpdateHookTask [%d]: %v", t.ID, err)
+ }
+
+ // Update webhook last delivery status.
+ if t.IsSucceed {
+ w.LastStatus = webhook_module.HookStatusSucceed
+ } else {
+ w.LastStatus = webhook_module.HookStatusFail
+ }
+ if err = webhook_model.UpdateWebhookLastStatus(ctx, w); err != nil {
+ log.Error("UpdateWebhookLastStatus: %v", err)
+ return
+ }
+ }()
+
+ if setting.DisableWebhooks {
+ return fmt.Errorf("webhook task skipped (webhooks disabled): [%d]", t.ID)
+ }
+
+ if !w.IsActive {
+ log.Trace("Webhook %s in Webhook Task[%d] is not active", w.URL, t.ID)
+ return nil
+ }
+
+ resp, err := webhookHTTPClient.Do(req.WithContext(ctx))
+ if err != nil {
+ t.ResponseInfo.Body = fmt.Sprintf("Delivery: %v", err)
+ return fmt.Errorf("unable to deliver webhook task[%d] in %s due to error in http client: %w", t.ID, w.URL, err)
+ }
+ defer resp.Body.Close()
+
+ // A 2xx status code can be treated as a success.
+ t.IsSucceed = resp.StatusCode/100 == 2
+ t.ResponseInfo.Status = resp.StatusCode
+ for k, vals := range resp.Header {
+ t.ResponseInfo.Headers[k] = strings.Join(vals, ",")
+ }
+
+ p, err := io.ReadAll(resp.Body)
+ if err != nil {
+ t.ResponseInfo.Body = fmt.Sprintf("read body: %s", err)
+ return fmt.Errorf("unable to deliver webhook task[%d] in %s as unable to read response body: %w", t.ID, w.URL, err)
+ }
+ t.ResponseInfo.Body = string(p)
+ return nil
+}
+
+var (
+ webhookHTTPClient *http.Client
+ once sync.Once
+ hostMatchers []glob.Glob
+)
+
+func webhookProxy(allowList *hostmatcher.HostMatchList) func(req *http.Request) (*url.URL, error) {
+ if setting.Webhook.ProxyURL == "" {
+ return proxy.Proxy()
+ }
+
+ once.Do(func() {
+ for _, h := range setting.Webhook.ProxyHosts {
+ if g, err := glob.Compile(h); err == nil {
+ hostMatchers = append(hostMatchers, g)
+ } else {
+ log.Error("glob.Compile %s failed: %v", h, err)
+ }
+ }
+ })
+
+ return func(req *http.Request) (*url.URL, error) {
+ for _, v := range hostMatchers {
+ if v.Match(req.URL.Host) {
+ if !allowList.MatchHostName(req.URL.Host) {
+ return nil, fmt.Errorf("webhook can only call allowed HTTP servers (check your %s setting), deny '%s'", allowList.SettingKeyHint, req.URL.Host)
+ }
+ return http.ProxyURL(setting.Webhook.ProxyURLFixed)(req)
+ }
+ }
+ return http.ProxyFromEnvironment(req)
+ }
+}
+
+// Init starts the hooks delivery thread
+func Init() error {
+ timeout := time.Duration(setting.Webhook.DeliverTimeout) * time.Second
+
+ allowedHostListValue := setting.Webhook.AllowedHostList
+ if allowedHostListValue == "" {
+ allowedHostListValue = hostmatcher.MatchBuiltinExternal
+ }
+ allowedHostMatcher := hostmatcher.ParseHostMatchList("webhook.ALLOWED_HOST_LIST", allowedHostListValue)
+
+ webhookHTTPClient = &http.Client{
+ Timeout: timeout,
+ Transport: &http.Transport{
+ TLSClientConfig: &tls.Config{InsecureSkipVerify: setting.Webhook.SkipTLSVerify},
+ Proxy: webhookProxy(allowedHostMatcher),
+ DialContext: hostmatcher.NewDialContext("webhook", allowedHostMatcher, nil, setting.Webhook.ProxyURLFixed),
+ },
+ }
+
+ hookQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "webhook_sender", handler)
+ if hookQueue == nil {
+ return fmt.Errorf("unable to create webhook_sender queue")
+ }
+ go graceful.GetManager().RunWithCancel(hookQueue)
+
+ go graceful.GetManager().RunWithShutdownContext(populateWebhookSendingQueue)
+
+ return nil
+}
+
+func populateWebhookSendingQueue(ctx context.Context) {
+ ctx, _, finished := process.GetManager().AddContext(ctx, "Webhook: Populate sending queue")
+ defer finished()
+
+ lowerID := int64(0)
+ for {
+ taskIDs, err := webhook_model.FindUndeliveredHookTaskIDs(ctx, lowerID)
+ if err != nil {
+ log.Error("Unable to populate webhook queue as FindUndeliveredHookTaskIDs failed: %v", err)
+ return
+ }
+ if len(taskIDs) == 0 {
+ return
+ }
+ lowerID = taskIDs[len(taskIDs)-1]
+
+ for _, taskID := range taskIDs {
+ select {
+ case <-ctx.Done():
+ log.Warn("Shutdown before Webhook Sending queue finishing being populated")
+ return
+ default:
+ }
+ if err := enqueueHookTask(taskID); err != nil {
+ log.Error("Unable to push HookTask[%d] to the Webhook Sending queue: %v", taskID, err)
+ }
+ }
+ }
+}
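webhookProxy decides per request: hosts matching one of the configured glob patterns are routed through the configured proxy (after the allow-list check), and everything else falls back to the environment proxy. A reduced sketch of that decision, with the allow-list check omitted:

package main

import (
	"fmt"
	"net/http"
	"net/url"

	"github.com/gobwas/glob"
)

// proxyFor returns a Proxy func: matched hosts go via proxyURL, others direct.
func proxyFor(patterns []string, proxyURL *url.URL) func(*http.Request) (*url.URL, error) {
	var matchers []glob.Glob
	for _, p := range patterns {
		if g, err := glob.Compile(p); err == nil {
			matchers = append(matchers, g)
		}
	}
	return func(req *http.Request) (*url.URL, error) {
		for _, m := range matchers {
			if m.Match(req.URL.Host) {
				return proxyURL, nil
			}
		}
		return nil, nil // nil means: connect directly
	}
}

func main() {
	proxyURL, _ := url.Parse("http://localhost:8080")
	decide := proxyFor([]string{"*.discordapp.com", "discordapp.com"}, proxyURL)
	req, _ := http.NewRequest("POST", "https://discordapp.com/api/webhooks/x", nil)
	u, _ := decide(req)
	fmt.Println(u) // http://localhost:8080
}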
diff --git a/services/webhook/deliver_test.go b/services/webhook/deliver_test.go
new file mode 100644
index 0000000..21af3c7
--- /dev/null
+++ b/services/webhook/deliver_test.go
@@ -0,0 +1,332 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "context"
+ "io"
+ "net/http"
+ "net/http/httptest"
+ "net/url"
+ "os"
+ "strings"
+ "testing"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/unittest"
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/hostmatcher"
+ "code.gitea.io/gitea/modules/setting"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestWebhookProxy(t *testing.T) {
+ oldWebhook := setting.Webhook
+ oldHTTPProxy := os.Getenv("http_proxy")
+ oldHTTPSProxy := os.Getenv("https_proxy")
+ t.Cleanup(func() {
+ setting.Webhook = oldWebhook
+ os.Setenv("http_proxy", oldHTTPProxy)
+ os.Setenv("https_proxy", oldHTTPSProxy)
+ })
+ os.Unsetenv("http_proxy")
+ os.Unsetenv("https_proxy")
+
+ setting.Webhook.ProxyURL = "http://localhost:8080"
+ setting.Webhook.ProxyURLFixed, _ = url.Parse(setting.Webhook.ProxyURL)
+ setting.Webhook.ProxyHosts = []string{"*.discordapp.com", "discordapp.com"}
+
+ allowedHostMatcher := hostmatcher.ParseHostMatchList("webhook.ALLOWED_HOST_LIST", "discordapp.com,s.discordapp.com")
+
+ tests := []struct {
+ req string
+ want string
+ wantErr bool
+ }{
+ {
+ req: "https://discordapp.com/api/webhooks/xxxxxxxxx/xxxxxxxxxxxxxxxxxxx",
+ want: "http://localhost:8080",
+ wantErr: false,
+ },
+ {
+ req: "http://s.discordapp.com/assets/xxxxxx",
+ want: "http://localhost:8080",
+ wantErr: false,
+ },
+ {
+ req: "http://github.com/a/b",
+ want: "",
+ wantErr: false,
+ },
+ {
+ req: "http://www.discordapp.com/assets/xxxxxx",
+ want: "",
+ wantErr: true,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.req, func(t *testing.T) {
+ req, err := http.NewRequest("POST", tt.req, nil)
+ require.NoError(t, err)
+
+ u, err := webhookProxy(allowedHostMatcher)(req)
+ if tt.wantErr {
+ require.Error(t, err)
+ return
+ }
+
+ require.NoError(t, err)
+
+ got := ""
+ if u != nil {
+ got = u.String()
+ }
+ assert.Equal(t, tt.want, got)
+ })
+ }
+}
+
+func TestWebhookDeliverAuthorizationHeader(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ done := make(chan struct{}, 1)
+ s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ assert.Equal(t, "/webhook", r.URL.Path)
+ assert.Equal(t, "Bearer s3cr3t-t0ken", r.Header.Get("Authorization"))
+ w.WriteHeader(200)
+ done <- struct{}{}
+ }))
+ t.Cleanup(s.Close)
+
+ hook := &webhook_model.Webhook{
+ RepoID: 3,
+ URL: s.URL + "/webhook",
+ ContentType: webhook_model.ContentTypeJSON,
+ IsActive: true,
+ Type: webhook_module.GITEA,
+ }
+ err := hook.SetHeaderAuthorization("Bearer s3cr3t-t0ken")
+ require.NoError(t, err)
+ require.NoError(t, webhook_model.CreateWebhook(db.DefaultContext, hook))
+
+ hookTask := &webhook_model.HookTask{
+ HookID: hook.ID,
+ EventType: webhook_module.HookEventPush,
+ PayloadVersion: 2,
+ }
+
+ hookTask, err = webhook_model.CreateHookTask(db.DefaultContext, hookTask)
+ require.NoError(t, err)
+ assert.NotNil(t, hookTask)
+
+ require.NoError(t, Deliver(context.Background(), hookTask))
+ select {
+ case <-done:
+ case <-time.After(5 * time.Second):
+ t.Fatal("waited to long for request to happen")
+ }
+
+ assert.True(t, hookTask.IsSucceed)
+ assert.Equal(t, "Bearer ******", hookTask.RequestInfo.Headers["Authorization"])
+}
+
+func TestWebhookDeliverHookTask(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ done := make(chan struct{}, 1)
+ s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ assert.Equal(t, "PUT", r.Method)
+ switch r.URL.Path {
+ case "/webhook/66d222a5d6349e1311f551e50722d837e30fce98":
+ // Version 1
+ assert.Equal(t, "push", r.Header.Get("X-GitHub-Event"))
+ assert.Equal(t, "", r.Header.Get("Content-Type"))
+ body, err := io.ReadAll(r.Body)
+ require.NoError(t, err)
+ assert.Equal(t, `{"data": 42}`, string(body))
+
+ case "/webhook/6db5dc1e282529a8c162c7fe93dd2667494eeb51":
+ // Version 2
+ assert.Equal(t, "application/json", r.Header.Get("Content-Type"))
+ body, err := io.ReadAll(r.Body)
+ require.NoError(t, err)
+ assert.Len(t, body, 2147)
+
+ default:
+ w.WriteHeader(404)
+ t.Fatalf("unexpected url path %s", r.URL.Path)
+ return
+ }
+ w.WriteHeader(200)
+ done <- struct{}{}
+ }))
+ t.Cleanup(s.Close)
+
+ hook := &webhook_model.Webhook{
+ RepoID: 3,
+ IsActive: true,
+ Type: webhook_module.MATRIX,
+ URL: s.URL + "/webhook",
+ HTTPMethod: "PUT",
+ ContentType: webhook_model.ContentTypeJSON,
+ Meta: `{"message_type":0}`, // text
+ }
+ require.NoError(t, webhook_model.CreateWebhook(db.DefaultContext, hook))
+
+ t.Run("Version 1", func(t *testing.T) {
+ hookTask := &webhook_model.HookTask{
+ HookID: hook.ID,
+ EventType: webhook_module.HookEventPush,
+ PayloadContent: `{"data": 42}`,
+ PayloadVersion: 1,
+ }
+
+ hookTask, err := webhook_model.CreateHookTask(db.DefaultContext, hookTask)
+ require.NoError(t, err)
+ assert.NotNil(t, hookTask)
+
+ require.NoError(t, Deliver(context.Background(), hookTask))
+ select {
+ case <-done:
+ case <-time.After(5 * time.Second):
+ t.Fatal("waited to long for request to happen")
+ }
+
+ assert.True(t, hookTask.IsSucceed)
+ })
+
+ t.Run("Version 2", func(t *testing.T) {
+ p := pushTestPayload()
+ data, err := p.JSONPayload()
+ require.NoError(t, err)
+
+ hookTask := &webhook_model.HookTask{
+ HookID: hook.ID,
+ EventType: webhook_module.HookEventPush,
+ PayloadContent: string(data),
+ PayloadVersion: 2,
+ }
+
+ hookTask, err = webhook_model.CreateHookTask(db.DefaultContext, hookTask)
+ require.NoError(t, err)
+ assert.NotNil(t, hookTask)
+
+ require.NoError(t, Deliver(context.Background(), hookTask))
+ select {
+ case <-done:
+ case <-time.After(5 * time.Second):
+ t.Fatal("waited to long for request to happen")
+ }
+
+ assert.True(t, hookTask.IsSucceed)
+ })
+}
+
+func TestWebhookDeliverSpecificTypes(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ type hookCase struct {
+ gotBody chan []byte
+ expectedMethod string
+ }
+
+ cases := map[string]hookCase{
+ webhook_module.SLACK: {
+ gotBody: make(chan []byte, 1),
+ },
+ webhook_module.DISCORD: {
+ gotBody: make(chan []byte, 1),
+ },
+ webhook_module.DINGTALK: {
+ gotBody: make(chan []byte, 1),
+ },
+ webhook_module.TELEGRAM: {
+ gotBody: make(chan []byte, 1),
+ },
+ webhook_module.MSTEAMS: {
+ gotBody: make(chan []byte, 1),
+ },
+ webhook_module.FEISHU: {
+ gotBody: make(chan []byte, 1),
+ },
+ webhook_module.MATRIX: {
+ gotBody: make(chan []byte, 1),
+ expectedMethod: "PUT",
+ },
+ webhook_module.WECHATWORK: {
+ gotBody: make(chan []byte, 1),
+ },
+ webhook_module.PACKAGIST: {
+ gotBody: make(chan []byte, 1),
+ },
+ }
+
+ s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ assert.Equal(t, "application/json", r.Header.Get("Content-Type"), r.URL.Path)
+
+ typ := strings.Split(r.URL.Path, "/")[1] // take first segment (after skipping leading slash)
+ hc := cases[typ]
+
+ if hc.expectedMethod != "" {
+ assert.Equal(t, hc.expectedMethod, r.Method, r.URL.Path)
+ } else {
+ assert.Equal(t, "POST", r.Method, r.URL.Path)
+ }
+
+ require.NotNil(t, hc.gotBody, r.URL.Path)
+ body, err := io.ReadAll(r.Body)
+ require.NoError(t, err)
+ w.WriteHeader(200)
+ hc.gotBody <- body
+ }))
+ t.Cleanup(s.Close)
+
+ p := pushTestPayload()
+ data, err := p.JSONPayload()
+ require.NoError(t, err)
+
+ for typ, hc := range cases {
+ typ := typ
+ hc := hc
+ t.Run(typ, func(t *testing.T) {
+ t.Parallel()
+ hook := &webhook_model.Webhook{
+ RepoID: 3,
+ IsActive: true,
+ Type: typ,
+ URL: s.URL + "/" + typ,
+ HTTPMethod: "", // should fallback to POST, when left unset by the specific hook
+ ContentType: 0, // set to 0 so that falling back to default request fails with "invalid content type"
+ Meta: "{}",
+ }
+ require.NoError(t, webhook_model.CreateWebhook(db.DefaultContext, hook))
+
+ hookTask := &webhook_model.HookTask{
+ HookID: hook.ID,
+ EventType: webhook_module.HookEventPush,
+ PayloadContent: string(data),
+ PayloadVersion: 2,
+ }
+
+ hookTask, err := webhook_model.CreateHookTask(db.DefaultContext, hookTask)
+ require.NoError(t, err)
+ assert.NotNil(t, hookTask)
+
+ require.NoError(t, Deliver(context.Background(), hookTask))
+ select {
+ case gotBody := <-hc.gotBody:
+ assert.NotEqual(t, string(data), string(gotBody), "request body must be different from the event payload")
+ assert.Equal(t, hookTask.RequestInfo.Body, string(gotBody), "request body was not saved")
+ case <-time.After(5 * time.Second):
+ t.Fatal("waited to long for request to happen")
+ }
+
+ assert.True(t, hookTask.IsSucceed)
+ })
+ }
+}
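TestWebhookDeliverSpecificTypes runs its subtests in parallel against a single test server; a buffered channel per case lets the shared handler hand each received body back to the right subtest, keyed by the first URL path segment. A condensed sketch of that dispatch pattern:

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
	"strings"
)

func main() {
	cases := map[string]chan []byte{
		"slack":   make(chan []byte, 1),
		"discord": make(chan []byte, 1),
	}

	s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		typ := strings.Split(r.URL.Path, "/")[1] // first segment selects the case
		body, _ := io.ReadAll(r.Body)
		w.WriteHeader(200)
		cases[typ] <- body
	}))
	defer s.Close()

	resp, _ := http.Post(s.URL+"/slack", "application/json", strings.NewReader(`{"text":"hi"}`))
	resp.Body.Close()
	fmt.Println(string(<-cases["slack"])) // {"text":"hi"}
}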
diff --git a/services/webhook/dingtalk.go b/services/webhook/dingtalk.go
new file mode 100644
index 0000000..899c5b2
--- /dev/null
+++ b/services/webhook/dingtalk.go
@@ -0,0 +1,232 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "context"
+ "fmt"
+ "html/template"
+ "net/http"
+ "net/url"
+ "strings"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/git"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/webhook/shared"
+)
+
+type dingtalkHandler struct{}
+
+func (dingtalkHandler) Type() webhook_module.HookType { return webhook_module.DINGTALK }
+func (dingtalkHandler) Metadata(*webhook_model.Webhook) any { return nil }
+func (dingtalkHandler) Icon(size int) template.HTML { return shared.ImgIcon("dingtalk.ico", size) }
+
+func (dingtalkHandler) UnmarshalForm(bind func(any)) forms.WebhookForm {
+ var form struct {
+ forms.WebhookCoreForm
+ PayloadURL string `binding:"Required;ValidUrl"`
+ }
+ bind(&form)
+
+ return forms.WebhookForm{
+ WebhookCoreForm: form.WebhookCoreForm,
+ URL: form.PayloadURL,
+ ContentType: webhook_model.ContentTypeJSON,
+ Secret: "",
+ HTTPMethod: http.MethodPost,
+ Metadata: nil,
+ }
+}
+
+type (
+ // DingtalkPayload represents a DingTalk payload.
+ DingtalkPayload struct {
+ MsgType string `json:"msgtype"`
+ Text struct {
+ Content string `json:"content"`
+ } `json:"text"`
+ ActionCard DingtalkActionCard `json:"actionCard"`
+ }
+
+ DingtalkActionCard struct {
+ Text string `json:"text"`
+ Title string `json:"title"`
+ HideAvatar string `json:"hideAvatar"`
+ SingleTitle string `json:"singleTitle"`
+ SingleURL string `json:"singleURL"`
+ }
+)
+
+// Create implements PayloadConvertor Create method
+func (dc dingtalkConvertor) Create(p *api.CreatePayload) (DingtalkPayload, error) {
+ // created tag/branch
+ refName := git.RefName(p.Ref).ShortName()
+ title := fmt.Sprintf("[%s] %s %s created", p.Repo.FullName, p.RefType, refName)
+
+ return createDingtalkPayload(title, title, fmt.Sprintf("view ref %s", refName), p.Repo.HTMLURL+"/src/"+util.PathEscapeSegments(refName)), nil
+}
+
+// Delete implements PayloadConvertor Delete method
+func (dc dingtalkConvertor) Delete(p *api.DeletePayload) (DingtalkPayload, error) {
+ // deleted tag/branch
+ refName := git.RefName(p.Ref).ShortName()
+ title := fmt.Sprintf("[%s] %s %s deleted", p.Repo.FullName, p.RefType, refName)
+
+ return createDingtalkPayload(title, title, fmt.Sprintf("view ref %s", refName), p.Repo.HTMLURL+"/src/"+util.PathEscapeSegments(refName)), nil
+}
+
+// Fork implements PayloadConvertor Fork method
+func (dc dingtalkConvertor) Fork(p *api.ForkPayload) (DingtalkPayload, error) {
+ title := fmt.Sprintf("%s is forked to %s", p.Forkee.FullName, p.Repo.FullName)
+
+ return createDingtalkPayload(title, title, fmt.Sprintf("view forked repo %s", p.Repo.FullName), p.Repo.HTMLURL), nil
+}
+
+// Push implements PayloadConvertor Push method
+func (dc dingtalkConvertor) Push(p *api.PushPayload) (DingtalkPayload, error) {
+ var (
+ branchName = git.RefName(p.Ref).ShortName()
+ commitDesc string
+ )
+
+ var titleLink, linkText string
+ if p.TotalCommits == 1 {
+ commitDesc = "1 new commit"
+ titleLink = p.Commits[0].URL
+ linkText = "view commit"
+ } else {
+ commitDesc = fmt.Sprintf("%d new commits", p.TotalCommits)
+ titleLink = p.CompareURL
+ linkText = "view commits"
+ }
+ if titleLink == "" {
+ titleLink = p.Repo.HTMLURL + "/src/" + util.PathEscapeSegments(branchName)
+ }
+
+ title := fmt.Sprintf("[%s:%s] %s", p.Repo.FullName, branchName, commitDesc)
+
+ var text string
+ // for each commit, generate attachment text
+ for i, commit := range p.Commits {
+ var authorName string
+ if commit.Author != nil {
+ authorName = " - " + commit.Author.Name
+ }
+ text += fmt.Sprintf("[%s](%s) %s", commit.ID[:7], commit.URL,
+ strings.TrimRight(commit.Message, "\r\n")) + authorName
+ // add linebreak to each commit but the last
+ if i < len(p.Commits)-1 {
+ text += "\r\n"
+ }
+ }
+
+ return createDingtalkPayload(title, text, linkText, titleLink), nil
+}
+
+// Issue implements PayloadConvertor Issue method
+func (dc dingtalkConvertor) Issue(p *api.IssuePayload) (DingtalkPayload, error) {
+ text, issueTitle, attachmentText, _ := getIssuesPayloadInfo(p, noneLinkFormatter, true)
+
+ return createDingtalkPayload(issueTitle, text+"\r\n\r\n"+attachmentText, "view issue", p.Issue.HTMLURL), nil
+}
+
+// Wiki implements PayloadConvertor Wiki method
+func (dc dingtalkConvertor) Wiki(p *api.WikiPayload) (DingtalkPayload, error) {
+ text, _, _ := getWikiPayloadInfo(p, noneLinkFormatter, true)
+ url := p.Repository.HTMLURL + "/wiki/" + url.PathEscape(p.Page)
+
+ return createDingtalkPayload(text, text, "view wiki", url), nil
+}
+
+// IssueComment implements PayloadConvertor IssueComment method
+func (dc dingtalkConvertor) IssueComment(p *api.IssueCommentPayload) (DingtalkPayload, error) {
+ text, issueTitle, _ := getIssueCommentPayloadInfo(p, noneLinkFormatter, true)
+
+ return createDingtalkPayload(issueTitle, text+"\r\n\r\n"+p.Comment.Body, "view issue comment", p.Comment.HTMLURL), nil
+}
+
+// PullRequest implements PayloadConvertor PullRequest method
+func (dc dingtalkConvertor) PullRequest(p *api.PullRequestPayload) (DingtalkPayload, error) {
+ text, issueTitle, attachmentText, _ := getPullRequestPayloadInfo(p, noneLinkFormatter, true)
+
+ return createDingtalkPayload(issueTitle, text+"\r\n\r\n"+attachmentText, "view pull request", p.PullRequest.HTMLURL), nil
+}
+
+// Review implements PayloadConvertor Review method
+func (dc dingtalkConvertor) Review(p *api.PullRequestPayload, event webhook_module.HookEventType) (DingtalkPayload, error) {
+ var text, title string
+ if p.Action == api.HookIssueReviewed {
+ action, err := parseHookPullRequestEventType(event)
+ if err != nil {
+ return DingtalkPayload{}, err
+ }
+
+ title = fmt.Sprintf("[%s] Pull request review %s : #%d %s", p.Repository.FullName, action, p.Index, p.PullRequest.Title)
+ text = p.Review.Content
+ }
+
+ return createDingtalkPayload(title, title+"\r\n\r\n"+text, "view pull request", p.PullRequest.HTMLURL), nil
+}
+
+// Repository implements PayloadConvertor Repository method
+func (dc dingtalkConvertor) Repository(p *api.RepositoryPayload) (DingtalkPayload, error) {
+ switch p.Action {
+ case api.HookRepoCreated:
+ title := fmt.Sprintf("[%s] Repository created", p.Repository.FullName)
+ return createDingtalkPayload(title, title, "view repository", p.Repository.HTMLURL), nil
+ case api.HookRepoDeleted:
+ title := fmt.Sprintf("[%s] Repository deleted", p.Repository.FullName)
+ return DingtalkPayload{
+ MsgType: "text",
+ Text: struct {
+ Content string `json:"content"`
+ }{
+ Content: title,
+ },
+ }, nil
+ }
+
+ return DingtalkPayload{}, nil
+}
+
+// Release implements PayloadConvertor Release method
+func (dc dingtalkConvertor) Release(p *api.ReleasePayload) (DingtalkPayload, error) {
+ text, _ := getReleasePayloadInfo(p, noneLinkFormatter, true)
+
+ return createDingtalkPayload(text, text, "view release", p.Release.HTMLURL), nil
+}
+
+func (dc dingtalkConvertor) Package(p *api.PackagePayload) (DingtalkPayload, error) {
+ text, _ := getPackagePayloadInfo(p, noneLinkFormatter, true)
+
+ return createDingtalkPayload(text, text, "view package", p.Package.HTMLURL), nil
+}
+
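+// createDingtalkPayload builds an actionCard message: title and text form the card body, singleTitle/singleURL the tap-through button.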
+func createDingtalkPayload(title, text, singleTitle, singleURL string) DingtalkPayload {
+ return DingtalkPayload{
+ MsgType: "actionCard",
+ ActionCard: DingtalkActionCard{
+ Text: strings.TrimSpace(text),
+ Title: strings.TrimSpace(title),
+ HideAvatar: "0",
+ SingleTitle: singleTitle,
+
+ // https://developers.dingtalk.com/document/app/message-link-description
+ // wrapping the link in this scheme makes it open in the system browser; otherwise the page is rendered inside the DingTalk client, where non-public URLs are hard to reach.
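+ // e.g. "https://example.com/a/b" becomes "dingtalk://dingtalkclient/page/link?pc_slide=false&url=https%3A%2F%2Fexample.com%2Fa%2Fb"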
+ SingleURL: "dingtalk://dingtalkclient/page/link?pc_slide=false&url=" + url.QueryEscape(singleURL),
+ },
+ }
+}
+
+type dingtalkConvertor struct{}
+
+var _ shared.PayloadConvertor[DingtalkPayload] = dingtalkConvertor{}
+
+func (dingtalkHandler) NewRequest(ctx context.Context, w *webhook_model.Webhook, t *webhook_model.HookTask) (*http.Request, []byte, error) {
+ return shared.NewJSONRequest(dingtalkConvertor{}, w, t, true)
+}
diff --git a/services/webhook/dingtalk_test.go b/services/webhook/dingtalk_test.go
new file mode 100644
index 0000000..d0a2d48
--- /dev/null
+++ b/services/webhook/dingtalk_test.go
@@ -0,0 +1,252 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "context"
+ "net/url"
+ "testing"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/json"
+ api "code.gitea.io/gitea/modules/structs"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestDingTalkPayload(t *testing.T) {
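+ // parseRealSingleURL unwraps the dingtalk://dingtalkclient deep link and returns its underlying url query parameter.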
+ parseRealSingleURL := func(singleURL string) string {
+ if u, err := url.Parse(singleURL); err == nil {
+ assert.Equal(t, "dingtalk", u.Scheme)
+ assert.Equal(t, "dingtalkclient", u.Host)
+ assert.Equal(t, "/page/link", u.Path)
+ return u.Query().Get("url")
+ }
+ return ""
+ }
+ dc := dingtalkConvertor{}
+
+ t.Run("Create", func(t *testing.T) {
+ p := createTestPayload()
+
+ pl, err := dc.Create(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] branch test created", pl.ActionCard.Text)
+ assert.Equal(t, "[test/repo] branch test created", pl.ActionCard.Title)
+ assert.Equal(t, "view ref test", pl.ActionCard.SingleTitle)
+ assert.Equal(t, "http://localhost:3000/test/repo/src/test", parseRealSingleURL(pl.ActionCard.SingleURL))
+ })
+
+ t.Run("Delete", func(t *testing.T) {
+ p := deleteTestPayload()
+
+ pl, err := dc.Delete(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] branch test deleted", pl.ActionCard.Text)
+ assert.Equal(t, "[test/repo] branch test deleted", pl.ActionCard.Title)
+ assert.Equal(t, "view ref test", pl.ActionCard.SingleTitle)
+ assert.Equal(t, "http://localhost:3000/test/repo/src/test", parseRealSingleURL(pl.ActionCard.SingleURL))
+ })
+
+ t.Run("Fork", func(t *testing.T) {
+ p := forkTestPayload()
+
+ pl, err := dc.Fork(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "test/repo2 is forked to test/repo", pl.ActionCard.Text)
+ assert.Equal(t, "test/repo2 is forked to test/repo", pl.ActionCard.Title)
+ assert.Equal(t, "view forked repo test/repo", pl.ActionCard.SingleTitle)
+ assert.Equal(t, "http://localhost:3000/test/repo", parseRealSingleURL(pl.ActionCard.SingleURL))
+ })
+
+ t.Run("Push", func(t *testing.T) {
+ p := pushTestPayload()
+
+ pl, err := dc.Push(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1\r\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1", pl.ActionCard.Text)
+ assert.Equal(t, "[test/repo:test] 2 new commits", pl.ActionCard.Title)
+ assert.Equal(t, "view commits", pl.ActionCard.SingleTitle)
+ assert.Equal(t, "http://localhost:3000/test/repo/src/test", parseRealSingleURL(pl.ActionCard.SingleURL))
+ })
+
+ t.Run("Issue", func(t *testing.T) {
+ p := issueTestPayload()
+
+ p.Action = api.HookIssueOpened
+ pl, err := dc.Issue(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] Issue opened: #2 crash by user1\r\n\r\nissue body", pl.ActionCard.Text)
+ assert.Equal(t, "#2 crash", pl.ActionCard.Title)
+ assert.Equal(t, "view issue", pl.ActionCard.SingleTitle)
+ assert.Equal(t, "http://localhost:3000/test/repo/issues/2", parseRealSingleURL(pl.ActionCard.SingleURL))
+
+ p.Action = api.HookIssueClosed
+ pl, err = dc.Issue(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] Issue closed: #2 crash by user1", pl.ActionCard.Text)
+ assert.Equal(t, "#2 crash", pl.ActionCard.Title)
+ assert.Equal(t, "view issue", pl.ActionCard.SingleTitle)
+ assert.Equal(t, "http://localhost:3000/test/repo/issues/2", parseRealSingleURL(pl.ActionCard.SingleURL))
+ })
+
+ t.Run("IssueComment", func(t *testing.T) {
+ p := issueCommentTestPayload()
+
+ pl, err := dc.IssueComment(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] New comment on issue #2 crash by user1\r\n\r\nmore info needed", pl.ActionCard.Text)
+ assert.Equal(t, "#2 crash", pl.ActionCard.Title)
+ assert.Equal(t, "view issue comment", pl.ActionCard.SingleTitle)
+ assert.Equal(t, "http://localhost:3000/test/repo/issues/2#issuecomment-4", parseRealSingleURL(pl.ActionCard.SingleURL))
+ })
+
+ t.Run("PullRequest", func(t *testing.T) {
+ p := pullRequestTestPayload()
+
+ pl, err := dc.PullRequest(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] Pull request opened: #12 Fix bug by user1\r\n\r\nfixes bug #2", pl.ActionCard.Text)
+ assert.Equal(t, "#12 Fix bug", pl.ActionCard.Title)
+ assert.Equal(t, "view pull request", pl.ActionCard.SingleTitle)
+ assert.Equal(t, "http://localhost:3000/test/repo/pulls/12", parseRealSingleURL(pl.ActionCard.SingleURL))
+ })
+
+ t.Run("PullRequestComment", func(t *testing.T) {
+ p := pullRequestCommentTestPayload()
+
+ pl, err := dc.IssueComment(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] New comment on pull request #12 Fix bug by user1\r\n\r\nchanges requested", pl.ActionCard.Text)
+ assert.Equal(t, "#12 Fix bug", pl.ActionCard.Title)
+ assert.Equal(t, "view issue comment", pl.ActionCard.SingleTitle)
+ assert.Equal(t, "http://localhost:3000/test/repo/pulls/12#issuecomment-4", parseRealSingleURL(pl.ActionCard.SingleURL))
+ })
+
+ t.Run("Review", func(t *testing.T) {
+ p := pullRequestTestPayload()
+ p.Action = api.HookIssueReviewed
+
+ pl, err := dc.Review(p, webhook_module.HookEventPullRequestReviewApproved)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] Pull request review approved : #12 Fix bug\r\n\r\ngood job", pl.ActionCard.Text)
+ assert.Equal(t, "[test/repo] Pull request review approved : #12 Fix bug", pl.ActionCard.Title)
+ assert.Equal(t, "view pull request", pl.ActionCard.SingleTitle)
+ assert.Equal(t, "http://localhost:3000/test/repo/pulls/12", parseRealSingleURL(pl.ActionCard.SingleURL))
+ })
+
+ t.Run("Repository", func(t *testing.T) {
+ p := repositoryTestPayload()
+
+ pl, err := dc.Repository(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] Repository created", pl.ActionCard.Text)
+ assert.Equal(t, "[test/repo] Repository created", pl.ActionCard.Title)
+ assert.Equal(t, "view repository", pl.ActionCard.SingleTitle)
+ assert.Equal(t, "http://localhost:3000/test/repo", parseRealSingleURL(pl.ActionCard.SingleURL))
+ })
+
+ t.Run("Package", func(t *testing.T) {
+ p := packageTestPayload()
+
+ pl, err := dc.Package(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "Package created: GiteaContainer:latest by user1", pl.ActionCard.Text)
+ assert.Equal(t, "Package created: GiteaContainer:latest by user1", pl.ActionCard.Title)
+ assert.Equal(t, "view package", pl.ActionCard.SingleTitle)
+ assert.Equal(t, "http://localhost:3000/user1/-/packages/container/GiteaContainer/latest", parseRealSingleURL(pl.ActionCard.SingleURL))
+ })
+
+ t.Run("Wiki", func(t *testing.T) {
+ p := wikiTestPayload()
+
+ p.Action = api.HookWikiCreated
+ pl, err := dc.Wiki(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] New wiki page 'index' (Wiki change comment) by user1", pl.ActionCard.Text)
+ assert.Equal(t, "[test/repo] New wiki page 'index' (Wiki change comment) by user1", pl.ActionCard.Title)
+ assert.Equal(t, "view wiki", pl.ActionCard.SingleTitle)
+ assert.Equal(t, "http://localhost:3000/test/repo/wiki/index", parseRealSingleURL(pl.ActionCard.SingleURL))
+
+ p.Action = api.HookWikiEdited
+ pl, err = dc.Wiki(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] Wiki page 'index' edited (Wiki change comment) by user1", pl.ActionCard.Text)
+ assert.Equal(t, "[test/repo] Wiki page 'index' edited (Wiki change comment) by user1", pl.ActionCard.Title)
+ assert.Equal(t, "view wiki", pl.ActionCard.SingleTitle)
+ assert.Equal(t, "http://localhost:3000/test/repo/wiki/index", parseRealSingleURL(pl.ActionCard.SingleURL))
+
+ p.Action = api.HookWikiDeleted
+ pl, err = dc.Wiki(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] Wiki page 'index' deleted by user1", pl.ActionCard.Text)
+ assert.Equal(t, "[test/repo] Wiki page 'index' deleted by user1", pl.ActionCard.Title)
+ assert.Equal(t, "view wiki", pl.ActionCard.SingleTitle)
+ assert.Equal(t, "http://localhost:3000/test/repo/wiki/index", parseRealSingleURL(pl.ActionCard.SingleURL))
+ })
+
+ t.Run("Release", func(t *testing.T) {
+ p := pullReleaseTestPayload()
+
+ pl, err := dc.Release(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] Release created: v1.0 by user1", pl.ActionCard.Text)
+ assert.Equal(t, "[test/repo] Release created: v1.0 by user1", pl.ActionCard.Title)
+ assert.Equal(t, "view release", pl.ActionCard.SingleTitle)
+ assert.Equal(t, "http://localhost:3000/test/repo/releases/tag/v1.0", parseRealSingleURL(pl.ActionCard.SingleURL))
+ })
+}
+
+func TestDingTalkJSONPayload(t *testing.T) {
+ p := pushTestPayload()
+ data, err := p.JSONPayload()
+ require.NoError(t, err)
+
+ hook := &webhook_model.Webhook{
+ RepoID: 3,
+ IsActive: true,
+ Type: webhook_module.DINGTALK,
+ URL: "https://dingtalk.example.com/",
+ Meta: ``,
+ HTTPMethod: "POST",
+ }
+ task := &webhook_model.HookTask{
+ HookID: hook.ID,
+ EventType: webhook_module.HookEventPush,
+ PayloadContent: string(data),
+ PayloadVersion: 2,
+ }
+
+ req, reqBody, err := dingtalkHandler{}.NewRequest(context.Background(), hook, task)
+ require.NotNil(t, req)
+ require.NotNil(t, reqBody)
+ require.NoError(t, err)
+
+ assert.Equal(t, "POST", req.Method)
+ assert.Equal(t, "https://dingtalk.example.com/", req.URL.String())
+ assert.Equal(t, "sha256=", req.Header.Get("X-Hub-Signature-256"))
+ assert.Equal(t, "application/json", req.Header.Get("Content-Type"))
+ var body DingtalkPayload
+ err = json.NewDecoder(req.Body).Decode(&body)
+ require.NoError(t, err)
+ assert.Equal(t, "[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1\r\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1", body.ActionCard.Text)
+}
diff --git a/services/webhook/discord.go b/services/webhook/discord.go
new file mode 100644
index 0000000..b0142b8
--- /dev/null
+++ b/services/webhook/discord.go
@@ -0,0 +1,367 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "html/template"
+ "net/http"
+ "net/url"
+ "strconv"
+ "strings"
+ "unicode/utf8"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ gitea_context "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/webhook/shared"
+
+ "gitea.com/go-chi/binding"
+)
+
+type discordHandler struct{}
+
+func (discordHandler) Type() webhook_module.HookType { return webhook_module.DISCORD }
+func (discordHandler) Icon(size int) template.HTML { return shared.ImgIcon("discord.png", size) }
+
+type discordForm struct {
+ forms.WebhookCoreForm
+ PayloadURL string `binding:"Required;ValidUrl"`
+ Username string `binding:"Required;MaxSize(80)"`
+ IconURL string `binding:"ValidUrl"`
+}
+
+var _ binding.Validator = &discordForm{}
+
+// Validate implements binding.Validator.
+func (d *discordForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := gitea_context.GetWebContext(req)
+ if len([]rune(d.IconURL)) > 2048 {
+ errs = append(errs, binding.Error{
+ FieldNames: []string{"IconURL"},
+ Message: ctx.Locale.TrString("repo.settings.discord_icon_url.exceeds_max_length"),
+ })
+ }
+ return errs
+}
+
+func (discordHandler) UnmarshalForm(bind func(any)) forms.WebhookForm {
+ var form discordForm
+ bind(&form)
+
+ return forms.WebhookForm{
+ WebhookCoreForm: form.WebhookCoreForm,
+ URL: form.PayloadURL,
+ ContentType: webhook_model.ContentTypeJSON,
+ Secret: "",
+ HTTPMethod: http.MethodPost,
+ Metadata: &DiscordMeta{
+ Username: form.Username,
+ IconURL: form.IconURL,
+ },
+ }
+}
+
+type (
+ // DiscordEmbedFooter for Embed Footer Structure.
+ DiscordEmbedFooter struct {
+ Text string `json:"text,omitempty"`
+ }
+
+ // DiscordEmbedAuthor for Embed Author Structure
+ DiscordEmbedAuthor struct {
+ Name string `json:"name"`
+ URL string `json:"url"`
+ IconURL string `json:"icon_url"`
+ }
+
+ // DiscordEmbedField for Embed Field Structure
+ DiscordEmbedField struct {
+ Name string `json:"name"`
+ Value string `json:"value"`
+ }
+
+ // DiscordEmbed is for Embed Structure
+ DiscordEmbed struct {
+ Title string `json:"title"`
+ Description string `json:"description"`
+ URL string `json:"url"`
+ Color int `json:"color"`
+ Footer DiscordEmbedFooter `json:"footer"`
+ Author DiscordEmbedAuthor `json:"author"`
+ Fields []DiscordEmbedField `json:"fields,omitempty"`
+ }
+
+ // DiscordPayload represents the payload sent to a Discord webhook
+ DiscordPayload struct {
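+ // Wait, Content and TTS are never serialized (json:"-"); the embeds carry all of the message content.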
+ Wait bool `json:"-"`
+ Content string `json:"-"`
+ Username string `json:"username"`
+ AvatarURL string `json:"avatar_url,omitempty"`
+ TTS bool `json:"-"`
+ Embeds []DiscordEmbed `json:"embeds"`
+ }
+
+ // DiscordMeta contains the discord metadata
+ DiscordMeta struct {
+ Username string `json:"username"`
+ IconURL string `json:"icon_url"`
+ }
+)
+
+// Metadata returns discord metadata
+func (discordHandler) Metadata(w *webhook_model.Webhook) any {
+ s := &DiscordMeta{}
+ if err := json.Unmarshal([]byte(w.Meta), s); err != nil {
+ log.Error("discordHandler.Metadata(%d): %v", w.ID, err)
+ }
+ return s
+}
+
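+// color converts a hex RGB string such as "1ac600" into the integer form used by Discord embeds, falling back to 0 on invalid input.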
+func color(clr string) int {
+ if clr != "" {
+ clr = strings.TrimLeft(clr, "#")
+ if s, err := strconv.ParseInt(clr, 16, 32); err == nil {
+ return int(s)
+ }
+ }
+
+ return 0
+}
+
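+// embed colors shared by the event convertors in this package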
+var (
+ greenColor = color("1ac600")
+ greenColorLight = color("bfe5bf")
+ yellowColor = color("ffd930")
+ greyColor = color("4f545c")
+ purpleColor = color("7289da")
+ orangeColor = color("eb6420")
+ orangeColorLight = color("e68d60")
+ redColor = color("ff3232")
+)
+
+// Create implements PayloadConvertor Create method
+func (d discordConvertor) Create(p *api.CreatePayload) (DiscordPayload, error) {
+ // created tag/branch
+ refName := git.RefName(p.Ref).ShortName()
+ title := fmt.Sprintf("[%s] %s %s created", p.Repo.FullName, p.RefType, refName)
+
+ return d.createPayload(p.Sender, title, "", p.Repo.HTMLURL+"/src/"+util.PathEscapeSegments(refName), greenColor), nil
+}
+
+// Delete implements PayloadConvertor Delete method
+func (d discordConvertor) Delete(p *api.DeletePayload) (DiscordPayload, error) {
+ // deleted tag/branch
+ refName := git.RefName(p.Ref).ShortName()
+ title := fmt.Sprintf("[%s] %s %s deleted", p.Repo.FullName, p.RefType, refName)
+
+ return d.createPayload(p.Sender, title, "", p.Repo.HTMLURL+"/src/"+util.PathEscapeSegments(refName), redColor), nil
+}
+
+// Fork implements PayloadConvertor Fork method
+func (d discordConvertor) Fork(p *api.ForkPayload) (DiscordPayload, error) {
+ title := fmt.Sprintf("%s is forked to %s", p.Forkee.FullName, p.Repo.FullName)
+
+ return d.createPayload(p.Sender, title, "", p.Repo.HTMLURL, greenColor), nil
+}
+
+// Push implements PayloadConvertor Push method
+func (d discordConvertor) Push(p *api.PushPayload) (DiscordPayload, error) {
+ var (
+ branchName = git.RefName(p.Ref).ShortName()
+ commitDesc string
+ )
+
+ var titleLink string
+ if p.TotalCommits == 1 {
+ commitDesc = "1 new commit"
+ titleLink = p.Commits[0].URL
+ } else {
+ commitDesc = fmt.Sprintf("%d new commits", p.TotalCommits)
+ titleLink = p.CompareURL
+ }
+ if titleLink == "" {
+ titleLink = p.Repo.HTMLURL + "/src/" + util.PathEscapeSegments(branchName)
+ }
+
+ title := fmt.Sprintf("[%s:%s] %s", p.Repo.FullName, branchName, commitDesc)
+
+ var text string
+ // for each commit, generate attachment text
+ for i, commit := range p.Commits {
+ // limit the commit message display to just the summary (first line), otherwise it would be hard to read
+ message := strings.TrimRight(strings.SplitN(commit.Message, "\n", 2)[0], "\r")
+
+ // a limit of 50 is set because GitHub does the same
+ if utf8.RuneCountInString(message) > 50 {
+ message = fmt.Sprintf("%.47s...", message)
+ }
+ text += fmt.Sprintf("[%s](%s) %s - %s", commit.ID[:7], commit.URL, message, commit.Author.Name)
+ // add linebreak to each commit but the last
+ if i < len(p.Commits)-1 {
+ text += "\n"
+ }
+ }
+
+ return d.createPayload(p.Sender, title, text, titleLink, greenColor), nil
+}
+
+// Issue implements PayloadConvertor Issue method
+func (d discordConvertor) Issue(p *api.IssuePayload) (DiscordPayload, error) {
+ title, _, text, color := getIssuesPayloadInfo(p, noneLinkFormatter, false)
+
+ return d.createPayload(p.Sender, title, text, p.Issue.HTMLURL, color), nil
+}
+
+// IssueComment implements PayloadConvertor IssueComment method
+func (d discordConvertor) IssueComment(p *api.IssueCommentPayload) (DiscordPayload, error) {
+ title, _, color := getIssueCommentPayloadInfo(p, noneLinkFormatter, false)
+
+ return d.createPayload(p.Sender, title, p.Comment.Body, p.Comment.HTMLURL, color), nil
+}
+
+// PullRequest implements PayloadConvertor PullRequest method
+func (d discordConvertor) PullRequest(p *api.PullRequestPayload) (DiscordPayload, error) {
+ title, _, text, color := getPullRequestPayloadInfo(p, noneLinkFormatter, false)
+
+ return d.createPayload(p.Sender, title, text, p.PullRequest.HTMLURL, color), nil
+}
+
+// Review implements PayloadConvertor Review method
+func (d discordConvertor) Review(p *api.PullRequestPayload, event webhook_module.HookEventType) (DiscordPayload, error) {
+ var text, title string
+ var color int
+ if p.Action == api.HookIssueReviewed {
+ action, err := parseHookPullRequestEventType(event)
+ if err != nil {
+ return DiscordPayload{}, err
+ }
+
+ title = fmt.Sprintf("[%s] Pull request review %s: #%d %s", p.Repository.FullName, action, p.Index, p.PullRequest.Title)
+ text = p.Review.Content
+
+ switch event {
+ case webhook_module.HookEventPullRequestReviewApproved:
+ color = greenColor
+ case webhook_module.HookEventPullRequestReviewRejected:
+ color = redColor
+ case webhook_module.HookEventPullRequestReviewComment:
+ color = greyColor
+ default:
+ color = yellowColor
+ }
+ }
+
+ return d.createPayload(p.Sender, title, text, p.PullRequest.HTMLURL, color), nil
+}
+
+// Repository implements PayloadConvertor Repository method
+func (d discordConvertor) Repository(p *api.RepositoryPayload) (DiscordPayload, error) {
+ var title, url string
+ var color int
+ switch p.Action {
+ case api.HookRepoCreated:
+ title = fmt.Sprintf("[%s] Repository created", p.Repository.FullName)
+ url = p.Repository.HTMLURL
+ color = greenColor
+ case api.HookRepoDeleted:
+ title = fmt.Sprintf("[%s] Repository deleted", p.Repository.FullName)
+ color = redColor
+ }
+
+ return d.createPayload(p.Sender, title, "", url, color), nil
+}
+
+// Wiki implements PayloadConvertor Wiki method
+func (d discordConvertor) Wiki(p *api.WikiPayload) (DiscordPayload, error) {
+ text, color, _ := getWikiPayloadInfo(p, noneLinkFormatter, false)
+ htmlLink := p.Repository.HTMLURL + "/wiki/" + url.PathEscape(p.Page)
+
+ var description string
+ if p.Action != api.HookWikiDeleted {
+ description = p.Comment
+ }
+
+ return d.createPayload(p.Sender, text, description, htmlLink, color), nil
+}
+
+// Release implements PayloadConvertor Release method
+func (d discordConvertor) Release(p *api.ReleasePayload) (DiscordPayload, error) {
+ text, color := getReleasePayloadInfo(p, noneLinkFormatter, false)
+
+ return d.createPayload(p.Sender, text, p.Release.Note, p.Release.HTMLURL, color), nil
+}
+
+func (d discordConvertor) Package(p *api.PackagePayload) (DiscordPayload, error) {
+ text, color := getPackagePayloadInfo(p, noneLinkFormatter, false)
+
+ return d.createPayload(p.Sender, text, "", p.Package.HTMLURL, color), nil
+}
+
+type discordConvertor struct {
+ Username string
+ AvatarURL string
+}
+
+var _ shared.PayloadConvertor[DiscordPayload] = discordConvertor{}
+
+func (discordHandler) NewRequest(ctx context.Context, w *webhook_model.Webhook, t *webhook_model.HookTask) (*http.Request, []byte, error) {
+ meta := &DiscordMeta{}
+ if err := json.Unmarshal([]byte(w.Meta), meta); err != nil {
+ return nil, nil, fmt.Errorf("discordHandler.NewRequest meta json: %w", err)
+ }
+ sc := discordConvertor{
+ Username: meta.Username,
+ AvatarURL: meta.IconURL,
+ }
+ return shared.NewJSONRequest(sc, w, t, true)
+}
+
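+// parseHookPullRequestEventType maps a pull request review event to the verb ("approved", "rejected", "comment") used in message titles.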
+func parseHookPullRequestEventType(event webhook_module.HookEventType) (string, error) {
+ switch event {
+ case webhook_module.HookEventPullRequestReviewApproved:
+ return "approved", nil
+ case webhook_module.HookEventPullRequestReviewRejected:
+ return "rejected", nil
+ case webhook_module.HookEventPullRequestReviewComment:
+ return "comment", nil
+ default:
+ return "", errors.New("unknown event type")
+ }
+}
+
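+// createPayload assembles a single-embed message, clipping the title and description to Discord's 256- and 4096-character embed limits.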
+func (d discordConvertor) createPayload(s *api.User, title, text, url string, color int) DiscordPayload {
+ if len([]rune(title)) > 256 {
+ title = fmt.Sprintf("%.253s...", title)
+ }
+ if len([]rune(text)) > 4096 {
+ text = fmt.Sprintf("%.4093s...", text)
+ }
+ return DiscordPayload{
+ Username: d.Username,
+ AvatarURL: d.AvatarURL,
+ Embeds: []DiscordEmbed{
+ {
+ Title: title,
+ Description: text,
+ URL: url,
+ Color: color,
+ Author: DiscordEmbedAuthor{
+ Name: s.UserName,
+ URL: setting.AppURL + s.UserName,
+ IconURL: s.AvatarURL,
+ },
+ },
+ },
+ }
+}
diff --git a/services/webhook/discord_test.go b/services/webhook/discord_test.go
new file mode 100644
index 0000000..680f780
--- /dev/null
+++ b/services/webhook/discord_test.go
@@ -0,0 +1,348 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "context"
+ "testing"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestDiscordPayload(t *testing.T) {
+ dc := discordConvertor{}
+
+ t.Run("Create", func(t *testing.T) {
+ p := createTestPayload()
+
+ pl, err := dc.Create(p)
+ require.NoError(t, err)
+
+ assert.Len(t, pl.Embeds, 1)
+ assert.Equal(t, "[test/repo] branch test created", pl.Embeds[0].Title)
+ assert.Empty(t, pl.Embeds[0].Description)
+ assert.Equal(t, "http://localhost:3000/test/repo/src/test", pl.Embeds[0].URL)
+ assert.Equal(t, p.Sender.UserName, pl.Embeds[0].Author.Name)
+ assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.Embeds[0].Author.URL)
+ assert.Equal(t, p.Sender.AvatarURL, pl.Embeds[0].Author.IconURL)
+ })
+
+ t.Run("Delete", func(t *testing.T) {
+ p := deleteTestPayload()
+
+ pl, err := dc.Delete(p)
+ require.NoError(t, err)
+
+ assert.Len(t, pl.Embeds, 1)
+ assert.Equal(t, "[test/repo] branch test deleted", pl.Embeds[0].Title)
+ assert.Empty(t, pl.Embeds[0].Description)
+ assert.Equal(t, "http://localhost:3000/test/repo/src/test", pl.Embeds[0].URL)
+ assert.Equal(t, p.Sender.UserName, pl.Embeds[0].Author.Name)
+ assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.Embeds[0].Author.URL)
+ assert.Equal(t, p.Sender.AvatarURL, pl.Embeds[0].Author.IconURL)
+ })
+
+ t.Run("Fork", func(t *testing.T) {
+ p := forkTestPayload()
+
+ pl, err := dc.Fork(p)
+ require.NoError(t, err)
+
+ assert.Len(t, pl.Embeds, 1)
+ assert.Equal(t, "test/repo2 is forked to test/repo", pl.Embeds[0].Title)
+ assert.Empty(t, pl.Embeds[0].Description)
+ assert.Equal(t, "http://localhost:3000/test/repo", pl.Embeds[0].URL)
+ assert.Equal(t, p.Sender.UserName, pl.Embeds[0].Author.Name)
+ assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.Embeds[0].Author.URL)
+ assert.Equal(t, p.Sender.AvatarURL, pl.Embeds[0].Author.IconURL)
+ })
+
+ t.Run("Push", func(t *testing.T) {
+ p := pushTestPayload()
+
+ pl, err := dc.Push(p)
+ require.NoError(t, err)
+
+ assert.Len(t, pl.Embeds, 1)
+ assert.Equal(t, "[test/repo:test] 2 new commits", pl.Embeds[0].Title)
+ assert.Equal(t, "[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1", pl.Embeds[0].Description)
+ assert.Equal(t, "http://localhost:3000/test/repo/src/test", pl.Embeds[0].URL)
+ assert.Equal(t, p.Sender.UserName, pl.Embeds[0].Author.Name)
+ assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.Embeds[0].Author.URL)
+ assert.Equal(t, p.Sender.AvatarURL, pl.Embeds[0].Author.IconURL)
+ })
+
+ t.Run("PushWithLongCommitMessage", func(t *testing.T) {
+ p := pushTestMultilineCommitMessagePayload()
+
+ pl, err := dc.Push(p)
+ require.NoError(t, err)
+
+ assert.Len(t, pl.Embeds, 1)
+ assert.Equal(t, "[test/repo:test] 2 new commits", pl.Embeds[0].Title)
+ assert.Equal(t, "[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) This is a commit summary ⚠️⚠️⚠️⚠️ containing 你好... - user1\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) This is a commit summary ⚠️⚠️⚠️⚠️ containing 你好... - user1", pl.Embeds[0].Description)
+ assert.Equal(t, p.Sender.UserName, pl.Embeds[0].Author.Name)
+ assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.Embeds[0].Author.URL)
+ assert.Equal(t, p.Sender.AvatarURL, pl.Embeds[0].Author.IconURL)
+ })
+
+ t.Run("Issue", func(t *testing.T) {
+ p := issueTestPayload()
+
+ p.Action = api.HookIssueOpened
+ pl, err := dc.Issue(p)
+ require.NoError(t, err)
+
+ assert.Len(t, pl.Embeds, 1)
+ assert.Equal(t, "[test/repo] Issue opened: #2 crash", pl.Embeds[0].Title)
+ assert.Equal(t, "issue body", pl.Embeds[0].Description)
+ assert.Equal(t, "http://localhost:3000/test/repo/issues/2", pl.Embeds[0].URL)
+ assert.Equal(t, p.Sender.UserName, pl.Embeds[0].Author.Name)
+ assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.Embeds[0].Author.URL)
+ assert.Equal(t, p.Sender.AvatarURL, pl.Embeds[0].Author.IconURL)
+
+ p.Action = api.HookIssueClosed
+ pl, err = dc.Issue(p)
+ require.NoError(t, err)
+
+ assert.Len(t, pl.Embeds, 1)
+ assert.Equal(t, "[test/repo] Issue closed: #2 crash", pl.Embeds[0].Title)
+ assert.Empty(t, pl.Embeds[0].Description)
+ assert.Equal(t, "http://localhost:3000/test/repo/issues/2", pl.Embeds[0].URL)
+ assert.Equal(t, p.Sender.UserName, pl.Embeds[0].Author.Name)
+ assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.Embeds[0].Author.URL)
+ assert.Equal(t, p.Sender.AvatarURL, pl.Embeds[0].Author.IconURL)
+
+ j, err := json.Marshal(pl)
+ require.NoError(t, err)
+
+ unsetFields := struct {
+ Content *string `json:"content"`
+ TTS *bool `json:"tts"`
+ Wait *bool `json:"wait"`
+ Fields []any `json:"fields"`
+ Footer struct {
+ Text *string `json:"text"`
+ } `json:"footer"`
+ }{}
+
+ err = json.Unmarshal(j, &unsetFields)
+ require.NoError(t, err)
+ assert.Nil(t, unsetFields.Content)
+ assert.Nil(t, unsetFields.TTS)
+ assert.Nil(t, unsetFields.Wait)
+ assert.Nil(t, unsetFields.Fields)
+ assert.Nil(t, unsetFields.Footer.Text)
+ })
+
+ t.Run("Issue with long title", func(t *testing.T) {
+ p := issueTestPayloadWithLongTitle()
+
+ p.Action = api.HookIssueOpened
+ pl, err := dc.Issue(p)
+ require.NoError(t, err)
+
+ assert.Len(t, pl.Embeds, 1)
+ assert.Len(t, pl.Embeds[0].Title, 256)
+ })
+
+ t.Run("Issue with long body", func(t *testing.T) {
+ p := issueTestPayloadWithLongBody()
+
+ p.Action = api.HookIssueOpened
+ pl, err := dc.Issue(p)
+ require.NoError(t, err)
+
+ assert.Len(t, pl.Embeds, 1)
+ assert.Len(t, pl.Embeds[0].Description, 4096)
+ })
+
+ t.Run("IssueComment", func(t *testing.T) {
+ p := issueCommentTestPayload()
+
+ pl, err := dc.IssueComment(p)
+ require.NoError(t, err)
+
+ assert.Len(t, pl.Embeds, 1)
+ assert.Equal(t, "[test/repo] New comment on issue #2 crash", pl.Embeds[0].Title)
+ assert.Equal(t, "more info needed", pl.Embeds[0].Description)
+ assert.Equal(t, "http://localhost:3000/test/repo/issues/2#issuecomment-4", pl.Embeds[0].URL)
+ assert.Equal(t, p.Sender.UserName, pl.Embeds[0].Author.Name)
+ assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.Embeds[0].Author.URL)
+ assert.Equal(t, p.Sender.AvatarURL, pl.Embeds[0].Author.IconURL)
+ })
+
+ t.Run("PullRequest", func(t *testing.T) {
+ p := pullRequestTestPayload()
+
+ pl, err := dc.PullRequest(p)
+ require.NoError(t, err)
+
+ assert.Len(t, pl.Embeds, 1)
+ assert.Equal(t, "[test/repo] Pull request opened: #12 Fix bug", pl.Embeds[0].Title)
+ assert.Equal(t, "fixes bug #2", pl.Embeds[0].Description)
+ assert.Equal(t, "http://localhost:3000/test/repo/pulls/12", pl.Embeds[0].URL)
+ assert.Equal(t, p.Sender.UserName, pl.Embeds[0].Author.Name)
+ assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.Embeds[0].Author.URL)
+ assert.Equal(t, p.Sender.AvatarURL, pl.Embeds[0].Author.IconURL)
+ })
+
+ t.Run("PullRequestComment", func(t *testing.T) {
+ p := pullRequestCommentTestPayload()
+
+ pl, err := dc.IssueComment(p)
+ require.NoError(t, err)
+
+ assert.Len(t, pl.Embeds, 1)
+ assert.Equal(t, "[test/repo] New comment on pull request #12 Fix bug", pl.Embeds[0].Title)
+ assert.Equal(t, "changes requested", pl.Embeds[0].Description)
+ assert.Equal(t, "http://localhost:3000/test/repo/pulls/12#issuecomment-4", pl.Embeds[0].URL)
+ assert.Equal(t, p.Sender.UserName, pl.Embeds[0].Author.Name)
+ assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.Embeds[0].Author.URL)
+ assert.Equal(t, p.Sender.AvatarURL, pl.Embeds[0].Author.IconURL)
+ })
+
+ t.Run("Review", func(t *testing.T) {
+ p := pullRequestTestPayload()
+ p.Action = api.HookIssueReviewed
+
+ pl, err := dc.Review(p, webhook_module.HookEventPullRequestReviewApproved)
+ require.NoError(t, err)
+
+ assert.Len(t, pl.Embeds, 1)
+ assert.Equal(t, "[test/repo] Pull request review approved: #12 Fix bug", pl.Embeds[0].Title)
+ assert.Equal(t, "good job", pl.Embeds[0].Description)
+ assert.Equal(t, "http://localhost:3000/test/repo/pulls/12", pl.Embeds[0].URL)
+ assert.Equal(t, p.Sender.UserName, pl.Embeds[0].Author.Name)
+ assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.Embeds[0].Author.URL)
+ assert.Equal(t, p.Sender.AvatarURL, pl.Embeds[0].Author.IconURL)
+ })
+
+ t.Run("Repository", func(t *testing.T) {
+ p := repositoryTestPayload()
+
+ pl, err := dc.Repository(p)
+ require.NoError(t, err)
+
+ assert.Len(t, pl.Embeds, 1)
+ assert.Equal(t, "[test/repo] Repository created", pl.Embeds[0].Title)
+ assert.Empty(t, pl.Embeds[0].Description)
+ assert.Equal(t, "http://localhost:3000/test/repo", pl.Embeds[0].URL)
+ assert.Equal(t, p.Sender.UserName, pl.Embeds[0].Author.Name)
+ assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.Embeds[0].Author.URL)
+ assert.Equal(t, p.Sender.AvatarURL, pl.Embeds[0].Author.IconURL)
+ })
+
+ t.Run("Package", func(t *testing.T) {
+ p := packageTestPayload()
+
+ pl, err := dc.Package(p)
+ require.NoError(t, err)
+
+ assert.Len(t, pl.Embeds, 1)
+ assert.Equal(t, "Package created: GiteaContainer:latest", pl.Embeds[0].Title)
+ assert.Empty(t, pl.Embeds[0].Description)
+ assert.Equal(t, "http://localhost:3000/user1/-/packages/container/GiteaContainer/latest", pl.Embeds[0].URL)
+ assert.Equal(t, p.Sender.UserName, pl.Embeds[0].Author.Name)
+ assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.Embeds[0].Author.URL)
+ assert.Equal(t, p.Sender.AvatarURL, pl.Embeds[0].Author.IconURL)
+ })
+
+ t.Run("Wiki", func(t *testing.T) {
+ p := wikiTestPayload()
+
+ p.Action = api.HookWikiCreated
+ pl, err := dc.Wiki(p)
+ require.NoError(t, err)
+
+ assert.Len(t, pl.Embeds, 1)
+ assert.Equal(t, "[test/repo] New wiki page 'index' (Wiki change comment)", pl.Embeds[0].Title)
+ assert.Equal(t, "Wiki change comment", pl.Embeds[0].Description)
+ assert.Equal(t, "http://localhost:3000/test/repo/wiki/index", pl.Embeds[0].URL)
+ assert.Equal(t, p.Sender.UserName, pl.Embeds[0].Author.Name)
+ assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.Embeds[0].Author.URL)
+ assert.Equal(t, p.Sender.AvatarURL, pl.Embeds[0].Author.IconURL)
+
+ p.Action = api.HookWikiEdited
+ pl, err = dc.Wiki(p)
+ require.NoError(t, err)
+
+ assert.Len(t, pl.Embeds, 1)
+ assert.Equal(t, "[test/repo] Wiki page 'index' edited (Wiki change comment)", pl.Embeds[0].Title)
+ assert.Equal(t, "Wiki change comment", pl.Embeds[0].Description)
+ assert.Equal(t, "http://localhost:3000/test/repo/wiki/index", pl.Embeds[0].URL)
+ assert.Equal(t, p.Sender.UserName, pl.Embeds[0].Author.Name)
+ assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.Embeds[0].Author.URL)
+ assert.Equal(t, p.Sender.AvatarURL, pl.Embeds[0].Author.IconURL)
+
+ p.Action = api.HookWikiDeleted
+ pl, err = dc.Wiki(p)
+ require.NoError(t, err)
+
+ assert.Len(t, pl.Embeds, 1)
+ assert.Equal(t, "[test/repo] Wiki page 'index' deleted", pl.Embeds[0].Title)
+ assert.Empty(t, pl.Embeds[0].Description)
+ assert.Equal(t, "http://localhost:3000/test/repo/wiki/index", pl.Embeds[0].URL)
+ assert.Equal(t, p.Sender.UserName, pl.Embeds[0].Author.Name)
+ assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.Embeds[0].Author.URL)
+ assert.Equal(t, p.Sender.AvatarURL, pl.Embeds[0].Author.IconURL)
+ })
+
+ t.Run("Release", func(t *testing.T) {
+ p := pullReleaseTestPayload()
+
+ pl, err := dc.Release(p)
+ require.NoError(t, err)
+
+ assert.Len(t, pl.Embeds, 1)
+ assert.Equal(t, "[test/repo] Release created: v1.0", pl.Embeds[0].Title)
+ assert.Equal(t, "Note of first stable release", pl.Embeds[0].Description)
+ assert.Equal(t, "http://localhost:3000/test/repo/releases/tag/v1.0", pl.Embeds[0].URL)
+ assert.Equal(t, p.Sender.UserName, pl.Embeds[0].Author.Name)
+ assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.Embeds[0].Author.URL)
+ assert.Equal(t, p.Sender.AvatarURL, pl.Embeds[0].Author.IconURL)
+ })
+}
+
+func TestDiscordJSONPayload(t *testing.T) {
+ p := pushTestPayload()
+ data, err := p.JSONPayload()
+ require.NoError(t, err)
+
+ hook := &webhook_model.Webhook{
+ RepoID: 3,
+ IsActive: true,
+ Type: webhook_module.DISCORD,
+ URL: "https://discord.example.com/",
+ Meta: `{}`,
+ HTTPMethod: "POST",
+ }
+ task := &webhook_model.HookTask{
+ HookID: hook.ID,
+ EventType: webhook_module.HookEventPush,
+ PayloadContent: string(data),
+ PayloadVersion: 2,
+ }
+
+ req, reqBody, err := discordHandler{}.NewRequest(context.Background(), hook, task)
+ require.NotNil(t, req)
+ require.NotNil(t, reqBody)
+ require.NoError(t, err)
+
+ assert.Equal(t, "POST", req.Method)
+ assert.Equal(t, "https://discord.example.com/", req.URL.String())
+ assert.Equal(t, "sha256=", req.Header.Get("X-Hub-Signature-256"))
+ assert.Equal(t, "application/json", req.Header.Get("Content-Type"))
+ var body DiscordPayload
+ err = json.NewDecoder(req.Body).Decode(&body)
+ require.NoError(t, err)
+ assert.Equal(t, "[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1", body.Embeds[0].Description)
+}
diff --git a/services/webhook/feishu.go b/services/webhook/feishu.go
new file mode 100644
index 0000000..f77c3bb
--- /dev/null
+++ b/services/webhook/feishu.go
@@ -0,0 +1,200 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "context"
+ "fmt"
+ "html/template"
+ "net/http"
+ "strings"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/git"
+ api "code.gitea.io/gitea/modules/structs"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/webhook/shared"
+)
+
+type feishuHandler struct{}
+
+func (feishuHandler) Type() webhook_module.HookType { return webhook_module.FEISHU }
+func (feishuHandler) Icon(size int) template.HTML { return shared.ImgIcon("feishu.png", size) }
+
+func (feishuHandler) UnmarshalForm(bind func(any)) forms.WebhookForm {
+ var form struct {
+ forms.WebhookCoreForm
+ PayloadURL string `binding:"Required;ValidUrl"`
+ }
+ bind(&form)
+
+ return forms.WebhookForm{
+ WebhookCoreForm: form.WebhookCoreForm,
+ URL: form.PayloadURL,
+ ContentType: webhook_model.ContentTypeJSON,
+ Secret: "",
+ HTTPMethod: http.MethodPost,
+ Metadata: nil,
+ }
+}
+
+func (feishuHandler) Metadata(*webhook_model.Webhook) any { return nil }
+
+type (
+ // FeishuPayload represents the payload sent to a Feishu (Lark) webhook
+ FeishuPayload struct {
+ MsgType string `json:"msg_type"` // text / post / image / share_chat / interactive / file / audio / media
+ Content struct {
+ Text string `json:"text"`
+ } `json:"content"`
+ }
+)
+
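+// newFeishuTextPayload wraps plain text in the minimal msg_type "text" structure accepted by Feishu bots.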
+func newFeishuTextPayload(text string) FeishuPayload {
+ return FeishuPayload{
+ MsgType: "text",
+ Content: struct {
+ Text string `json:"text"`
+ }{
+ Text: strings.TrimSpace(text),
+ },
+ }
+}
+
+// Create implements PayloadConvertor Create method
+func (fc feishuConvertor) Create(p *api.CreatePayload) (FeishuPayload, error) {
+ // created tag/branch
+ refName := git.RefName(p.Ref).ShortName()
+ text := fmt.Sprintf("[%s] %s %s created", p.Repo.FullName, p.RefType, refName)
+
+ return newFeishuTextPayload(text), nil
+}
+
+// Delete implements PayloadConvertor Delete method
+func (fc feishuConvertor) Delete(p *api.DeletePayload) (FeishuPayload, error) {
+ // deleted tag/branch
+ refName := git.RefName(p.Ref).ShortName()
+ text := fmt.Sprintf("[%s] %s %s deleted", p.Repo.FullName, p.RefType, refName)
+
+ return newFeishuTextPayload(text), nil
+}
+
+// Fork implements PayloadConvertor Fork method
+func (fc feishuConvertor) Fork(p *api.ForkPayload) (FeishuPayload, error) {
+ text := fmt.Sprintf("%s is forked to %s", p.Forkee.FullName, p.Repo.FullName)
+
+ return newFeishuTextPayload(text), nil
+}
+
+// Push implements PayloadConvertor Push method
+func (fc feishuConvertor) Push(p *api.PushPayload) (FeishuPayload, error) {
+ var (
+ branchName = git.RefName(p.Ref).ShortName()
+ commitDesc string
+ )
+ if p.TotalCommits == 1 {
+ commitDesc = "1 new commit"
+ } else {
+ commitDesc = fmt.Sprintf("%d new commits", p.TotalCommits)
+ }
+
+ text := fmt.Sprintf("[%s:%s] %s\r\n", p.Repo.FullName, branchName, commitDesc)
+ // for each commit, generate attachment text
+ for i, commit := range p.Commits {
+ var authorName string
+ if commit.Author != nil {
+ authorName = " - " + commit.Author.Name
+ }
+ text += fmt.Sprintf("[%s](%s) %s", commit.ID[:7], commit.URL,
+ strings.TrimRight(commit.Message, "\r\n")) + authorName
+ // add linebreak to each commit but the last
+ if i < len(p.Commits)-1 {
+ text += "\r\n"
+ }
+ }
+
+ return newFeishuTextPayload(text), nil
+}
+
+// Issue implements PayloadConvertor Issue method
+func (fc feishuConvertor) Issue(p *api.IssuePayload) (FeishuPayload, error) {
+ title, link, by, operator, result, assignees := getIssuesInfo(p)
+ if assignees != "" {
+ if p.Action == api.HookIssueAssigned || p.Action == api.HookIssueUnassigned || p.Action == api.HookIssueMilestoned {
+ return newFeishuTextPayload(fmt.Sprintf("%s\n%s\n%s\n%s\n%s\n%s\n\n%s", title, link, by, operator, result, assignees, p.Issue.Body)), nil
+ }
+ return newFeishuTextPayload(fmt.Sprintf("%s\n%s\n%s\n%s\n%s\n\n%s", title, link, by, operator, assignees, p.Issue.Body)), nil
+ }
+ return newFeishuTextPayload(fmt.Sprintf("%s\n%s\n%s\n%s\n\n%s", title, link, by, operator, p.Issue.Body)), nil
+}
+
+// IssueComment implements PayloadConvertor IssueComment method
+func (fc feishuConvertor) IssueComment(p *api.IssueCommentPayload) (FeishuPayload, error) {
+ title, link, by, operator := getIssuesCommentInfo(p)
+ return newFeishuTextPayload(fmt.Sprintf("%s\n%s\n%s\n%s\n\n%s", title, link, by, operator, p.Comment.Body)), nil
+}
+
+// PullRequest implements PayloadConvertor PullRequest method
+func (fc feishuConvertor) PullRequest(p *api.PullRequestPayload) (FeishuPayload, error) {
+ title, link, by, operator, result, assignees := getPullRequestInfo(p)
+ if assignees != "" {
+ if p.Action == api.HookIssueAssigned || p.Action == api.HookIssueUnassigned || p.Action == api.HookIssueMilestoned {
+ return newFeishuTextPayload(fmt.Sprintf("%s\n%s\n%s\n%s\n%s\n%s\n\n%s", title, link, by, operator, result, assignees, p.PullRequest.Body)), nil
+ }
+ return newFeishuTextPayload(fmt.Sprintf("%s\n%s\n%s\n%s\n%s\n\n%s", title, link, by, operator, assignees, p.PullRequest.Body)), nil
+ }
+ return newFeishuTextPayload(fmt.Sprintf("%s\n%s\n%s\n%s\n\n%s", title, link, by, operator, p.PullRequest.Body)), nil
+}
+
+// Review implements PayloadConvertor Review method
+func (fc feishuConvertor) Review(p *api.PullRequestPayload, event webhook_module.HookEventType) (FeishuPayload, error) {
+ action, err := parseHookPullRequestEventType(event)
+ if err != nil {
+ return FeishuPayload{}, err
+ }
+
+ title := fmt.Sprintf("[%s] Pull request review %s : #%d %s", p.Repository.FullName, action, p.Index, p.PullRequest.Title)
+ text := p.Review.Content
+
+ return newFeishuTextPayload(title + "\r\n\r\n" + text), nil
+}
+
+// Repository implements PayloadConvertor Repository method
+func (fc feishuConvertor) Repository(p *api.RepositoryPayload) (FeishuPayload, error) {
+ var text string
+ switch p.Action {
+ case api.HookRepoCreated:
+ text = fmt.Sprintf("[%s] Repository created", p.Repository.FullName)
+ return newFeishuTextPayload(text), nil
+ case api.HookRepoDeleted:
+ text = fmt.Sprintf("[%s] Repository deleted", p.Repository.FullName)
+ return newFeishuTextPayload(text), nil
+ }
+
+ return FeishuPayload{}, nil
+}
+
+// Wiki implements PayloadConvertor Wiki method
+func (fc feishuConvertor) Wiki(p *api.WikiPayload) (FeishuPayload, error) {
+ text, _, _ := getWikiPayloadInfo(p, noneLinkFormatter, true)
+
+ return newFeishuTextPayload(text), nil
+}
+
+// Release implements PayloadConvertor Release method
+func (fc feishuConvertor) Release(p *api.ReleasePayload) (FeishuPayload, error) {
+ text, _ := getReleasePayloadInfo(p, noneLinkFormatter, true)
+
+ return newFeishuTextPayload(text), nil
+}
+
+func (fc feishuConvertor) Package(p *api.PackagePayload) (FeishuPayload, error) {
+ text, _ := getPackagePayloadInfo(p, noneLinkFormatter, true)
+
+ return newFeishuTextPayload(text), nil
+}
+
+type feishuConvertor struct{}
+
+var _ shared.PayloadConvertor[FeishuPayload] = feishuConvertor{}
+
+func (feishuHandler) NewRequest(ctx context.Context, w *webhook_model.Webhook, t *webhook_model.HookTask) (*http.Request, []byte, error) {
+ return shared.NewJSONRequest(feishuConvertor{}, w, t, true)
+}
diff --git a/services/webhook/feishu_test.go b/services/webhook/feishu_test.go
new file mode 100644
index 0000000..9744571
--- /dev/null
+++ b/services/webhook/feishu_test.go
@@ -0,0 +1,193 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "context"
+ "testing"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/json"
+ api "code.gitea.io/gitea/modules/structs"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestFeishuPayload(t *testing.T) {
+ fc := feishuConvertor{}
+ t.Run("Create", func(t *testing.T) {
+ p := createTestPayload()
+
+ pl, err := fc.Create(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, `[test/repo] branch test created`, pl.Content.Text)
+ })
+
+ t.Run("Delete", func(t *testing.T) {
+ p := deleteTestPayload()
+
+ pl, err := fc.Delete(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, `[test/repo] branch test deleted`, pl.Content.Text)
+ })
+
+ t.Run("Fork", func(t *testing.T) {
+ p := forkTestPayload()
+
+ pl, err := fc.Fork(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, `test/repo2 is forked to test/repo`, pl.Content.Text)
+ })
+
+ t.Run("Push", func(t *testing.T) {
+ p := pushTestPayload()
+
+ pl, err := fc.Push(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo:test] 2 new commits\r\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1\r\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1", pl.Content.Text)
+ })
+
+ t.Run("Issue", func(t *testing.T) {
+ p := issueTestPayload()
+
+ p.Action = api.HookIssueOpened
+ pl, err := fc.Issue(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[Issue-test/repo #2]: opened\ncrash\nhttp://localhost:3000/test/repo/issues/2\nIssue by user1\nOperator: user1\nAssignees: user1\n\nissue body", pl.Content.Text)
+
+ p.Action = api.HookIssueClosed
+ pl, err = fc.Issue(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[Issue-test/repo #2]: closed\ncrash\nhttp://localhost:3000/test/repo/issues/2\nIssue by user1\nOperator: user1\nAssignees: user1\n\nissue body", pl.Content.Text)
+ })
+
+ t.Run("IssueComment", func(t *testing.T) {
+ p := issueCommentTestPayload()
+
+ pl, err := fc.IssueComment(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[Comment-test/repo #2]: created\ncrash\nhttp://localhost:3000/test/repo/issues/2\nIssue by user1\nOperator: user1\n\nmore info needed", pl.Content.Text)
+ })
+
+ t.Run("PullRequest", func(t *testing.T) {
+ p := pullRequestTestPayload()
+
+ pl, err := fc.PullRequest(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[PullRequest-test/repo #12]: opened\nFix bug\nhttp://localhost:3000/test/repo/pulls/12\nPullRequest by user1\nOperator: user1\nAssignees: user1\n\nfixes bug #2", pl.Content.Text)
+ })
+
+ t.Run("PullRequestComment", func(t *testing.T) {
+ p := pullRequestCommentTestPayload()
+
+ pl, err := fc.IssueComment(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[Comment-test/repo #12]: created\nFix bug\nhttp://localhost:3000/test/repo/pulls/12\nPullRequest by user1\nOperator: user1\n\nchanges requested", pl.Content.Text)
+ })
+
+ t.Run("Review", func(t *testing.T) {
+ p := pullRequestTestPayload()
+ p.Action = api.HookIssueReviewed
+
+ pl, err := fc.Review(p, webhook_module.HookEventPullRequestReviewApproved)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] Pull request review approved : #12 Fix bug\r\n\r\ngood job", pl.Content.Text)
+ })
+
+ t.Run("Repository", func(t *testing.T) {
+ p := repositoryTestPayload()
+
+ pl, err := fc.Repository(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] Repository created", pl.Content.Text)
+ })
+
+ t.Run("Package", func(t *testing.T) {
+ p := packageTestPayload()
+
+ pl, err := fc.Package(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "Package created: GiteaContainer:latest by user1", pl.Content.Text)
+ })
+
+ t.Run("Wiki", func(t *testing.T) {
+ p := wikiTestPayload()
+
+ p.Action = api.HookWikiCreated
+ pl, err := fc.Wiki(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] New wiki page 'index' (Wiki change comment) by user1", pl.Content.Text)
+
+ p.Action = api.HookWikiEdited
+ pl, err = fc.Wiki(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] Wiki page 'index' edited (Wiki change comment) by user1", pl.Content.Text)
+
+ p.Action = api.HookWikiDeleted
+ pl, err = fc.Wiki(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] Wiki page 'index' deleted by user1", pl.Content.Text)
+ })
+
+ t.Run("Release", func(t *testing.T) {
+ p := pullReleaseTestPayload()
+
+ pl, err := fc.Release(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] Release created: v1.0 by user1", pl.Content.Text)
+ })
+}
+
+func TestFeishuJSONPayload(t *testing.T) {
+ p := pushTestPayload()
+ data, err := p.JSONPayload()
+ require.NoError(t, err)
+
+ hook := &webhook_model.Webhook{
+ RepoID: 3,
+ IsActive: true,
+ Type: webhook_module.FEISHU,
+ URL: "https://feishu.example.com/",
+ Meta: `{}`,
+ HTTPMethod: "POST",
+ }
+ task := &webhook_model.HookTask{
+ HookID: hook.ID,
+ EventType: webhook_module.HookEventPush,
+ PayloadContent: string(data),
+ PayloadVersion: 2,
+ }
+
+ req, reqBody, err := feishuHandler{}.NewRequest(context.Background(), hook, task)
+ require.NotNil(t, req)
+ require.NotNil(t, reqBody)
+ require.NoError(t, err)
+
+ assert.Equal(t, "POST", req.Method)
+ assert.Equal(t, "https://feishu.example.com/", req.URL.String())
+ assert.Equal(t, "sha256=", req.Header.Get("X-Hub-Signature-256"))
+ assert.Equal(t, "application/json", req.Header.Get("Content-Type"))
+ var body FeishuPayload
+ err = json.NewDecoder(req.Body).Decode(&body)
+ require.NoError(t, err)
+ assert.Equal(t, "[test/repo:test] 2 new commits\r\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1\r\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1", body.Content.Text)
+}
diff --git a/services/webhook/general.go b/services/webhook/general.go
new file mode 100644
index 0000000..c41f58f
--- /dev/null
+++ b/services/webhook/general.go
@@ -0,0 +1,354 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "fmt"
+ "html"
+ "net/url"
+ "strings"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+)
+
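+// linkFormatter renders a (url, text) pair for a target platform, e.g. as plain text or as an HTML anchor.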
+type linkFormatter = func(string, string) string
+
+// noneLinkFormatter does not create a link but just returns the text
+func noneLinkFormatter(url, text string) string {
+ return text
+}
+
+// htmlLinkFormatter creates a HTML link
+func htmlLinkFormatter(url, text string) string {
+ return fmt.Sprintf(`<a href="%s">%s</a>`, html.EscapeString(url), html.EscapeString(text))
+}
+
+// getPullRequestInfo gets the information for a pull request
+func getPullRequestInfo(p *api.PullRequestPayload) (title, link, by, operator, operateResult, assignees string) {
+ title = fmt.Sprintf("[PullRequest-%s #%d]: %s\n%s", p.Repository.FullName, p.PullRequest.Index, p.Action, p.PullRequest.Title)
+ assignList := p.PullRequest.Assignees
+ assignStringList := make([]string, len(assignList))
+
+ for i, user := range assignList {
+ assignStringList[i] = user.UserName
+ }
+ if p.Action == api.HookIssueAssigned {
+ operateResult = fmt.Sprintf("%s assigned this to %s", p.Sender.UserName, assignList[len(assignList)-1].UserName)
+ } else if p.Action == api.HookIssueUnassigned {
+ operateResult = fmt.Sprintf("%s unassigned this", p.Sender.UserName)
+ } else if p.Action == api.HookIssueMilestoned {
+ operateResult = fmt.Sprintf("%s/milestone/%d", p.Repository.HTMLURL, p.PullRequest.Milestone.ID)
+ }
+ link = p.PullRequest.HTMLURL
+ by = fmt.Sprintf("PullRequest by %s", p.PullRequest.Poster.UserName)
+ if len(assignStringList) > 0 {
+ assignees = fmt.Sprintf("Assignees: %s", strings.Join(assignStringList, ", "))
+ }
+ operator = fmt.Sprintf("Operator: %s", p.Sender.UserName)
+ return title, link, by, operator, operateResult, assignees
+}
+
+// getIssuesInfo gets the information for an issue
+func getIssuesInfo(p *api.IssuePayload) (issueTitle, link, by, operator, operateResult, assignees string) {
+ issueTitle = fmt.Sprintf("[Issue-%s #%d]: %s\n%s", p.Repository.FullName, p.Issue.Index, p.Action, p.Issue.Title)
+ assignList := p.Issue.Assignees
+ assignStringList := make([]string, len(assignList))
+
+ for i, user := range assignList {
+ assignStringList[i] = user.UserName
+ }
+ if p.Action == api.HookIssueAssigned {
+ operateResult = fmt.Sprintf("%s assigned this to %s", p.Sender.UserName, assignList[len(assignList)-1].UserName)
+ } else if p.Action == api.HookIssueUnassigned {
+ operateResult = fmt.Sprintf("%s unassigned this", p.Sender.UserName)
+ } else if p.Action == api.HookIssueMilestoned {
+ operateResult = fmt.Sprintf("%s/milestone/%d", p.Repository.HTMLURL, p.Issue.Milestone.ID)
+ }
+ link = p.Issue.HTMLURL
+ by = fmt.Sprintf("Issue by %s", p.Issue.Poster.UserName)
+ if len(assignStringList) > 0 {
+ assignees = fmt.Sprintf("Assignees: %s", strings.Join(assignStringList, ", "))
+ }
+ operator = fmt.Sprintf("Operator: %s", p.Sender.UserName)
+ return issueTitle, link, by, operator, operateResult, assignees
+}
+
+// getIssuesCommentInfo gets the information for a comment
+func getIssuesCommentInfo(p *api.IssueCommentPayload) (title, link, by, operator string) {
+ title = fmt.Sprintf("[Comment-%s #%d]: %s\n%s", p.Repository.FullName, p.Issue.Index, p.Action, p.Issue.Title)
+ link = p.Issue.HTMLURL
+ if p.IsPull {
+ by = fmt.Sprintf("PullRequest by %s", p.Issue.Poster.UserName)
+ } else {
+ by = fmt.Sprintf("Issue by %s", p.Issue.Poster.UserName)
+ }
+ operator = fmt.Sprintf("Operator: %s", p.Sender.UserName)
+ return title, link, by, operator
+}
+
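+// getIssuesPayloadInfo returns the rendered event text, the issue title, the attachment body (the issue body for opened/edited events) and a suggested color.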
+func getIssuesPayloadInfo(p *api.IssuePayload, linkFormatter linkFormatter, withSender bool) (string, string, string, int) {
+ repoLink := linkFormatter(p.Repository.HTMLURL, p.Repository.FullName)
+ issueTitle := fmt.Sprintf("#%d %s", p.Index, p.Issue.Title)
+ titleLink := linkFormatter(fmt.Sprintf("%s/issues/%d", p.Repository.HTMLURL, p.Index), issueTitle)
+ var text string
+ color := yellowColor
+
+ switch p.Action {
+ case api.HookIssueOpened:
+ text = fmt.Sprintf("[%s] Issue opened: %s", repoLink, titleLink)
+ color = orangeColor
+ case api.HookIssueClosed:
+ text = fmt.Sprintf("[%s] Issue closed: %s", repoLink, titleLink)
+ color = redColor
+ case api.HookIssueReOpened:
+ text = fmt.Sprintf("[%s] Issue re-opened: %s", repoLink, titleLink)
+ case api.HookIssueEdited:
+ text = fmt.Sprintf("[%s] Issue edited: %s", repoLink, titleLink)
+ case api.HookIssueAssigned:
+ list := make([]string, len(p.Issue.Assignees))
+ for i, user := range p.Issue.Assignees {
+ list[i] = linkFormatter(setting.AppURL+url.PathEscape(user.UserName), user.UserName)
+ }
+ text = fmt.Sprintf("[%s] Issue assigned to %s: %s", repoLink, strings.Join(list, ", "), titleLink)
+ color = greenColor
+ case api.HookIssueUnassigned:
+ text = fmt.Sprintf("[%s] Issue unassigned: %s", repoLink, titleLink)
+ case api.HookIssueLabelUpdated:
+ text = fmt.Sprintf("[%s] Issue labels updated: %s", repoLink, titleLink)
+ case api.HookIssueLabelCleared:
+ text = fmt.Sprintf("[%s] Issue labels cleared: %s", repoLink, titleLink)
+ case api.HookIssueSynchronized:
+ text = fmt.Sprintf("[%s] Issue synchronized: %s", repoLink, titleLink)
+ case api.HookIssueMilestoned:
+ mileStoneLink := fmt.Sprintf("%s/milestone/%d", p.Repository.HTMLURL, p.Issue.Milestone.ID)
+ text = fmt.Sprintf("[%s] Issue milestoned to %s: %s", repoLink,
+ linkFormatter(mileStoneLink, p.Issue.Milestone.Title), titleLink)
+ case api.HookIssueDemilestoned:
+ text = fmt.Sprintf("[%s] Issue milestone cleared: %s", repoLink, titleLink)
+ }
+ if withSender {
+ text += fmt.Sprintf(" by %s", linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName))
+ }
+
+ var attachmentText string
+ if p.Action == api.HookIssueOpened || p.Action == api.HookIssueEdited {
+ attachmentText = p.Issue.Body
+ }
+
+ return text, issueTitle, attachmentText, color
+}
+
+func getPullRequestPayloadInfo(p *api.PullRequestPayload, linkFormatter linkFormatter, withSender bool) (string, string, string, int) {
+ repoLink := linkFormatter(p.Repository.HTMLURL, p.Repository.FullName)
+ issueTitle := fmt.Sprintf("#%d %s", p.Index, p.PullRequest.Title)
+ titleLink := linkFormatter(p.PullRequest.URL, issueTitle)
+ var text string
+ var attachmentText string
+ color := yellowColor
+
+ switch p.Action {
+ case api.HookIssueOpened:
+ text = fmt.Sprintf("[%s] Pull request opened: %s", repoLink, titleLink)
+ attachmentText = p.PullRequest.Body
+ color = greenColor
+ case api.HookIssueClosed:
+ if p.PullRequest.HasMerged {
+ text = fmt.Sprintf("[%s] Pull request merged: %s", repoLink, titleLink)
+ color = purpleColor
+ } else {
+ text = fmt.Sprintf("[%s] Pull request closed: %s", repoLink, titleLink)
+ color = redColor
+ }
+ case api.HookIssueReOpened:
+ text = fmt.Sprintf("[%s] Pull request re-opened: %s", repoLink, titleLink)
+ case api.HookIssueEdited:
+ text = fmt.Sprintf("[%s] Pull request edited: %s", repoLink, titleLink)
+ attachmentText = p.PullRequest.Body
+ case api.HookIssueAssigned:
+ list := make([]string, len(p.PullRequest.Assignees))
+ for i, user := range p.PullRequest.Assignees {
+ list[i] = linkFormatter(setting.AppURL+url.PathEscape(user.UserName), user.UserName)
+ }
+ text = fmt.Sprintf("[%s] Pull request assigned to %s: %s", repoLink,
+ strings.Join(list, ", "), titleLink)
+ color = greenColor
+ case api.HookIssueUnassigned:
+ text = fmt.Sprintf("[%s] Pull request unassigned: %s", repoLink, titleLink)
+ case api.HookIssueLabelUpdated:
+ text = fmt.Sprintf("[%s] Pull request labels updated: %s", repoLink, titleLink)
+ case api.HookIssueLabelCleared:
+ text = fmt.Sprintf("[%s] Pull request labels cleared: %s", repoLink, titleLink)
+ case api.HookIssueSynchronized:
+ text = fmt.Sprintf("[%s] Pull request synchronized: %s", repoLink, titleLink)
+ case api.HookIssueMilestoned:
+ mileStoneLink := fmt.Sprintf("%s/milestone/%d", p.Repository.HTMLURL, p.PullRequest.Milestone.ID)
+ text = fmt.Sprintf("[%s] Pull request milestoned to %s: %s", repoLink,
+ linkFormatter(mileStoneLink, p.PullRequest.Milestone.Title), titleLink)
+ case api.HookIssueDemilestoned:
+ text = fmt.Sprintf("[%s] Pull request milestone cleared: %s", repoLink, titleLink)
+ case api.HookIssueReviewed:
+ text = fmt.Sprintf("[%s] Pull request reviewed: %s", repoLink, titleLink)
+ attachmentText = p.Review.Content
+ case api.HookIssueReviewRequested:
+ text = fmt.Sprintf("[%s] Pull request review requested: %s", repoLink, titleLink)
+ case api.HookIssueReviewRequestRemoved:
+ text = fmt.Sprintf("[%s] Pull request review request removed: %s", repoLink, titleLink)
+ }
+ if withSender {
+ text += fmt.Sprintf(" by %s", linkFormatter(setting.AppURL+p.Sender.UserName, p.Sender.UserName))
+ }
+
+ return text, issueTitle, attachmentText, color
+}
+
+func getReleasePayloadInfo(p *api.ReleasePayload, linkFormatter linkFormatter, withSender bool) (text string, color int) {
+ repoLink := linkFormatter(p.Repository.HTMLURL, p.Repository.FullName)
+ refLink := linkFormatter(p.Repository.HTMLURL+"/releases/tag/"+util.PathEscapeSegments(p.Release.TagName), p.Release.TagName)
+
+ switch p.Action {
+ case api.HookReleasePublished:
+ text = fmt.Sprintf("[%s] Release created: %s", repoLink, refLink)
+ color = greenColor
+ case api.HookReleaseUpdated:
+ text = fmt.Sprintf("[%s] Release updated: %s", repoLink, refLink)
+ color = yellowColor
+ case api.HookReleaseDeleted:
+ text = fmt.Sprintf("[%s] Release deleted: %s", repoLink, refLink)
+ color = redColor
+ }
+ if withSender {
+ text += fmt.Sprintf(" by %s", linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName))
+ }
+
+ return text, color
+}
+
+func getWikiPayloadInfo(p *api.WikiPayload, linkFormatter linkFormatter, withSender bool) (string, int, string) {
+ repoLink := linkFormatter(p.Repository.HTMLURL, p.Repository.FullName)
+ pageLink := linkFormatter(p.Repository.HTMLURL+"/wiki/"+url.PathEscape(p.Page), p.Page)
+
+ var text string
+ color := greenColor
+
+ switch p.Action {
+ case api.HookWikiCreated:
+ text = fmt.Sprintf("[%s] New wiki page '%s'", repoLink, pageLink)
+ case api.HookWikiEdited:
+ text = fmt.Sprintf("[%s] Wiki page '%s' edited", repoLink, pageLink)
+ color = yellowColor
+ case api.HookWikiDeleted:
+ text = fmt.Sprintf("[%s] Wiki page '%s' deleted", repoLink, pageLink)
+ color = redColor
+ }
+
+ if p.Action != api.HookWikiDeleted && p.Comment != "" {
+ text += fmt.Sprintf(" (%s)", p.Comment)
+ }
+
+ if withSender {
+ text += fmt.Sprintf(" by %s", linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName))
+ }
+
+ return text, color, pageLink
+}
+
+func getIssueCommentPayloadInfo(p *api.IssueCommentPayload, linkFormatter linkFormatter, withSender bool) (string, string, int) {
+ repoLink := linkFormatter(p.Repository.HTMLURL, p.Repository.FullName)
+ issueTitle := fmt.Sprintf("#%d %s", p.Issue.Index, p.Issue.Title)
+
+ var text, typ, titleLink string
+ color := yellowColor
+
+ if p.IsPull {
+ typ = "pull request"
+ titleLink = linkFormatter(p.Comment.PRURL, issueTitle)
+ } else {
+ typ = "issue"
+ titleLink = linkFormatter(p.Comment.IssueURL, issueTitle)
+ }
+
+ switch p.Action {
+ case api.HookIssueCommentCreated:
+ text = fmt.Sprintf("[%s] New comment on %s %s", repoLink, typ, titleLink)
+ if p.IsPull {
+ color = greenColorLight
+ } else {
+ color = orangeColorLight
+ }
+ case api.HookIssueCommentEdited:
+ text = fmt.Sprintf("[%s] Comment edited on %s %s", repoLink, typ, titleLink)
+ case api.HookIssueCommentDeleted:
+ text = fmt.Sprintf("[%s] Comment deleted on %s %s", repoLink, typ, titleLink)
+ color = redColor
+ }
+ if withSender {
+ text += fmt.Sprintf(" by %s", linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName))
+ }
+
+ return text, issueTitle, color
+}
+
+func getPackagePayloadInfo(p *api.PackagePayload, linkFormatter linkFormatter, withSender bool) (text string, color int) {
+ refLink := linkFormatter(p.Package.HTMLURL, p.Package.Name+":"+p.Package.Version)
+
+ switch p.Action {
+ case api.HookPackageCreated:
+ text = fmt.Sprintf("Package created: %s", refLink)
+ color = greenColor
+ case api.HookPackageDeleted:
+ text = fmt.Sprintf("Package deleted: %s", refLink)
+ color = redColor
+ }
+ if withSender {
+ text += fmt.Sprintf(" by %s", linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName))
+ }
+
+ return text, color
+}
+
+// ToHook converts a models.Webhook to an api.Hook
+// This function is not part of the convert package to prevent an import cycle
+func ToHook(repoLink string, w *webhook_model.Webhook) (*api.Hook, error) {
+ // config is deprecated, but kept for compatibility
+ config := map[string]string{
+ "url": w.URL,
+ "content_type": w.ContentType.Name(),
+ }
+ if w.Type == webhook_module.SLACK {
+ if s, ok := (slackHandler{}.Metadata(w)).(*SlackMeta); ok {
+ config["channel"] = s.Channel
+ config["username"] = s.Username
+ config["icon_url"] = s.IconURL
+ config["color"] = s.Color
+ }
+ }
+
+ authorizationHeader, err := w.HeaderAuthorization()
+ if err != nil {
+ return nil, err
+ }
+ var metadata any
+ if handler := GetWebhookHandler(w.Type); handler != nil {
+ metadata = handler.Metadata(w)
+ }
+
+ return &api.Hook{
+ ID: w.ID,
+ Type: w.Type,
+ BranchFilter: w.BranchFilter,
+ URL: w.URL,
+ Config: config,
+ Events: w.EventsArray(),
+ AuthorizationHeader: authorizationHeader,
+ ContentType: w.ContentType.Name(),
+ Metadata: metadata,
+ Active: w.IsActive,
+ Updated: w.UpdatedUnix.AsTime(),
+ Created: w.CreatedUnix.AsTime(),
+ }, nil
+}
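+
+// Usage sketch (w is a loaded *webhook_model.Webhook; the repo variable and
+// its Link method are illustrative names, not part of this package):
+//
+//	hook, err := ToHook(repo.Link(), w)
+//	// hook.Config carries the legacy url/content_type map, while
+//	// hook.Metadata holds handler-specific settings such as *MatrixMeta.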
diff --git a/services/webhook/general_test.go b/services/webhook/general_test.go
new file mode 100644
index 0000000..6dcd787
--- /dev/null
+++ b/services/webhook/general_test.go
@@ -0,0 +1,673 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "strings"
+ "testing"
+
+ api "code.gitea.io/gitea/modules/structs"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func createTestPayload() *api.CreatePayload {
+ return &api.CreatePayload{
+ Sha: "2020558fe2e34debb818a514715839cabd25e777",
+ Ref: "refs/heads/test",
+ RefType: "branch",
+ Repo: &api.Repository{
+ HTMLURL: "http://localhost:3000/test/repo",
+ Name: "repo",
+ FullName: "test/repo",
+ },
+ Sender: &api.User{
+ UserName: "user1",
+ AvatarURL: "http://localhost:3000/user1/avatar",
+ },
+ }
+}
+
+func deleteTestPayload() *api.DeletePayload {
+ return &api.DeletePayload{
+ Ref: "refs/heads/test",
+ RefType: "branch",
+ Repo: &api.Repository{
+ HTMLURL: "http://localhost:3000/test/repo",
+ Name: "repo",
+ FullName: "test/repo",
+ },
+ Sender: &api.User{
+ UserName: "user1",
+ AvatarURL: "http://localhost:3000/user1/avatar",
+ },
+ }
+}
+
+func forkTestPayload() *api.ForkPayload {
+ return &api.ForkPayload{
+ Forkee: &api.Repository{
+ HTMLURL: "http://localhost:3000/test/repo2",
+ Name: "repo2",
+ FullName: "test/repo2",
+ },
+ Repo: &api.Repository{
+ HTMLURL: "http://localhost:3000/test/repo",
+ Name: "repo",
+ FullName: "test/repo",
+ },
+ Sender: &api.User{
+ UserName: "user1",
+ AvatarURL: "http://localhost:3000/user1/avatar",
+ },
+ }
+}
+
+func pushTestPayload() *api.PushPayload {
+ return pushTestPayloadWithCommitMessage("commit message")
+}
+
+func pushTestMultilineCommitMessagePayload() *api.PushPayload {
+ return pushTestPayloadWithCommitMessage("This is a commit summary âš ï¸âš ï¸âš ï¸âš ï¸ containing 你好 âš ï¸âš ï¸ï¸\n\nThis is the message body.")
+}
+
+func pushTestPayloadWithCommitMessage(message string) *api.PushPayload {
+ commit := &api.PayloadCommit{
+ ID: "2020558fe2e34debb818a514715839cabd25e778",
+ Message: message,
+ URL: "http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778",
+ Author: &api.PayloadUser{
+ Name: "user1",
+ Email: "user1@localhost",
+ UserName: "user1",
+ },
+ Committer: &api.PayloadUser{
+ Name: "user1",
+ Email: "user1@localhost",
+ UserName: "user1",
+ },
+ }
+
+ return &api.PushPayload{
+ Ref: "refs/heads/test",
+ Before: "2020558fe2e34debb818a514715839cabd25e777",
+ After: "2020558fe2e34debb818a514715839cabd25e778",
+ CompareURL: "",
+ HeadCommit: commit,
+ Commits: []*api.PayloadCommit{commit, commit},
+ TotalCommits: 2,
+ Repo: &api.Repository{
+ HTMLURL: "http://localhost:3000/test/repo",
+ Name: "repo",
+ FullName: "test/repo",
+ },
+ Pusher: &api.User{
+ UserName: "user1",
+ AvatarURL: "http://localhost:3000/user1/avatar",
+ },
+ Sender: &api.User{
+ UserName: "user1",
+ AvatarURL: "http://localhost:3000/user1/avatar",
+ },
+ }
+}
+
+func issueTestPayload() *api.IssuePayload {
+ return issuePayloadWithTitleAndBody("crash", "issue body")
+}
+
+func issueTestPayloadWithLongBody() *api.IssuePayload {
+ return issuePayloadWithTitleAndBody("crash", strings.Repeat("issue body", 4097))
+}
+
+func issueTestPayloadWithLongTitle() *api.IssuePayload {
+ return issuePayloadWithTitleAndBody(strings.Repeat("a", 257), "issue body")
+}
+
+func issuePayloadWithTitleAndBody(title, body string) *api.IssuePayload {
+ return &api.IssuePayload{
+ Index: 2,
+ Sender: &api.User{
+ UserName: "user1",
+ AvatarURL: "http://localhost:3000/user1/avatar",
+ },
+ Repository: &api.Repository{
+ HTMLURL: "http://localhost:3000/test/repo",
+ Name: "repo",
+ FullName: "test/repo",
+ },
+ Issue: &api.Issue{
+ ID: 2,
+ Index: 2,
+ URL: "http://localhost:3000/api/v1/repos/test/repo/issues/2",
+ HTMLURL: "http://localhost:3000/test/repo/issues/2",
+ Title: title,
+ Body: body,
+ Poster: &api.User{
+ UserName: "user1",
+ AvatarURL: "http://localhost:3000/user1/avatar",
+ },
+ Assignees: []*api.User{
+ {
+ UserName: "user1",
+ AvatarURL: "http://localhost:3000/user1/avatar",
+ },
+ },
+ Milestone: &api.Milestone{
+ ID: 1,
+ Title: "Milestone Title",
+ Description: "Milestone Description",
+ },
+ },
+ }
+}
+
+func issueCommentTestPayload() *api.IssueCommentPayload {
+ return &api.IssueCommentPayload{
+ Action: api.HookIssueCommentCreated,
+ Sender: &api.User{
+ UserName: "user1",
+ AvatarURL: "http://localhost:3000/user1/avatar",
+ },
+ Repository: &api.Repository{
+ HTMLURL: "http://localhost:3000/test/repo",
+ Name: "repo",
+ FullName: "test/repo",
+ },
+ Comment: &api.Comment{
+ HTMLURL: "http://localhost:3000/test/repo/issues/2#issuecomment-4",
+ IssueURL: "http://localhost:3000/test/repo/issues/2",
+ Body: "more info needed",
+ },
+ Issue: &api.Issue{
+ ID: 2,
+ Index: 2,
+ URL: "http://localhost:3000/api/v1/repos/test/repo/issues/2",
+ HTMLURL: "http://localhost:3000/test/repo/issues/2",
+ Title: "crash",
+ Poster: &api.User{
+ UserName: "user1",
+ AvatarURL: "http://localhost:3000/user1/avatar",
+ },
+ Body: "this happened",
+ },
+ }
+}
+
+func pullRequestCommentTestPayload() *api.IssueCommentPayload {
+ return &api.IssueCommentPayload{
+ Action: api.HookIssueCommentCreated,
+ Sender: &api.User{
+ UserName: "user1",
+ AvatarURL: "http://localhost:3000/user1/avatar",
+ },
+ Repository: &api.Repository{
+ HTMLURL: "http://localhost:3000/test/repo",
+ Name: "repo",
+ FullName: "test/repo",
+ },
+ Comment: &api.Comment{
+ HTMLURL: "http://localhost:3000/test/repo/pulls/12#issuecomment-4",
+ PRURL: "http://localhost:3000/test/repo/pulls/12",
+ Body: "changes requested",
+ },
+ Issue: &api.Issue{
+ ID: 12,
+ Index: 12,
+ URL: "http://localhost:3000/api/v1/repos/test/repo/pulls/12",
+ HTMLURL: "http://localhost:3000/test/repo/pulls/12",
+ Title: "Fix bug",
+ Body: "fixes bug #2",
+ Poster: &api.User{
+ UserName: "user1",
+ AvatarURL: "http://localhost:3000/user1/avatar",
+ },
+ },
+ IsPull: true,
+ }
+}
+
+func wikiTestPayload() *api.WikiPayload {
+ return &api.WikiPayload{
+ Repository: &api.Repository{
+ HTMLURL: "http://localhost:3000/test/repo",
+ Name: "repo",
+ FullName: "test/repo",
+ },
+ Sender: &api.User{
+ UserName: "user1",
+ AvatarURL: "http://localhost:3000/user1/avatar",
+ },
+ Page: "index",
+ Comment: "Wiki change comment",
+ }
+}
+
+func pullReleaseTestPayload() *api.ReleasePayload {
+ return &api.ReleasePayload{
+ Action: api.HookReleasePublished,
+ Sender: &api.User{
+ UserName: "user1",
+ AvatarURL: "http://localhost:3000/user1/avatar",
+ },
+ Repository: &api.Repository{
+ HTMLURL: "http://localhost:3000/test/repo",
+ Name: "repo",
+ FullName: "test/repo",
+ },
+ Release: &api.Release{
+ TagName: "v1.0",
+ Target: "master",
+ Title: "First stable release",
+ Note: "Note of first stable release",
+ HTMLURL: "http://localhost:3000/test/repo/releases/tag/v1.0",
+ },
+ }
+}
+
+func pullRequestTestPayload() *api.PullRequestPayload {
+ return &api.PullRequestPayload{
+ Action: api.HookIssueOpened,
+ Index: 12,
+ Sender: &api.User{
+ UserName: "user1",
+ AvatarURL: "http://localhost:3000/user1/avatar",
+ },
+ Repository: &api.Repository{
+ HTMLURL: "http://localhost:3000/test/repo",
+ Name: "repo",
+ FullName: "test/repo",
+ },
+ PullRequest: &api.PullRequest{
+ ID: 12,
+ Index: 12,
+ URL: "http://localhost:3000/test/repo/pulls/12",
+ HTMLURL: "http://localhost:3000/test/repo/pulls/12",
+ Title: "Fix bug",
+ Body: "fixes bug #2",
+ Mergeable: true,
+ Poster: &api.User{
+ UserName: "user1",
+ AvatarURL: "http://localhost:3000/user1/avatar",
+ },
+ Assignees: []*api.User{
+ {
+ UserName: "user1",
+ AvatarURL: "http://localhost:3000/user1/avatar",
+ },
+ },
+ Milestone: &api.Milestone{
+ ID: 1,
+ Title: "Milestone Title",
+ Description: "Milestone Description",
+ },
+ Base: &api.PRBranchInfo{
+ Name: "branch1",
+ Ref: "refs/pull/2/head",
+ Sha: "4a357436d925b5c974181ff12a994538ddc5a269",
+ RepoID: 1,
+ Repository: &api.Repository{
+ HTMLURL: "http://localhost:3000/test/repo",
+ Name: "repo",
+ FullName: "test/repo",
+ },
+ },
+ },
+ Review: &api.ReviewPayload{
+ Content: "good job",
+ },
+ }
+}
+
+func repositoryTestPayload() *api.RepositoryPayload {
+ return &api.RepositoryPayload{
+ Action: api.HookRepoCreated,
+ Sender: &api.User{
+ UserName: "user1",
+ AvatarURL: "http://localhost:3000/user1/avatar",
+ },
+ Repository: &api.Repository{
+ HTMLURL: "http://localhost:3000/test/repo",
+ Name: "repo",
+ FullName: "test/repo",
+ },
+ }
+}
+
+func packageTestPayload() *api.PackagePayload {
+ return &api.PackagePayload{
+ Action: api.HookPackageCreated,
+ Sender: &api.User{
+ UserName: "user1",
+ AvatarURL: "http://localhost:3000/user1/avatar",
+ },
+ Repository: nil,
+ Organization: &api.User{
+ UserName: "org1",
+ AvatarURL: "http://localhost:3000/org1/avatar",
+ },
+ Package: &api.Package{
+ Owner: &api.User{
+ UserName: "user1",
+ AvatarURL: "http://localhost:3000/user1/avatar",
+ },
+ Repository: nil,
+ Creator: &api.User{
+ UserName: "user1",
+ AvatarURL: "http://localhost:3000/user1/avatar",
+ },
+ Type: "container",
+ Name: "GiteaContainer",
+ Version: "latest",
+ HTMLURL: "http://localhost:3000/user1/-/packages/container/GiteaContainer/latest",
+ },
+ }
+}
+
+func TestGetIssuesPayloadInfo(t *testing.T) {
+ p := issueTestPayload()
+
+ cases := []struct {
+ action api.HookIssueAction
+ text string
+ issueTitle string
+ attachmentText string
+ color int
+ }{
+ {
+ api.HookIssueOpened,
+ "[test/repo] Issue opened: #2 crash by user1",
+ "#2 crash",
+ "issue body",
+ orangeColor,
+ },
+ {
+ api.HookIssueClosed,
+ "[test/repo] Issue closed: #2 crash by user1",
+ "#2 crash",
+ "",
+ redColor,
+ },
+ {
+ api.HookIssueReOpened,
+ "[test/repo] Issue re-opened: #2 crash by user1",
+ "#2 crash",
+ "",
+ yellowColor,
+ },
+ {
+ api.HookIssueEdited,
+ "[test/repo] Issue edited: #2 crash by user1",
+ "#2 crash",
+ "issue body",
+ yellowColor,
+ },
+ {
+ api.HookIssueAssigned,
+ "[test/repo] Issue assigned to user1: #2 crash by user1",
+ "#2 crash",
+ "",
+ greenColor,
+ },
+ {
+ api.HookIssueUnassigned,
+ "[test/repo] Issue unassigned: #2 crash by user1",
+ "#2 crash",
+ "",
+ yellowColor,
+ },
+ {
+ api.HookIssueLabelUpdated,
+ "[test/repo] Issue labels updated: #2 crash by user1",
+ "#2 crash",
+ "",
+ yellowColor,
+ },
+ {
+ api.HookIssueLabelCleared,
+ "[test/repo] Issue labels cleared: #2 crash by user1",
+ "#2 crash",
+ "",
+ yellowColor,
+ },
+ {
+ api.HookIssueSynchronized,
+ "[test/repo] Issue synchronized: #2 crash by user1",
+ "#2 crash",
+ "",
+ yellowColor,
+ },
+ {
+ api.HookIssueMilestoned,
+ "[test/repo] Issue milestoned to Milestone Title: #2 crash by user1",
+ "#2 crash",
+ "",
+ yellowColor,
+ },
+ {
+ api.HookIssueDemilestoned,
+ "[test/repo] Issue milestone cleared: #2 crash by user1",
+ "#2 crash",
+ "",
+ yellowColor,
+ },
+ }
+
+ for i, c := range cases {
+ p.Action = c.action
+ text, issueTitle, attachmentText, color := getIssuesPayloadInfo(p, noneLinkFormatter, true)
+ assert.Equal(t, c.text, text, "case %d", i)
+ assert.Equal(t, c.issueTitle, issueTitle, "case %d", i)
+ assert.Equal(t, c.attachmentText, attachmentText, "case %d", i)
+ assert.Equal(t, c.color, color, "case %d", i)
+ }
+}
+
+func TestGetPullRequestPayloadInfo(t *testing.T) {
+ p := pullRequestTestPayload()
+
+ cases := []struct {
+ action api.HookIssueAction
+ text string
+ issueTitle string
+ attachmentText string
+ color int
+ }{
+ {
+ api.HookIssueOpened,
+ "[test/repo] Pull request opened: #12 Fix bug by user1",
+ "#12 Fix bug",
+ "fixes bug #2",
+ greenColor,
+ },
+ {
+ api.HookIssueClosed,
+ "[test/repo] Pull request closed: #12 Fix bug by user1",
+ "#12 Fix bug",
+ "",
+ redColor,
+ },
+ {
+ api.HookIssueReOpened,
+ "[test/repo] Pull request re-opened: #12 Fix bug by user1",
+ "#12 Fix bug",
+ "",
+ yellowColor,
+ },
+ {
+ api.HookIssueEdited,
+ "[test/repo] Pull request edited: #12 Fix bug by user1",
+ "#12 Fix bug",
+ "fixes bug #2",
+ yellowColor,
+ },
+ {
+ api.HookIssueAssigned,
+ "[test/repo] Pull request assigned to user1: #12 Fix bug by user1",
+ "#12 Fix bug",
+ "",
+ greenColor,
+ },
+ {
+ api.HookIssueUnassigned,
+ "[test/repo] Pull request unassigned: #12 Fix bug by user1",
+ "#12 Fix bug",
+ "",
+ yellowColor,
+ },
+ {
+ api.HookIssueLabelUpdated,
+ "[test/repo] Pull request labels updated: #12 Fix bug by user1",
+ "#12 Fix bug",
+ "",
+ yellowColor,
+ },
+ {
+ api.HookIssueLabelCleared,
+ "[test/repo] Pull request labels cleared: #12 Fix bug by user1",
+ "#12 Fix bug",
+ "",
+ yellowColor,
+ },
+ {
+ api.HookIssueSynchronized,
+ "[test/repo] Pull request synchronized: #12 Fix bug by user1",
+ "#12 Fix bug",
+ "",
+ yellowColor,
+ },
+ {
+ api.HookIssueMilestoned,
+ "[test/repo] Pull request milestoned to Milestone Title: #12 Fix bug by user1",
+ "#12 Fix bug",
+ "",
+ yellowColor,
+ },
+ {
+ api.HookIssueDemilestoned,
+ "[test/repo] Pull request milestone cleared: #12 Fix bug by user1",
+ "#12 Fix bug",
+ "",
+ yellowColor,
+ },
+ }
+
+ for i, c := range cases {
+ p.Action = c.action
+ text, issueTitle, attachmentText, color := getPullRequestPayloadInfo(p, noneLinkFormatter, true)
+ assert.Equal(t, c.text, text, "case %d", i)
+ assert.Equal(t, c.issueTitle, issueTitle, "case %d", i)
+ assert.Equal(t, c.attachmentText, attachmentText, "case %d", i)
+ assert.Equal(t, c.color, color, "case %d", i)
+ }
+}
+
+func TestGetWikiPayloadInfo(t *testing.T) {
+ p := wikiTestPayload()
+
+ cases := []struct {
+ action api.HookWikiAction
+ text string
+ color int
+ link string
+ }{
+ {
+ api.HookWikiCreated,
+ "[test/repo] New wiki page 'index' (Wiki change comment) by user1",
+ greenColor,
+ "index",
+ },
+ {
+ api.HookWikiEdited,
+ "[test/repo] Wiki page 'index' edited (Wiki change comment) by user1",
+ yellowColor,
+ "index",
+ },
+ {
+ api.HookWikiDeleted,
+ "[test/repo] Wiki page 'index' deleted by user1",
+ redColor,
+ "index",
+ },
+ }
+
+ for i, c := range cases {
+ p.Action = c.action
+ text, color, link := getWikiPayloadInfo(p, noneLinkFormatter, true)
+ assert.Equal(t, c.text, text, "case %d", i)
+ assert.Equal(t, c.color, color, "case %d", i)
+ assert.Equal(t, c.link, link, "case %d", i)
+ }
+}
+
+func TestGetReleasePayloadInfo(t *testing.T) {
+ p := pullReleaseTestPayload()
+
+ cases := []struct {
+ action api.HookReleaseAction
+ text string
+ color int
+ }{
+ {
+ api.HookReleasePublished,
+ "[test/repo] Release created: v1.0 by user1",
+ greenColor,
+ },
+ {
+ api.HookReleaseUpdated,
+ "[test/repo] Release updated: v1.0 by user1",
+ yellowColor,
+ },
+ {
+ api.HookReleaseDeleted,
+ "[test/repo] Release deleted: v1.0 by user1",
+ redColor,
+ },
+ }
+
+ for i, c := range cases {
+ p.Action = c.action
+ text, color := getReleasePayloadInfo(p, noneLinkFormatter, true)
+ assert.Equal(t, c.text, text, "case %d", i)
+ assert.Equal(t, c.color, color, "case %d", i)
+ }
+}
+
+func TestGetIssueCommentPayloadInfo(t *testing.T) {
+ p := pullRequestCommentTestPayload()
+
+ cases := []struct {
+ action api.HookIssueCommentAction
+ text string
+ issueTitle string
+ color int
+ }{
+ {
+ api.HookIssueCommentCreated,
+ "[test/repo] New comment on pull request #12 Fix bug by user1",
+ "#12 Fix bug",
+ greenColorLight,
+ },
+ {
+ api.HookIssueCommentEdited,
+ "[test/repo] Comment edited on pull request #12 Fix bug by user1",
+ "#12 Fix bug",
+ yellowColor,
+ },
+ {
+ api.HookIssueCommentDeleted,
+ "[test/repo] Comment deleted on pull request #12 Fix bug by user1",
+ "#12 Fix bug",
+ redColor,
+ },
+ }
+
+ for i, c := range cases {
+ p.Action = c.action
+ text, issueTitle, color := getIssueCommentPayloadInfo(p, noneLinkFormatter, true)
+ assert.Equal(t, c.text, text, "case %d", i)
+ assert.Equal(t, c.issueTitle, issueTitle, "case %d", i)
+ assert.Equal(t, c.color, color, "case %d", i)
+ }
+}
diff --git a/services/webhook/gogs.go b/services/webhook/gogs.go
new file mode 100644
index 0000000..7dbf643
--- /dev/null
+++ b/services/webhook/gogs.go
@@ -0,0 +1,42 @@
+// Copyright 2024 The Forgejo Authors c/o Codeberg e.V.. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "html/template"
+ "net/http"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/webhook/shared"
+)
+
+type gogsHandler struct{ defaultHandler }
+
+func (gogsHandler) Type() webhook_module.HookType { return webhook_module.GOGS }
+func (gogsHandler) Icon(size int) template.HTML { return shared.ImgIcon("gogs.ico", size) }
+
+func (gogsHandler) UnmarshalForm(bind func(any)) forms.WebhookForm {
+ var form struct {
+ forms.WebhookCoreForm
+ PayloadURL string `binding:"Required;ValidUrl"`
+ ContentType int `binding:"Required"`
+ Secret string
+ }
+ bind(&form)
+
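+ // any content type other than "form" falls back to JSON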
+ contentType := webhook_model.ContentTypeJSON
+ if webhook_model.HookContentType(form.ContentType) == webhook_model.ContentTypeForm {
+ contentType = webhook_model.ContentTypeForm
+ }
+ return forms.WebhookForm{
+ WebhookCoreForm: form.WebhookCoreForm,
+ URL: form.PayloadURL,
+ ContentType: contentType,
+ Secret: form.Secret,
+ HTTPMethod: http.MethodPost,
+ Metadata: nil,
+ }
+}
diff --git a/services/webhook/main_test.go b/services/webhook/main_test.go
new file mode 100644
index 0000000..756b9db
--- /dev/null
+++ b/services/webhook/main_test.go
@@ -0,0 +1,26 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/hostmatcher"
+ "code.gitea.io/gitea/modules/setting"
+
+ _ "code.gitea.io/gitea/models"
+ _ "code.gitea.io/gitea/models/actions"
+)
+
+func TestMain(m *testing.M) {
+ // for tests, allow only loopback IPs
+ setting.Webhook.AllowedHostList = hostmatcher.MatchBuiltinLoopback
+ unittest.MainTest(m, &unittest.TestOptions{
+ SetUp: func() error {
+ setting.LoadQueueSettings()
+ return Init()
+ },
+ })
+}
diff --git a/services/webhook/matrix.go b/services/webhook/matrix.go
new file mode 100644
index 0000000..e70e7a2
--- /dev/null
+++ b/services/webhook/matrix.go
@@ -0,0 +1,316 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "bytes"
+ "context"
+ "crypto/sha1"
+ "encoding/hex"
+ "fmt"
+ "html/template"
+ "net/http"
+ "net/url"
+ "regexp"
+ "strings"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/svg"
+ "code.gitea.io/gitea/modules/util"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/webhook/shared"
+)
+
+type matrixHandler struct{}
+
+func (matrixHandler) Type() webhook_module.HookType { return webhook_module.MATRIX }
+
+func (matrixHandler) Icon(size int) template.HTML {
+ return svg.RenderHTML("gitea-matrix", size, "img")
+}
+
+func (matrixHandler) UnmarshalForm(bind func(any)) forms.WebhookForm {
+ var form struct {
+ forms.WebhookCoreForm
+ HomeserverURL string `binding:"Required;ValidUrl"`
+ RoomID string `binding:"Required"`
+ MessageType int
+ AccessToken string `binding:"Required"`
+ }
+ bind(&form)
+ form.AuthorizationHeader = "Bearer " + strings.TrimSpace(form.AccessToken)
+
+ // https://spec.matrix.org/v1.10/client-server-api/#sending-events-to-a-room
+ return forms.WebhookForm{
+ WebhookCoreForm: form.WebhookCoreForm,
+ URL: fmt.Sprintf("%s/_matrix/client/v3/rooms/%s/send/m.room.message", form.HomeserverURL, url.PathEscape(form.RoomID)),
+ ContentType: webhook_model.ContentTypeJSON,
+ Secret: "",
+ HTTPMethod: http.MethodPut,
+ Metadata: &MatrixMeta{
+ HomeserverURL: form.HomeserverURL,
+ Room: form.RoomID,
+ MessageType: form.MessageType,
+ },
+ }
+}
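+
+// Sketch: with HomeserverURL "https://matrix.example.com" and RoomID
+// "!room:example.com" (illustrative values), the stored webhook URL becomes
+// "https://matrix.example.com/_matrix/client/v3/rooms/<escaped room id>/send/m.room.message".
+// NewRequest later appends "/"+txnID, making each delivery an idempotent PUT.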
+
+func (matrixHandler) NewRequest(ctx context.Context, w *webhook_model.Webhook, t *webhook_model.HookTask) (*http.Request, []byte, error) {
+ meta := &MatrixMeta{}
+ if err := json.Unmarshal([]byte(w.Meta), meta); err != nil {
+ return nil, nil, fmt.Errorf("matrixHandler.NewRequest meta json: %w", err)
+ }
+ mc := matrixConvertor{
+ MsgType: messageTypeText[meta.MessageType],
+ }
+ payload, err := shared.NewPayload(mc, []byte(t.PayloadContent), t.EventType)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ body, err := json.MarshalIndent(payload, "", " ")
+ if err != nil {
+ return nil, nil, err
+ }
+
+ txnID, err := getMatrixTxnID(body)
+ if err != nil {
+ return nil, nil, err
+ }
+ req, err := http.NewRequest(http.MethodPut, w.URL+"/"+txnID, bytes.NewReader(body))
+ if err != nil {
+ return nil, nil, err
+ }
+ req.Header.Set("Content-Type", "application/json")
+
+ return req, body, nil
+}
+
+const matrixPayloadSizeLimit = 1024 * 64
+
+// MatrixMeta contains the Matrix metadata
+type MatrixMeta struct {
+ HomeserverURL string `json:"homeserver_url"`
+ Room string `json:"room_id"`
+ MessageType int `json:"message_type"`
+}
+
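+// messageTypeText maps the stored MessageType to a Matrix msgtype; values
+// missing from the map (e.g. 0) yield an empty msgtype.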
+var messageTypeText = map[int]string{
+ 1: "m.notice",
+ 2: "m.text",
+}
+
+// Metadata returns Matrix metadata
+func (matrixHandler) Metadata(w *webhook_model.Webhook) any {
+ s := &MatrixMeta{}
+ if err := json.Unmarshal([]byte(w.Meta), s); err != nil {
+ log.Error("matrixHandler.Metadata(%d): %v", w.ID, err)
+ }
+ return s
+}
+
+// MatrixPayload contains payload for a Matrix room
+type MatrixPayload struct {
+ Body string `json:"body"`
+ MsgType string `json:"msgtype"`
+ Format string `json:"format"`
+ FormattedBody string `json:"formatted_body"`
+ Commits []*api.PayloadCommit `json:"io.gitea.commits,omitempty"`
+}
+
+var _ shared.PayloadConvertor[MatrixPayload] = matrixConvertor{}
+
+type matrixConvertor struct {
+ MsgType string
+}
+
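+// newPayload fills both message bodies with the same text: FormattedBody
+// keeps the HTML, while Body is derived from it via getMessageBody
+// (anchors become Markdown-style links, <br> becomes a newline).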
+func (m matrixConvertor) newPayload(text string, commits ...*api.PayloadCommit) (MatrixPayload, error) {
+ return MatrixPayload{
+ Body: getMessageBody(text),
+ MsgType: m.MsgType,
+ Format: "org.matrix.custom.html",
+ FormattedBody: text,
+ Commits: commits,
+ }, nil
+}
+
+// Create implements payloadConvertor Create method
+func (m matrixConvertor) Create(p *api.CreatePayload) (MatrixPayload, error) {
+ repoLink := htmlLinkFormatter(p.Repo.HTMLURL, p.Repo.FullName)
+ refLink := MatrixLinkToRef(p.Repo.HTMLURL, p.Ref)
+ text := fmt.Sprintf("[%s:%s] %s created by %s", repoLink, refLink, p.RefType, p.Sender.UserName)
+
+ return m.newPayload(text)
+}
+
+// Delete composes the Matrix payload for a deleted branch or tag.
+func (m matrixConvertor) Delete(p *api.DeletePayload) (MatrixPayload, error) {
+ refName := git.RefName(p.Ref).ShortName()
+ repoLink := htmlLinkFormatter(p.Repo.HTMLURL, p.Repo.FullName)
+ text := fmt.Sprintf("[%s:%s] %s deleted by %s", repoLink, refName, p.RefType, p.Sender.UserName)
+
+ return m.newPayload(text)
+}
+
+// Fork composes the Matrix payload for a forked repository.
+func (m matrixConvertor) Fork(p *api.ForkPayload) (MatrixPayload, error) {
+ baseLink := htmlLinkFormatter(p.Forkee.HTMLURL, p.Forkee.FullName)
+ forkLink := htmlLinkFormatter(p.Repo.HTMLURL, p.Repo.FullName)
+ text := fmt.Sprintf("%s is forked to %s", baseLink, forkLink)
+
+ return m.newPayload(text)
+}
+
+// Issue implements payloadConvertor Issue method
+func (m matrixConvertor) Issue(p *api.IssuePayload) (MatrixPayload, error) {
+ text, _, _, _ := getIssuesPayloadInfo(p, htmlLinkFormatter, true)
+
+ return m.newPayload(text)
+}
+
+// IssueComment implements payloadConvertor IssueComment method
+func (m matrixConvertor) IssueComment(p *api.IssueCommentPayload) (MatrixPayload, error) {
+ text, _, _ := getIssueCommentPayloadInfo(p, htmlLinkFormatter, true)
+
+ return m.newPayload(text)
+}
+
+// Wiki implements payloadConvertor Wiki method
+func (m matrixConvertor) Wiki(p *api.WikiPayload) (MatrixPayload, error) {
+ text, _, _ := getWikiPayloadInfo(p, htmlLinkFormatter, true)
+
+ return m.newPayload(text)
+}
+
+// Release implements payloadConvertor Release method
+func (m matrixConvertor) Release(p *api.ReleasePayload) (MatrixPayload, error) {
+ text, _ := getReleasePayloadInfo(p, htmlLinkFormatter, true)
+
+ return m.newPayload(text)
+}
+
+// Push implements payloadConvertor Push method
+func (m matrixConvertor) Push(p *api.PushPayload) (MatrixPayload, error) {
+ var commitDesc string
+
+ if p.TotalCommits == 1 {
+ commitDesc = "1 commit"
+ } else {
+ commitDesc = fmt.Sprintf("%d commits", p.TotalCommits)
+ }
+
+ repoLink := htmlLinkFormatter(p.Repo.HTMLURL, p.Repo.FullName)
+ branchLink := MatrixLinkToRef(p.Repo.HTMLURL, p.Ref)
+ text := fmt.Sprintf("[%s] %s pushed %s to %s:<br>", repoLink, p.Pusher.UserName, commitDesc, branchLink)
+
+ // for each commit, generate a new line text
+ for i, commit := range p.Commits {
+ text += fmt.Sprintf("%s: %s - %s", htmlLinkFormatter(commit.URL, commit.ID[:7]), commit.Message, commit.Author.Name)
+ // add linebreak to each commit but the last
+ if i < len(p.Commits)-1 {
+ text += "<br>"
+ }
+ }
+
+ return m.newPayload(text, p.Commits...)
+}
+
+// PullRequest implements payloadConvertor PullRequest method
+func (m matrixConvertor) PullRequest(p *api.PullRequestPayload) (MatrixPayload, error) {
+ text, _, _, _ := getPullRequestPayloadInfo(p, htmlLinkFormatter, true)
+
+ return m.newPayload(text)
+}
+
+// Review implements payloadConvertor Review method
+func (m matrixConvertor) Review(p *api.PullRequestPayload, event webhook_module.HookEventType) (MatrixPayload, error) {
+ senderLink := htmlLinkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName)
+ title := fmt.Sprintf("#%d %s", p.Index, p.PullRequest.Title)
+ titleLink := htmlLinkFormatter(p.PullRequest.HTMLURL, title)
+ repoLink := htmlLinkFormatter(p.Repository.HTMLURL, p.Repository.FullName)
+ var text string
+
+ if p.Action == api.HookIssueReviewed {
+ action, err := parseHookPullRequestEventType(event)
+ if err != nil {
+ return MatrixPayload{}, err
+ }
+
+ text = fmt.Sprintf("[%s] Pull request review %s: %s by %s", repoLink, action, titleLink, senderLink)
+ }
+
+ return m.newPayload(text)
+}
+
+// Repository implements payloadConvertor Repository method
+func (m matrixConvertor) Repository(p *api.RepositoryPayload) (MatrixPayload, error) {
+ senderLink := htmlLinkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName)
+ repoLink := htmlLinkFormatter(p.Repository.HTMLURL, p.Repository.FullName)
+ var text string
+
+ switch p.Action {
+ case api.HookRepoCreated:
+ text = fmt.Sprintf("[%s] Repository created by %s", repoLink, senderLink)
+ case api.HookRepoDeleted:
+ text = fmt.Sprintf("[%s] Repository deleted by %s", repoLink, senderLink)
+ }
+ return m.newPayload(text)
+}
+
+func (m matrixConvertor) Package(p *api.PackagePayload) (MatrixPayload, error) {
+ senderLink := htmlLinkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName)
+ packageLink := htmlLinkFormatter(p.Package.HTMLURL, p.Package.Name)
+ var text string
+
+ switch p.Action {
+ case api.HookPackageCreated:
+ text = fmt.Sprintf("[%s] Package published by %s", packageLink, senderLink)
+ case api.HookPackageDeleted:
+ text = fmt.Sprintf("[%s] Package deleted by %s", packageLink, senderLink)
+ }
+
+ return m.newPayload(text)
+}
+
+var urlRegex = regexp.MustCompile(`<a [^>]*?href="([^">]*?)">(.*?)</a>`)
+
+func getMessageBody(htmlText string) string {
+ htmlText = urlRegex.ReplaceAllString(htmlText, "[$2]($1)")
+ htmlText = strings.ReplaceAll(htmlText, "<br>", "\n")
+ return htmlText
+}
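+
+// For example (sketch):
+//
+//	getMessageBody(`<a href="https://example.com">x</a><br>y`)
+//	// returns "[x](https://example.com)\ny"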
+
+// getMatrixTxnID computes the transaction ID to ensure idempotency
+func getMatrixTxnID(payload []byte) (string, error) {
+ if len(payload) >= matrixPayloadSizeLimit {
+ return "", fmt.Errorf("getMatrixTxnID: payload size %d > %d", len(payload), matrixPayloadSizeLimit)
+ }
+
+ h := sha1.New()
+ _, err := h.Write(payload)
+ if err != nil {
+ return "", err
+ }
+
+ return hex.EncodeToString(h.Sum(nil)), nil
+}
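+
+// Example (mirrors Test_getTxnID in matrix_test.go):
+//
+//	txnID, _ := getMatrixTxnID([]byte("Hello World"))
+//	// txnID == "0a4d55a8d778e5022fab701977c5d840bbc486d0", the SHA-1 of the
+//	// payload, so redelivering the same body reuses the same transaction ID.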
+
+// MatrixLinkToRef returns a Matrix-formatted link to a repo ref
+func MatrixLinkToRef(repoURL, ref string) string {
+ refName := git.RefName(ref).ShortName()
+ switch {
+ case strings.HasPrefix(ref, git.BranchPrefix):
+ return htmlLinkFormatter(repoURL+"/src/branch/"+util.PathEscapeSegments(refName), refName)
+ case strings.HasPrefix(ref, git.TagPrefix):
+ return htmlLinkFormatter(repoURL+"/src/tag/"+util.PathEscapeSegments(refName), refName)
+ default:
+ return htmlLinkFormatter(repoURL+"/src/commit/"+util.PathEscapeSegments(refName), refName)
+ }
+}
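+
+// Sketch: MatrixLinkToRef("http://localhost:3000/test/repo", "refs/heads/test")
+// links to ".../src/branch/test"; tag refs map to "/src/tag/" and any other
+// ref to "/src/commit/".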
diff --git a/services/webhook/matrix_test.go b/services/webhook/matrix_test.go
new file mode 100644
index 0000000..6cedb15
--- /dev/null
+++ b/services/webhook/matrix_test.go
@@ -0,0 +1,255 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "context"
+ "testing"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/json"
+ api "code.gitea.io/gitea/modules/structs"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestMatrixPayload(t *testing.T) {
+ mc := matrixConvertor{
+ MsgType: "m.text",
+ }
+
+ t.Run("Create", func(t *testing.T) {
+ p := createTestPayload()
+
+ pl, err := mc.Create(p)
+ require.NoError(t, err)
+ require.NotNil(t, pl)
+
+ assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo):[test](http://localhost:3000/test/repo/src/branch/test)] branch created by user1", pl.Body)
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo">test/repo</a>:<a href="http://localhost:3000/test/repo/src/branch/test">test</a>] branch created by user1`, pl.FormattedBody)
+ })
+
+ t.Run("Delete", func(t *testing.T) {
+ p := deleteTestPayload()
+
+ pl, err := mc.Delete(p)
+ require.NoError(t, err)
+ require.NotNil(t, pl)
+
+ assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo):test] branch deleted by user1", pl.Body)
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo">test/repo</a>:test] branch deleted by user1`, pl.FormattedBody)
+ })
+
+ t.Run("Fork", func(t *testing.T) {
+ p := forkTestPayload()
+
+ pl, err := mc.Fork(p)
+ require.NoError(t, err)
+ require.NotNil(t, pl)
+
+ assert.Equal(t, "[test/repo2](http://localhost:3000/test/repo2) is forked to [test/repo](http://localhost:3000/test/repo)", pl.Body)
+ assert.Equal(t, `<a href="http://localhost:3000/test/repo2">test/repo2</a> is forked to <a href="http://localhost:3000/test/repo">test/repo</a>`, pl.FormattedBody)
+ })
+
+ t.Run("Push", func(t *testing.T) {
+ p := pushTestPayload()
+
+ pl, err := mc.Push(p)
+ require.NoError(t, err)
+ require.NotNil(t, pl)
+
+ assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] user1 pushed 2 commits to [test](http://localhost:3000/test/repo/src/branch/test):\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778): commit message - user1\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778): commit message - user1", pl.Body)
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo">test/repo</a>] user1 pushed 2 commits to <a href="http://localhost:3000/test/repo/src/branch/test">test</a>:<br><a href="http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778">2020558</a>: commit message - user1<br><a href="http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778">2020558</a>: commit message - user1`, pl.FormattedBody)
+ })
+
+ t.Run("Issue", func(t *testing.T) {
+ p := issueTestPayload()
+
+ p.Action = api.HookIssueOpened
+ pl, err := mc.Issue(p)
+ require.NoError(t, err)
+ require.NotNil(t, pl)
+
+ assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] Issue opened: [#2 crash](http://localhost:3000/test/repo/issues/2) by [user1](https://try.gitea.io/user1)", pl.Body)
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo">test/repo</a>] Issue opened: <a href="http://localhost:3000/test/repo/issues/2">#2 crash</a> by <a href="https://try.gitea.io/user1">user1</a>`, pl.FormattedBody)
+
+ p.Action = api.HookIssueClosed
+ pl, err = mc.Issue(p)
+ require.NoError(t, err)
+ require.NotNil(t, pl)
+
+ assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] Issue closed: [#2 crash](http://localhost:3000/test/repo/issues/2) by [user1](https://try.gitea.io/user1)", pl.Body)
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo">test/repo</a>] Issue closed: <a href="http://localhost:3000/test/repo/issues/2">#2 crash</a> by <a href="https://try.gitea.io/user1">user1</a>`, pl.FormattedBody)
+ })
+
+ t.Run("IssueComment", func(t *testing.T) {
+ p := issueCommentTestPayload()
+
+ pl, err := mc.IssueComment(p)
+ require.NoError(t, err)
+ require.NotNil(t, pl)
+
+ assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] New comment on issue [#2 crash](http://localhost:3000/test/repo/issues/2) by [user1](https://try.gitea.io/user1)", pl.Body)
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo">test/repo</a>] New comment on issue <a href="http://localhost:3000/test/repo/issues/2">#2 crash</a> by <a href="https://try.gitea.io/user1">user1</a>`, pl.FormattedBody)
+ })
+
+ t.Run("PullRequest", func(t *testing.T) {
+ p := pullRequestTestPayload()
+
+ pl, err := mc.PullRequest(p)
+ require.NoError(t, err)
+ require.NotNil(t, pl)
+
+ assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] Pull request opened: [#12 Fix bug](http://localhost:3000/test/repo/pulls/12) by [user1](https://try.gitea.io/user1)", pl.Body)
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo">test/repo</a>] Pull request opened: <a href="http://localhost:3000/test/repo/pulls/12">#12 Fix bug</a> by <a href="https://try.gitea.io/user1">user1</a>`, pl.FormattedBody)
+ })
+
+ t.Run("PullRequestComment", func(t *testing.T) {
+ p := pullRequestCommentTestPayload()
+
+ pl, err := mc.IssueComment(p)
+ require.NoError(t, err)
+ require.NotNil(t, pl)
+
+ assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] New comment on pull request [#12 Fix bug](http://localhost:3000/test/repo/pulls/12) by [user1](https://try.gitea.io/user1)", pl.Body)
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo">test/repo</a>] New comment on pull request <a href="http://localhost:3000/test/repo/pulls/12">#12 Fix bug</a> by <a href="https://try.gitea.io/user1">user1</a>`, pl.FormattedBody)
+ })
+
+ t.Run("Review", func(t *testing.T) {
+ p := pullRequestTestPayload()
+ p.Action = api.HookIssueReviewed
+
+ pl, err := mc.Review(p, webhook_module.HookEventPullRequestReviewApproved)
+ require.NoError(t, err)
+ require.NotNil(t, pl)
+
+ assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] Pull request review approved: [#12 Fix bug](http://localhost:3000/test/repo/pulls/12) by [user1](https://try.gitea.io/user1)", pl.Body)
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo">test/repo</a>] Pull request review approved: <a href="http://localhost:3000/test/repo/pulls/12">#12 Fix bug</a> by <a href="https://try.gitea.io/user1">user1</a>`, pl.FormattedBody)
+ })
+
+ t.Run("Repository", func(t *testing.T) {
+ p := repositoryTestPayload()
+
+ pl, err := mc.Repository(p)
+ require.NoError(t, err)
+ require.NotNil(t, pl)
+
+ assert.Equal(t, `[[test/repo](http://localhost:3000/test/repo)] Repository created by [user1](https://try.gitea.io/user1)`, pl.Body)
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo">test/repo</a>] Repository created by <a href="https://try.gitea.io/user1">user1</a>`, pl.FormattedBody)
+ })
+
+ t.Run("Package", func(t *testing.T) {
+ p := packageTestPayload()
+
+ pl, err := mc.Package(p)
+ require.NoError(t, err)
+ require.NotNil(t, pl)
+
+ assert.Equal(t, `[[GiteaContainer](http://localhost:3000/user1/-/packages/container/GiteaContainer/latest)] Package published by [user1](https://try.gitea.io/user1)`, pl.Body)
+ assert.Equal(t, `[<a href="http://localhost:3000/user1/-/packages/container/GiteaContainer/latest">GiteaContainer</a>] Package published by <a href="https://try.gitea.io/user1">user1</a>`, pl.FormattedBody)
+ })
+
+ t.Run("Wiki", func(t *testing.T) {
+ p := wikiTestPayload()
+
+ p.Action = api.HookWikiCreated
+ pl, err := mc.Wiki(p)
+ require.NoError(t, err)
+ require.NotNil(t, pl)
+
+ assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] New wiki page '[index](http://localhost:3000/test/repo/wiki/index)' (Wiki change comment) by [user1](https://try.gitea.io/user1)", pl.Body)
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo">test/repo</a>] New wiki page '<a href="http://localhost:3000/test/repo/wiki/index">index</a>' (Wiki change comment) by <a href="https://try.gitea.io/user1">user1</a>`, pl.FormattedBody)
+
+ p.Action = api.HookWikiEdited
+ pl, err = mc.Wiki(p)
+ require.NoError(t, err)
+ require.NotNil(t, pl)
+
+ assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] Wiki page '[index](http://localhost:3000/test/repo/wiki/index)' edited (Wiki change comment) by [user1](https://try.gitea.io/user1)", pl.Body)
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo">test/repo</a>] Wiki page '<a href="http://localhost:3000/test/repo/wiki/index">index</a>' edited (Wiki change comment) by <a href="https://try.gitea.io/user1">user1</a>`, pl.FormattedBody)
+
+ p.Action = api.HookWikiDeleted
+ pl, err = mc.Wiki(p)
+ require.NoError(t, err)
+ require.NotNil(t, pl)
+
+ assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] Wiki page '[index](http://localhost:3000/test/repo/wiki/index)' deleted by [user1](https://try.gitea.io/user1)", pl.Body)
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo">test/repo</a>] Wiki page '<a href="http://localhost:3000/test/repo/wiki/index">index</a>' deleted by <a href="https://try.gitea.io/user1">user1</a>`, pl.FormattedBody)
+ })
+
+ t.Run("Release", func(t *testing.T) {
+ p := pullReleaseTestPayload()
+
+ pl, err := mc.Release(p)
+ require.NoError(t, err)
+ require.NotNil(t, pl)
+
+ assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] Release created: [v1.0](http://localhost:3000/test/repo/releases/tag/v1.0) by [user1](https://try.gitea.io/user1)", pl.Body)
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo">test/repo</a>] Release created: <a href="http://localhost:3000/test/repo/releases/tag/v1.0">v1.0</a> by <a href="https://try.gitea.io/user1">user1</a>`, pl.FormattedBody)
+ })
+}
+
+func TestMatrixJSONPayload(t *testing.T) {
+ p := pushTestPayload()
+ data, err := p.JSONPayload()
+ require.NoError(t, err)
+
+ hook := &webhook_model.Webhook{
+ RepoID: 3,
+ IsActive: true,
+ Type: webhook_module.MATRIX,
+ URL: "https://matrix.example.com/_matrix/client/v3/rooms/ROOM_ID/send/m.room.message",
+ Meta: `{"message_type":0}`, // 0 has no entry in messageTypeText, so MsgType is empty
+ }
+ task := &webhook_model.HookTask{
+ HookID: hook.ID,
+ EventType: webhook_module.HookEventPush,
+ PayloadContent: string(data),
+ PayloadVersion: 2,
+ }
+
+ req, reqBody, err := matrixHandler{}.NewRequest(context.Background(), hook, task)
+ require.NotNil(t, req)
+ require.NotNil(t, reqBody)
+ require.NoError(t, err)
+
+ assert.Equal(t, "PUT", req.Method)
+ assert.Equal(t, "/_matrix/client/v3/rooms/ROOM_ID/send/m.room.message/6db5dc1e282529a8c162c7fe93dd2667494eeb51", req.URL.Path)
+ assert.Equal(t, "application/json", req.Header.Get("Content-Type"))
+ var body MatrixPayload
+ err = json.NewDecoder(req.Body).Decode(&body)
+ require.NoError(t, err)
+ assert.Equal(t, "[[test/repo](http://localhost:3000/test/repo)] user1 pushed 2 commits to [test](http://localhost:3000/test/repo/src/branch/test):\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778): commit message - user1\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778): commit message - user1", body.Body)
+}
+
+func Test_getTxnID(t *testing.T) {
+ type args struct {
+ payload []byte
+ }
+ tests := []struct {
+ name string
+ args args
+ want string
+ wantErr bool
+ }{
+ {
+ name: "dummy payload",
+ args: args{payload: []byte("Hello World")},
+ want: "0a4d55a8d778e5022fab701977c5d840bbc486d0",
+ wantErr: false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got, err := getMatrixTxnID(tt.args.payload)
+ if (err != nil) != tt.wantErr {
+ t.Errorf("getMatrixTxnID() error = %v, wantErr %v", err, tt.wantErr)
+ return
+ }
+ assert.Equal(t, tt.want, got)
+ })
+ }
+}
diff --git a/services/webhook/msteams.go b/services/webhook/msteams.go
new file mode 100644
index 0000000..736d084
--- /dev/null
+++ b/services/webhook/msteams.go
@@ -0,0 +1,377 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "context"
+ "fmt"
+ "html/template"
+ "net/http"
+ "net/url"
+ "strings"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/git"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/webhook/shared"
+)
+
+type msteamsHandler struct{}
+
+func (msteamsHandler) Type() webhook_module.HookType { return webhook_module.MSTEAMS }
+func (msteamsHandler) Metadata(*webhook_model.Webhook) any { return nil }
+func (msteamsHandler) Icon(size int) template.HTML { return shared.ImgIcon("msteams.png", size) }
+
+func (msteamsHandler) UnmarshalForm(bind func(any)) forms.WebhookForm {
+ var form struct {
+ forms.WebhookCoreForm
+ PayloadURL string `binding:"Required;ValidUrl"`
+ }
+ bind(&form)
+
+ return forms.WebhookForm{
+ WebhookCoreForm: form.WebhookCoreForm,
+ URL: form.PayloadURL,
+ ContentType: webhook_model.ContentTypeJSON,
+ Secret: "",
+ HTTPMethod: http.MethodPost,
+ Metadata: nil,
+ }
+}
+
+type (
+ // MSTeamsFact for Fact Structure
+ MSTeamsFact struct {
+ Name string `json:"name"`
+ Value string `json:"value"`
+ }
+
+ // MSTeamsSection is a MessageCard section
+ MSTeamsSection struct {
+ ActivityTitle string `json:"activityTitle"`
+ ActivitySubtitle string `json:"activitySubtitle"`
+ ActivityImage string `json:"activityImage"`
+ Facts []MSTeamsFact `json:"facts"`
+ Text string `json:"text"`
+ }
+
+ // MSTeamsAction is an action (creates buttons, links, etc.)
+ MSTeamsAction struct {
+ Type string `json:"@type"`
+ Name string `json:"name"`
+ Targets []MSTeamsActionTarget `json:"targets,omitempty"`
+ }
+
+ // MSTeamsActionTarget is the actual link to follow
+ MSTeamsActionTarget struct {
+ Os string `json:"os"`
+ URI string `json:"uri"`
+ }
+
+ // MSTeamsPayload is the parent object
+ MSTeamsPayload struct {
+ Type string `json:"@type"`
+ Context string `json:"@context"`
+ ThemeColor string `json:"themeColor"`
+ Title string `json:"title"`
+ Summary string `json:"summary"`
+ Sections []MSTeamsSection `json:"sections"`
+ PotentialAction []MSTeamsAction `json:"potentialAction"`
+ }
+)
+
+// Create implements PayloadConvertor Create method
+func (m msteamsConvertor) Create(p *api.CreatePayload) (MSTeamsPayload, error) {
+ // created tag/branch
+ refName := git.RefName(p.Ref).ShortName()
+ title := fmt.Sprintf("[%s] %s %s created", p.Repo.FullName, p.RefType, refName)
+
+ return createMSTeamsPayload(
+ p.Repo,
+ p.Sender,
+ title,
+ "",
+ p.Repo.HTMLURL+"/src/"+util.PathEscapeSegments(refName),
+ greenColor,
+ &MSTeamsFact{fmt.Sprintf("%s:", p.RefType), refName},
+ ), nil
+}
+
+// Delete implements PayloadConvertor Delete method
+func (m msteamsConvertor) Delete(p *api.DeletePayload) (MSTeamsPayload, error) {
+ // deleted tag/branch
+ refName := git.RefName(p.Ref).ShortName()
+ title := fmt.Sprintf("[%s] %s %s deleted", p.Repo.FullName, p.RefType, refName)
+
+ return createMSTeamsPayload(
+ p.Repo,
+ p.Sender,
+ title,
+ "",
+ p.Repo.HTMLURL+"/src/"+util.PathEscapeSegments(refName),
+ yellowColor,
+ &MSTeamsFact{fmt.Sprintf("%s:", p.RefType), refName},
+ ), nil
+}
+
+// Fork implements PayloadConvertor Fork method
+func (m msteamsConvertor) Fork(p *api.ForkPayload) (MSTeamsPayload, error) {
+ title := fmt.Sprintf("%s is forked to %s", p.Forkee.FullName, p.Repo.FullName)
+
+ return createMSTeamsPayload(
+ p.Repo,
+ p.Sender,
+ title,
+ "",
+ p.Repo.HTMLURL,
+ greenColor,
+ &MSTeamsFact{"Forkee:", p.Forkee.FullName},
+ ), nil
+}
+
+// Push implements PayloadConvertor Push method
+func (m msteamsConvertor) Push(p *api.PushPayload) (MSTeamsPayload, error) {
+ var (
+ branchName = git.RefName(p.Ref).ShortName()
+ commitDesc string
+ )
+
+ var titleLink string
+ if p.TotalCommits == 1 {
+ commitDesc = "1 new commit"
+ titleLink = p.Commits[0].URL
+ } else {
+ commitDesc = fmt.Sprintf("%d new commits", p.TotalCommits)
+ titleLink = p.CompareURL
+ }
+ if titleLink == "" {
+ titleLink = p.Repo.HTMLURL + "/src/" + util.PathEscapeSegments(branchName)
+ }
+
+ title := fmt.Sprintf("[%s:%s] %s", p.Repo.FullName, branchName, commitDesc)
+
+ var text string
+ // for each commit, generate attachment text
+ for i, commit := range p.Commits {
+ text += fmt.Sprintf("[%s](%s) %s - %s", commit.ID[:7], commit.URL,
+ strings.TrimRight(commit.Message, "\r\n"), commit.Author.Name)
+ // add linebreak to each commit but the last
+ if i < len(p.Commits)-1 {
+ text += "\n\n"
+ }
+ }
+
+ return createMSTeamsPayload(
+ p.Repo,
+ p.Sender,
+ title,
+ text,
+ titleLink,
+ greenColor,
+ &MSTeamsFact{"Commit count:", fmt.Sprintf("%d", p.TotalCommits)},
+ ), nil
+}
+
+// Issue implements PayloadConvertor Issue method
+func (m msteamsConvertor) Issue(p *api.IssuePayload) (MSTeamsPayload, error) {
+ title, _, attachmentText, color := getIssuesPayloadInfo(p, noneLinkFormatter, false)
+
+ return createMSTeamsPayload(
+ p.Repository,
+ p.Sender,
+ title,
+ attachmentText,
+ p.Issue.HTMLURL,
+ color,
+ &MSTeamsFact{"Issue #:", fmt.Sprintf("%d", p.Issue.ID)},
+ ), nil
+}
+
+// IssueComment implements PayloadConvertor IssueComment method
+func (m msteamsConvertor) IssueComment(p *api.IssueCommentPayload) (MSTeamsPayload, error) {
+ title, _, color := getIssueCommentPayloadInfo(p, noneLinkFormatter, false)
+
+ return createMSTeamsPayload(
+ p.Repository,
+ p.Sender,
+ title,
+ p.Comment.Body,
+ p.Comment.HTMLURL,
+ color,
+ &MSTeamsFact{"Issue #:", fmt.Sprintf("%d", p.Issue.ID)},
+ ), nil
+}
+
+// PullRequest implements PayloadConvertor PullRequest method
+func (m msteamsConvertor) PullRequest(p *api.PullRequestPayload) (MSTeamsPayload, error) {
+ title, _, attachmentText, color := getPullRequestPayloadInfo(p, noneLinkFormatter, false)
+
+ return createMSTeamsPayload(
+ p.Repository,
+ p.Sender,
+ title,
+ attachmentText,
+ p.PullRequest.HTMLURL,
+ color,
+ &MSTeamsFact{"Pull request #:", fmt.Sprintf("%d", p.PullRequest.ID)},
+ ), nil
+}
+
+// Review implements PayloadConvertor Review method
+func (m msteamsConvertor) Review(p *api.PullRequestPayload, event webhook_module.HookEventType) (MSTeamsPayload, error) {
+ var text, title string
+ var color int
+ if p.Action == api.HookIssueReviewed {
+ action, err := parseHookPullRequestEventType(event)
+ if err != nil {
+ return MSTeamsPayload{}, err
+ }
+
+ title = fmt.Sprintf("[%s] Pull request review %s: #%d %s", p.Repository.FullName, action, p.Index, p.PullRequest.Title)
+ text = p.Review.Content
+
+ switch event {
+ case webhook_module.HookEventPullRequestReviewApproved:
+ color = greenColor
+ case webhook_module.HookEventPullRequestReviewRejected:
+ color = redColor
+ case webhook_module.HookEventPullRequestReviewComment:
+ color = greyColor
+ default:
+ color = yellowColor
+ }
+ }
+
+ return createMSTeamsPayload(
+ p.Repository,
+ p.Sender,
+ title,
+ text,
+ p.PullRequest.HTMLURL,
+ color,
+ &MSTeamsFact{"Pull request #:", fmt.Sprintf("%d", p.PullRequest.ID)},
+ ), nil
+}
+
+// Repository implements PayloadConvertor Repository method
+func (m msteamsConvertor) Repository(p *api.RepositoryPayload) (MSTeamsPayload, error) {
+ var title, url string
+ var color int
+ switch p.Action {
+ case api.HookRepoCreated:
+ title = fmt.Sprintf("[%s] Repository created", p.Repository.FullName)
+ url = p.Repository.HTMLURL
+ color = greenColor
+ case api.HookRepoDeleted:
+ title = fmt.Sprintf("[%s] Repository deleted", p.Repository.FullName)
+ color = yellowColor
+ }
+
+ return createMSTeamsPayload(
+ p.Repository,
+ p.Sender,
+ title,
+ "",
+ url,
+ color,
+ nil,
+ ), nil
+}
+
+// Wiki implements PayloadConvertor Wiki method
+func (m msteamsConvertor) Wiki(p *api.WikiPayload) (MSTeamsPayload, error) {
+ title, color, _ := getWikiPayloadInfo(p, noneLinkFormatter, false)
+
+ return createMSTeamsPayload(
+ p.Repository,
+ p.Sender,
+ title,
+ "",
+ p.Repository.HTMLURL+"/wiki/"+url.PathEscape(p.Page),
+ color,
+ &MSTeamsFact{"Repository:", p.Repository.FullName},
+ ), nil
+}
+
+// Release implements PayloadConvertor Release method
+func (m msteamsConvertor) Release(p *api.ReleasePayload) (MSTeamsPayload, error) {
+ title, color := getReleasePayloadInfo(p, noneLinkFormatter, false)
+
+ return createMSTeamsPayload(
+ p.Repository,
+ p.Sender,
+ title,
+ "",
+ p.Release.HTMLURL,
+ color,
+ &MSTeamsFact{"Tag:", p.Release.TagName},
+ ), nil
+}
+
+func (m msteamsConvertor) Package(p *api.PackagePayload) (MSTeamsPayload, error) {
+ title, color := getPackagePayloadInfo(p, noneLinkFormatter, false)
+
+ return createMSTeamsPayload(
+ p.Repository,
+ p.Sender,
+ title,
+ "",
+ p.Package.HTMLURL,
+ color,
+ &MSTeamsFact{"Package:", p.Package.Name},
+ ), nil
+}
+
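+// createMSTeamsPayload assembles the MessageCard common to all convertors:
+// a "Repository:" fact whenever the repository is known, plus at most one
+// event-specific fact. A minimal sketch of a call, mirroring the Release
+// convertor above (names taken from this file):
+//
+//	createMSTeamsPayload(
+//		p.Repository, p.Sender,
+//		title, "",
+//		p.Release.HTMLURL,
+//		color,
+//		&MSTeamsFact{"Tag:", p.Release.TagName},
+//	)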
+func createMSTeamsPayload(r *api.Repository, s *api.User, title, text, actionTarget string, color int, fact *MSTeamsFact) MSTeamsPayload {
+ facts := make([]MSTeamsFact, 0, 2)
+ if r != nil {
+ facts = append(facts, MSTeamsFact{
+ Name: "Repository:",
+ Value: r.FullName,
+ })
+ }
+ if fact != nil {
+ facts = append(facts, *fact)
+ }
+
+ return MSTeamsPayload{
+ Type: "MessageCard",
+ Context: "https://schema.org/extensions",
+ ThemeColor: fmt.Sprintf("%x", color),
+ Title: title,
+ Summary: title,
+ Sections: []MSTeamsSection{
+ {
+ ActivityTitle: s.FullName,
+ ActivitySubtitle: s.UserName,
+ ActivityImage: s.AvatarURL,
+ Text: text,
+ Facts: facts,
+ },
+ },
+ PotentialAction: []MSTeamsAction{
+ {
+ Type: "OpenUri",
+ Name: "View in Gitea",
+ Targets: []MSTeamsActionTarget{
+ {
+ Os: "default",
+ URI: actionTarget,
+ },
+ },
+ },
+ },
+ }
+}
+
+type msteamsConvertor struct{}
+
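+// compile-time check that msteamsConvertor satisfies the generic convertor interface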
+var _ shared.PayloadConvertor[MSTeamsPayload] = msteamsConvertor{}
+
+func (msteamsHandler) NewRequest(ctx context.Context, w *webhook_model.Webhook, t *webhook_model.HookTask) (*http.Request, []byte, error) {
+ return shared.NewJSONRequest(msteamsConvertor{}, w, t, true)
+}
diff --git a/services/webhook/msteams_test.go b/services/webhook/msteams_test.go
new file mode 100644
index 0000000..a97e9f3
--- /dev/null
+++ b/services/webhook/msteams_test.go
@@ -0,0 +1,455 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "context"
+ "testing"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/json"
+ api "code.gitea.io/gitea/modules/structs"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestMSTeamsPayload(t *testing.T) {
+ mc := msteamsConvertor{}
+ t.Run("Create", func(t *testing.T) {
+ p := createTestPayload()
+
+ pl, err := mc.Create(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] branch test created", pl.Title)
+ assert.Equal(t, "[test/repo] branch test created", pl.Summary)
+ assert.Len(t, pl.Sections, 1)
+ assert.Equal(t, "user1", pl.Sections[0].ActivitySubtitle)
+ assert.Empty(t, pl.Sections[0].Text)
+ assert.Len(t, pl.Sections[0].Facts, 2)
+ for _, fact := range pl.Sections[0].Facts {
+ if fact.Name == "Repository:" {
+ assert.Equal(t, p.Repo.FullName, fact.Value)
+ } else if fact.Name == "branch:" {
+ assert.Equal(t, "test", fact.Value)
+ } else {
+ t.Fail()
+ }
+ }
+ assert.Len(t, pl.PotentialAction, 1)
+ assert.Len(t, pl.PotentialAction[0].Targets, 1)
+ assert.Equal(t, "http://localhost:3000/test/repo/src/test", pl.PotentialAction[0].Targets[0].URI)
+ })
+
+ t.Run("Delete", func(t *testing.T) {
+ p := deleteTestPayload()
+
+ pl, err := mc.Delete(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] branch test deleted", pl.Title)
+ assert.Equal(t, "[test/repo] branch test deleted", pl.Summary)
+ assert.Len(t, pl.Sections, 1)
+ assert.Equal(t, "user1", pl.Sections[0].ActivitySubtitle)
+ assert.Empty(t, pl.Sections[0].Text)
+ assert.Len(t, pl.Sections[0].Facts, 2)
+ for _, fact := range pl.Sections[0].Facts {
+ if fact.Name == "Repository:" {
+ assert.Equal(t, p.Repo.FullName, fact.Value)
+ } else if fact.Name == "branch:" {
+ assert.Equal(t, "test", fact.Value)
+ } else {
+ t.Fail()
+ }
+ }
+ assert.Len(t, pl.PotentialAction, 1)
+ assert.Len(t, pl.PotentialAction[0].Targets, 1)
+ assert.Equal(t, "http://localhost:3000/test/repo/src/test", pl.PotentialAction[0].Targets[0].URI)
+ })
+
+ t.Run("Fork", func(t *testing.T) {
+ p := forkTestPayload()
+
+ pl, err := mc.Fork(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "test/repo2 is forked to test/repo", pl.Title)
+ assert.Equal(t, "test/repo2 is forked to test/repo", pl.Summary)
+ assert.Len(t, pl.Sections, 1)
+ assert.Equal(t, "user1", pl.Sections[0].ActivitySubtitle)
+ assert.Empty(t, pl.Sections[0].Text)
+ assert.Len(t, pl.Sections[0].Facts, 2)
+ for _, fact := range pl.Sections[0].Facts {
+ if fact.Name == "Repository:" {
+ assert.Equal(t, p.Repo.FullName, fact.Value)
+ } else if fact.Name == "Forkee:" {
+ assert.Equal(t, p.Forkee.FullName, fact.Value)
+ } else {
+ t.Fail()
+ }
+ }
+ assert.Len(t, pl.PotentialAction, 1)
+ assert.Len(t, pl.PotentialAction[0].Targets, 1)
+ assert.Equal(t, "http://localhost:3000/test/repo", pl.PotentialAction[0].Targets[0].URI)
+ })
+
+ t.Run("Push", func(t *testing.T) {
+ p := pushTestPayload()
+
+ pl, err := mc.Push(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo:test] 2 new commits", pl.Title)
+ assert.Equal(t, "[test/repo:test] 2 new commits", pl.Summary)
+ assert.Len(t, pl.Sections, 1)
+ assert.Equal(t, "user1", pl.Sections[0].ActivitySubtitle)
+ assert.Equal(t, "[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1\n\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1", pl.Sections[0].Text)
+ assert.Len(t, pl.Sections[0].Facts, 2)
+ for _, fact := range pl.Sections[0].Facts {
+ if fact.Name == "Repository:" {
+ assert.Equal(t, p.Repo.FullName, fact.Value)
+ } else if fact.Name == "Commit count:" {
+ assert.Equal(t, "2", fact.Value)
+ } else {
+ t.Fail()
+ }
+ }
+ assert.Len(t, pl.PotentialAction, 1)
+ assert.Len(t, pl.PotentialAction[0].Targets, 1)
+ assert.Equal(t, "http://localhost:3000/test/repo/src/test", pl.PotentialAction[0].Targets[0].URI)
+ })
+
+ t.Run("Issue", func(t *testing.T) {
+ p := issueTestPayload()
+
+ p.Action = api.HookIssueOpened
+ pl, err := mc.Issue(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] Issue opened: #2 crash", pl.Title)
+ assert.Equal(t, "[test/repo] Issue opened: #2 crash", pl.Summary)
+ assert.Len(t, pl.Sections, 1)
+ assert.Equal(t, "user1", pl.Sections[0].ActivitySubtitle)
+ assert.Equal(t, "issue body", pl.Sections[0].Text)
+ assert.Len(t, pl.Sections[0].Facts, 2)
+ for _, fact := range pl.Sections[0].Facts {
+ if fact.Name == "Repository:" {
+ assert.Equal(t, p.Repository.FullName, fact.Value)
+ } else if fact.Name == "Issue #:" {
+ assert.Equal(t, "2", fact.Value)
+ } else {
+ t.Fail()
+ }
+ }
+ assert.Len(t, pl.PotentialAction, 1)
+ assert.Len(t, pl.PotentialAction[0].Targets, 1)
+ assert.Equal(t, "http://localhost:3000/test/repo/issues/2", pl.PotentialAction[0].Targets[0].URI)
+
+ p.Action = api.HookIssueClosed
+ pl, err = mc.Issue(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] Issue closed: #2 crash", pl.Title)
+ assert.Equal(t, "[test/repo] Issue closed: #2 crash", pl.Summary)
+ assert.Len(t, pl.Sections, 1)
+ assert.Equal(t, "user1", pl.Sections[0].ActivitySubtitle)
+ assert.Empty(t, pl.Sections[0].Text)
+ assert.Len(t, pl.Sections[0].Facts, 2)
+ for _, fact := range pl.Sections[0].Facts {
+ if fact.Name == "Repository:" {
+ assert.Equal(t, p.Repository.FullName, fact.Value)
+ } else if fact.Name == "Issue #:" {
+ assert.Equal(t, "2", fact.Value)
+ } else {
+ t.Fail()
+ }
+ }
+ assert.Len(t, pl.PotentialAction, 1)
+ assert.Len(t, pl.PotentialAction[0].Targets, 1)
+ assert.Equal(t, "http://localhost:3000/test/repo/issues/2", pl.PotentialAction[0].Targets[0].URI)
+ })
+
+ t.Run("IssueComment", func(t *testing.T) {
+ p := issueCommentTestPayload()
+
+ pl, err := mc.IssueComment(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] New comment on issue #2 crash", pl.Title)
+ assert.Equal(t, "[test/repo] New comment on issue #2 crash", pl.Summary)
+ assert.Len(t, pl.Sections, 1)
+ assert.Equal(t, "user1", pl.Sections[0].ActivitySubtitle)
+ assert.Equal(t, "more info needed", pl.Sections[0].Text)
+ assert.Len(t, pl.Sections[0].Facts, 2)
+ for _, fact := range pl.Sections[0].Facts {
+ if fact.Name == "Repository:" {
+ assert.Equal(t, p.Repository.FullName, fact.Value)
+ } else if fact.Name == "Issue #:" {
+ assert.Equal(t, "2", fact.Value)
+ } else {
+ t.Fail()
+ }
+ }
+ assert.Len(t, pl.PotentialAction, 1)
+ assert.Len(t, pl.PotentialAction[0].Targets, 1)
+ assert.Equal(t, "http://localhost:3000/test/repo/issues/2#issuecomment-4", pl.PotentialAction[0].Targets[0].URI)
+ })
+
+ t.Run("PullRequest", func(t *testing.T) {
+ p := pullRequestTestPayload()
+
+ pl, err := mc.PullRequest(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] Pull request opened: #12 Fix bug", pl.Title)
+ assert.Equal(t, "[test/repo] Pull request opened: #12 Fix bug", pl.Summary)
+ assert.Len(t, pl.Sections, 1)
+ assert.Equal(t, "user1", pl.Sections[0].ActivitySubtitle)
+ assert.Equal(t, "fixes bug #2", pl.Sections[0].Text)
+ assert.Len(t, pl.Sections[0].Facts, 2)
+ for _, fact := range pl.Sections[0].Facts {
+ if fact.Name == "Repository:" {
+ assert.Equal(t, p.Repository.FullName, fact.Value)
+ } else if fact.Name == "Pull request #:" {
+ assert.Equal(t, "12", fact.Value)
+ } else {
+ t.Fail()
+ }
+ }
+ assert.Len(t, pl.PotentialAction, 1)
+ assert.Len(t, pl.PotentialAction[0].Targets, 1)
+ assert.Equal(t, "http://localhost:3000/test/repo/pulls/12", pl.PotentialAction[0].Targets[0].URI)
+ })
+
+ t.Run("PullRequestComment", func(t *testing.T) {
+ p := pullRequestCommentTestPayload()
+
+ pl, err := mc.IssueComment(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] New comment on pull request #12 Fix bug", pl.Title)
+ assert.Equal(t, "[test/repo] New comment on pull request #12 Fix bug", pl.Summary)
+ assert.Len(t, pl.Sections, 1)
+ assert.Equal(t, "user1", pl.Sections[0].ActivitySubtitle)
+ assert.Equal(t, "changes requested", pl.Sections[0].Text)
+ assert.Len(t, pl.Sections[0].Facts, 2)
+ for _, fact := range pl.Sections[0].Facts {
+ if fact.Name == "Repository:" {
+ assert.Equal(t, p.Repository.FullName, fact.Value)
+ } else if fact.Name == "Issue #:" {
+ assert.Equal(t, "12", fact.Value)
+ } else {
+ t.Fail()
+ }
+ }
+ assert.Len(t, pl.PotentialAction, 1)
+ assert.Len(t, pl.PotentialAction[0].Targets, 1)
+ assert.Equal(t, "http://localhost:3000/test/repo/pulls/12#issuecomment-4", pl.PotentialAction[0].Targets[0].URI)
+ })
+
+ t.Run("Review", func(t *testing.T) {
+ p := pullRequestTestPayload()
+ p.Action = api.HookIssueReviewed
+
+ pl, err := mc.Review(p, webhook_module.HookEventPullRequestReviewApproved)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] Pull request review approved: #12 Fix bug", pl.Title)
+ assert.Equal(t, "[test/repo] Pull request review approved: #12 Fix bug", pl.Summary)
+ assert.Len(t, pl.Sections, 1)
+ assert.Equal(t, "user1", pl.Sections[0].ActivitySubtitle)
+ assert.Equal(t, "good job", pl.Sections[0].Text)
+ assert.Len(t, pl.Sections[0].Facts, 2)
+ for _, fact := range pl.Sections[0].Facts {
+ if fact.Name == "Repository:" {
+ assert.Equal(t, p.Repository.FullName, fact.Value)
+ } else if fact.Name == "Pull request #:" {
+ assert.Equal(t, "12", fact.Value)
+ } else {
+ t.Fail()
+ }
+ }
+ assert.Len(t, pl.PotentialAction, 1)
+ assert.Len(t, pl.PotentialAction[0].Targets, 1)
+ assert.Equal(t, "http://localhost:3000/test/repo/pulls/12", pl.PotentialAction[0].Targets[0].URI)
+ })
+
+ t.Run("Repository", func(t *testing.T) {
+ p := repositoryTestPayload()
+
+ pl, err := mc.Repository(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] Repository created", pl.Title)
+ assert.Equal(t, "[test/repo] Repository created", pl.Summary)
+ assert.Len(t, pl.Sections, 1)
+ assert.Equal(t, "user1", pl.Sections[0].ActivitySubtitle)
+ assert.Empty(t, pl.Sections[0].Text)
+ assert.Len(t, pl.Sections[0].Facts, 1)
+ for _, fact := range pl.Sections[0].Facts {
+ if fact.Name == "Repository:" {
+ assert.Equal(t, p.Repository.FullName, fact.Value)
+ } else {
+ t.Fail()
+ }
+ }
+ assert.Len(t, pl.PotentialAction, 1)
+ assert.Len(t, pl.PotentialAction[0].Targets, 1)
+ assert.Equal(t, "http://localhost:3000/test/repo", pl.PotentialAction[0].Targets[0].URI)
+ })
+
+ t.Run("Package", func(t *testing.T) {
+ p := packageTestPayload()
+
+ pl, err := mc.Package(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "Package created: GiteaContainer:latest", pl.Title)
+ assert.Equal(t, "Package created: GiteaContainer:latest", pl.Summary)
+ assert.Len(t, pl.Sections, 1)
+ assert.Equal(t, "user1", pl.Sections[0].ActivitySubtitle)
+ assert.Empty(t, pl.Sections[0].Text)
+ assert.Len(t, pl.Sections[0].Facts, 1)
+ for _, fact := range pl.Sections[0].Facts {
+ if fact.Name == "Package:" {
+ assert.Equal(t, p.Package.Name, fact.Value)
+ } else {
+ t.Fail()
+ }
+ }
+ assert.Len(t, pl.PotentialAction, 1)
+ assert.Len(t, pl.PotentialAction[0].Targets, 1)
+ assert.Equal(t, "http://localhost:3000/user1/-/packages/container/GiteaContainer/latest", pl.PotentialAction[0].Targets[0].URI)
+ })
+
+ t.Run("Wiki", func(t *testing.T) {
+ p := wikiTestPayload()
+
+ p.Action = api.HookWikiCreated
+ pl, err := mc.Wiki(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] New wiki page 'index' (Wiki change comment)", pl.Title)
+ assert.Equal(t, "[test/repo] New wiki page 'index' (Wiki change comment)", pl.Summary)
+ assert.Len(t, pl.Sections, 1)
+ assert.Equal(t, "user1", pl.Sections[0].ActivitySubtitle)
+ assert.Equal(t, "", pl.Sections[0].Text)
+ assert.Len(t, pl.Sections[0].Facts, 2)
+ for _, fact := range pl.Sections[0].Facts {
+ if fact.Name == "Repository:" {
+ assert.Equal(t, p.Repository.FullName, fact.Value)
+ } else {
+ t.Fail()
+ }
+ }
+ assert.Len(t, pl.PotentialAction, 1)
+ assert.Len(t, pl.PotentialAction[0].Targets, 1)
+ assert.Equal(t, "http://localhost:3000/test/repo/wiki/index", pl.PotentialAction[0].Targets[0].URI)
+
+ p.Action = api.HookWikiEdited
+ pl, err = mc.Wiki(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] Wiki page 'index' edited (Wiki change comment)", pl.Title)
+ assert.Equal(t, "[test/repo] Wiki page 'index' edited (Wiki change comment)", pl.Summary)
+ assert.Len(t, pl.Sections, 1)
+ assert.Equal(t, "user1", pl.Sections[0].ActivitySubtitle)
+ assert.Equal(t, "", pl.Sections[0].Text)
+ assert.Len(t, pl.Sections[0].Facts, 2)
+ for _, fact := range pl.Sections[0].Facts {
+ if fact.Name == "Repository:" {
+ assert.Equal(t, p.Repository.FullName, fact.Value)
+ } else {
+ t.Fail()
+ }
+ }
+ assert.Len(t, pl.PotentialAction, 1)
+ assert.Len(t, pl.PotentialAction[0].Targets, 1)
+ assert.Equal(t, "http://localhost:3000/test/repo/wiki/index", pl.PotentialAction[0].Targets[0].URI)
+
+ p.Action = api.HookWikiDeleted
+ pl, err = mc.Wiki(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] Wiki page 'index' deleted", pl.Title)
+ assert.Equal(t, "[test/repo] Wiki page 'index' deleted", pl.Summary)
+ assert.Len(t, pl.Sections, 1)
+ assert.Equal(t, "user1", pl.Sections[0].ActivitySubtitle)
+ assert.Empty(t, pl.Sections[0].Text)
+ assert.Len(t, pl.Sections[0].Facts, 2)
+ for _, fact := range pl.Sections[0].Facts {
+ if fact.Name == "Repository:" {
+ assert.Equal(t, p.Repository.FullName, fact.Value)
+ } else {
+ t.Fail()
+ }
+ }
+ assert.Len(t, pl.PotentialAction, 1)
+ assert.Len(t, pl.PotentialAction[0].Targets, 1)
+ assert.Equal(t, "http://localhost:3000/test/repo/wiki/index", pl.PotentialAction[0].Targets[0].URI)
+ })
+
+ t.Run("Release", func(t *testing.T) {
+ p := pullReleaseTestPayload()
+
+ pl, err := mc.Release(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[test/repo] Release created: v1.0", pl.Title)
+ assert.Equal(t, "[test/repo] Release created: v1.0", pl.Summary)
+ assert.Len(t, pl.Sections, 1)
+ assert.Equal(t, "user1", pl.Sections[0].ActivitySubtitle)
+ assert.Empty(t, pl.Sections[0].Text)
+ assert.Len(t, pl.Sections[0].Facts, 2)
+ for _, fact := range pl.Sections[0].Facts {
+ if fact.Name == "Repository:" {
+ assert.Equal(t, p.Repository.FullName, fact.Value)
+ } else if fact.Name == "Tag:" {
+ assert.Equal(t, "v1.0", fact.Value)
+ } else {
+ t.Fail()
+ }
+ }
+ assert.Len(t, pl.PotentialAction, 1)
+ assert.Len(t, pl.PotentialAction[0].Targets, 1)
+ assert.Equal(t, "http://localhost:3000/test/repo/releases/tag/v1.0", pl.PotentialAction[0].Targets[0].URI)
+ })
+}
+
+func TestMSTeamsJSONPayload(t *testing.T) {
+ p := pushTestPayload()
+ data, err := p.JSONPayload()
+ require.NoError(t, err)
+
+ hook := &webhook_model.Webhook{
+ RepoID: 3,
+ IsActive: true,
+ Type: webhook_module.MSTEAMS,
+ URL: "https://msteams.example.com/",
+ Meta: ``,
+ HTTPMethod: "POST",
+ }
+ task := &webhook_model.HookTask{
+ HookID: hook.ID,
+ EventType: webhook_module.HookEventPush,
+ PayloadContent: string(data),
+ PayloadVersion: 2,
+ }
+
+ req, reqBody, err := msteamsHandler{}.NewRequest(context.Background(), hook, task)
+ require.NotNil(t, req)
+ require.NotNil(t, reqBody)
+ require.NoError(t, err)
+
+ assert.Equal(t, "POST", req.Method)
+ assert.Equal(t, "https://msteams.example.com/", req.URL.String())
+ assert.Equal(t, "sha256=", req.Header.Get("X-Hub-Signature-256"))
+ assert.Equal(t, "application/json", req.Header.Get("Content-Type"))
+ var body MSTeamsPayload
+ err = json.NewDecoder(req.Body).Decode(&body)
+ require.NoError(t, err)
+ assert.Equal(t, "[test/repo:test] 2 new commits", body.Summary)
+}
diff --git a/services/webhook/notifier.go b/services/webhook/notifier.go
new file mode 100644
index 0000000..a9b3422
--- /dev/null
+++ b/services/webhook/notifier.go
@@ -0,0 +1,887 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "context"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ packages_model "code.gitea.io/gitea/models/packages"
+ "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ "code.gitea.io/gitea/services/convert"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+func init() {
+ notify_service.RegisterNotifier(&webhookNotifier{})
+}
+
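+// webhookNotifier translates notify_service events into outgoing webhook
+// deliveries. It embeds NullNotifier, so any event without an override here
+// is a no-op.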
+type webhookNotifier struct {
+ notify_service.NullNotifier
+}
+
+var _ notify_service.Notifier = &webhookNotifier{}
+
+// NewNotifier creates a new webhook notifier
+func NewNotifier() notify_service.Notifier {
+ return &webhookNotifier{}
+}
+
+func (m *webhookNotifier) IssueClearLabels(ctx context.Context, doer *user_model.User, issue *issues_model.Issue) {
+ if err := issue.LoadPoster(ctx); err != nil {
+ log.Error("LoadPoster: %v", err)
+ return
+ }
+
+ if err := issue.LoadRepo(ctx); err != nil {
+ log.Error("LoadRepo: %v", err)
+ return
+ }
+
+ permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, issue.Poster)
+ var err error
+ if issue.IsPull {
+ if err = issue.LoadPullRequest(ctx); err != nil {
+ log.Error("LoadPullRequest: %v", err)
+ return
+ }
+
+ err = PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, webhook_module.HookEventPullRequestLabel, &api.PullRequestPayload{
+ Action: api.HookIssueLabelCleared,
+ Index: issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ })
+ } else {
+ err = PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, webhook_module.HookEventIssueLabel, &api.IssuePayload{
+ Action: api.HookIssueLabelCleared,
+ Index: issue.Index,
+ Issue: convert.ToAPIIssue(ctx, doer, issue),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ })
+ }
+ if err != nil {
+ log.Error("PrepareWebhooks [is_pull: %v]: %v", issue.IsPull, err)
+ }
+}
+
+func (m *webhookNotifier) ForkRepository(ctx context.Context, doer *user_model.User, oldRepo, repo *repo_model.Repository) {
+ oldPermission, _ := access_model.GetUserRepoPermission(ctx, oldRepo, doer)
+ permission, _ := access_model.GetUserRepoPermission(ctx, repo, doer)
+
+ // forked webhook
+ if err := PrepareWebhooks(ctx, EventSource{Repository: oldRepo}, webhook_module.HookEventFork, &api.ForkPayload{
+ Forkee: convert.ToRepo(ctx, oldRepo, oldPermission),
+ Repo: convert.ToRepo(ctx, repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }); err != nil {
+ log.Error("PrepareWebhooks [repo_id: %d]: %v", oldRepo.ID, err)
+ }
+
+ u := repo.MustOwner(ctx)
+
+ // Add to hook queue for created repo after session commit.
+ if u.IsOrganization() {
+ if err := PrepareWebhooks(ctx, EventSource{Repository: repo}, webhook_module.HookEventRepository, &api.RepositoryPayload{
+ Action: api.HookRepoCreated,
+ Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeOwner}),
+ Organization: convert.ToUser(ctx, u, nil),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }); err != nil {
+ log.Error("PrepareWebhooks [repo_id: %d]: %v", repo.ID, err)
+ }
+ }
+}
+
+func (m *webhookNotifier) CreateRepository(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository) {
+ // Add to hook queue for created repo after session commit.
+ if err := PrepareWebhooks(ctx, EventSource{Repository: repo}, webhook_module.HookEventRepository, &api.RepositoryPayload{
+ Action: api.HookRepoCreated,
+ Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeOwner}),
+ Organization: convert.ToUser(ctx, u, nil),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }); err != nil {
+ log.Error("PrepareWebhooks [repo_id: %d]: %v", repo.ID, err)
+ }
+}
+
+func (m *webhookNotifier) DeleteRepository(ctx context.Context, doer *user_model.User, repo *repo_model.Repository) {
+ if err := PrepareWebhooks(ctx, EventSource{Repository: repo}, webhook_module.HookEventRepository, &api.RepositoryPayload{
+ Action: api.HookRepoDeleted,
+ Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeOwner}),
+ Organization: convert.ToUser(ctx, repo.MustOwner(ctx), nil),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }); err != nil {
+ log.Error("PrepareWebhooks [repo_id: %d]: %v", repo.ID, err)
+ }
+}
+
+func (m *webhookNotifier) MigrateRepository(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository) {
+ // Add to hook queue for the migrated repo after session commit (delivered as a repo "created" event).
+ if err := PrepareWebhooks(ctx, EventSource{Repository: repo}, webhook_module.HookEventRepository, &api.RepositoryPayload{
+ Action: api.HookRepoCreated,
+ Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeOwner}),
+ Organization: convert.ToUser(ctx, u, nil),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }); err != nil {
+ log.Error("PrepareWebhooks [repo_id: %d]: %v", repo.ID, err)
+ }
+}
+
+func (m *webhookNotifier) IssueChangeAssignee(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, assignee *user_model.User, removed bool, comment *issues_model.Comment) {
+ if issue.IsPull {
+ permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, doer)
+
+ if err := issue.LoadPullRequest(ctx); err != nil {
+ log.Error("LoadPullRequest failed: %v", err)
+ return
+ }
+ apiPullRequest := &api.PullRequestPayload{
+ Index: issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }
+ if removed {
+ apiPullRequest.Action = api.HookIssueUnassigned
+ } else {
+ apiPullRequest.Action = api.HookIssueAssigned
+ }
+ // the assignee change triggers a webhook
+ if err := PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, webhook_module.HookEventPullRequestAssign, apiPullRequest); err != nil {
+ log.Error("PrepareWebhooks [is_pull: %v, remove_assignee: %v]: %v", issue.IsPull, removed, err)
+ return
+ }
+ } else {
+ permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, doer)
+ apiIssue := &api.IssuePayload{
+ Index: issue.Index,
+ Issue: convert.ToAPIIssue(ctx, doer, issue),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }
+ if removed {
+ apiIssue.Action = api.HookIssueUnassigned
+ } else {
+ apiIssue.Action = api.HookIssueAssigned
+ }
+ // the assignee change triggers a webhook
+ if err := PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, webhook_module.HookEventIssueAssign, apiIssue); err != nil {
+ log.Error("PrepareWebhooks [is_pull: %v, remove_assignee: %v]: %v", issue.IsPull, removed, err)
+ return
+ }
+ }
+}
+
+func (m *webhookNotifier) IssueChangeTitle(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldTitle string) {
+ permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, issue.Poster)
+ var err error
+ if issue.IsPull {
+ if err = issue.LoadPullRequest(ctx); err != nil {
+ log.Error("LoadPullRequest failed: %v", err)
+ return
+ }
+ err = PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, webhook_module.HookEventPullRequest, &api.PullRequestPayload{
+ Action: api.HookIssueEdited,
+ Index: issue.Index,
+ Changes: &api.ChangesPayload{
+ Title: &api.ChangesFromPayload{
+ From: oldTitle,
+ },
+ },
+ PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ })
+ } else {
+ err = PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, webhook_module.HookEventIssues, &api.IssuePayload{
+ Action: api.HookIssueEdited,
+ Index: issue.Index,
+ Changes: &api.ChangesPayload{
+ Title: &api.ChangesFromPayload{
+ From: oldTitle,
+ },
+ },
+ Issue: convert.ToAPIIssue(ctx, doer, issue),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ })
+ }
+
+ if err != nil {
+ log.Error("PrepareWebhooks [is_pull: %v]: %v", issue.IsPull, err)
+ }
+}
+
+func (m *webhookNotifier) IssueChangeStatus(ctx context.Context, doer *user_model.User, commitID string, issue *issues_model.Issue, actionComment *issues_model.Comment, isClosed bool) {
+ permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, issue.Poster)
+ var err error
+ if issue.IsPull {
+ if err = issue.LoadPullRequest(ctx); err != nil {
+ log.Error("LoadPullRequest: %v", err)
+ return
+ }
+ // Merging a pull request calls issue.changeStatus, so the merge case is handled separately.
+ apiPullRequest := &api.PullRequestPayload{
+ Index: issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ CommitID: commitID,
+ }
+ if isClosed {
+ apiPullRequest.Action = api.HookIssueClosed
+ } else {
+ apiPullRequest.Action = api.HookIssueReOpened
+ }
+ err = PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, webhook_module.HookEventPullRequest, apiPullRequest)
+ } else {
+ apiIssue := &api.IssuePayload{
+ Index: issue.Index,
+ Issue: convert.ToAPIIssue(ctx, doer, issue),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ CommitID: commitID,
+ }
+ if isClosed {
+ apiIssue.Action = api.HookIssueClosed
+ } else {
+ apiIssue.Action = api.HookIssueReOpened
+ }
+ err = PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, webhook_module.HookEventIssues, apiIssue)
+ }
+ if err != nil {
+ log.Error("PrepareWebhooks [is_pull: %v, is_closed: %v]: %v", issue.IsPull, isClosed, err)
+ }
+}
+
+func (m *webhookNotifier) NewIssue(ctx context.Context, issue *issues_model.Issue, mentions []*user_model.User) {
+ if err := issue.LoadRepo(ctx); err != nil {
+ log.Error("issue.LoadRepo: %v", err)
+ return
+ }
+ if err := issue.LoadPoster(ctx); err != nil {
+ log.Error("issue.LoadPoster: %v", err)
+ return
+ }
+
+ permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, issue.Poster)
+ if err := PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, webhook_module.HookEventIssues, &api.IssuePayload{
+ Action: api.HookIssueOpened,
+ Index: issue.Index,
+ Issue: convert.ToAPIIssue(ctx, issue.Poster, issue),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, issue.Poster, nil),
+ }); err != nil {
+ log.Error("PrepareWebhooks: %v", err)
+ }
+}
+
+func (m *webhookNotifier) NewPullRequest(ctx context.Context, pull *issues_model.PullRequest, mentions []*user_model.User) {
+ if err := pull.LoadIssue(ctx); err != nil {
+ log.Error("pull.LoadIssue: %v", err)
+ return
+ }
+ if err := pull.Issue.LoadRepo(ctx); err != nil {
+ log.Error("pull.Issue.LoadRepo: %v", err)
+ return
+ }
+ if err := pull.Issue.LoadPoster(ctx); err != nil {
+ log.Error("pull.Issue.LoadPoster: %v", err)
+ return
+ }
+
+ permission, _ := access_model.GetUserRepoPermission(ctx, pull.Issue.Repo, pull.Issue.Poster)
+ if err := PrepareWebhooks(ctx, EventSource{Repository: pull.Issue.Repo}, webhook_module.HookEventPullRequest, &api.PullRequestPayload{
+ Action: api.HookIssueOpened,
+ Index: pull.Issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, pull, nil),
+ Repository: convert.ToRepo(ctx, pull.Issue.Repo, permission),
+ Sender: convert.ToUser(ctx, pull.Issue.Poster, nil),
+ }); err != nil {
+ log.Error("PrepareWebhooks: %v", err)
+ }
+}
+
+func (m *webhookNotifier) IssueChangeContent(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldContent string) {
+ if err := issue.LoadRepo(ctx); err != nil {
+ log.Error("LoadRepo: %v", err)
+ return
+ }
+
+ permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, issue.Poster)
+ var err error
+ if issue.IsPull {
+ if err := issue.LoadPullRequest(ctx); err != nil {
+ log.Error("LoadPullRequest: %v", err)
+ return
+ }
+ err = PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, webhook_module.HookEventPullRequest, &api.PullRequestPayload{
+ Action: api.HookIssueEdited,
+ Index: issue.Index,
+ Changes: &api.ChangesPayload{
+ Body: &api.ChangesFromPayload{
+ From: oldContent,
+ },
+ },
+ PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ })
+ } else {
+ err = PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, webhook_module.HookEventIssues, &api.IssuePayload{
+ Action: api.HookIssueEdited,
+ Index: issue.Index,
+ Changes: &api.ChangesPayload{
+ Body: &api.ChangesFromPayload{
+ From: oldContent,
+ },
+ },
+ Issue: convert.ToAPIIssue(ctx, doer, issue),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ })
+ }
+ if err != nil {
+ log.Error("PrepareWebhooks [is_pull: %v]: %v", issue.IsPull, err)
+ }
+}
+
+func (m *webhookNotifier) UpdateComment(ctx context.Context, doer *user_model.User, c *issues_model.Comment, oldContent string) {
+ if err := c.LoadPoster(ctx); err != nil {
+ log.Error("LoadPoster: %v", err)
+ return
+ }
+ if err := c.LoadIssue(ctx); err != nil {
+ log.Error("LoadIssue: %v", err)
+ return
+ }
+
+ if err := c.Issue.LoadAttributes(ctx); err != nil {
+ log.Error("LoadAttributes: %v", err)
+ return
+ }
+
+ var eventType webhook_module.HookEventType
+ if c.Issue.IsPull {
+ eventType = webhook_module.HookEventPullRequestComment
+ } else {
+ eventType = webhook_module.HookEventIssueComment
+ }
+
+ permission, _ := access_model.GetUserRepoPermission(ctx, c.Issue.Repo, doer)
+ if err := PrepareWebhooks(ctx, EventSource{Repository: c.Issue.Repo}, eventType, &api.IssueCommentPayload{
+ Action: api.HookIssueCommentEdited,
+ Issue: convert.ToAPIIssue(ctx, doer, c.Issue),
+ Comment: convert.ToAPIComment(ctx, c.Issue.Repo, c),
+ Changes: &api.ChangesPayload{
+ Body: &api.ChangesFromPayload{
+ From: oldContent,
+ },
+ },
+ Repository: convert.ToRepo(ctx, c.Issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ IsPull: c.Issue.IsPull,
+ }); err != nil {
+ log.Error("PrepareWebhooks [comment_id: %d]: %v", c.ID, err)
+ }
+}
+
+func (m *webhookNotifier) CreateIssueComment(ctx context.Context, doer *user_model.User, repo *repo_model.Repository,
+ issue *issues_model.Issue, comment *issues_model.Comment, mentions []*user_model.User,
+) {
+ var eventType webhook_module.HookEventType
+ if issue.IsPull {
+ eventType = webhook_module.HookEventPullRequestComment
+ } else {
+ eventType = webhook_module.HookEventIssueComment
+ }
+
+ permission, _ := access_model.GetUserRepoPermission(ctx, repo, doer)
+ if err := PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, eventType, &api.IssueCommentPayload{
+ Action: api.HookIssueCommentCreated,
+ Issue: convert.ToAPIIssue(ctx, doer, issue),
+ Comment: convert.ToAPIComment(ctx, repo, comment),
+ Repository: convert.ToRepo(ctx, repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ IsPull: issue.IsPull,
+ }); err != nil {
+ log.Error("PrepareWebhooks [comment_id: %d]: %v", comment.ID, err)
+ }
+}
+
+func (m *webhookNotifier) DeleteComment(ctx context.Context, doer *user_model.User, comment *issues_model.Comment) {
+ var err error
+
+ if err = comment.LoadPoster(ctx); err != nil {
+ log.Error("LoadPoster: %v", err)
+ return
+ }
+ if err = comment.LoadIssue(ctx); err != nil {
+ log.Error("LoadIssue: %v", err)
+ return
+ }
+
+ if err = comment.Issue.LoadAttributes(ctx); err != nil {
+ log.Error("LoadAttributes: %v", err)
+ return
+ }
+
+ var eventType webhook_module.HookEventType
+ if comment.Issue.IsPull {
+ eventType = webhook_module.HookEventPullRequestComment
+ } else {
+ eventType = webhook_module.HookEventIssueComment
+ }
+
+ permission, _ := access_model.GetUserRepoPermission(ctx, comment.Issue.Repo, doer)
+ if err := PrepareWebhooks(ctx, EventSource{Repository: comment.Issue.Repo}, eventType, &api.IssueCommentPayload{
+ Action: api.HookIssueCommentDeleted,
+ Issue: convert.ToAPIIssue(ctx, doer, comment.Issue),
+ Comment: convert.ToAPIComment(ctx, comment.Issue.Repo, comment),
+ Repository: convert.ToRepo(ctx, comment.Issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ IsPull: comment.Issue.IsPull,
+ }); err != nil {
+ log.Error("PrepareWebhooks [comment_id: %d]: %v", comment.ID, err)
+ }
+}
+
+func (m *webhookNotifier) NewWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, page, comment string) {
+ // Add to hook queue for created wiki page.
+ if err := PrepareWebhooks(ctx, EventSource{Repository: repo}, webhook_module.HookEventWiki, &api.WikiPayload{
+ Action: api.HookWikiCreated,
+ Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeOwner}),
+ Sender: convert.ToUser(ctx, doer, nil),
+ Page: page,
+ Comment: comment,
+ }); err != nil {
+ log.Error("PrepareWebhooks [repo_id: %d]: %v", repo.ID, err)
+ }
+}
+
+func (m *webhookNotifier) EditWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, page, comment string) {
+ // Add to hook queue for edited wiki page.
+ if err := PrepareWebhooks(ctx, EventSource{Repository: repo}, webhook_module.HookEventWiki, &api.WikiPayload{
+ Action: api.HookWikiEdited,
+ Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeOwner}),
+ Sender: convert.ToUser(ctx, doer, nil),
+ Page: page,
+ Comment: comment,
+ }); err != nil {
+ log.Error("PrepareWebhooks [repo_id: %d]: %v", repo.ID, err)
+ }
+}
+
+func (m *webhookNotifier) DeleteWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, page string) {
+ // Add to hook queue for deleted wiki page.
+ if err := PrepareWebhooks(ctx, EventSource{Repository: repo}, webhook_module.HookEventWiki, &api.WikiPayload{
+ Action: api.HookWikiDeleted,
+ Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeOwner}),
+ Sender: convert.ToUser(ctx, doer, nil),
+ Page: page,
+ }); err != nil {
+ log.Error("PrepareWebhooks [repo_id: %d]: %v", repo.ID, err)
+ }
+}
+
+func (m *webhookNotifier) IssueChangeLabels(ctx context.Context, doer *user_model.User, issue *issues_model.Issue,
+ addedLabels, removedLabels []*issues_model.Label,
+) {
+ var err error
+
+ if err = issue.LoadRepo(ctx); err != nil {
+ log.Error("LoadRepo: %v", err)
+ return
+ }
+
+ if err = issue.LoadPoster(ctx); err != nil {
+ log.Error("LoadPoster: %v", err)
+ return
+ }
+
+ permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, issue.Poster)
+ if issue.IsPull {
+ if err = issue.LoadPullRequest(ctx); err != nil {
+ log.Error("loadPullRequest: %v", err)
+ return
+ }
+ if err = issue.PullRequest.LoadIssue(ctx); err != nil {
+ log.Error("LoadIssue: %v", err)
+ return
+ }
+ err = PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, webhook_module.HookEventPullRequestLabel, &api.PullRequestPayload{
+ Action: api.HookIssueLabelUpdated,
+ Index: issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
+ Repository: convert.ToRepo(ctx, issue.Repo, access_model.Permission{AccessMode: perm.AccessModeOwner}),
+ Sender: convert.ToUser(ctx, doer, nil),
+ })
+ } else {
+ err = PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, webhook_module.HookEventIssueLabel, &api.IssuePayload{
+ Action: api.HookIssueLabelUpdated,
+ Index: issue.Index,
+ Issue: convert.ToAPIIssue(ctx, doer, issue),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ })
+ }
+ if err != nil {
+ log.Error("PrepareWebhooks [is_pull: %v]: %v", issue.IsPull, err)
+ }
+}
+
+func (m *webhookNotifier) IssueChangeMilestone(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldMilestoneID int64) {
+ var hookAction api.HookIssueAction
+ var err error
+ if issue.MilestoneID > 0 {
+ hookAction = api.HookIssueMilestoned
+ } else {
+ hookAction = api.HookIssueDemilestoned
+ }
+
+ if err = issue.LoadAttributes(ctx); err != nil {
+ log.Error("issue.LoadAttributes failed: %v", err)
+ return
+ }
+
+ permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, doer)
+ if issue.IsPull {
+ err = issue.PullRequest.LoadIssue(ctx)
+ if err != nil {
+ log.Error("LoadIssue: %v", err)
+ return
+ }
+ err = PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, webhook_module.HookEventPullRequestMilestone, &api.PullRequestPayload{
+ Action: hookAction,
+ Index: issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ })
+ } else {
+ err = PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, webhook_module.HookEventIssueMilestone, &api.IssuePayload{
+ Action: hookAction,
+ Index: issue.Index,
+ Issue: convert.ToAPIIssue(ctx, doer, issue),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ })
+ }
+ if err != nil {
+ log.Error("PrepareWebhooks [is_pull: %v]: %v", issue.IsPull, err)
+ }
+}
+
+func (m *webhookNotifier) PushCommits(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, opts *repository.PushUpdateOptions, commits *repository.PushCommits) {
+ apiPusher := convert.ToUser(ctx, pusher, nil)
+ apiCommits, apiHeadCommit, err := commits.ToAPIPayloadCommits(ctx, repo.RepoPath(), repo.HTMLURL())
+ if err != nil {
+ log.Error("commits.ToAPIPayloadCommits failed: %v", err)
+ return
+ }
+
+ if err := PrepareWebhooks(ctx, EventSource{Repository: repo}, webhook_module.HookEventPush, &api.PushPayload{
+ Ref: opts.RefFullName.String(),
+ Before: opts.OldCommitID,
+ After: opts.NewCommitID,
+ CompareURL: setting.AppURL + commits.CompareURL,
+ Commits: apiCommits,
+ TotalCommits: commits.Len,
+ HeadCommit: apiHeadCommit,
+ Repo: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeOwner}),
+ Pusher: apiPusher,
+ Sender: apiPusher,
+ }); err != nil {
+ log.Error("PrepareWebhooks: %v", err)
+ }
+}
+
+func (m *webhookNotifier) AutoMergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
+ // simply delegate to MergePullRequest
+ m.MergePullRequest(ctx, doer, pr)
+}
+
+func (*webhookNotifier) MergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
+ // Reload pull request information.
+ if err := pr.LoadAttributes(ctx); err != nil {
+ log.Error("LoadAttributes: %v", err)
+ return
+ }
+
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("LoadIssue: %v", err)
+ return
+ }
+
+ if err := pr.Issue.LoadRepo(ctx); err != nil {
+ log.Error("pr.Issue.LoadRepo: %v", err)
+ return
+ }
+
+ permission, err := access_model.GetUserRepoPermission(ctx, pr.Issue.Repo, doer)
+ if err != nil {
+ log.Error("models.GetUserRepoPermission: %v", err)
+ return
+ }
+
+ // Merging a pull request calls issue.changeStatus, so the webhook payload needs to be prepared here separately.
+ apiPullRequest := &api.PullRequestPayload{
+ Index: pr.Issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, pr, nil),
+ Repository: convert.ToRepo(ctx, pr.Issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ Action: api.HookIssueClosed,
+ }
+
+ if err := PrepareWebhooks(ctx, EventSource{Repository: pr.Issue.Repo}, webhook_module.HookEventPullRequest, apiPullRequest); err != nil {
+ log.Error("PrepareWebhooks: %v", err)
+ }
+}
+
+func (m *webhookNotifier) PullRequestChangeTargetBranch(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest, oldBranch string) {
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("LoadIssue: %v", err)
+ return
+ }
+
+ issue := pr.Issue
+
+ mode, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, issue.Poster)
+ if err := PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, webhook_module.HookEventPullRequest, &api.PullRequestPayload{
+ Action: api.HookIssueEdited,
+ Index: issue.Index,
+ Changes: &api.ChangesPayload{
+ Ref: &api.ChangesFromPayload{
+ From: oldBranch,
+ },
+ },
+ PullRequest: convert.ToAPIPullRequest(ctx, pr, nil),
+ Repository: convert.ToRepo(ctx, issue.Repo, mode),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }); err != nil {
+ log.Error("PrepareWebhooks [pr: %d]: %v", pr.ID, err)
+ }
+}
+
+func (m *webhookNotifier) PullRequestReview(ctx context.Context, pr *issues_model.PullRequest, review *issues_model.Review, comment *issues_model.Comment, mentions []*user_model.User) {
+ var reviewHookType webhook_module.HookEventType
+
+ switch review.Type {
+ case issues_model.ReviewTypeApprove:
+ reviewHookType = webhook_module.HookEventPullRequestReviewApproved
+ case issues_model.ReviewTypeComment:
+ reviewHookType = webhook_module.HookEventPullRequestReviewComment
+ case issues_model.ReviewTypeReject:
+ reviewHookType = webhook_module.HookEventPullRequestReviewRejected
+ default:
+ log.Error("Unsupported review webhook type: %v", review.Type)
+ return
+ }
+
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("LoadIssue: %v", err)
+ return
+ }
+
+ permission, err := access_model.GetUserRepoPermission(ctx, review.Issue.Repo, review.Issue.Poster)
+ if err != nil {
+ log.Error("models.GetUserRepoPermission: %v", err)
+ return
+ }
+ if err := PrepareWebhooks(ctx, EventSource{Repository: review.Issue.Repo}, reviewHookType, &api.PullRequestPayload{
+ Action: api.HookIssueReviewed,
+ Index: review.Issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, pr, nil),
+ Repository: convert.ToRepo(ctx, review.Issue.Repo, permission),
+ Sender: convert.ToUser(ctx, review.Reviewer, nil),
+ Review: &api.ReviewPayload{
+ Type: string(reviewHookType),
+ Content: review.Content,
+ },
+ }); err != nil {
+ log.Error("PrepareWebhooks: %v", err)
+ }
+}
+
+func (m *webhookNotifier) PullRequestReviewRequest(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, reviewer *user_model.User, isRequest bool, comment *issues_model.Comment) {
+ if !issue.IsPull {
+ log.Warn("PullRequestReviewRequest: issue is not a pull request: %v", issue.ID)
+ return
+ }
+ permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, doer)
+ if err := issue.LoadPullRequest(ctx); err != nil {
+ log.Error("LoadPullRequest failed: %v", err)
+ return
+ }
+ apiPullRequest := &api.PullRequestPayload{
+ Index: issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
+ RequestedReviewer: convert.ToUser(ctx, reviewer, nil),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }
+ if isRequest {
+ apiPullRequest.Action = api.HookIssueReviewRequested
+ } else {
+ apiPullRequest.Action = api.HookIssueReviewRequestRemoved
+ }
+ if err := PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, webhook_module.HookEventPullRequestReviewRequest, apiPullRequest); err != nil {
+ log.Error("PrepareWebhooks [review_requested: %v]: %v", isRequest, err)
+ return
+ }
+}
+
+func (m *webhookNotifier) CreateRef(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, refFullName git.RefName, refID string) {
+ apiPusher := convert.ToUser(ctx, pusher, nil)
+ apiRepo := convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeNone})
+
+ if err := PrepareWebhooks(ctx, EventSource{Repository: repo}, webhook_module.HookEventCreate, &api.CreatePayload{
+ Ref: refFullName.String(),
+ Sha: refID,
+ RefType: refFullName.RefType(),
+ Repo: apiRepo,
+ Sender: apiPusher,
+ }); err != nil {
+ log.Error("PrepareWebhooks: %v", err)
+ }
+}
+
+func (m *webhookNotifier) PullRequestSynchronized(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("LoadIssue: %v", err)
+ return
+ }
+ if err := pr.Issue.LoadAttributes(ctx); err != nil {
+ log.Error("LoadAttributes: %v", err)
+ return
+ }
+
+ if err := PrepareWebhooks(ctx, EventSource{Repository: pr.Issue.Repo}, webhook_module.HookEventPullRequestSync, &api.PullRequestPayload{
+ Action: api.HookIssueSynchronized,
+ Index: pr.Issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, pr, nil),
+ Repository: convert.ToRepo(ctx, pr.Issue.Repo, access_model.Permission{AccessMode: perm.AccessModeOwner}),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }); err != nil {
+ log.Error("PrepareWebhooks [pull_id: %v]: %v", pr.ID, err)
+ }
+}
+
+func (m *webhookNotifier) DeleteRef(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, refFullName git.RefName) {
+ apiPusher := convert.ToUser(ctx, pusher, nil)
+ apiRepo := convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeOwner})
+
+ if err := PrepareWebhooks(ctx, EventSource{Repository: repo}, webhook_module.HookEventDelete, &api.DeletePayload{
+ Ref: refFullName.String(),
+ RefType: refFullName.RefType(),
+ PusherType: api.PusherTypeUser,
+ Repo: apiRepo,
+ Sender: apiPusher,
+ }); err != nil {
+ log.Error("PrepareWebhooks.(delete %s): %v", refFullName.RefType(), err)
+ }
+}
+
+func sendReleaseHook(ctx context.Context, doer *user_model.User, rel *repo_model.Release, action api.HookReleaseAction) {
+ if err := rel.LoadAttributes(ctx); err != nil {
+ log.Error("LoadAttributes: %v", err)
+ return
+ }
+
+ permission, _ := access_model.GetUserRepoPermission(ctx, rel.Repo, doer)
+ if err := PrepareWebhooks(ctx, EventSource{Repository: rel.Repo}, webhook_module.HookEventRelease, &api.ReleasePayload{
+ Action: action,
+ Release: convert.ToAPIRelease(ctx, rel.Repo, rel),
+ Repository: convert.ToRepo(ctx, rel.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }); err != nil {
+ log.Error("PrepareWebhooks: %v", err)
+ }
+}
+
+func (m *webhookNotifier) NewRelease(ctx context.Context, rel *repo_model.Release) {
+ sendReleaseHook(ctx, rel.Publisher, rel, api.HookReleasePublished)
+}
+
+func (m *webhookNotifier) UpdateRelease(ctx context.Context, doer *user_model.User, rel *repo_model.Release) {
+ sendReleaseHook(ctx, doer, rel, api.HookReleaseUpdated)
+}
+
+func (m *webhookNotifier) DeleteRelease(ctx context.Context, doer *user_model.User, rel *repo_model.Release) {
+ sendReleaseHook(ctx, doer, rel, api.HookReleaseDeleted)
+}
+
+func (m *webhookNotifier) SyncPushCommits(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, opts *repository.PushUpdateOptions, commits *repository.PushCommits) {
+ apiPusher := convert.ToUser(ctx, pusher, nil)
+ apiCommits, apiHeadCommit, err := commits.ToAPIPayloadCommits(ctx, repo.RepoPath(), repo.HTMLURL())
+ if err != nil {
+ log.Error("commits.ToAPIPayloadCommits failed: %v", err)
+ return
+ }
+
+ if err := PrepareWebhooks(ctx, EventSource{Repository: repo}, webhook_module.HookEventPush, &api.PushPayload{
+ Ref: opts.RefFullName.String(),
+ Before: opts.OldCommitID,
+ After: opts.NewCommitID,
+ CompareURL: setting.AppURL + commits.CompareURL,
+ Commits: apiCommits,
+ TotalCommits: commits.Len,
+ HeadCommit: apiHeadCommit,
+ Repo: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeOwner}),
+ Pusher: apiPusher,
+ Sender: apiPusher,
+ }); err != nil {
+ log.Error("PrepareWebhooks: %v", err)
+ }
+}
+
+func (m *webhookNotifier) SyncCreateRef(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, refFullName git.RefName, refID string) {
+ m.CreateRef(ctx, pusher, repo, refFullName, refID)
+}
+
+func (m *webhookNotifier) SyncDeleteRef(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, refFullName git.RefName) {
+ m.DeleteRef(ctx, pusher, repo, refFullName)
+}
+
+func (m *webhookNotifier) PackageCreate(ctx context.Context, doer *user_model.User, pd *packages_model.PackageDescriptor) {
+ notifyPackage(ctx, doer, pd, api.HookPackageCreated)
+}
+
+func (m *webhookNotifier) PackageDelete(ctx context.Context, doer *user_model.User, pd *packages_model.PackageDescriptor) {
+ notifyPackage(ctx, doer, pd, api.HookPackageDeleted)
+}
+
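+// notifyPackage fires the package webhook. A package is not necessarily
+// attached to a repository (user- and org-level packages), so the owner is
+// part of the event source as well.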
+func notifyPackage(ctx context.Context, sender *user_model.User, pd *packages_model.PackageDescriptor, action api.HookPackageAction) {
+ source := EventSource{
+ Repository: pd.Repository,
+ Owner: pd.Owner,
+ }
+
+ apiPackage, err := convert.ToPackage(ctx, pd, sender)
+ if err != nil {
+ log.Error("Error converting package: %v", err)
+ return
+ }
+
+ if err := PrepareWebhooks(ctx, source, webhook_module.HookEventPackage, &api.PackagePayload{
+ Action: action,
+ Package: apiPackage,
+ Sender: convert.ToUser(ctx, sender, nil),
+ }); err != nil {
+ log.Error("PrepareWebhooks: %v", err)
+ }
+}
diff --git a/services/webhook/packagist.go b/services/webhook/packagist.go
new file mode 100644
index 0000000..9831a4e
--- /dev/null
+++ b/services/webhook/packagist.go
@@ -0,0 +1,90 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "context"
+ "fmt"
+ "html/template"
+ "net/http"
+ "net/url"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/webhook/shared"
+)
+
+type packagistHandler struct{}
+
+func (packagistHandler) Type() webhook_module.HookType { return webhook_module.PACKAGIST }
+func (packagistHandler) Icon(size int) template.HTML { return shared.ImgIcon("packagist.png", size) }
+
+func (packagistHandler) UnmarshalForm(bind func(any)) forms.WebhookForm {
+ var form struct {
+ forms.WebhookCoreForm
+ Username string `binding:"Required"`
+ APIToken string `binding:"Required"`
+ PackageURL string `binding:"Required;ValidUrl"`
+ }
+ bind(&form)
+
+ return forms.WebhookForm{
+ WebhookCoreForm: form.WebhookCoreForm,
+ URL: fmt.Sprintf("https://packagist.org/api/update-package?username=%s&apiToken=%s", url.QueryEscape(form.Username), url.QueryEscape(form.APIToken)),
+ ContentType: webhook_model.ContentTypeJSON,
+ Secret: "",
+ HTTPMethod: http.MethodPost,
+ Metadata: &PackagistMeta{
+ Username: form.Username,
+ APIToken: form.APIToken,
+ PackageURL: form.PackageURL,
+ },
+ }
+}
+
+type (
+ // PackagistPayload represents a packagist payload
+ // as expected by https://packagist.org/about
+ PackagistPayload struct {
+ PackagistRepository struct {
+ URL string `json:"url"`
+ } `json:"repository"`
+ }
+
+ // PackagistMeta contains the metadata for the webhook
+ PackagistMeta struct {
+ Username string `json:"username"`
+ APIToken string `json:"api_token"`
+ PackageURL string `json:"package_url"`
+ }
+)
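+
+// For illustration, the metadata persisted in webhook.Meta looks like
+// (values are placeholders):
+//
+//	{"username":"user","api_token":"token","package_url":"https://packagist.org/packages/example"}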
+
+// Metadata returns packagist metadata
+func (packagistHandler) Metadata(w *webhook_model.Webhook) any {
+ s := &PackagistMeta{}
+ if err := json.Unmarshal([]byte(w.Meta), s); err != nil {
+ log.Error("packagistHandler.Metadata(%d): %v", w.ID, err)
+ }
+ return s
+}
+
+// NewRequest creates a request with the [PackagistPayload] for packagist (same payload for all events).
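+// For illustration, the request body (package URL taken from the stored
+// metadata) is:
+//
+//	{"repository":{"url":"https://packagist.org/packages/example"}}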
+func (packagistHandler) NewRequest(ctx context.Context, w *webhook_model.Webhook, t *webhook_model.HookTask) (*http.Request, []byte, error) {
+ meta := &PackagistMeta{}
+ if err := json.Unmarshal([]byte(w.Meta), meta); err != nil {
+ return nil, nil, fmt.Errorf("packagistHandler.NewRequest meta json: %w", err)
+ }
+
+ payload := PackagistPayload{
+ PackagistRepository: struct {
+ URL string `json:"url"`
+ }{
+ URL: meta.PackageURL,
+ },
+ }
+ return shared.NewJSONRequestWithPayload(payload, w, t, false)
+}
diff --git a/services/webhook/packagist_test.go b/services/webhook/packagist_test.go
new file mode 100644
index 0000000..320c1c8
--- /dev/null
+++ b/services/webhook/packagist_test.go
@@ -0,0 +1,70 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "context"
+ "fmt"
+ "testing"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/json"
+ api "code.gitea.io/gitea/modules/structs"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestPackagistPayload(t *testing.T) {
+ payloads := []api.Payloader{
+ createTestPayload(),
+ deleteTestPayload(),
+ forkTestPayload(),
+ pushTestPayload(),
+ issueTestPayload(),
+ issueCommentTestPayload(),
+ pullRequestCommentTestPayload(),
+ pullRequestTestPayload(),
+ repositoryTestPayload(),
+ packageTestPayload(),
+ wikiTestPayload(),
+ pullReleaseTestPayload(),
+ }
+
+ for _, payloader := range payloads {
+ t.Run(fmt.Sprintf("%T", payloader), func(t *testing.T) {
+ data, err := payloader.JSONPayload()
+ require.NoError(t, err)
+
+ hook := &webhook_model.Webhook{
+ RepoID: 3,
+ IsActive: true,
+ Type: webhook_module.PACKAGIST,
+ URL: "https://packagist.org/api/update-package?username=THEUSERNAME&apiToken=TOPSECRETAPITOKEN",
+ Meta: `{"package_url":"https://packagist.org/packages/example"}`,
+ HTTPMethod: "POST",
+ }
+ task := &webhook_model.HookTask{
+ HookID: hook.ID,
+ EventType: webhook_module.HookEventPush,
+ PayloadContent: string(data),
+ PayloadVersion: 2,
+ }
+
+ req, reqBody, err := packagistHandler{}.NewRequest(context.Background(), hook, task)
+ require.NotNil(t, req)
+ require.NotNil(t, reqBody)
+ require.NoError(t, err)
+
+ assert.Equal(t, "POST", req.Method)
+ assert.Equal(t, "https://packagist.org/api/update-package?username=THEUSERNAME&apiToken=TOPSECRETAPITOKEN", req.URL.String())
+ assert.Equal(t, "application/json", req.Header.Get("Content-Type"))
+ var body PackagistPayload
+ err = json.NewDecoder(req.Body).Decode(&body)
+ require.NoError(t, err)
+ assert.Equal(t, "https://packagist.org/packages/example", body.PackagistRepository.URL)
+ })
+ }
+}
diff --git a/services/webhook/shared/img.go b/services/webhook/shared/img.go
new file mode 100644
index 0000000..2d65ba4
--- /dev/null
+++ b/services/webhook/shared/img.go
@@ -0,0 +1,15 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package shared
+
+import (
+ "html"
+ "html/template"
+ "strconv"
+
+ "code.gitea.io/gitea/modules/setting"
+)
+
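+// ImgIcon renders the <img> tag for a bundled webhook icon. A sketch of the
+// output, assuming an empty StaticURLPrefix:
+//
+//	ImgIcon("packagist.png", 20)
+//	// <img width="20" height="20" src="/assets/img/packagist.png">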
+func ImgIcon(name string, size int) template.HTML {
+ s := strconv.Itoa(size)
+ src := html.EscapeString(setting.StaticURLPrefix + "/assets/img/" + name)
+ return template.HTML(`<img width="` + s + `" height="` + s + `" src="` + src + `">`)
+}
diff --git a/services/webhook/shared/payloader.go b/services/webhook/shared/payloader.go
new file mode 100644
index 0000000..cf0bfa8
--- /dev/null
+++ b/services/webhook/shared/payloader.go
@@ -0,0 +1,161 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package shared
+
+import (
+ "bytes"
+ "crypto/hmac"
+ "crypto/sha1"
+ "crypto/sha256"
+ "encoding/hex"
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/json"
+ api "code.gitea.io/gitea/modules/structs"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+)
+
+// ErrPayloadTypeNotSupported is returned when a convertor does not support the given event type.
+var ErrPayloadTypeNotSupported = errors.New("unsupported webhook event")
+
+// PayloadConvertor defines the interface to convert system payload to webhook payload
+type PayloadConvertor[T any] interface {
+ Create(*api.CreatePayload) (T, error)
+ Delete(*api.DeletePayload) (T, error)
+ Fork(*api.ForkPayload) (T, error)
+ Issue(*api.IssuePayload) (T, error)
+ IssueComment(*api.IssueCommentPayload) (T, error)
+ Push(*api.PushPayload) (T, error)
+ PullRequest(*api.PullRequestPayload) (T, error)
+ Review(*api.PullRequestPayload, webhook_module.HookEventType) (T, error)
+ Repository(*api.RepositoryPayload) (T, error)
+ Release(*api.ReleasePayload) (T, error)
+ Wiki(*api.WikiPayload) (T, error)
+ Package(*api.PackagePayload) (T, error)
+}
+
+func convertUnmarshalledJSON[T, P any](convert func(P) (T, error), data []byte) (T, error) {
+ var p P
+ if err := json.Unmarshal(data, &p); err != nil {
+ var t T
+ return t, fmt.Errorf("could not unmarshal payload: %w", err)
+ }
+ return convert(p)
+}
+
+// NewPayload converts the raw JSON payload of the given event into the webhook-specific payload type T.
+func NewPayload[T any](rc PayloadConvertor[T], data []byte, event webhook_module.HookEventType) (T, error) {
+ switch event {
+ case webhook_module.HookEventCreate:
+ return convertUnmarshalledJSON(rc.Create, data)
+ case webhook_module.HookEventDelete:
+ return convertUnmarshalledJSON(rc.Delete, data)
+ case webhook_module.HookEventFork:
+ return convertUnmarshalledJSON(rc.Fork, data)
+ case webhook_module.HookEventIssues, webhook_module.HookEventIssueAssign, webhook_module.HookEventIssueLabel, webhook_module.HookEventIssueMilestone:
+ return convertUnmarshalledJSON(rc.Issue, data)
+ case webhook_module.HookEventIssueComment, webhook_module.HookEventPullRequestComment:
+		// previous code sometimes sent s.PullRequest(p.(*api.PullRequestPayload)),
+		// but notifier.go does not appear to emit such a payload for any HookEvent***Comment event
+
+ // History (most recent first):
+ // - refactored in https://github.com/go-gitea/gitea/pull/12310
+ // - assertion added in https://github.com/go-gitea/gitea/pull/12046
+ // - issue raised in https://github.com/go-gitea/gitea/issues/11940#issuecomment-645713996
+ // > That's because for HookEventPullRequestComment event, some places use IssueCommentPayload and others use PullRequestPayload
+
+ // In modules/actions/workflows.go:183 the type assertion is always payload.(*api.IssueCommentPayload)
+ return convertUnmarshalledJSON(rc.IssueComment, data)
+ case webhook_module.HookEventPush:
+ return convertUnmarshalledJSON(rc.Push, data)
+ case webhook_module.HookEventPullRequest, webhook_module.HookEventPullRequestAssign, webhook_module.HookEventPullRequestLabel,
+ webhook_module.HookEventPullRequestMilestone, webhook_module.HookEventPullRequestSync, webhook_module.HookEventPullRequestReviewRequest:
+ return convertUnmarshalledJSON(rc.PullRequest, data)
+ case webhook_module.HookEventPullRequestReviewApproved, webhook_module.HookEventPullRequestReviewRejected, webhook_module.HookEventPullRequestReviewComment:
+ return convertUnmarshalledJSON(func(p *api.PullRequestPayload) (T, error) {
+ return rc.Review(p, event)
+ }, data)
+ case webhook_module.HookEventRepository:
+ return convertUnmarshalledJSON(rc.Repository, data)
+ case webhook_module.HookEventRelease:
+ return convertUnmarshalledJSON(rc.Release, data)
+ case webhook_module.HookEventWiki:
+ return convertUnmarshalledJSON(rc.Wiki, data)
+ case webhook_module.HookEventPackage:
+ return convertUnmarshalledJSON(rc.Package, data)
+ }
+ var t T
+	return t, fmt.Errorf("NewPayload unsupported event: %s", event)
+}
+
+// NewJSONRequest converts the task payload via the convertor and prepares the JSON HTTP request for it.
+func NewJSONRequest[T any](pc PayloadConvertor[T], w *webhook_model.Webhook, t *webhook_model.HookTask, withDefaultHeaders bool) (*http.Request, []byte, error) {
+ payload, err := NewPayload(pc, []byte(t.PayloadContent), t.EventType)
+ if err != nil {
+ return nil, nil, err
+ }
+ return NewJSONRequestWithPayload(payload, w, t, withDefaultHeaders)
+}
+
+// NewJSONRequestWithPayload marshals the given payload and prepares the JSON HTTP request for the webhook task.
+func NewJSONRequestWithPayload(payload any, w *webhook_model.Webhook, t *webhook_model.HookTask, withDefaultHeaders bool) (*http.Request, []byte, error) {
+ body, err := json.MarshalIndent(payload, "", " ")
+ if err != nil {
+ return nil, nil, err
+ }
+
+ method := w.HTTPMethod
+ if method == "" {
+ method = http.MethodPost
+ }
+
+ req, err := http.NewRequest(method, w.URL, bytes.NewReader(body))
+ if err != nil {
+ return nil, nil, err
+ }
+ req.Header.Set("Content-Type", "application/json")
+
+ if withDefaultHeaders {
+ return req, body, AddDefaultHeaders(req, []byte(w.Secret), t, body)
+ }
+ return req, body, nil
+}
+
+// AddDefaultHeaders adds the X-Forgejo, X-Gitea, X-Gogs, X-Hub, X-GitHub headers to the given request
+func AddDefaultHeaders(req *http.Request, secret []byte, t *webhook_model.HookTask, payloadContent []byte) error {
+ var signatureSHA1 string
+ var signatureSHA256 string
+ if len(secret) > 0 {
+ sig1 := hmac.New(sha1.New, secret)
+ sig256 := hmac.New(sha256.New, secret)
+ _, err := io.MultiWriter(sig1, sig256).Write(payloadContent)
+ if err != nil {
+			// this error should never happen, since the hash writers write to in-memory buffers and always return a nil error.
+ return fmt.Errorf("prepareWebhooks.sigWrite: %w", err)
+ }
+ signatureSHA1 = hex.EncodeToString(sig1.Sum(nil))
+ signatureSHA256 = hex.EncodeToString(sig256.Sum(nil))
+ }
+
+ event := t.EventType.Event()
+ eventType := string(t.EventType)
+ req.Header.Add("X-Forgejo-Delivery", t.UUID)
+ req.Header.Add("X-Forgejo-Event", event)
+ req.Header.Add("X-Forgejo-Event-Type", eventType)
+ req.Header.Add("X-Forgejo-Signature", signatureSHA256)
+ req.Header.Add("X-Gitea-Delivery", t.UUID)
+ req.Header.Add("X-Gitea-Event", event)
+ req.Header.Add("X-Gitea-Event-Type", eventType)
+ req.Header.Add("X-Gitea-Signature", signatureSHA256)
+ req.Header.Add("X-Gogs-Delivery", t.UUID)
+ req.Header.Add("X-Gogs-Event", event)
+ req.Header.Add("X-Gogs-Event-Type", eventType)
+ req.Header.Add("X-Gogs-Signature", signatureSHA256)
+ req.Header.Add("X-Hub-Signature", "sha1="+signatureSHA1)
+ req.Header.Add("X-Hub-Signature-256", "sha256="+signatureSHA256)
+ req.Header["X-GitHub-Delivery"] = []string{t.UUID}
+ req.Header["X-GitHub-Event"] = []string{event}
+ req.Header["X-GitHub-Event-Type"] = []string{eventType}
+ return nil
+}
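
Since AddDefaultHeaders signs the request body with both SHA-1 and SHA-256 HMACs of the webhook secret, a receiving endpoint can authenticate deliveries by recomputing the digest. A minimal verification sketch for the X-Hub-Signature-256 header (standalone, standard library only; the secret and body values are illustrative assumptions):

    package main

    import (
        "crypto/hmac"
        "crypto/sha256"
        "encoding/hex"
        "fmt"
        "strings"
    )

    // verifySignature checks an X-Hub-Signature-256 header ("sha256=<hex>")
    // against the raw request body, using a constant-time comparison.
    func verifySignature(secret, body []byte, header string) bool {
        mac := hmac.New(sha256.New, secret)
        mac.Write(body) // hash.Hash.Write never returns an error
        expected := hex.EncodeToString(mac.Sum(nil))
        got := strings.TrimPrefix(header, "sha256=")
        return hmac.Equal([]byte(expected), []byte(got))
    }

    func main() {
        secret := []byte("s3cret")
        body := []byte(`{"ref":"refs/heads/main"}`)

        // simulate the sender side from AddDefaultHeaders
        mac := hmac.New(sha256.New, secret)
        mac.Write(body)
        header := "sha256=" + hex.EncodeToString(mac.Sum(nil))

        fmt.Println(verifySignature(secret, body, header)) // true
    }
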
diff --git a/services/webhook/slack.go b/services/webhook/slack.go
new file mode 100644
index 0000000..af93976
--- /dev/null
+++ b/services/webhook/slack.go
@@ -0,0 +1,361 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "context"
+ "fmt"
+ "html/template"
+ "net/http"
+ "regexp"
+ "strings"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ gitea_context "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/webhook/shared"
+
+ "gitea.com/go-chi/binding"
+)
+
+type slackHandler struct{}
+
+func (slackHandler) Type() webhook_module.HookType { return webhook_module.SLACK }
+func (slackHandler) Icon(size int) template.HTML { return shared.ImgIcon("slack.png", size) }
+
+type slackForm struct {
+ forms.WebhookCoreForm
+ PayloadURL string `binding:"Required;ValidUrl"`
+ Channel string `binding:"Required"`
+ Username string
+ IconURL string
+ Color string
+}
+
+var _ binding.Validator = &slackForm{}
+
+// Validate implements binding.Validator.
+func (s *slackForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := gitea_context.GetWebContext(req)
+ if !IsValidSlackChannel(strings.TrimSpace(s.Channel)) {
+ errs = append(errs, binding.Error{
+ FieldNames: []string{"Channel"},
+ Classification: "",
+ Message: ctx.Locale.TrString("repo.settings.add_webhook.invalid_channel_name"),
+ })
+ }
+ return errs
+}
+
+func (slackHandler) UnmarshalForm(bind func(any)) forms.WebhookForm {
+ var form slackForm
+ bind(&form)
+
+ return forms.WebhookForm{
+ WebhookCoreForm: form.WebhookCoreForm,
+ URL: form.PayloadURL,
+ ContentType: webhook_model.ContentTypeJSON,
+ Secret: "",
+ HTTPMethod: http.MethodPost,
+ Metadata: &SlackMeta{
+ Channel: strings.TrimSpace(form.Channel),
+ Username: form.Username,
+ IconURL: form.IconURL,
+ Color: form.Color,
+ },
+ }
+}
+
+// SlackMeta contains the slack metadata
+type SlackMeta struct {
+ Channel string `json:"channel"`
+ Username string `json:"username"`
+ IconURL string `json:"icon_url"`
+ Color string `json:"color"`
+}
+
+// Metadata returns slack metadata
+func (slackHandler) Metadata(w *webhook_model.Webhook) any {
+ s := &SlackMeta{}
+ if err := json.Unmarshal([]byte(w.Meta), s); err != nil {
+ log.Error("slackHandler.Metadata(%d): %v", w.ID, err)
+ }
+ return s
+}
+
+// SlackPayload contains the information about the slack channel
+type SlackPayload struct {
+ Channel string `json:"channel"`
+ Text string `json:"text"`
+ Username string `json:"username"`
+ IconURL string `json:"icon_url"`
+ UnfurlLinks int `json:"unfurl_links"`
+ LinkNames int `json:"link_names"`
+ Attachments []SlackAttachment `json:"attachments"`
+}
+
+// SlackAttachment contains the slack message
+type SlackAttachment struct {
+ Fallback string `json:"fallback"`
+ Color string `json:"color"`
+ Title string `json:"title"`
+ TitleLink string `json:"title_link"`
+ Text string `json:"text"`
+}
+
+// SlackTextFormatter replaces &, <, > with their HTML entities
+// see: https://api.slack.com/docs/formatting
+func SlackTextFormatter(s string) string {
+ // replace & < >
+ s = strings.ReplaceAll(s, "&", "&amp;")
+ s = strings.ReplaceAll(s, "<", "&lt;")
+ s = strings.ReplaceAll(s, ">", "&gt;")
+ return s
+}
+
+// SlackShortTextFormatter keeps only the first line and replaces &, <, > with their HTML entities
+func SlackShortTextFormatter(s string) string {
+ s = strings.Split(s, "\n")[0]
+ // replace & < >
+ s = strings.ReplaceAll(s, "&", "&amp;")
+ s = strings.ReplaceAll(s, "<", "&lt;")
+ s = strings.ReplaceAll(s, ">", "&gt;")
+ return s
+}
+
+// SlackLinkFormatter creates a link compatible with slack
+func SlackLinkFormatter(url, text string) string {
+ return fmt.Sprintf("<%s|%s>", url, SlackTextFormatter(text))
+}
+
+// SlackLinkToRef returns a slack-formatted link to a repo ref
+func SlackLinkToRef(repoURL, ref string) string {
+ // FIXME: SHA1 hardcoded here
+ url := git.RefURL(repoURL, ref)
+ refName := git.RefName(ref).ShortName()
+ return SlackLinkFormatter(url, refName)
+}
+
+// Create implements payloadConvertor Create method
+func (s slackConvertor) Create(p *api.CreatePayload) (SlackPayload, error) {
+ repoLink := SlackLinkFormatter(p.Repo.HTMLURL, p.Repo.FullName)
+ refLink := SlackLinkToRef(p.Repo.HTMLURL, p.Ref)
+ text := fmt.Sprintf("[%s:%s] %s created by %s", repoLink, refLink, p.RefType, p.Sender.UserName)
+
+ return s.createPayload(text, nil), nil
+}
+
+// Delete composes the Slack payload for deleting a branch or tag.
+func (s slackConvertor) Delete(p *api.DeletePayload) (SlackPayload, error) {
+ refName := git.RefName(p.Ref).ShortName()
+ repoLink := SlackLinkFormatter(p.Repo.HTMLURL, p.Repo.FullName)
+ text := fmt.Sprintf("[%s:%s] %s deleted by %s", repoLink, refName, p.RefType, p.Sender.UserName)
+
+ return s.createPayload(text, nil), nil
+}
+
+// Fork composes the Slack payload for a forked repository.
+func (s slackConvertor) Fork(p *api.ForkPayload) (SlackPayload, error) {
+ baseLink := SlackLinkFormatter(p.Forkee.HTMLURL, p.Forkee.FullName)
+ forkLink := SlackLinkFormatter(p.Repo.HTMLURL, p.Repo.FullName)
+ text := fmt.Sprintf("%s is forked to %s", baseLink, forkLink)
+
+ return s.createPayload(text, nil), nil
+}
+
+// Issue implements payloadConvertor Issue method
+func (s slackConvertor) Issue(p *api.IssuePayload) (SlackPayload, error) {
+ text, issueTitle, attachmentText, color := getIssuesPayloadInfo(p, SlackLinkFormatter, true)
+
+ var attachments []SlackAttachment
+ if attachmentText != "" {
+ attachmentText = SlackTextFormatter(attachmentText)
+ issueTitle = SlackTextFormatter(issueTitle)
+ attachments = append(attachments, SlackAttachment{
+ Color: fmt.Sprintf("%x", color),
+ Title: issueTitle,
+ TitleLink: p.Issue.HTMLURL,
+ Text: attachmentText,
+ })
+ }
+
+ return s.createPayload(text, attachments), nil
+}
+
+// IssueComment implements payloadConvertor IssueComment method
+func (s slackConvertor) IssueComment(p *api.IssueCommentPayload) (SlackPayload, error) {
+ text, issueTitle, color := getIssueCommentPayloadInfo(p, SlackLinkFormatter, true)
+
+ return s.createPayload(text, []SlackAttachment{{
+ Color: fmt.Sprintf("%x", color),
+ Title: issueTitle,
+ TitleLink: p.Comment.HTMLURL,
+ Text: SlackTextFormatter(p.Comment.Body),
+ }}), nil
+}
+
+// Wiki implements payloadConvertor Wiki method
+func (s slackConvertor) Wiki(p *api.WikiPayload) (SlackPayload, error) {
+ text, _, _ := getWikiPayloadInfo(p, SlackLinkFormatter, true)
+
+ return s.createPayload(text, nil), nil
+}
+
+// Release implements payloadConvertor Release method
+func (s slackConvertor) Release(p *api.ReleasePayload) (SlackPayload, error) {
+ text, _ := getReleasePayloadInfo(p, SlackLinkFormatter, true)
+
+ return s.createPayload(text, nil), nil
+}
+
+func (s slackConvertor) Package(p *api.PackagePayload) (SlackPayload, error) {
+ text, _ := getPackagePayloadInfo(p, SlackLinkFormatter, true)
+
+ return s.createPayload(text, nil), nil
+}
+
+// Push implements payloadConvertor Push method
+func (s slackConvertor) Push(p *api.PushPayload) (SlackPayload, error) {
+ // n new commits
+ var (
+ commitDesc string
+ commitString string
+ )
+
+ if p.TotalCommits == 1 {
+ commitDesc = "1 new commit"
+ } else {
+ commitDesc = fmt.Sprintf("%d new commits", p.TotalCommits)
+ }
+ if len(p.CompareURL) > 0 {
+ commitString = SlackLinkFormatter(p.CompareURL, commitDesc)
+ } else {
+ commitString = commitDesc
+ }
+
+ repoLink := SlackLinkFormatter(p.Repo.HTMLURL, p.Repo.FullName)
+ branchLink := SlackLinkToRef(p.Repo.HTMLURL, p.Ref)
+ text := fmt.Sprintf("[%s:%s] %s pushed by %s", repoLink, branchLink, commitString, p.Pusher.UserName)
+
+ var attachmentText string
+ // for each commit, generate attachment text
+ for i, commit := range p.Commits {
+ attachmentText += fmt.Sprintf("%s: %s - %s", SlackLinkFormatter(commit.URL, commit.ID[:7]), SlackShortTextFormatter(commit.Message), SlackTextFormatter(commit.Author.Name))
+ // add linebreak to each commit but the last
+ if i < len(p.Commits)-1 {
+ attachmentText += "\n"
+ }
+ }
+
+ return s.createPayload(text, []SlackAttachment{{
+ Color: s.Color,
+ Title: p.Repo.HTMLURL,
+ TitleLink: p.Repo.HTMLURL,
+ Text: attachmentText,
+ }}), nil
+}
+
+// PullRequest implements payloadConvertor PullRequest method
+func (s slackConvertor) PullRequest(p *api.PullRequestPayload) (SlackPayload, error) {
+ text, issueTitle, attachmentText, color := getPullRequestPayloadInfo(p, SlackLinkFormatter, true)
+
+ var attachments []SlackAttachment
+ if attachmentText != "" {
+ attachmentText = SlackTextFormatter(p.PullRequest.Body)
+ issueTitle = SlackTextFormatter(issueTitle)
+ attachments = append(attachments, SlackAttachment{
+ Color: fmt.Sprintf("%x", color),
+ Title: issueTitle,
+ TitleLink: p.PullRequest.HTMLURL,
+ Text: attachmentText,
+ })
+ }
+
+ return s.createPayload(text, attachments), nil
+}
+
+// Review implements payloadConvertor Review method
+func (s slackConvertor) Review(p *api.PullRequestPayload, event webhook_module.HookEventType) (SlackPayload, error) {
+ senderLink := SlackLinkFormatter(setting.AppURL+p.Sender.UserName, p.Sender.UserName)
+ title := fmt.Sprintf("#%d %s", p.Index, p.PullRequest.Title)
+ titleLink := fmt.Sprintf("%s/pulls/%d", p.Repository.HTMLURL, p.Index)
+ repoLink := SlackLinkFormatter(p.Repository.HTMLURL, p.Repository.FullName)
+ var text string
+
+ if p.Action == api.HookIssueReviewed {
+ action, err := parseHookPullRequestEventType(event)
+ if err != nil {
+ return SlackPayload{}, err
+ }
+
+ text = fmt.Sprintf("[%s] Pull request review %s: [%s](%s) by %s", repoLink, action, title, titleLink, senderLink)
+ }
+
+ return s.createPayload(text, nil), nil
+}
+
+// Repository implements payloadConvertor Repository method
+func (s slackConvertor) Repository(p *api.RepositoryPayload) (SlackPayload, error) {
+ senderLink := SlackLinkFormatter(setting.AppURL+p.Sender.UserName, p.Sender.UserName)
+ repoLink := SlackLinkFormatter(p.Repository.HTMLURL, p.Repository.FullName)
+ var text string
+
+ switch p.Action {
+ case api.HookRepoCreated:
+ text = fmt.Sprintf("[%s] Repository created by %s", repoLink, senderLink)
+ case api.HookRepoDeleted:
+ text = fmt.Sprintf("[%s] Repository deleted by %s", repoLink, senderLink)
+ }
+
+ return s.createPayload(text, nil), nil
+}
+
+func (s slackConvertor) createPayload(text string, attachments []SlackAttachment) SlackPayload {
+ return SlackPayload{
+ Channel: s.Channel,
+ Text: text,
+ Username: s.Username,
+ IconURL: s.IconURL,
+ Attachments: attachments,
+ }
+}
+
+type slackConvertor struct {
+ Channel string
+ Username string
+ IconURL string
+ Color string
+}
+
+var _ shared.PayloadConvertor[SlackPayload] = slackConvertor{}
+
+func (slackHandler) NewRequest(ctx context.Context, w *webhook_model.Webhook, t *webhook_model.HookTask) (*http.Request, []byte, error) {
+ meta := &SlackMeta{}
+ if err := json.Unmarshal([]byte(w.Meta), meta); err != nil {
+ return nil, nil, fmt.Errorf("slackHandler.NewRequest meta json: %w", err)
+ }
+ sc := slackConvertor{
+ Channel: meta.Channel,
+ Username: meta.Username,
+ IconURL: meta.IconURL,
+ Color: meta.Color,
+ }
+ return shared.NewJSONRequest(sc, w, t, true)
+}
+
+var slackChannel = regexp.MustCompile(`^#?[a-z0-9_-]{1,80}$`)
+
+// IsValidSlackChannel validates a channel name conforms to what slack expects:
+// https://api.slack.com/methods/conversations.rename#naming
+// Conversation names can only contain lowercase letters, numbers, hyphens, and underscores, and must be 80 characters or less.
+// Forgejo additionally accepts a leading #.
+func IsValidSlackChannel(name string) bool {
+ return slackChannel.MatchString(name)
+}
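
To illustrate the formatters above: Slack's <url|text> link syntax requires &, < and > to be escaped in the label, while the URL itself is left untouched. A standalone sketch reproducing the two helpers (local names, same logic):

    package main

    import (
        "fmt"
        "strings"
    )

    // slackEscape mirrors SlackTextFormatter above.
    func slackEscape(s string) string {
        s = strings.ReplaceAll(s, "&", "&amp;")
        s = strings.ReplaceAll(s, "<", "&lt;")
        s = strings.ReplaceAll(s, ">", "&gt;")
        return s
    }

    // slackLink mirrors SlackLinkFormatter above.
    func slackLink(url, text string) string {
        return fmt.Sprintf("<%s|%s>", url, slackEscape(text))
    }

    func main() {
        fmt.Println(slackLink("http://localhost:3000/test/repo", "test & <repo>"))
        // <http://localhost:3000/test/repo|test &amp; &lt;repo&gt;>
    }
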
diff --git a/services/webhook/slack_test.go b/services/webhook/slack_test.go
new file mode 100644
index 0000000..3d80184
--- /dev/null
+++ b/services/webhook/slack_test.go
@@ -0,0 +1,265 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "context"
+ "testing"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/json"
+ api "code.gitea.io/gitea/modules/structs"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestSlackPayload(t *testing.T) {
+ sc := slackConvertor{}
+
+ t.Run("Create", func(t *testing.T) {
+ p := createTestPayload()
+
+ pl, err := sc.Create(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[<http://localhost:3000/test/repo|test/repo>:<http://localhost:3000/test/repo/src/branch/test|test>] branch created by user1", pl.Text)
+ })
+
+ t.Run("Delete", func(t *testing.T) {
+ p := deleteTestPayload()
+
+ pl, err := sc.Delete(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[<http://localhost:3000/test/repo|test/repo>:test] branch deleted by user1", pl.Text)
+ })
+
+ t.Run("Fork", func(t *testing.T) {
+ p := forkTestPayload()
+
+ pl, err := sc.Fork(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "<http://localhost:3000/test/repo2|test/repo2> is forked to <http://localhost:3000/test/repo|test/repo>", pl.Text)
+ })
+
+ t.Run("Push", func(t *testing.T) {
+ p := pushTestPayload()
+
+ pl, err := sc.Push(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[<http://localhost:3000/test/repo|test/repo>:<http://localhost:3000/test/repo/src/branch/test|test>] 2 new commits pushed by user1", pl.Text)
+ })
+
+ t.Run("Issue", func(t *testing.T) {
+ p := issueTestPayload()
+
+ p.Action = api.HookIssueOpened
+ pl, err := sc.Issue(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[<http://localhost:3000/test/repo|test/repo>] Issue opened: <http://localhost:3000/test/repo/issues/2|#2 crash> by <https://try.gitea.io/user1|user1>", pl.Text)
+
+ p.Action = api.HookIssueClosed
+ pl, err = sc.Issue(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[<http://localhost:3000/test/repo|test/repo>] Issue closed: <http://localhost:3000/test/repo/issues/2|#2 crash> by <https://try.gitea.io/user1|user1>", pl.Text)
+ })
+
+ t.Run("IssueComment", func(t *testing.T) {
+ p := issueCommentTestPayload()
+
+ pl, err := sc.IssueComment(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[<http://localhost:3000/test/repo|test/repo>] New comment on issue <http://localhost:3000/test/repo/issues/2|#2 crash> by <https://try.gitea.io/user1|user1>", pl.Text)
+ })
+
+ t.Run("PullRequest", func(t *testing.T) {
+ p := pullRequestTestPayload()
+
+ pl, err := sc.PullRequest(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[<http://localhost:3000/test/repo|test/repo>] Pull request opened: <http://localhost:3000/test/repo/pulls/12|#12 Fix bug> by <https://try.gitea.io/user1|user1>", pl.Text)
+ })
+
+ t.Run("PullRequestComment", func(t *testing.T) {
+ p := pullRequestCommentTestPayload()
+
+ pl, err := sc.IssueComment(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[<http://localhost:3000/test/repo|test/repo>] New comment on pull request <http://localhost:3000/test/repo/pulls/12|#12 Fix bug> by <https://try.gitea.io/user1|user1>", pl.Text)
+ })
+
+ t.Run("Review", func(t *testing.T) {
+ p := pullRequestTestPayload()
+ p.Action = api.HookIssueReviewed
+
+ pl, err := sc.Review(p, webhook_module.HookEventPullRequestReviewApproved)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[<http://localhost:3000/test/repo|test/repo>] Pull request review approved: [#12 Fix bug](http://localhost:3000/test/repo/pulls/12) by <https://try.gitea.io/user1|user1>", pl.Text)
+ })
+
+ t.Run("Repository", func(t *testing.T) {
+ p := repositoryTestPayload()
+
+ pl, err := sc.Repository(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[<http://localhost:3000/test/repo|test/repo>] Repository created by <https://try.gitea.io/user1|user1>", pl.Text)
+ })
+
+ t.Run("Package", func(t *testing.T) {
+ p := packageTestPayload()
+
+ pl, err := sc.Package(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "Package created: <http://localhost:3000/user1/-/packages/container/GiteaContainer/latest|GiteaContainer:latest> by <https://try.gitea.io/user1|user1>", pl.Text)
+ })
+
+ t.Run("Wiki", func(t *testing.T) {
+ p := wikiTestPayload()
+
+ p.Action = api.HookWikiCreated
+ pl, err := sc.Wiki(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[<http://localhost:3000/test/repo|test/repo>] New wiki page '<http://localhost:3000/test/repo/wiki/index|index>' (Wiki change comment) by <https://try.gitea.io/user1|user1>", pl.Text)
+
+ p.Action = api.HookWikiEdited
+ pl, err = sc.Wiki(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[<http://localhost:3000/test/repo|test/repo>] Wiki page '<http://localhost:3000/test/repo/wiki/index|index>' edited (Wiki change comment) by <https://try.gitea.io/user1|user1>", pl.Text)
+
+ p.Action = api.HookWikiDeleted
+ pl, err = sc.Wiki(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[<http://localhost:3000/test/repo|test/repo>] Wiki page '<http://localhost:3000/test/repo/wiki/index|index>' deleted by <https://try.gitea.io/user1|user1>", pl.Text)
+ })
+
+ t.Run("Release", func(t *testing.T) {
+ p := pullReleaseTestPayload()
+
+ pl, err := sc.Release(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, "[<http://localhost:3000/test/repo|test/repo>] Release created: <http://localhost:3000/test/repo/releases/tag/v1.0|v1.0> by <https://try.gitea.io/user1|user1>", pl.Text)
+ })
+}
+
+func TestSlackJSONPayload(t *testing.T) {
+ p := pushTestPayload()
+ data, err := p.JSONPayload()
+ require.NoError(t, err)
+
+ hook := &webhook_model.Webhook{
+ RepoID: 3,
+ IsActive: true,
+ Type: webhook_module.SLACK,
+ URL: "https://slack.example.com/",
+ Meta: `{}`,
+ HTTPMethod: "POST",
+ }
+ task := &webhook_model.HookTask{
+ HookID: hook.ID,
+ EventType: webhook_module.HookEventPush,
+ PayloadContent: string(data),
+ PayloadVersion: 2,
+ }
+
+ req, reqBody, err := slackHandler{}.NewRequest(context.Background(), hook, task)
+ require.NotNil(t, req)
+ require.NotNil(t, reqBody)
+ require.NoError(t, err)
+
+ assert.Equal(t, "POST", req.Method)
+ assert.Equal(t, "https://slack.example.com/", req.URL.String())
+ assert.Equal(t, "sha256=", req.Header.Get("X-Hub-Signature-256"))
+ assert.Equal(t, "application/json", req.Header.Get("Content-Type"))
+ var body SlackPayload
+ err = json.NewDecoder(req.Body).Decode(&body)
+ require.NoError(t, err)
+ assert.Equal(t, "[<http://localhost:3000/test/repo|test/repo>:<http://localhost:3000/test/repo/src/branch/test|test>] 2 new commits pushed by user1", body.Text)
+}
+
+func TestIsValidSlackChannel(t *testing.T) {
+ tt := []struct {
+ channelName string
+ expected bool
+ }{
+ {"gitea", true},
+ {"#gitea", true},
+ {" ", false},
+ {"#", false},
+ {" #", false},
+ {"gitea ", false},
+ {" gitea", false},
+ }
+
+ for _, v := range tt {
+ assert.Equal(t, v.expected, IsValidSlackChannel(v.channelName))
+ }
+}
+
+func TestSlackMetadata(t *testing.T) {
+ w := &webhook_model.Webhook{
+ Meta: `{"channel": "foo", "username": "username", "color": "blue"}`,
+ }
+ slackHook := slackHandler{}.Metadata(w)
+ assert.Equal(t, SlackMeta{
+ Channel: "foo",
+ Username: "username",
+ Color: "blue",
+ },
+ *slackHook.(*SlackMeta))
+}
+
+func TestSlackToHook(t *testing.T) {
+ w := &webhook_model.Webhook{
+ Type: webhook_module.SLACK,
+ ContentType: webhook_model.ContentTypeJSON,
+ URL: "https://slack.example.com",
+ Meta: `{"channel": "foo", "username": "username", "color": "blue"}`,
+ HookEvent: &webhook_module.HookEvent{
+ PushOnly: true,
+ SendEverything: false,
+ ChooseEvents: false,
+ HookEvents: webhook_module.HookEvents{
+ Create: false,
+ Push: true,
+ PullRequest: false,
+ },
+ },
+ }
+ h, err := ToHook("repoLink", w)
+ require.NoError(t, err)
+
+ assert.Equal(t, map[string]string{
+ "url": "https://slack.example.com",
+ "content_type": "json",
+
+ "channel": "foo",
+ "color": "blue",
+ "icon_url": "",
+ "username": "username",
+ }, h.Config)
+ assert.Equal(t, "https://slack.example.com", h.URL)
+ assert.Equal(t, "json", h.ContentType)
+ assert.Equal(t, &SlackMeta{
+ Channel: "foo",
+ Username: "username",
+ IconURL: "",
+ Color: "blue",
+ }, h.Metadata)
+}
diff --git a/services/webhook/sourcehut/builds.go b/services/webhook/sourcehut/builds.go
new file mode 100644
index 0000000..7b7ace1
--- /dev/null
+++ b/services/webhook/sourcehut/builds.go
@@ -0,0 +1,301 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package sourcehut
+
+import (
+ "cmp"
+ "context"
+ "fmt"
+ "html/template"
+ "io/fs"
+ "net/http"
+ "strings"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ gitea_context "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/webhook/shared"
+
+ "gitea.com/go-chi/binding"
+ "gopkg.in/yaml.v3"
+)
+
+// BuildsHandler is the webhook handler for sourcehut builds.
+type BuildsHandler struct{}
+
+func (BuildsHandler) Type() webhook_module.HookType { return webhook_module.SOURCEHUT_BUILDS }
+func (BuildsHandler) Metadata(w *webhook_model.Webhook) any {
+ s := &BuildsMeta{}
+ if err := json.Unmarshal([]byte(w.Meta), s); err != nil {
+ log.Error("sourcehut.BuildsHandler.Metadata(%d): %v", w.ID, err)
+ }
+ return s
+}
+
+func (BuildsHandler) Icon(size int) template.HTML {
+ return shared.ImgIcon("sourcehut.svg", size)
+}
+
+type buildsForm struct {
+ forms.WebhookCoreForm
+ PayloadURL string `binding:"Required;ValidUrl"`
+ ManifestPath string `binding:"Required"`
+ Visibility string `binding:"Required;In(PUBLIC,UNLISTED,PRIVATE)"`
+ Secrets bool
+ AccessToken string `binding:"Required"`
+}
+
+var _ binding.Validator = &buildsForm{}
+
+// Validate implements binding.Validator.
+func (f *buildsForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := gitea_context.GetWebContext(req)
+ if !fs.ValidPath(f.ManifestPath) {
+ errs = append(errs, binding.Error{
+ FieldNames: []string{"ManifestPath"},
+ Classification: "",
+ Message: ctx.Locale.TrString("repo.settings.add_webhook.invalid_path"),
+ })
+ }
+ f.AuthorizationHeader = "Bearer " + strings.TrimSpace(f.AccessToken)
+ return errs
+}
+
+func (BuildsHandler) UnmarshalForm(bind func(any)) forms.WebhookForm {
+ var form buildsForm
+ bind(&form)
+
+ return forms.WebhookForm{
+ WebhookCoreForm: form.WebhookCoreForm,
+ URL: form.PayloadURL,
+ ContentType: webhook_model.ContentTypeJSON,
+ Secret: "",
+ HTTPMethod: http.MethodPost,
+ Metadata: &BuildsMeta{
+ ManifestPath: form.ManifestPath,
+ Visibility: form.Visibility,
+ Secrets: form.Secrets,
+ },
+ }
+}
+
+type (
+ graphqlPayload[V any] struct {
+ Query string `json:"query,omitempty"`
+ Error string `json:"error,omitempty"`
+ Variables V `json:"variables,omitempty"`
+ }
+ // buildsVariables according to https://man.sr.ht/builds.sr.ht/graphql.md
+ buildsVariables struct {
+ Manifest string `json:"manifest"`
+ Tags []string `json:"tags"`
+ Note string `json:"note"`
+ Secrets bool `json:"secrets"`
+ Execute bool `json:"execute"`
+ Visibility string `json:"visibility"`
+ }
+
+ // BuildsMeta contains the metadata for the webhook
+ BuildsMeta struct {
+ ManifestPath string `json:"manifest_path"`
+ Visibility string `json:"visibility"`
+ Secrets bool `json:"secrets"`
+ }
+)
+
+type sourcehutConvertor struct {
+ ctx context.Context
+ meta BuildsMeta
+}
+
+var _ shared.PayloadConvertor[graphqlPayload[buildsVariables]] = sourcehutConvertor{}
+
+func (BuildsHandler) NewRequest(ctx context.Context, w *webhook_model.Webhook, t *webhook_model.HookTask) (*http.Request, []byte, error) {
+ meta := BuildsMeta{}
+ if err := json.Unmarshal([]byte(w.Meta), &meta); err != nil {
+ return nil, nil, fmt.Errorf("newSourcehutRequest meta json: %w", err)
+ }
+ pc := sourcehutConvertor{
+ ctx: ctx,
+ meta: meta,
+ }
+ return shared.NewJSONRequest(pc, w, t, false)
+}
+
+// Create implements PayloadConvertor Create method
+func (pc sourcehutConvertor) Create(p *api.CreatePayload) (graphqlPayload[buildsVariables], error) {
+ return pc.newPayload(p.Repo, p.Sha, p.Ref, p.RefType+" "+git.RefName(p.Ref).ShortName()+" created", true)
+}
+
+// Delete implements PayloadConvertor Delete method
+func (pc sourcehutConvertor) Delete(_ *api.DeletePayload) (graphqlPayload[buildsVariables], error) {
+ return graphqlPayload[buildsVariables]{}, shared.ErrPayloadTypeNotSupported
+}
+
+// Fork implements PayloadConvertor Fork method
+func (pc sourcehutConvertor) Fork(_ *api.ForkPayload) (graphqlPayload[buildsVariables], error) {
+ return graphqlPayload[buildsVariables]{}, shared.ErrPayloadTypeNotSupported
+}
+
+// Push implements PayloadConvertor Push method
+func (pc sourcehutConvertor) Push(p *api.PushPayload) (graphqlPayload[buildsVariables], error) {
+ return pc.newPayload(p.Repo, p.HeadCommit.ID, p.Ref, p.HeadCommit.Message, true)
+}
+
+// Issue implements PayloadConvertor Issue method
+func (pc sourcehutConvertor) Issue(_ *api.IssuePayload) (graphqlPayload[buildsVariables], error) {
+ return graphqlPayload[buildsVariables]{}, shared.ErrPayloadTypeNotSupported
+}
+
+// IssueComment implements PayloadConvertor IssueComment method
+func (pc sourcehutConvertor) IssueComment(_ *api.IssueCommentPayload) (graphqlPayload[buildsVariables], error) {
+ return graphqlPayload[buildsVariables]{}, shared.ErrPayloadTypeNotSupported
+}
+
+// PullRequest implements PayloadConvertor PullRequest method
+func (pc sourcehutConvertor) PullRequest(_ *api.PullRequestPayload) (graphqlPayload[buildsVariables], error) {
+ // TODO
+ return graphqlPayload[buildsVariables]{}, shared.ErrPayloadTypeNotSupported
+}
+
+// Review implements PayloadConvertor Review method
+func (pc sourcehutConvertor) Review(_ *api.PullRequestPayload, _ webhook_module.HookEventType) (graphqlPayload[buildsVariables], error) {
+ return graphqlPayload[buildsVariables]{}, shared.ErrPayloadTypeNotSupported
+}
+
+// Repository implements PayloadConvertor Repository method
+func (pc sourcehutConvertor) Repository(_ *api.RepositoryPayload) (graphqlPayload[buildsVariables], error) {
+ return graphqlPayload[buildsVariables]{}, shared.ErrPayloadTypeNotSupported
+}
+
+// Wiki implements PayloadConvertor Wiki method
+func (pc sourcehutConvertor) Wiki(_ *api.WikiPayload) (graphqlPayload[buildsVariables], error) {
+ return graphqlPayload[buildsVariables]{}, shared.ErrPayloadTypeNotSupported
+}
+
+// Release implements PayloadConvertor Release method
+func (pc sourcehutConvertor) Release(_ *api.ReleasePayload) (graphqlPayload[buildsVariables], error) {
+ return graphqlPayload[buildsVariables]{}, shared.ErrPayloadTypeNotSupported
+}
+
+func (pc sourcehutConvertor) Package(_ *api.PackagePayload) (graphqlPayload[buildsVariables], error) {
+ return graphqlPayload[buildsVariables]{}, shared.ErrPayloadTypeNotSupported
+}
+
+// newPayload opens and adjusts the manifest, then builds the GraphQL payload to submit to the builds service.
+//
+// In case of an error, the Error field is set so that it is visible to the end-user under recent deliveries.
+func (pc sourcehutConvertor) newPayload(repo *api.Repository, commitID, ref, note string, trusted bool) (graphqlPayload[buildsVariables], error) {
+ manifest, err := pc.buildManifest(repo, commitID, ref)
+ if err != nil {
+ if len(manifest) == 0 {
+ return graphqlPayload[buildsVariables]{}, err
+ }
+ // the manifest contains an error for the user: log the actual error and construct the payload
+ // the error will be visible under the "recent deliveries" of the webhook settings.
+ log.Warn("sourcehut.builds: could not construct manifest for %s: %v", repo.FullName, err)
+ msg := fmt.Sprintf("%s:%s %s", repo.FullName, ref, manifest)
+ return graphqlPayload[buildsVariables]{
+ Error: msg,
+ }, nil
+ }
+
+ gitRef := git.RefName(ref)
+ return graphqlPayload[buildsVariables]{
+ Query: `mutation (
+ $manifest: String!
+ $tags: [String!]
+ $note: String!
+ $secrets: Boolean!
+ $execute: Boolean!
+ $visibility: Visibility!
+) {
+ submit(
+ manifest: $manifest
+ tags: $tags
+ note: $note
+ secrets: $secrets
+ execute: $execute
+ visibility: $visibility
+ ) {
+ id
+ }
+}`, Variables: buildsVariables{
+ Manifest: string(manifest),
+ Tags: []string{repo.FullName, gitRef.RefType() + "/" + gitRef.ShortName(), pc.meta.ManifestPath},
+ Note: note,
+ Secrets: pc.meta.Secrets && trusted,
+ Execute: trusted,
+ Visibility: cmp.Or(pc.meta.Visibility, "PRIVATE"),
+ },
+ }, nil
+}
+
+// buildManifest adjusts the manifest to submit to the builds service.
+// In case of an error, the returned []byte may contain a message that can be displayed to the user.
+func (pc sourcehutConvertor) buildManifest(repo *api.Repository, commitID, gitRef string) ([]byte, error) {
+ gitRepo, err := gitrepo.OpenRepository(pc.ctx, repo)
+ if err != nil {
+ msg := "could not open repository"
+ return []byte(msg), fmt.Errorf(msg+": %w", err)
+ }
+ defer gitRepo.Close()
+
+ commit, err := gitRepo.GetCommit(commitID)
+ if err != nil {
+ msg := fmt.Sprintf("could not get commit %q", commitID)
+ return []byte(msg), fmt.Errorf(msg+": %w", err)
+ }
+ entry, err := commit.GetTreeEntryByPath(pc.meta.ManifestPath)
+ if err != nil {
+ msg := fmt.Sprintf("could not open manifest %q", pc.meta.ManifestPath)
+ return []byte(msg), fmt.Errorf(msg+": %w", err)
+ }
+ r, err := entry.Blob().DataAsync()
+ if err != nil {
+ msg := fmt.Sprintf("could not read manifest %q", pc.meta.ManifestPath)
+ return []byte(msg), fmt.Errorf(msg+": %w", err)
+ }
+ defer r.Close()
+
+ // reference: https://man.sr.ht/builds.sr.ht/manifest.md
+ var manifest struct {
+ Sources []string `yaml:"sources"`
+ Environment map[string]string `yaml:"environment"`
+
+ Rest map[string]yaml.Node `yaml:",inline"`
+ }
+ if err := yaml.NewDecoder(r).Decode(&manifest); err != nil {
+ msg := fmt.Sprintf("could not decode manifest %q", pc.meta.ManifestPath)
+ return []byte(msg), fmt.Errorf(msg+": %w", err)
+ }
+
+ if manifest.Environment == nil {
+ manifest.Environment = make(map[string]string)
+ }
+ manifest.Environment["BUILD_SUBMITTER"] = "forgejo"
+ manifest.Environment["BUILD_SUBMITTER_URL"] = setting.AppURL
+ manifest.Environment["GIT_REF"] = gitRef
+
+ source := repo.CloneURL + "#" + commitID
+ found := false
+ for i, s := range manifest.Sources {
+ if s == repo.CloneURL {
+ manifest.Sources[i] = source
+ found = true
+ break
+ }
+ }
+ if !found {
+ manifest.Sources = append(manifest.Sources, source)
+ }
+
+ return yaml.Marshal(manifest)
+}
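
The central transformation in buildManifest is pinning the build source to the triggering commit: a source entry equal to the clone URL is rewritten to "<url>#<commit>", and a pinned entry is appended when none matches. A standalone sketch of just that step (sample values are assumptions):

    package main

    import "fmt"

    // pinSource rewrites a matching clone URL to url#commit, or appends the pinned form.
    func pinSource(sources []string, cloneURL, commitID string) []string {
        pinned := cloneURL + "#" + commitID
        for i, s := range sources {
            if s == cloneURL {
                sources[i] = pinned
                return sources
            }
        }
        return append(sources, pinned)
    }

    func main() {
        sources := []string{"http://localhost:3000/testdata/repo.git"}
        fmt.Println(pinSource(sources, "http://localhost:3000/testdata/repo.git", "58771003"))
        // [http://localhost:3000/testdata/repo.git#58771003]
    }
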
diff --git a/services/webhook/sourcehut/builds_test.go b/services/webhook/sourcehut/builds_test.go
new file mode 100644
index 0000000..1a37279
--- /dev/null
+++ b/services/webhook/sourcehut/builds_test.go
@@ -0,0 +1,386 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package sourcehut
+
+import (
+ "context"
+ "testing"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/test"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ "code.gitea.io/gitea/services/webhook/shared"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func gitInit(t testing.TB) {
+ if setting.Git.HomePath != "" {
+ return
+ }
+ t.Cleanup(test.MockVariableValue(&setting.Git.HomePath, t.TempDir()))
+ require.NoError(t, git.InitSimple(context.Background()))
+}
+
+func TestSourcehutBuildsPayload(t *testing.T) {
+ gitInit(t)
+ defer test.MockVariableValue(&setting.RepoRootPath, ".")()
+ defer test.MockVariableValue(&setting.AppURL, "https://example.forgejo.org/")()
+
+ repo := &api.Repository{
+ HTMLURL: "http://localhost:3000/testdata/repo",
+ Name: "repo",
+ FullName: "testdata/repo",
+ Owner: &api.User{
+ UserName: "testdata",
+ },
+ CloneURL: "http://localhost:3000/testdata/repo.git",
+ }
+
+ pc := sourcehutConvertor{
+ ctx: git.DefaultContext,
+ meta: BuildsMeta{
+ ManifestPath: "adjust me in each test",
+ Visibility: "UNLISTED",
+ Secrets: true,
+ },
+ }
+ t.Run("Create/branch", func(t *testing.T) {
+ p := &api.CreatePayload{
+ Sha: "58771003157b81abc6bf41df0c5db4147a3e3c83",
+ Ref: "refs/heads/test",
+ RefType: "branch",
+ Repo: repo,
+ }
+
+ pc.meta.ManifestPath = "simple.yml"
+ pl, err := pc.Create(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, `sources:
+ - http://localhost:3000/testdata/repo.git#58771003157b81abc6bf41df0c5db4147a3e3c83
+environment:
+ BUILD_SUBMITTER: forgejo
+ BUILD_SUBMITTER_URL: https://example.forgejo.org/
+ GIT_REF: refs/heads/test
+image: alpine/edge
+tasks:
+ - say-hello: |
+ echo hello
+ - say-world: echo world
+`, pl.Variables.Manifest)
+ assert.Equal(t, buildsVariables{
+ Manifest: pl.Variables.Manifest, // the manifest correctness is checked above, for nicer diff on error
+ Note: "branch test created",
+ Tags: []string{"testdata/repo", "branch/test", "simple.yml"},
+ Secrets: true,
+ Execute: true,
+ Visibility: "UNLISTED",
+ }, pl.Variables)
+ })
+ t.Run("Create/tag", func(t *testing.T) {
+ p := &api.CreatePayload{
+ Sha: "58771003157b81abc6bf41df0c5db4147a3e3c83",
+ Ref: "refs/tags/v1.0.0",
+ RefType: "tag",
+ Repo: repo,
+ }
+
+ pc.meta.ManifestPath = "simple.yml"
+ pl, err := pc.Create(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, `sources:
+ - http://localhost:3000/testdata/repo.git#58771003157b81abc6bf41df0c5db4147a3e3c83
+environment:
+ BUILD_SUBMITTER: forgejo
+ BUILD_SUBMITTER_URL: https://example.forgejo.org/
+ GIT_REF: refs/tags/v1.0.0
+image: alpine/edge
+tasks:
+ - say-hello: |
+ echo hello
+ - say-world: echo world
+`, pl.Variables.Manifest)
+ assert.Equal(t, buildsVariables{
+ Manifest: pl.Variables.Manifest, // the manifest correctness is checked above, for nicer diff on error
+ Note: "tag v1.0.0 created",
+ Tags: []string{"testdata/repo", "tag/v1.0.0", "simple.yml"},
+ Secrets: true,
+ Execute: true,
+ Visibility: "UNLISTED",
+ }, pl.Variables)
+ })
+
+ t.Run("Delete", func(t *testing.T) {
+ p := &api.DeletePayload{}
+
+ pl, err := pc.Delete(p)
+ require.Equal(t, shared.ErrPayloadTypeNotSupported, err)
+ require.Equal(t, graphqlPayload[buildsVariables]{}, pl)
+ })
+
+ t.Run("Fork", func(t *testing.T) {
+ p := &api.ForkPayload{}
+
+ pl, err := pc.Fork(p)
+ require.Equal(t, shared.ErrPayloadTypeNotSupported, err)
+ require.Equal(t, graphqlPayload[buildsVariables]{}, pl)
+ })
+
+ t.Run("Push/simple", func(t *testing.T) {
+ p := &api.PushPayload{
+ Ref: "refs/heads/main",
+ HeadCommit: &api.PayloadCommit{
+ ID: "58771003157b81abc6bf41df0c5db4147a3e3c83",
+ Message: "add simple",
+ },
+ Repo: repo,
+ }
+
+ pc.meta.ManifestPath = "simple.yml"
+ pl, err := pc.Push(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, `sources:
+ - http://localhost:3000/testdata/repo.git#58771003157b81abc6bf41df0c5db4147a3e3c83
+environment:
+ BUILD_SUBMITTER: forgejo
+ BUILD_SUBMITTER_URL: https://example.forgejo.org/
+ GIT_REF: refs/heads/main
+image: alpine/edge
+tasks:
+ - say-hello: |
+ echo hello
+ - say-world: echo world
+`, pl.Variables.Manifest)
+ assert.Equal(t, buildsVariables{
+ Manifest: pl.Variables.Manifest, // the manifest correctness is checked above, for nicer diff on error
+ Note: "add simple",
+ Tags: []string{"testdata/repo", "branch/main", "simple.yml"},
+ Secrets: true,
+ Execute: true,
+ Visibility: "UNLISTED",
+ }, pl.Variables)
+ })
+ t.Run("Push/complex", func(t *testing.T) {
+ p := &api.PushPayload{
+ Ref: "refs/heads/main",
+ HeadCommit: &api.PayloadCommit{
+ ID: "b0404943256a1f5a50c3726f4378756b4c1e5704",
+ Message: "replace simple with complex",
+ },
+ Repo: repo,
+ }
+
+ pc.meta.ManifestPath = "complex.yaml"
+ pc.meta.Visibility = "PRIVATE"
+ pc.meta.Secrets = false
+ pl, err := pc.Push(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, `sources:
+ - http://localhost:3000/testdata/repo.git#b0404943256a1f5a50c3726f4378756b4c1e5704
+environment:
+ BUILD_SUBMITTER: forgejo
+ BUILD_SUBMITTER_URL: https://example.forgejo.org/
+ GIT_REF: refs/heads/main
+ deploy: synapse@synapse-bt.org
+image: archlinux
+packages:
+ - nodejs
+ - npm
+ - rsync
+secrets:
+ - 7ebab768-e5e4-4c9d-ba57-ec41a72c5665
+tasks: []
+triggers:
+ - condition: failure
+ action: email
+ to: Jim Jimson <jim@example.org>
+ # report back the status
+ - condition: always
+ action: webhook
+ url: https://hook.example.org
+`, pl.Variables.Manifest)
+ assert.Equal(t, buildsVariables{
+ Manifest: pl.Variables.Manifest, // the manifest correctness is checked above, for nicer diff on error
+ Note: "replace simple with complex",
+ Tags: []string{"testdata/repo", "branch/main", "complex.yaml"},
+ Secrets: false,
+ Execute: true,
+ Visibility: "PRIVATE",
+ }, pl.Variables)
+ })
+
+ t.Run("Push/error", func(t *testing.T) {
+ p := &api.PushPayload{
+ Ref: "refs/heads/main",
+ HeadCommit: &api.PayloadCommit{
+ ID: "58771003157b81abc6bf41df0c5db4147a3e3c83",
+ Message: "add simple",
+ },
+ Repo: repo,
+ }
+
+ pc.meta.ManifestPath = "non-existing.yml"
+ pl, err := pc.Push(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, graphqlPayload[buildsVariables]{
+ Error: "testdata/repo:refs/heads/main could not open manifest \"non-existing.yml\"",
+ }, pl)
+ })
+
+ t.Run("Issue", func(t *testing.T) {
+ p := &api.IssuePayload{}
+
+ p.Action = api.HookIssueOpened
+ pl, err := pc.Issue(p)
+ require.Equal(t, shared.ErrPayloadTypeNotSupported, err)
+ require.Equal(t, graphqlPayload[buildsVariables]{}, pl)
+
+ p.Action = api.HookIssueClosed
+ pl, err = pc.Issue(p)
+ require.Equal(t, shared.ErrPayloadTypeNotSupported, err)
+ require.Equal(t, graphqlPayload[buildsVariables]{}, pl)
+ })
+
+ t.Run("IssueComment", func(t *testing.T) {
+ p := &api.IssueCommentPayload{}
+
+ pl, err := pc.IssueComment(p)
+ require.Equal(t, shared.ErrPayloadTypeNotSupported, err)
+ require.Equal(t, graphqlPayload[buildsVariables]{}, pl)
+ })
+
+ t.Run("PullRequest", func(t *testing.T) {
+ p := &api.PullRequestPayload{}
+
+ pl, err := pc.PullRequest(p)
+ require.Equal(t, shared.ErrPayloadTypeNotSupported, err)
+ require.Equal(t, graphqlPayload[buildsVariables]{}, pl)
+ })
+
+ t.Run("PullRequestComment", func(t *testing.T) {
+ p := &api.IssueCommentPayload{
+ IsPull: true,
+ }
+
+ pl, err := pc.IssueComment(p)
+ require.Equal(t, shared.ErrPayloadTypeNotSupported, err)
+ require.Equal(t, graphqlPayload[buildsVariables]{}, pl)
+ })
+
+ t.Run("Review", func(t *testing.T) {
+ p := &api.PullRequestPayload{}
+ p.Action = api.HookIssueReviewed
+
+ pl, err := pc.Review(p, webhook_module.HookEventPullRequestReviewApproved)
+ require.Equal(t, shared.ErrPayloadTypeNotSupported, err)
+ require.Equal(t, graphqlPayload[buildsVariables]{}, pl)
+ })
+
+ t.Run("Repository", func(t *testing.T) {
+ p := &api.RepositoryPayload{}
+
+ pl, err := pc.Repository(p)
+ require.Equal(t, shared.ErrPayloadTypeNotSupported, err)
+ require.Equal(t, graphqlPayload[buildsVariables]{}, pl)
+ })
+
+ t.Run("Package", func(t *testing.T) {
+ p := &api.PackagePayload{}
+
+ pl, err := pc.Package(p)
+ require.Equal(t, shared.ErrPayloadTypeNotSupported, err)
+ require.Equal(t, graphqlPayload[buildsVariables]{}, pl)
+ })
+
+ t.Run("Wiki", func(t *testing.T) {
+ p := &api.WikiPayload{}
+
+ p.Action = api.HookWikiCreated
+ pl, err := pc.Wiki(p)
+ require.Equal(t, shared.ErrPayloadTypeNotSupported, err)
+ require.Equal(t, graphqlPayload[buildsVariables]{}, pl)
+
+ p.Action = api.HookWikiEdited
+ pl, err = pc.Wiki(p)
+ require.Equal(t, shared.ErrPayloadTypeNotSupported, err)
+ require.Equal(t, graphqlPayload[buildsVariables]{}, pl)
+
+ p.Action = api.HookWikiDeleted
+ pl, err = pc.Wiki(p)
+ require.Equal(t, shared.ErrPayloadTypeNotSupported, err)
+ require.Equal(t, graphqlPayload[buildsVariables]{}, pl)
+ })
+
+ t.Run("Release", func(t *testing.T) {
+ p := &api.ReleasePayload{}
+
+ pl, err := pc.Release(p)
+ require.Equal(t, shared.ErrPayloadTypeNotSupported, err)
+ require.Equal(t, graphqlPayload[buildsVariables]{}, pl)
+ })
+}
+
+func TestSourcehutJSONPayload(t *testing.T) {
+ gitInit(t)
+ defer test.MockVariableValue(&setting.RepoRootPath, ".")()
+ defer test.MockVariableValue(&setting.AppURL, "https://example.forgejo.org/")()
+
+ repo := &api.Repository{
+ HTMLURL: "http://localhost:3000/testdata/repo",
+ Name: "repo",
+ FullName: "testdata/repo",
+ Owner: &api.User{
+ UserName: "testdata",
+ },
+ CloneURL: "http://localhost:3000/testdata/repo.git",
+ }
+
+ p := &api.PushPayload{
+ Ref: "refs/heads/main",
+ HeadCommit: &api.PayloadCommit{
+ ID: "58771003157b81abc6bf41df0c5db4147a3e3c83",
+ Message: "json test",
+ },
+ Repo: repo,
+ }
+ data, err := p.JSONPayload()
+ require.NoError(t, err)
+
+ hook := &webhook_model.Webhook{
+ RepoID: 3,
+ IsActive: true,
+		Type:       webhook_module.SOURCEHUT_BUILDS,
+ URL: "https://sourcehut.example.com/api/jobs",
+ Meta: `{"manifest_path":"simple.yml"}`,
+ }
+ task := &webhook_model.HookTask{
+ HookID: hook.ID,
+ EventType: webhook_module.HookEventPush,
+ PayloadContent: string(data),
+ PayloadVersion: 2,
+ }
+
+ req, reqBody, err := BuildsHandler{}.NewRequest(context.Background(), hook, task)
+ require.NoError(t, err)
+ require.NotNil(t, req)
+ require.NotNil(t, reqBody)
+
+ assert.Equal(t, "POST", req.Method)
+ assert.Equal(t, "/api/jobs", req.URL.Path)
+ assert.Equal(t, "application/json", req.Header.Get("Content-Type"))
+ var body graphqlPayload[buildsVariables]
+ err = json.NewDecoder(req.Body).Decode(&body)
+ require.NoError(t, err)
+ assert.Equal(t, "json test", body.Variables.Note)
+}
diff --git a/services/webhook/sourcehut/testdata/repo.git/HEAD b/services/webhook/sourcehut/testdata/repo.git/HEAD
new file mode 100644
index 0000000..b870d82
--- /dev/null
+++ b/services/webhook/sourcehut/testdata/repo.git/HEAD
@@ -0,0 +1 @@
+ref: refs/heads/main
diff --git a/services/webhook/sourcehut/testdata/repo.git/config b/services/webhook/sourcehut/testdata/repo.git/config
new file mode 100644
index 0000000..07d359d
--- /dev/null
+++ b/services/webhook/sourcehut/testdata/repo.git/config
@@ -0,0 +1,4 @@
+[core]
+ repositoryformatversion = 0
+ filemode = true
+ bare = true
diff --git a/services/webhook/sourcehut/testdata/repo.git/description b/services/webhook/sourcehut/testdata/repo.git/description
new file mode 100644
index 0000000..498b267
--- /dev/null
+++ b/services/webhook/sourcehut/testdata/repo.git/description
@@ -0,0 +1 @@
+Unnamed repository; edit this file 'description' to name the repository.
diff --git a/services/webhook/sourcehut/testdata/repo.git/info/exclude b/services/webhook/sourcehut/testdata/repo.git/info/exclude
new file mode 100644
index 0000000..a5196d1
--- /dev/null
+++ b/services/webhook/sourcehut/testdata/repo.git/info/exclude
@@ -0,0 +1,6 @@
+# git ls-files --others --exclude-from=.git/info/exclude
+# Lines that start with '#' are comments.
+# For a project mostly in C, the following would be a good set of
+# exclude patterns (uncomment them if you want to use them):
+# *.[oa]
+# *~
diff --git a/services/webhook/sourcehut/testdata/repo.git/objects/3c/3d4b799b3933ba687b263eeef2034300a5315e b/services/webhook/sourcehut/testdata/repo.git/objects/3c/3d4b799b3933ba687b263eeef2034300a5315e
new file mode 100644
index 0000000..f03b45d
--- /dev/null
+++ b/services/webhook/sourcehut/testdata/repo.git/objects/3c/3d4b799b3933ba687b263eeef2034300a5315e
Binary files differ
diff --git a/services/webhook/sourcehut/testdata/repo.git/objects/58/771003157b81abc6bf41df0c5db4147a3e3c83 b/services/webhook/sourcehut/testdata/repo.git/objects/58/771003157b81abc6bf41df0c5db4147a3e3c83
new file mode 100644
index 0000000..e9ff0d0
--- /dev/null
+++ b/services/webhook/sourcehut/testdata/repo.git/objects/58/771003157b81abc6bf41df0c5db4147a3e3c83
@@ -0,0 +1,2 @@
+x=ŽÁ‚0D=÷+önBºXšÃÉÿhéVk¨%¥?_PãmÞáÍÌ”b °ÇCÉÌ ¹±Dä{´
+;ƒµF’&«”q®ë™m¥“Â<Ê5e8§|á[‚ÁÃÈ/—™« O€„5¶¤ GYK)¦Ï\á iOÎKJ3 —PÆçjñÆU>ÿÝVÓÏÛXÃÞÑ܃絈7\p;¼ \ No newline at end of file
diff --git a/services/webhook/sourcehut/testdata/repo.git/objects/69/b217caa89166a02b8cd368b64fb83a44720e14 b/services/webhook/sourcehut/testdata/repo.git/objects/69/b217caa89166a02b8cd368b64fb83a44720e14
new file mode 100644
index 0000000..1aed811
--- /dev/null
+++ b/services/webhook/sourcehut/testdata/repo.git/objects/69/b217caa89166a02b8cd368b64fb83a44720e14
@@ -0,0 +1 @@
+x=ŽÍnà „{æ)ö^ÉZ ,EUN}ï&T¶A„¶yüÒõ6ßa¾™Tö=w˜­êˆÌ‚ŽÄ¢5‹çO‚ ²\ôm\¼uFT¥ÆG¼×ˆF;ƒ¦˜NQ¬^“[£ÕÖ“a“‚QôÞo¥ÁkiW~+p–ßpáíuãià h¯ça²ˆðŒ3¢J?÷:7([þàVKÙà|ÍýòÍ™ÛT…ÖIÚ7 ÿëªÆu£Ä°Ó‘…ï>s¿ÁPŽ½ Û=—C}Ë¢O» \ No newline at end of file
diff --git a/services/webhook/sourcehut/testdata/repo.git/objects/99/fb389b232e5497f0dcdb1c1065eac1d10d3794 b/services/webhook/sourcehut/testdata/repo.git/objects/99/fb389b232e5497f0dcdb1c1065eac1d10d3794
new file mode 100644
index 0000000..43dd885
--- /dev/null
+++ b/services/webhook/sourcehut/testdata/repo.git/objects/99/fb389b232e5497f0dcdb1c1065eac1d10d3794
Binary files differ
diff --git a/services/webhook/sourcehut/testdata/repo.git/objects/9e/4b777f81b316a1c75a0797b33add68ee49b0d0 b/services/webhook/sourcehut/testdata/repo.git/objects/9e/4b777f81b316a1c75a0797b33add68ee49b0d0
new file mode 100644
index 0000000..081cfcd
--- /dev/null
+++ b/services/webhook/sourcehut/testdata/repo.git/objects/9e/4b777f81b316a1c75a0797b33add68ee49b0d0
Binary files differ
diff --git a/services/webhook/sourcehut/testdata/repo.git/objects/a5/4082fdb8e55055382725f10a81bb4dc2b13029 b/services/webhook/sourcehut/testdata/repo.git/objects/a5/4082fdb8e55055382725f10a81bb4dc2b13029
new file mode 100644
index 0000000..071f79e
--- /dev/null
+++ b/services/webhook/sourcehut/testdata/repo.git/objects/a5/4082fdb8e55055382725f10a81bb4dc2b13029
@@ -0,0 +1,4 @@
+xUÝnƒ0 …w§°´k
+ë lÑ4õz¯0í ¤„%fmß~@ÛýD²Ùùìc<(·Å°% mç]˜NjDÛÏRÒ
+æ“Aà†é–ŒÃõÓ9X•xŠö·¶už{¶è;N¢‹¢È…’4(˜GyÓ:Q‚©OÞ?…/9 älhô|Ö0c¢ýõÎŒl8¶*‘$?ÃÕdÐÔ»§Œ**³Ò>7™ÁªÎÈ–XomµÛUJItmKqíÊÀrhœ8>Ñù)ÒºÚ‹FÃ,®Š°†77,‘8ÀËÁ {:á0zZfy‹îaÙ)
+˜Ù5Ž Ê´Øõ„þˆç‹‰7ΑLÇܯ¤)z ‹yivoQ78J}³è‡¤ \ No newline at end of file
diff --git a/services/webhook/sourcehut/testdata/repo.git/objects/aa/3905af404394f576f88f00e7f0919b4b97453f b/services/webhook/sourcehut/testdata/repo.git/objects/aa/3905af404394f576f88f00e7f0919b4b97453f
new file mode 100644
index 0000000..cc96171
--- /dev/null
+++ b/services/webhook/sourcehut/testdata/repo.git/objects/aa/3905af404394f576f88f00e7f0919b4b97453f
Binary files differ
diff --git a/services/webhook/sourcehut/testdata/repo.git/objects/b0/404943256a1f5a50c3726f4378756b4c1e5704 b/services/webhook/sourcehut/testdata/repo.git/objects/b0/404943256a1f5a50c3726f4378756b4c1e5704
new file mode 100644
index 0000000..a2cff63
--- /dev/null
+++ b/services/webhook/sourcehut/testdata/repo.git/objects/b0/404943256a1f5a50c3726f4378756b4c1e5704
Binary files differ
diff --git a/services/webhook/sourcehut/testdata/repo.git/objects/d2/e0862c8b8097ba4bdd72946c20479751d307a0 b/services/webhook/sourcehut/testdata/repo.git/objects/d2/e0862c8b8097ba4bdd72946c20479751d307a0
new file mode 100644
index 0000000..f57ab8a
--- /dev/null
+++ b/services/webhook/sourcehut/testdata/repo.git/objects/d2/e0862c8b8097ba4bdd72946c20479751d307a0
@@ -0,0 +1,4 @@
+xENInÃ0 ìY¯Ð»®—D§þ#È¢ Û,
+"$¿¯¦É\fÁ™9ئ9~,+Lä-œã’¶»É€×=oìgô#ÿ&¯OUä‘Ðoß·³jöU!Î,ê¿êº®”DGP¨
+e>L‹¹Š·ç‹¡t[
+§•’þŽ”#?¼ÝßCú~² zà2!,¤¯qCtÔQëZ<.@78Âö†»¾ïŒù\«I \ No newline at end of file
diff --git a/services/webhook/sourcehut/testdata/repo.git/refs/heads/main b/services/webhook/sourcehut/testdata/repo.git/refs/heads/main
new file mode 100644
index 0000000..a7ab419
--- /dev/null
+++ b/services/webhook/sourcehut/testdata/repo.git/refs/heads/main
@@ -0,0 +1 @@
+b0404943256a1f5a50c3726f4378756b4c1e5704
diff --git a/services/webhook/telegram.go b/services/webhook/telegram.go
new file mode 100644
index 0000000..bacfa64
--- /dev/null
+++ b/services/webhook/telegram.go
@@ -0,0 +1,228 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "context"
+ "fmt"
+ "html/template"
+ "net/http"
+ "net/url"
+ "strings"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
+ api "code.gitea.io/gitea/modules/structs"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/webhook/shared"
+)
+
+type telegramHandler struct{}
+
+func (telegramHandler) Type() webhook_module.HookType { return webhook_module.TELEGRAM }
+func (telegramHandler) Icon(size int) template.HTML { return shared.ImgIcon("telegram.png", size) }
+
+func (telegramHandler) UnmarshalForm(bind func(any)) forms.WebhookForm {
+ var form struct {
+ forms.WebhookCoreForm
+ BotToken string `binding:"Required"`
+ ChatID string `binding:"Required"`
+ ThreadID string
+ }
+ bind(&form)
+
+ return forms.WebhookForm{
+ WebhookCoreForm: form.WebhookCoreForm,
+ URL: fmt.Sprintf("https://api.telegram.org/bot%s/sendMessage?chat_id=%s&message_thread_id=%s", url.PathEscape(form.BotToken), url.QueryEscape(form.ChatID), url.QueryEscape(form.ThreadID)),
+ ContentType: webhook_model.ContentTypeJSON,
+ Secret: "",
+ HTTPMethod: http.MethodPost,
+ Metadata: &TelegramMeta{
+ BotToken: form.BotToken,
+ ChatID: form.ChatID,
+ ThreadID: form.ThreadID,
+ },
+ }
+}
+
+type (
+	// TelegramPayload represents the JSON payload sent to the Telegram API
+ TelegramPayload struct {
+ Message string `json:"text"`
+ ParseMode string `json:"parse_mode"`
+ DisableWebPreview bool `json:"disable_web_page_preview"`
+ }
+
+ // TelegramMeta contains the telegram metadata
+ TelegramMeta struct {
+ BotToken string `json:"bot_token"`
+ ChatID string `json:"chat_id"`
+ ThreadID string `json:"thread_id"`
+ }
+)
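
For reference, marshalling TelegramPayload produces the JSON consumed by the sendMessage endpoint configured in UnmarshalForm above; the field names follow the struct tags. A standalone sketch (the parse_mode value is an assumption, since createTelegramPayload is defined outside this hunk):

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // telegramPayload is a local copy of TelegramPayload above.
    type telegramPayload struct {
        Message           string `json:"text"`
        ParseMode         string `json:"parse_mode"`
        DisableWebPreview bool   `json:"disable_web_page_preview"`
    }

    func main() {
        b, _ := json.Marshal(telegramPayload{
            Message:   "[repo] branch main created",
            ParseMode: "HTML", // assumed; set by createTelegramPayload in the full file
        })
        fmt.Println(string(b))
        // {"text":"[repo] branch main created","parse_mode":"HTML","disable_web_page_preview":false}
    }
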
+
+// Metadata returns telegram metadata
+func (telegramHandler) Metadata(w *webhook_model.Webhook) any {
+ s := &TelegramMeta{}
+ if err := json.Unmarshal([]byte(w.Meta), s); err != nil {
+ log.Error("telegramHandler.Metadata(%d): %v", w.ID, err)
+ }
+ return s
+}
+
+// Create implements PayloadConvertor Create method
+func (t telegramConvertor) Create(p *api.CreatePayload) (TelegramPayload, error) {
+ // created tag/branch
+ refName := git.RefName(p.Ref).ShortName()
+ title := fmt.Sprintf(`[<a href="%s">%s</a>] %s <a href="%s">%s</a> created`, p.Repo.HTMLURL, p.Repo.FullName, p.RefType,
+ p.Repo.HTMLURL+"/src/"+refName, refName)
+
+ return createTelegramPayload(title), nil
+}
+
+// Delete implements PayloadConvertor Delete method
+func (t telegramConvertor) Delete(p *api.DeletePayload) (TelegramPayload, error) {
+	// deleted tag/branch
+ refName := git.RefName(p.Ref).ShortName()
+ title := fmt.Sprintf(`[<a href="%s">%s</a>] %s <a href="%s">%s</a> deleted`, p.Repo.HTMLURL, p.Repo.FullName, p.RefType,
+ p.Repo.HTMLURL+"/src/"+refName, refName)
+
+ return createTelegramPayload(title), nil
+}
+
+// Fork implements PayloadConvertor Fork method
+func (t telegramConvertor) Fork(p *api.ForkPayload) (TelegramPayload, error) {
+ title := fmt.Sprintf(`%s is forked to <a href="%s">%s</a>`, p.Forkee.FullName, p.Repo.HTMLURL, p.Repo.FullName)
+
+ return createTelegramPayload(title), nil
+}
+
+// Push implements PayloadConvertor Push method
+func (t telegramConvertor) Push(p *api.PushPayload) (TelegramPayload, error) {
+ var (
+ branchName = git.RefName(p.Ref).ShortName()
+ commitDesc string
+ )
+
+ var titleLink string
+ if p.TotalCommits == 1 {
+ commitDesc = "1 new commit"
+ titleLink = p.Commits[0].URL
+ } else {
+ commitDesc = fmt.Sprintf("%d new commits", p.TotalCommits)
+ titleLink = p.CompareURL
+ }
+ if titleLink == "" {
+ titleLink = p.Repo.HTMLURL + "/src/" + branchName
+ }
+ title := fmt.Sprintf(`[<a href="%s">%s</a>:<a href="%s">%s</a>] %s`, p.Repo.HTMLURL, p.Repo.FullName, titleLink, branchName, commitDesc)
+
+ var text string
+ // for each commit, generate attachment text
+ for i, commit := range p.Commits {
+ var authorName string
+ if commit.Author != nil {
+ authorName = " - " + commit.Author.Name
+ }
+ text += fmt.Sprintf(`[<a href="%s">%s</a>] %s`, commit.URL, commit.ID[:7],
+ strings.TrimRight(commit.Message, "\r\n")) + authorName
+ // add linebreak to each commit but the last
+ if i < len(p.Commits)-1 {
+ text += "\n"
+ }
+ }
+
+ return createTelegramPayload(title + "\n" + text), nil
+}
+
+// Issue implements PayloadConvertor Issue method
+func (t telegramConvertor) Issue(p *api.IssuePayload) (TelegramPayload, error) {
+ text, _, attachmentText, _ := getIssuesPayloadInfo(p, htmlLinkFormatter, true)
+
+ return createTelegramPayload(text + "\n\n" + attachmentText), nil
+}
+
+// IssueComment implements PayloadConvertor IssueComment method
+func (t telegramConvertor) IssueComment(p *api.IssueCommentPayload) (TelegramPayload, error) {
+ text, _, _ := getIssueCommentPayloadInfo(p, htmlLinkFormatter, true)
+
+ return createTelegramPayload(text + "\n" + p.Comment.Body), nil
+}
+
+// PullRequest implements PayloadConvertor PullRequest method
+func (t telegramConvertor) PullRequest(p *api.PullRequestPayload) (TelegramPayload, error) {
+ text, _, attachmentText, _ := getPullRequestPayloadInfo(p, htmlLinkFormatter, true)
+
+ return createTelegramPayload(text + "\n" + attachmentText), nil
+}
+
+// Review implements PayloadConvertor Review method
+func (t telegramConvertor) Review(p *api.PullRequestPayload, event webhook_module.HookEventType) (TelegramPayload, error) {
+ var text, attachmentText string
+ if p.Action == api.HookIssueReviewed {
+ action, err := parseHookPullRequestEventType(event)
+ if err != nil {
+ return TelegramPayload{}, err
+ }
+
+ text = fmt.Sprintf("[%s] Pull request review %s: #%d %s", p.Repository.FullName, action, p.Index, p.PullRequest.Title)
+ attachmentText = p.Review.Content
+ }
+
+ return createTelegramPayload(text + "\n" + attachmentText), nil
+}
+
+// Repository implements PayloadConvertor Repository method
+func (t telegramConvertor) Repository(p *api.RepositoryPayload) (TelegramPayload, error) {
+ var title string
+ switch p.Action {
+ case api.HookRepoCreated:
+ title = fmt.Sprintf(`[<a href="%s">%s</a>] Repository created`, p.Repository.HTMLURL, p.Repository.FullName)
+ return createTelegramPayload(title), nil
+ case api.HookRepoDeleted:
+ title = fmt.Sprintf("[%s] Repository deleted", p.Repository.FullName)
+ return createTelegramPayload(title), nil
+ }
+ return TelegramPayload{}, nil
+}
+
+// Wiki implements PayloadConvertor Wiki method
+func (t telegramConvertor) Wiki(p *api.WikiPayload) (TelegramPayload, error) {
+ text, _, _ := getWikiPayloadInfo(p, htmlLinkFormatter, true)
+
+ return createTelegramPayload(text), nil
+}
+
+// Release implements PayloadConvertor Release method
+func (t telegramConvertor) Release(p *api.ReleasePayload) (TelegramPayload, error) {
+ text, _ := getReleasePayloadInfo(p, htmlLinkFormatter, true)
+
+ return createTelegramPayload(text), nil
+}
+
+func (t telegramConvertor) Package(p *api.PackagePayload) (TelegramPayload, error) {
+ text, _ := getPackagePayloadInfo(p, htmlLinkFormatter, true)
+
+ return createTelegramPayload(text), nil
+}
+
+func createTelegramPayload(message string) TelegramPayload {
+ return TelegramPayload{
+ Message: markup.Sanitize(strings.TrimSpace(message)),
+ ParseMode: "HTML",
+ DisableWebPreview: true,
+ }
+}
+
+type telegramConvertor struct{}
+
+var _ shared.PayloadConvertor[TelegramPayload] = telegramConvertor{}
+
+func (telegramHandler) NewRequest(ctx context.Context, w *webhook_model.Webhook, t *webhook_model.HookTask) (*http.Request, []byte, error) {
+ return shared.NewJSONRequest(telegramConvertor{}, w, t, true)
+}
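
The delivery URL assembled in UnmarshalForm above targets the Telegram Bot API's sendMessage endpoint. A minimal standalone sketch of that assembly, using only the standard library; the token, chat and thread values are hypothetical:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	botToken := "123456:ABC-DEF" // hypothetical bot token
	chatID := "-1001234567890"   // hypothetical chat ID
	threadID := "42"             // optional forum thread ID

	// The token is part of the URL path while the chat/thread IDs are query
	// values, hence the two different escaping functions.
	u := fmt.Sprintf("https://api.telegram.org/bot%s/sendMessage?chat_id=%s&message_thread_id=%s",
		url.PathEscape(botToken), url.QueryEscape(chatID), url.QueryEscape(threadID))
	fmt.Println(u)
}
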
diff --git a/services/webhook/telegram_test.go b/services/webhook/telegram_test.go
new file mode 100644
index 0000000..0e27535
--- /dev/null
+++ b/services/webhook/telegram_test.go
@@ -0,0 +1,212 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "context"
+ "testing"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/json"
+ api "code.gitea.io/gitea/modules/structs"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestTelegramPayload(t *testing.T) {
+ tc := telegramConvertor{}
+
+ t.Run("Correct webhook params", func(t *testing.T) {
+ p := createTelegramPayload("testMsg ")
+
+ assert.Equal(t, "HTML", p.ParseMode)
+ assert.True(t, p.DisableWebPreview)
+ assert.Equal(t, "testMsg", p.Message)
+ })
+
+ t.Run("Create", func(t *testing.T) {
+ p := createTestPayload()
+
+ pl, err := tc.Create(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo" rel="nofollow">test/repo</a>] branch <a href="http://localhost:3000/test/repo/src/test" rel="nofollow">test</a> created`, pl.Message)
+ })
+
+ t.Run("Delete", func(t *testing.T) {
+ p := deleteTestPayload()
+
+ pl, err := tc.Delete(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo" rel="nofollow">test/repo</a>] branch <a href="http://localhost:3000/test/repo/src/test" rel="nofollow">test</a> deleted`, pl.Message)
+ })
+
+ t.Run("Fork", func(t *testing.T) {
+ p := forkTestPayload()
+
+ pl, err := tc.Fork(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, `test/repo2 is forked to <a href="http://localhost:3000/test/repo" rel="nofollow">test/repo</a>`, pl.Message)
+ })
+
+ t.Run("Push", func(t *testing.T) {
+ p := pushTestPayload()
+
+ pl, err := tc.Push(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo" rel="nofollow">test/repo</a>:<a href="http://localhost:3000/test/repo/src/test" rel="nofollow">test</a>] 2 new commits
+[<a href="http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778" rel="nofollow">2020558</a>] commit message - user1
+[<a href="http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778" rel="nofollow">2020558</a>] commit message - user1`, pl.Message)
+ })
+
+ t.Run("Issue", func(t *testing.T) {
+ p := issueTestPayload()
+
+ p.Action = api.HookIssueOpened
+ pl, err := tc.Issue(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo" rel="nofollow">test/repo</a>] Issue opened: <a href="http://localhost:3000/test/repo/issues/2" rel="nofollow">#2 crash</a> by <a href="https://try.gitea.io/user1" rel="nofollow">user1</a>
+
+issue body`, pl.Message)
+
+ p.Action = api.HookIssueClosed
+ pl, err = tc.Issue(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo" rel="nofollow">test/repo</a>] Issue closed: <a href="http://localhost:3000/test/repo/issues/2" rel="nofollow">#2 crash</a> by <a href="https://try.gitea.io/user1" rel="nofollow">user1</a>`, pl.Message)
+ })
+
+ t.Run("IssueComment", func(t *testing.T) {
+ p := issueCommentTestPayload()
+
+ pl, err := tc.IssueComment(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo" rel="nofollow">test/repo</a>] New comment on issue <a href="http://localhost:3000/test/repo/issues/2" rel="nofollow">#2 crash</a> by <a href="https://try.gitea.io/user1" rel="nofollow">user1</a>
+more info needed`, pl.Message)
+ })
+
+ t.Run("PullRequest", func(t *testing.T) {
+ p := pullRequestTestPayload()
+
+ pl, err := tc.PullRequest(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo" rel="nofollow">test/repo</a>] Pull request opened: <a href="http://localhost:3000/test/repo/pulls/12" rel="nofollow">#12 Fix bug</a> by <a href="https://try.gitea.io/user1" rel="nofollow">user1</a>
+fixes bug #2`, pl.Message)
+ })
+
+ t.Run("PullRequestComment", func(t *testing.T) {
+ p := pullRequestCommentTestPayload()
+
+ pl, err := tc.IssueComment(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo" rel="nofollow">test/repo</a>] New comment on pull request <a href="http://localhost:3000/test/repo/pulls/12" rel="nofollow">#12 Fix bug</a> by <a href="https://try.gitea.io/user1" rel="nofollow">user1</a>
+changes requested`, pl.Message)
+ })
+
+ t.Run("Review", func(t *testing.T) {
+ p := pullRequestTestPayload()
+ p.Action = api.HookIssueReviewed
+
+ pl, err := tc.Review(p, webhook_module.HookEventPullRequestReviewApproved)
+ require.NoError(t, err)
+
+ assert.Equal(t, `[test/repo] Pull request review approved: #12 Fix bug
+good job`, pl.Message)
+ })
+
+ t.Run("Repository", func(t *testing.T) {
+ p := repositoryTestPayload()
+
+ pl, err := tc.Repository(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo" rel="nofollow">test/repo</a>] Repository created`, pl.Message)
+ })
+
+ t.Run("Package", func(t *testing.T) {
+ p := packageTestPayload()
+
+ pl, err := tc.Package(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, `Package created: <a href="http://localhost:3000/user1/-/packages/container/GiteaContainer/latest" rel="nofollow">GiteaContainer:latest</a> by <a href="https://try.gitea.io/user1" rel="nofollow">user1</a>`, pl.Message)
+ })
+
+ t.Run("Wiki", func(t *testing.T) {
+ p := wikiTestPayload()
+
+ p.Action = api.HookWikiCreated
+ pl, err := tc.Wiki(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo" rel="nofollow">test/repo</a>] New wiki page &#39;<a href="http://localhost:3000/test/repo/wiki/index" rel="nofollow">index</a>&#39; (Wiki change comment) by <a href="https://try.gitea.io/user1" rel="nofollow">user1</a>`, pl.Message)
+
+ p.Action = api.HookWikiEdited
+ pl, err = tc.Wiki(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo" rel="nofollow">test/repo</a>] Wiki page &#39;<a href="http://localhost:3000/test/repo/wiki/index" rel="nofollow">index</a>&#39; edited (Wiki change comment) by <a href="https://try.gitea.io/user1" rel="nofollow">user1</a>`, pl.Message)
+
+ p.Action = api.HookWikiDeleted
+ pl, err = tc.Wiki(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo" rel="nofollow">test/repo</a>] Wiki page &#39;<a href="http://localhost:3000/test/repo/wiki/index" rel="nofollow">index</a>&#39; deleted by <a href="https://try.gitea.io/user1" rel="nofollow">user1</a>`, pl.Message)
+ })
+
+ t.Run("Release", func(t *testing.T) {
+ p := pullReleaseTestPayload()
+
+ pl, err := tc.Release(p)
+ require.NoError(t, err)
+
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo" rel="nofollow">test/repo</a>] Release created: <a href="http://localhost:3000/test/repo/releases/tag/v1.0" rel="nofollow">v1.0</a> by <a href="https://try.gitea.io/user1" rel="nofollow">user1</a>`, pl.Message)
+ })
+}
+
+func TestTelegramJSONPayload(t *testing.T) {
+ p := pushTestPayload()
+ data, err := p.JSONPayload()
+ require.NoError(t, err)
+
+ hook := &webhook_model.Webhook{
+ RepoID: 3,
+ IsActive: true,
+ Type: webhook_module.TELEGRAM,
+ URL: "https://telegram.example.com/",
+ Meta: ``,
+ HTTPMethod: "POST",
+ }
+ task := &webhook_model.HookTask{
+ HookID: hook.ID,
+ EventType: webhook_module.HookEventPush,
+ PayloadContent: string(data),
+ PayloadVersion: 2,
+ }
+
+ req, reqBody, err := telegramHandler{}.NewRequest(context.Background(), hook, task)
+ require.NotNil(t, req)
+ require.NotNil(t, reqBody)
+ require.NoError(t, err)
+
+ assert.Equal(t, "POST", req.Method)
+ assert.Equal(t, "https://telegram.example.com/", req.URL.String())
+ assert.Equal(t, "sha256=", req.Header.Get("X-Hub-Signature-256"))
+ assert.Equal(t, "application/json", req.Header.Get("Content-Type"))
+ var body TelegramPayload
+ err = json.NewDecoder(req.Body).Decode(&body)
+ require.NoError(t, err)
+ assert.Equal(t, `[<a href="http://localhost:3000/test/repo" rel="nofollow">test/repo</a>:<a href="http://localhost:3000/test/repo/src/test" rel="nofollow">test</a>] 2 new commits
+[<a href="http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778" rel="nofollow">2020558</a>] commit message - user1
+[<a href="http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778" rel="nofollow">2020558</a>] commit message - user1`, body.Message)
+}
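
The assertions above pin down the sanitized HTML text; on the wire it becomes the three-field JSON object defined by TelegramPayload. A small self-contained sketch of the body Telegram receives (the struct is a local mirror so the example compiles on its own):

package main

import (
	"encoding/json"
	"fmt"
)

// Local mirror of TelegramPayload, kept here so the sketch is self-contained.
type telegramPayload struct {
	Message           string `json:"text"`
	ParseMode         string `json:"parse_mode"`
	DisableWebPreview bool   `json:"disable_web_page_preview"`
}

func main() {
	b, err := json.MarshalIndent(telegramPayload{
		Message:           `[<a href="http://localhost:3000/test/repo">test/repo</a>] branch test created`,
		ParseMode:         "HTML",
		DisableWebPreview: true,
	}, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b))
}
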
diff --git a/services/webhook/webhook.go b/services/webhook/webhook.go
new file mode 100644
index 0000000..1366ea8
--- /dev/null
+++ b/services/webhook/webhook.go
@@ -0,0 +1,270 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "html/template"
+ "net/http"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/queue"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/webhook/sourcehut"
+
+ "github.com/gobwas/glob"
+)
+
+type Handler interface {
+ Type() webhook_module.HookType
+ Metadata(*webhook_model.Webhook) any
+	// UnmarshalForm binds the request to a webhook-specific form using the provided bind function.
+	// If the form implements the [binding.Validator] interface, its Validate method will be called.
+ UnmarshalForm(bind func(form any)) forms.WebhookForm
+ NewRequest(context.Context, *webhook_model.Webhook, *webhook_model.HookTask) (req *http.Request, body []byte, err error)
+ Icon(size int) template.HTML
+}
+
+var webhookHandlers = []Handler{
+ defaultHandler{true},
+ defaultHandler{false},
+ gogsHandler{},
+
+ slackHandler{},
+ discordHandler{},
+ dingtalkHandler{},
+ telegramHandler{},
+ msteamsHandler{},
+ feishuHandler{},
+ matrixHandler{},
+ wechatworkHandler{},
+ packagistHandler{},
+ sourcehut.BuildsHandler{},
+}
+
+// GetWebhookHandler return the handler for a given webhook type (nil if not found)
+func GetWebhookHandler(name webhook_module.HookType) Handler {
+ for _, h := range webhookHandlers {
+ if h.Type() == name {
+ return h
+ }
+ }
+ return nil
+}
+
+// List provides a list of the supported webhooks
+func List() []Handler {
+ return webhookHandlers
+}
+
+// IsValidHookTaskType returns true if a webhook handler is registered for the given type
+func IsValidHookTaskType(name string) bool {
+ return GetWebhookHandler(name) != nil
+}
+
+// hookQueue is a global queue of web hooks
+var hookQueue *queue.WorkerPoolQueue[int64]
+
+// getPayloadBranch returns branch for hook event, if applicable.
+func getPayloadBranch(p api.Payloader) string {
+ var ref string
+ switch pp := p.(type) {
+ case *api.CreatePayload:
+ ref = pp.Ref
+ case *api.DeletePayload:
+ ref = pp.Ref
+ case *api.PushPayload:
+ ref = pp.Ref
+ }
+ if strings.HasPrefix(ref, git.BranchPrefix) {
+ return ref[len(git.BranchPrefix):]
+ }
+ return ""
+}
+
+// EventSource represents the source of a webhook action. Repository and/or Owner must be set.
+type EventSource struct {
+ Repository *repo_model.Repository
+ Owner *user_model.User
+}
+
+// handler delivers the hook tasks identified by the given IDs
+func handler(items ...int64) []int64 {
+ ctx := graceful.GetManager().HammerContext()
+
+ for _, taskID := range items {
+ task, err := webhook_model.GetHookTaskByID(ctx, taskID)
+ if err != nil {
+ if errors.Is(err, util.ErrNotExist) {
+ log.Warn("GetHookTaskByID[%d] warn: %v", taskID, err)
+ } else {
+ log.Error("GetHookTaskByID[%d] failed: %v", taskID, err)
+ }
+ continue
+ }
+
+ if task.IsDelivered {
+ // Already delivered in the meantime
+ log.Trace("Task[%d] has already been delivered", task.ID)
+ continue
+ }
+
+ if err := Deliver(ctx, task); err != nil {
+ log.Error("Unable to deliver webhook task[%d]: %v", task.ID, err)
+ }
+ }
+
+ return nil
+}
+
+func enqueueHookTask(taskID int64) error {
+ err := hookQueue.Push(taskID)
+ if err != nil && err != queue.ErrAlreadyInQueue {
+ return err
+ }
+ return nil
+}
+
+func checkBranch(w *webhook_model.Webhook, branch string) bool {
+ if w.BranchFilter == "" || w.BranchFilter == "*" {
+ return true
+ }
+
+ g, err := glob.Compile(w.BranchFilter)
+ if err != nil {
+ // should not really happen as BranchFilter is validated
+ log.Error("CheckBranch failed: %s", err)
+ return false
+ }
+
+ return g.Match(branch)
+}
+
+// PrepareWebhook creates a hook task and enqueues it for processing.
+// The payload is saved as-is. The adjustments depending on the webhook type happen
+// right before delivery, in the [Deliver] method.
+func PrepareWebhook(ctx context.Context, w *webhook_model.Webhook, event webhook_module.HookEventType, p api.Payloader) error {
+ // Skip sending if webhooks are disabled.
+ if setting.DisableWebhooks {
+ return nil
+ }
+
+ for _, e := range w.EventCheckers() {
+ if event == e.Type {
+ if !e.Has() {
+ return nil
+ }
+
+ break
+ }
+ }
+
+	// Avoid sending "0 new commits" to webhooks that are not integration-oriented (e.g. Slack, Discord).
+ // Integration webhooks (e.g. drone) still receive the required data.
+ if pushEvent, ok := p.(*api.PushPayload); ok &&
+ w.Type != webhook_module.FORGEJO && w.Type != webhook_module.GITEA && w.Type != webhook_module.GOGS &&
+ len(pushEvent.Commits) == 0 {
+ return nil
+ }
+
+ // If payload has no associated branch (e.g. it's a new tag, issue, etc.),
+ // branch filter has no effect.
+ if branch := getPayloadBranch(p); branch != "" {
+ if !checkBranch(w, branch) {
+ log.Info("Branch %q doesn't match branch filter %q, skipping", branch, w.BranchFilter)
+ return nil
+ }
+ }
+
+ payload, err := p.JSONPayload()
+ if err != nil {
+ return fmt.Errorf("JSONPayload for %s: %w", event, err)
+ }
+
+ task, err := webhook_model.CreateHookTask(ctx, &webhook_model.HookTask{
+ HookID: w.ID,
+ PayloadContent: string(payload),
+ EventType: event,
+ PayloadVersion: 2,
+ })
+ if err != nil {
+ return fmt.Errorf("CreateHookTask for %s: %w", event, err)
+ }
+
+ return enqueueHookTask(task.ID)
+}
+
+// PrepareWebhooks creates hook tasks for all active webhooks of the given event source and enqueues them for delivery.
+func PrepareWebhooks(ctx context.Context, source EventSource, event webhook_module.HookEventType, p api.Payloader) error {
+ owner := source.Owner
+
+ var ws []*webhook_model.Webhook
+
+ if source.Repository != nil {
+ repoHooks, err := db.Find[webhook_model.Webhook](ctx, webhook_model.ListWebhookOptions{
+ RepoID: source.Repository.ID,
+ IsActive: optional.Some(true),
+ })
+ if err != nil {
+ return fmt.Errorf("ListWebhooksByOpts: %w", err)
+ }
+ ws = append(ws, repoHooks...)
+
+ owner = source.Repository.MustOwner(ctx)
+ }
+
+ // append additional webhooks of a user or organization
+ if owner != nil {
+ ownerHooks, err := db.Find[webhook_model.Webhook](ctx, webhook_model.ListWebhookOptions{
+ OwnerID: owner.ID,
+ IsActive: optional.Some(true),
+ })
+ if err != nil {
+ return fmt.Errorf("ListWebhooksByOpts: %w", err)
+ }
+ ws = append(ws, ownerHooks...)
+ }
+
+ // Add any admin-defined system webhooks
+ systemHooks, err := webhook_model.GetSystemWebhooks(ctx, true)
+ if err != nil {
+ return fmt.Errorf("GetSystemWebhooks: %w", err)
+ }
+ ws = append(ws, systemHooks...)
+
+ if len(ws) == 0 {
+ return nil
+ }
+
+ for _, w := range ws {
+ if err := PrepareWebhook(ctx, w, event, p); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// ReplayHookTask replays a webhook task
+func ReplayHookTask(ctx context.Context, w *webhook_model.Webhook, uuid string) error {
+ task, err := webhook_model.ReplayHookTask(ctx, w.ID, uuid)
+ if err != nil {
+ return err
+ }
+
+ return enqueueHookTask(task.ID)
+}
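
checkBranch above delegates to github.com/gobwas/glob, so a branch filter is an ordinary glob pattern, and an empty filter or "*" matches everything. A minimal sketch of those semantics; the patterns and branch names are illustrative:

package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

func matchesBranchFilter(filter, branch string) bool {
	if filter == "" || filter == "*" {
		return true // no filter configured
	}
	g, err := glob.Compile(filter)
	if err != nil {
		return false // invalid patterns are normally rejected by form validation
	}
	return g.Match(branch)
}

func main() {
	fmt.Println(matchesBranchFilter("{master,feature*}", "feature/7791"))  // true
	fmt.Println(matchesBranchFilter("{master,feature*}", "fix_weird_bug")) // false
}
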
diff --git a/services/webhook/webhook_test.go b/services/webhook/webhook_test.go
new file mode 100644
index 0000000..816940a
--- /dev/null
+++ b/services/webhook/webhook_test.go
@@ -0,0 +1,100 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "fmt"
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ api "code.gitea.io/gitea/modules/structs"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func activateWebhook(t *testing.T, hookID int64) {
+ t.Helper()
+ updated, err := db.GetEngine(db.DefaultContext).ID(hookID).Cols("is_active").Update(webhook_model.Webhook{IsActive: true})
+ assert.Equal(t, int64(1), updated)
+ require.NoError(t, err)
+}
+
+func TestPrepareWebhooks(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ activateWebhook(t, 1)
+
+ hookTasks := []*webhook_model.HookTask{
+ {HookID: 1, EventType: webhook_module.HookEventPush},
+ }
+ for _, hookTask := range hookTasks {
+ unittest.AssertNotExistsBean(t, hookTask)
+ }
+ require.NoError(t, PrepareWebhooks(db.DefaultContext, EventSource{Repository: repo}, webhook_module.HookEventPush, &api.PushPayload{Commits: []*api.PayloadCommit{{}}}))
+ for _, hookTask := range hookTasks {
+ unittest.AssertExistsAndLoadBean(t, hookTask)
+ }
+}
+
+func eventType(p api.Payloader) webhook_module.HookEventType {
+ switch p.(type) {
+ case *api.CreatePayload:
+ return webhook_module.HookEventCreate
+ case *api.DeletePayload:
+ return webhook_module.HookEventDelete
+ case *api.PushPayload:
+ return webhook_module.HookEventPush
+ }
+ panic(fmt.Sprintf("no event type for payload %T", p))
+}
+
+func TestPrepareWebhooksBranchFilterMatch(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ // branch_filter: {master,feature*}
+ w := unittest.AssertExistsAndLoadBean(t, &webhook_model.Webhook{ID: 4})
+ activateWebhook(t, w.ID)
+
+ for _, p := range []api.Payloader{
+ &api.PushPayload{Ref: "refs/heads/feature/7791"},
+ &api.CreatePayload{Ref: "refs/heads/feature/7791"}, // branch creation
+ &api.DeletePayload{Ref: "refs/heads/feature/7791"}, // branch deletion
+ } {
+ t.Run(fmt.Sprintf("%T", p), func(t *testing.T) {
+ db.DeleteBeans(db.DefaultContext, webhook_model.HookTask{HookID: w.ID})
+ typ := eventType(p)
+ require.NoError(t, PrepareWebhook(db.DefaultContext, w, typ, p))
+ unittest.AssertExistsAndLoadBean(t, &webhook_model.HookTask{
+ HookID: w.ID,
+ EventType: typ,
+ })
+ })
+ }
+}
+
+func TestPrepareWebhooksBranchFilterNoMatch(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ // branch_filter: {master,feature*}
+ w := unittest.AssertExistsAndLoadBean(t, &webhook_model.Webhook{ID: 4})
+ activateWebhook(t, w.ID)
+
+ for _, p := range []api.Payloader{
+ &api.PushPayload{Ref: "refs/heads/fix_weird_bug"},
+ &api.CreatePayload{Ref: "refs/heads/fix_weird_bug"}, // branch creation
+ &api.DeletePayload{Ref: "refs/heads/fix_weird_bug"}, // branch deletion
+ } {
+ t.Run(fmt.Sprintf("%T", p), func(t *testing.T) {
+ db.DeleteBeans(db.DefaultContext, webhook_model.HookTask{HookID: w.ID})
+ require.NoError(t, PrepareWebhook(db.DefaultContext, w, eventType(p), p))
+ unittest.AssertNotExistsBean(t, &webhook_model.HookTask{HookID: w.ID})
+ })
+ }
+}
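
The branch-filter tests above only fire for payloads that carry a branch ref; getPayloadBranch in webhook.go returns an empty string for anything outside refs/heads/, in which case the filter is skipped. A tiny sketch of that ref handling, with branchPrefix mirroring git.BranchPrefix:

package main

import (
	"fmt"
	"strings"
)

const branchPrefix = "refs/heads/" // mirrors git.BranchPrefix

func payloadBranch(ref string) string {
	if strings.HasPrefix(ref, branchPrefix) {
		return ref[len(branchPrefix):]
	}
	return "" // tags and other refs: the branch filter does not apply
}

func main() {
	fmt.Println(payloadBranch("refs/heads/feature/7791")) // "feature/7791"
	fmt.Println(payloadBranch("refs/tags/v1.0"))          // ""
}
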
diff --git a/services/webhook/wechatwork.go b/services/webhook/wechatwork.go
new file mode 100644
index 0000000..87f8bb8
--- /dev/null
+++ b/services/webhook/wechatwork.go
@@ -0,0 +1,210 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+ "context"
+ "fmt"
+ "html/template"
+ "net/http"
+ "strings"
+
+ webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/git"
+ api "code.gitea.io/gitea/modules/structs"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/webhook/shared"
+)
+
+type wechatworkHandler struct{}
+
+func (wechatworkHandler) Type() webhook_module.HookType { return webhook_module.WECHATWORK }
+func (wechatworkHandler) Metadata(*webhook_model.Webhook) any { return nil }
+
+func (wechatworkHandler) Icon(size int) template.HTML {
+ return shared.ImgIcon("wechatwork.png", size)
+}
+
+func (wechatworkHandler) UnmarshalForm(bind func(any)) forms.WebhookForm {
+ var form struct {
+ forms.WebhookCoreForm
+ PayloadURL string `binding:"Required;ValidUrl"`
+ }
+ bind(&form)
+
+ return forms.WebhookForm{
+ WebhookCoreForm: form.WebhookCoreForm,
+ URL: form.PayloadURL,
+ ContentType: webhook_model.ContentTypeJSON,
+ Secret: "",
+ HTTPMethod: http.MethodPost,
+ Metadata: nil,
+ }
+}
+
+type (
+	// WechatworkPayload represents a Wechatwork webhook message payload
+ WechatworkPayload struct {
+ Msgtype string `json:"msgtype"`
+ Text struct {
+ Content string `json:"content"`
+ MentionedList []string `json:"mentioned_list"`
+ MentionedMobileList []string `json:"mentioned_mobile_list"`
+ } `json:"text"`
+ Markdown struct {
+ Content string `json:"content"`
+ } `json:"markdown"`
+ }
+)
+
+func newWechatworkMarkdownPayload(title string) WechatworkPayload {
+ return WechatworkPayload{
+ Msgtype: "markdown",
+ Markdown: struct {
+ Content string `json:"content"`
+ }{
+ Content: title,
+ },
+ }
+}
+
+// Create implements PayloadConvertor Create method
+func (wc wechatworkConvertor) Create(p *api.CreatePayload) (WechatworkPayload, error) {
+ // created tag/branch
+ refName := git.RefName(p.Ref).ShortName()
+ title := fmt.Sprintf("[%s] %s %s created", p.Repo.FullName, p.RefType, refName)
+
+ return newWechatworkMarkdownPayload(title), nil
+}
+
+// Delete implements PayloadConvertor Delete method
+func (wc wechatworkConvertor) Delete(p *api.DeletePayload) (WechatworkPayload, error) {
+	// deleted tag/branch
+ refName := git.RefName(p.Ref).ShortName()
+ title := fmt.Sprintf("[%s] %s %s deleted", p.Repo.FullName, p.RefType, refName)
+
+ return newWechatworkMarkdownPayload(title), nil
+}
+
+// Fork implements PayloadConvertor Fork method
+func (wc wechatworkConvertor) Fork(p *api.ForkPayload) (WechatworkPayload, error) {
+ title := fmt.Sprintf("%s is forked to %s", p.Forkee.FullName, p.Repo.FullName)
+
+ return newWechatworkMarkdownPayload(title), nil
+}
+
+// Push implements PayloadConvertor Push method
+func (wc wechatworkConvertor) Push(p *api.PushPayload) (WechatworkPayload, error) {
+	var (
+		branchName = git.RefName(p.Ref).ShortName()
+		commitDesc string
+	)
+	if p.TotalCommits == 1 {
+		commitDesc = "1 new commit"
+	} else {
+		commitDesc = fmt.Sprintf("%d new commits", p.TotalCommits)
+	}
+
+	title := fmt.Sprintf("# %s:%s <font color=\"warning\"> %s </font>", p.Repo.FullName, branchName, commitDesc)
+
+ var text string
+ // for each commit, generate attachment text
+ for i, commit := range p.Commits {
+ var authorName string
+ if commit.Author != nil {
+ authorName = "Author: " + commit.Author.Name
+ }
+
+ message := strings.ReplaceAll(commit.Message, "\n\n", "\r\n")
+ text += fmt.Sprintf(" > [%s](%s) \r\n ><font color=\"info\">%s</font> \n ><font color=\"warning\">%s</font>", commit.ID[:7], commit.URL,
+ message, authorName)
+
+ // add linebreak to each commit but the last
+ if i < len(p.Commits)-1 {
+ text += "\n"
+ }
+ }
+ return newWechatworkMarkdownPayload(title + "\r\n\r\n" + text), nil
+}
+
+// Issue implements PayloadConvertor Issue method
+func (wc wechatworkConvertor) Issue(p *api.IssuePayload) (WechatworkPayload, error) {
+ text, issueTitle, attachmentText, _ := getIssuesPayloadInfo(p, noneLinkFormatter, true)
+ var content string
+ content += fmt.Sprintf(" ><font color=\"info\">%s</font>\n >%s \n ><font color=\"warning\"> %s</font> \n [%s](%s)", text, attachmentText, issueTitle, p.Issue.HTMLURL, p.Issue.HTMLURL)
+
+ return newWechatworkMarkdownPayload(content), nil
+}
+
+// IssueComment implements PayloadConvertor IssueComment method
+func (wc wechatworkConvertor) IssueComment(p *api.IssueCommentPayload) (WechatworkPayload, error) {
+ text, issueTitle, _ := getIssueCommentPayloadInfo(p, noneLinkFormatter, true)
+ var content string
+ content += fmt.Sprintf(" ><font color=\"info\">%s</font>\n >%s \n ><font color=\"warning\">%s</font> \n [%s](%s)", text, p.Comment.Body, issueTitle, p.Comment.HTMLURL, p.Comment.HTMLURL)
+
+ return newWechatworkMarkdownPayload(content), nil
+}
+
+// PullRequest implements PayloadConvertor PullRequest method
+func (wc wechatworkConvertor) PullRequest(p *api.PullRequestPayload) (WechatworkPayload, error) {
+ text, issueTitle, attachmentText, _ := getPullRequestPayloadInfo(p, noneLinkFormatter, true)
+ pr := fmt.Sprintf("> <font color=\"info\"> %s </font> \r\n > <font color=\"comment\">%s </font> \r\n > <font color=\"comment\">%s </font> \r\n",
+ text, issueTitle, attachmentText)
+
+ return newWechatworkMarkdownPayload(pr), nil
+}
+
+// Review implements PayloadConvertor Review method
+func (wc wechatworkConvertor) Review(p *api.PullRequestPayload, event webhook_module.HookEventType) (WechatworkPayload, error) {
+ var text, title string
+ if p.Action == api.HookIssueReviewed {
+ action, err := parseHookPullRequestEventType(event)
+ if err != nil {
+ return WechatworkPayload{}, err
+ }
+ title = fmt.Sprintf("[%s] Pull request review %s : #%d %s", p.Repository.FullName, action, p.Index, p.PullRequest.Title)
+ text = p.Review.Content
+ }
+
+ return newWechatworkMarkdownPayload("# " + title + "\r\n\r\n >" + text), nil
+}
+
+// Repository implements PayloadConvertor Repository method
+func (wc wechatworkConvertor) Repository(p *api.RepositoryPayload) (WechatworkPayload, error) {
+ var title string
+ switch p.Action {
+ case api.HookRepoCreated:
+ title = fmt.Sprintf("[%s] Repository created", p.Repository.FullName)
+ return newWechatworkMarkdownPayload(title), nil
+ case api.HookRepoDeleted:
+ title = fmt.Sprintf("[%s] Repository deleted", p.Repository.FullName)
+ return newWechatworkMarkdownPayload(title), nil
+ }
+
+ return WechatworkPayload{}, nil
+}
+
+// Wiki implements PayloadConvertor Wiki method
+func (wc wechatworkConvertor) Wiki(p *api.WikiPayload) (WechatworkPayload, error) {
+ text, _, _ := getWikiPayloadInfo(p, noneLinkFormatter, true)
+
+ return newWechatworkMarkdownPayload(text), nil
+}
+
+// Release implements PayloadConvertor Release method
+func (wc wechatworkConvertor) Release(p *api.ReleasePayload) (WechatworkPayload, error) {
+ text, _ := getReleasePayloadInfo(p, noneLinkFormatter, true)
+
+ return newWechatworkMarkdownPayload(text), nil
+}
+
+func (wc wechatworkConvertor) Package(p *api.PackagePayload) (WechatworkPayload, error) {
+ text, _ := getPackagePayloadInfo(p, noneLinkFormatter, true)
+
+ return newWechatworkMarkdownPayload(text), nil
+}
+
+type wechatworkConvertor struct{}
+
+var _ shared.PayloadConvertor[WechatworkPayload] = wechatworkConvertor{}
+
+func (wechatworkHandler) NewRequest(ctx context.Context, w *webhook_model.Webhook, t *webhook_model.HookTask) (*http.Request, []byte, error) {
+ return shared.NewJSONRequest(wechatworkConvertor{}, w, t, true)
+}
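
Unlike Telegram's flat text payload, Wechatwork nests the content under a msgtype discriminator. A self-contained sketch of the markdown variant produced by newWechatworkMarkdownPayload; the struct is a local copy limited to the markdown branch:

package main

import (
	"encoding/json"
	"fmt"
)

// Local mirror of the markdown part of WechatworkPayload.
type wechatworkMarkdown struct {
	Msgtype  string `json:"msgtype"`
	Markdown struct {
		Content string `json:"content"`
	} `json:"markdown"`
}

func main() {
	p := wechatworkMarkdown{Msgtype: "markdown"}
	p.Markdown.Content = "[test/repo] branch test created"
	b, err := json.Marshal(p)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b))
	// {"msgtype":"markdown","markdown":{"content":"[test/repo] branch test created"}}
}
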
diff --git a/services/wiki/wiki.go b/services/wiki/wiki.go
new file mode 100644
index 0000000..aba1115
--- /dev/null
+++ b/services/wiki/wiki.go
@@ -0,0 +1,449 @@
+// Copyright 2015 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// Copyright 2024 The Forgejo Authors c/o Codeberg e.V.. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package wiki
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "strings"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ system_model "code.gitea.io/gitea/models/system"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/sync"
+ asymkey_service "code.gitea.io/gitea/services/asymkey"
+ repo_service "code.gitea.io/gitea/services/repository"
+)
+
+// TODO: use clustered lock (unique queue? or *abuse* cache)
+var wikiWorkingPool = sync.NewExclusivePool()
+
+const (
+ DefaultRemote = "origin"
+)
+
+// InitWiki initializes a wiki for the repository;
+// it does nothing when the repository already has a wiki.
+func InitWiki(ctx context.Context, repo *repo_model.Repository) error {
+ if repo.HasWiki() {
+ return nil
+ }
+
+ branch := repo.GetWikiBranchName()
+
+ if err := git.InitRepository(ctx, repo.WikiPath(), true, repo.ObjectFormatName); err != nil {
+ return fmt.Errorf("InitRepository: %w", err)
+ } else if err = repo_module.CreateDelegateHooks(repo.WikiPath()); err != nil {
+ return fmt.Errorf("createDelegateHooks: %w", err)
+ } else if _, _, err = git.NewCommand(ctx, "symbolic-ref", "HEAD").AddDynamicArguments(git.BranchPrefix + branch).RunStdString(&git.RunOpts{Dir: repo.WikiPath()}); err != nil {
+ return fmt.Errorf("unable to set default wiki branch to %s: %w", branch, err)
+ }
+ return nil
+}
+
+// NormalizeWikiBranch renames a repository wiki's branch to the given branch name (typically setting.Repository.DefaultBranch).
+func NormalizeWikiBranch(ctx context.Context, repo *repo_model.Repository, to string) error {
+ from := repo.GetWikiBranchName()
+
+ if err := repo.MustNotBeArchived(); err != nil {
+ return err
+ }
+
+ updateDB := func() error {
+ repo.WikiBranch = to
+ return repo_model.UpdateRepositoryCols(ctx, repo, "wiki_branch")
+ }
+
+ if !repo.HasWiki() {
+ return updateDB()
+ }
+
+ if from == to {
+ return nil
+ }
+
+ gitRepo, err := git.OpenRepository(ctx, repo.WikiPath())
+ if err != nil {
+ return err
+ }
+ defer gitRepo.Close()
+
+ if gitRepo.IsBranchExist(to) {
+ return nil
+ }
+
+ if !gitRepo.IsBranchExist(from) {
+ return nil
+ }
+
+ if err := gitRepo.RenameBranch(from, to); err != nil {
+ return err
+ }
+
+ if err := gitrepo.SetDefaultBranch(ctx, repo, to); err != nil {
+ return err
+ }
+
+ return updateDB()
+}
+
+// prepareGitPath tries to find a suitable file path with file name for the given raw wiki name.
+// It returns: existence, the prepared file path with name, and an error.
+func prepareGitPath(gitRepo *git.Repository, branch string, wikiPath WebPath) (bool, string, error) {
+ unescaped := string(wikiPath) + ".md"
+ gitPath := WebPathToGitPath(wikiPath)
+
+ // Look for both files
+ filesInIndex, err := gitRepo.LsTree(branch, unescaped, gitPath)
+ if err != nil {
+ if strings.Contains(err.Error(), "Not a valid object name "+branch) {
+ return false, gitPath, nil
+ }
+ log.Error("%v", err)
+ return false, gitPath, err
+ }
+
+ foundEscaped := false
+ for _, filename := range filesInIndex {
+ switch filename {
+ case unescaped:
+ // if we find the unescaped file return it
+ return true, unescaped, nil
+ case gitPath:
+ foundEscaped = true
+ }
+ }
+
+	// If not, return whether the escaped file exists and the escaped filename, to keep backwards compatibility.
+ return foundEscaped, gitPath, nil
+}
+
+// updateWikiPage adds a new page or edits an existing page in repository wiki.
+func updateWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, oldWikiName, newWikiName WebPath, content, message string, isNew bool) (err error) {
+ err = repo.MustNotBeArchived()
+ if err != nil {
+ return err
+ }
+
+ if err = validateWebPath(newWikiName); err != nil {
+ return err
+ }
+ wikiWorkingPool.CheckIn(fmt.Sprint(repo.ID))
+ defer wikiWorkingPool.CheckOut(fmt.Sprint(repo.ID))
+
+ if err = InitWiki(ctx, repo); err != nil {
+ return fmt.Errorf("InitWiki: %w", err)
+ }
+
+ hasMasterBranch := git.IsBranchExist(ctx, repo.WikiPath(), repo.GetWikiBranchName())
+
+ basePath, err := repo_module.CreateTemporaryPath("update-wiki")
+ if err != nil {
+ return err
+ }
+ defer func() {
+ if err := repo_module.RemoveTemporaryPath(basePath); err != nil {
+			log.Error("updateWikiPage: RemoveTemporaryPath: %s", err)
+ }
+ }()
+
+ cloneOpts := git.CloneRepoOptions{
+ Bare: true,
+ Shared: true,
+ }
+
+ if hasMasterBranch {
+ cloneOpts.Branch = repo.GetWikiBranchName()
+ }
+
+ if err := git.Clone(ctx, repo.WikiPath(), basePath, cloneOpts); err != nil {
+ log.Error("Failed to clone repository: %s (%v)", repo.FullName(), err)
+ return fmt.Errorf("failed to clone repository: %s (%w)", repo.FullName(), err)
+ }
+
+ gitRepo, err := git.OpenRepository(ctx, basePath)
+ if err != nil {
+ log.Error("Unable to open temporary repository: %s (%v)", basePath, err)
+ return fmt.Errorf("failed to open new temporary repository in: %s %w", basePath, err)
+ }
+ defer gitRepo.Close()
+
+ if hasMasterBranch {
+ if err := gitRepo.ReadTreeToIndex("HEAD"); err != nil {
+ log.Error("Unable to read HEAD tree to index in: %s %v", basePath, err)
+			return fmt.Errorf("unable to read HEAD tree to index in: %s %w", basePath, err)
+ }
+ }
+
+ isWikiExist, newWikiPath, err := prepareGitPath(gitRepo, repo.GetWikiBranchName(), newWikiName)
+ if err != nil {
+ return err
+ }
+
+ if isNew {
+ if isWikiExist {
+ return repo_model.ErrWikiAlreadyExist{
+ Title: newWikiPath,
+ }
+ }
+ } else {
+ // avoid check existence again if wiki name is not changed since gitRepo.LsFiles(...) is not free.
+ isOldWikiExist := true
+ oldWikiPath := newWikiPath
+ if oldWikiName != newWikiName {
+ isOldWikiExist, oldWikiPath, err = prepareGitPath(gitRepo, repo.GetWikiBranchName(), oldWikiName)
+ if err != nil {
+ return err
+ }
+ }
+
+ if isOldWikiExist {
+ err := gitRepo.RemoveFilesFromIndex(oldWikiPath)
+ if err != nil {
+ log.Error("RemoveFilesFromIndex failed: %v", err)
+ return err
+ }
+ }
+ }
+
+ // FIXME: The wiki doesn't have lfs support at present - if this changes need to check attributes here
+
+ objectHash, err := gitRepo.HashObject(strings.NewReader(content))
+ if err != nil {
+ log.Error("HashObject failed: %v", err)
+ return err
+ }
+
+ if err := gitRepo.AddObjectToIndex("100644", objectHash, newWikiPath); err != nil {
+ log.Error("AddObjectToIndex failed: %v", err)
+ return err
+ }
+
+ tree, err := gitRepo.WriteTree()
+ if err != nil {
+ log.Error("WriteTree failed: %v", err)
+ return err
+ }
+
+ commitTreeOpts := git.CommitTreeOpts{
+ Message: message,
+ }
+
+ committer := doer.NewGitSig()
+
+ sign, signingKey, signer, _ := asymkey_service.SignWikiCommit(ctx, repo, doer)
+ if sign {
+ commitTreeOpts.KeyID = signingKey
+ if repo.GetTrustModel() == repo_model.CommitterTrustModel || repo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel {
+ committer = signer
+ }
+ } else {
+ commitTreeOpts.NoGPGSign = true
+ }
+ if hasMasterBranch {
+ commitTreeOpts.Parents = []string{"HEAD"}
+ }
+
+ commitHash, err := gitRepo.CommitTree(doer.NewGitSig(), committer, tree, commitTreeOpts)
+ if err != nil {
+ log.Error("CommitTree failed: %v", err)
+ return err
+ }
+
+ if err := git.Push(gitRepo.Ctx, basePath, git.PushOptions{
+ Remote: DefaultRemote,
+ Branch: fmt.Sprintf("%s:%s%s", commitHash.String(), git.BranchPrefix, repo.GetWikiBranchName()),
+ Env: repo_module.FullPushingEnvironment(
+ doer,
+ doer,
+ repo,
+ repo.Name+".wiki",
+ 0,
+ ),
+ }); err != nil {
+ log.Error("Push failed: %v", err)
+ if git.IsErrPushOutOfDate(err) || git.IsErrPushRejected(err) {
+ return err
+ }
+ return fmt.Errorf("failed to push: %w", err)
+ }
+
+ return nil
+}
+
+// AddWikiPage adds a new wiki page with a given wikiPath.
+func AddWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, wikiName WebPath, content, message string) error {
+ return updateWikiPage(ctx, doer, repo, "", wikiName, content, message, true)
+}
+
+// EditWikiPage updates a wiki page identified by its wikiPath,
+// optionally also changing wikiPath.
+func EditWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, oldWikiName, newWikiName WebPath, content, message string) error {
+ return updateWikiPage(ctx, doer, repo, oldWikiName, newWikiName, content, message, false)
+}
+
+// DeleteWikiPage deletes a wiki page identified by its path.
+func DeleteWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, wikiName WebPath) (err error) {
+ err = repo.MustNotBeArchived()
+ if err != nil {
+ return err
+ }
+
+ wikiWorkingPool.CheckIn(fmt.Sprint(repo.ID))
+ defer wikiWorkingPool.CheckOut(fmt.Sprint(repo.ID))
+
+ if err = InitWiki(ctx, repo); err != nil {
+ return fmt.Errorf("InitWiki: %w", err)
+ }
+
+ basePath, err := repo_module.CreateTemporaryPath("update-wiki")
+ if err != nil {
+ return err
+ }
+ defer func() {
+ if err := repo_module.RemoveTemporaryPath(basePath); err != nil {
+			log.Error("DeleteWikiPage: RemoveTemporaryPath: %s", err)
+ }
+ }()
+
+ if err := git.Clone(ctx, repo.WikiPath(), basePath, git.CloneRepoOptions{
+ Bare: true,
+ Shared: true,
+ Branch: repo.GetWikiBranchName(),
+ }); err != nil {
+ log.Error("Failed to clone repository: %s (%v)", repo.FullName(), err)
+ return fmt.Errorf("failed to clone repository: %s (%w)", repo.FullName(), err)
+ }
+
+ gitRepo, err := git.OpenRepository(ctx, basePath)
+ if err != nil {
+ log.Error("Unable to open temporary repository: %s (%v)", basePath, err)
+ return fmt.Errorf("failed to open new temporary repository in: %s %w", basePath, err)
+ }
+ defer gitRepo.Close()
+
+ if err := gitRepo.ReadTreeToIndex("HEAD"); err != nil {
+ log.Error("Unable to read HEAD tree to index in: %s %v", basePath, err)
+ return fmt.Errorf("unable to read HEAD tree to index in: %s %w", basePath, err)
+ }
+
+ found, wikiPath, err := prepareGitPath(gitRepo, repo.GetWikiBranchName(), wikiName)
+ if err != nil {
+ return err
+ }
+ if found {
+ err := gitRepo.RemoveFilesFromIndex(wikiPath)
+ if err != nil {
+ return err
+ }
+ } else {
+ return os.ErrNotExist
+ }
+
+ // FIXME: The wiki doesn't have lfs support at present - if this changes need to check attributes here
+
+ tree, err := gitRepo.WriteTree()
+ if err != nil {
+ return err
+ }
+ message := fmt.Sprintf("Delete page %q", wikiName)
+ commitTreeOpts := git.CommitTreeOpts{
+ Message: message,
+ Parents: []string{"HEAD"},
+ }
+
+ committer := doer.NewGitSig()
+
+ sign, signingKey, signer, _ := asymkey_service.SignWikiCommit(ctx, repo, doer)
+ if sign {
+ commitTreeOpts.KeyID = signingKey
+ if repo.GetTrustModel() == repo_model.CommitterTrustModel || repo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel {
+ committer = signer
+ }
+ } else {
+ commitTreeOpts.NoGPGSign = true
+ }
+
+ commitHash, err := gitRepo.CommitTree(doer.NewGitSig(), committer, tree, commitTreeOpts)
+ if err != nil {
+ return err
+ }
+
+ if err := git.Push(gitRepo.Ctx, basePath, git.PushOptions{
+ Remote: DefaultRemote,
+ Branch: fmt.Sprintf("%s:%s%s", commitHash.String(), git.BranchPrefix, repo.GetWikiBranchName()),
+ Env: repo_module.FullPushingEnvironment(
+ doer,
+ doer,
+ repo,
+ repo.Name+".wiki",
+ 0,
+ ),
+ }); err != nil {
+ if git.IsErrPushOutOfDate(err) || git.IsErrPushRejected(err) {
+ return err
+ }
+ return fmt.Errorf("Push: %w", err)
+ }
+
+ return nil
+}
+
+// DeleteWiki removes the wiki unit from the repository and deletes the local copy of the wiki repository.
+func DeleteWiki(ctx context.Context, repo *repo_model.Repository) error {
+ if err := repo_service.UpdateRepositoryUnits(ctx, repo, nil, []unit.Type{unit.TypeWiki}); err != nil {
+ return err
+ }
+
+ system_model.RemoveAllWithNotice(ctx, "Delete repository wiki", repo.WikiPath())
+ return nil
+}
+
+type SearchContentsResult struct {
+ *git.GrepResult
+ Title string
+}
+
+func SearchWikiContents(ctx context.Context, repo *repo_model.Repository, keyword string) ([]SearchContentsResult, error) {
+ gitRepo, err := git.OpenRepository(ctx, repo.WikiPath())
+ if err != nil {
+ return nil, err
+ }
+ defer gitRepo.Close()
+
+ grepRes, err := git.GrepSearch(ctx, gitRepo, keyword, git.GrepOptions{
+ ContextLineNumber: 0,
+ IsFuzzy: true,
+ RefName: repo.GetWikiBranchName(),
+ MaxResultLimit: 10,
+ MatchesPerFile: 3,
+ })
+ if err != nil {
+ return nil, err
+ }
+
+ res := make([]SearchContentsResult, 0, len(grepRes))
+ for _, entry := range grepRes {
+ wp, err := GitPathToWebPath(entry.Filename)
+ if err != nil {
+ return nil, err
+ }
+ _, title := WebPathToUserTitle(wp)
+
+ res = append(res, SearchContentsResult{
+ GrepResult: entry,
+ Title: title,
+ })
+ }
+
+ return res, nil
+}
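
updateWikiPage and DeleteWikiPage never check out a working tree: they operate on a bare temporary clone through index plumbing and push the resulting commit hash straight to the wiki branch. A hedged sketch of the same sequence expressed as raw git commands; the directory, page name, and message are hypothetical:

package main

import (
	"fmt"
	"os/exec"
	"strings"
)

// run executes a git command in dir, optionally feeding stdin, and returns
// its trimmed stdout; a sketch, so errors simply panic.
func run(dir, stdin string, args ...string) string {
	cmd := exec.Command("git", args...)
	cmd.Dir = dir
	if stdin != "" {
		cmd.Stdin = strings.NewReader(stdin)
	}
	out, err := cmd.Output()
	if err != nil {
		panic(err)
	}
	return strings.TrimSpace(string(out))
}

func main() {
	dir := "/tmp/update-wiki" // hypothetical bare clone of the wiki repository

	run(dir, "", "read-tree", "HEAD")                                                   // ReadTreeToIndex
	blob := run(dir, "This is the wiki content", "hash-object", "-w", "--stdin")        // HashObject
	run(dir, "", "update-index", "--add", "--cacheinfo", "100644", blob, "New-Page.md") // AddObjectToIndex
	tree := run(dir, "", "write-tree")                                                  // WriteTree
	commit := run(dir, "", "commit-tree", tree, "-p", "HEAD", "-m", "Add page")         // CommitTree

	// The commit hash is then pushed directly to the wiki branch:
	fmt.Printf("git push origin %s:refs/heads/master\n", commit)
}
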
diff --git a/services/wiki/wiki_path.go b/services/wiki/wiki_path.go
new file mode 100644
index 0000000..74c7064
--- /dev/null
+++ b/services/wiki/wiki_path.go
@@ -0,0 +1,172 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package wiki
+
+import (
+ "net/url"
+ "path"
+ "strings"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/git"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/convert"
+)
+
+// To define the wiki related concepts:
+// * Display Segment: the text the user sees for a wiki page (i.e. the title):
+// - "Home Page"
+// - "100% Free"
+// - "2000-01-02 meeting"
+// * Web Path:
+// - "/wiki/Home-Page"
+// - "/wiki/100%25+Free"
+// - "/wiki/2000-01-02+meeting.-"
+// - If a segment has a suffix "DashMarker(.-)", it means that there is no dash-space conversion for this segment.
+//   - If a WebPath is a "*.md" pattern, then use the unescaped value directly as the GitPath, so that users can access the raw file.
+// * Git Path (only space doesn't need to be escaped):
+// - "/.wiki.git/Home-Page.md"
+// - "/.wiki.git/100%25 Free.md"
+// - "/.wiki.git/2000-01-02 meeting.-.md"
+// TODO: support subdirectory in the future
+//
+// Although this package now has the ability to support subdirectories, the route package doesn't:
+// * Double-escaping problem: the URL "/wiki/abc%2Fdef" becomes "/wiki/abc/def" by ctx.Params, which is incorrect
+// * This problem should have been 99% fixed, but it needs more tests.
+// * The old wiki code's behavior is always using %2F, instead of subdirectory, so there are a lot of legacy "%2F" files in user wikis.
+
+type WebPath string
+
+var reservedWikiNames = []string{"_pages", "_new", "_edit", "raw"}
+
+func validateWebPath(name WebPath) error {
+ for _, s := range WebPathSegments(name) {
+ if util.SliceContainsString(reservedWikiNames, s) {
+ return repo_model.ErrWikiReservedName{Title: s}
+ }
+ }
+ return nil
+}
+
+func hasDashMarker(s string) bool {
+ return strings.HasSuffix(s, ".-")
+}
+
+func removeDashMarker(s string) string {
+ return strings.TrimSuffix(s, ".-")
+}
+
+func addDashMarker(s string) string {
+ return s + ".-"
+}
+
+func unescapeSegment(s string) (string, error) {
+ if hasDashMarker(s) {
+ s = removeDashMarker(s)
+ } else {
+ s = strings.ReplaceAll(s, "-", " ")
+ }
+ unescaped, err := url.QueryUnescape(s)
+ if err != nil {
+ return s, err // un-escaping failed, but it's still safe to return the original string, because it is only a title for end users
+ }
+ return unescaped, nil
+}
+
+func escapeSegToWeb(s string, hadDashMarker bool) string {
+ if hadDashMarker || strings.Contains(s, "-") || strings.HasSuffix(s, ".md") {
+ s = addDashMarker(s)
+ } else {
+ s = strings.ReplaceAll(s, " ", "-")
+ }
+ s = url.QueryEscape(s)
+ return s
+}
+
+func WebPathSegments(s WebPath) []string {
+ a := strings.Split(string(s), "/")
+ for i := range a {
+ a[i], _ = unescapeSegment(a[i])
+ }
+ return a
+}
+
+func WebPathToGitPath(s WebPath) string {
+ if strings.HasSuffix(string(s), ".md") {
+ ret, _ := url.PathUnescape(string(s))
+ return util.PathJoinRelX(ret)
+ }
+
+ a := strings.Split(string(s), "/")
+ for i := range a {
+ shouldAddDashMarker := hasDashMarker(a[i])
+ a[i], _ = unescapeSegment(a[i])
+ a[i] = escapeSegToWeb(a[i], shouldAddDashMarker)
+ a[i] = strings.ReplaceAll(a[i], "%20", " ") // space is safe to be kept in git path
+ a[i] = strings.ReplaceAll(a[i], "+", " ")
+ }
+ return strings.Join(a, "/") + ".md"
+}
+
+func GitPathToWebPath(s string) (wp WebPath, err error) {
+ if !strings.HasSuffix(s, ".md") {
+ return "", repo_model.ErrWikiInvalidFileName{FileName: s}
+ }
+ s = strings.TrimSuffix(s, ".md")
+ a := strings.Split(s, "/")
+ for i := range a {
+ shouldAddDashMarker := hasDashMarker(a[i])
+ if a[i], err = unescapeSegment(a[i]); err != nil {
+ return "", err
+ }
+ a[i] = escapeSegToWeb(a[i], shouldAddDashMarker)
+ }
+ return WebPath(strings.Join(a, "/")), nil
+}
+
+func WebPathToUserTitle(s WebPath) (dir, display string) {
+ dir = path.Dir(string(s))
+ display = path.Base(string(s))
+ if strings.HasSuffix(display, ".md") {
+ display = strings.TrimSuffix(display, ".md")
+ display, _ = url.PathUnescape(display)
+ }
+ display, _ = unescapeSegment(display)
+ return dir, display
+}
+
+func WebPathToURLPath(s WebPath) string {
+ return string(s)
+}
+
+func WebPathFromRequest(s string) WebPath {
+ s = util.PathJoinRelX(s)
+ // The old wiki code's behavior is always using %2F, instead of subdirectory.
+ s = strings.ReplaceAll(s, "/", "%2F")
+ return WebPath(s)
+}
+
+func UserTitleToWebPath(base, title string) WebPath {
+ // TODO: no support for subdirectory, because the old wiki code's behavior is always using %2F, instead of subdirectory.
+ // So we do not add the support for writing slashes in title at the moment.
+ title = strings.TrimSpace(title)
+ title = util.PathJoinRelX(base, escapeSegToWeb(title, false))
+ if title == "" || title == "." {
+ title = "unnamed"
+ }
+ return WebPath(title)
+}
+
+// ToWikiPageMetaData converts meta information to a WikiPageMetaData
+func ToWikiPageMetaData(wikiName WebPath, lastCommit *git.Commit, repo *repo_model.Repository) *api.WikiPageMetaData {
+ subURL := string(wikiName)
+ _, title := WebPathToUserTitle(wikiName)
+ return &api.WikiPageMetaData{
+ Title: title,
+ HTMLURL: util.URLJoin(repo.HTMLURL(), "wiki", subURL),
+ SubURL: subURL,
+ LastCommit: convert.ToWikiCommit(lastCommit),
+ }
+}
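
The conversions above are designed to round-trip: user title to WebPath to GitPath and back must be stable, which TestUserWebGitPathConsistency in wiki_test.go fuzzes. A short example written as if it lived in this package's tests; the expected values follow the escaping rules documented at the top of this file:

package wiki

import "testing"

func TestTitleWebGitExamples(t *testing.T) {
	// A "-" in the title forces the ".-" dash marker, and the space becomes "+".
	web := UserTitleToWebPath("", "2000-01-02 meeting")
	if web != WebPath("2000-01-02+meeting.-") {
		t.Fatalf("unexpected web path %q", web)
	}
	// In the git path only the space stays unescaped.
	if gitPath := WebPathToGitPath(web); gitPath != "2000-01-02 meeting.-.md" {
		t.Fatalf("unexpected git path %q", gitPath)
	}
	// The display title recovers the original text.
	if _, title := WebPathToUserTitle(web); title != "2000-01-02 meeting" {
		t.Fatalf("unexpected display title %q", title)
	}
}
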
diff --git a/services/wiki/wiki_test.go b/services/wiki/wiki_test.go
new file mode 100644
index 0000000..efcc13d
--- /dev/null
+++ b/services/wiki/wiki_test.go
@@ -0,0 +1,327 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package wiki
+
+import (
+ "math/rand"
+ "strings"
+ "testing"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+
+ _ "code.gitea.io/gitea/models/actions"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
+
+func TestWebPathSegments(t *testing.T) {
+ a := WebPathSegments("a%2Fa/b+c/d-e/f-g.-")
+ assert.EqualValues(t, []string{"a/a", "b c", "d e", "f-g"}, a)
+}
+
+func TestUserTitleToWebPath(t *testing.T) {
+ type test struct {
+ Expected string
+ UserTitle string
+ }
+ for _, test := range []test{
+ {"unnamed", ""},
+ {"unnamed", "."},
+ {"unnamed", ".."},
+ {"wiki-name", "wiki name"},
+ {"title.md.-", "title.md"},
+ {"wiki-name.-", "wiki-name"},
+ {"the+wiki-name.-", "the wiki-name"},
+ {"a%2Fb", "a/b"},
+ {"a%25b", "a%b"},
+ } {
+ assert.EqualValues(t, test.Expected, UserTitleToWebPath("", test.UserTitle))
+ }
+}
+
+func TestWebPathToDisplayName(t *testing.T) {
+ type test struct {
+ Expected string
+ WebPath WebPath
+ }
+ for _, test := range []test{
+ {"wiki name", "wiki-name"},
+ {"wiki-name", "wiki-name.-"},
+ {"name with / slash", "name-with %2F slash"},
+ {"name with % percent", "name-with %25 percent"},
+ {"2000-01-02 meeting", "2000-01-02+meeting.-.md"},
+ {"a b", "a%20b.md"},
+ } {
+ _, displayName := WebPathToUserTitle(test.WebPath)
+ assert.EqualValues(t, test.Expected, displayName)
+ }
+}
+
+func TestWebPathToGitPath(t *testing.T) {
+ type test struct {
+ Expected string
+ WikiName WebPath
+ }
+ for _, test := range []test{
+ {"wiki-name.md", "wiki%20name"},
+ {"wiki-name.md", "wiki+name"},
+ {"wiki name.md", "wiki%20name.md"},
+ {"wiki%20name.md", "wiki%2520name.md"},
+ {"2000-01-02-meeting.md", "2000-01-02+meeting"},
+ {"2000-01-02 meeting.-.md", "2000-01-02%20meeting.-"},
+ } {
+ assert.EqualValues(t, test.Expected, WebPathToGitPath(test.WikiName))
+ }
+}
+
+func TestGitPathToWebPath(t *testing.T) {
+ type test struct {
+ Expected string
+ Filename string
+ }
+ for _, test := range []test{
+ {"hello-world", "hello-world.md"}, // this shouldn't happen, because it should always have a ".-" suffix
+ {"hello-world", "hello world.md"},
+ {"hello-world.-", "hello-world.-.md"},
+ {"hello+world.-", "hello world.-.md"},
+ {"symbols-%2F", "symbols %2F.md"},
+ } {
+ name, err := GitPathToWebPath(test.Filename)
+ require.NoError(t, err)
+ assert.EqualValues(t, test.Expected, name)
+ }
+ for _, badFilename := range []string{
+ "nofileextension",
+ "wrongfileextension.txt",
+ } {
+ _, err := GitPathToWebPath(badFilename)
+ require.Error(t, err)
+ assert.True(t, repo_model.IsErrWikiInvalidFileName(err))
+ }
+ _, err := GitPathToWebPath("badescaping%%.md")
+ require.Error(t, err)
+ assert.False(t, repo_model.IsErrWikiInvalidFileName(err))
+}
+
+func TestUserWebGitPathConsistency(t *testing.T) {
+ maxLen := 20
+ b := make([]byte, maxLen)
+ for i := 0; i < 1000; i++ {
+ l := rand.Intn(maxLen)
+ for j := 0; j < l; j++ {
+ r := rand.Intn(0x80-0x20) + 0x20
+ b[j] = byte(r)
+ }
+
+ userTitle := strings.TrimSpace(string(b[:l]))
+ if userTitle == "" || userTitle == "." || userTitle == ".." {
+ continue
+ }
+ webPath := UserTitleToWebPath("", userTitle)
+ gitPath := WebPathToGitPath(webPath)
+
+ webPath1, _ := GitPathToWebPath(gitPath)
+ _, userTitle1 := WebPathToUserTitle(webPath1)
+ gitPath1 := WebPathToGitPath(webPath1)
+
+ assert.EqualValues(t, userTitle, userTitle1, "UserTitle for userTitle: %q", userTitle)
+ assert.EqualValues(t, webPath, webPath1, "WebPath for userTitle: %q", userTitle)
+ assert.EqualValues(t, gitPath, gitPath1, "GitPath for userTitle: %q", userTitle)
+ }
+}
+
+func TestRepository_InitWiki(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ // repo1 already has a wiki
+ repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ require.NoError(t, InitWiki(git.DefaultContext, repo1))
+
+ // repo2 does not already have a wiki
+ repo2 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2})
+ require.NoError(t, InitWiki(git.DefaultContext, repo2))
+ assert.True(t, repo2.HasWiki())
+}
+
+func TestRepository_AddWikiPage(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ const wikiContent = "This is the wiki content"
+ const commitMsg = "Commit message"
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ for _, userTitle := range []string{
+ "Another page",
+ "Here's a <tag> and a/slash",
+ } {
+ t.Run("test wiki exist: "+userTitle, func(t *testing.T) {
+ webPath := UserTitleToWebPath("", userTitle)
+ require.NoError(t, AddWikiPage(git.DefaultContext, doer, repo, webPath, wikiContent, commitMsg))
+ // Now need to show that the page has been added:
+ gitRepo, err := gitrepo.OpenWikiRepository(git.DefaultContext, repo)
+ require.NoError(t, err)
+
+ defer gitRepo.Close()
+ masterTree, err := gitRepo.GetTree("master")
+ require.NoError(t, err)
+ gitPath := WebPathToGitPath(webPath)
+ entry, err := masterTree.GetTreeEntryByPath(gitPath)
+ require.NoError(t, err)
+ assert.EqualValues(t, gitPath, entry.Name(), "%s not added correctly", userTitle)
+ })
+ }
+
+ t.Run("check wiki already exist", func(t *testing.T) {
+ t.Parallel()
+ // test for already-existing wiki name
+ err := AddWikiPage(git.DefaultContext, doer, repo, "Home", wikiContent, commitMsg)
+ require.Error(t, err)
+ assert.True(t, repo_model.IsErrWikiAlreadyExist(err))
+ })
+
+ t.Run("check wiki reserved name", func(t *testing.T) {
+ t.Parallel()
+ // test for reserved wiki name
+ err := AddWikiPage(git.DefaultContext, doer, repo, "_edit", wikiContent, commitMsg)
+ require.Error(t, err)
+ assert.True(t, repo_model.IsErrWikiReservedName(err))
+ })
+}
+
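+// TestRepository_EditWikiPage renames the "Home" page to various new titles
+// and verifies that the new file exists in the master tree and that the old
+// "Home.md" is gone whenever the name actually changed.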
+func TestRepository_EditWikiPage(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ const newWikiContent = "This is the new content"
+ const commitMsg = "Commit message"
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ for _, newWikiName := range []string{
+ "Home", // same name as before
+ "New home",
+ "New/name/with/slashes",
+ } {
+ webPath := UserTitleToWebPath("", newWikiName)
+ unittest.PrepareTestEnv(t)
+ require.NoError(t, EditWikiPage(git.DefaultContext, doer, repo, "Home", webPath, newWikiContent, commitMsg))
+
+ // Now verify that the page has been edited:
+ gitRepo, err := gitrepo.OpenWikiRepository(git.DefaultContext, repo)
+ require.NoError(t, err)
+ masterTree, err := gitRepo.GetTree("master")
+ require.NoError(t, err)
+ gitPath := WebPathToGitPath(webPath)
+ entry, err := masterTree.GetTreeEntryByPath(gitPath)
+ require.NoError(t, err)
+ assert.EqualValues(t, gitPath, entry.Name(), "%s not edited correctly", newWikiName)
+
+ if newWikiName != "Home" {
+ _, err := masterTree.GetTreeEntryByPath("Home.md")
+ require.Error(t, err)
+ }
+ gitRepo.Close()
+ }
+}
+
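+// TestRepository_DeleteWikiPage deletes the "Home" page and verifies that
+// the corresponding file no longer exists in the wiki's master tree.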
+func TestRepository_DeleteWikiPage(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ require.NoError(t, DeleteWikiPage(git.DefaultContext, doer, repo, "Home"))
+
+ // Now verify that the page has been deleted:
+ gitRepo, err := gitrepo.OpenWikiRepository(git.DefaultContext, repo)
+ require.NoError(t, err)
+ defer gitRepo.Close()
+ masterTree, err := gitRepo.GetTree("master")
+ require.NoError(t, err)
+ gitPath := WebPathToGitPath("Home")
+ _, err = masterTree.GetTreeEntryByPath(gitPath)
+ require.Error(t, err)
+}
+
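+// TestPrepareWikiFileName checks that prepareGitPath reports whether a page
+// with the given title already exists on the branch (under its escaped
+// filename) and returns the ".md" git path to use for it.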
+func TestPrepareWikiFileName(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ gitRepo, err := gitrepo.OpenWikiRepository(git.DefaultContext, repo)
+ require.NoError(t, err)
+
+ defer gitRepo.Close()
+
+ tests := []struct {
+ name string
+ arg string
+ existence bool
+ wikiPath string
+ wantErr bool
+ }{{
+ name: "add suffix",
+ arg: "Home",
+ existence: true,
+ wikiPath: "Home.md",
+ wantErr: false,
+ }, {
+ name: "test special chars",
+ arg: "home of and & or wiki page!",
+ existence: false,
+ wikiPath: "home-of-and-%26-or-wiki-page%21.md",
+ wantErr: false,
+ }}
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ webPath := UserTitleToWebPath("", tt.arg)
+ existence, newWikiPath, err := prepareGitPath(gitRepo, "master", webPath)
+ if tt.wantErr {
+ require.Error(t, err)
+ return
+ }
+ require.NoError(t, err)
+ if existence != tt.existence {
+ if existence {
+ t.Errorf("expect to find no escaped file but we detect one")
+ } else {
+ t.Errorf("expect to find an escaped file but we could not detect one")
+ }
+ }
+ assert.EqualValues(t, tt.wikiPath, newWikiPath)
+ })
+ }
+}
+
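+// TestPrepareWikiFileName_FirstPage checks the first-page case: in a freshly
+// initialized bare repository the "master" branch does not exist yet, and
+// prepareGitPath must still succeed and report that the page is absent.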
+func TestPrepareWikiFileName_FirstPage(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+
+ // Create a temporary directory for a fresh, empty repository
+ tmpDir := t.TempDir()
+
+ err := git.InitRepository(git.DefaultContext, tmpDir, true, git.Sha1ObjectFormat.Name())
+ require.NoError(t, err)
+
+ gitRepo, err := git.OpenRepository(git.DefaultContext, tmpDir)
+ require.NoError(t, err)
+
+ defer gitRepo.Close()
+
+ existence, newWikiPath, err := prepareGitPath(gitRepo, "master", "Home")
+ assert.False(t, existence)
+ require.NoError(t, err)
+ assert.EqualValues(t, "Home.md", newWikiPath)
+}
+
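+// TestWebPathConversion checks that WebPathToURLPath returns already-encoded
+// web paths unchanged, including empty and multi-segment values.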
+func TestWebPathConversion(t *testing.T) {
+ assert.Equal(t, "path/wiki", WebPathToURLPath(WebPath("path/wiki")))
+ assert.Equal(t, "wiki", WebPathToURLPath(WebPath("wiki")))
+ assert.Equal(t, "", WebPathToURLPath(WebPath("")))
+}
+
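+// TestWebPathFromRequest checks that WebPathFromRequest percent-encodes the
+// path separator ("a/b" -> "a%2Fb") and resolves relative segments such as
+// ".." before encoding.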
+func TestWebPathFromRequest(t *testing.T) {
+ assert.Equal(t, WebPath("a%2Fb"), WebPathFromRequest("a/b"))
+ assert.Equal(t, WebPath("a"), WebPathFromRequest("a"))
+ assert.Equal(t, WebPath("b"), WebPathFromRequest("a/../b"))
+}