Diffstat (limited to 'services/actions'):
 -rw-r--r--  services/actions/auth.go                  | 102
 -rw-r--r--  services/actions/auth_test.go             |  65
 -rw-r--r--  services/actions/cleanup.go               | 126
 -rw-r--r--  services/actions/clear_tasks.go           | 101
 -rw-r--r--  services/actions/commit_status.go         | 167
 -rw-r--r--  services/actions/init.go                  |  26
 -rw-r--r--  services/actions/interface.go             |  28
 -rw-r--r--  services/actions/job_emitter.go           | 162
 -rw-r--r--  services/actions/job_emitter_test.go      | 136
 -rw-r--r--  services/actions/main_test.go             |  17
 -rw-r--r--  services/actions/notifier.go              | 765
 -rw-r--r--  services/actions/notifier_helper.go       | 582
 -rw-r--r--  services/actions/notifier_helper_test.go  |  51
 -rw-r--r--  services/actions/rerun.go                 |  38
 -rw-r--r--  services/actions/rerun_test.go            |  48
 -rw-r--r--  services/actions/schedule_tasks.go        | 154
 -rw-r--r--  services/actions/variables.go             | 100
 -rw-r--r--  services/actions/workflows.go             | 173
 18 files changed, 2841 insertions(+), 0 deletions(-)
diff --git a/services/actions/auth.go b/services/actions/auth.go
new file mode 100644
index 0000000..8e934d8
--- /dev/null
+++ b/services/actions/auth.go
@@ -0,0 +1,102 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "fmt"
+ "net/http"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/golang-jwt/jwt/v5"
+)
+
+type actionsClaims struct {
+ jwt.RegisteredClaims
+ Scp string `json:"scp"`
+ TaskID int64
+ RunID int64
+ JobID int64
+ Ac string `json:"ac"`
+}
+
+type actionsCacheScope struct {
+ Scope string
+ Permission actionsCachePermission
+}
+
+type actionsCachePermission int
+
+const (
+ actionsCachePermissionRead = 1 << iota
+ actionsCachePermissionWrite
+)
+
+func CreateAuthorizationToken(taskID, runID, jobID int64) (string, error) {
+ now := time.Now()
+
+ ac, err := json.Marshal(&[]actionsCacheScope{
+ {
+ Scope: "",
+ Permission: actionsCachePermissionWrite,
+ },
+ })
+ if err != nil {
+ return "", err
+ }
+
+ claims := actionsClaims{
+ RegisteredClaims: jwt.RegisteredClaims{
+ ExpiresAt: jwt.NewNumericDate(now.Add(24 * time.Hour)),
+ NotBefore: jwt.NewNumericDate(now),
+ },
+ Scp: fmt.Sprintf("Actions.Results:%d:%d", runID, jobID),
+ Ac: string(ac),
+ TaskID: taskID,
+ RunID: runID,
+ JobID: jobID,
+ }
+ token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
+
+ tokenString, err := token.SignedString(setting.GetGeneralTokenSigningSecret())
+ if err != nil {
+ return "", err
+ }
+
+ return tokenString, nil
+}
+
+func ParseAuthorizationToken(req *http.Request) (int64, error) {
+ h := req.Header.Get("Authorization")
+ if h == "" {
+ return 0, nil
+ }
+
+ parts := strings.SplitN(h, " ", 2)
+ if len(parts) != 2 {
+ log.Error("split token failed: %s", h)
+ return 0, fmt.Errorf("split token failed")
+ }
+
+ token, err := jwt.ParseWithClaims(parts[1], &actionsClaims{}, func(t *jwt.Token) (any, error) {
+ if _, ok := t.Method.(*jwt.SigningMethodHMAC); !ok {
+ return nil, fmt.Errorf("unexpected signing method: %v", t.Header["alg"])
+ }
+ return setting.GetGeneralTokenSigningSecret(), nil
+ })
+ if err != nil {
+ return 0, err
+ }
+
+ c, ok := token.Claims.(*actionsClaims)
+ if !token.Valid || !ok {
+ return 0, fmt.Errorf("invalid token claim")
+ }
+
+ return c.TaskID, nil
+}
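The two functions above are two halves of one round trip: the server mints a scoped JWT for a task, the runner sends it back as a bearer token, and the server recovers the task ID from it. A minimal sketch of that flow, assuming it lives inside this package so it can call the functions directly; the URL is a placeholder:

func exampleTokenRoundTrip() (int64, error) {
	token, err := CreateAuthorizationToken(23, 1, 2) // taskID, runID, jobID
	if err != nil {
		return 0, err
	}

	req, err := http.NewRequest(http.MethodGet, "http://gitea.example.com/placeholder", nil)
	if err != nil {
		return 0, err
	}
	req.Header.Set("Authorization", "Bearer "+token)

	// Returns 23 when the signature and claims are valid.
	return ParseAuthorizationToken(req)
}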
diff --git a/services/actions/auth_test.go b/services/actions/auth_test.go
new file mode 100644
index 0000000..1400e61
--- /dev/null
+++ b/services/actions/auth_test.go
@@ -0,0 +1,65 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "net/http"
+ "testing"
+
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/golang-jwt/jwt/v5"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestCreateAuthorizationToken(t *testing.T) {
+ var taskID int64 = 23
+ token, err := CreateAuthorizationToken(taskID, 1, 2)
+ require.NoError(t, err)
+ assert.NotEqual(t, "", token)
+ claims := jwt.MapClaims{}
+ _, err = jwt.ParseWithClaims(token, claims, func(t *jwt.Token) (any, error) {
+ return setting.GetGeneralTokenSigningSecret(), nil
+ })
+ require.NoError(t, err)
+ scp, ok := claims["scp"]
+ assert.True(t, ok, "Has scp claim in jwt token")
+ assert.Contains(t, scp, "Actions.Results:1:2")
+ taskIDClaim, ok := claims["TaskID"]
+ assert.True(t, ok, "Has TaskID claim in jwt token")
+ assert.InDelta(t, float64(taskID), taskIDClaim, 0, "Supplied taskid must match stored one")
+ acClaim, ok := claims["ac"]
+ assert.True(t, ok, "Has ac claim in jwt token")
+ ac, ok := acClaim.(string)
+ assert.True(t, ok, "ac claim is a string for buildx gha cache")
+ scopes := []actionsCacheScope{}
+ err = json.Unmarshal([]byte(ac), &scopes)
+ require.NoError(t, err, "ac claim is a json list for buildx gha cache")
+ assert.GreaterOrEqual(t, len(scopes), 1, "Expected at least one action cache scope for buildx gha cache")
+}
+
+func TestParseAuthorizationToken(t *testing.T) {
+ var taskID int64 = 23
+ token, err := CreateAuthorizationToken(taskID, 1, 2)
+ require.NoError(t, err)
+ assert.NotEqual(t, "", token)
+ headers := http.Header{}
+ headers.Set("Authorization", "Bearer "+token)
+ rTaskID, err := ParseAuthorizationToken(&http.Request{
+ Header: headers,
+ })
+ require.NoError(t, err)
+ assert.Equal(t, taskID, rTaskID)
+}
+
+func TestParseAuthorizationTokenNoAuthHeader(t *testing.T) {
+ headers := http.Header{}
+ rTaskID, err := ParseAuthorizationToken(&http.Request{
+ Header: headers,
+ })
+ require.NoError(t, err)
+ assert.Equal(t, int64(0), rTaskID)
+}
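The test above compares TaskID with assert.InDelta rather than assert.Equal because JSON numbers are decoded into interface{} as float64, so a claim written as int64(23) comes back from jwt.MapClaims as float64(23). A standalone illustration of that behaviour (assumes encoding/json and fmt are imported):

func exampleNumericClaimDecoding() {
	var claims map[string]any // jwt.MapClaims is just a map[string]any
	_ = json.Unmarshal([]byte(`{"TaskID": 23}`), &claims)
	v, isFloat64 := claims["TaskID"].(float64)
	fmt.Println(v, isFloat64) // prints: 23 true
}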
diff --git a/services/actions/cleanup.go b/services/actions/cleanup.go
new file mode 100644
index 0000000..1223ebc
--- /dev/null
+++ b/services/actions/cleanup.go
@@ -0,0 +1,126 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "context"
+ "fmt"
+ "time"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ actions_module "code.gitea.io/gitea/modules/actions"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/timeutil"
+)
+
+// Cleanup removes expired actions logs, data and artifacts
+func Cleanup(ctx context.Context) error {
+ // clean up expired artifacts
+ if err := CleanupArtifacts(ctx); err != nil {
+ return fmt.Errorf("cleanup artifacts: %w", err)
+ }
+
+ // clean up old logs
+ if err := CleanupLogs(ctx); err != nil {
+ return fmt.Errorf("cleanup logs: %w", err)
+ }
+
+ return nil
+}
+
+// CleanupArtifacts removes expired artifacts and artifacts pending deletion, and updates their records accordingly
+func CleanupArtifacts(taskCtx context.Context) error {
+ if err := cleanExpiredArtifacts(taskCtx); err != nil {
+ return err
+ }
+ return cleanNeedDeleteArtifacts(taskCtx)
+}
+
+func cleanExpiredArtifacts(taskCtx context.Context) error {
+ artifacts, err := actions_model.ListNeedExpiredArtifacts(taskCtx)
+ if err != nil {
+ return err
+ }
+ log.Info("Found %d expired artifacts", len(artifacts))
+ for _, artifact := range artifacts {
+ if err := actions_model.SetArtifactExpired(taskCtx, artifact.ID); err != nil {
+ log.Error("Cannot set artifact %d expired: %v", artifact.ID, err)
+ continue
+ }
+ if err := storage.ActionsArtifacts.Delete(artifact.StoragePath); err != nil {
+ log.Error("Cannot delete artifact %d: %v", artifact.ID, err)
+ continue
+ }
+ log.Info("Artifact %d set expired", artifact.ID)
+ }
+ return nil
+}
+
+// deleteArtifactBatchSize is the batch size of deleting artifacts
+const deleteArtifactBatchSize = 100
+
+func cleanNeedDeleteArtifacts(taskCtx context.Context) error {
+ for {
+ artifacts, err := actions_model.ListPendingDeleteArtifacts(taskCtx, deleteArtifactBatchSize)
+ if err != nil {
+ return err
+ }
+ log.Info("Found %d artifacts pending deletion", len(artifacts))
+ for _, artifact := range artifacts {
+ if err := actions_model.SetArtifactDeleted(taskCtx, artifact.ID); err != nil {
+ log.Error("Cannot set artifact %d deleted: %v", artifact.ID, err)
+ continue
+ }
+ if err := storage.ActionsArtifacts.Delete(artifact.StoragePath); err != nil {
+ log.Error("Cannot delete artifact %d: %v", artifact.ID, err)
+ continue
+ }
+ log.Info("Artifact %d set deleted", artifact.ID)
+ }
+ if len(artifacts) < deleteArtifactBatchSize {
+ log.Debug("No more artifacts pending deletion")
+ break
+ }
+ }
+ return nil
+}
+
+const deleteLogBatchSize = 100
+
+// CleanupLogs removes logs which are older than the configured retention time
+func CleanupLogs(ctx context.Context) error {
+ olderThan := timeutil.TimeStampNow().AddDuration(-time.Duration(setting.Actions.LogRetentionDays) * 24 * time.Hour)
+
+ count := 0
+ for {
+ tasks, err := actions_model.FindOldTasksToExpire(ctx, olderThan, deleteLogBatchSize)
+ if err != nil {
+ return fmt.Errorf("find old tasks: %w", err)
+ }
+ for _, task := range tasks {
+ if err := actions_module.RemoveLogs(ctx, task.LogInStorage, task.LogFilename); err != nil {
+ log.Error("Failed to remove log %s (in storage %v) of task %v: %v", task.LogFilename, task.LogInStorage, task.ID, err)
+ // do not return error here, continue to next task
+ continue
+ }
+ task.LogIndexes = nil // clear log indexes since it's a heavy field
+ task.LogExpired = true
+ if err := actions_model.UpdateTask(ctx, task, "log_indexes", "log_expired"); err != nil {
+ log.Error("Failed to update task %v: %v", task.ID, err)
+ // do not return error here, continue to next task
+ continue
+ }
+ count++
+ log.Trace("Removed log %s of task %v", task.LogFilename, task.ID)
+ }
+ if len(tasks) < deleteLogBatchSize {
+ break
+ }
+ }
+
+ log.Info("Removed %d logs", count)
+ return nil
+}
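A hypothetical sketch of how Cleanup might be driven periodically; in Gitea itself this is expected to be wired through the cron service rather than a hand-rolled ticker, and the 24-hour interval below is illustrative only:

func runCleanupLoop(ctx context.Context) {
	ticker := time.NewTicker(24 * time.Hour)
	defer ticker.Stop()
	for {
		select {
		case <-ctx.Done():
			return
		case <-ticker.C:
			if err := Cleanup(ctx); err != nil {
				log.Error("actions cleanup: %v", err)
			}
		}
	}
}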
diff --git a/services/actions/clear_tasks.go b/services/actions/clear_tasks.go
new file mode 100644
index 0000000..6737378
--- /dev/null
+++ b/services/actions/clear_tasks.go
@@ -0,0 +1,101 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "context"
+ "fmt"
+ "time"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/actions"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+)
+
+// StopZombieTasks stops tasks that are still marked as running but haven't been updated for a long time
+func StopZombieTasks(ctx context.Context) error {
+ return stopTasks(ctx, actions_model.FindTaskOptions{
+ Status: actions_model.StatusRunning,
+ UpdatedBefore: timeutil.TimeStamp(time.Now().Add(-setting.Actions.ZombieTaskTimeout).Unix()),
+ })
+}
+
+// StopEndlessTasks stops tasks that are running and still being updated, but haven't finished within the allowed time
+func StopEndlessTasks(ctx context.Context) error {
+ return stopTasks(ctx, actions_model.FindTaskOptions{
+ Status: actions_model.StatusRunning,
+ StartedBefore: timeutil.TimeStamp(time.Now().Add(-setting.Actions.EndlessTaskTimeout).Unix()),
+ })
+}
+
+func stopTasks(ctx context.Context, opts actions_model.FindTaskOptions) error {
+ tasks, err := db.Find[actions_model.ActionTask](ctx, opts)
+ if err != nil {
+ return fmt.Errorf("find tasks: %w", err)
+ }
+
+ jobs := make([]*actions_model.ActionRunJob, 0, len(tasks))
+ for _, task := range tasks {
+ if err := db.WithTx(ctx, func(ctx context.Context) error {
+ if err := actions_model.StopTask(ctx, task.ID, actions_model.StatusFailure); err != nil {
+ return err
+ }
+ if err := task.LoadJob(ctx); err != nil {
+ return err
+ }
+ jobs = append(jobs, task.Job)
+ return nil
+ }); err != nil {
+ log.Warn("Cannot stop task %v: %v", task.ID, err)
+ continue
+ }
+
+ remove, err := actions.TransferLogs(ctx, task.LogFilename)
+ if err != nil {
+ log.Warn("Cannot transfer logs of task %v: %v", task.ID, err)
+ continue
+ }
+ task.LogInStorage = true
+ if err := actions_model.UpdateTask(ctx, task, "log_in_storage"); err != nil {
+ log.Warn("Cannot update task %v: %v", task.ID, err)
+ continue
+ }
+ remove()
+ }
+
+ CreateCommitStatus(ctx, jobs...)
+
+ return nil
+}
+
+// CancelAbandonedJobs cancels jobs that are still waiting or blocked but haven't been picked up by a runner for a long time
+func CancelAbandonedJobs(ctx context.Context) error {
+ jobs, err := db.Find[actions_model.ActionRunJob](ctx, actions_model.FindRunJobOptions{
+ Statuses: []actions_model.Status{actions_model.StatusWaiting, actions_model.StatusBlocked},
+ UpdatedBefore: timeutil.TimeStamp(time.Now().Add(-setting.Actions.AbandonedJobTimeout).Unix()),
+ })
+ if err != nil {
+ log.Warn("find abandoned tasks: %v", err)
+ return err
+ }
+
+ now := timeutil.TimeStampNow()
+ for _, job := range jobs {
+ job.Status = actions_model.StatusCancelled
+ job.Stopped = now
+ if err := db.WithTx(ctx, func(ctx context.Context) error {
+ _, err := actions_model.UpdateRunJob(ctx, job, nil, "status", "stopped")
+ return err
+ }); err != nil {
+ log.Warn("cancel abandoned job %v: %v", job.ID, err)
+ // go on
+ }
+ CreateCommitStatus(ctx, job)
+ }
+
+ return nil
+}
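A hypothetical sketch showing the three maintenance passes above invoked from a single periodic job; the ordering and error handling are illustrative, not taken from this patch:

func runTaskMaintenance(ctx context.Context) {
	for _, pass := range []func(context.Context) error{
		StopZombieTasks,     // running but not updated recently
		StopEndlessTasks,    // running and updated, but never finishing
		CancelAbandonedJobs, // waiting/blocked but never picked up
	} {
		if err := pass(ctx); err != nil {
			log.Warn("actions task maintenance: %v", err)
		}
	}
}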
diff --git a/services/actions/commit_status.go b/services/actions/commit_status.go
new file mode 100644
index 0000000..2698059
--- /dev/null
+++ b/services/actions/commit_status.go
@@ -0,0 +1,167 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "context"
+ "fmt"
+ "path"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ user_model "code.gitea.io/gitea/models/user"
+ actions_module "code.gitea.io/gitea/modules/actions"
+ "code.gitea.io/gitea/modules/log"
+ api "code.gitea.io/gitea/modules/structs"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ commitstatus_service "code.gitea.io/gitea/services/repository/commitstatus"
+
+ "github.com/nektos/act/pkg/jobparser"
+)
+
+// CreateCommitStatus creates a commit status for each given job.
+// It won't return an error if it fails; it only logs the error, because it's not critical.
+func CreateCommitStatus(ctx context.Context, jobs ...*actions_model.ActionRunJob) {
+ for _, job := range jobs {
+ if err := createCommitStatus(ctx, job); err != nil {
+ log.Error("Failed to create commit status for job %d: %v", job.ID, err)
+ }
+ }
+}
+
+func createCommitStatus(ctx context.Context, job *actions_model.ActionRunJob) error {
+ if err := job.LoadAttributes(ctx); err != nil {
+ return fmt.Errorf("load run: %w", err)
+ }
+
+ run := job.Run
+
+ var (
+ sha string
+ event string
+ )
+ switch run.Event {
+ case webhook_module.HookEventPush:
+ event = "push"
+ payload, err := run.GetPushEventPayload()
+ if err != nil {
+ return fmt.Errorf("GetPushEventPayload: %w", err)
+ }
+ if payload.HeadCommit == nil {
+ return fmt.Errorf("head commit is missing in event payload")
+ }
+ sha = payload.HeadCommit.ID
+ case webhook_module.HookEventPullRequest, webhook_module.HookEventPullRequestSync:
+ if run.TriggerEvent == actions_module.GithubEventPullRequestTarget {
+ event = "pull_request_target"
+ } else {
+ event = "pull_request"
+ }
+ payload, err := run.GetPullRequestEventPayload()
+ if err != nil {
+ return fmt.Errorf("GetPullRequestEventPayload: %w", err)
+ }
+ if payload.PullRequest == nil {
+ return fmt.Errorf("pull request is missing in event payload")
+ } else if payload.PullRequest.Head == nil {
+ return fmt.Errorf("head of pull request is missing in event payload")
+ }
+ sha = payload.PullRequest.Head.Sha
+ case webhook_module.HookEventRelease:
+ event = string(run.Event)
+ sha = run.CommitSHA
+ default:
+ return nil
+ }
+
+ repo := run.Repo
+ // TODO: store workflow name as a field in ActionRun to avoid parsing
+ runName := path.Base(run.WorkflowID)
+ if wfs, err := jobparser.Parse(job.WorkflowPayload); err == nil && len(wfs) > 0 {
+ runName = wfs[0].Name
+ }
+ ctxname := fmt.Sprintf("%s / %s (%s)", runName, job.Name, event)
+ state := toCommitStatus(job.Status)
+ if statuses, _, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptionsAll); err == nil {
+ for _, v := range statuses {
+ if v.Context == ctxname {
+ if v.State == state {
+ // no need to update
+ return nil
+ }
+ break
+ }
+ }
+ } else {
+ return fmt.Errorf("GetLatestCommitStatus: %w", err)
+ }
+
+ description := ""
+ switch job.Status {
+ // TODO: if we want support description in different languages, we need to support i18n placeholders in it
+ case actions_model.StatusSuccess:
+ description = fmt.Sprintf("Successful in %s", job.Duration())
+ case actions_model.StatusFailure:
+ description = fmt.Sprintf("Failing after %s", job.Duration())
+ case actions_model.StatusCancelled:
+ description = "Has been cancelled"
+ case actions_model.StatusSkipped:
+ description = "Has been skipped"
+ case actions_model.StatusRunning:
+ description = "Has started running"
+ case actions_model.StatusWaiting:
+ description = "Waiting to run"
+ case actions_model.StatusBlocked:
+ description = "Blocked by required conditions"
+ }
+
+ index, err := getIndexOfJob(ctx, job)
+ if err != nil {
+ return fmt.Errorf("getIndexOfJob: %w", err)
+ }
+
+ creator := user_model.NewActionsUser()
+ if err := commitstatus_service.CreateCommitStatus(ctx, repo, creator,
+ sha,
+ &git_model.CommitStatus{
+ SHA: sha,
+ TargetURL: fmt.Sprintf("%s/jobs/%d", run.Link(), index),
+ Description: description,
+ Context: ctxname,
+ CreatorID: creator.ID,
+ State: state,
+ }); err != nil {
+ return fmt.Errorf("NewCommitStatus: %w", err)
+ }
+
+ return nil
+}
+
+func toCommitStatus(status actions_model.Status) api.CommitStatusState {
+ switch status {
+ case actions_model.StatusSuccess, actions_model.StatusSkipped:
+ return api.CommitStatusSuccess
+ case actions_model.StatusFailure, actions_model.StatusCancelled:
+ return api.CommitStatusFailure
+ case actions_model.StatusWaiting, actions_model.StatusBlocked, actions_model.StatusRunning:
+ return api.CommitStatusPending
+ default:
+ return api.CommitStatusError
+ }
+}
+
+func getIndexOfJob(ctx context.Context, job *actions_model.ActionRunJob) (int, error) {
+ // TODO: store job index as a field in ActionRunJob to avoid this
+ jobs, err := actions_model.GetRunJobsByRunID(ctx, job.RunID)
+ if err != nil {
+ return 0, err
+ }
+ for i, v := range jobs {
+ if v.ID == job.ID {
+ return i, nil
+ }
+ }
+ return 0, nil
+}
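A small sketch of how toCommitStatus collapses the Actions job statuses into the commit-status states shown in the UI; the sample values and expected mappings are taken from the switch above, and the function assumes it sits in the same package:

func exampleStatusMapping() {
	for _, s := range []actions_model.Status{
		actions_model.StatusSuccess,   // -> api.CommitStatusSuccess
		actions_model.StatusCancelled, // -> api.CommitStatusFailure
		actions_model.StatusRunning,   // -> api.CommitStatusPending
	} {
		fmt.Println(s, toCommitStatus(s))
	}
}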
diff --git a/services/actions/init.go b/services/actions/init.go
new file mode 100644
index 0000000..0f49cb6
--- /dev/null
+++ b/services/actions/init.go
@@ -0,0 +1,26 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/queue"
+ "code.gitea.io/gitea/modules/setting"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+func Init() {
+ if !setting.Actions.Enabled {
+ return
+ }
+
+ jobEmitterQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "actions_ready_job", jobEmitterQueueHandler)
+ if jobEmitterQueue == nil {
+ log.Fatal("Unable to create actions_ready_job queue")
+ }
+ go graceful.GetManager().RunWithCancel(jobEmitterQueue)
+
+ notify_service.RegisterNotifier(NewNotifier())
+}
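For context, a sketch of the handler contract that CreateUniqueQueue expects (the real handler is jobEmitterQueueHandler in job_emitter.go): the handler receives a batch of queued items and returns the subset that should be retried. The failure condition below is purely illustrative:

func exampleQueueHandler(items ...*jobUpdate) []*jobUpdate {
	var retry []*jobUpdate
	for _, item := range items {
		if item.RunID <= 0 { // hypothetical "couldn't process" condition
			retry = append(retry, item)
		}
	}
	return retry
}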
diff --git a/services/actions/interface.go b/services/actions/interface.go
new file mode 100644
index 0000000..d4fa782
--- /dev/null
+++ b/services/actions/interface.go
@@ -0,0 +1,28 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import "code.gitea.io/gitea/services/context"
+
+// API for actions of a repository or organization
+type API interface {
+ // ListActionsSecrets lists secrets
+ ListActionsSecrets(*context.APIContext)
+ // CreateOrUpdateSecret creates or updates a secret
+ CreateOrUpdateSecret(*context.APIContext)
+ // DeleteSecret deletes a secret
+ DeleteSecret(*context.APIContext)
+ // ListVariables lists variables
+ ListVariables(*context.APIContext)
+ // GetVariable gets a variable
+ GetVariable(*context.APIContext)
+ // DeleteVariable deletes a variable
+ DeleteVariable(*context.APIContext)
+ // CreateVariable creates a variable
+ CreateVariable(*context.APIContext)
+ // UpdateVariable updates a variable
+ UpdateVariable(*context.APIContext)
+ // GetRegistrationToken gets a registration token
+ GetRegistrationToken(*context.APIContext)
+}
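A hypothetical way for a repo- or org-level handler set to guarantee at compile time that it satisfies this interface; the repoAPI type below is illustrative only:

type repoAPI struct{}

var _ API = (*repoAPI)(nil) // compilation fails until every method exists

func (repoAPI) ListActionsSecrets(*context.APIContext)   {}
func (repoAPI) CreateOrUpdateSecret(*context.APIContext) {}
func (repoAPI) DeleteSecret(*context.APIContext)         {}
func (repoAPI) ListVariables(*context.APIContext)        {}
func (repoAPI) GetVariable(*context.APIContext)          {}
func (repoAPI) DeleteVariable(*context.APIContext)       {}
func (repoAPI) CreateVariable(*context.APIContext)       {}
func (repoAPI) UpdateVariable(*context.APIContext)       {}
func (repoAPI) GetRegistrationToken(*context.APIContext) {}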
diff --git a/services/actions/job_emitter.go b/services/actions/job_emitter.go
new file mode 100644
index 0000000..1f859fc
--- /dev/null
+++ b/services/actions/job_emitter.go
@@ -0,0 +1,162 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "context"
+ "errors"
+ "fmt"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/queue"
+
+ "github.com/nektos/act/pkg/jobparser"
+ "xorm.io/builder"
+)
+
+var jobEmitterQueue *queue.WorkerPoolQueue[*jobUpdate]
+
+type jobUpdate struct {
+ RunID int64
+}
+
+func EmitJobsIfReady(runID int64) error {
+ err := jobEmitterQueue.Push(&jobUpdate{
+ RunID: runID,
+ })
+ if errors.Is(err, queue.ErrAlreadyInQueue) {
+ return nil
+ }
+ return err
+}
+
+func jobEmitterQueueHandler(items ...*jobUpdate) []*jobUpdate {
+ ctx := graceful.GetManager().ShutdownContext()
+ var ret []*jobUpdate
+ for _, update := range items {
+ if err := checkJobsOfRun(ctx, update.RunID); err != nil {
+ ret = append(ret, update)
+ }
+ }
+ return ret
+}
+
+func checkJobsOfRun(ctx context.Context, runID int64) error {
+ jobs, err := db.Find[actions_model.ActionRunJob](ctx, actions_model.FindRunJobOptions{RunID: runID})
+ if err != nil {
+ return err
+ }
+ if err := db.WithTx(ctx, func(ctx context.Context) error {
+ idToJobs := make(map[string][]*actions_model.ActionRunJob, len(jobs))
+ for _, job := range jobs {
+ idToJobs[job.JobID] = append(idToJobs[job.JobID], job)
+ }
+
+ updates := newJobStatusResolver(jobs).Resolve()
+ for _, job := range jobs {
+ if status, ok := updates[job.ID]; ok {
+ job.Status = status
+ if n, err := actions_model.UpdateRunJob(ctx, job, builder.Eq{"status": actions_model.StatusBlocked}, "status"); err != nil {
+ return err
+ } else if n != 1 {
+ return fmt.Errorf("no affected for updating blocked job %v", job.ID)
+ }
+ }
+ }
+ return nil
+ }); err != nil {
+ return err
+ }
+ CreateCommitStatus(ctx, jobs...)
+ return nil
+}
+
+type jobStatusResolver struct {
+ statuses map[int64]actions_model.Status
+ needs map[int64][]int64
+ jobMap map[int64]*actions_model.ActionRunJob
+}
+
+func newJobStatusResolver(jobs actions_model.ActionJobList) *jobStatusResolver {
+ idToJobs := make(map[string][]*actions_model.ActionRunJob, len(jobs))
+ jobMap := make(map[int64]*actions_model.ActionRunJob)
+ for _, job := range jobs {
+ idToJobs[job.JobID] = append(idToJobs[job.JobID], job)
+ jobMap[job.ID] = job
+ }
+
+ statuses := make(map[int64]actions_model.Status, len(jobs))
+ needs := make(map[int64][]int64, len(jobs))
+ for _, job := range jobs {
+ statuses[job.ID] = job.Status
+ for _, need := range job.Needs {
+ for _, v := range idToJobs[need] {
+ needs[job.ID] = append(needs[job.ID], v.ID)
+ }
+ }
+ }
+ return &jobStatusResolver{
+ statuses: statuses,
+ needs: needs,
+ jobMap: jobMap,
+ }
+}
+
+func (r *jobStatusResolver) Resolve() map[int64]actions_model.Status {
+ ret := map[int64]actions_model.Status{}
+ for i := 0; i < len(r.statuses); i++ {
+ updated := r.resolve()
+ if len(updated) == 0 {
+ return ret
+ }
+ for k, v := range updated {
+ ret[k] = v
+ r.statuses[k] = v
+ }
+ }
+ return ret
+}
+
+func (r *jobStatusResolver) resolve() map[int64]actions_model.Status {
+ ret := map[int64]actions_model.Status{}
+ for id, status := range r.statuses {
+ if status != actions_model.StatusBlocked {
+ continue
+ }
+ allDone, allSucceed := true, true
+ for _, need := range r.needs[id] {
+ needStatus := r.statuses[need]
+ if !needStatus.IsDone() {
+ allDone = false
+ }
+ if needStatus.In(actions_model.StatusFailure, actions_model.StatusCancelled, actions_model.StatusSkipped) {
+ allSucceed = false
+ }
+ }
+ if allDone {
+ if allSucceed {
+ ret[id] = actions_model.StatusWaiting
+ } else {
+ // Check if the job has an "if" condition
+ hasIf := false
+ if wfJobs, _ := jobparser.Parse(r.jobMap[id].WorkflowPayload); len(wfJobs) == 1 {
+ _, wfJob := wfJobs[0].Job()
+ hasIf = len(wfJob.If.Value) > 0
+ }
+
+ if hasIf {
+ // act_runner will check the "if" condition
+ ret[id] = actions_model.StatusWaiting
+ } else {
+ // If the "if" condition is empty and not all dependent jobs completed successfully,
+ // the job should be skipped.
+ ret[id] = actions_model.StatusSkipped
+ }
+ }
+ }
+ }
+ return ret
+}
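A hypothetical call site for EmitJobsIfReady: once a job reaches a terminal status, pushing its run ID re-evaluates the remaining blocked jobs of that run. Because the queue is a unique queue, duplicate pushes for the same run are collapsed, and ErrAlreadyInQueue is already swallowed inside EmitJobsIfReady:

func onJobFinished(runID int64) error {
	// nil means the run is (or already was) scheduled for re-evaluation
	return EmitJobsIfReady(runID)
}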
diff --git a/services/actions/job_emitter_test.go b/services/actions/job_emitter_test.go
new file mode 100644
index 0000000..58c2dc3
--- /dev/null
+++ b/services/actions/job_emitter_test.go
@@ -0,0 +1,136 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "testing"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func Test_jobStatusResolver_Resolve(t *testing.T) {
+ tests := []struct {
+ name string
+ jobs actions_model.ActionJobList
+ want map[int64]actions_model.Status
+ }{
+ {
+ name: "no blocked",
+ jobs: actions_model.ActionJobList{
+ {ID: 1, JobID: "1", Status: actions_model.StatusWaiting, Needs: []string{}},
+ {ID: 2, JobID: "2", Status: actions_model.StatusWaiting, Needs: []string{}},
+ {ID: 3, JobID: "3", Status: actions_model.StatusWaiting, Needs: []string{}},
+ },
+ want: map[int64]actions_model.Status{},
+ },
+ {
+ name: "single blocked",
+ jobs: actions_model.ActionJobList{
+ {ID: 1, JobID: "1", Status: actions_model.StatusSuccess, Needs: []string{}},
+ {ID: 2, JobID: "2", Status: actions_model.StatusBlocked, Needs: []string{"1"}},
+ {ID: 3, JobID: "3", Status: actions_model.StatusWaiting, Needs: []string{}},
+ },
+ want: map[int64]actions_model.Status{
+ 2: actions_model.StatusWaiting,
+ },
+ },
+ {
+ name: "multiple blocked",
+ jobs: actions_model.ActionJobList{
+ {ID: 1, JobID: "1", Status: actions_model.StatusSuccess, Needs: []string{}},
+ {ID: 2, JobID: "2", Status: actions_model.StatusBlocked, Needs: []string{"1"}},
+ {ID: 3, JobID: "3", Status: actions_model.StatusBlocked, Needs: []string{"1"}},
+ },
+ want: map[int64]actions_model.Status{
+ 2: actions_model.StatusWaiting,
+ 3: actions_model.StatusWaiting,
+ },
+ },
+ {
+ name: "chain blocked",
+ jobs: actions_model.ActionJobList{
+ {ID: 1, JobID: "1", Status: actions_model.StatusFailure, Needs: []string{}},
+ {ID: 2, JobID: "2", Status: actions_model.StatusBlocked, Needs: []string{"1"}},
+ {ID: 3, JobID: "3", Status: actions_model.StatusBlocked, Needs: []string{"2"}},
+ },
+ want: map[int64]actions_model.Status{
+ 2: actions_model.StatusSkipped,
+ 3: actions_model.StatusSkipped,
+ },
+ },
+ {
+ name: "loop need",
+ jobs: actions_model.ActionJobList{
+ {ID: 1, JobID: "1", Status: actions_model.StatusBlocked, Needs: []string{"3"}},
+ {ID: 2, JobID: "2", Status: actions_model.StatusBlocked, Needs: []string{"1"}},
+ {ID: 3, JobID: "3", Status: actions_model.StatusBlocked, Needs: []string{"2"}},
+ },
+ want: map[int64]actions_model.Status{},
+ },
+ {
+ name: "`if` is not empty and all jobs in `needs` completed successfully",
+ jobs: actions_model.ActionJobList{
+ {ID: 1, JobID: "job1", Status: actions_model.StatusSuccess, Needs: []string{}},
+ {ID: 2, JobID: "job2", Status: actions_model.StatusBlocked, Needs: []string{"job1"}, WorkflowPayload: []byte(
+ `
+name: test
+on: push
+jobs:
+ job2:
+ runs-on: ubuntu-latest
+ needs: job1
+ if: ${{ always() && needs.job1.result == 'success' }}
+ steps:
+ - run: echo "will be checked by act_runner"
+`)},
+ },
+ want: map[int64]actions_model.Status{2: actions_model.StatusWaiting},
+ },
+ {
+ name: "`if` is not empty and not all jobs in `needs` completed successfully",
+ jobs: actions_model.ActionJobList{
+ {ID: 1, JobID: "job1", Status: actions_model.StatusFailure, Needs: []string{}},
+ {ID: 2, JobID: "job2", Status: actions_model.StatusBlocked, Needs: []string{"job1"}, WorkflowPayload: []byte(
+ `
+name: test
+on: push
+jobs:
+ job2:
+ runs-on: ubuntu-latest
+ needs: job1
+ if: ${{ always() && needs.job1.result == 'failure' }}
+ steps:
+ - run: echo "will be checked by act_runner"
+`)},
+ },
+ want: map[int64]actions_model.Status{2: actions_model.StatusWaiting},
+ },
+ {
+ name: "`if` is empty and not all jobs in `needs` completed successfully",
+ jobs: actions_model.ActionJobList{
+ {ID: 1, JobID: "job1", Status: actions_model.StatusFailure, Needs: []string{}},
+ {ID: 2, JobID: "job2", Status: actions_model.StatusBlocked, Needs: []string{"job1"}, WorkflowPayload: []byte(
+ `
+name: test
+on: push
+jobs:
+ job2:
+ runs-on: ubuntu-latest
+ needs: job1
+ steps:
+ - run: echo "should be skipped"
+`)},
+ },
+ want: map[int64]actions_model.Status{2: actions_model.StatusSkipped},
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ r := newJobStatusResolver(tt.jobs)
+ assert.Equal(t, tt.want, r.Resolve())
+ })
+ }
+}
diff --git a/services/actions/main_test.go b/services/actions/main_test.go
new file mode 100644
index 0000000..ea37ff5
--- /dev/null
+++ b/services/actions/main_test.go
@@ -0,0 +1,17 @@
+// Copyright 2024 The Forgejo Authors
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+
+ _ "code.gitea.io/gitea/models/actions"
+ _ "code.gitea.io/gitea/models/activities"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
diff --git a/services/actions/notifier.go b/services/actions/notifier.go
new file mode 100644
index 0000000..e97afad
--- /dev/null
+++ b/services/actions/notifier.go
@@ -0,0 +1,765 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "context"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ packages_model "code.gitea.io/gitea/models/packages"
+ perm_model "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ "code.gitea.io/gitea/services/convert"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+type actionsNotifier struct {
+ notify_service.NullNotifier
+}
+
+var _ notify_service.Notifier = &actionsNotifier{}
+
+// NewNotifier creates a new actionsNotifier
+func NewNotifier() notify_service.Notifier {
+ return &actionsNotifier{}
+}
+
+// NewIssue notifies about a newly created issue
+func (n *actionsNotifier) NewIssue(ctx context.Context, issue *issues_model.Issue, _ []*user_model.User) {
+ ctx = withMethod(ctx, "NewIssue")
+ if err := issue.LoadRepo(ctx); err != nil {
+ log.Error("issue.LoadRepo: %v", err)
+ return
+ }
+ if err := issue.LoadPoster(ctx); err != nil {
+ log.Error("issue.LoadPoster: %v", err)
+ return
+ }
+ permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, issue.Poster)
+
+ newNotifyInputFromIssue(issue, webhook_module.HookEventIssues).WithPayload(&api.IssuePayload{
+ Action: api.HookIssueOpened,
+ Index: issue.Index,
+ Issue: convert.ToAPIIssue(ctx, issue.Poster, issue),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, issue.Poster, nil),
+ }).Notify(withMethod(ctx, "NewIssue"))
+}
+
+func (n *actionsNotifier) IssueChangeTitle(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, _ string) {
+ ctx = withMethod(ctx, "IssueChangeTitle")
+
+ n.issueChange(ctx, doer, issue)
+}
+
+// IssueChangeContent notifies about a change to an issue's content
+func (n *actionsNotifier) IssueChangeContent(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, _ string) {
+ ctx = withMethod(ctx, "IssueChangeContent")
+
+ n.issueChange(ctx, doer, issue)
+}
+
+func (n *actionsNotifier) issueChange(ctx context.Context, doer *user_model.User, issue *issues_model.Issue) {
+ var err error
+ if err = issue.LoadRepo(ctx); err != nil {
+ log.Error("LoadRepo: %v", err)
+ return
+ }
+
+ permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, issue.Poster)
+ if issue.IsPull {
+ if err = issue.LoadPullRequest(ctx); err != nil {
+ log.Error("loadPullRequest: %v", err)
+ return
+ }
+ newNotifyInputFromIssue(issue, webhook_module.HookEventPullRequest).
+ WithDoer(doer).
+ WithPayload(&api.PullRequestPayload{
+ Action: api.HookIssueEdited,
+ Index: issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
+ Repository: convert.ToRepo(ctx, issue.Repo, access_model.Permission{AccessMode: perm_model.AccessModeNone}),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }).
+ WithPullRequest(issue.PullRequest).
+ Notify(ctx)
+ return
+ }
+ newNotifyInputFromIssue(issue, webhook_module.HookEventIssues).
+ WithDoer(doer).
+ WithPayload(&api.IssuePayload{
+ Action: api.HookIssueEdited,
+ Index: issue.Index,
+ Issue: convert.ToAPIIssue(ctx, doer, issue),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }).
+ Notify(ctx)
+}
+
+// IssueChangeStatus notifies about an issue being closed or reopened
+func (n *actionsNotifier) IssueChangeStatus(ctx context.Context, doer *user_model.User, commitID string, issue *issues_model.Issue, _ *issues_model.Comment, isClosed bool) {
+ ctx = withMethod(ctx, "IssueChangeStatus")
+ permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, issue.Poster)
+ if issue.IsPull {
+ if err := issue.LoadPullRequest(ctx); err != nil {
+ log.Error("LoadPullRequest: %v", err)
+ return
+ }
+ // Merging a pull request calls issue.changeStatus, so we need to handle it separately.
+ apiPullRequest := &api.PullRequestPayload{
+ Index: issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ CommitID: commitID,
+ }
+ if isClosed {
+ apiPullRequest.Action = api.HookIssueClosed
+ } else {
+ apiPullRequest.Action = api.HookIssueReOpened
+ }
+ newNotifyInputFromIssue(issue, webhook_module.HookEventPullRequest).
+ WithDoer(doer).
+ WithPayload(apiPullRequest).
+ WithPullRequest(issue.PullRequest).
+ Notify(ctx)
+ return
+ }
+ apiIssue := &api.IssuePayload{
+ Index: issue.Index,
+ Issue: convert.ToAPIIssue(ctx, doer, issue),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }
+ if isClosed {
+ apiIssue.Action = api.HookIssueClosed
+ } else {
+ apiIssue.Action = api.HookIssueReOpened
+ }
+ newNotifyInputFromIssue(issue, webhook_module.HookEventIssues).
+ WithDoer(doer).
+ WithPayload(apiIssue).
+ Notify(ctx)
+}
+
+// IssueChangeAssignee notifies about an issue being assigned or unassigned
+func (n *actionsNotifier) IssueChangeAssignee(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, assignee *user_model.User, removed bool, comment *issues_model.Comment) {
+ ctx = withMethod(ctx, "IssueChangeAssignee")
+
+ var action api.HookIssueAction
+ if removed {
+ action = api.HookIssueUnassigned
+ } else {
+ action = api.HookIssueAssigned
+ }
+
+ hookEvent := webhook_module.HookEventIssueAssign
+ if issue.IsPull {
+ hookEvent = webhook_module.HookEventPullRequestAssign
+ }
+
+ notifyIssueChange(ctx, doer, issue, hookEvent, action)
+}
+
+// IssueChangeMilestone notifies about an issue's milestone being changed
+func (n *actionsNotifier) IssueChangeMilestone(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldMilestoneID int64) {
+ ctx = withMethod(ctx, "IssueChangeMilestone")
+
+ var action api.HookIssueAction
+ if issue.MilestoneID > 0 {
+ action = api.HookIssueMilestoned
+ } else {
+ action = api.HookIssueDemilestoned
+ }
+
+ hookEvent := webhook_module.HookEventIssueMilestone
+ if issue.IsPull {
+ hookEvent = webhook_module.HookEventPullRequestMilestone
+ }
+
+ notifyIssueChange(ctx, doer, issue, hookEvent, action)
+}
+
+func (n *actionsNotifier) IssueChangeLabels(ctx context.Context, doer *user_model.User, issue *issues_model.Issue,
+ _, _ []*issues_model.Label,
+) {
+ ctx = withMethod(ctx, "IssueChangeLabels")
+
+ hookEvent := webhook_module.HookEventIssueLabel
+ if issue.IsPull {
+ hookEvent = webhook_module.HookEventPullRequestLabel
+ }
+
+ notifyIssueChange(ctx, doer, issue, hookEvent, api.HookIssueLabelUpdated)
+}
+
+func notifyIssueChange(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, event webhook_module.HookEventType, action api.HookIssueAction) {
+ var err error
+ if err = issue.LoadRepo(ctx); err != nil {
+ log.Error("LoadRepo: %v", err)
+ return
+ }
+
+ if err = issue.LoadPoster(ctx); err != nil {
+ log.Error("LoadPoster: %v", err)
+ return
+ }
+
+ if issue.IsPull {
+ if err = issue.LoadPullRequest(ctx); err != nil {
+ log.Error("loadPullRequest: %v", err)
+ return
+ }
+ newNotifyInputFromIssue(issue, event).
+ WithDoer(doer).
+ WithPayload(&api.PullRequestPayload{
+ Action: action,
+ Index: issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
+ Repository: convert.ToRepo(ctx, issue.Repo, access_model.Permission{AccessMode: perm_model.AccessModeNone}),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }).
+ WithPullRequest(issue.PullRequest).
+ Notify(ctx)
+ return
+ }
+ permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, issue.Poster)
+ newNotifyInputFromIssue(issue, event).
+ WithDoer(doer).
+ WithPayload(&api.IssuePayload{
+ Action: action,
+ Index: issue.Index,
+ Issue: convert.ToAPIIssue(ctx, doer, issue),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }).
+ Notify(ctx)
+}
+
+// CreateIssueComment notifies about a new comment on an issue
+func (n *actionsNotifier) CreateIssueComment(ctx context.Context, doer *user_model.User, repo *repo_model.Repository,
+ issue *issues_model.Issue, comment *issues_model.Comment, _ []*user_model.User,
+) {
+ ctx = withMethod(ctx, "CreateIssueComment")
+
+ if issue.IsPull {
+ notifyIssueCommentChange(ctx, doer, comment, "", webhook_module.HookEventPullRequestComment, api.HookIssueCommentCreated)
+ return
+ }
+ notifyIssueCommentChange(ctx, doer, comment, "", webhook_module.HookEventIssueComment, api.HookIssueCommentCreated)
+}
+
+func (n *actionsNotifier) UpdateComment(ctx context.Context, doer *user_model.User, c *issues_model.Comment, oldContent string) {
+ ctx = withMethod(ctx, "UpdateComment")
+
+ if err := c.LoadIssue(ctx); err != nil {
+ log.Error("LoadIssue: %v", err)
+ return
+ }
+
+ if c.Issue.IsPull {
+ notifyIssueCommentChange(ctx, doer, c, oldContent, webhook_module.HookEventPullRequestComment, api.HookIssueCommentEdited)
+ return
+ }
+ notifyIssueCommentChange(ctx, doer, c, oldContent, webhook_module.HookEventIssueComment, api.HookIssueCommentEdited)
+}
+
+func (n *actionsNotifier) DeleteComment(ctx context.Context, doer *user_model.User, comment *issues_model.Comment) {
+ ctx = withMethod(ctx, "DeleteComment")
+
+ if err := comment.LoadIssue(ctx); err != nil {
+ log.Error("LoadIssue: %v", err)
+ return
+ }
+
+ if comment.Issue.IsPull {
+ notifyIssueCommentChange(ctx, doer, comment, "", webhook_module.HookEventPullRequestComment, api.HookIssueCommentDeleted)
+ return
+ }
+ notifyIssueCommentChange(ctx, doer, comment, "", webhook_module.HookEventIssueComment, api.HookIssueCommentDeleted)
+}
+
+func notifyIssueCommentChange(ctx context.Context, doer *user_model.User, comment *issues_model.Comment, oldContent string, event webhook_module.HookEventType, action api.HookIssueCommentAction) {
+ if err := comment.LoadIssue(ctx); err != nil {
+ log.Error("LoadIssue: %v", err)
+ return
+ }
+ if err := comment.Issue.LoadAttributes(ctx); err != nil {
+ log.Error("LoadAttributes: %v", err)
+ return
+ }
+
+ permission, _ := access_model.GetUserRepoPermission(ctx, comment.Issue.Repo, doer)
+
+ payload := &api.IssueCommentPayload{
+ Action: action,
+ Issue: convert.ToAPIIssue(ctx, doer, comment.Issue),
+ Comment: convert.ToAPIComment(ctx, comment.Issue.Repo, comment),
+ Repository: convert.ToRepo(ctx, comment.Issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ IsPull: comment.Issue.IsPull,
+ }
+
+ if action == api.HookIssueCommentEdited {
+ payload.Changes = &api.ChangesPayload{
+ Body: &api.ChangesFromPayload{
+ From: oldContent,
+ },
+ }
+ }
+
+ if comment.Issue.IsPull {
+ if err := comment.Issue.LoadPullRequest(ctx); err != nil {
+ log.Error("LoadPullRequest: %v", err)
+ return
+ }
+ newNotifyInputFromIssue(comment.Issue, event).
+ WithDoer(doer).
+ WithPayload(payload).
+ WithPullRequest(comment.Issue.PullRequest).
+ Notify(ctx)
+ return
+ }
+
+ newNotifyInputFromIssue(comment.Issue, event).
+ WithDoer(doer).
+ WithPayload(payload).
+ Notify(ctx)
+}
+
+func (n *actionsNotifier) NewPullRequest(ctx context.Context, pull *issues_model.PullRequest, _ []*user_model.User) {
+ ctx = withMethod(ctx, "NewPullRequest")
+
+ if err := pull.LoadIssue(ctx); err != nil {
+ log.Error("pull.LoadIssue: %v", err)
+ return
+ }
+ if err := pull.Issue.LoadRepo(ctx); err != nil {
+ log.Error("pull.Issue.LoadRepo: %v", err)
+ return
+ }
+ if err := pull.Issue.LoadPoster(ctx); err != nil {
+ log.Error("pull.Issue.LoadPoster: %v", err)
+ return
+ }
+
+ permission, _ := access_model.GetUserRepoPermission(ctx, pull.Issue.Repo, pull.Issue.Poster)
+
+ newNotifyInputFromIssue(pull.Issue, webhook_module.HookEventPullRequest).
+ WithPayload(&api.PullRequestPayload{
+ Action: api.HookIssueOpened,
+ Index: pull.Issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, pull, nil),
+ Repository: convert.ToRepo(ctx, pull.Issue.Repo, permission),
+ Sender: convert.ToUser(ctx, pull.Issue.Poster, nil),
+ }).
+ WithPullRequest(pull).
+ Notify(ctx)
+}
+
+func (n *actionsNotifier) CreateRepository(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository) {
+ ctx = withMethod(ctx, "CreateRepository")
+
+ newNotifyInput(repo, doer, webhook_module.HookEventRepository).WithPayload(&api.RepositoryPayload{
+ Action: api.HookRepoCreated,
+ Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
+ Organization: convert.ToUser(ctx, u, nil),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }).Notify(ctx)
+}
+
+func (n *actionsNotifier) ForkRepository(ctx context.Context, doer *user_model.User, oldRepo, repo *repo_model.Repository) {
+ ctx = withMethod(ctx, "ForkRepository")
+
+ oldPermission, _ := access_model.GetUserRepoPermission(ctx, oldRepo, doer)
+ permission, _ := access_model.GetUserRepoPermission(ctx, repo, doer)
+
+ // forked webhook
+ newNotifyInput(oldRepo, doer, webhook_module.HookEventFork).WithPayload(&api.ForkPayload{
+ Forkee: convert.ToRepo(ctx, oldRepo, oldPermission),
+ Repo: convert.ToRepo(ctx, repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }).Notify(ctx)
+
+ u := repo.MustOwner(ctx)
+
+ // Add to hook queue for created repo after session commit.
+ if u.IsOrganization() {
+ newNotifyInput(repo, doer, webhook_module.HookEventRepository).
+ WithRef(git.RefNameFromBranch(oldRepo.DefaultBranch).String()).
+ WithPayload(&api.RepositoryPayload{
+ Action: api.HookRepoCreated,
+ Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
+ Organization: convert.ToUser(ctx, u, nil),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }).Notify(ctx)
+ }
+}
+
+func (n *actionsNotifier) PullRequestReview(ctx context.Context, pr *issues_model.PullRequest, review *issues_model.Review, _ *issues_model.Comment, _ []*user_model.User) {
+ ctx = withMethod(ctx, "PullRequestReview")
+
+ var reviewHookType webhook_module.HookEventType
+
+ switch review.Type {
+ case issues_model.ReviewTypeApprove:
+ reviewHookType = webhook_module.HookEventPullRequestReviewApproved
+ case issues_model.ReviewTypeComment:
+ reviewHookType = webhook_module.HookEventPullRequestReviewComment
+ case issues_model.ReviewTypeReject:
+ reviewHookType = webhook_module.HookEventPullRequestReviewRejected
+ default:
+ // unsupported review webhook type here
+ log.Error("Unsupported review webhook type")
+ return
+ }
+
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("pr.LoadIssue: %v", err)
+ return
+ }
+
+ permission, err := access_model.GetUserRepoPermission(ctx, review.Issue.Repo, review.Issue.Poster)
+ if err != nil {
+ log.Error("models.GetUserRepoPermission: %v", err)
+ return
+ }
+
+ newNotifyInput(review.Issue.Repo, review.Reviewer, reviewHookType).
+ WithRef(review.CommitID).
+ WithPayload(&api.PullRequestPayload{
+ Action: api.HookIssueReviewed,
+ Index: review.Issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, pr, nil),
+ Repository: convert.ToRepo(ctx, review.Issue.Repo, permission),
+ Sender: convert.ToUser(ctx, review.Reviewer, nil),
+ Review: &api.ReviewPayload{
+ Type: string(reviewHookType),
+ Content: review.Content,
+ },
+ }).Notify(ctx)
+}
+
+func (n *actionsNotifier) PullRequestReviewRequest(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, reviewer *user_model.User, isRequest bool, comment *issues_model.Comment) {
+ if !issue.IsPull {
+ log.Warn("PullRequestReviewRequest: issue is not a pull request: %v", issue.ID)
+ return
+ }
+
+ ctx = withMethod(ctx, "PullRequestReviewRequest")
+
+ permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, doer)
+ if err := issue.LoadPullRequest(ctx); err != nil {
+ log.Error("LoadPullRequest failed: %v", err)
+ return
+ }
+ var action api.HookIssueAction
+ if isRequest {
+ action = api.HookIssueReviewRequested
+ } else {
+ action = api.HookIssueReviewRequestRemoved
+ }
+ newNotifyInputFromIssue(issue, webhook_module.HookEventPullRequestReviewRequest).
+ WithDoer(doer).
+ WithPayload(&api.PullRequestPayload{
+ Action: action,
+ Index: issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
+ RequestedReviewer: convert.ToUser(ctx, reviewer, nil),
+ Repository: convert.ToRepo(ctx, issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }).
+ WithPullRequest(issue.PullRequest).
+ Notify(ctx)
+}
+
+func (*actionsNotifier) MergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
+ ctx = withMethod(ctx, "MergePullRequest")
+
+ // Reload pull request information.
+ if err := pr.LoadAttributes(ctx); err != nil {
+ log.Error("LoadAttributes: %v", err)
+ return
+ }
+
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("LoadAttributes: %v", err)
+ return
+ }
+
+ if err := pr.Issue.LoadRepo(ctx); err != nil {
+ log.Error("pr.Issue.LoadRepo: %v", err)
+ return
+ }
+
+ permission, err := access_model.GetUserRepoPermission(ctx, pr.Issue.Repo, doer)
+ if err != nil {
+ log.Error("models.GetUserRepoPermission: %v", err)
+ return
+ }
+
+ // Merging a pull request calls issue.changeStatus, so we need to handle it separately.
+ apiPullRequest := &api.PullRequestPayload{
+ Index: pr.Issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, pr, nil),
+ Repository: convert.ToRepo(ctx, pr.Issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ Action: api.HookIssueClosed,
+ }
+
+ newNotifyInput(pr.Issue.Repo, doer, webhook_module.HookEventPullRequest).
+ WithRef(pr.MergedCommitID).
+ WithPayload(apiPullRequest).
+ WithPullRequest(pr).
+ Notify(ctx)
+}
+
+func (n *actionsNotifier) PushCommits(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, opts *repository.PushUpdateOptions, commits *repository.PushCommits) {
+ if git.IsEmptyCommitID(opts.NewCommitID, nil) {
+ log.Trace("new commitID is empty")
+ return
+ }
+
+ ctx = withMethod(ctx, "PushCommits")
+
+ apiPusher := convert.ToUser(ctx, pusher, nil)
+ apiCommits, apiHeadCommit, err := commits.ToAPIPayloadCommits(ctx, repo.RepoPath(), repo.HTMLURL())
+ if err != nil {
+ log.Error("commits.ToAPIPayloadCommits failed: %v", err)
+ return
+ }
+
+ newNotifyInput(repo, pusher, webhook_module.HookEventPush).
+ WithRef(opts.RefFullName.String()).
+ WithPayload(&api.PushPayload{
+ Ref: opts.RefFullName.String(),
+ Before: opts.OldCommitID,
+ After: opts.NewCommitID,
+ CompareURL: setting.AppURL + commits.CompareURL,
+ Commits: apiCommits,
+ HeadCommit: apiHeadCommit,
+ Repo: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
+ Pusher: apiPusher,
+ Sender: apiPusher,
+ }).
+ Notify(ctx)
+}
+
+func (n *actionsNotifier) CreateRef(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, refFullName git.RefName, refID string) {
+ ctx = withMethod(ctx, "CreateRef")
+
+ apiPusher := convert.ToUser(ctx, pusher, nil)
+ apiRepo := convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeNone})
+
+ newNotifyInput(repo, pusher, webhook_module.HookEventCreate).
+ WithRef(refFullName.String()).
+ WithPayload(&api.CreatePayload{
+ Ref: refFullName.String(),
+ Sha: refID,
+ RefType: refFullName.RefType(),
+ Repo: apiRepo,
+ Sender: apiPusher,
+ }).
+ Notify(ctx)
+}
+
+func (n *actionsNotifier) DeleteRef(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, refFullName git.RefName) {
+ ctx = withMethod(ctx, "DeleteRef")
+
+ apiPusher := convert.ToUser(ctx, pusher, nil)
+ apiRepo := convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeNone})
+
+ newNotifyInput(repo, pusher, webhook_module.HookEventDelete).
+ WithPayload(&api.DeletePayload{
+ Ref: refFullName.String(),
+ RefType: refFullName.RefType(),
+ PusherType: api.PusherTypeUser,
+ Repo: apiRepo,
+ Sender: apiPusher,
+ }).
+ Notify(ctx)
+}
+
+func (n *actionsNotifier) SyncPushCommits(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, opts *repository.PushUpdateOptions, commits *repository.PushCommits) {
+ ctx = withMethod(ctx, "SyncPushCommits")
+
+ apiPusher := convert.ToUser(ctx, pusher, nil)
+ apiCommits, apiHeadCommit, err := commits.ToAPIPayloadCommits(ctx, repo.RepoPath(), repo.HTMLURL())
+ if err != nil {
+ log.Error("commits.ToAPIPayloadCommits failed: %v", err)
+ return
+ }
+
+ newNotifyInput(repo, pusher, webhook_module.HookEventPush).
+ WithRef(opts.RefFullName.String()).
+ WithPayload(&api.PushPayload{
+ Ref: opts.RefFullName.String(),
+ Before: opts.OldCommitID,
+ After: opts.NewCommitID,
+ CompareURL: setting.AppURL + commits.CompareURL,
+ Commits: apiCommits,
+ TotalCommits: commits.Len,
+ HeadCommit: apiHeadCommit,
+ Repo: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
+ Pusher: apiPusher,
+ Sender: apiPusher,
+ }).
+ Notify(ctx)
+}
+
+func (n *actionsNotifier) SyncCreateRef(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, refFullName git.RefName, refID string) {
+ ctx = withMethod(ctx, "SyncCreateRef")
+ n.CreateRef(ctx, pusher, repo, refFullName, refID)
+}
+
+func (n *actionsNotifier) SyncDeleteRef(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, refFullName git.RefName) {
+ ctx = withMethod(ctx, "SyncDeleteRef")
+ n.DeleteRef(ctx, pusher, repo, refFullName)
+}
+
+func (n *actionsNotifier) NewRelease(ctx context.Context, rel *repo_model.Release) {
+ ctx = withMethod(ctx, "NewRelease")
+ notifyRelease(ctx, rel.Publisher, rel, api.HookReleasePublished)
+}
+
+func (n *actionsNotifier) UpdateRelease(ctx context.Context, doer *user_model.User, rel *repo_model.Release) {
+ ctx = withMethod(ctx, "UpdateRelease")
+ notifyRelease(ctx, doer, rel, api.HookReleaseUpdated)
+}
+
+func (n *actionsNotifier) DeleteRelease(ctx context.Context, doer *user_model.User, rel *repo_model.Release) {
+ if rel.IsTag {
+ // the same action has already been sent in `PushCommits`, so skip it.
+ return
+ }
+ ctx = withMethod(ctx, "DeleteRelease")
+ notifyRelease(ctx, doer, rel, api.HookReleaseDeleted)
+}
+
+func (n *actionsNotifier) PackageCreate(ctx context.Context, doer *user_model.User, pd *packages_model.PackageDescriptor) {
+ ctx = withMethod(ctx, "PackageCreate")
+ notifyPackage(ctx, doer, pd, api.HookPackageCreated)
+}
+
+func (n *actionsNotifier) PackageDelete(ctx context.Context, doer *user_model.User, pd *packages_model.PackageDescriptor) {
+ ctx = withMethod(ctx, "PackageDelete")
+ notifyPackage(ctx, doer, pd, api.HookPackageDeleted)
+}
+
+func (n *actionsNotifier) AutoMergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
+ ctx = withMethod(ctx, "AutoMergePullRequest")
+ n.MergePullRequest(ctx, doer, pr)
+}
+
+func (n *actionsNotifier) PullRequestSynchronized(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
+ ctx = withMethod(ctx, "PullRequestSynchronized")
+
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("LoadAttributes: %v", err)
+ return
+ }
+
+ if err := pr.Issue.LoadRepo(ctx); err != nil {
+ log.Error("pr.Issue.LoadRepo: %v", err)
+ return
+ }
+
+ newNotifyInput(pr.Issue.Repo, doer, webhook_module.HookEventPullRequestSync).
+ WithPayload(&api.PullRequestPayload{
+ Action: api.HookIssueSynchronized,
+ Index: pr.Issue.Index,
+ PullRequest: convert.ToAPIPullRequest(ctx, pr, nil),
+ Repository: convert.ToRepo(ctx, pr.Issue.Repo, access_model.Permission{AccessMode: perm_model.AccessModeNone}),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }).
+ WithPullRequest(pr).
+ Notify(ctx)
+}
+
+func (n *actionsNotifier) PullRequestChangeTargetBranch(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest, oldBranch string) {
+ ctx = withMethod(ctx, "PullRequestChangeTargetBranch")
+
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("LoadAttributes: %v", err)
+ return
+ }
+
+ if err := pr.Issue.LoadRepo(ctx); err != nil {
+ log.Error("pr.Issue.LoadRepo: %v", err)
+ return
+ }
+
+ permission, _ := access_model.GetUserRepoPermission(ctx, pr.Issue.Repo, pr.Issue.Poster)
+ newNotifyInput(pr.Issue.Repo, doer, webhook_module.HookEventPullRequest).
+ WithPayload(&api.PullRequestPayload{
+ Action: api.HookIssueEdited,
+ Index: pr.Issue.Index,
+ Changes: &api.ChangesPayload{
+ Ref: &api.ChangesFromPayload{
+ From: oldBranch,
+ },
+ },
+ PullRequest: convert.ToAPIPullRequest(ctx, pr, nil),
+ Repository: convert.ToRepo(ctx, pr.Issue.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }).
+ WithPullRequest(pr).
+ Notify(ctx)
+}
+
+func (n *actionsNotifier) NewWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, page, comment string) {
+ ctx = withMethod(ctx, "NewWikiPage")
+
+ newNotifyInput(repo, doer, webhook_module.HookEventWiki).WithPayload(&api.WikiPayload{
+ Action: api.HookWikiCreated,
+ Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
+ Sender: convert.ToUser(ctx, doer, nil),
+ Page: page,
+ Comment: comment,
+ }).Notify(ctx)
+}
+
+func (n *actionsNotifier) EditWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, page, comment string) {
+ ctx = withMethod(ctx, "EditWikiPage")
+
+ newNotifyInput(repo, doer, webhook_module.HookEventWiki).WithPayload(&api.WikiPayload{
+ Action: api.HookWikiEdited,
+ Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
+ Sender: convert.ToUser(ctx, doer, nil),
+ Page: page,
+ Comment: comment,
+ }).Notify(ctx)
+}
+
+func (n *actionsNotifier) DeleteWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, page string) {
+ ctx = withMethod(ctx, "DeleteWikiPage")
+
+ newNotifyInput(repo, doer, webhook_module.HookEventWiki).WithPayload(&api.WikiPayload{
+ Action: api.HookWikiDeleted,
+ Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
+ Sender: convert.ToUser(ctx, doer, nil),
+ Page: page,
+ }).Notify(ctx)
+}
+
+// MigrateRepository is used to detect workflows after a repository has been migrated
+func (n *actionsNotifier) MigrateRepository(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository) {
+ ctx = withMethod(ctx, "MigrateRepository")
+
+ newNotifyInput(repo, doer, webhook_module.HookEventRepository).WithPayload(&api.RepositoryPayload{
+ Action: api.HookRepoCreated,
+ Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
+ Organization: convert.ToUser(ctx, u, nil),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }).Notify(ctx)
+}
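All of the notifier methods above follow the same builder pattern from notifier_helper.go; a condensed, hypothetical example for a push-style event, with the payload fields mostly elided:

func exampleNotify(ctx context.Context, repo *repo_model.Repository, doer *user_model.User) {
	newNotifyInput(repo, doer, webhook_module.HookEventPush).
		WithRef("refs/heads/main").
		WithPayload(&api.PushPayload{Ref: "refs/heads/main"}).
		Notify(ctx)
}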
diff --git a/services/actions/notifier_helper.go b/services/actions/notifier_helper.go
new file mode 100644
index 0000000..c4bf695
--- /dev/null
+++ b/services/actions/notifier_helper.go
@@ -0,0 +1,582 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "bytes"
+ "context"
+ "errors"
+ "fmt"
+ "slices"
+ "strings"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ packages_model "code.gitea.io/gitea/models/packages"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ unit_model "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ actions_module "code.gitea.io/gitea/modules/actions"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ "code.gitea.io/gitea/services/convert"
+
+ "github.com/nektos/act/pkg/jobparser"
+ "github.com/nektos/act/pkg/model"
+)
+
+type methodCtx struct{}
+
+var methodCtxKey = methodCtx{}
+
+// withMethod sets the notification method that this context currently executes.
+// Used for debugging/troubleshooting purposes.
+func withMethod(ctx context.Context, method string) context.Context {
+ // don't overwrite
+ if v := ctx.Value(methodCtxKey); v != nil {
+ if _, ok := v.(string); ok {
+ return ctx
+ }
+ }
+ return context.WithValue(ctx, methodCtxKey, method)
+}
+
+// getMethod gets the notification method that this context currently executes.
+// Default: "notify"
+// Used for debugging/troubleshooting purposes.
+func getMethod(ctx context.Context) string {
+ if v := ctx.Value(methodCtxKey); v != nil {
+ if s, ok := v.(string); ok {
+ return s
+ }
+ }
+ return "notify"
+}
+
+type notifyInput struct {
+ // required
+ Repo *repo_model.Repository
+ Doer *user_model.User
+ Event webhook_module.HookEventType
+
+ // optional
+ Ref git.RefName
+ Payload api.Payloader
+ PullRequest *issues_model.PullRequest
+}
+
+func newNotifyInput(repo *repo_model.Repository, doer *user_model.User, event webhook_module.HookEventType) *notifyInput {
+ return &notifyInput{
+ Repo: repo,
+ Doer: doer,
+ Event: event,
+ }
+}
+
+func newNotifyInputForSchedules(repo *repo_model.Repository) *notifyInput {
+ // the doer here will be ignored because we always use the actions user when handling schedules
+ return newNotifyInput(repo, user_model.NewActionsUser(), webhook_module.HookEventSchedule)
+}
+
+func (input *notifyInput) WithDoer(doer *user_model.User) *notifyInput {
+ input.Doer = doer
+ return input
+}
+
+func (input *notifyInput) WithRef(ref string) *notifyInput {
+ input.Ref = git.RefName(ref)
+ return input
+}
+
+func (input *notifyInput) WithPayload(payload api.Payloader) *notifyInput {
+ input.Payload = payload
+ return input
+}
+
+func (input *notifyInput) WithPullRequest(pr *issues_model.PullRequest) *notifyInput {
+ input.PullRequest = pr
+ if input.Ref == "" {
+ input.Ref = git.RefName(pr.GetGitRefName())
+ }
+ return input
+}
+
+func (input *notifyInput) Notify(ctx context.Context) {
+ log.Trace("execute %v for event %v whose doer is %v", getMethod(ctx), input.Event, input.Doer.Name)
+
+ if err := notify(ctx, input); err != nil {
+ log.Error("an error occurred while executing the %s actions method: %v", getMethod(ctx), err)
+ }
+}
+
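+// notify detects the workflows triggered by the given event on the repository and creates the
+// corresponding action runs (and schedule tasks when the event targets the default branch).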
+func notify(ctx context.Context, input *notifyInput) error {
+ if input.Doer.IsActions() {
+ // avoid triggering cyclically; for example,
+ // a comment on an issue triggers the runner to add a reply comment,
+ // and that new comment would trigger the runner again.
+ log.Debug("ignore executing %v for event %v whose doer is %v", getMethod(ctx), input.Event, input.Doer.Name)
+ return nil
+ }
+ if input.Repo.IsEmpty || input.Repo.IsArchived {
+ return nil
+ }
+ if unit_model.TypeActions.UnitGlobalDisabled() {
+ if err := actions_model.CleanRepoScheduleTasks(ctx, input.Repo, true); err != nil {
+ log.Error("CleanRepoScheduleTasks: %v", err)
+ }
+ return nil
+ }
+ if err := input.Repo.LoadUnits(ctx); err != nil {
+ return fmt.Errorf("repo.LoadUnits: %w", err)
+ } else if !input.Repo.UnitEnabled(ctx, unit_model.TypeActions) {
+ return nil
+ }
+
+ gitRepo, err := gitrepo.OpenRepository(context.Background(), input.Repo)
+ if err != nil {
+ return fmt.Errorf("git.OpenRepository: %w", err)
+ }
+ defer gitRepo.Close()
+
+ ref := input.Ref
+ if ref.BranchName() != input.Repo.DefaultBranch && actions_module.IsDefaultBranchWorkflow(input.Event) {
+ if ref != "" {
+ log.Warn("Event %q should only trigger workflows on the default branch, but its ref is %q. Will fall back to the default branch",
+ input.Event, ref)
+ }
+ ref = git.RefNameFromBranch(input.Repo.DefaultBranch)
+ }
+ if ref == "" {
+ log.Warn("Ref of event %q is empty, will fall back to the default branch", input.Event)
+ ref = git.RefNameFromBranch(input.Repo.DefaultBranch)
+ }
+
+ commitID, err := gitRepo.GetRefCommitID(ref.String())
+ if err != nil {
+ return fmt.Errorf("gitRepo.GetRefCommitID: %w", err)
+ }
+
+ // Get the commit object for the ref
+ commit, err := gitRepo.GetCommit(commitID)
+ if err != nil {
+ return fmt.Errorf("gitRepo.GetCommit: %w", err)
+ }
+
+ if skipWorkflows(input, commit) {
+ return nil
+ }
+
+ if SkipPullRequestEvent(ctx, input.Event, input.Repo.ID, commit.ID.String()) {
+ log.Trace("repo %s with commit %s skip event %v", input.Repo.RepoPath(), commit.ID, input.Event)
+ return nil
+ }
+
+ var detectedWorkflows []*actions_module.DetectedWorkflow
+ actionsConfig := input.Repo.MustGetUnit(ctx, unit_model.TypeActions).ActionsConfig()
+ shouldDetectSchedules := input.Event == webhook_module.HookEventPush && input.Ref.BranchName() == input.Repo.DefaultBranch
+ workflows, schedules, err := actions_module.DetectWorkflows(gitRepo, commit,
+ input.Event,
+ input.Payload,
+ shouldDetectSchedules,
+ )
+ if err != nil {
+ return fmt.Errorf("DetectWorkflows: %w", err)
+ }
+
+ log.Trace("repo %s with commit %s event %s find %d workflows and %d schedules",
+ input.Repo.RepoPath(),
+ commit.ID,
+ input.Event,
+ len(workflows),
+ len(schedules),
+ )
+
+ for _, wf := range workflows {
+ if actionsConfig.IsWorkflowDisabled(wf.EntryName) {
+ log.Trace("repo %s has disable workflows %s", input.Repo.RepoPath(), wf.EntryName)
+ continue
+ }
+
+ if wf.TriggerEvent.Name != actions_module.GithubEventPullRequestTarget {
+ detectedWorkflows = append(detectedWorkflows, wf)
+ }
+ }
+
+ if input.PullRequest != nil {
+ // detect pull_request_target workflows
+ baseRef := git.BranchPrefix + input.PullRequest.BaseBranch
+ baseCommit, err := gitRepo.GetCommit(baseRef)
+ if err != nil {
+ if prp, ok := input.Payload.(*api.PullRequestPayload); ok && errors.Is(err, util.ErrNotExist) {
+ // the baseBranch was deleted and the PR closed: the action can be skipped
+ if prp.Action == api.HookIssueClosed {
+ return nil
+ }
+ }
+ return fmt.Errorf("gitRepo.GetCommit: %w", err)
+ }
+ baseWorkflows, _, err := actions_module.DetectWorkflows(gitRepo, baseCommit, input.Event, input.Payload, false)
+ if err != nil {
+ return fmt.Errorf("DetectWorkflows: %w", err)
+ }
+ if len(baseWorkflows) == 0 {
+ log.Trace("repo %s with commit %s couldn't find pull_request_target workflows", input.Repo.RepoPath(), baseCommit.ID)
+ } else {
+ for _, wf := range baseWorkflows {
+ if wf.TriggerEvent.Name == actions_module.GithubEventPullRequestTarget {
+ detectedWorkflows = append(detectedWorkflows, wf)
+ }
+ }
+ }
+ }
+
+ if shouldDetectSchedules {
+ if err := handleSchedules(ctx, schedules, commit, input, ref.String()); err != nil {
+ return err
+ }
+ }
+
+ return handleWorkflows(ctx, detectedWorkflows, commit, input, ref.String())
+}
+
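+// SkipPullRequestEvent returns true if an Actions run for the pull_request event already exists
+// for the same repo and commit, so the corresponding pull_request_sync event can be skipped.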
+func SkipPullRequestEvent(ctx context.Context, event webhook_module.HookEventType, repoID int64, commitSHA string) bool {
+ if event != webhook_module.HookEventPullRequestSync {
+ return false
+ }
+
+ run := actions_model.ActionRun{
+ Event: webhook_module.HookEventPullRequest,
+ RepoID: repoID,
+ CommitSHA: commitSHA,
+ }
+ exist, err := db.GetEngine(ctx).Exist(&run)
+ if err != nil {
+ log.Error("Exist ActionRun %v: %v", run, err)
+ return false
+ }
+ return exist
+}
+
+func skipWorkflows(input *notifyInput, commit *git.Commit) bool {
+ // skip workflow runs if the commit message or PR title contains a configured skip-ci string and the event is push or pull_request(_sync)
+ // https://docs.github.com/en/actions/managing-workflow-runs/skipping-workflow-runs
+ skipWorkflowEvents := []webhook_module.HookEventType{
+ webhook_module.HookEventPush,
+ webhook_module.HookEventPullRequest,
+ webhook_module.HookEventPullRequestSync,
+ }
+ if slices.Contains(skipWorkflowEvents, input.Event) {
+ for _, s := range setting.Actions.SkipWorkflowStrings {
+ if input.PullRequest != nil && strings.Contains(input.PullRequest.Issue.Title, s) {
+ log.Debug("repo %s: skipped run for pr %v because of %s string", input.Repo.RepoPath(), input.PullRequest.Issue.ID, s)
+ return true
+ }
+ if strings.Contains(commit.CommitMessage, s) {
+ log.Debug("repo %s with commit %s: skipped run because of %s string", input.Repo.RepoPath(), commit.ID, s)
+ return true
+ }
+ }
+ }
+ return false
+}
+
+func handleWorkflows(
+ ctx context.Context,
+ detectedWorkflows []*actions_module.DetectedWorkflow,
+ commit *git.Commit,
+ input *notifyInput,
+ ref string,
+) error {
+ if len(detectedWorkflows) == 0 {
+ log.Trace("repo %s with commit %s couldn't find workflows", input.Repo.RepoPath(), commit.ID)
+ return nil
+ }
+
+ p, err := json.Marshal(input.Payload)
+ if err != nil {
+ return fmt.Errorf("json.Marshal: %w", err)
+ }
+
+ isForkPullRequest := false
+ if pr := input.PullRequest; pr != nil {
+ switch pr.Flow {
+ case issues_model.PullRequestFlowGithub:
+ isForkPullRequest = pr.IsFromFork()
+ case issues_model.PullRequestFlowAGit:
+ // There is no fork concept in the AGit flow: anyone with read permission can push refs/for/<target-branch>/<topic-branch> to the repo.
+ // So treat it as a fork pull request, since it may come from an untrusted user.
+ isForkPullRequest = true
+ default:
+ // unknown flow, assume it's a fork pull request to be safe
+ isForkPullRequest = true
+ }
+ }
+
+ for _, dwf := range detectedWorkflows {
+ run := &actions_model.ActionRun{
+ Title: strings.SplitN(commit.CommitMessage, "\n", 2)[0],
+ RepoID: input.Repo.ID,
+ OwnerID: input.Repo.OwnerID,
+ WorkflowID: dwf.EntryName,
+ TriggerUserID: input.Doer.ID,
+ Ref: ref,
+ CommitSHA: commit.ID.String(),
+ IsForkPullRequest: isForkPullRequest,
+ Event: input.Event,
+ EventPayload: string(p),
+ TriggerEvent: dwf.TriggerEvent.Name,
+ Status: actions_model.StatusWaiting,
+ }
+
+ need, err := ifNeedApproval(ctx, run, input.Repo, input.Doer)
+ if err != nil {
+ log.Error("check if need approval for repo %d with user %d: %v", input.Repo.ID, input.Doer.ID, err)
+ continue
+ }
+
+ run.NeedApproval = need
+
+ if err := run.LoadAttributes(ctx); err != nil {
+ log.Error("LoadAttributes: %v", err)
+ continue
+ }
+
+ vars, err := actions_model.GetVariablesOfRun(ctx, run)
+ if err != nil {
+ log.Error("GetVariablesOfRun: %v", err)
+ continue
+ }
+
+ jobs, err := jobparser.Parse(dwf.Content, jobparser.WithVars(vars))
+ if err != nil {
+ log.Error("jobparser.Parse: %v", err)
+ continue
+ }
+
+ // cancel running jobs if the event is push or pull_request_sync
+ if run.Event == webhook_module.HookEventPush ||
+ run.Event == webhook_module.HookEventPullRequestSync {
+ if err := actions_model.CancelPreviousJobs(
+ ctx,
+ run.RepoID,
+ run.Ref,
+ run.WorkflowID,
+ run.Event,
+ ); err != nil {
+ log.Error("CancelPreviousJobs: %v", err)
+ }
+ }
+
+ if err := actions_model.InsertRun(ctx, run, jobs); err != nil {
+ log.Error("InsertRun: %v", err)
+ continue
+ }
+
+ alljobs, err := db.Find[actions_model.ActionRunJob](ctx, actions_model.FindRunJobOptions{RunID: run.ID})
+ if err != nil {
+ log.Error("FindRunJobs: %v", err)
+ continue
+ }
+ CreateCommitStatus(ctx, alljobs...)
+ }
+ return nil
+}
+
+func newNotifyInputFromIssue(issue *issues_model.Issue, event webhook_module.HookEventType) *notifyInput {
+ return newNotifyInput(issue.Repo, issue.Poster, event)
+}
+
+func notifyRelease(ctx context.Context, doer *user_model.User, rel *repo_model.Release, action api.HookReleaseAction) {
+ if err := rel.LoadAttributes(ctx); err != nil {
+ log.Error("LoadAttributes: %v", err)
+ return
+ }
+
+ permission, _ := access_model.GetUserRepoPermission(ctx, rel.Repo, doer)
+
+ newNotifyInput(rel.Repo, doer, webhook_module.HookEventRelease).
+ WithRef(git.RefNameFromTag(rel.TagName).String()).
+ WithPayload(&api.ReleasePayload{
+ Action: action,
+ Release: convert.ToAPIRelease(ctx, rel.Repo, rel),
+ Repository: convert.ToRepo(ctx, rel.Repo, permission),
+ Sender: convert.ToUser(ctx, doer, nil),
+ }).
+ Notify(ctx)
+}
+
+func notifyPackage(ctx context.Context, sender *user_model.User, pd *packages_model.PackageDescriptor, action api.HookPackageAction) {
+ if pd.Repository == nil {
+ // A package uploaded to an organization (rather than to a repository) also triggers this event,
+ // so the repository can be nil; however, Actions can't support that yet.
+ // See https://github.com/go-gitea/gitea/pull/17940
+ return
+ }
+
+ apiPackage, err := convert.ToPackage(ctx, pd, sender)
+ if err != nil {
+ log.Error("Error converting package: %v", err)
+ return
+ }
+
+ newNotifyInput(pd.Repository, sender, webhook_module.HookEventPackage).
+ WithPayload(&api.PackagePayload{
+ Action: action,
+ Package: apiPackage,
+ Sender: convert.ToUser(ctx, sender, nil),
+ }).
+ Notify(ctx)
+}
+
+func ifNeedApproval(ctx context.Context, run *actions_model.ActionRun, repo *repo_model.Repository, user *user_model.User) (bool, error) {
+ // 1. don't need approval if it's not a fork PR
+ // 2. don't need approval if the event is `pull_request_target` since the workflow will run in the context of the base branch
+ // see https://docs.github.com/en/actions/managing-workflow-runs/approving-workflow-runs-from-public-forks#about-workflow-runs-from-public-forks
+ if !run.IsForkPullRequest || run.TriggerEvent == actions_module.GithubEventPullRequestTarget {
+ return false, nil
+ }
+
+ // always need approval if the user is restricted
+ if user.IsRestricted {
+ log.Trace("need approval because user %d is restricted", user.ID)
+ return true, nil
+ }
+
+ // don't need approval if the user can write
+ if perm, err := access_model.GetUserRepoPermission(ctx, repo, user); err != nil {
+ return false, fmt.Errorf("GetUserRepoPermission: %w", err)
+ } else if perm.CanWrite(unit_model.TypeActions) {
+ log.Trace("do not need approval because user %d can write", user.ID)
+ return false, nil
+ }
+
+ // don't need approval if the user has been approved before
+ if count, err := db.Count[actions_model.ActionRun](ctx, actions_model.FindRunOptions{
+ RepoID: repo.ID,
+ TriggerUserID: user.ID,
+ Approved: true,
+ }); err != nil {
+ return false, fmt.Errorf("CountRuns: %w", err)
+ } else if count > 0 {
+ log.Trace("do not need approval because user %d has been approved before", user.ID)
+ return false, nil
+ }
+
+ // otherwise, need approval
+ log.Trace("need approval because it's the first time user %d triggered actions", user.ID)
+ return true, nil
+}
+
+func handleSchedules(
+ ctx context.Context,
+ detectedWorkflows []*actions_module.DetectedWorkflow,
+ commit *git.Commit,
+ input *notifyInput,
+ _ string,
+) error {
+ branch, err := commit.GetBranchName()
+ if err != nil {
+ return err
+ }
+ if branch != input.Repo.DefaultBranch {
+ log.Trace("commit branch is not default branch in repo")
+ return nil
+ }
+
+ if count, err := db.Count[actions_model.ActionSchedule](ctx, actions_model.FindScheduleOptions{RepoID: input.Repo.ID}); err != nil {
+ log.Error("CountSchedules: %v", err)
+ return err
+ } else if count > 0 {
+ if err := actions_model.CleanRepoScheduleTasks(ctx, input.Repo, false); err != nil {
+ log.Error("CleanRepoScheduleTasks: %v", err)
+ }
+ }
+
+ if len(detectedWorkflows) == 0 {
+ log.Trace("repo %s with commit %s couldn't find schedules", input.Repo.RepoPath(), commit.ID)
+ return nil
+ }
+
+ payload := &api.SchedulePayload{
+ Action: api.HookScheduleCreated,
+ }
+
+ p, err := json.Marshal(payload)
+ if err != nil {
+ return fmt.Errorf("json.Marshal: %w", err)
+ }
+
+ crons := make([]*actions_model.ActionSchedule, 0, len(detectedWorkflows))
+ for _, dwf := range detectedWorkflows {
+ // Check the cron job condition. Schedules only work on the default branch.
+ workflow, err := model.ReadWorkflow(bytes.NewReader(dwf.Content))
+ if err != nil {
+ log.Error("ReadWorkflow: %v", err)
+ continue
+ }
+ schedules := workflow.OnSchedule()
+ if len(schedules) == 0 {
+ log.Warn("no schedule event")
+ continue
+ }
+
+ run := &actions_model.ActionSchedule{
+ Title: strings.SplitN(commit.CommitMessage, "\n", 2)[0],
+ RepoID: input.Repo.ID,
+ OwnerID: input.Repo.OwnerID,
+ WorkflowID: dwf.EntryName,
+ TriggerUserID: user_model.ActionsUserID,
+ Ref: input.Repo.DefaultBranch,
+ CommitSHA: commit.ID.String(),
+ Event: input.Event,
+ EventPayload: string(p),
+ Specs: schedules,
+ Content: dwf.Content,
+ }
+ crons = append(crons, run)
+ }
+
+ return actions_model.CreateScheduleTask(ctx, crons)
+}
+
+// DetectAndHandleSchedules detects the schedule workflows on the default branch and creates schedule tasks
+func DetectAndHandleSchedules(ctx context.Context, repo *repo_model.Repository) error {
+ if repo.IsEmpty || repo.IsArchived {
+ return nil
+ }
+
+ gitRepo, err := gitrepo.OpenRepository(context.Background(), repo)
+ if err != nil {
+ return fmt.Errorf("git.OpenRepository: %w", err)
+ }
+ defer gitRepo.Close()
+
+ // Only detect schedule workflows on the default branch
+ commit, err := gitRepo.GetCommit(repo.DefaultBranch)
+ if err != nil {
+ return fmt.Errorf("gitRepo.GetCommit: %w", err)
+ }
+ scheduleWorkflows, err := actions_module.DetectScheduledWorkflows(gitRepo, commit)
+ if err != nil {
+ return fmt.Errorf("detect schedule workflows: %w", err)
+ }
+ if len(scheduleWorkflows) == 0 {
+ return nil
+ }
+
+ // We need a notifyInput to call handleSchedules.
+ // If the repo is a mirror, the commit author may be an external user,
+ // so we use the actions user as the Doer of the notifyInput.
+ notifyInput := newNotifyInputForSchedules(repo)
+
+ return handleSchedules(ctx, scheduleWorkflows, commit, notifyInput, repo.DefaultBranch)
+}
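DetectAndHandleSchedules is the exported entry point that other services can call to refresh a repository's cron entries, for example after a mirror sync or a push to the default branch. A minimal sketch of such a caller follows; the package name and helper are illustrative and not part of this change:

package mirror_sync // hypothetical caller package

import (
	"context"

	repo_model "code.gitea.io/gitea/models/repo"
	"code.gitea.io/gitea/modules/log"
	actions_service "code.gitea.io/gitea/services/actions"
)

// refreshSchedules re-detects cron workflows on the repository's default branch.
func refreshSchedules(ctx context.Context, repo *repo_model.Repository) {
	if err := actions_service.DetectAndHandleSchedules(ctx, repo); err != nil {
		log.Error("DetectAndHandleSchedules: %v", err)
	}
}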
diff --git a/services/actions/notifier_helper_test.go b/services/actions/notifier_helper_test.go
new file mode 100644
index 0000000..0fa40c0
--- /dev/null
+++ b/services/actions/notifier_helper_test.go
@@ -0,0 +1,51 @@
+// Copyright 2024 The Forgejo Authors
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "testing"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/unittest"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func Test_SkipPullRequestEvent(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ repoID := int64(1)
+ commitSHA := "1234"
+
+ // event is not webhook_module.HookEventPullRequestSync, never skip
+ assert.False(t, SkipPullRequestEvent(db.DefaultContext, webhook_module.HookEventPullRequest, repoID, commitSHA))
+
+ // event is webhook_module.HookEventPullRequestSync but there is nothing in the ActionRun table, do not skip
+ assert.False(t, SkipPullRequestEvent(db.DefaultContext, webhook_module.HookEventPullRequestSync, repoID, commitSHA))
+
+ // there is a webhook_module.HookEventPullRequest event but the SHA is different, do not skip
+ index := int64(1)
+ run := &actions_model.ActionRun{
+ Index: index,
+ Event: webhook_module.HookEventPullRequest,
+ RepoID: repoID,
+ CommitSHA: "othersha",
+ }
+ unittest.AssertSuccessfulInsert(t, run)
+ assert.False(t, SkipPullRequestEvent(db.DefaultContext, webhook_module.HookEventPullRequestSync, repoID, commitSHA))
+
+ // there already is a webhook_module.HookEventPullRequest with the same SHA, skip
+ index++
+ run = &actions_model.ActionRun{
+ Index: index,
+ Event: webhook_module.HookEventPullRequest,
+ RepoID: repoID,
+ CommitSHA: commitSHA,
+ }
+ unittest.AssertSuccessfulInsert(t, run)
+ assert.True(t, SkipPullRequestEvent(db.DefaultContext, webhook_module.HookEventPullRequestSync, repoID, commitSHA))
+}
diff --git a/services/actions/rerun.go b/services/actions/rerun.go
new file mode 100644
index 0000000..60f6650
--- /dev/null
+++ b/services/actions/rerun.go
@@ -0,0 +1,38 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/modules/container"
+)
+
+// GetAllRerunJobs returns all jobs that need to be rerun when the given job is rerun
+func GetAllRerunJobs(job *actions_model.ActionRunJob, allJobs []*actions_model.ActionRunJob) []*actions_model.ActionRunJob {
+ rerunJobs := []*actions_model.ActionRunJob{job}
+ rerunJobsIDSet := make(container.Set[string])
+ rerunJobsIDSet.Add(job.JobID)
+
+ for {
+ found := false
+ for _, j := range allJobs {
+ if rerunJobsIDSet.Contains(j.JobID) {
+ continue
+ }
+ for _, need := range j.Needs {
+ if rerunJobsIDSet.Contains(need) {
+ found = true
+ rerunJobs = append(rerunJobs, j)
+ rerunJobsIDSet.Add(j.JobID)
+ break
+ }
+ }
+ }
+ if !found {
+ break
+ }
+ }
+
+ return rerunJobs
+}
diff --git a/services/actions/rerun_test.go b/services/actions/rerun_test.go
new file mode 100644
index 0000000..a98de7b
--- /dev/null
+++ b/services/actions/rerun_test.go
@@ -0,0 +1,48 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "testing"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestGetAllRerunJobs(t *testing.T) {
+ job1 := &actions_model.ActionRunJob{JobID: "job1"}
+ job2 := &actions_model.ActionRunJob{JobID: "job2", Needs: []string{"job1"}}
+ job3 := &actions_model.ActionRunJob{JobID: "job3", Needs: []string{"job2"}}
+ job4 := &actions_model.ActionRunJob{JobID: "job4", Needs: []string{"job2", "job3"}}
+
+ jobs := []*actions_model.ActionRunJob{job1, job2, job3, job4}
+
+ testCases := []struct {
+ job *actions_model.ActionRunJob
+ rerunJobs []*actions_model.ActionRunJob
+ }{
+ {
+ job1,
+ []*actions_model.ActionRunJob{job1, job2, job3, job4},
+ },
+ {
+ job2,
+ []*actions_model.ActionRunJob{job2, job3, job4},
+ },
+ {
+ job3,
+ []*actions_model.ActionRunJob{job3, job4},
+ },
+ {
+ job4,
+ []*actions_model.ActionRunJob{job4},
+ },
+ }
+
+ for _, tc := range testCases {
+ rerunJobs := GetAllRerunJobs(tc.job, jobs)
+ assert.ElementsMatch(t, tc.rerunJobs, rerunJobs)
+ }
+}
diff --git a/services/actions/schedule_tasks.go b/services/actions/schedule_tasks.go
new file mode 100644
index 0000000..18f3324
--- /dev/null
+++ b/services/actions/schedule_tasks.go
@@ -0,0 +1,154 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "context"
+ "fmt"
+ "time"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/timeutil"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+
+ "github.com/nektos/act/pkg/jobparser"
+)
+
+// StartScheduleTasks starts the schedule tasks
+func StartScheduleTasks(ctx context.Context) error {
+ return startTasks(ctx)
+}
+
+// startTasks retrieves specifications in pages, creates a schedule task for each specification,
+// and updates the specification's next run time and previous run time.
+// The function returns an error if there's an issue with finding or updating the specifications.
+func startTasks(ctx context.Context) error {
+ // Set the page size
+ pageSize := 50
+
+ // Retrieve specs in pages until all specs have been retrieved
+ now := time.Now()
+ for page := 1; ; page++ {
+ // Retrieve the specs for the current page
+ specs, _, err := actions_model.FindSpecs(ctx, actions_model.FindSpecOptions{
+ ListOptions: db.ListOptions{
+ Page: page,
+ PageSize: pageSize,
+ },
+ Next: now.Unix(),
+ })
+ if err != nil {
+ return fmt.Errorf("find specs: %w", err)
+ }
+
+ if err := specs.LoadRepos(ctx); err != nil {
+ return fmt.Errorf("LoadRepos: %w", err)
+ }
+
+ // Loop through each spec and create a schedule task for it
+ for _, row := range specs {
+ // cancel running jobs if the event is push
+ if row.Schedule.Event == webhook_module.HookEventPush {
+ // cancel running jobs of the same workflow
+ if err := actions_model.CancelPreviousJobs(
+ ctx,
+ row.RepoID,
+ row.Schedule.Ref,
+ row.Schedule.WorkflowID,
+ webhook_module.HookEventSchedule,
+ ); err != nil {
+ log.Error("CancelPreviousJobs: %v", err)
+ }
+ }
+
+ if row.Repo.IsArchived {
+ // Skip if the repo is archived
+ continue
+ }
+
+ cfg, err := row.Repo.GetUnit(ctx, unit.TypeActions)
+ if err != nil {
+ if repo_model.IsErrUnitTypeNotExist(err) {
+ // Skip if the Actions unit of this repo is disabled.
+ continue
+ }
+ return fmt.Errorf("GetUnit: %w", err)
+ }
+ if cfg.ActionsConfig().IsWorkflowDisabled(row.Schedule.WorkflowID) {
+ continue
+ }
+
+ if err := CreateScheduleTask(ctx, row.Schedule); err != nil {
+ log.Error("CreateScheduleTask: %v", err)
+ return err
+ }
+
+ // Parse the spec
+ schedule, err := row.Parse()
+ if err != nil {
+ log.Error("Parse: %v", err)
+ return err
+ }
+
+ // Update the spec's next run time and previous run time
+ row.Prev = row.Next
+ row.Next = timeutil.TimeStamp(schedule.Next(now.Add(1 * time.Minute)).Unix())
+ if err := actions_model.UpdateScheduleSpec(ctx, row, "prev", "next"); err != nil {
+ log.Error("UpdateScheduleSpec: %v", err)
+ return err
+ }
+ }
+
+ // Stop if all specs have been retrieved
+ if len(specs) < pageSize {
+ break
+ }
+ }
+
+ return nil
+}
+
+// CreateScheduleTask creates a scheduled task from a cron action schedule.
+// It builds an action run from the schedule and inserts it, together with its parsed jobs, into the database.
+func CreateScheduleTask(ctx context.Context, cron *actions_model.ActionSchedule) error {
+ // Create a new action run based on the schedule
+ run := &actions_model.ActionRun{
+ Title: cron.Title,
+ RepoID: cron.RepoID,
+ OwnerID: cron.OwnerID,
+ WorkflowID: cron.WorkflowID,
+ TriggerUserID: cron.TriggerUserID,
+ Ref: cron.Ref,
+ CommitSHA: cron.CommitSHA,
+ Event: cron.Event,
+ EventPayload: cron.EventPayload,
+ TriggerEvent: string(webhook_module.HookEventSchedule),
+ ScheduleID: cron.ID,
+ Status: actions_model.StatusWaiting,
+ }
+
+ vars, err := actions_model.GetVariablesOfRun(ctx, run)
+ if err != nil {
+ log.Error("GetVariablesOfRun: %v", err)
+ return err
+ }
+
+ // Parse the workflow specification from the cron schedule
+ workflows, err := jobparser.Parse(cron.Content, jobparser.WithVars(vars))
+ if err != nil {
+ return err
+ }
+
+ // Insert the action run and its associated jobs into the database
+ if err := actions_model.InsertRun(ctx, run, workflows); err != nil {
+ return err
+ }
+
+ // Return nil if no errors occurred
+ return nil
+}
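StartScheduleTasks is designed to be invoked periodically. The loop below only illustrates the general shape of such a driver, with an assumed package alias for this service; it is not how the task is actually registered:

package cron_driver // illustrative only

import (
	"context"
	"time"

	"code.gitea.io/gitea/modules/log"
	actions_service "code.gitea.io/gitea/services/actions"
)

// runScheduler polls once per minute and enqueues any schedule specs that are due.
func runScheduler(ctx context.Context) {
	ticker := time.NewTicker(time.Minute)
	defer ticker.Stop()
	for {
		select {
		case <-ctx.Done():
			return
		case <-ticker.C:
			if err := actions_service.StartScheduleTasks(ctx); err != nil {
				log.Error("StartScheduleTasks: %v", err)
			}
		}
	}
}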
diff --git a/services/actions/variables.go b/services/actions/variables.go
new file mode 100644
index 0000000..8dde9c4
--- /dev/null
+++ b/services/actions/variables.go
@@ -0,0 +1,100 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "context"
+ "regexp"
+ "strings"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/util"
+ secret_service "code.gitea.io/gitea/services/secrets"
+)
+
+func CreateVariable(ctx context.Context, ownerID, repoID int64, name, data string) (*actions_model.ActionVariable, error) {
+ if err := secret_service.ValidateName(name); err != nil {
+ return nil, err
+ }
+
+ if err := envNameCIRegexMatch(name); err != nil {
+ return nil, err
+ }
+
+ v, err := actions_model.InsertVariable(ctx, ownerID, repoID, name, util.ReserveLineBreakForTextarea(data))
+ if err != nil {
+ return nil, err
+ }
+
+ return v, nil
+}
+
+func UpdateVariable(ctx context.Context, variableID int64, name, data string) (bool, error) {
+ if err := secret_service.ValidateName(name); err != nil {
+ return false, err
+ }
+
+ if err := envNameCIRegexMatch(name); err != nil {
+ return false, err
+ }
+
+ return actions_model.UpdateVariable(ctx, &actions_model.ActionVariable{
+ ID: variableID,
+ Name: strings.ToUpper(name),
+ Data: util.ReserveLineBreakForTextarea(data),
+ })
+}
+
+func DeleteVariableByID(ctx context.Context, variableID int64) error {
+ return actions_model.DeleteVariable(ctx, variableID)
+}
+
+func DeleteVariableByName(ctx context.Context, ownerID, repoID int64, name string) error {
+ if err := secret_service.ValidateName(name); err != nil {
+ return err
+ }
+
+ if err := envNameCIRegexMatch(name); err != nil {
+ return err
+ }
+
+ v, err := GetVariable(ctx, actions_model.FindVariablesOpts{
+ OwnerID: ownerID,
+ RepoID: repoID,
+ Name: name,
+ })
+ if err != nil {
+ return err
+ }
+
+ return actions_model.DeleteVariable(ctx, v.ID)
+}
+
+func GetVariable(ctx context.Context, opts actions_model.FindVariablesOpts) (*actions_model.ActionVariable, error) {
+ vars, err := actions_model.FindVariables(ctx, opts)
+ if err != nil {
+ return nil, err
+ }
+ if len(vars) != 1 {
+ return nil, util.NewNotExistErrorf("variable not found")
+ }
+ return vars[0], nil
+}
+
+// Naming rules for `variables` and `secrets`; see:
+// https://docs.github.com/en/actions/learn-github-actions/variables#naming-conventions-for-configuration-variables
+// https://docs.github.com/en/actions/security-guides/encrypted-secrets#naming-your-secrets
+var (
+ forbiddenEnvNameCIRx = regexp.MustCompile("(?i)^CI")
+)
+
+func envNameCIRegexMatch(name string) error {
+ if forbiddenEnvNameCIRx.MatchString(name) {
+ log.Error("Env Name cannot be ci")
+ return util.NewInvalidArgumentErrorf("env name cannot be ci")
+ }
+ return nil
+}
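The validation above means a variable or secret name may not start with "CI" in any case, on top of whatever secret_service.ValidateName enforces. A hedged usage sketch; the package name and the owner/repo IDs are placeholders:

package actions_example // illustrative only

import (
	"context"
	"errors"

	actions_service "code.gitea.io/gitea/services/actions"
)

func createVariables(ctx context.Context) error {
	// Accepted (subject to the additional rules in secret_service.ValidateName).
	if _, err := actions_service.CreateVariable(ctx, 1 /* ownerID */, 0 /* repoID */, "MY_TOKEN", "value"); err != nil {
		return err
	}
	// Rejected: names starting with "CI" (case-insensitive) are forbidden.
	if _, err := actions_service.CreateVariable(ctx, 1, 0, "ci_token", "value"); err == nil {
		return errors.New("expected ci_token to be rejected")
	}
	return nil
}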
diff --git a/services/actions/workflows.go b/services/actions/workflows.go
new file mode 100644
index 0000000..e2fb316
--- /dev/null
+++ b/services/actions/workflows.go
@@ -0,0 +1,173 @@
+// Copyright The Forgejo Authors.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "bytes"
+ "context"
+ "errors"
+ "fmt"
+ "strconv"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/perm"
+ "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/actions"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/webhook"
+ "code.gitea.io/gitea/services/convert"
+
+ "github.com/nektos/act/pkg/jobparser"
+ act_model "github.com/nektos/act/pkg/model"
+)
+
+type InputRequiredErr struct {
+ Name string
+}
+
+func (err InputRequiredErr) Error() string {
+ return fmt.Sprintf("input required for '%s'", err.Name)
+}
+
+func IsInputRequiredErr(err error) bool {
+ _, ok := err.(InputRequiredErr)
+ return ok
+}
+
+type Workflow struct {
+ WorkflowID string
+ Ref string
+ Commit *git.Commit
+ GitEntry *git.TreeEntry
+}
+
+type InputValueGetter func(key string) string
+
+func (entry *Workflow) Dispatch(ctx context.Context, inputGetter InputValueGetter, repo *repo_model.Repository, doer *user.User) error {
+ content, err := actions.GetContentFromEntry(entry.GitEntry)
+ if err != nil {
+ return err
+ }
+
+ wf, err := act_model.ReadWorkflow(bytes.NewReader(content))
+ if err != nil {
+ return err
+ }
+
+ fullWorkflowID := ".forgejo/workflows/" + entry.WorkflowID
+
+ title := wf.Name
+ if len(title) < 1 {
+ title = fullWorkflowID
+ }
+
+ inputs := make(map[string]string)
+ if workflowDispatch := wf.WorkflowDispatchConfig(); workflowDispatch != nil {
+ for key, input := range workflowDispatch.Inputs {
+ val := inputGetter(key)
+ if len(val) == 0 {
+ val = input.Default
+ if len(val) == 0 {
+ if input.Required {
+ name := input.Description
+ if len(name) == 0 {
+ name = key
+ }
+ return InputRequiredErr{Name: name}
+ }
+ continue
+ }
+ } else if input.Type == "boolean" {
+ // Since "boolean" inputs are rendered as a checkbox in html, the value inside the form is "on"
+ val = strconv.FormatBool(val == "on")
+ }
+ inputs[key] = val
+ }
+ }
+
+ if int64(len(inputs)) > setting.Actions.LimitDispatchInputs {
+ return errors.New("to many inputs")
+ }
+
+ payload := &structs.WorkflowDispatchPayload{
+ Inputs: inputs,
+ Ref: entry.Ref,
+ Repository: convert.ToRepo(ctx, repo, access.Permission{AccessMode: perm.AccessModeNone}),
+ Sender: convert.ToUser(ctx, doer, nil),
+ Workflow: fullWorkflowID,
+ }
+
+ p, err := json.Marshal(payload)
+ if err != nil {
+ return err
+ }
+
+ run := &actions_model.ActionRun{
+ Title: title,
+ RepoID: repo.ID,
+ Repo: repo,
+ OwnerID: repo.OwnerID,
+ WorkflowID: entry.WorkflowID,
+ TriggerUserID: doer.ID,
+ TriggerUser: doer,
+ Ref: entry.Ref,
+ CommitSHA: entry.Commit.ID.String(),
+ Event: webhook.HookEventWorkflowDispatch,
+ EventPayload: string(p),
+ TriggerEvent: string(webhook.HookEventWorkflowDispatch),
+ Status: actions_model.StatusWaiting,
+ }
+
+ vars, err := actions_model.GetVariablesOfRun(ctx, run)
+ if err != nil {
+ return err
+ }
+
+ jobs, err := jobparser.Parse(content, jobparser.WithVars(vars))
+ if err != nil {
+ return err
+ }
+
+ return actions_model.InsertRun(ctx, run, jobs)
+}
+
+func GetWorkflowFromCommit(gitRepo *git.Repository, ref, workflowID string) (*Workflow, error) {
+ ref, err := gitRepo.ExpandRef(ref)
+ if err != nil {
+ return nil, err
+ }
+
+ commit, err := gitRepo.GetCommit(ref)
+ if err != nil {
+ return nil, err
+ }
+
+ entries, err := actions.ListWorkflows(commit)
+ if err != nil {
+ return nil, err
+ }
+
+ var workflowEntry *git.TreeEntry
+ for _, entry := range entries {
+ if entry.Name() == workflowID {
+ workflowEntry = entry
+ break
+ }
+ }
+ if workflowEntry == nil {
+ return nil, errors.New("workflow not found")
+ }
+
+ return &Workflow{
+ WorkflowID: workflowID,
+ Ref: ref,
+ Commit: commit,
+ GitEntry: workflowEntry,
+ }, nil
+}
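A sketch of how a caller (for example, the route behind a "Run workflow" button) could combine GetWorkflowFromCommit and Dispatch; the surrounding handler, package name, and the url.Values form are assumptions for illustration:

package web_example // illustrative only

import (
	"context"
	"net/url"

	repo_model "code.gitea.io/gitea/models/repo"
	"code.gitea.io/gitea/models/user"
	"code.gitea.io/gitea/modules/git"
	actions_service "code.gitea.io/gitea/services/actions"
)

func dispatchWorkflow(ctx context.Context, gitRepo *git.Repository, repo *repo_model.Repository, doer *user.User, ref, workflowID string, form url.Values) error {
	wf, err := actions_service.GetWorkflowFromCommit(gitRepo, ref, workflowID)
	if err != nil {
		return err
	}
	// Feed the submitted form values to the workflow_dispatch inputs.
	return wf.Dispatch(ctx, form.Get, repo, doer)
}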