author    Daniel Baumann <daniel@debian.org>  2024-10-18 20:33:49 +0200
committer Daniel Baumann <daniel@debian.org>  2024-12-12 23:57:56 +0100
commit    e68b9d00a6e05b3a941f63ffb696f91e554ac5ec
tree      97775d6c13b0f416af55314eb6a89ef792474615  /modules/actions
parent    Initial commit.
Adding upstream version 9.0.3.
Signed-off-by: Daniel Baumann <daniel@debian.org>
Diffstat:
 modules/actions/github.go          | 133
 modules/actions/github_test.go     | 119
 modules/actions/log.go             | 224
 modules/actions/task_state.go      | 123
 modules/actions/task_state_test.go | 165
 modules/actions/workflows.go       | 702
 modules/actions/workflows_test.go  | 163
 7 files changed, 1629 insertions(+), 0 deletions(-)
diff --git a/modules/actions/github.go b/modules/actions/github.go
new file mode 100644
index 0000000..c27d4ed
--- /dev/null
+++ b/modules/actions/github.go
@@ -0,0 +1,133 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+)
+
+const (
+ GithubEventPullRequest = "pull_request"
+ GithubEventPullRequestTarget = "pull_request_target"
+ GithubEventPullRequestReviewComment = "pull_request_review_comment"
+ GithubEventPullRequestReview = "pull_request_review"
+ GithubEventRegistryPackage = "registry_package"
+ GithubEventCreate = "create"
+ GithubEventDelete = "delete"
+ GithubEventFork = "fork"
+ GithubEventPush = "push"
+ GithubEventIssues = "issues"
+ GithubEventIssueComment = "issue_comment"
+ GithubEventRelease = "release"
+ GithubEventPullRequestComment = "pull_request_comment"
+ GithubEventGollum = "gollum"
+ GithubEventSchedule = "schedule"
+ GithubEventWorkflowDispatch = "workflow_dispatch"
+)
+
+// IsDefaultBranchWorkflow returns true if the event only triggers workflows on the default branch
+func IsDefaultBranchWorkflow(triggedEvent webhook_module.HookEventType) bool {
+ switch triggedEvent {
+ case webhook_module.HookEventDelete:
+ // GitHub "delete" event
+ // https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#delete
+ return true
+ case webhook_module.HookEventFork:
+ // GitHub "fork" event
+ // https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#fork
+ return true
+ case webhook_module.HookEventIssueComment:
+ // GitHub "issue_comment" event
+ // https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#issue_comment
+ return true
+ case webhook_module.HookEventPullRequestComment:
+ // GitHub "pull_request_comment" event
+ // https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_comment-use-issue_comment
+ return true
+ case webhook_module.HookEventWiki:
+ // GitHub "gollum" event
+ // https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#gollum
+ return true
+ case webhook_module.HookEventSchedule:
+ // GitHub "schedule" event
+ // https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#schedule
+ return true
+ case webhook_module.HookEventWorkflowDispatch:
+ // GitHub "workflow_dispatch" event
+ // https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_dispatch
+ return true
+ case webhook_module.HookEventIssues,
+ webhook_module.HookEventIssueAssign,
+ webhook_module.HookEventIssueLabel,
+ webhook_module.HookEventIssueMilestone:
+ // GitHub "issues" event
+ // https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#issues
+ return true
+ }
+
+ return false
+}
+
+// canGithubEventMatch checks if the input GitHub event can match any Gitea event.
+func canGithubEventMatch(eventName string, triggedEvent webhook_module.HookEventType) bool {
+ switch eventName {
+ case GithubEventRegistryPackage:
+ return triggedEvent == webhook_module.HookEventPackage
+
+ // See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#gollum
+ case GithubEventGollum:
+ return triggedEvent == webhook_module.HookEventWiki
+
+ case GithubEventWorkflowDispatch:
+ return triggedEvent == webhook_module.HookEventWorkflowDispatch
+
+ case GithubEventIssues:
+ switch triggedEvent {
+ case webhook_module.HookEventIssues,
+ webhook_module.HookEventIssueAssign,
+ webhook_module.HookEventIssueLabel,
+ webhook_module.HookEventIssueMilestone:
+ return true
+
+ default:
+ return false
+ }
+
+ case GithubEventPullRequest, GithubEventPullRequestTarget:
+ switch triggedEvent {
+ case webhook_module.HookEventPullRequest,
+ webhook_module.HookEventPullRequestSync,
+ webhook_module.HookEventPullRequestAssign,
+ webhook_module.HookEventPullRequestLabel,
+ webhook_module.HookEventPullRequestReviewRequest,
+ webhook_module.HookEventPullRequestMilestone:
+ return true
+
+ default:
+ return false
+ }
+
+ case GithubEventPullRequestReview:
+ switch triggedEvent {
+ case webhook_module.HookEventPullRequestReviewApproved,
+ webhook_module.HookEventPullRequestReviewComment,
+ webhook_module.HookEventPullRequestReviewRejected:
+ return true
+
+ default:
+ return false
+ }
+
+ case GithubEventIssueComment:
+ // https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_comment-use-issue_comment
+ return triggedEvent == webhook_module.HookEventIssueComment ||
+ triggedEvent == webhook_module.HookEventPullRequestComment
+
+ case GithubEventSchedule:
+ return triggedEvent == webhook_module.HookEventSchedule
+
+ default:
+ return eventName == string(triggedEvent)
+ }
+}
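
The two helpers above feed the workflow detection code added later in this patch (workflows.go). As a minimal sketch of how they relate, assuming it lives in the same `actions` package (the function name `eventMappingSketch` is made up for illustration):

package actions

import (
	"fmt"

	webhook_module "code.gitea.io/gitea/modules/webhook"
)

// eventMappingSketch shows how a Forgejo webhook event relates to the
// GitHub-style trigger names used in workflow YAML files.
func eventMappingSketch() {
	evt := webhook_module.HookEventIssueLabel

	// The "issues" family of events only triggers workflows on the default branch.
	fmt.Println(IsDefaultBranchWorkflow(evt)) // true

	// A workflow declaring `on: issues` matches the issue_label webhook event.
	fmt.Println(canGithubEventMatch(GithubEventIssues, evt)) // true
}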
diff --git a/modules/actions/github_test.go b/modules/actions/github_test.go
new file mode 100644
index 0000000..6652ff6
--- /dev/null
+++ b/modules/actions/github_test.go
@@ -0,0 +1,119 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "testing"
+
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestCanGithubEventMatch(t *testing.T) {
+ testCases := []struct {
+ desc string
+ eventName string
+ triggeredEvent webhook_module.HookEventType
+ expected bool
+ }{
+ // registry_package event
+ {
+ "registry_package matches",
+ GithubEventRegistryPackage,
+ webhook_module.HookEventPackage,
+ true,
+ },
+ {
+ "registry_package cannot match",
+ GithubEventRegistryPackage,
+ webhook_module.HookEventPush,
+ false,
+ },
+ // issues event
+ {
+ "issue matches",
+ GithubEventIssues,
+ webhook_module.HookEventIssueLabel,
+ true,
+ },
+ {
+ "issue cannot match",
+ GithubEventIssues,
+ webhook_module.HookEventIssueComment,
+ false,
+ },
+ // issue_comment event
+ {
+ "issue_comment matches",
+ GithubEventIssueComment,
+ webhook_module.HookEventIssueComment,
+ true,
+ },
+ {
+ "issue_comment cannot match",
+ GithubEventIssueComment,
+ webhook_module.HookEventIssues,
+ false,
+ },
+ // pull_request event
+ {
+ "pull_request matches",
+ GithubEventPullRequest,
+ webhook_module.HookEventPullRequestSync,
+ true,
+ },
+ {
+ "pull_request cannot match",
+ GithubEventPullRequest,
+ webhook_module.HookEventPullRequestComment,
+ false,
+ },
+ // pull_request_target event
+ {
+ "pull_request_target matches",
+ GithubEventPullRequest,
+ webhook_module.HookEventPullRequest,
+ true,
+ },
+ {
+ "pull_request_target cannot match",
+ GithubEventPullRequest,
+ webhook_module.HookEventPullRequestComment,
+ false,
+ },
+ // pull_request_review event
+ {
+ "pull_request_review matches",
+ GithubEventPullRequestReview,
+ webhook_module.HookEventPullRequestReviewComment,
+ true,
+ },
+ {
+ "pull_request_review cannot match",
+ GithubEventPullRequestReview,
+ webhook_module.HookEventPullRequestComment,
+ false,
+ },
+ // other events
+ {
+ "create event",
+ GithubEventCreate,
+ webhook_module.HookEventCreate,
+ true,
+ },
+ {
+ "create pull request comment",
+ GithubEventIssueComment,
+ webhook_module.HookEventPullRequestComment,
+ true,
+ },
+ }
+
+ for _, tc := range testCases {
+ t.Run(tc.desc, func(t *testing.T) {
+ assert.Equalf(t, tc.expected, canGithubEventMatch(tc.eventName, tc.triggeredEvent), "canGithubEventMatch(%v, %v)", tc.eventName, tc.triggeredEvent)
+ })
+ }
+}
diff --git a/modules/actions/log.go b/modules/actions/log.go
new file mode 100644
index 0000000..5a1425e
--- /dev/null
+++ b/modules/actions/log.go
@@ -0,0 +1,224 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "bufio"
+ "context"
+ "fmt"
+ "io"
+ "os"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models/dbfs"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/zstd"
+
+ runnerv1 "code.gitea.io/actions-proto-go/runner/v1"
+ "google.golang.org/protobuf/types/known/timestamppb"
+)
+
+const (
+ MaxLineSize = 64 * 1024
+ DBFSPrefix = "actions_log/"
+
+ timeFormat = "2006-01-02T15:04:05.0000000Z07:00"
+ defaultBufSize = MaxLineSize
+)
+
+// WriteLogs appends logs to DBFS file for temporary storage.
+// It doesn't respect the file format suffix in the filename (such as ".zst"), since it's difficult to reopen a closed compressed file and append new content.
+// Why doesn't it store logs in object storage directly? Because it's not efficient to append content to object storage.
+func WriteLogs(ctx context.Context, filename string, offset int64, rows []*runnerv1.LogRow) ([]int, error) {
+ flag := os.O_WRONLY
+ if offset == 0 {
+ // Create file only if offset is 0, or it could result in content holes if the file doesn't exist.
+ flag |= os.O_CREATE
+ }
+ name := DBFSPrefix + filename
+ f, err := dbfs.OpenFile(ctx, name, flag)
+ if err != nil {
+ return nil, fmt.Errorf("dbfs OpenFile %q: %w", name, err)
+ }
+ defer f.Close()
+
+ stat, err := f.Stat()
+ if err != nil {
+ return nil, fmt.Errorf("dbfs Stat %q: %w", name, err)
+ }
+ if stat.Size() < offset {
+ // If the size is less than offset, refuse to write, or it could result in content holes.
+ // However, if the size is greater than offset, we can still write to overwrite the content.
+ return nil, fmt.Errorf("size of %q is less than offset", name)
+ }
+
+ if _, err := f.Seek(offset, io.SeekStart); err != nil {
+ return nil, fmt.Errorf("dbfs Seek %q: %w", name, err)
+ }
+
+ writer := bufio.NewWriterSize(f, defaultBufSize)
+
+ ns := make([]int, 0, len(rows))
+ for _, row := range rows {
+ n, err := writer.WriteString(FormatLog(row.Time.AsTime(), row.Content) + "\n")
+ if err != nil {
+ return nil, err
+ }
+ ns = append(ns, n)
+ }
+
+ if err := writer.Flush(); err != nil {
+ return nil, err
+ }
+ return ns, nil
+}
+
+func ReadLogs(ctx context.Context, inStorage bool, filename string, offset, limit int64) ([]*runnerv1.LogRow, error) {
+ f, err := OpenLogs(ctx, inStorage, filename)
+ if err != nil {
+ return nil, err
+ }
+ defer f.Close()
+
+ if _, err := f.Seek(offset, io.SeekStart); err != nil {
+ return nil, fmt.Errorf("file seek: %w", err)
+ }
+
+ scanner := bufio.NewScanner(f)
+ maxLineSize := len(timeFormat) + MaxLineSize + 1
+ scanner.Buffer(make([]byte, maxLineSize), maxLineSize)
+
+ var rows []*runnerv1.LogRow
+ for scanner.Scan() && (int64(len(rows)) < limit || limit < 0) {
+ t, c, err := ParseLog(scanner.Text())
+ if err != nil {
+ return nil, fmt.Errorf("parse log %q: %w", scanner.Text(), err)
+ }
+ rows = append(rows, &runnerv1.LogRow{
+ Time: timestamppb.New(t),
+ Content: c,
+ })
+ }
+
+ if err := scanner.Err(); err != nil {
+ return nil, fmt.Errorf("ReadLogs scan: %w", err)
+ }
+
+ return rows, nil
+}
+
+const (
+ // logZstdBlockSize is the block size for zstd compression.
+ // A 128KB block keeps the compression ratio close to that of regular zstd compression.
+ // It also means each read from the underlying object storage will be at least 128KB*(compression ratio).
+ // The compression ratio is about 30% for text files, so the actual read size is about 38KB, which should be acceptable.
+ logZstdBlockSize = 128 * 1024 // 128KB
+)
+
+// TransferLogs transfers logs from DBFS to object storage.
+// It happens when the file is complete and no more logs will be appended.
+// It respects the file format in the filename like ".zst", and compresses the content if needed.
+func TransferLogs(ctx context.Context, filename string) (func(), error) {
+ name := DBFSPrefix + filename
+ remove := func() {
+ if err := dbfs.Remove(ctx, name); err != nil {
+ log.Warn("dbfs remove %q: %v", name, err)
+ }
+ }
+ f, err := dbfs.Open(ctx, name)
+ if err != nil {
+ return nil, fmt.Errorf("dbfs open %q: %w", name, err)
+ }
+ defer f.Close()
+
+ var reader io.Reader = f
+ if strings.HasSuffix(filename, ".zst") {
+ r, w := io.Pipe()
+ reader = r
+ zstdWriter, err := zstd.NewSeekableWriter(w, logZstdBlockSize)
+ if err != nil {
+ return nil, fmt.Errorf("zstd NewSeekableWriter: %w", err)
+ }
+ go func() {
+ defer func() {
+ _ = w.CloseWithError(zstdWriter.Close())
+ }()
+ if _, err := io.Copy(zstdWriter, f); err != nil {
+ _ = w.CloseWithError(err)
+ return
+ }
+ }()
+ }
+
+ if _, err := storage.Actions.Save(filename, reader, -1); err != nil {
+ return nil, fmt.Errorf("storage save %q: %w", filename, err)
+ }
+ return remove, nil
+}
+
+func RemoveLogs(ctx context.Context, inStorage bool, filename string) error {
+ if !inStorage {
+ name := DBFSPrefix + filename
+ err := dbfs.Remove(ctx, name)
+ if err != nil {
+ return fmt.Errorf("dbfs remove %q: %w", name, err)
+ }
+ return nil
+ }
+ err := storage.Actions.Delete(filename)
+ if err != nil {
+ return fmt.Errorf("storage delete %q: %w", filename, err)
+ }
+ return nil
+}
+
+func OpenLogs(ctx context.Context, inStorage bool, filename string) (io.ReadSeekCloser, error) {
+ if !inStorage {
+ name := DBFSPrefix + filename
+ f, err := dbfs.Open(ctx, name)
+ if err != nil {
+ return nil, fmt.Errorf("dbfs open %q: %w", name, err)
+ }
+ return f, nil
+ }
+
+ f, err := storage.Actions.Open(filename)
+ if err != nil {
+ return nil, fmt.Errorf("storage open %q: %w", filename, err)
+ }
+
+ var reader io.ReadSeekCloser = f
+ if strings.HasSuffix(filename, ".zst") {
+ r, err := zstd.NewSeekableReader(f)
+ if err != nil {
+ return nil, fmt.Errorf("zstd NewSeekableReader: %w", err)
+ }
+ reader = r
+ }
+
+ return reader, nil
+}
+
+func FormatLog(timestamp time.Time, content string) string {
+ // Content shouldn't contain a newline, as it would break the log indexes; other control chars are safe.
+ content = strings.ReplaceAll(content, "\n", `\n`)
+ if len(content) > MaxLineSize {
+ content = content[:MaxLineSize]
+ }
+ return fmt.Sprintf("%s %s", timestamp.UTC().Format(timeFormat), content)
+}
+
+func ParseLog(in string) (time.Time, string, error) {
+ index := strings.IndexRune(in, ' ')
+ if index < 0 {
+ return time.Time{}, "", fmt.Errorf("invalid log: %q", in)
+ }
+ timestamp, err := time.Parse(timeFormat, in[:index])
+ if err != nil {
+ return time.Time{}, "", err
+ }
+ return timestamp, in[index+1:], nil
+}
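
For reference, the on-disk log format produced by FormatLog and consumed by ParseLog is one "<timestamp> <content>" line per row, with embedded newlines escaped. A minimal round-trip sketch, assuming the same `actions` package (the function name `logLineSketch` is hypothetical):

package actions

import (
	"fmt"
	"time"
)

func logLineSketch() {
	ts := time.Date(2024, 10, 18, 20, 33, 49, 0, time.UTC)

	// FormatLog escapes the newline and prefixes the UTC timestamp.
	line := FormatLog(ts, "hello\nworld")
	fmt.Println(line) // 2024-10-18T20:33:49.0000000Z hello\nworld

	// ParseLog splits on the first space and parses the timestamp back.
	parsedTime, content, err := ParseLog(line)
	if err != nil {
		panic(err)
	}
	fmt.Println(parsedTime.Equal(ts), content) // true hello\nworld
}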
diff --git a/modules/actions/task_state.go b/modules/actions/task_state.go
new file mode 100644
index 0000000..1f36e02
--- /dev/null
+++ b/modules/actions/task_state.go
@@ -0,0 +1,123 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ actions_model "code.gitea.io/gitea/models/actions"
+)
+
+const (
+ preStepName = "Set up job"
+ postStepName = "Complete job"
+)
+
+// FullSteps returns the task's steps with the synthetic "Set up job" and "Complete job" steps added.
+func FullSteps(task *actions_model.ActionTask) []*actions_model.ActionTaskStep {
+ if len(task.Steps) == 0 {
+ return fullStepsOfEmptySteps(task)
+ }
+
+ // firstStep is the first step that has run or is running, not including preStep.
+ // For example,
+ // 1. preStep(Success) -> step1(Success) -> step2(Running) -> step3(Waiting) -> postStep(Waiting): firstStep is step1.
+ // 2. preStep(Success) -> step1(Skipped) -> step2(Success) -> postStep(Success): firstStep is step2.
+ // 3. preStep(Success) -> step1(Running) -> step2(Waiting) -> postStep(Waiting): firstStep is step1.
+ // 4. preStep(Success) -> step1(Skipped) -> step2(Skipped) -> postStep(Skipped): firstStep is nil.
+ // 5. preStep(Success) -> step1(Cancelled) -> step2(Cancelled) -> postStep(Cancelled): firstStep is nil.
+ var firstStep *actions_model.ActionTaskStep
+ // lastHasRunStep is the last step that has run.
+ // For example,
+ // 1. preStep(Success) -> step1(Success) -> step2(Running) -> step3(Waiting) -> postStep(Waiting): lastHasRunStep is step1.
+ // 2. preStep(Success) -> step1(Success) -> step2(Success) -> step3(Success) -> postStep(Success): lastHasRunStep is step3.
+ // 3. preStep(Success) -> step1(Success) -> step2(Failure) -> step3 -> postStep(Waiting): lastHasRunStep is step2.
+ // So its Stopped is the Started of postStep when there are no more steps to run.
+ var lastHasRunStep *actions_model.ActionTaskStep
+
+ var logIndex int64
+ for _, step := range task.Steps {
+ if firstStep == nil && (step.Status.HasRun() || step.Status.IsRunning()) {
+ firstStep = step
+ }
+ if step.Status.HasRun() {
+ lastHasRunStep = step
+ }
+ logIndex += step.LogLength
+ }
+
+ preStep := &actions_model.ActionTaskStep{
+ Name: preStepName,
+ LogLength: task.LogLength,
+ Started: task.Started,
+ Status: actions_model.StatusRunning,
+ }
+
+ // No step has run or is running, so preStep is equal to the task
+ if firstStep == nil {
+ preStep.Stopped = task.Stopped
+ preStep.Status = task.Status
+ } else {
+ preStep.LogLength = firstStep.LogIndex
+ preStep.Stopped = firstStep.Started
+ preStep.Status = actions_model.StatusSuccess
+ }
+ logIndex += preStep.LogLength
+
+ if lastHasRunStep == nil {
+ lastHasRunStep = preStep
+ }
+
+ postStep := &actions_model.ActionTaskStep{
+ Name: postStepName,
+ Status: actions_model.StatusWaiting,
+ }
+ // If the lastHasRunStep is the last step, or it has failed, postStep has started.
+ if lastHasRunStep.Status.IsFailure() || lastHasRunStep == task.Steps[len(task.Steps)-1] {
+ postStep.LogIndex = logIndex
+ postStep.LogLength = task.LogLength - postStep.LogIndex
+ postStep.Started = lastHasRunStep.Stopped
+ postStep.Status = actions_model.StatusRunning
+ }
+ if task.Status.IsDone() {
+ postStep.Status = task.Status
+ postStep.Stopped = task.Stopped
+ }
+ ret := make([]*actions_model.ActionTaskStep, 0, len(task.Steps)+2)
+ ret = append(ret, preStep)
+ ret = append(ret, task.Steps...)
+ ret = append(ret, postStep)
+
+ return ret
+}
+
+func fullStepsOfEmptySteps(task *actions_model.ActionTask) []*actions_model.ActionTaskStep {
+ preStep := &actions_model.ActionTaskStep{
+ Name: preStepName,
+ LogLength: task.LogLength,
+ Started: task.Started,
+ Stopped: task.Stopped,
+ Status: actions_model.StatusRunning,
+ }
+
+ postStep := &actions_model.ActionTaskStep{
+ Name: postStepName,
+ LogIndex: task.LogLength,
+ Started: task.Stopped,
+ Stopped: task.Stopped,
+ Status: actions_model.StatusWaiting,
+ }
+
+ if task.Status.IsDone() {
+ preStep.Status = task.Status
+ if preStep.Status.IsSuccess() {
+ postStep.Status = actions_model.StatusSuccess
+ } else {
+ postStep.Status = actions_model.StatusCancelled
+ }
+ }
+
+ return []*actions_model.ActionTaskStep{
+ preStep,
+ postStep,
+ }
+}
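
The test file below exercises FullSteps in detail. As a compact illustration of how it partitions a task's log between the synthetic "Set up job" and "Complete job" steps, here is a sketch mirroring the "regular" fixture from task_state_test.go (same `actions` package; `fullStepsSketch` is a made-up name):

package actions

import (
	"fmt"

	actions_model "code.gitea.io/gitea/models/actions"
)

func fullStepsSketch() {
	task := &actions_model.ActionTask{
		Steps: []*actions_model.ActionTaskStep{
			{Status: actions_model.StatusSuccess, LogIndex: 10, LogLength: 80, Started: 10010, Stopped: 10090},
		},
		Status:    actions_model.StatusSuccess,
		Started:   10000,
		Stopped:   10100,
		LogLength: 100,
	}
	// Prints three steps: "Set up job" owns log lines 0-9, the real step
	// owns 10-89, and "Complete job" owns the remaining 90-99.
	for _, step := range FullSteps(task) {
		fmt.Println(step.Name, step.LogIndex, step.LogLength, step.Status)
	}
}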
diff --git a/modules/actions/task_state_test.go b/modules/actions/task_state_test.go
new file mode 100644
index 0000000..ff0fd57
--- /dev/null
+++ b/modules/actions/task_state_test.go
@@ -0,0 +1,165 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "testing"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestFullSteps(t *testing.T) {
+ tests := []struct {
+ name string
+ task *actions_model.ActionTask
+ want []*actions_model.ActionTaskStep
+ }{
+ {
+ name: "regular",
+ task: &actions_model.ActionTask{
+ Steps: []*actions_model.ActionTaskStep{
+ {Status: actions_model.StatusSuccess, LogIndex: 10, LogLength: 80, Started: 10010, Stopped: 10090},
+ },
+ Status: actions_model.StatusSuccess,
+ Started: 10000,
+ Stopped: 10100,
+ LogLength: 100,
+ },
+ want: []*actions_model.ActionTaskStep{
+ {Name: preStepName, Status: actions_model.StatusSuccess, LogIndex: 0, LogLength: 10, Started: 10000, Stopped: 10010},
+ {Status: actions_model.StatusSuccess, LogIndex: 10, LogLength: 80, Started: 10010, Stopped: 10090},
+ {Name: postStepName, Status: actions_model.StatusSuccess, LogIndex: 90, LogLength: 10, Started: 10090, Stopped: 10100},
+ },
+ },
+ {
+ name: "failed step",
+ task: &actions_model.ActionTask{
+ Steps: []*actions_model.ActionTaskStep{
+ {Status: actions_model.StatusSuccess, LogIndex: 10, LogLength: 20, Started: 10010, Stopped: 10020},
+ {Status: actions_model.StatusFailure, LogIndex: 30, LogLength: 60, Started: 10020, Stopped: 10090},
+ {Status: actions_model.StatusCancelled, LogIndex: 0, LogLength: 0, Started: 0, Stopped: 0},
+ },
+ Status: actions_model.StatusFailure,
+ Started: 10000,
+ Stopped: 10100,
+ LogLength: 100,
+ },
+ want: []*actions_model.ActionTaskStep{
+ {Name: preStepName, Status: actions_model.StatusSuccess, LogIndex: 0, LogLength: 10, Started: 10000, Stopped: 10010},
+ {Status: actions_model.StatusSuccess, LogIndex: 10, LogLength: 20, Started: 10010, Stopped: 10020},
+ {Status: actions_model.StatusFailure, LogIndex: 30, LogLength: 60, Started: 10020, Stopped: 10090},
+ {Status: actions_model.StatusCancelled, LogIndex: 0, LogLength: 0, Started: 0, Stopped: 0},
+ {Name: postStepName, Status: actions_model.StatusFailure, LogIndex: 90, LogLength: 10, Started: 10090, Stopped: 10100},
+ },
+ },
+ {
+ name: "first step is running",
+ task: &actions_model.ActionTask{
+ Steps: []*actions_model.ActionTaskStep{
+ {Status: actions_model.StatusRunning, LogIndex: 10, LogLength: 80, Started: 10010, Stopped: 0},
+ },
+ Status: actions_model.StatusRunning,
+ Started: 10000,
+ Stopped: 10100,
+ LogLength: 100,
+ },
+ want: []*actions_model.ActionTaskStep{
+ {Name: preStepName, Status: actions_model.StatusSuccess, LogIndex: 0, LogLength: 10, Started: 10000, Stopped: 10010},
+ {Status: actions_model.StatusRunning, LogIndex: 10, LogLength: 80, Started: 10010, Stopped: 0},
+ {Name: postStepName, Status: actions_model.StatusWaiting, LogIndex: 0, LogLength: 0, Started: 0, Stopped: 0},
+ },
+ },
+ {
+ name: "first step has canceled",
+ task: &actions_model.ActionTask{
+ Steps: []*actions_model.ActionTaskStep{
+ {Status: actions_model.StatusCancelled, LogIndex: 0, LogLength: 0, Started: 0, Stopped: 0},
+ },
+ Status: actions_model.StatusFailure,
+ Started: 10000,
+ Stopped: 10100,
+ LogLength: 100,
+ },
+ want: []*actions_model.ActionTaskStep{
+ {Name: preStepName, Status: actions_model.StatusFailure, LogIndex: 0, LogLength: 100, Started: 10000, Stopped: 10100},
+ {Status: actions_model.StatusCancelled, LogIndex: 0, LogLength: 0, Started: 0, Stopped: 0},
+ {Name: postStepName, Status: actions_model.StatusFailure, LogIndex: 100, LogLength: 0, Started: 10100, Stopped: 10100},
+ },
+ },
+ {
+ name: "empty steps",
+ task: &actions_model.ActionTask{
+ Steps: []*actions_model.ActionTaskStep{},
+ Status: actions_model.StatusSuccess,
+ Started: 10000,
+ Stopped: 10100,
+ LogLength: 100,
+ },
+ want: []*actions_model.ActionTaskStep{
+ {Name: preStepName, Status: actions_model.StatusSuccess, LogIndex: 0, LogLength: 100, Started: 10000, Stopped: 10100},
+ {Name: postStepName, Status: actions_model.StatusSuccess, LogIndex: 100, LogLength: 0, Started: 10100, Stopped: 10100},
+ },
+ },
+ {
+ name: "all steps finished but task is running",
+ task: &actions_model.ActionTask{
+ Steps: []*actions_model.ActionTaskStep{
+ {Status: actions_model.StatusSuccess, LogIndex: 10, LogLength: 80, Started: 10010, Stopped: 10090},
+ },
+ Status: actions_model.StatusRunning,
+ Started: 10000,
+ Stopped: 0,
+ LogLength: 100,
+ },
+ want: []*actions_model.ActionTaskStep{
+ {Name: preStepName, Status: actions_model.StatusSuccess, LogIndex: 0, LogLength: 10, Started: 10000, Stopped: 10010},
+ {Status: actions_model.StatusSuccess, LogIndex: 10, LogLength: 80, Started: 10010, Stopped: 10090},
+ {Name: postStepName, Status: actions_model.StatusRunning, LogIndex: 90, LogLength: 10, Started: 10090, Stopped: 0},
+ },
+ },
+ {
+ name: "skipped task",
+ task: &actions_model.ActionTask{
+ Steps: []*actions_model.ActionTaskStep{
+ {Status: actions_model.StatusSkipped, LogIndex: 0, LogLength: 0, Started: 0, Stopped: 0},
+ },
+ Status: actions_model.StatusSkipped,
+ Started: 0,
+ Stopped: 0,
+ LogLength: 0,
+ },
+ want: []*actions_model.ActionTaskStep{
+ {Name: preStepName, Status: actions_model.StatusSkipped, LogIndex: 0, LogLength: 0, Started: 0, Stopped: 0},
+ {Status: actions_model.StatusSkipped, LogIndex: 0, LogLength: 0, Started: 0, Stopped: 0},
+ {Name: postStepName, Status: actions_model.StatusSkipped, LogIndex: 0, LogLength: 0, Started: 0, Stopped: 0},
+ },
+ },
+ {
+ name: "first step is skipped",
+ task: &actions_model.ActionTask{
+ Steps: []*actions_model.ActionTaskStep{
+ {Status: actions_model.StatusSkipped, LogIndex: 0, LogLength: 0, Started: 0, Stopped: 0},
+ {Status: actions_model.StatusSuccess, LogIndex: 10, LogLength: 80, Started: 10010, Stopped: 10090},
+ },
+ Status: actions_model.StatusSuccess,
+ Started: 10000,
+ Stopped: 10100,
+ LogLength: 100,
+ },
+ want: []*actions_model.ActionTaskStep{
+ {Name: preStepName, Status: actions_model.StatusSuccess, LogIndex: 0, LogLength: 10, Started: 10000, Stopped: 10010},
+ {Status: actions_model.StatusSkipped, LogIndex: 0, LogLength: 0, Started: 0, Stopped: 0},
+ {Status: actions_model.StatusSuccess, LogIndex: 10, LogLength: 80, Started: 10010, Stopped: 10090},
+ {Name: postStepName, Status: actions_model.StatusSuccess, LogIndex: 90, LogLength: 10, Started: 10090, Stopped: 10100},
+ },
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ assert.Equalf(t, tt.want, FullSteps(tt.task), "FullSteps(%v)", tt.task)
+ })
+ }
+}
diff --git a/modules/actions/workflows.go b/modules/actions/workflows.go
new file mode 100644
index 0000000..94c221e
--- /dev/null
+++ b/modules/actions/workflows.go
@@ -0,0 +1,702 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "bytes"
+ "io"
+ "strings"
+
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ api "code.gitea.io/gitea/modules/structs"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+
+ "github.com/gobwas/glob"
+ "github.com/nektos/act/pkg/jobparser"
+ "github.com/nektos/act/pkg/model"
+ "github.com/nektos/act/pkg/workflowpattern"
+ "gopkg.in/yaml.v3"
+)
+
+type DetectedWorkflow struct {
+ EntryName string
+ TriggerEvent *jobparser.Event
+ Content []byte
+}
+
+func init() {
+ model.OnDecodeNodeError = func(node yaml.Node, out any, err error) {
+ // Log the error instead of panic or fatal.
+ // It would be a big job to refactor act/pkg/model to return a decode error,
+ // so we just log the error, return an empty value, and improve it later.
+ log.Error("Failed to decode node %v into %T: %v", node, out, err)
+ }
+}
+
+func IsWorkflow(path string) bool {
+ if (!strings.HasSuffix(path, ".yaml")) && (!strings.HasSuffix(path, ".yml")) {
+ return false
+ }
+
+ return strings.HasPrefix(path, ".forgejo/workflows") || strings.HasPrefix(path, ".gitea/workflows") || strings.HasPrefix(path, ".github/workflows")
+}
+
+func ListWorkflows(commit *git.Commit) (git.Entries, error) {
+ tree, err := commit.SubTree(".forgejo/workflows")
+ if _, ok := err.(git.ErrNotExist); ok {
+ tree, err = commit.SubTree(".gitea/workflows")
+ }
+ if _, ok := err.(git.ErrNotExist); ok {
+ tree, err = commit.SubTree(".github/workflows")
+ }
+ if _, ok := err.(git.ErrNotExist); ok {
+ return nil, nil
+ }
+ if err != nil {
+ return nil, err
+ }
+
+ entries, err := tree.ListEntriesRecursiveFast()
+ if err != nil {
+ return nil, err
+ }
+
+ ret := make(git.Entries, 0, len(entries))
+ for _, entry := range entries {
+ if strings.HasSuffix(entry.Name(), ".yml") || strings.HasSuffix(entry.Name(), ".yaml") {
+ ret = append(ret, entry)
+ }
+ }
+ return ret, nil
+}
+
+func GetContentFromEntry(entry *git.TreeEntry) ([]byte, error) {
+ f, err := entry.Blob().DataAsync()
+ if err != nil {
+ return nil, err
+ }
+ content, err := io.ReadAll(f)
+ _ = f.Close()
+ if err != nil {
+ return nil, err
+ }
+ return content, nil
+}
+
+func GetEventsFromContent(content []byte) ([]*jobparser.Event, error) {
+ workflow, err := model.ReadWorkflow(bytes.NewReader(content))
+ if err != nil {
+ return nil, err
+ }
+ events, err := jobparser.ParseRawOn(&workflow.RawOn)
+ if err != nil {
+ return nil, err
+ }
+
+ return events, nil
+}
+
+func DetectWorkflows(
+ gitRepo *git.Repository,
+ commit *git.Commit,
+ triggedEvent webhook_module.HookEventType,
+ payload api.Payloader,
+ detectSchedule bool,
+) ([]*DetectedWorkflow, []*DetectedWorkflow, error) {
+ entries, err := ListWorkflows(commit)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ workflows := make([]*DetectedWorkflow, 0, len(entries))
+ schedules := make([]*DetectedWorkflow, 0, len(entries))
+ for _, entry := range entries {
+ content, err := GetContentFromEntry(entry)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ // one workflow may have multiple events
+ events, err := GetEventsFromContent(content)
+ if err != nil {
+ log.Warn("ignore invalid workflow %q: %v", entry.Name(), err)
+ continue
+ }
+ for _, evt := range events {
+ log.Trace("detect workflow %q for event %#v matching %q", entry.Name(), evt, triggedEvent)
+ if evt.IsSchedule() {
+ if detectSchedule {
+ dwf := &DetectedWorkflow{
+ EntryName: entry.Name(),
+ TriggerEvent: evt,
+ Content: content,
+ }
+ schedules = append(schedules, dwf)
+ }
+ } else if detectMatched(gitRepo, commit, triggedEvent, payload, evt) {
+ dwf := &DetectedWorkflow{
+ EntryName: entry.Name(),
+ TriggerEvent: evt,
+ Content: content,
+ }
+ workflows = append(workflows, dwf)
+ }
+ }
+ }
+
+ return workflows, schedules, nil
+}
+
+func DetectScheduledWorkflows(gitRepo *git.Repository, commit *git.Commit) ([]*DetectedWorkflow, error) {
+ entries, err := ListWorkflows(commit)
+ if err != nil {
+ return nil, err
+ }
+
+ wfs := make([]*DetectedWorkflow, 0, len(entries))
+ for _, entry := range entries {
+ content, err := GetContentFromEntry(entry)
+ if err != nil {
+ return nil, err
+ }
+
+ // one workflow may have multiple events
+ events, err := GetEventsFromContent(content)
+ if err != nil {
+ log.Warn("ignore invalid workflow %q: %v", entry.Name(), err)
+ continue
+ }
+ for _, evt := range events {
+ if evt.IsSchedule() {
+ log.Trace("detect scheduled workflow: %q", entry.Name())
+ dwf := &DetectedWorkflow{
+ EntryName: entry.Name(),
+ TriggerEvent: evt,
+ Content: content,
+ }
+ wfs = append(wfs, dwf)
+ }
+ }
+ }
+
+ return wfs, nil
+}
+
+func detectMatched(gitRepo *git.Repository, commit *git.Commit, triggedEvent webhook_module.HookEventType, payload api.Payloader, evt *jobparser.Event) bool {
+ if !canGithubEventMatch(evt.Name, triggedEvent) {
+ return false
+ }
+
+ switch triggedEvent {
+ case // events with no activity types
+ webhook_module.HookEventWorkflowDispatch,
+ webhook_module.HookEventCreate,
+ webhook_module.HookEventDelete,
+ webhook_module.HookEventFork,
+ webhook_module.HookEventWiki,
+ webhook_module.HookEventSchedule:
+ if len(evt.Acts()) != 0 {
+ log.Warn("Ignore unsupported %s event arguments %v", triggedEvent, evt.Acts())
+ }
+ // no special filter parameters for these events, just return true if name matched
+ return true
+
+ case // push
+ webhook_module.HookEventPush:
+ return matchPushEvent(commit, payload.(*api.PushPayload), evt)
+
+ case // issues
+ webhook_module.HookEventIssues,
+ webhook_module.HookEventIssueAssign,
+ webhook_module.HookEventIssueLabel,
+ webhook_module.HookEventIssueMilestone:
+ return matchIssuesEvent(payload.(*api.IssuePayload), evt)
+
+ case // issue_comment
+ webhook_module.HookEventIssueComment,
+ // `pull_request_comment` is the same as `issue_comment`
+ // See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_comment-use-issue_comment
+ webhook_module.HookEventPullRequestComment:
+ return matchIssueCommentEvent(payload.(*api.IssueCommentPayload), evt)
+
+ case // pull_request
+ webhook_module.HookEventPullRequest,
+ webhook_module.HookEventPullRequestSync,
+ webhook_module.HookEventPullRequestAssign,
+ webhook_module.HookEventPullRequestLabel,
+ webhook_module.HookEventPullRequestReviewRequest,
+ webhook_module.HookEventPullRequestMilestone:
+ return matchPullRequestEvent(gitRepo, commit, payload.(*api.PullRequestPayload), evt)
+
+ case // pull_request_review
+ webhook_module.HookEventPullRequestReviewApproved,
+ webhook_module.HookEventPullRequestReviewRejected:
+ return matchPullRequestReviewEvent(payload.(*api.PullRequestPayload), evt)
+
+ case // pull_request_review_comment
+ webhook_module.HookEventPullRequestReviewComment:
+ return matchPullRequestReviewCommentEvent(payload.(*api.PullRequestPayload), evt)
+
+ case // release
+ webhook_module.HookEventRelease:
+ return matchReleaseEvent(payload.(*api.ReleasePayload), evt)
+
+ case // registry_package
+ webhook_module.HookEventPackage:
+ return matchPackageEvent(payload.(*api.PackagePayload), evt)
+
+ default:
+ log.Warn("unsupported event %q", triggedEvent)
+ return false
+ }
+}
+
+func matchPushEvent(commit *git.Commit, pushPayload *api.PushPayload, evt *jobparser.Event) bool {
+ // with no special filter parameters
+ if len(evt.Acts()) == 0 {
+ return true
+ }
+
+ matchTimes := 0
+ hasBranchFilter := false
+ hasTagFilter := false
+ refName := git.RefName(pushPayload.Ref)
+ // all acts conditions should be satisfied
+ for cond, vals := range evt.Acts() {
+ switch cond {
+ case "branches":
+ hasBranchFilter = true
+ if !refName.IsBranch() {
+ break
+ }
+ patterns, err := workflowpattern.CompilePatterns(vals...)
+ if err != nil {
+ break
+ }
+ if !workflowpattern.Skip(patterns, []string{refName.BranchName()}, &workflowpattern.EmptyTraceWriter{}) {
+ matchTimes++
+ }
+ case "branches-ignore":
+ hasBranchFilter = true
+ if !refName.IsBranch() {
+ break
+ }
+ patterns, err := workflowpattern.CompilePatterns(vals...)
+ if err != nil {
+ break
+ }
+ if !workflowpattern.Filter(patterns, []string{refName.BranchName()}, &workflowpattern.EmptyTraceWriter{}) {
+ matchTimes++
+ }
+ case "tags":
+ hasTagFilter = true
+ if !refName.IsTag() {
+ break
+ }
+ patterns, err := workflowpattern.CompilePatterns(vals...)
+ if err != nil {
+ break
+ }
+ if !workflowpattern.Skip(patterns, []string{refName.TagName()}, &workflowpattern.EmptyTraceWriter{}) {
+ matchTimes++
+ }
+ case "tags-ignore":
+ hasTagFilter = true
+ if !refName.IsTag() {
+ break
+ }
+ patterns, err := workflowpattern.CompilePatterns(vals...)
+ if err != nil {
+ break
+ }
+ if !workflowpattern.Filter(patterns, []string{refName.TagName()}, &workflowpattern.EmptyTraceWriter{}) {
+ matchTimes++
+ }
+ case "paths":
+ filesChanged, err := commit.GetFilesChangedSinceCommit(pushPayload.Before)
+ if err != nil {
+ log.Error("GetFilesChangedSinceCommit [commit_sha1: %s]: %v", commit.ID.String(), err)
+ } else {
+ patterns, err := workflowpattern.CompilePatterns(vals...)
+ if err != nil {
+ break
+ }
+ if !workflowpattern.Skip(patterns, filesChanged, &workflowpattern.EmptyTraceWriter{}) {
+ matchTimes++
+ }
+ }
+ case "paths-ignore":
+ filesChanged, err := commit.GetFilesChangedSinceCommit(pushPayload.Before)
+ if err != nil {
+ log.Error("GetFilesChangedSinceCommit [commit_sha1: %s]: %v", commit.ID.String(), err)
+ } else {
+ patterns, err := workflowpattern.CompilePatterns(vals...)
+ if err != nil {
+ break
+ }
+ if !workflowpattern.Filter(patterns, filesChanged, &workflowpattern.EmptyTraceWriter{}) {
+ matchTimes++
+ }
+ }
+ default:
+ log.Warn("push event unsupported condition %q", cond)
+ }
+ }
+ // if both branch and tag filter are defined in the workflow only one needs to match
+ if hasBranchFilter && hasTagFilter {
+ matchTimes++
+ }
+ return matchTimes == len(evt.Acts())
+}
+
+func matchIssuesEvent(issuePayload *api.IssuePayload, evt *jobparser.Event) bool {
+ // with no special filter parameters
+ if len(evt.Acts()) == 0 {
+ return true
+ }
+
+ matchTimes := 0
+ // all acts conditions should be satisfied
+ for cond, vals := range evt.Acts() {
+ switch cond {
+ case "types":
+ // See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#issues
+ // Actions with the same name:
+ // opened, edited, closed, reopened, assigned, unassigned, milestoned, demilestoned
+ // Actions need to be converted:
+ // label_updated -> labeled
+ // label_cleared -> unlabeled
+ // Unsupported activity types:
+ // deleted, transferred, pinned, unpinned, locked, unlocked
+
+ action := issuePayload.Action
+ switch action {
+ case api.HookIssueLabelUpdated:
+ action = "labeled"
+ case api.HookIssueLabelCleared:
+ action = "unlabeled"
+ }
+ for _, val := range vals {
+ if glob.MustCompile(val, '/').Match(string(action)) {
+ matchTimes++
+ break
+ }
+ }
+ default:
+ log.Warn("issue event unsupported condition %q", cond)
+ }
+ }
+ return matchTimes == len(evt.Acts())
+}
+
+func matchPullRequestEvent(gitRepo *git.Repository, commit *git.Commit, prPayload *api.PullRequestPayload, evt *jobparser.Event) bool {
+ acts := evt.Acts()
+ activityTypeMatched := false
+ matchTimes := 0
+
+ if vals, ok := acts["types"]; !ok {
+ // By default, only the pull request `opened`, `reopened` and `synchronized` actions will trigger the workflow.
+ // See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request
+ activityTypeMatched = prPayload.Action == api.HookIssueSynchronized || prPayload.Action == api.HookIssueOpened || prPayload.Action == api.HookIssueReOpened
+ } else {
+ // See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request
+ // Actions with the same name:
+ // opened, edited, closed, reopened, assigned, unassigned, review_requested, review_request_removed, milestoned, demilestoned
+ // Actions need to be converted:
+ // synchronized -> synchronize
+ // label_updated -> labeled
+ // label_cleared -> unlabeled
+ // Unsupported activity types:
+ // converted_to_draft, ready_for_review, locked, unlocked, auto_merge_enabled, auto_merge_disabled, enqueued, dequeued
+
+ action := prPayload.Action
+ switch action {
+ case api.HookIssueSynchronized:
+ action = "synchronize"
+ case api.HookIssueLabelUpdated:
+ action = "labeled"
+ case api.HookIssueLabelCleared:
+ action = "unlabeled"
+ }
+ log.Trace("matching pull_request %s with %v", action, vals)
+ for _, val := range vals {
+ if glob.MustCompile(val, '/').Match(string(action)) {
+ activityTypeMatched = true
+ matchTimes++
+ break
+ }
+ }
+ }
+
+ var (
+ headCommit = commit
+ err error
+ )
+ if evt.Name == GithubEventPullRequestTarget && (len(acts["paths"]) > 0 || len(acts["paths-ignore"]) > 0) {
+ headCommit, err = gitRepo.GetCommit(prPayload.PullRequest.Head.Sha)
+ if err != nil {
+ log.Error("GetCommit [ref: %s]: %v", prPayload.PullRequest.Head.Sha, err)
+ return false
+ }
+ }
+
+ // all acts conditions should be satisfied
+ for cond, vals := range acts {
+ switch cond {
+ case "types":
+ // types have been checked
+ continue
+ case "branches":
+ refName := git.RefName(prPayload.PullRequest.Base.Ref)
+ patterns, err := workflowpattern.CompilePatterns(vals...)
+ if err != nil {
+ break
+ }
+ if !workflowpattern.Skip(patterns, []string{refName.ShortName()}, &workflowpattern.EmptyTraceWriter{}) {
+ matchTimes++
+ }
+ case "branches-ignore":
+ refName := git.RefName(prPayload.PullRequest.Base.Ref)
+ patterns, err := workflowpattern.CompilePatterns(vals...)
+ if err != nil {
+ break
+ }
+ if !workflowpattern.Filter(patterns, []string{refName.ShortName()}, &workflowpattern.EmptyTraceWriter{}) {
+ matchTimes++
+ }
+ case "paths":
+ filesChanged, err := headCommit.GetFilesChangedSinceCommit(prPayload.PullRequest.Base.Ref)
+ if err != nil {
+ log.Error("GetFilesChangedSinceCommit [commit_sha1: %s]: %v", headCommit.ID.String(), err)
+ } else {
+ patterns, err := workflowpattern.CompilePatterns(vals...)
+ if err != nil {
+ break
+ }
+ if !workflowpattern.Skip(patterns, filesChanged, &workflowpattern.EmptyTraceWriter{}) {
+ matchTimes++
+ }
+ }
+ case "paths-ignore":
+ filesChanged, err := headCommit.GetFilesChangedSinceCommit(prPayload.PullRequest.Base.Ref)
+ if err != nil {
+ log.Error("GetFilesChangedSinceCommit [commit_sha1: %s]: %v", headCommit.ID.String(), err)
+ } else {
+ patterns, err := workflowpattern.CompilePatterns(vals...)
+ if err != nil {
+ break
+ }
+ if !workflowpattern.Filter(patterns, filesChanged, &workflowpattern.EmptyTraceWriter{}) {
+ matchTimes++
+ }
+ }
+ default:
+ log.Warn("pull request event unsupported condition %q", cond)
+ }
+ }
+ return activityTypeMatched && matchTimes == len(evt.Acts())
+}
+
+func matchIssueCommentEvent(issueCommentPayload *api.IssueCommentPayload, evt *jobparser.Event) bool {
+ // with no special filter parameters
+ if len(evt.Acts()) == 0 {
+ return true
+ }
+
+ matchTimes := 0
+ // all acts conditions should be satisfied
+ for cond, vals := range evt.Acts() {
+ switch cond {
+ case "types":
+ // See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#issue_comment
+ // Actions with the same name:
+ // created, edited, deleted
+ // Actions need to be converted:
+ // NONE
+ // Unsupported activity types:
+ // NONE
+
+ for _, val := range vals {
+ if glob.MustCompile(val, '/').Match(string(issueCommentPayload.Action)) {
+ matchTimes++
+ break
+ }
+ }
+ default:
+ log.Warn("issue comment event unsupported condition %q", cond)
+ }
+ }
+ return matchTimes == len(evt.Acts())
+}
+
+func matchPullRequestReviewEvent(prPayload *api.PullRequestPayload, evt *jobparser.Event) bool {
+ // with no special filter parameters
+ if len(evt.Acts()) == 0 {
+ return true
+ }
+
+ matchTimes := 0
+ // all acts conditions should be satisfied
+ for cond, vals := range evt.Acts() {
+ switch cond {
+ case "types":
+ // See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_review
+ // Activity types with the same name:
+ // NONE
+ // Activity types need to be converted:
+ // reviewed -> submitted
+ // reviewed -> edited
+ // Unsupported activity types:
+ // dismissed
+
+ actions := make([]string, 0)
+ if prPayload.Action == api.HookIssueReviewed {
+ // the `reviewed` HookIssueAction can match the two activity types: `submitted` and `edited`
+ // See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_review
+ actions = append(actions, "submitted", "edited")
+ }
+
+ matched := false
+ for _, val := range vals {
+ for _, action := range actions {
+ if glob.MustCompile(val, '/').Match(action) {
+ matched = true
+ break
+ }
+ }
+ if matched {
+ break
+ }
+ }
+ if matched {
+ matchTimes++
+ }
+ default:
+ log.Warn("pull request review event unsupported condition %q", cond)
+ }
+ }
+ return matchTimes == len(evt.Acts())
+}
+
+func matchPullRequestReviewCommentEvent(prPayload *api.PullRequestPayload, evt *jobparser.Event) bool {
+ // with no special filter parameters
+ if len(evt.Acts()) == 0 {
+ return true
+ }
+
+ matchTimes := 0
+ // all acts conditions should be satisfied
+ for cond, vals := range evt.Acts() {
+ switch cond {
+ case "types":
+ // See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_review_comment
+ // Activity types with the same name:
+ // NONE
+ // Activity types need to be converted:
+ // reviewed -> created
+ // reviewed -> edited
+ // Unsupported activity types:
+ // deleted
+
+ actions := make([]string, 0)
+ if prPayload.Action == api.HookIssueReviewed {
+ // the `reviewed` HookIssueAction can match the two activity types: `created` and `edited`
+ // See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_review_comment
+ actions = append(actions, "created", "edited")
+ }
+
+ matched := false
+ for _, val := range vals {
+ for _, action := range actions {
+ if glob.MustCompile(val, '/').Match(action) {
+ matched = true
+ break
+ }
+ }
+ if matched {
+ break
+ }
+ }
+ if matched {
+ matchTimes++
+ }
+ default:
+ log.Warn("pull request review comment event unsupported condition %q", cond)
+ }
+ }
+ return matchTimes == len(evt.Acts())
+}
+
+func matchReleaseEvent(payload *api.ReleasePayload, evt *jobparser.Event) bool {
+ // with no special filter parameters
+ if len(evt.Acts()) == 0 {
+ return true
+ }
+
+ matchTimes := 0
+ // all acts conditions should be satisfied
+ for cond, vals := range evt.Acts() {
+ switch cond {
+ case "types":
+ // See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#release
+ // Activity types with the same name:
+ // published
+ // Activity types need to be converted:
+ // updated -> edited
+ // Unsupported activity types:
+ // unpublished, created, deleted, prereleased, released
+
+ action := payload.Action
+ if action == api.HookReleaseUpdated {
+ action = "edited"
+ }
+ for _, val := range vals {
+ if glob.MustCompile(val, '/').Match(string(action)) {
+ matchTimes++
+ break
+ }
+ }
+ default:
+ log.Warn("release event unsupported condition %q", cond)
+ }
+ }
+ return matchTimes == len(evt.Acts())
+}
+
+func matchPackageEvent(payload *api.PackagePayload, evt *jobparser.Event) bool {
+ // with no special filter parameters
+ if len(evt.Acts()) == 0 {
+ return true
+ }
+
+ matchTimes := 0
+ // all acts conditions should be satisfied
+ for cond, vals := range evt.Acts() {
+ switch cond {
+ case "types":
+ // See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#registry_package
+ // Activity types with the same name:
+ // NONE
+ // Activity types need to be converted:
+ // created -> published
+ // Unsupported activity types:
+ // updated
+
+ action := payload.Action
+ if action == api.HookPackageCreated {
+ action = "published"
+ }
+ for _, val := range vals {
+ if glob.MustCompile(val, '/').Match(string(action)) {
+ matchTimes++
+ break
+ }
+ }
+ default:
+ log.Warn("package event unsupported condition %q", cond)
+ }
+ }
+ return matchTimes == len(evt.Acts())
+}
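
To tie the pieces together: a workflow's `on:` block is parsed with GetEventsFromContent, and each resulting event is checked against the incoming webhook by detectMatched. A minimal sketch of a push-branch filter, assuming the same `actions` package (`pushFilterSketch` is a made-up name; the repository and commit arguments can be nil here because only `paths` filters consult the commit):

package actions

import (
	"fmt"

	api "code.gitea.io/gitea/modules/structs"
	webhook_module "code.gitea.io/gitea/modules/webhook"
)

func pushFilterSketch() {
	evts, err := GetEventsFromContent([]byte("on:\n  push:\n    branches: [main]"))
	if err != nil || len(evts) != 1 {
		panic("unexpected workflow definition")
	}

	payload := &api.PushPayload{Ref: "refs/heads/main"}
	// true: the pushed branch "main" matches the `branches: [main]` pattern.
	fmt.Println(detectMatched(nil, nil, webhook_module.HookEventPush, payload, evts[0]))
}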
diff --git a/modules/actions/workflows_test.go b/modules/actions/workflows_test.go
new file mode 100644
index 0000000..965d01f
--- /dev/null
+++ b/modules/actions/workflows_test.go
@@ -0,0 +1,163 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/modules/git"
+ api "code.gitea.io/gitea/modules/structs"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestDetectMatched(t *testing.T) {
+ testCases := []struct {
+ desc string
+ commit *git.Commit
+ triggeredEvent webhook_module.HookEventType
+ payload api.Payloader
+ yamlOn string
+ expected bool
+ }{
+ {
+ desc: "HookEventCreate(create) matches GithubEventCreate(create)",
+ triggeredEvent: webhook_module.HookEventCreate,
+ payload: nil,
+ yamlOn: "on: create",
+ expected: true,
+ },
+ {
+ desc: "HookEventIssues(issues) `opened` action matches GithubEventIssues(issues)",
+ triggeredEvent: webhook_module.HookEventIssues,
+ payload: &api.IssuePayload{Action: api.HookIssueOpened},
+ yamlOn: "on: issues",
+ expected: true,
+ },
+ {
+ desc: "HookEventIssueComment(issue_comment) `created` action matches GithubEventIssueComment(issue_comment)",
+ triggeredEvent: webhook_module.HookEventIssueComment,
+ payload: &api.IssueCommentPayload{Action: api.HookIssueCommentCreated},
+ yamlOn: "on:\n issue_comment:\n types: [created]",
+ expected: true,
+ },
+
+ {
+ desc: "HookEventIssues(issues) `milestoned` action matches GithubEventIssues(issues)",
+ triggeredEvent: webhook_module.HookEventIssues,
+ payload: &api.IssuePayload{Action: api.HookIssueMilestoned},
+ yamlOn: "on: issues",
+ expected: true,
+ },
+
+ {
+ desc: "HookEventPullRequestSync(pull_request_sync) matches GithubEventPullRequest(pull_request)",
+ triggeredEvent: webhook_module.HookEventPullRequestSync,
+ payload: &api.PullRequestPayload{Action: api.HookIssueSynchronized},
+ yamlOn: "on: pull_request",
+ expected: true,
+ },
+ {
+ desc: "HookEventPullRequest(pull_request) `label_updated` action doesn't match GithubEventPullRequest(pull_request) with no activity type",
+ triggeredEvent: webhook_module.HookEventPullRequest,
+ payload: &api.PullRequestPayload{Action: api.HookIssueLabelUpdated},
+ yamlOn: "on: pull_request",
+ expected: false,
+ },
+ {
+ desc: "HookEventPullRequest(pull_request) `closed` action doesn't match GithubEventPullRequest(pull_request) with no activity type",
+ triggeredEvent: webhook_module.HookEventPullRequest,
+ payload: &api.PullRequestPayload{Action: api.HookIssueClosed},
+ yamlOn: "on: pull_request",
+ expected: false,
+ },
+ {
+ desc: "HookEventPullRequest(pull_request) `closed` action doesn't match GithubEventPullRequest(pull_request) with branches",
+ triggeredEvent: webhook_module.HookEventPullRequest,
+ payload: &api.PullRequestPayload{
+ Action: api.HookIssueClosed,
+ PullRequest: &api.PullRequest{
+ Base: &api.PRBranchInfo{},
+ },
+ },
+ yamlOn: "on:\n pull_request:\n branches: [main]",
+ expected: false,
+ },
+ {
+ desc: "HookEventPullRequest(pull_request) `label_updated` action matches GithubEventPullRequest(pull_request) with `label` activity type",
+ triggeredEvent: webhook_module.HookEventPullRequest,
+ payload: &api.PullRequestPayload{Action: api.HookIssueLabelUpdated},
+ yamlOn: "on:\n pull_request:\n types: [labeled]",
+ expected: true,
+ },
+ {
+ desc: "HookEventPullRequestReviewComment(pull_request_review_comment) matches GithubEventPullRequestReviewComment(pull_request_review_comment)",
+ triggeredEvent: webhook_module.HookEventPullRequestReviewComment,
+ payload: &api.PullRequestPayload{Action: api.HookIssueReviewed},
+ yamlOn: "on:\n pull_request_review_comment:\n types: [created]",
+ expected: true,
+ },
+ {
+ desc: "HookEventPullRequestReviewRejected(pull_request_review_rejected) doesn't match GithubEventPullRequestReview(pull_request_review) with `dismissed` activity type (we don't support `dismissed` at present)",
+ triggeredEvent: webhook_module.HookEventPullRequestReviewRejected,
+ payload: &api.PullRequestPayload{Action: api.HookIssueReviewed},
+ yamlOn: "on:\n pull_request_review:\n types: [dismissed]",
+ expected: false,
+ },
+ {
+ desc: "HookEventRelease(release) `published` action matches GithubEventRelease(release) with `published` activity type",
+ triggeredEvent: webhook_module.HookEventRelease,
+ payload: &api.ReleasePayload{Action: api.HookReleasePublished},
+ yamlOn: "on:\n release:\n types: [published]",
+ expected: true,
+ },
+ {
+ desc: "HookEventRelease(updated) `updated` action matches GithubEventRelease(edited) with `edited` activity type",
+ triggeredEvent: webhook_module.HookEventRelease,
+ payload: &api.ReleasePayload{Action: api.HookReleaseUpdated},
+ yamlOn: "on:\n release:\n types: [edited]",
+ expected: true,
+ },
+
+ {
+ desc: "HookEventPackage(package) `created` action doesn't match GithubEventRegistryPackage(registry_package) with `updated` activity type",
+ triggeredEvent: webhook_module.HookEventPackage,
+ payload: &api.PackagePayload{Action: api.HookPackageCreated},
+ yamlOn: "on:\n registry_package:\n types: [updated]",
+ expected: false,
+ },
+ {
+ desc: "HookEventWiki(wiki) matches GithubEventGollum(gollum)",
+ triggeredEvent: webhook_module.HookEventWiki,
+ payload: nil,
+ yamlOn: "on: gollum",
+ expected: true,
+ },
+ {
+ desc: "HookEventSchedule(schedule) matches GithubEventSchedule(schedule)",
+ triggeredEvent: webhook_module.HookEventSchedule,
+ payload: nil,
+ yamlOn: "on: schedule",
+ expected: true,
+ },
+ {
+ desc: "HookEventWorkflowDispatch(workflow_dispatch) matches GithubEventWorkflowDispatch(workflow_dispatch)",
+ triggeredEvent: webhook_module.HookEventWorkflowDispatch,
+ payload: nil,
+ yamlOn: "on: workflow_dispatch",
+ expected: true,
+ },
+ }
+
+ for _, tc := range testCases {
+ t.Run(tc.desc, func(t *testing.T) {
+ evts, err := GetEventsFromContent([]byte(tc.yamlOn))
+ require.NoError(t, err)
+ assert.Len(t, evts, 1)
+ assert.Equal(t, tc.expected, detectMatched(nil, tc.commit, tc.triggeredEvent, tc.payload, evts[0]))
+ })
+ }
+}