summaryrefslogtreecommitdiffstats
path: root/pkg/jobparser
diff options
context:
space:
mode:
Diffstat (limited to 'pkg/jobparser')
-rw-r--r--pkg/jobparser/evaluator.go185
-rw-r--r--pkg/jobparser/interpeter.go83
-rw-r--r--pkg/jobparser/jobparser.go160
-rw-r--r--pkg/jobparser/jobparser_test.go81
-rw-r--r--pkg/jobparser/model.go338
-rw-r--r--pkg/jobparser/model_test.go314
-rw-r--r--pkg/jobparser/testdata/empty_step.in.yaml8
-rw-r--r--pkg/jobparser/testdata/empty_step.out.yaml7
-rw-r--r--pkg/jobparser/testdata/erase_needs.in.yaml16
-rw-r--r--pkg/jobparser/testdata/erase_needs.out.yaml23
-rw-r--r--pkg/jobparser/testdata/has_needs.in.yaml16
-rw-r--r--pkg/jobparser/testdata/has_needs.out.yaml25
-rw-r--r--pkg/jobparser/testdata/has_secrets.in.yaml14
-rw-r--r--pkg/jobparser/testdata/has_secrets.out.yaml16
-rw-r--r--pkg/jobparser/testdata/has_with.in.yaml15
-rw-r--r--pkg/jobparser/testdata/has_with.out.yaml17
-rw-r--r--pkg/jobparser/testdata/multiple_jobs.in.yaml22
-rw-r--r--pkg/jobparser/testdata/multiple_jobs.out.yaml39
-rw-r--r--pkg/jobparser/testdata/multiple_matrix.in.yaml13
-rw-r--r--pkg/jobparser/testdata/multiple_matrix.out.yaml101
-rw-r--r--pkg/jobparser/testdata/multiple_named_matrix.in.yaml14
-rw-r--r--pkg/jobparser/testdata/multiple_named_matrix.out.yaml101
-rw-r--r--pkg/jobparser/testdata_test.go18
23 files changed, 1626 insertions, 0 deletions
diff --git a/pkg/jobparser/evaluator.go b/pkg/jobparser/evaluator.go
new file mode 100644
index 0000000..80a1397
--- /dev/null
+++ b/pkg/jobparser/evaluator.go
@@ -0,0 +1,185 @@
+package jobparser
+
+import (
+ "fmt"
+ "regexp"
+ "strings"
+
+ "github.com/nektos/act/pkg/exprparser"
+ "gopkg.in/yaml.v3"
+)
+
// ExpressionEvaluator is copied from runner.expressionEvaluator,
// to avoid unnecessary dependencies
type ExpressionEvaluator struct {
	interpreter exprparser.Interpreter
}

// NewExpressionEvaluator returns an ExpressionEvaluator that delegates all
// expression evaluation to the given interpreter.
func NewExpressionEvaluator(interpreter exprparser.Interpreter) *ExpressionEvaluator {
	return &ExpressionEvaluator{interpreter: interpreter}
}
+
+func (ee ExpressionEvaluator) evaluate(in string, defaultStatusCheck exprparser.DefaultStatusCheck) (interface{}, error) {
+ evaluated, err := ee.interpreter.Evaluate(in, defaultStatusCheck)
+
+ return evaluated, err
+}
+
+func (ee ExpressionEvaluator) evaluateScalarYamlNode(node *yaml.Node) error {
+ var in string
+ if err := node.Decode(&in); err != nil {
+ return err
+ }
+ if !strings.Contains(in, "${{") || !strings.Contains(in, "}}") {
+ return nil
+ }
+ expr, _ := rewriteSubExpression(in, false)
+ res, err := ee.evaluate(expr, exprparser.DefaultStatusCheckNone)
+ if err != nil {
+ return err
+ }
+ return node.Encode(res)
+}
+
+func (ee ExpressionEvaluator) evaluateMappingYamlNode(node *yaml.Node) error {
+ // GitHub has this undocumented feature to merge maps, called insert directive
+ insertDirective := regexp.MustCompile(`\${{\s*insert\s*}}`)
+ for i := 0; i < len(node.Content)/2; {
+ k := node.Content[i*2]
+ v := node.Content[i*2+1]
+ if err := ee.EvaluateYamlNode(v); err != nil {
+ return err
+ }
+ var sk string
+ // Merge the nested map of the insert directive
+ if k.Decode(&sk) == nil && insertDirective.MatchString(sk) {
+ node.Content = append(append(node.Content[:i*2], v.Content...), node.Content[(i+1)*2:]...)
+ i += len(v.Content) / 2
+ } else {
+ if err := ee.EvaluateYamlNode(k); err != nil {
+ return err
+ }
+ i++
+ }
+ }
+ return nil
+}
+
+func (ee ExpressionEvaluator) evaluateSequenceYamlNode(node *yaml.Node) error {
+ for i := 0; i < len(node.Content); {
+ v := node.Content[i]
+ // Preserve nested sequences
+ wasseq := v.Kind == yaml.SequenceNode
+ if err := ee.EvaluateYamlNode(v); err != nil {
+ return err
+ }
+ // GitHub has this undocumented feature to merge sequences / arrays
+ // We have a nested sequence via evaluation, merge the arrays
+ if v.Kind == yaml.SequenceNode && !wasseq {
+ node.Content = append(append(node.Content[:i], v.Content...), node.Content[i+1:]...)
+ i += len(v.Content)
+ } else {
+ i++
+ }
+ }
+ return nil
+}
+
// EvaluateYamlNode recursively evaluates all expressions contained in the
// given YAML node, modifying it in place. Other node kinds (e.g. alias or
// document nodes) are left untouched.
func (ee ExpressionEvaluator) EvaluateYamlNode(node *yaml.Node) error {
	switch node.Kind {
	case yaml.ScalarNode:
		return ee.evaluateScalarYamlNode(node)
	case yaml.MappingNode:
		return ee.evaluateMappingYamlNode(node)
	case yaml.SequenceNode:
		return ee.evaluateSequenceYamlNode(node)
	default:
		return nil
	}
}
+
+func (ee ExpressionEvaluator) Interpolate(in string) string {
+ if !strings.Contains(in, "${{") || !strings.Contains(in, "}}") {
+ return in
+ }
+
+ expr, _ := rewriteSubExpression(in, true)
+ evaluated, err := ee.evaluate(expr, exprparser.DefaultStatusCheckNone)
+ if err != nil {
+ return ""
+ }
+
+ value, ok := evaluated.(string)
+ if !ok {
+ panic(fmt.Sprintf("Expression %s did not evaluate to a string", expr))
+ }
+
+ return value
+}
+
// escapeFormatString doubles every brace so the input can be embedded as a
// literal inside a format('...') template.
func escapeFormatString(in string) string {
	replacer := strings.NewReplacer("{", "{{", "}", "}}")
	return replacer.Replace(in)
}
+
// rewriteSubExpression converts a string containing ${{ }} sub-expressions
// into one format('template', expr0, expr1, ...) expression that can be
// evaluated in a single pass. If the input is exactly one expression and
// forceFormat is false, it is returned unchanged. The scanner is aware of
// single-quoted strings inside expressions, so a "}}" or "'" within them does
// not terminate the expression early.
func rewriteSubExpression(in string, forceFormat bool) (string, error) {
	if !strings.Contains(in, "${{") || !strings.Contains(in, "}}") {
		return in, nil
	}

	// Matches the rest of a single-quoted string; '' is an escaped quote.
	strPattern := regexp.MustCompile("(?:''|[^'])*'")
	pos := 0        // current scan position in `in`
	exprStart := -1 // start index of the current expression body, or -1
	strStart := -1  // > -1 while inside a single-quoted string
	var results []string // collected expression bodies
	formatOut := ""      // format template using {N} placeholders
	for pos < len(in) {
		if strStart > -1 {
			// Inside a quoted string: skip to its closing quote.
			matches := strPattern.FindStringIndex(in[pos:])
			if matches == nil {
				panic("unclosed string.")
			}

			strStart = -1
			pos += matches[1]
		} else if exprStart > -1 {
			// Inside an expression: find whichever comes first, the "}}"
			// terminator or the opening quote of a string.
			exprEnd := strings.Index(in[pos:], "}}")
			strStart = strings.Index(in[pos:], "'")

			if exprEnd > -1 && strStart > -1 {
				if exprEnd < strStart {
					strStart = -1
				} else {
					exprEnd = -1
				}
			}

			if exprEnd > -1 {
				// Expression finished: record it and emit its placeholder.
				formatOut += fmt.Sprintf("{%d}", len(results))
				results = append(results, strings.TrimSpace(in[exprStart:pos+exprEnd]))
				pos += exprEnd + 2
				exprStart = -1
			} else if strStart > -1 {
				pos += strStart + 1
			} else {
				panic("unclosed expression.")
			}
		} else {
			// Outside an expression: copy literal text (brace-escaped) up to
			// the next "${{", or to the end of the input.
			exprStart = strings.Index(in[pos:], "${{")
			if exprStart != -1 {
				formatOut += escapeFormatString(in[pos : pos+exprStart])
				exprStart = pos + exprStart + 3
				pos = exprStart
			} else {
				formatOut += escapeFormatString(in[pos:])
				pos = len(in)
			}
		}
	}

	// A lone expression needs no format() wrapper unless the caller forces it.
	if len(results) == 1 && formatOut == "{0}" && !forceFormat {
		return in, nil
	}

	out := fmt.Sprintf("format('%s', %s)", strings.ReplaceAll(formatOut, "'", "''"), strings.Join(results, ", "))
	return out, nil
}
diff --git a/pkg/jobparser/interpeter.go b/pkg/jobparser/interpeter.go
new file mode 100644
index 0000000..8aaf319
--- /dev/null
+++ b/pkg/jobparser/interpeter.go
@@ -0,0 +1,83 @@
+package jobparser
+
+import (
+ "github.com/nektos/act/pkg/exprparser"
+ "github.com/nektos/act/pkg/model"
+ "gopkg.in/yaml.v3"
+)
+
// NewInterpeter returns an interpeter used in the server,
// need github, needs, strategy, matrix, inputs context only,
// see https://docs.github.com/en/actions/learn-github-actions/contexts#context-availability
func NewInterpeter(
	jobID string, // id of the job being evaluated
	job *model.Job, // definition of the job being evaluated
	matrix map[string]interface{}, // the single matrix combination for this run
	gitCtx *model.GithubContext, // backing data of the `github` context
	results map[string]*JobResult, // results of other jobs, keyed by job id
	vars map[string]string, // backing data of the `vars` context
) exprparser.Interpreter {
	// Expose strategy options via the `strategy` context when the job has one.
	strategy := make(map[string]interface{})
	if job.Strategy != nil {
		strategy["fail-fast"] = job.Strategy.FailFast
		strategy["max-parallel"] = job.Strategy.MaxParallel
	}

	// Reconstruct a minimal workflow run from the given results, so the
	// `needs` context can be resolved through the model helpers.
	run := &model.Run{
		Workflow: &model.Workflow{
			Jobs: map[string]*model.Job{},
		},
		JobID: jobID,
	}
	for id, result := range results {
		need := yaml.Node{}
		_ = need.Encode(result.Needs)
		run.Workflow.Jobs[id] = &model.Job{
			RawNeeds: need,
			Result:   result.Result,
			Outputs:  result.Outputs,
		}
	}

	jobs := run.Workflow.Jobs
	jobNeeds := run.Job().Needs()

	// Only jobs this job actually depends on are visible through `needs`.
	using := map[string]exprparser.Needs{}
	for _, need := range jobNeeds {
		if v, ok := jobs[need]; ok {
			using[need] = exprparser.Needs{
				Outputs: v.Outputs,
				Result:  v.Result,
			}
		}
	}

	ee := &exprparser.EvaluationEnvironment{
		Github:   gitCtx,
		Env:      nil, // no need
		Job:      nil, // no need
		Steps:    nil, // no need
		Runner:   nil, // no need
		Secrets:  nil, // no need
		Strategy: strategy,
		Matrix:   matrix,
		Needs:    using,
		Inputs:   nil, // not supported yet
		Vars:     vars,
	}

	config := exprparser.Config{
		Run:        run,
		WorkingDir: "", // WorkingDir is used for the function hashFiles, but it's not needed in the server
		Context:    "job",
	}

	return exprparser.NewInterpeter(ee, config)
}
+
// JobResult is the minimum requirement of job results for Interpeter
type JobResult struct {
	Needs   []string          // ids of the jobs this job depends on
	Result  string            // completion status of the job, e.g. "success"
	Outputs map[string]string // outputs produced by the job
}
diff --git a/pkg/jobparser/jobparser.go b/pkg/jobparser/jobparser.go
new file mode 100644
index 0000000..b8c206a
--- /dev/null
+++ b/pkg/jobparser/jobparser.go
@@ -0,0 +1,160 @@
+package jobparser
+
+import (
+ "bytes"
+ "fmt"
+ "sort"
+ "strings"
+
+ "gopkg.in/yaml.v3"
+
+ "github.com/nektos/act/pkg/model"
+)
+
+func Parse(content []byte, options ...ParseOption) ([]*SingleWorkflow, error) {
+ origin, err := model.ReadWorkflow(bytes.NewReader(content))
+ if err != nil {
+ return nil, fmt.Errorf("model.ReadWorkflow: %w", err)
+ }
+
+ workflow := &SingleWorkflow{}
+ if err := yaml.Unmarshal(content, workflow); err != nil {
+ return nil, fmt.Errorf("yaml.Unmarshal: %w", err)
+ }
+
+ pc := &parseContext{}
+ for _, o := range options {
+ o(pc)
+ }
+ results := map[string]*JobResult{}
+ for id, job := range origin.Jobs {
+ results[id] = &JobResult{
+ Needs: job.Needs(),
+ Result: pc.jobResults[id],
+ Outputs: nil, // not supported yet
+ }
+ }
+
+ var ret []*SingleWorkflow
+ ids, jobs, err := workflow.jobs()
+ if err != nil {
+ return nil, fmt.Errorf("invalid jobs: %w", err)
+ }
+ for i, id := range ids {
+ job := jobs[i]
+ matricxes, err := getMatrixes(origin.GetJob(id))
+ if err != nil {
+ return nil, fmt.Errorf("getMatrixes: %w", err)
+ }
+ for _, matrix := range matricxes {
+ job := job.Clone()
+ evaluator := NewExpressionEvaluator(NewInterpeter(id, origin.GetJob(id), matrix, pc.gitContext, results, pc.vars))
+ if job.Name == "" {
+ job.Name = nameWithMatrix(id, matrix)
+ } else {
+ job.Name = evaluator.Interpolate(job.Name)
+ }
+
+ job.Strategy.RawMatrix = encodeMatrix(matrix)
+
+ runsOn := origin.GetJob(id).RunsOn()
+ for i, v := range runsOn {
+ runsOn[i] = evaluator.Interpolate(v)
+ }
+ job.RawRunsOn = encodeRunsOn(runsOn)
+ swf := &SingleWorkflow{
+ Name: workflow.Name,
+ RawOn: workflow.RawOn,
+ Env: workflow.Env,
+ Defaults: workflow.Defaults,
+ }
+ if err := swf.SetJob(id, job); err != nil {
+ return nil, fmt.Errorf("SetJob: %w", err)
+ }
+ ret = append(ret, swf)
+ }
+ }
+ return ret, nil
+}
+
+func WithJobResults(results map[string]string) ParseOption {
+ return func(c *parseContext) {
+ c.jobResults = results
+ }
+}
+
+func WithGitContext(context *model.GithubContext) ParseOption {
+ return func(c *parseContext) {
+ c.gitContext = context
+ }
+}
+
+func WithVars(vars map[string]string) ParseOption {
+ return func(c *parseContext) {
+ c.vars = vars
+ }
+}
+
// parseContext carries the optional inputs collected from ParseOption values.
type parseContext struct {
	jobResults map[string]string    // completed job results keyed by job id
	gitContext *model.GithubContext // github context used during interpolation
	vars       map[string]string    // configuration variables (vars context)
}

// ParseOption mutates the parseContext before parsing starts.
type ParseOption func(c *parseContext)
+
+func getMatrixes(job *model.Job) ([]map[string]interface{}, error) {
+ ret, err := job.GetMatrixes()
+ if err != nil {
+ return nil, fmt.Errorf("GetMatrixes: %w", err)
+ }
+ sort.Slice(ret, func(i, j int) bool {
+ return matrixName(ret[i]) < matrixName(ret[j])
+ })
+ return ret, nil
+}
+
+func encodeMatrix(matrix map[string]interface{}) yaml.Node {
+ if len(matrix) == 0 {
+ return yaml.Node{}
+ }
+ value := map[string][]interface{}{}
+ for k, v := range matrix {
+ value[k] = []interface{}{v}
+ }
+ node := yaml.Node{}
+ _ = node.Encode(value)
+ return node
+}
+
+func encodeRunsOn(runsOn []string) yaml.Node {
+ node := yaml.Node{}
+ if len(runsOn) == 1 {
+ _ = node.Encode(runsOn[0])
+ } else {
+ _ = node.Encode(runsOn)
+ }
+ return node
+}
+
+func nameWithMatrix(name string, m map[string]interface{}) string {
+ if len(m) == 0 {
+ return name
+ }
+
+ return name + " " + matrixName(m)
+}
+
// matrixName renders a matrix combination as "(v1, v2, ...)", with the values
// ordered by their sorted keys so the output is deterministic.
func matrixName(m map[string]interface{}) string {
	keys := make([]string, 0, len(m))
	for key := range m {
		keys = append(keys, key)
	}
	sort.Strings(keys)

	values := make([]string, 0, len(m))
	for _, key := range keys {
		values = append(values, fmt.Sprint(m[key]))
	}

	return "(" + strings.Join(values, ", ") + ")"
}
diff --git a/pkg/jobparser/jobparser_test.go b/pkg/jobparser/jobparser_test.go
new file mode 100644
index 0000000..06ed825
--- /dev/null
+++ b/pkg/jobparser/jobparser_test.go
@@ -0,0 +1,81 @@
+package jobparser
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+
+ "github.com/stretchr/testify/require"
+
+ "gopkg.in/yaml.v3"
+)
+
+func TestParse(t *testing.T) {
+ tests := []struct {
+ name string
+ options []ParseOption
+ wantErr bool
+ }{
+ {
+ name: "multiple_named_matrix",
+ options: nil,
+ wantErr: false,
+ },
+ {
+ name: "multiple_jobs",
+ options: nil,
+ wantErr: false,
+ },
+ {
+ name: "multiple_matrix",
+ options: nil,
+ wantErr: false,
+ },
+ {
+ name: "has_needs",
+ options: nil,
+ wantErr: false,
+ },
+ {
+ name: "has_with",
+ options: nil,
+ wantErr: false,
+ },
+ {
+ name: "has_secrets",
+ options: nil,
+ wantErr: false,
+ },
+ {
+ name: "empty_step",
+ options: nil,
+ wantErr: false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ content := ReadTestdata(t, tt.name+".in.yaml")
+ want := ReadTestdata(t, tt.name+".out.yaml")
+ got, err := Parse(content, tt.options...)
+ if tt.wantErr {
+ require.Error(t, err)
+ }
+ require.NoError(t, err)
+
+ builder := &strings.Builder{}
+ for _, v := range got {
+ if builder.Len() > 0 {
+ builder.WriteString("---\n")
+ }
+ encoder := yaml.NewEncoder(builder)
+ encoder.SetIndent(2)
+ require.NoError(t, encoder.Encode(v))
+ id, job := v.Job()
+ assert.NotEmpty(t, id)
+ assert.NotNil(t, job)
+ }
+ assert.Equal(t, string(want), builder.String())
+ })
+ }
+}
diff --git a/pkg/jobparser/model.go b/pkg/jobparser/model.go
new file mode 100644
index 0000000..f63a045
--- /dev/null
+++ b/pkg/jobparser/model.go
@@ -0,0 +1,338 @@
+package jobparser
+
+import (
+ "fmt"
+
+ "github.com/nektos/act/pkg/model"
+ "gopkg.in/yaml.v3"
+)
+
// SingleWorkflow is a workflow with single job and single matrix
type SingleWorkflow struct {
	Name     string            `yaml:"name,omitempty"`
	RawOn    yaml.Node         `yaml:"on,omitempty"`   // kept raw: may be scalar, sequence or mapping
	Env      map[string]string `yaml:"env,omitempty"`
	RawJobs  yaml.Node         `yaml:"jobs,omitempty"` // kept raw to preserve job order on re-encode
	Defaults Defaults          `yaml:"defaults,omitempty"`
}
+
+func (w *SingleWorkflow) Job() (string, *Job) {
+ ids, jobs, _ := w.jobs()
+ if len(ids) >= 1 {
+ return ids[0], jobs[0]
+ }
+ return "", nil
+}
+
+func (w *SingleWorkflow) jobs() ([]string, []*Job, error) {
+ ids, jobs, err := parseMappingNode[*Job](&w.RawJobs)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ for _, job := range jobs {
+ steps := make([]*Step, 0, len(job.Steps))
+ for _, s := range job.Steps {
+ if s != nil {
+ steps = append(steps, s)
+ }
+ }
+ job.Steps = steps
+ }
+
+ return ids, jobs, nil
+}
+
+func (w *SingleWorkflow) SetJob(id string, job *Job) error {
+ m := map[string]*Job{
+ id: job,
+ }
+ out, err := yaml.Marshal(m)
+ if err != nil {
+ return err
+ }
+ node := yaml.Node{}
+ if err := yaml.Unmarshal(out, &node); err != nil {
+ return err
+ }
+ if len(node.Content) != 1 || node.Content[0].Kind != yaml.MappingNode {
+ return fmt.Errorf("can not set job: %q", out)
+ }
+ w.RawJobs = *node.Content[0]
+ return nil
+}
+
// Marshal returns the YAML encoding of the workflow.
func (w *SingleWorkflow) Marshal() ([]byte, error) {
	return yaml.Marshal(w)
}
+
// Job is a single workflow job. Fields whose YAML shape can vary (needs,
// runs-on, env, if, container, matrix, secrets) are kept as raw YAML nodes.
type Job struct {
	Name           string                    `yaml:"name,omitempty"`
	RawNeeds       yaml.Node                 `yaml:"needs,omitempty"`
	RawRunsOn      yaml.Node                 `yaml:"runs-on,omitempty"`
	Env            yaml.Node                 `yaml:"env,omitempty"`
	If             yaml.Node                 `yaml:"if,omitempty"`
	Steps          []*Step                   `yaml:"steps,omitempty"`
	TimeoutMinutes string                    `yaml:"timeout-minutes,omitempty"`
	Services       map[string]*ContainerSpec `yaml:"services,omitempty"`
	Strategy       Strategy                  `yaml:"strategy,omitempty"`
	RawContainer   yaml.Node                 `yaml:"container,omitempty"`
	Defaults       Defaults                  `yaml:"defaults,omitempty"`
	Outputs        map[string]string         `yaml:"outputs,omitempty"`
	Uses           string                    `yaml:"uses,omitempty"`
	With           map[string]interface{}    `yaml:"with,omitempty"`
	RawSecrets     yaml.Node                 `yaml:"secrets,omitempty"`
}
+
+func (j *Job) Clone() *Job {
+ if j == nil {
+ return nil
+ }
+ return &Job{
+ Name: j.Name,
+ RawNeeds: j.RawNeeds,
+ RawRunsOn: j.RawRunsOn,
+ Env: j.Env,
+ If: j.If,
+ Steps: j.Steps,
+ TimeoutMinutes: j.TimeoutMinutes,
+ Services: j.Services,
+ Strategy: j.Strategy,
+ RawContainer: j.RawContainer,
+ Defaults: j.Defaults,
+ Outputs: j.Outputs,
+ Uses: j.Uses,
+ With: j.With,
+ RawSecrets: j.RawSecrets,
+ }
+}
+
// Needs returns the ids of the jobs this job depends on, decoding RawNeeds
// through model.Job.
func (j *Job) Needs() []string {
	return (&model.Job{RawNeeds: j.RawNeeds}).Needs()
}
+
// EraseNeeds clears the job's needs and returns the job itself for chaining.
func (j *Job) EraseNeeds() *Job {
	j.RawNeeds = yaml.Node{}
	return j
}
+
// RunsOn returns the job's runs-on labels, decoding RawRunsOn through
// model.Job.
func (j *Job) RunsOn() []string {
	return (&model.Job{RawRunsOn: j.RawRunsOn}).RunsOn()
}
+
// Step is a single step of a job.
type Step struct {
	ID               string            `yaml:"id,omitempty"`
	If               yaml.Node         `yaml:"if,omitempty"`
	Name             string            `yaml:"name,omitempty"`
	Uses             string            `yaml:"uses,omitempty"`
	Run              string            `yaml:"run,omitempty"`
	WorkingDirectory string            `yaml:"working-directory,omitempty"`
	Shell            string            `yaml:"shell,omitempty"`
	Env              yaml.Node         `yaml:"env,omitempty"`
	With             map[string]string `yaml:"with,omitempty"`
	ContinueOnError  bool              `yaml:"continue-on-error,omitempty"`
	TimeoutMinutes   string            `yaml:"timeout-minutes,omitempty"`
}
+
// String gets the name of step, delegating to model.Step for the display
// logic; a nil step renders as an empty string.
func (s *Step) String() string {
	if s == nil {
		return ""
	}
	return (&model.Step{
		ID:   s.ID,
		Name: s.Name,
		Uses: s.Uses,
		Run:  s.Run,
	}).String()
}
+
// ContainerSpec describes a job container or a service container.
type ContainerSpec struct {
	Image       string            `yaml:"image,omitempty"`
	Env         map[string]string `yaml:"env,omitempty"`
	Ports       []string          `yaml:"ports,omitempty"`
	Volumes     []string          `yaml:"volumes,omitempty"`
	Options     string            `yaml:"options,omitempty"`
	Credentials map[string]string `yaml:"credentials,omitempty"`
	Cmd         []string          `yaml:"cmd,omitempty"`
}
+
// Strategy is the strategy section of a job. The scalar options are kept as
// raw strings and the matrix as a raw YAML node, preserving their original
// text on re-encode.
type Strategy struct {
	FailFastString    string    `yaml:"fail-fast,omitempty"`
	MaxParallelString string    `yaml:"max-parallel,omitempty"`
	RawMatrix         yaml.Node `yaml:"matrix,omitempty"`
}
+
// Defaults is the defaults section of a workflow or job.
type Defaults struct {
	Run RunDefaults `yaml:"run,omitempty"`
}
+
// RunDefaults is the `run` subsection of Defaults.
type RunDefaults struct {
	Shell            string `yaml:"shell,omitempty"`
	WorkingDirectory string `yaml:"working-directory,omitempty"`
}
+
// Event is a parsed workflow trigger: its name plus either filter lists
// (acts) or cron schedules, depending on the trigger kind.
type Event struct {
	Name      string
	acts      map[string][]string // filter name (e.g. "branches", "types") -> values
	schedules []map[string]string // schedule entries, e.g. {"cron": "..."}
}

// IsSchedule reports whether the event is a schedule trigger.
func (evt *Event) IsSchedule() bool {
	return evt.schedules != nil
}

// Acts returns the event's filters (branches, tags, types, ...).
func (evt *Event) Acts() map[string][]string {
	return evt.acts
}

// Schedules returns the event's cron entries.
func (evt *Event) Schedules() []map[string]string {
	return evt.schedules
}
+
// ParseRawOn decodes the `on:` node of a workflow into a list of Events. It
// accepts the three YAML shapes GitHub allows: a single scalar event name, a
// sequence of names, and a mapping from event name to trigger filters
// (including `schedule` cron lists and `workflow_dispatch` inputs).
func ParseRawOn(rawOn *yaml.Node) ([]*Event, error) {
	switch rawOn.Kind {
	case yaml.ScalarNode:
		var val string
		err := rawOn.Decode(&val)
		if err != nil {
			return nil, err
		}
		return []*Event{
			{Name: val},
		}, nil
	case yaml.SequenceNode:
		var val []interface{}
		err := rawOn.Decode(&val)
		if err != nil {
			return nil, err
		}
		res := make([]*Event, 0, len(val))
		for _, v := range val {
			switch t := v.(type) {
			case string:
				res = append(res, &Event{Name: t})
			default:
				return nil, fmt.Errorf("invalid type %T", t)
			}
		}
		return res, nil
	case yaml.MappingNode:
		events, triggers, err := parseMappingNode[interface{}](rawOn)
		if err != nil {
			return nil, err
		}
		res := make([]*Event, 0, len(events))
		for i, k := range events {
			v := triggers[i]
			if v == nil {
				// Trigger with an empty body, e.g. a bare `push:`.
				res = append(res, &Event{
					Name: k,
					acts: map[string][]string{},
				})
				continue
			}
			switch t := v.(type) {
			case string:
				// Scalar trigger body: no usable filters.
				res = append(res, &Event{
					Name: k,
					acts: map[string][]string{},
				})
			case []string:
				res = append(res, &Event{
					Name: k,
					acts: map[string][]string{},
				})
			case map[string]interface{}:
				// Filter map such as branches/tags/types; each value may be a
				// single string or a list of strings.
				acts := make(map[string][]string, len(t))
				for act, branches := range t {
					switch b := branches.(type) {
					case string:
						acts[act] = []string{b}
					case []string:
						acts[act] = b
					case []interface{}:
						acts[act] = make([]string, len(b))
						for i, v := range b {
							var ok bool
							if acts[act][i], ok = v.(string); !ok {
								return nil, fmt.Errorf("unknown on type: %#v", branches)
							}
						}
					case map[string]interface{}:
						// Only workflow_dispatch inputs may nest a map; the
						// inputs themselves are not represented as acts.
						if k != "workflow_dispatch" || act != "inputs" {
							return nil, fmt.Errorf("unknown on type: %#v", v)
						}
						acts = nil
					default:
						return nil, fmt.Errorf("unknown on type: %#v", branches)
					}
				}
				res = append(res, &Event{
					Name: k,
					acts: acts,
				})
			case []interface{}:
				// Only `schedule` may carry a list body: its cron entries.
				if k != "schedule" {
					return nil, fmt.Errorf("unknown on type: %#v", v)
				}
				schedules := make([]map[string]string, len(t))
				for i, tt := range t {
					vv, ok := tt.(map[string]interface{})
					if !ok {
						return nil, fmt.Errorf("unknown on type: %#v", v)
					}
					schedules[i] = make(map[string]string, len(vv))
					for k, vvv := range vv {
						var ok bool
						if schedules[i][k], ok = vvv.(string); !ok {
							return nil, fmt.Errorf("unknown on type: %#v", v)
						}
					}
				}
				res = append(res, &Event{
					Name:      k,
					schedules: schedules,
				})
			default:
				return nil, fmt.Errorf("unknown on type: %#v", v)
			}
		}
		return res, nil
	default:
		return nil, fmt.Errorf("unknown on type: %v", rawOn.Kind)
	}
}
+
+// parseMappingNode parse a mapping node and preserve order.
+func parseMappingNode[T any](node *yaml.Node) ([]string, []T, error) {
+ if node.Kind != yaml.MappingNode {
+ return nil, nil, fmt.Errorf("input node is not a mapping node")
+ }
+
+ var scalars []string
+ var datas []T
+ expectKey := true
+ for _, item := range node.Content {
+ if expectKey {
+ if item.Kind != yaml.ScalarNode {
+ return nil, nil, fmt.Errorf("not a valid scalar node: %v", item.Value)
+ }
+ scalars = append(scalars, item.Value)
+ expectKey = false
+ } else {
+ var val T
+ if err := item.Decode(&val); err != nil {
+ return nil, nil, err
+ }
+ datas = append(datas, val)
+ expectKey = true
+ }
+ }
+
+ if len(scalars) != len(datas) {
+ return nil, nil, fmt.Errorf("invalid definition of on: %v", node.Value)
+ }
+
+ return scalars, datas, nil
+}
diff --git a/pkg/jobparser/model_test.go b/pkg/jobparser/model_test.go
new file mode 100644
index 0000000..a034306
--- /dev/null
+++ b/pkg/jobparser/model_test.go
@@ -0,0 +1,314 @@
+package jobparser
+
+import (
+ "fmt"
+ "strings"
+ "testing"
+
+ "github.com/nektos/act/pkg/model"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "gopkg.in/yaml.v3"
+)
+
// TestParseRawOn checks that the `on:` section of a workflow is decoded into
// Event values for the scalar, sequence and mapping forms, including
// branch/tag/type filters, schedules and workflow_dispatch inputs.
func TestParseRawOn(t *testing.T) {
	kases := []struct {
		input  string
		result []*Event
	}{
		{
			input: "on: issue_comment",
			result: []*Event{
				{
					Name: "issue_comment",
				},
			},
		},
		{
			input: "on:\n push",
			result: []*Event{
				{
					Name: "push",
				},
			},
		},

		{
			input: "on:\n - push\n - pull_request",
			result: []*Event{
				{
					Name: "push",
				},
				{
					Name: "pull_request",
				},
			},
		},
		{
			input: "on:\n push:\n branches:\n - master",
			result: []*Event{
				{
					Name: "push",
					acts: map[string][]string{
						"branches": {
							"master",
						},
					},
				},
			},
		},
		{
			input: "on:\n branch_protection_rule:\n types: [created, deleted]",
			result: []*Event{
				{
					Name: "branch_protection_rule",
					acts: map[string][]string{
						"types": {
							"created",
							"deleted",
						},
					},
				},
			},
		},
		{
			input: "on:\n project:\n types: [created, deleted]\n milestone:\n types: [opened, deleted]",
			result: []*Event{
				{
					Name: "project",
					acts: map[string][]string{
						"types": {
							"created",
							"deleted",
						},
					},
				},
				{
					Name: "milestone",
					acts: map[string][]string{
						"types": {
							"opened",
							"deleted",
						},
					},
				},
			},
		},
		{
			input: "on:\n pull_request:\n types:\n - opened\n branches:\n - 'releases/**'",
			result: []*Event{
				{
					Name: "pull_request",
					acts: map[string][]string{
						"types": {
							"opened",
						},
						"branches": {
							"releases/**",
						},
					},
				},
			},
		},
		{
			input: "on:\n push:\n branches:\n - main\n pull_request:\n types:\n - opened\n branches:\n - '**'",
			result: []*Event{
				{
					Name: "push",
					acts: map[string][]string{
						"branches": {
							"main",
						},
					},
				},
				{
					Name: "pull_request",
					acts: map[string][]string{
						"types": {
							"opened",
						},
						"branches": {
							"**",
						},
					},
				},
			},
		},
		{
			input: "on:\n push:\n branches:\n - 'main'\n - 'releases/**'",
			result: []*Event{
				{
					Name: "push",
					acts: map[string][]string{
						"branches": {
							"main",
							"releases/**",
						},
					},
				},
			},
		},
		{
			input: "on:\n push:\n tags:\n - v1.**",
			result: []*Event{
				{
					Name: "push",
					acts: map[string][]string{
						"tags": {
							"v1.**",
						},
					},
				},
			},
		},
		{
			input: "on: [pull_request, workflow_dispatch]",
			result: []*Event{
				{
					Name: "pull_request",
				},
				{
					Name: "workflow_dispatch",
				},
			},
		},
		{
			input: "on:\n schedule:\n - cron: '20 6 * * *'",
			result: []*Event{
				{
					Name: "schedule",
					schedules: []map[string]string{
						{
							"cron": "20 6 * * *",
						},
					},
				},
			},
		},
		{
			input: "on:\n workflow_dispatch:\n inputs:\n test:\n type: string",
			result: []*Event{
				{
					Name: "workflow_dispatch",
				},
			},
		},
	}
	for _, kase := range kases {
		t.Run(kase.input, func(t *testing.T) {
			origin, err := model.ReadWorkflow(strings.NewReader(kase.input))
			assert.NoError(t, err)

			events, err := ParseRawOn(&origin.RawOn)
			assert.NoError(t, err)
			assert.EqualValues(t, kase.result, events, fmt.Sprintf("%#v", events))
		})
	}
}
+
// TestSingleWorkflow_SetJob parses a workflow, erases the needs of every
// resulting job, writes each job back with SetJob, and compares the
// re-encoded output with the golden file.
func TestSingleWorkflow_SetJob(t *testing.T) {
	t.Run("erase needs", func(t *testing.T) {
		content := ReadTestdata(t, "erase_needs.in.yaml")
		want := ReadTestdata(t, "erase_needs.out.yaml")
		swf, err := Parse(content)
		require.NoError(t, err)
		builder := &strings.Builder{}
		for _, v := range swf {
			id, job := v.Job()
			require.NoError(t, v.SetJob(id, job.EraseNeeds()))

			// Split the single-job workflows with a YAML document separator.
			if builder.Len() > 0 {
				builder.WriteString("---\n")
			}
			encoder := yaml.NewEncoder(builder)
			encoder.SetIndent(2)
			require.NoError(t, encoder.Encode(v))
		}
		assert.Equal(t, string(want), builder.String())
	})
}
+
// TestParseMappingNode checks that parseMappingNode returns both the keys and
// the decoded values of a YAML mapping, preserving document order.
func TestParseMappingNode(t *testing.T) {
	tests := []struct {
		input   string
		scalars []string
		datas   []interface{}
	}{
		{
			input:   "on:\n push:\n branches:\n - master",
			scalars: []string{"push"},
			datas: []interface{}{
				map[string]interface{}{
					"branches": []interface{}{"master"},
				},
			},
		},
		{
			input:   "on:\n branch_protection_rule:\n types: [created, deleted]",
			scalars: []string{"branch_protection_rule"},
			datas: []interface{}{
				map[string]interface{}{
					"types": []interface{}{"created", "deleted"},
				},
			},
		},
		{
			input:   "on:\n project:\n types: [created, deleted]\n milestone:\n types: [opened, deleted]",
			scalars: []string{"project", "milestone"},
			datas: []interface{}{
				map[string]interface{}{
					"types": []interface{}{"created", "deleted"},
				},
				map[string]interface{}{
					"types": []interface{}{"opened", "deleted"},
				},
			},
		},
		{
			input:   "on:\n pull_request:\n types:\n - opened\n branches:\n - 'releases/**'",
			scalars: []string{"pull_request"},
			datas: []interface{}{
				map[string]interface{}{
					"types":    []interface{}{"opened"},
					"branches": []interface{}{"releases/**"},
				},
			},
		},
		{
			input:   "on:\n push:\n branches:\n - main\n pull_request:\n types:\n - opened\n branches:\n - '**'",
			scalars: []string{"push", "pull_request"},
			datas: []interface{}{
				map[string]interface{}{
					"branches": []interface{}{"main"},
				},
				map[string]interface{}{
					"types":    []interface{}{"opened"},
					"branches": []interface{}{"**"},
				},
			},
		},
		{
			input:   "on:\n schedule:\n - cron: '20 6 * * *'",
			scalars: []string{"schedule"},
			datas: []interface{}{
				[]interface{}{map[string]interface{}{
					"cron": "20 6 * * *",
				}},
			},
		},
	}

	for _, test := range tests {
		t.Run(test.input, func(t *testing.T) {
			workflow, err := model.ReadWorkflow(strings.NewReader(test.input))
			assert.NoError(t, err)

			scalars, datas, err := parseMappingNode[interface{}](&workflow.RawOn)
			assert.NoError(t, err)
			assert.EqualValues(t, test.scalars, scalars, fmt.Sprintf("%#v", scalars))
			assert.EqualValues(t, test.datas, datas, fmt.Sprintf("%#v", datas))
		})
	}
}
diff --git a/pkg/jobparser/testdata/empty_step.in.yaml b/pkg/jobparser/testdata/empty_step.in.yaml
new file mode 100644
index 0000000..737ac0b
--- /dev/null
+++ b/pkg/jobparser/testdata/empty_step.in.yaml
@@ -0,0 +1,8 @@
+name: test
+jobs:
+ job1:
+ name: job1
+ runs-on: linux
+ steps:
+ - run: echo job-1
+ -
diff --git a/pkg/jobparser/testdata/empty_step.out.yaml b/pkg/jobparser/testdata/empty_step.out.yaml
new file mode 100644
index 0000000..06828e0
--- /dev/null
+++ b/pkg/jobparser/testdata/empty_step.out.yaml
@@ -0,0 +1,7 @@
+name: test
+jobs:
+ job1:
+ name: job1
+ runs-on: linux
+ steps:
+ - run: echo job-1
diff --git a/pkg/jobparser/testdata/erase_needs.in.yaml b/pkg/jobparser/testdata/erase_needs.in.yaml
new file mode 100644
index 0000000..a7d1f9b
--- /dev/null
+++ b/pkg/jobparser/testdata/erase_needs.in.yaml
@@ -0,0 +1,16 @@
+name: test
+jobs:
+ job1:
+ runs-on: linux
+ steps:
+ - run: uname -a
+ job2:
+ runs-on: linux
+ steps:
+ - run: uname -a
+ needs: job1
+ job3:
+ runs-on: linux
+ steps:
+ - run: uname -a
+ needs: [job1, job2]
diff --git a/pkg/jobparser/testdata/erase_needs.out.yaml b/pkg/jobparser/testdata/erase_needs.out.yaml
new file mode 100644
index 0000000..959960d
--- /dev/null
+++ b/pkg/jobparser/testdata/erase_needs.out.yaml
@@ -0,0 +1,23 @@
+name: test
+jobs:
+ job1:
+ name: job1
+ runs-on: linux
+ steps:
+ - run: uname -a
+---
+name: test
+jobs:
+ job2:
+ name: job2
+ runs-on: linux
+ steps:
+ - run: uname -a
+---
+name: test
+jobs:
+ job3:
+ name: job3
+ runs-on: linux
+ steps:
+ - run: uname -a
diff --git a/pkg/jobparser/testdata/has_needs.in.yaml b/pkg/jobparser/testdata/has_needs.in.yaml
new file mode 100644
index 0000000..a7d1f9b
--- /dev/null
+++ b/pkg/jobparser/testdata/has_needs.in.yaml
@@ -0,0 +1,16 @@
+name: test
+jobs:
+ job1:
+ runs-on: linux
+ steps:
+ - run: uname -a
+ job2:
+ runs-on: linux
+ steps:
+ - run: uname -a
+ needs: job1
+ job3:
+ runs-on: linux
+ steps:
+ - run: uname -a
+ needs: [job1, job2]
diff --git a/pkg/jobparser/testdata/has_needs.out.yaml b/pkg/jobparser/testdata/has_needs.out.yaml
new file mode 100644
index 0000000..a544aa2
--- /dev/null
+++ b/pkg/jobparser/testdata/has_needs.out.yaml
@@ -0,0 +1,25 @@
+name: test
+jobs:
+ job1:
+ name: job1
+ runs-on: linux
+ steps:
+ - run: uname -a
+---
+name: test
+jobs:
+ job2:
+ name: job2
+ needs: job1
+ runs-on: linux
+ steps:
+ - run: uname -a
+---
+name: test
+jobs:
+ job3:
+ name: job3
+ needs: [job1, job2]
+ runs-on: linux
+ steps:
+ - run: uname -a
diff --git a/pkg/jobparser/testdata/has_secrets.in.yaml b/pkg/jobparser/testdata/has_secrets.in.yaml
new file mode 100644
index 0000000..64b9f69
--- /dev/null
+++ b/pkg/jobparser/testdata/has_secrets.in.yaml
@@ -0,0 +1,14 @@
+name: test
+jobs:
+ job1:
+ name: job1
+ runs-on: linux
+ uses: .gitea/workflows/build.yml
+ secrets:
+ secret: hideme
+
+ job2:
+ name: job2
+ runs-on: linux
+ uses: .gitea/workflows/build.yml
+ secrets: inherit
diff --git a/pkg/jobparser/testdata/has_secrets.out.yaml b/pkg/jobparser/testdata/has_secrets.out.yaml
new file mode 100644
index 0000000..23dfb80
--- /dev/null
+++ b/pkg/jobparser/testdata/has_secrets.out.yaml
@@ -0,0 +1,16 @@
+name: test
+jobs:
+ job1:
+ name: job1
+ runs-on: linux
+ uses: .gitea/workflows/build.yml
+ secrets:
+ secret: hideme
+---
+name: test
+jobs:
+ job2:
+ name: job2
+ runs-on: linux
+ uses: .gitea/workflows/build.yml
+ secrets: inherit
diff --git a/pkg/jobparser/testdata/has_with.in.yaml b/pkg/jobparser/testdata/has_with.in.yaml
new file mode 100644
index 0000000..4e3dc74
--- /dev/null
+++ b/pkg/jobparser/testdata/has_with.in.yaml
@@ -0,0 +1,15 @@
+name: test
+jobs:
+ job1:
+ name: job1
+ runs-on: linux
+ uses: .gitea/workflows/build.yml
+ with:
+ package: service
+
+ job2:
+ name: job2
+ runs-on: linux
+ uses: .gitea/workflows/build.yml
+ with:
+ package: module
diff --git a/pkg/jobparser/testdata/has_with.out.yaml b/pkg/jobparser/testdata/has_with.out.yaml
new file mode 100644
index 0000000..de79b80
--- /dev/null
+++ b/pkg/jobparser/testdata/has_with.out.yaml
@@ -0,0 +1,17 @@
+name: test
+jobs:
+ job1:
+ name: job1
+ runs-on: linux
+ uses: .gitea/workflows/build.yml
+ with:
+ package: service
+---
+name: test
+jobs:
+ job2:
+ name: job2
+ runs-on: linux
+ uses: .gitea/workflows/build.yml
+ with:
+ package: module
diff --git a/pkg/jobparser/testdata/multiple_jobs.in.yaml b/pkg/jobparser/testdata/multiple_jobs.in.yaml
new file mode 100644
index 0000000..266ede8
--- /dev/null
+++ b/pkg/jobparser/testdata/multiple_jobs.in.yaml
@@ -0,0 +1,22 @@
+name: test
+jobs:
+ zzz:
+ runs-on: linux
+ steps:
+ - run: echo zzz
+ job1:
+ runs-on: linux
+ steps:
+ - run: uname -a && go version
+ job2:
+ runs-on: linux
+ steps:
+ - run: uname -a && go version
+ job3:
+ runs-on: linux
+ steps:
+ - run: uname -a && go version
+ aaa:
+ runs-on: linux
+ steps:
+ - run: uname -a && go version
diff --git a/pkg/jobparser/testdata/multiple_jobs.out.yaml b/pkg/jobparser/testdata/multiple_jobs.out.yaml
new file mode 100644
index 0000000..ea22350
--- /dev/null
+++ b/pkg/jobparser/testdata/multiple_jobs.out.yaml
@@ -0,0 +1,39 @@
+name: test
+jobs:
+ zzz:
+ name: zzz
+ runs-on: linux
+ steps:
+ - run: echo zzz
+---
+name: test
+jobs:
+ job1:
+ name: job1
+ runs-on: linux
+ steps:
+ - run: uname -a && go version
+---
+name: test
+jobs:
+ job2:
+ name: job2
+ runs-on: linux
+ steps:
+ - run: uname -a && go version
+---
+name: test
+jobs:
+ job3:
+ name: job3
+ runs-on: linux
+ steps:
+ - run: uname -a && go version
+---
+name: test
+jobs:
+ aaa:
+ name: aaa
+ runs-on: linux
+ steps:
+ - run: uname -a && go version
diff --git a/pkg/jobparser/testdata/multiple_matrix.in.yaml b/pkg/jobparser/testdata/multiple_matrix.in.yaml
new file mode 100644
index 0000000..99985f3
--- /dev/null
+++ b/pkg/jobparser/testdata/multiple_matrix.in.yaml
@@ -0,0 +1,13 @@
+name: test
+jobs:
+ job1:
+ strategy:
+ matrix:
+ os: [ubuntu-22.04, ubuntu-20.04]
+ version: [1.17, 1.18, 1.19]
+ runs-on: ${{ matrix.os }}
+ steps:
+ - uses: actions/setup-go@v3
+ with:
+ go-version: ${{ matrix.version }}
+ - run: uname -a && go version \ No newline at end of file
diff --git a/pkg/jobparser/testdata/multiple_matrix.out.yaml b/pkg/jobparser/testdata/multiple_matrix.out.yaml
new file mode 100644
index 0000000..e277cdd
--- /dev/null
+++ b/pkg/jobparser/testdata/multiple_matrix.out.yaml
@@ -0,0 +1,101 @@
+name: test
+jobs:
+ job1:
+ name: job1 (ubuntu-20.04, 1.17)
+ runs-on: ubuntu-20.04
+ steps:
+ - uses: actions/setup-go@v3
+ with:
+ go-version: ${{ matrix.version }}
+ - run: uname -a && go version
+ strategy:
+ matrix:
+ os:
+ - ubuntu-20.04
+ version:
+ - 1.17
+---
+name: test
+jobs:
+ job1:
+ name: job1 (ubuntu-20.04, 1.18)
+ runs-on: ubuntu-20.04
+ steps:
+ - uses: actions/setup-go@v3
+ with:
+ go-version: ${{ matrix.version }}
+ - run: uname -a && go version
+ strategy:
+ matrix:
+ os:
+ - ubuntu-20.04
+ version:
+ - 1.18
+---
+name: test
+jobs:
+ job1:
+ name: job1 (ubuntu-20.04, 1.19)
+ runs-on: ubuntu-20.04
+ steps:
+ - uses: actions/setup-go@v3
+ with:
+ go-version: ${{ matrix.version }}
+ - run: uname -a && go version
+ strategy:
+ matrix:
+ os:
+ - ubuntu-20.04
+ version:
+ - 1.19
+---
+name: test
+jobs:
+ job1:
+ name: job1 (ubuntu-22.04, 1.17)
+ runs-on: ubuntu-22.04
+ steps:
+ - uses: actions/setup-go@v3
+ with:
+ go-version: ${{ matrix.version }}
+ - run: uname -a && go version
+ strategy:
+ matrix:
+ os:
+ - ubuntu-22.04
+ version:
+ - 1.17
+---
+name: test
+jobs:
+ job1:
+ name: job1 (ubuntu-22.04, 1.18)
+ runs-on: ubuntu-22.04
+ steps:
+ - uses: actions/setup-go@v3
+ with:
+ go-version: ${{ matrix.version }}
+ - run: uname -a && go version
+ strategy:
+ matrix:
+ os:
+ - ubuntu-22.04
+ version:
+ - 1.18
+---
+name: test
+jobs:
+ job1:
+ name: job1 (ubuntu-22.04, 1.19)
+ runs-on: ubuntu-22.04
+ steps:
+ - uses: actions/setup-go@v3
+ with:
+ go-version: ${{ matrix.version }}
+ - run: uname -a && go version
+ strategy:
+ matrix:
+ os:
+ - ubuntu-22.04
+ version:
+ - 1.19
diff --git a/pkg/jobparser/testdata/multiple_named_matrix.in.yaml b/pkg/jobparser/testdata/multiple_named_matrix.in.yaml
new file mode 100644
index 0000000..bd44b2c
--- /dev/null
+++ b/pkg/jobparser/testdata/multiple_named_matrix.in.yaml
@@ -0,0 +1,14 @@
+name: test
+jobs:
+ job1:
+ strategy:
+ matrix:
+ os: [ubuntu-22.04, ubuntu-20.04]
+ version: [1.17, 1.18, 1.19]
+ runs-on: ${{ matrix.os }}
+ name: On ${{ matrix.os }} with go v${{ matrix.version }}
+ steps:
+ - uses: actions/setup-go@v3
+ with:
+ go-version: ${{ matrix.version }}
+ - run: uname -a && go version
diff --git a/pkg/jobparser/testdata/multiple_named_matrix.out.yaml b/pkg/jobparser/testdata/multiple_named_matrix.out.yaml
new file mode 100644
index 0000000..eea2da6
--- /dev/null
+++ b/pkg/jobparser/testdata/multiple_named_matrix.out.yaml
@@ -0,0 +1,101 @@
+name: test
+jobs:
+ job1:
+ name: On ubuntu-20.04 with go v1.17
+ runs-on: ubuntu-20.04
+ steps:
+ - uses: actions/setup-go@v3
+ with:
+ go-version: ${{ matrix.version }}
+ - run: uname -a && go version
+ strategy:
+ matrix:
+ os:
+ - ubuntu-20.04
+ version:
+ - 1.17
+---
+name: test
+jobs:
+ job1:
+ name: On ubuntu-20.04 with go v1.18
+ runs-on: ubuntu-20.04
+ steps:
+ - uses: actions/setup-go@v3
+ with:
+ go-version: ${{ matrix.version }}
+ - run: uname -a && go version
+ strategy:
+ matrix:
+ os:
+ - ubuntu-20.04
+ version:
+ - 1.18
+---
+name: test
+jobs:
+ job1:
+ name: On ubuntu-20.04 with go v1.19
+ runs-on: ubuntu-20.04
+ steps:
+ - uses: actions/setup-go@v3
+ with:
+ go-version: ${{ matrix.version }}
+ - run: uname -a && go version
+ strategy:
+ matrix:
+ os:
+ - ubuntu-20.04
+ version:
+ - 1.19
+---
+name: test
+jobs:
+ job1:
+ name: On ubuntu-22.04 with go v1.17
+ runs-on: ubuntu-22.04
+ steps:
+ - uses: actions/setup-go@v3
+ with:
+ go-version: ${{ matrix.version }}
+ - run: uname -a && go version
+ strategy:
+ matrix:
+ os:
+ - ubuntu-22.04
+ version:
+ - 1.17
+---
+name: test
+jobs:
+ job1:
+ name: On ubuntu-22.04 with go v1.18
+ runs-on: ubuntu-22.04
+ steps:
+ - uses: actions/setup-go@v3
+ with:
+ go-version: ${{ matrix.version }}
+ - run: uname -a && go version
+ strategy:
+ matrix:
+ os:
+ - ubuntu-22.04
+ version:
+ - 1.18
+---
+name: test
+jobs:
+ job1:
+ name: On ubuntu-22.04 with go v1.19
+ runs-on: ubuntu-22.04
+ steps:
+ - uses: actions/setup-go@v3
+ with:
+ go-version: ${{ matrix.version }}
+ - run: uname -a && go version
+ strategy:
+ matrix:
+ os:
+ - ubuntu-22.04
+ version:
+ - 1.19
diff --git a/pkg/jobparser/testdata_test.go b/pkg/jobparser/testdata_test.go
new file mode 100644
index 0000000..fb75a50
--- /dev/null
+++ b/pkg/jobparser/testdata_test.go
@@ -0,0 +1,18 @@
+package jobparser
+
+import (
+ "embed"
+ "path/filepath"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+//go:embed testdata
+var testdata embed.FS
+
+func ReadTestdata(t *testing.T, name string) []byte {
+ content, err := testdata.ReadFile(filepath.Join("testdata", name))
+ require.NoError(t, err)
+ return content
+}