diff options
Diffstat (limited to '')
58 files changed, 10458 insertions, 0 deletions
diff --git a/services/migrations/codebase.go b/services/migrations/codebase.go new file mode 100644 index 0000000..492fc90 --- /dev/null +++ b/services/migrations/codebase.go @@ -0,0 +1,651 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package migrations + +import ( + "context" + "encoding/xml" + "fmt" + "net/http" + "net/url" + "strconv" + "strings" + "time" + + "code.gitea.io/gitea/modules/log" + base "code.gitea.io/gitea/modules/migration" + "code.gitea.io/gitea/modules/proxy" + "code.gitea.io/gitea/modules/structs" +) + +var ( + _ base.Downloader = &CodebaseDownloader{} + _ base.DownloaderFactory = &CodebaseDownloaderFactory{} +) + +func init() { + RegisterDownloaderFactory(&CodebaseDownloaderFactory{}) +} + +// CodebaseDownloaderFactory defines a downloader factory +type CodebaseDownloaderFactory struct{} + +// New returns a downloader related to this factory according MigrateOptions +func (f *CodebaseDownloaderFactory) New(ctx context.Context, opts base.MigrateOptions) (base.Downloader, error) { + u, err := url.Parse(opts.CloneAddr) + if err != nil { + return nil, err + } + u.User = nil + + fields := strings.Split(strings.Trim(u.Path, "/"), "/") + if len(fields) != 2 { + return nil, fmt.Errorf("invalid path: %s", u.Path) + } + project := fields[0] + repoName := strings.TrimSuffix(fields[1], ".git") + + log.Trace("Create Codebase downloader. 
BaseURL: %v RepoName: %s", u, repoName) + + return NewCodebaseDownloader(ctx, u, project, repoName, opts.AuthUsername, opts.AuthPassword), nil +} + +// GitServiceType returns the type of git service +func (f *CodebaseDownloaderFactory) GitServiceType() structs.GitServiceType { + return structs.CodebaseService +} + +type codebaseUser struct { + ID int64 `json:"id"` + Name string `json:"name"` + Email string `json:"email"` +} + +// CodebaseDownloader implements a Downloader interface to get repository information +// from Codebase +type CodebaseDownloader struct { + base.NullDownloader + ctx context.Context + client *http.Client + baseURL *url.URL + projectURL *url.URL + project string + repoName string + maxIssueIndex int64 + userMap map[int64]*codebaseUser + commitMap map[string]string +} + +// SetContext set context +func (d *CodebaseDownloader) SetContext(ctx context.Context) { + d.ctx = ctx +} + +// NewCodebaseDownloader creates a new downloader +func NewCodebaseDownloader(ctx context.Context, projectURL *url.URL, project, repoName, username, password string) *CodebaseDownloader { + baseURL, _ := url.Parse("https://api3.codebasehq.com") + + downloader := &CodebaseDownloader{ + ctx: ctx, + baseURL: baseURL, + projectURL: projectURL, + project: project, + repoName: repoName, + client: &http.Client{ + Transport: &http.Transport{ + Proxy: func(req *http.Request) (*url.URL, error) { + if len(username) > 0 && len(password) > 0 { + req.SetBasicAuth(username, password) + } + return proxy.Proxy()(req) + }, + }, + }, + userMap: make(map[int64]*codebaseUser), + commitMap: make(map[string]string), + } + + log.Trace("Create Codebase downloader. 
BaseURL: %s Project: %s RepoName: %s", baseURL, project, repoName) + return downloader +} + +// String implements Stringer +func (d *CodebaseDownloader) String() string { + return fmt.Sprintf("migration from codebase server %s %s/%s", d.baseURL, d.project, d.repoName) +} + +func (d *CodebaseDownloader) LogString() string { + if d == nil { + return "<CodebaseDownloader nil>" + } + return fmt.Sprintf("<CodebaseDownloader %s %s/%s>", d.baseURL, d.project, d.repoName) +} + +// FormatCloneURL add authentication into remote URLs +func (d *CodebaseDownloader) FormatCloneURL(opts base.MigrateOptions, remoteAddr string) (string, error) { + return opts.CloneAddr, nil +} + +func (d *CodebaseDownloader) callAPI(endpoint string, parameter map[string]string, result any) error { + u, err := d.baseURL.Parse(endpoint) + if err != nil { + return err + } + + if parameter != nil { + query := u.Query() + for k, v := range parameter { + query.Set(k, v) + } + u.RawQuery = query.Encode() + } + + req, err := http.NewRequestWithContext(d.ctx, "GET", u.String(), nil) + if err != nil { + return err + } + req.Header.Add("Accept", "application/xml") + + resp, err := d.client.Do(req) + if err != nil { + return err + } + defer resp.Body.Close() + + return xml.NewDecoder(resp.Body).Decode(&result) +} + +// GetRepoInfo returns repository information +// https://support.codebasehq.com/kb/projects +func (d *CodebaseDownloader) GetRepoInfo() (*base.Repository, error) { + var rawRepository struct { + XMLName xml.Name `xml:"repository"` + Name string `xml:"name"` + Description string `xml:"description"` + Permalink string `xml:"permalink"` + CloneURL string `xml:"clone-url"` + Source string `xml:"source"` + } + + err := d.callAPI( + fmt.Sprintf("/%s/%s", d.project, d.repoName), + nil, + &rawRepository, + ) + if err != nil { + return nil, err + } + + return &base.Repository{ + Name: rawRepository.Name, + Description: rawRepository.Description, + CloneURL: rawRepository.CloneURL, + OriginalURL: 
d.projectURL.String(), + }, nil +} + +// GetMilestones returns milestones +// https://support.codebasehq.com/kb/tickets-and-milestones/milestones +func (d *CodebaseDownloader) GetMilestones() ([]*base.Milestone, error) { + var rawMilestones struct { + XMLName xml.Name `xml:"ticketing-milestone"` + Type string `xml:"type,attr"` + TicketingMilestone []struct { + Text string `xml:",chardata"` + ID struct { + Value int64 `xml:",chardata"` + Type string `xml:"type,attr"` + } `xml:"id"` + Identifier string `xml:"identifier"` + Name string `xml:"name"` + Deadline struct { + Value string `xml:",chardata"` + Type string `xml:"type,attr"` + } `xml:"deadline"` + Description string `xml:"description"` + Status string `xml:"status"` + } `xml:"ticketing-milestone"` + } + + err := d.callAPI( + fmt.Sprintf("/%s/milestones", d.project), + nil, + &rawMilestones, + ) + if err != nil { + return nil, err + } + + milestones := make([]*base.Milestone, 0, len(rawMilestones.TicketingMilestone)) + for _, milestone := range rawMilestones.TicketingMilestone { + var deadline *time.Time + if len(milestone.Deadline.Value) > 0 { + if val, err := time.Parse("2006-01-02", milestone.Deadline.Value); err == nil { + deadline = &val + } + } + + closed := deadline + state := "closed" + if milestone.Status == "active" { + closed = nil + state = "" + } + + milestones = append(milestones, &base.Milestone{ + Title: milestone.Name, + Deadline: deadline, + Closed: closed, + State: state, + }) + } + return milestones, nil +} + +// GetLabels returns labels +// https://support.codebasehq.com/kb/tickets-and-milestones/statuses-priorities-and-categories +func (d *CodebaseDownloader) GetLabels() ([]*base.Label, error) { + var rawTypes struct { + XMLName xml.Name `xml:"ticketing-types"` + Type string `xml:"type,attr"` + TicketingType []struct { + ID struct { + Value int64 `xml:",chardata"` + Type string `xml:"type,attr"` + } `xml:"id"` + Name string `xml:"name"` + } `xml:"ticketing-type"` + } + + err := d.callAPI( + 
fmt.Sprintf("/%s/tickets/types", d.project), + nil, + &rawTypes, + ) + if err != nil { + return nil, err + } + + labels := make([]*base.Label, 0, len(rawTypes.TicketingType)) + for _, label := range rawTypes.TicketingType { + labels = append(labels, &base.Label{ + Name: label.Name, + Color: "ffffff", + }) + } + return labels, nil +} + +type codebaseIssueContext struct { + Comments []*base.Comment +} + +// GetIssues returns issues, limits are not supported +// https://support.codebasehq.com/kb/tickets-and-milestones +// https://support.codebasehq.com/kb/tickets-and-milestones/updating-tickets +func (d *CodebaseDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, error) { + var rawIssues struct { + XMLName xml.Name `xml:"tickets"` + Type string `xml:"type,attr"` + Ticket []struct { + TicketID struct { + Value int64 `xml:",chardata"` + Type string `xml:"type,attr"` + } `xml:"ticket-id"` + Summary string `xml:"summary"` + TicketType string `xml:"ticket-type"` + ReporterID struct { + Value int64 `xml:",chardata"` + Type string `xml:"type,attr"` + } `xml:"reporter-id"` + Reporter string `xml:"reporter"` + Type struct { + Name string `xml:"name"` + } `xml:"type"` + Status struct { + TreatAsClosed struct { + Value bool `xml:",chardata"` + Type string `xml:"type,attr"` + } `xml:"treat-as-closed"` + } `xml:"status"` + Milestone struct { + Name string `xml:"name"` + } `xml:"milestone"` + UpdatedAt struct { + Value time.Time `xml:",chardata"` + Type string `xml:"type,attr"` + } `xml:"updated-at"` + CreatedAt struct { + Value time.Time `xml:",chardata"` + Type string `xml:"type,attr"` + } `xml:"created-at"` + } `xml:"ticket"` + } + + err := d.callAPI( + fmt.Sprintf("/%s/tickets", d.project), + nil, + &rawIssues, + ) + if err != nil { + return nil, false, err + } + + issues := make([]*base.Issue, 0, len(rawIssues.Ticket)) + for _, issue := range rawIssues.Ticket { + var notes struct { + XMLName xml.Name `xml:"ticket-notes"` + Type string `xml:"type,attr"` + TicketNote 
[]struct { + Content string `xml:"content"` + CreatedAt struct { + Value time.Time `xml:",chardata"` + Type string `xml:"type,attr"` + } `xml:"created-at"` + UpdatedAt struct { + Value time.Time `xml:",chardata"` + Type string `xml:"type,attr"` + } `xml:"updated-at"` + ID struct { + Value int64 `xml:",chardata"` + Type string `xml:"type,attr"` + } `xml:"id"` + UserID struct { + Value int64 `xml:",chardata"` + Type string `xml:"type,attr"` + } `xml:"user-id"` + } `xml:"ticket-note"` + } + err := d.callAPI( + fmt.Sprintf("/%s/tickets/%d/notes", d.project, issue.TicketID.Value), + nil, + ¬es, + ) + if err != nil { + return nil, false, err + } + comments := make([]*base.Comment, 0, len(notes.TicketNote)) + for _, note := range notes.TicketNote { + if len(note.Content) == 0 { + continue + } + poster := d.tryGetUser(note.UserID.Value) + comments = append(comments, &base.Comment{ + IssueIndex: issue.TicketID.Value, + Index: note.ID.Value, + PosterID: poster.ID, + PosterName: poster.Name, + PosterEmail: poster.Email, + Content: note.Content, + Created: note.CreatedAt.Value, + Updated: note.UpdatedAt.Value, + }) + } + if len(comments) == 0 { + comments = append(comments, &base.Comment{}) + } + + state := "open" + if issue.Status.TreatAsClosed.Value { + state = "closed" + } + poster := d.tryGetUser(issue.ReporterID.Value) + issues = append(issues, &base.Issue{ + Title: issue.Summary, + Number: issue.TicketID.Value, + PosterName: poster.Name, + PosterEmail: poster.Email, + Content: comments[0].Content, + Milestone: issue.Milestone.Name, + State: state, + Created: issue.CreatedAt.Value, + Updated: issue.UpdatedAt.Value, + Labels: []*base.Label{ + {Name: issue.Type.Name}, + }, + ForeignIndex: issue.TicketID.Value, + Context: codebaseIssueContext{ + Comments: comments[1:], + }, + }) + + if d.maxIssueIndex < issue.TicketID.Value { + d.maxIssueIndex = issue.TicketID.Value + } + } + + return issues, true, nil +} + +// GetComments returns comments +func (d *CodebaseDownloader) 
GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) { + context, ok := commentable.GetContext().(codebaseIssueContext) + if !ok { + return nil, false, fmt.Errorf("unexpected context: %+v", commentable.GetContext()) + } + + return context.Comments, true, nil +} + +// GetPullRequests returns pull requests +// https://support.codebasehq.com/kb/repositories/merge-requests +func (d *CodebaseDownloader) GetPullRequests(page, perPage int) ([]*base.PullRequest, bool, error) { + var rawMergeRequests struct { + XMLName xml.Name `xml:"merge-requests"` + Type string `xml:"type,attr"` + MergeRequest []struct { + ID struct { + Value int64 `xml:",chardata"` + Type string `xml:"type,attr"` + } `xml:"id"` + } `xml:"merge-request"` + } + + err := d.callAPI( + fmt.Sprintf("/%s/%s/merge_requests", d.project, d.repoName), + map[string]string{ + "query": `"Target Project" is "` + d.repoName + `"`, + "offset": strconv.Itoa((page - 1) * perPage), + "count": strconv.Itoa(perPage), + }, + &rawMergeRequests, + ) + if err != nil { + return nil, false, err + } + + pullRequests := make([]*base.PullRequest, 0, len(rawMergeRequests.MergeRequest)) + for i, mr := range rawMergeRequests.MergeRequest { + var rawMergeRequest struct { + XMLName xml.Name `xml:"merge-request"` + ID struct { + Value int64 `xml:",chardata"` + Type string `xml:"type,attr"` + } `xml:"id"` + SourceRef string `xml:"source-ref"` // NOTE: from the documentation these are actually just branches NOT full refs + TargetRef string `xml:"target-ref"` // NOTE: from the documentation these are actually just branches NOT full refs + Subject string `xml:"subject"` + Status string `xml:"status"` + UserID struct { + Value int64 `xml:",chardata"` + Type string `xml:"type,attr"` + } `xml:"user-id"` + CreatedAt struct { + Value time.Time `xml:",chardata"` + Type string `xml:"type,attr"` + } `xml:"created-at"` + UpdatedAt struct { + Value time.Time `xml:",chardata"` + Type string `xml:"type,attr"` + } `xml:"updated-at"` + 
Comments struct { + Type string `xml:"type,attr"` + Comment []struct { + Content string `xml:"content"` + ID struct { + Value int64 `xml:",chardata"` + Type string `xml:"type,attr"` + } `xml:"id"` + UserID struct { + Value int64 `xml:",chardata"` + Type string `xml:"type,attr"` + } `xml:"user-id"` + Action struct { + Value string `xml:",chardata"` + Nil string `xml:"nil,attr"` + } `xml:"action"` + CreatedAt struct { + Value time.Time `xml:",chardata"` + Type string `xml:"type,attr"` + } `xml:"created-at"` + } `xml:"comment"` + } `xml:"comments"` + } + err := d.callAPI( + fmt.Sprintf("/%s/%s/merge_requests/%d", d.project, d.repoName, mr.ID.Value), + nil, + &rawMergeRequest, + ) + if err != nil { + return nil, false, err + } + + number := d.maxIssueIndex + int64(i) + 1 + + state := "open" + merged := false + var closeTime *time.Time + var mergedTime *time.Time + if rawMergeRequest.Status != "new" { + state = "closed" + closeTime = &rawMergeRequest.UpdatedAt.Value + } + + comments := make([]*base.Comment, 0, len(rawMergeRequest.Comments.Comment)) + for _, comment := range rawMergeRequest.Comments.Comment { + if len(comment.Content) == 0 { + if comment.Action.Value == "merging" { + merged = true + mergedTime = &comment.CreatedAt.Value + } + continue + } + poster := d.tryGetUser(comment.UserID.Value) + comments = append(comments, &base.Comment{ + IssueIndex: number, + Index: comment.ID.Value, + PosterID: poster.ID, + PosterName: poster.Name, + PosterEmail: poster.Email, + Content: comment.Content, + Created: comment.CreatedAt.Value, + Updated: comment.CreatedAt.Value, + }) + } + if len(comments) == 0 { + comments = append(comments, &base.Comment{}) + } + + poster := d.tryGetUser(rawMergeRequest.UserID.Value) + + pullRequests = append(pullRequests, &base.PullRequest{ + Title: rawMergeRequest.Subject, + Number: number, + PosterName: poster.Name, + PosterEmail: poster.Email, + Content: comments[0].Content, + State: state, + Created: rawMergeRequest.CreatedAt.Value, + 
Updated: rawMergeRequest.UpdatedAt.Value, + Closed: closeTime, + Merged: merged, + MergedTime: mergedTime, + Head: base.PullRequestBranch{ + Ref: rawMergeRequest.SourceRef, + SHA: d.getHeadCommit(rawMergeRequest.SourceRef), + RepoName: d.repoName, + }, + Base: base.PullRequestBranch{ + Ref: rawMergeRequest.TargetRef, + SHA: d.getHeadCommit(rawMergeRequest.TargetRef), + RepoName: d.repoName, + }, + ForeignIndex: rawMergeRequest.ID.Value, + Context: codebaseIssueContext{ + Comments: comments[1:], + }, + }) + + // SECURITY: Ensure that the PR is safe + _ = CheckAndEnsureSafePR(pullRequests[len(pullRequests)-1], d.baseURL.String(), d) + } + + return pullRequests, true, nil +} + +func (d *CodebaseDownloader) tryGetUser(userID int64) *codebaseUser { + if len(d.userMap) == 0 { + var rawUsers struct { + XMLName xml.Name `xml:"users"` + Type string `xml:"type,attr"` + User []struct { + EmailAddress string `xml:"email-address"` + ID struct { + Value int64 `xml:",chardata"` + Type string `xml:"type,attr"` + } `xml:"id"` + LastName string `xml:"last-name"` + FirstName string `xml:"first-name"` + Username string `xml:"username"` + } `xml:"user"` + } + + err := d.callAPI( + "/users", + nil, + &rawUsers, + ) + if err == nil { + for _, user := range rawUsers.User { + d.userMap[user.ID.Value] = &codebaseUser{ + Name: user.Username, + Email: user.EmailAddress, + } + } + } + } + + user, ok := d.userMap[userID] + if !ok { + user = &codebaseUser{ + Name: fmt.Sprintf("User %d", userID), + } + d.userMap[userID] = user + } + + return user +} + +func (d *CodebaseDownloader) getHeadCommit(ref string) string { + commitRef, ok := d.commitMap[ref] + if !ok { + var rawCommits struct { + XMLName xml.Name `xml:"commits"` + Type string `xml:"type,attr"` + Commit []struct { + Ref string `xml:"ref"` + } `xml:"commit"` + } + err := d.callAPI( + fmt.Sprintf("/%s/%s/commits/%s", d.project, d.repoName, ref), + nil, + &rawCommits, + ) + if err == nil && len(rawCommits.Commit) > 0 { + commitRef = 
rawCommits.Commit[0].Ref + d.commitMap[ref] = commitRef + } + } + return commitRef +} diff --git a/services/migrations/codebase_test.go b/services/migrations/codebase_test.go new file mode 100644 index 0000000..23626d1 --- /dev/null +++ b/services/migrations/codebase_test.go @@ -0,0 +1,151 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package migrations + +import ( + "context" + "net/url" + "os" + "testing" + "time" + + base "code.gitea.io/gitea/modules/migration" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestCodebaseDownloadRepo(t *testing.T) { + // Skip tests if Codebase token is not found + cloneUser := os.Getenv("CODEBASE_CLONE_USER") + clonePassword := os.Getenv("CODEBASE_CLONE_PASSWORD") + apiUser := os.Getenv("CODEBASE_API_USER") + apiPassword := os.Getenv("CODEBASE_API_TOKEN") + if apiUser == "" || apiPassword == "" { + t.Skip("skipped test because a CODEBASE_ variable was not in the environment") + } + + cloneAddr := "https://gitea-test.codebasehq.com/gitea-test/test.git" + u, _ := url.Parse(cloneAddr) + if cloneUser != "" { + u.User = url.UserPassword(cloneUser, clonePassword) + } + + factory := &CodebaseDownloaderFactory{} + downloader, err := factory.New(context.Background(), base.MigrateOptions{ + CloneAddr: u.String(), + AuthUsername: apiUser, + AuthPassword: apiPassword, + }) + if err != nil { + t.Fatalf("Error creating Codebase downloader: %v", err) + } + repo, err := downloader.GetRepoInfo() + require.NoError(t, err) + assertRepositoryEqual(t, &base.Repository{ + Name: "test", + Owner: "", + Description: "Repository Description", + CloneURL: "git@codebasehq.com:gitea-test/gitea-test/test.git", + OriginalURL: cloneAddr, + }, repo) + + milestones, err := downloader.GetMilestones() + require.NoError(t, err) + assertMilestonesEqual(t, []*base.Milestone{ + { + Title: "Milestone1", + Deadline: timePtr(time.Date(2021, time.September, 16, 0, 0, 0, 0, 
time.UTC)), + }, + { + Title: "Milestone2", + Deadline: timePtr(time.Date(2021, time.September, 17, 0, 0, 0, 0, time.UTC)), + Closed: timePtr(time.Date(2021, time.September, 17, 0, 0, 0, 0, time.UTC)), + State: "closed", + }, + }, milestones) + + labels, err := downloader.GetLabels() + require.NoError(t, err) + assert.Len(t, labels, 4) + + issues, isEnd, err := downloader.GetIssues(1, 2) + require.NoError(t, err) + assert.True(t, isEnd) + assertIssuesEqual(t, []*base.Issue{ + { + Number: 2, + Title: "Open Ticket", + Content: "Open Ticket Message", + PosterName: "gitea-test-43", + PosterEmail: "gitea-codebase@smack.email", + State: "open", + Created: time.Date(2021, time.September, 26, 19, 19, 14, 0, time.UTC), + Updated: time.Date(2021, time.September, 26, 19, 19, 34, 0, time.UTC), + Labels: []*base.Label{ + { + Name: "Feature", + }, + }, + }, + { + Number: 1, + Title: "Closed Ticket", + Content: "Closed Ticket Message", + PosterName: "gitea-test-43", + PosterEmail: "gitea-codebase@smack.email", + State: "closed", + Milestone: "Milestone1", + Created: time.Date(2021, time.September, 26, 19, 18, 33, 0, time.UTC), + Updated: time.Date(2021, time.September, 26, 19, 18, 55, 0, time.UTC), + Labels: []*base.Label{ + { + Name: "Bug", + }, + }, + }, + }, issues) + + comments, _, err := downloader.GetComments(issues[0]) + require.NoError(t, err) + assertCommentsEqual(t, []*base.Comment{ + { + IssueIndex: 2, + PosterName: "gitea-test-43", + PosterEmail: "gitea-codebase@smack.email", + Created: time.Date(2021, time.September, 26, 19, 19, 34, 0, time.UTC), + Updated: time.Date(2021, time.September, 26, 19, 19, 34, 0, time.UTC), + Content: "open comment", + }, + }, comments) + + prs, _, err := downloader.GetPullRequests(1, 1) + require.NoError(t, err) + assertPullRequestsEqual(t, []*base.PullRequest{ + { + Number: 3, + Title: "Readme Change", + Content: "Merge Request comment", + PosterName: "gitea-test-43", + PosterEmail: "gitea-codebase@smack.email", + State: "open", + 
Created: time.Date(2021, time.September, 26, 20, 25, 47, 0, time.UTC), + Updated: time.Date(2021, time.September, 26, 20, 25, 47, 0, time.UTC), + Head: base.PullRequestBranch{ + Ref: "readme-mr", + SHA: "1287f206b888d4d13540e0a8e1c07458f5420059", + RepoName: "test", + }, + Base: base.PullRequestBranch{ + Ref: "master", + SHA: "f32b0a9dfd09a60f616f29158f772cedd89942d2", + RepoName: "test", + }, + }, + }, prs) + + rvs, err := downloader.GetReviews(prs[0]) + require.NoError(t, err) + assert.Empty(t, rvs) +} diff --git a/services/migrations/common.go b/services/migrations/common.go new file mode 100644 index 0000000..d885188 --- /dev/null +++ b/services/migrations/common.go @@ -0,0 +1,83 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package migrations + +import ( + "fmt" + "strings" + + system_model "code.gitea.io/gitea/models/system" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/log" + base "code.gitea.io/gitea/modules/migration" +) + +// WarnAndNotice will log the provided message and send a repository notice +func WarnAndNotice(fmtStr string, args ...any) { + log.Warn(fmtStr, args...) + if err := system_model.CreateRepositoryNotice(fmt.Sprintf(fmtStr, args...)); err != nil { + log.Error("create repository notice failed: ", err) + } +} + +func hasBaseURL(toCheck, baseURL string) bool { + if len(baseURL) > 0 && baseURL[len(baseURL)-1] != '/' { + baseURL += "/" + } + return strings.HasPrefix(toCheck, baseURL) +} + +// CheckAndEnsureSafePR will check that a given PR is safe to download +func CheckAndEnsureSafePR(pr *base.PullRequest, commonCloneBaseURL string, g base.Downloader) bool { + valid := true + // SECURITY: the patchURL must be checked to have the same baseURL as the current to prevent open redirect + if pr.PatchURL != "" && !hasBaseURL(pr.PatchURL, commonCloneBaseURL) { + // TODO: Should we check that this url has the expected format for a patch url? 
+ WarnAndNotice("PR #%d in %s has invalid PatchURL: %s baseURL: %s", pr.Number, g, pr.PatchURL, commonCloneBaseURL) + pr.PatchURL = "" + valid = false + } + + // SECURITY: the headCloneURL must be checked to have the same baseURL as the current to prevent open redirect + if pr.Head.CloneURL != "" && !hasBaseURL(pr.Head.CloneURL, commonCloneBaseURL) { + // TODO: Should we check that this url has the expected format for a patch url? + WarnAndNotice("PR #%d in %s has invalid HeadCloneURL: %s baseURL: %s", pr.Number, g, pr.Head.CloneURL, commonCloneBaseURL) + pr.Head.CloneURL = "" + valid = false + } + + // SECURITY: SHAs Must be a SHA + // FIXME: hash only a SHA1 + CommitType := git.Sha1ObjectFormat + if pr.MergeCommitSHA != "" && !CommitType.IsValid(pr.MergeCommitSHA) { + WarnAndNotice("PR #%d in %s has invalid MergeCommitSHA: %s", pr.Number, g, pr.MergeCommitSHA) + pr.MergeCommitSHA = "" + } + if pr.Head.SHA != "" && !CommitType.IsValid(pr.Head.SHA) { + WarnAndNotice("PR #%d in %s has invalid HeadSHA: %s", pr.Number, g, pr.Head.SHA) + pr.Head.SHA = "" + valid = false + } + if pr.Base.SHA != "" && !CommitType.IsValid(pr.Base.SHA) { + WarnAndNotice("PR #%d in %s has invalid BaseSHA: %s", pr.Number, g, pr.Base.SHA) + pr.Base.SHA = "" + valid = false + } + + // SECURITY: Refs must be valid refs or SHAs + if pr.Head.Ref != "" && !git.IsValidRefPattern(pr.Head.Ref) { + WarnAndNotice("PR #%d in %s has invalid HeadRef: %s", pr.Number, g, pr.Head.Ref) + pr.Head.Ref = "" + valid = false + } + if pr.Base.Ref != "" && !git.IsValidRefPattern(pr.Base.Ref) { + WarnAndNotice("PR #%d in %s has invalid BaseRef: %s", pr.Number, g, pr.Base.Ref) + pr.Base.Ref = "" + valid = false + } + + pr.EnsuredSafe = true + + return valid +} diff --git a/services/migrations/dump.go b/services/migrations/dump.go new file mode 100644 index 0000000..0781200 --- /dev/null +++ b/services/migrations/dump.go @@ -0,0 +1,737 @@ +// Copyright 2020 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package migrations + +import ( + "context" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "os" + "path/filepath" + "strconv" + "strings" + "time" + + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/log" + base "code.gitea.io/gitea/modules/migration" + "code.gitea.io/gitea/modules/repository" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/structs" + + "github.com/google/uuid" + "gopkg.in/yaml.v3" +) + +var _ base.Uploader = &RepositoryDumper{} + +// RepositoryDumper implements an Uploader to the local directory +type RepositoryDumper struct { + ctx context.Context + baseDir string + repoOwner string + repoName string + opts base.MigrateOptions + milestoneFile *os.File + labelFile *os.File + releaseFile *os.File + issueFile *os.File + commentFiles map[int64]*os.File + pullrequestFile *os.File + reviewFiles map[int64]*os.File + + gitRepo *git.Repository + prHeadCache map[string]string +} + +// NewRepositoryDumper creates an gitea Uploader +func NewRepositoryDumper(ctx context.Context, baseDir, repoOwner, repoName string, opts base.MigrateOptions) (*RepositoryDumper, error) { + baseDir = filepath.Join(baseDir, repoOwner, repoName) + if err := os.MkdirAll(baseDir, os.ModePerm); err != nil { + return nil, err + } + return &RepositoryDumper{ + ctx: ctx, + opts: opts, + baseDir: baseDir, + repoOwner: repoOwner, + repoName: repoName, + prHeadCache: make(map[string]string), + commentFiles: make(map[int64]*os.File), + reviewFiles: make(map[int64]*os.File), + }, nil +} + +// MaxBatchInsertSize returns the table's max batch insert size +func (g *RepositoryDumper) MaxBatchInsertSize(tp string) int { + return 1000 +} + +func (g *RepositoryDumper) gitPath() string { + return filepath.Join(g.baseDir, "git") +} + +func (g *RepositoryDumper) wikiPath() string { + return filepath.Join(g.baseDir, "wiki") +} + +func (g *RepositoryDumper) commentDir() string { 
+ return filepath.Join(g.baseDir, "comments") +} + +func (g *RepositoryDumper) reviewDir() string { + return filepath.Join(g.baseDir, "reviews") +} + +func (g *RepositoryDumper) setURLToken(remoteAddr string) (string, error) { + if len(g.opts.AuthToken) > 0 || len(g.opts.AuthUsername) > 0 { + u, err := url.Parse(remoteAddr) + if err != nil { + return "", err + } + u.User = url.UserPassword(g.opts.AuthUsername, g.opts.AuthPassword) + if len(g.opts.AuthToken) > 0 { + u.User = url.UserPassword("oauth2", g.opts.AuthToken) + } + remoteAddr = u.String() + } + + return remoteAddr, nil +} + +// CreateRepo creates a repository +func (g *RepositoryDumper) CreateRepo(repo *base.Repository, opts base.MigrateOptions) error { + f, err := os.Create(filepath.Join(g.baseDir, "repo.yml")) + if err != nil { + return err + } + defer f.Close() + + bs, err := yaml.Marshal(map[string]any{ + "name": repo.Name, + "owner": repo.Owner, + "description": repo.Description, + "clone_addr": opts.CloneAddr, + "original_url": repo.OriginalURL, + "is_private": opts.Private, + "service_type": opts.GitServiceType, + "wiki": opts.Wiki, + "issues": opts.Issues, + "milestones": opts.Milestones, + "labels": opts.Labels, + "releases": opts.Releases, + "comments": opts.Comments, + "pulls": opts.PullRequests, + "assets": opts.ReleaseAssets, + }) + if err != nil { + return err + } + + if _, err := f.Write(bs); err != nil { + return err + } + + repoPath := g.gitPath() + if err := os.MkdirAll(repoPath, os.ModePerm); err != nil { + return err + } + + migrateTimeout := 2 * time.Hour + + remoteAddr, err := g.setURLToken(repo.CloneURL) + if err != nil { + return err + } + + err = git.Clone(g.ctx, remoteAddr, repoPath, git.CloneRepoOptions{ + Mirror: true, + Quiet: true, + Timeout: migrateTimeout, + SkipTLSVerify: setting.Migrations.SkipTLSVerify, + }) + if err != nil { + return fmt.Errorf("Clone: %w", err) + } + if err := git.WriteCommitGraph(g.ctx, repoPath); err != nil { + return err + } + + if opts.Wiki { + 
wikiPath := g.wikiPath() + wikiRemotePath := repository.WikiRemoteURL(g.ctx, remoteAddr) + if len(wikiRemotePath) > 0 { + if err := os.MkdirAll(wikiPath, os.ModePerm); err != nil { + return fmt.Errorf("Failed to remove %s: %w", wikiPath, err) + } + + if err := git.Clone(g.ctx, wikiRemotePath, wikiPath, git.CloneRepoOptions{ + Mirror: true, + Quiet: true, + Timeout: migrateTimeout, + Branch: "master", + SkipTLSVerify: setting.Migrations.SkipTLSVerify, + }); err != nil { + log.Warn("Clone wiki: %v", err) + if err := os.RemoveAll(wikiPath); err != nil { + return fmt.Errorf("Failed to remove %s: %w", wikiPath, err) + } + } else if err := git.WriteCommitGraph(g.ctx, wikiPath); err != nil { + return err + } + } + } + + g.gitRepo, err = git.OpenRepository(g.ctx, g.gitPath()) + return err +} + +// Close closes this uploader +func (g *RepositoryDumper) Close() { + if g.gitRepo != nil { + g.gitRepo.Close() + } + if g.milestoneFile != nil { + g.milestoneFile.Close() + } + if g.labelFile != nil { + g.labelFile.Close() + } + if g.releaseFile != nil { + g.releaseFile.Close() + } + if g.issueFile != nil { + g.issueFile.Close() + } + for _, f := range g.commentFiles { + f.Close() + } + if g.pullrequestFile != nil { + g.pullrequestFile.Close() + } + for _, f := range g.reviewFiles { + f.Close() + } +} + +// CreateTopics creates topics +func (g *RepositoryDumper) CreateTopics(topics ...string) error { + f, err := os.Create(filepath.Join(g.baseDir, "topic.yml")) + if err != nil { + return err + } + defer f.Close() + + bs, err := yaml.Marshal(map[string]any{ + "topics": topics, + }) + if err != nil { + return err + } + + if _, err := f.Write(bs); err != nil { + return err + } + + return nil +} + +// CreateMilestones creates milestones +func (g *RepositoryDumper) CreateMilestones(milestones ...*base.Milestone) error { + var err error + if g.milestoneFile == nil { + g.milestoneFile, err = os.Create(filepath.Join(g.baseDir, "milestone.yml")) + if err != nil { + return err + } + } + + bs, 
err := yaml.Marshal(milestones) + if err != nil { + return err + } + + if _, err := g.milestoneFile.Write(bs); err != nil { + return err + } + + return nil +} + +// CreateLabels creates labels +func (g *RepositoryDumper) CreateLabels(labels ...*base.Label) error { + var err error + if g.labelFile == nil { + g.labelFile, err = os.Create(filepath.Join(g.baseDir, "label.yml")) + if err != nil { + return err + } + } + + bs, err := yaml.Marshal(labels) + if err != nil { + return err + } + + if _, err := g.labelFile.Write(bs); err != nil { + return err + } + + return nil +} + +// CreateReleases creates releases +func (g *RepositoryDumper) CreateReleases(releases ...*base.Release) error { + if g.opts.ReleaseAssets { + for _, release := range releases { + attachDir := filepath.Join("release_assets", release.TagName) + if err := os.MkdirAll(filepath.Join(g.baseDir, attachDir), os.ModePerm); err != nil { + return err + } + for _, asset := range release.Assets { + attachLocalPath := filepath.Join(attachDir, asset.Name) + + // SECURITY: We cannot check the DownloadURL and DownloadFunc are safe here + // ... 
we must assume that they are safe and simply download the attachment + // download attachment + err := func(attachPath string) error { + var rc io.ReadCloser + var err error + if asset.DownloadURL == nil { + rc, err = asset.DownloadFunc() + if err != nil { + return err + } + } else { + resp, err := http.Get(*asset.DownloadURL) + if err != nil { + return err + } + rc = resp.Body + } + defer rc.Close() + + fw, err := os.Create(attachPath) + if err != nil { + return fmt.Errorf("create: %w", err) + } + defer fw.Close() + + _, err = io.Copy(fw, rc) + return err + }(filepath.Join(g.baseDir, attachLocalPath)) + if err != nil { + return err + } + asset.DownloadURL = &attachLocalPath // to save the filepath on the yml file, change the source + } + } + } + + var err error + if g.releaseFile == nil { + g.releaseFile, err = os.Create(filepath.Join(g.baseDir, "release.yml")) + if err != nil { + return err + } + } + + bs, err := yaml.Marshal(releases) + if err != nil { + return err + } + + if _, err := g.releaseFile.Write(bs); err != nil { + return err + } + + return nil +} + +// SyncTags syncs releases with tags in the database +func (g *RepositoryDumper) SyncTags() error { + return nil +} + +// CreateIssues creates issues +func (g *RepositoryDumper) CreateIssues(issues ...*base.Issue) error { + var err error + if g.issueFile == nil { + g.issueFile, err = os.Create(filepath.Join(g.baseDir, "issue.yml")) + if err != nil { + return err + } + } + + bs, err := yaml.Marshal(issues) + if err != nil { + return err + } + + if _, err := g.issueFile.Write(bs); err != nil { + return err + } + + return nil +} + +func (g *RepositoryDumper) createItems(dir string, itemFiles map[int64]*os.File, itemsMap map[int64][]any) error { + if err := os.MkdirAll(dir, os.ModePerm); err != nil { + return err + } + + for number, items := range itemsMap { + if err := g.encodeItems(number, items, dir, itemFiles); err != nil { + return err + } + } + + return nil +} + +func (g *RepositoryDumper) 
encodeItems(number int64, items []any, dir string, itemFiles map[int64]*os.File) error { + itemFile := itemFiles[number] + if itemFile == nil { + var err error + itemFile, err = os.Create(filepath.Join(dir, fmt.Sprintf("%d.yml", number))) + if err != nil { + return err + } + itemFiles[number] = itemFile + } + + encoder := yaml.NewEncoder(itemFile) + defer encoder.Close() + + return encoder.Encode(items) +} + +// CreateComments creates comments of issues +func (g *RepositoryDumper) CreateComments(comments ...*base.Comment) error { + commentsMap := make(map[int64][]any, len(comments)) + for _, comment := range comments { + commentsMap[comment.IssueIndex] = append(commentsMap[comment.IssueIndex], comment) + } + + return g.createItems(g.commentDir(), g.commentFiles, commentsMap) +} + +func (g *RepositoryDumper) handlePullRequest(pr *base.PullRequest) error { + // SECURITY: this pr must have been ensured safe + if !pr.EnsuredSafe { + log.Error("PR #%d in %s/%s has not been checked for safety ... 
We will ignore this.", pr.Number, g.repoOwner, g.repoName) + return fmt.Errorf("unsafe PR #%d", pr.Number) + } + + // First we download the patch file + err := func() error { + // if the patchURL is empty there is nothing to download + if pr.PatchURL == "" { + return nil + } + + // SECURITY: We will assume that the pr.PatchURL has been checked + // pr.PatchURL maybe a local file - but note EnsureSafe should be asserting that this safe + u, err := g.setURLToken(pr.PatchURL) + if err != nil { + return err + } + + // SECURITY: We will assume that the pr.PatchURL has been checked + // pr.PatchURL maybe a local file - but note EnsureSafe should be asserting that this safe + resp, err := http.Get(u) // TODO: This probably needs to use the downloader as there may be rate limiting issues here + if err != nil { + return err + } + defer resp.Body.Close() + pullDir := filepath.Join(g.gitPath(), "pulls") + if err = os.MkdirAll(pullDir, os.ModePerm); err != nil { + return err + } + fPath := filepath.Join(pullDir, fmt.Sprintf("%d.patch", pr.Number)) + f, err := os.Create(fPath) + if err != nil { + return err + } + defer f.Close() + + // TODO: Should there be limits on the size of this file? + if _, err = io.Copy(f, resp.Body); err != nil { + return err + } + pr.PatchURL = "git/pulls/" + fmt.Sprintf("%d.patch", pr.Number) + + return nil + }() + if err != nil { + log.Error("PR #%d in %s/%s unable to download patch: %v", pr.Number, g.repoOwner, g.repoName, err) + return err + } + + isFork := pr.IsForkPullRequest() + + // Even if it's a forked repo PR, we have to change head info as the same as the base info + oldHeadOwnerName := pr.Head.OwnerName + pr.Head.OwnerName, pr.Head.RepoName = pr.Base.OwnerName, pr.Base.RepoName + + if !isFork || pr.State == "closed" { + return nil + } + + // OK we want to fetch the current head as a branch from its CloneURL + + // 1. Is there a head clone URL available? + // 2. Is there a head ref available? 
+ if pr.Head.CloneURL == "" || pr.Head.Ref == "" { + // Set head information if pr.Head.SHA is available + if pr.Head.SHA != "" { + _, _, err = git.NewCommand(g.ctx, "update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.gitPath()}) + if err != nil { + log.Error("PR #%d in %s/%s unable to update-ref for pr HEAD: %v", pr.Number, g.repoOwner, g.repoName, err) + } + } + return nil + } + + // 3. We need to create a remote for this clone url + // ... maybe we already have a name for this remote + remote, ok := g.prHeadCache[pr.Head.CloneURL+":"] + if !ok { + // ... let's try ownername as a reasonable name + remote = oldHeadOwnerName + if !git.IsValidRefPattern(remote) { + // ... let's try something less nice + remote = "head-pr-" + strconv.FormatInt(pr.Number, 10) + } + // ... now add the remote + err := g.gitRepo.AddRemote(remote, pr.Head.CloneURL, true) + if err != nil { + log.Error("PR #%d in %s/%s AddRemote[%s] failed: %v", pr.Number, g.repoOwner, g.repoName, remote, err) + } else { + g.prHeadCache[pr.Head.CloneURL+":"] = remote + ok = true + } + } + if !ok { + // Set head information if pr.Head.SHA is available + if pr.Head.SHA != "" { + _, _, err = git.NewCommand(g.ctx, "update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.gitPath()}) + if err != nil { + log.Error("PR #%d in %s/%s unable to update-ref for pr HEAD: %v", pr.Number, g.repoOwner, g.repoName, err) + } + } + + return nil + } + + // 4. Check if we already have this ref? + localRef, ok := g.prHeadCache[pr.Head.CloneURL+":"+pr.Head.Ref] + if !ok { + // ... We would normally name this migrated branch as <OwnerName>/<HeadRef> but we need to ensure that is safe + localRef = git.SanitizeRefPattern(oldHeadOwnerName + "/" + pr.Head.Ref) + + // ... 
Now we must assert that this does not exist + if g.gitRepo.IsBranchExist(localRef) { + localRef = "head-pr-" + strconv.FormatInt(pr.Number, 10) + "/" + localRef + i := 0 + for g.gitRepo.IsBranchExist(localRef) { + if i > 5 { + // ... We tried, we really tried but this is just a seriously unfriendly repo + return fmt.Errorf("unable to create unique local reference from %s", pr.Head.Ref) + } + // OK just try some uuids! + localRef = git.SanitizeRefPattern("head-pr-" + strconv.FormatInt(pr.Number, 10) + uuid.New().String()) + i++ + } + } + + fetchArg := pr.Head.Ref + ":" + git.BranchPrefix + localRef + if strings.HasPrefix(fetchArg, "-") { + fetchArg = git.BranchPrefix + fetchArg + } + + _, _, err = git.NewCommand(g.ctx, "fetch", "--no-tags").AddDashesAndList(remote, fetchArg).RunStdString(&git.RunOpts{Dir: g.gitPath()}) + if err != nil { + log.Error("Fetch branch from %s failed: %v", pr.Head.CloneURL, err) + // We need to continue here so that the Head.Ref is reset and we attempt to set the gitref for the PR + // (This last step will likely fail but we should try to do as much as we can.) + } else { + // Cache the localRef as the Head.Ref - if we've failed we can always try again. + g.prHeadCache[pr.Head.CloneURL+":"+pr.Head.Ref] = localRef + } + } + + // Set the pr.Head.Ref to the localRef + pr.Head.Ref = localRef + + // 5. Now if pr.Head.SHA == "" we should recover this to the head of this branch + if pr.Head.SHA == "" { + headSha, err := g.gitRepo.GetBranchCommitID(localRef) + if err != nil { + log.Error("unable to get head SHA of local head for PR #%d from %s in %s/%s. Error: %v", pr.Number, pr.Head.Ref, g.repoOwner, g.repoName, err) + return nil + } + pr.Head.SHA = headSha + } + if pr.Head.SHA != "" { + _, _, err = git.NewCommand(g.ctx, "update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.gitPath()}) + if err != nil { + log.Error("unable to set %s as the local head for PR #%d from %s in %s/%s. 
log.Error("PR #%d in %s/%s failed - skipping: %v", pr.Number, g.repoOwner, g.repoName, err)
// DumpRepository dumps a repository according to MigrateOptions into a local directory
// IsRateLimitError returns true if err is, or wraps, a github.RateLimitError
func IsRateLimitError(err error) bool {
	var rateLimitErr *github.RateLimitError
	return errors.As(err, &rateLimitErr)
}

// IsTwoFactorAuthError returns true if err is, or wraps, a github.TwoFactorAuthError
func IsTwoFactorAuthError(err error) bool {
	var twoFactorErr *github.TwoFactorAuthError
	return errors.As(err, &twoFactorErr)
}
// ForgejoDownloaderFactory defines a downloader factory for Forgejo,
// reusing the Gitea implementation since the APIs are compatible
type ForgejoDownloaderFactory struct {
	GiteaDownloaderFactory
}

// GitServiceType returns the type of git service
func (f *ForgejoDownloaderFactory) GitServiceType() structs.GitServiceType {
	return structs.ForgejoService
}
func (g *PlainGitDownloader) GetTopics() ([]string, error) {
	return []string{}, nil
}
BaseURL: %s RepoOwner: %s RepoName: %s", baseURL, oldOwner, oldName) + return NewGitBucketDownloader(ctx, baseURL, opts.AuthUsername, opts.AuthPassword, opts.AuthToken, oldOwner, oldName), nil +} + +// GitServiceType returns the type of git service +func (f *GitBucketDownloaderFactory) GitServiceType() structs.GitServiceType { + return structs.GitBucketService +} + +// GitBucketDownloader implements a Downloader interface to get repository information +// from GitBucket via GithubDownloader +type GitBucketDownloader struct { + *GithubDownloaderV3 +} + +// String implements Stringer +func (g *GitBucketDownloader) String() string { + return fmt.Sprintf("migration from gitbucket server %s %s/%s", g.baseURL, g.repoOwner, g.repoName) +} + +func (g *GitBucketDownloader) LogString() string { + if g == nil { + return "<GitBucketDownloader nil>" + } + return fmt.Sprintf("<GitBucketDownloader %s %s/%s>", g.baseURL, g.repoOwner, g.repoName) +} + +// NewGitBucketDownloader creates a GitBucket downloader +func NewGitBucketDownloader(ctx context.Context, baseURL, userName, password, token, repoOwner, repoName string) *GitBucketDownloader { + githubDownloader := NewGithubDownloaderV3(ctx, baseURL, userName, password, token, repoOwner, repoName) + // Gitbucket 4.40 uses different internal hard-coded perPage values. + // Issues, PRs, and other major parts use 25. Release page uses 10. + // Some API doesn't support paging yet. Sounds difficult, but using + // minimum number among them worked out very well. 
// New returns a Downloader related to this factory according to MigrateOptions
log.Error("Failed to create NewGiteaDownloader for: %s. Error: %v", baseURL, err) // log.Error is printf-style; pre-formatting with Sprintf risks misinterpreting '%' in baseURL
Error: %v", err) + } + if apiConf != nil { + maxPerPage = apiConf.MaxResponseItems + } + + return &GiteaDownloader{ + ctx: ctx, + client: giteaClient, + baseURL: baseURL, + repoOwner: path[0], + repoName: path[1], + pagination: paginationSupport, + maxPerPage: maxPerPage, + }, nil +} + +// SetContext set context +func (g *GiteaDownloader) SetContext(ctx context.Context) { + g.ctx = ctx +} + +// String implements Stringer +func (g *GiteaDownloader) String() string { + return fmt.Sprintf("migration from gitea server %s %s/%s", g.baseURL, g.repoOwner, g.repoName) +} + +func (g *GiteaDownloader) LogString() string { + if g == nil { + return "<GiteaDownloader nil>" + } + return fmt.Sprintf("<GiteaDownloader %s %s/%s>", g.baseURL, g.repoOwner, g.repoName) +} + +// GetRepoInfo returns a repository information +func (g *GiteaDownloader) GetRepoInfo() (*base.Repository, error) { + if g == nil { + return nil, errors.New("error: GiteaDownloader is nil") + } + + repo, _, err := g.client.GetRepo(g.repoOwner, g.repoName) + if err != nil { + return nil, err + } + + return &base.Repository{ + Name: repo.Name, + Owner: repo.Owner.UserName, + IsPrivate: repo.Private, + Description: repo.Description, + CloneURL: repo.CloneURL, + OriginalURL: repo.HTMLURL, + DefaultBranch: repo.DefaultBranch, + }, nil +} + +// GetTopics return gitea topics +func (g *GiteaDownloader) GetTopics() ([]string, error) { + topics, _, err := g.client.ListRepoTopics(g.repoOwner, g.repoName, gitea_sdk.ListRepoTopicsOptions{}) + return topics, err +} + +// GetMilestones returns milestones +func (g *GiteaDownloader) GetMilestones() ([]*base.Milestone, error) { + milestones := make([]*base.Milestone, 0, g.maxPerPage) + + for i := 1; ; i++ { + // make sure gitea can shutdown gracefully + select { + case <-g.ctx.Done(): + return nil, nil + default: + } + + ms, _, err := g.client.ListRepoMilestones(g.repoOwner, g.repoName, gitea_sdk.ListMilestoneOption{ + ListOptions: gitea_sdk.ListOptions{ + PageSize: g.maxPerPage, 
// old gitea instances don't have this information
DownloadURL: &assetDownloadURL, // use the per-iteration copy (like assetID above) so each asset keeps its own URL
log.Info("GiteaDownloader: instance too old, skip getIssueReactions")
	}

	for i := range rl {
		reactions = append(reactions, &base.Reaction{
			UserID:   rl[i].User.ID,
			UserName: rl[i].User.UserName,
			Content:  rl[i].Reaction,
		})
	}
	return reactions, nil
}

// GetIssues returns the page-th page of issues (at most perPage per page,
// capped at g.maxPerPage). The second return value reports whether this was
// the last page.
func (g *GiteaDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, error) {
	if perPage > g.maxPerPage {
		perPage = g.maxPerPage
	}
	allIssues := make([]*base.Issue, 0, perPage)

	issues, _, err := g.client.ListRepoIssues(g.repoOwner, g.repoName, gitea_sdk.ListIssueOption{
		ListOptions: gitea_sdk.ListOptions{Page: page, PageSize: perPage},
		State:       gitea_sdk.StateAll,
		Type:        gitea_sdk.IssueTypeIssue,
	})
	if err != nil {
		return nil, false, fmt.Errorf("error while listing issues: %w", err)
	}
	for _, issue := range issues {
		labels := make([]*base.Label, 0, len(issue.Labels))
		for i := range issue.Labels {
			labels = append(labels, g.convertGiteaLabel(issue.Labels[i]))
		}

		var milestone string
		if issue.Milestone != nil {
			milestone = issue.Milestone.Title
		}

		// Reactions are best-effort: a failure is reported but does not abort
		// the migration of the issue itself.
		reactions, err := g.getIssueReactions(issue.Index)
		if err != nil {
			WarnAndNotice("Unable to load reactions during migrating issue #%d in %s. Error: %v", issue.Index, g, err)
		}

		var assignees []string
		for i := range issue.Assignees {
			assignees = append(assignees, issue.Assignees[i].UserName)
		}

		allIssues = append(allIssues, &base.Issue{
			Title:        issue.Title,
			Number:       issue.Index,
			PosterID:     issue.Poster.ID,
			PosterName:   issue.Poster.UserName,
			PosterEmail:  issue.Poster.Email,
			Content:      issue.Body,
			Milestone:    milestone,
			State:        string(issue.State),
			Created:      issue.Created,
			Updated:      issue.Updated,
			Closed:       issue.Closed,
			Reactions:    reactions,
			Labels:       labels,
			Assignees:    assignees,
			IsLocked:     issue.IsLocked,
			ForeignIndex: issue.Index,
		})
	}

	// Without server-side pagination support we can only detect the end by an
	// empty result page; with it, a short page signals the end.
	isEnd := len(issues) < perPage
	if !g.pagination {
		isEnd = len(issues) == 0
	}
	return allIssues, isEnd, nil
}

// GetComments returns all comments of the given commentable (issue or PR),
// paging through the API until a short (or, without pagination support, any)
// page is returned.
func (g *GiteaDownloader) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) {
	allComments := make([]*base.Comment, 0, g.maxPerPage)

	for i := 1; ; i++ {
		// make sure gitea can shutdown gracefully
		select {
		case <-g.ctx.Done():
			return nil, false, nil
		default:
		}

		comments, _, err := g.client.ListIssueComments(g.repoOwner, g.repoName, commentable.GetForeignIndex(), gitea_sdk.ListIssueCommentOptions{ListOptions: gitea_sdk.ListOptions{
			PageSize: g.maxPerPage,
			Page:     i,
		}})
		if err != nil {
			return nil, false, fmt.Errorf("error while listing comments for issue #%d. Error: %w", commentable.GetForeignIndex(), err)
		}

		for _, comment := range comments {
			// Best-effort, as with issue reactions above.
			reactions, err := g.getCommentReactions(comment.ID)
			if err != nil {
				WarnAndNotice("Unable to load comment reactions during migrating issue #%d for comment %d in %s. Error: %v", commentable.GetForeignIndex(), comment.ID, g, err)
			}

			allComments = append(allComments, &base.Comment{
				IssueIndex:  commentable.GetLocalIndex(),
				Index:       comment.ID,
				PosterID:    comment.Poster.ID,
				PosterName:  comment.Poster.UserName,
				PosterEmail: comment.Poster.Email,
				Content:     comment.Body,
				Created:     comment.Created,
				Updated:     comment.Updated,
				Reactions:   reactions,
			})
		}

		if !g.pagination || len(comments) < g.maxPerPage {
			break
		}
	}
	return allComments, true, nil
}

// GetPullRequests returns pull requests according page and perPage
func (g *GiteaDownloader) GetPullRequests(page, perPage int) ([]*base.PullRequest, bool, error) {
	if perPage > g.maxPerPage {
		perPage = g.maxPerPage
	}
	allPRs := make([]*base.PullRequest, 0, perPage)

	prs, _, err := g.client.ListRepoPullRequests(g.repoOwner, g.repoName, gitea_sdk.ListPullRequestsOptions{
		ListOptions: gitea_sdk.ListOptions{
			Page:     page,
			PageSize: perPage,
		},
		State: gitea_sdk.StateAll,
	})
	if err != nil {
		return nil, false, fmt.Errorf("error while listing pull requests (page: %d, pagesize: %d). 
Error: %w", page, perPage, err)
	}
	for _, pr := range prs {
		var milestone string
		if pr.Milestone != nil {
			milestone = pr.Milestone.Title
		}

		labels := make([]*base.Label, 0, len(pr.Labels))
		for i := range pr.Labels {
			labels = append(labels, g.convertGiteaLabel(pr.Labels[i]))
		}

		var (
			headUserName string
			headRepoName string
			headCloneURL string
			headRef      string
			headSHA      string
		)
		if pr.Head != nil {
			if pr.Head.Repository != nil {
				headUserName = pr.Head.Repository.Owner.UserName
				headRepoName = pr.Head.Repository.Name
				headCloneURL = pr.Head.Repository.CloneURL
			}
			headSHA = pr.Head.Sha
			headRef = pr.Head.Ref
		}

		var mergeCommitSHA string
		if pr.MergedCommitID != nil {
			mergeCommitSHA = *pr.MergedCommitID
		}

		// Reactions are best-effort; failure does not abort the PR migration.
		reactions, err := g.getIssueReactions(pr.Index)
		if err != nil {
			WarnAndNotice("Unable to load reactions during migrating pull #%d in %s. Error: %v", pr.Index, g, err)
		}

		var assignees []string
		for i := range pr.Assignees {
			assignees = append(assignees, pr.Assignees[i].UserName)
		}

		// Created/Updated are pointers in the SDK; fall back to the zero
		// time when unset.
		createdAt := time.Time{}
		if pr.Created != nil {
			createdAt = *pr.Created
		}
		updatedAt := time.Time{}
		// FIX: was `if pr.Created != nil` (copy-paste error), which both used
		// the wrong field as the guard and dereferenced pr.Updated without a
		// nil check.
		if pr.Updated != nil {
			updatedAt = *pr.Updated
		}

		// A merged PR may lack an explicit close time; use the merge time.
		closedAt := pr.Closed
		if pr.Merged != nil && closedAt == nil {
			closedAt = pr.Merged
		}

		allPRs = append(allPRs, &base.PullRequest{
			Title:          pr.Title,
			Number:         pr.Index,
			PosterID:       pr.Poster.ID,
			PosterName:     pr.Poster.UserName,
			PosterEmail:    pr.Poster.Email,
			Content:        pr.Body,
			State:          string(pr.State),
			Created:        createdAt,
			Updated:        updatedAt,
			Closed:         closedAt,
			Labels:         labels,
			Milestone:      milestone,
			Reactions:      reactions,
			Assignees:      assignees,
			Merged:         pr.HasMerged,
			MergedTime:     pr.Merged,
			MergeCommitSHA: mergeCommitSHA,
			IsLocked:       pr.IsLocked,
			PatchURL:       pr.PatchURL,
			Head: base.PullRequestBranch{
				Ref:       headRef,
				SHA:       headSHA,
				RepoName:  headRepoName,
				OwnerName: headUserName,
				CloneURL:  headCloneURL,
			},
			Base: 
base.PullRequestBranch{
				Ref:       pr.Base.Ref,
				SHA:       pr.Base.Sha,
				RepoName:  g.repoName,
				OwnerName: g.repoOwner,
			},
			ForeignIndex: pr.Index,
		})
		// SECURITY: Ensure that the PR is safe
		_ = CheckAndEnsureSafePR(allPRs[len(allPRs)-1], g.baseURL, g)
	}

	isEnd := len(prs) < perPage
	if !g.pagination {
		isEnd = len(prs) == 0
	}
	return allPRs, isEnd, nil
}

// GetReviews returns pull requests review
func (g *GiteaDownloader) GetReviews(reviewable base.Reviewable) ([]*base.Review, error) {
	// Pull review listing only exists since Gitea 1.12; skip silently on
	// older instances.
	if err := g.client.CheckServerVersionConstraint(">=1.12"); err != nil {
		// FIX: typo in log message ("to old" -> "too old").
		log.Info("GiteaDownloader: instance too old, skip GetReviews")
		return nil, nil
	}

	allReviews := make([]*base.Review, 0, g.maxPerPage)

	for i := 1; ; i++ {
		// make sure gitea can shutdown gracefully
		select {
		case <-g.ctx.Done():
			return nil, nil
		default:
		}

		prl, _, err := g.client.ListPullReviews(g.repoOwner, g.repoName, reviewable.GetForeignIndex(), gitea_sdk.ListPullReviewsOptions{ListOptions: gitea_sdk.ListOptions{
			Page:     i,
			PageSize: g.maxPerPage,
		}})
		if err != nil {
			return nil, err
		}

		for _, pr := range prl {
			if pr.Reviewer == nil {
				// Presumably this is a team review which we cannot migrate at present but we have to skip this review as otherwise the review will be mapped on to an incorrect user. 
				// TODO: handle team reviews
				continue
			}

			rcl, _, err := g.client.ListPullReviewComments(g.repoOwner, g.repoName, reviewable.GetForeignIndex(), pr.ID)
			if err != nil {
				return nil, err
			}
			var reviewComments []*base.ReviewComment
			for i := range rcl {
				// Comments on the old side of the diff are encoded as
				// negative line numbers.
				line := int(rcl[i].LineNum)
				if rcl[i].OldLineNum > 0 {
					line = int(rcl[i].OldLineNum) * -1
				}

				reviewComments = append(reviewComments, &base.ReviewComment{
					ID:        rcl[i].ID,
					Content:   rcl[i].Body,
					TreePath:  rcl[i].Path,
					DiffHunk:  rcl[i].DiffHunk,
					Line:      line,
					CommitID:  rcl[i].CommitID,
					PosterID:  rcl[i].Reviewer.ID,
					CreatedAt: rcl[i].Created,
					UpdatedAt: rcl[i].Updated,
				})
			}

			review := &base.Review{
				ID:           pr.ID,
				IssueIndex:   reviewable.GetLocalIndex(),
				ReviewerID:   pr.Reviewer.ID,
				ReviewerName: pr.Reviewer.UserName,
				Official:     pr.Official,
				CommitID:     pr.CommitID,
				Content:      pr.Body,
				CreatedAt:    pr.Submitted,
				State:        string(pr.State),
				Comments:     reviewComments,
			}

			allReviews = append(allReviews, review)
		}

		// A short page means we have reached the last page of reviews.
		if len(prl) < g.maxPerPage {
			break
		}
	}
	return allReviews, nil
}
diff --git a/services/migrations/gitea_downloader_test.go b/services/migrations/gitea_downloader_test.go
new file mode 100644
index 0000000..28a52c2
--- /dev/null
+++ b/services/migrations/gitea_downloader_test.go
@@ -0,0 +1,314 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT + +package migrations + +import ( + "context" + "net/http" + "os" + "sort" + "testing" + "time" + + base "code.gitea.io/gitea/modules/migration" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestGiteaDownloadRepo(t *testing.T) { + // Skip tests if Gitea token is not found + giteaToken := os.Getenv("GITEA_TOKEN") + if giteaToken == "" { + t.Skip("skipped test because GITEA_TOKEN was not in the environment") + } + + resp, err := http.Get("https://gitea.com/gitea") + if err != nil || resp.StatusCode != http.StatusOK { + t.Skipf("Can't reach https://gitea.com, skipping %s", t.Name()) + } + + downloader, err := NewGiteaDownloader(context.Background(), "https://gitea.com", "gitea/test_repo", "", "", giteaToken) + if downloader == nil { + t.Fatal("NewGitlabDownloader is nil") + } + require.NoError(t, err, "NewGitlabDownloader error occur") + + repo, err := downloader.GetRepoInfo() + require.NoError(t, err) + assertRepositoryEqual(t, &base.Repository{ + Name: "test_repo", + Owner: "gitea", + IsPrivate: false, + Description: "Test repository for testing migration from gitea to gitea", + CloneURL: "https://gitea.com/gitea/test_repo.git", + OriginalURL: "https://gitea.com/gitea/test_repo", + DefaultBranch: "master", + }, repo) + + topics, err := downloader.GetTopics() + require.NoError(t, err) + sort.Strings(topics) + assert.EqualValues(t, []string{"ci", "gitea", "migration", "test"}, topics) + + labels, err := downloader.GetLabels() + require.NoError(t, err) + assertLabelsEqual(t, []*base.Label{ + { + Name: "Bug", + Color: "e11d21", + }, + { + Name: "Enhancement", + Color: "207de5", + }, + { + Name: "Feature", + Color: "0052cc", + Description: "a feature request", + }, + { + Name: "Invalid", + Color: "d4c5f9", + }, + { + Name: "Question", + Color: "fbca04", + }, + { + Name: "Valid", + Color: "53e917", + }, + }, labels) + + milestones, err := downloader.GetMilestones() + require.NoError(t, err) + 
assertMilestonesEqual(t, []*base.Milestone{
		{
			Title:    "V2 Finalize",
			Created:  time.Unix(0, 0),
			Deadline: timePtr(time.Unix(1599263999, 0)),
			Updated:  timePtr(time.Unix(0, 0)),
			State:    "open",
		},
		{
			Title:       "V1",
			Description: "Generate Content",
			Created:     time.Unix(0, 0),
			Updated:     timePtr(time.Unix(0, 0)),
			Closed:      timePtr(time.Unix(1598985406, 0)),
			State:       "closed",
		},
	}, milestones)

	// Releases
	releases, err := downloader.GetReleases()
	require.NoError(t, err)
	assertReleasesEqual(t, []*base.Release{
		{
			Name:            "Second Release",
			TagName:         "v2-rc1",
			TargetCommitish: "master",
			Body:            "this repo has:\r\n* reactions\r\n* wiki\r\n* issues (open/closed)\r\n* pulls (open/closed/merged) (external/internal)\r\n* pull reviews\r\n* projects\r\n* milestones\r\n* labels\r\n* releases\r\n\r\nto test migration against",
			Draft:           false,
			Prerelease:      true,
			Created:         time.Date(2020, 9, 1, 18, 2, 43, 0, time.UTC),
			Published:       time.Date(2020, 9, 1, 18, 2, 43, 0, time.UTC),
			PublisherID:     689,
			PublisherName:   "6543",
			PublisherEmail:  "6543@obermui.de",
		},
		{
			Name:            "First Release",
			TagName:         "V1",
			TargetCommitish: "master",
			Body:            "as title",
			Draft:           false,
			Prerelease:      false,
			Created:         time.Date(2020, 9, 1, 17, 30, 32, 0, time.UTC),
			Published:       time.Date(2020, 9, 1, 17, 30, 32, 0, time.UTC),
			PublisherID:     689,
			PublisherName:   "6543",
			PublisherEmail:  "6543@obermui.de",
		},
	}, releases)

	// Issues: first a full page, then a short page to check pagination.
	issues, isEnd, err := downloader.GetIssues(1, 50)
	require.NoError(t, err)
	assert.True(t, isEnd)
	assert.Len(t, issues, 7)
	assert.EqualValues(t, "open", issues[0].State)

	issues, isEnd, err = downloader.GetIssues(3, 2)
	require.NoError(t, err)
	assert.False(t, isEnd)

	assertIssuesEqual(t, []*base.Issue{
		{
			Number:      4,
			Title:       "what is this repo about?",
			Content:     "",
			Milestone:   "V1",
			PosterID:    -1,
			PosterName:  "Ghost",
			PosterEmail: "",
			State:       "closed",
			IsLocked:    true,
			Created:     time.Unix(1598975321, 0),
			Updated:     time.Unix(1598975400, 0),
			Labels: []*base.Label{{
				Name:        "Question",
				Color:       "fbca04",
				Description: "",
			}},
			Reactions: []*base.Reaction{
				{
					UserID:   689,
					UserName: "6543",
					Content:  "gitea",
				},
				{
					UserID:   689,
					UserName: "6543",
					Content:  "laugh",
				},
			},
			Closed: timePtr(time.Date(2020, 9, 1, 15, 49, 34, 0, time.UTC)),
		},
		{
			Number:      2,
			Title:       "Spam",
			Content:     ":(",
			Milestone:   "",
			PosterID:    689,
			PosterName:  "6543",
			PosterEmail: "6543@obermui.de",
			State:       "closed",
			IsLocked:    false,
			Created:     time.Unix(1598919780, 0),
			Updated:     time.Unix(1598969497, 0),
			Labels: []*base.Label{{
				Name:        "Invalid",
				Color:       "d4c5f9",
				Description: "",
			}},
			Closed: timePtr(time.Unix(1598969497, 0)),
		},
	}, issues)

	// Comments
	comments, _, err := downloader.GetComments(&base.Issue{Number: 4, ForeignIndex: 4})
	require.NoError(t, err)
	assertCommentsEqual(t, []*base.Comment{
		{
			IssueIndex:  4,
			PosterID:    689,
			PosterName:  "6543",
			PosterEmail: "6543@obermui.de",
			Created:     time.Unix(1598975370, 0),
			Updated:     time.Unix(1599070865, 0),
			Content:     "a really good question!\n\nIt is the used as TESTSET for gitea2gitea repo migration function",
		},
		{
			IssueIndex:  4,
			PosterID:    -1,
			PosterName:  "Ghost",
			PosterEmail: "",
			Created:     time.Unix(1598975393, 0),
			Updated:     time.Unix(1598975393, 0),
			Content:     "Oh!",
		},
	}, comments)

	// Pull requests
	prs, isEnd, err := downloader.GetPullRequests(1, 50)
	require.NoError(t, err)
	assert.True(t, isEnd)
	assert.Len(t, prs, 6)
	prs, isEnd, err = downloader.GetPullRequests(1, 3)
	require.NoError(t, err)
	assert.False(t, isEnd)
	assert.Len(t, prs, 3)
	assertPullRequestEqual(t, &base.PullRequest{
		Number:      12,
		PosterID:    689,
		PosterName:  "6543",
		PosterEmail: "6543@obermui.de",
		Title:       "Dont Touch",
		Content:     "\r\nadd dont touch note",
		Milestone:   "V2 Finalize",
		State:       "closed",
		IsLocked:    false,
		Created:     time.Unix(1598982759, 0),
		Updated:     time.Unix(1599023425, 0),
		Closed:      timePtr(time.Unix(1598982934, 0)),
		Assignees:   []string{"techknowlogick"},
		Base: base.PullRequestBranch{
			CloneURL:  "",
			Ref:       "master",
			SHA:       "827aa28a907853e5ddfa40c8f9bc52471a2685fd",
			RepoName:  "test_repo",
			OwnerName: "gitea",
		},
		Head: base.PullRequestBranch{
			CloneURL:  "https://gitea.com/6543-forks/test_repo.git",
			Ref:       "refs/pull/12/head",
			SHA:       "b6ab5d9ae000b579a5fff03f92c486da4ddf48b6",
			RepoName:  "test_repo",
			OwnerName: "6543-forks",
		},
		Merged:         true,
		MergedTime:     timePtr(time.Unix(1598982934, 0)),
		MergeCommitSHA: "827aa28a907853e5ddfa40c8f9bc52471a2685fd",
		PatchURL:       "https://gitea.com/gitea/test_repo/pulls/12.patch",
	}, prs[1])

	// Reviews
	reviews, err := downloader.GetReviews(&base.Issue{Number: 7, ForeignIndex: 7})
	require.NoError(t, err)
	assertReviewsEqual(t, []*base.Review{
		{
			ID:           1770,
			IssueIndex:   7,
			ReviewerID:   689,
			ReviewerName: "6543",
			CommitID:     "187ece0cb6631e2858a6872e5733433bb3ca3b03",
			CreatedAt:    time.Date(2020, 9, 1, 16, 12, 58, 0, time.UTC),
			State:        "COMMENT", // TODO
			Comments: []*base.ReviewComment{
				{
					ID:        116561,
					InReplyTo: 0,
					Content:   "is one `\\newline` to less?",
					TreePath:  "README.md",
					DiffHunk:  "@@ -2,3 +2,3 @@\n \n-Test repository for testing migration from gitea 2 gitea\n\\ No newline at end of file\n+Test repository for testing migration from gitea 2 gitea",
					Position:  0,
					Line:      4,
					CommitID:  "187ece0cb6631e2858a6872e5733433bb3ca3b03",
					PosterID:  689,
					Reactions: nil,
					CreatedAt: time.Date(2020, 9, 1, 16, 12, 58, 0, time.UTC),
					UpdatedAt: time.Date(2020, 9, 1, 16, 12, 58, 0, time.UTC),
				},
			},
		},
		{
			ID:           1771,
			IssueIndex:   7,
			ReviewerID:   9,
			ReviewerName: "techknowlogick",
			CommitID:     "187ece0cb6631e2858a6872e5733433bb3ca3b03",
			CreatedAt:    time.Date(2020, 9, 1, 17, 6, 47, 0, time.UTC),
			State:        "REQUEST_CHANGES", // TODO
			Content:      "I think this needs some changes",
		},
		{
			ID:           1772,
			IssueIndex:   7,
			ReviewerID:   9,
			ReviewerName: "techknowlogick",
			CommitID:     "187ece0cb6631e2858a6872e5733433bb3ca3b03",
			CreatedAt:    time.Date(2020, 9, 1, 17, 19, 51, 0, time.UTC),
			State:        base.ReviewStateApproved,
			Official:     true,
			Content:      "looks good",
		},
	}, reviews)
}
diff --git a/services/migrations/gitea_uploader.go b/services/migrations/gitea_uploader.go
new file mode 100644
index 0000000..3ba4ca2
--- /dev/null
+++ b/services/migrations/gitea_uploader.go
@@ -0,0 +1,1031 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// Copyright 2018 Jonas Franz. All rights reserved.
// SPDX-License-Identifier: MIT

package migrations

import (
	"context"
	"fmt"
	"io"
	"os"
	"path/filepath"
	"strconv"
	"strings"
	"time"

	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/models/db"
	issues_model "code.gitea.io/gitea/models/issues"
	repo_model "code.gitea.io/gitea/models/repo"
	user_model "code.gitea.io/gitea/models/user"
	base_module "code.gitea.io/gitea/modules/base"
	"code.gitea.io/gitea/modules/git"
	"code.gitea.io/gitea/modules/gitrepo"
	"code.gitea.io/gitea/modules/label"
	"code.gitea.io/gitea/modules/log"
	base "code.gitea.io/gitea/modules/migration"
	repo_module "code.gitea.io/gitea/modules/repository"
	"code.gitea.io/gitea/modules/setting"
	"code.gitea.io/gitea/modules/storage"
	"code.gitea.io/gitea/modules/structs"
	"code.gitea.io/gitea/modules/timeutil"
	"code.gitea.io/gitea/modules/uri"
	"code.gitea.io/gitea/modules/util"
	"code.gitea.io/gitea/services/pull"
	repo_service "code.gitea.io/gitea/services/repository"

	"github.com/google/uuid"
)

var _ base.Uploader = &GiteaLocalUploader{}

// GiteaLocalUploader implements an Uploader to gitea sites
type GiteaLocalUploader struct {
	ctx         context.Context
	doer        *user_model.User
	repoOwner   string
	repoName    string
	repo        *repo_model.Repository
	labels      map[string]*issues_model.Label
	milestones  map[string]int64
	issues      map[int64]*issues_model.Issue
	gitRepo     *git.Repository
	prHeadCache map[string]string
	sameApp     bool
	
	userMap        map[int64]int64 // external user id mapping to user id
	prCache        map[int64]*issues_model.PullRequest
	gitServiceType structs.GitServiceType
}

// NewGiteaLocalUploader creates a Gitea uploader via gitea API v1
func NewGiteaLocalUploader(ctx context.Context, doer *user_model.User, repoOwner, repoName string) *GiteaLocalUploader {
	return &GiteaLocalUploader{
		ctx:         ctx,
		doer:        doer,
		repoOwner:   repoOwner,
		repoName:    repoName,
		labels:      make(map[string]*issues_model.Label),
		milestones:  make(map[string]int64),
		issues:      make(map[int64]*issues_model.Issue),
		prHeadCache: make(map[string]string),
		userMap:     make(map[int64]int64),
		prCache:     make(map[int64]*issues_model.PullRequest),
	}
}

// MaxBatchInsertSize returns the table's max batch insert size
func (g *GiteaLocalUploader) MaxBatchInsertSize(tp string) int {
	switch tp {
	case "issue":
		return db.MaxBatchInsertSize(new(issues_model.Issue))
	case "comment":
		return db.MaxBatchInsertSize(new(issues_model.Comment))
	case "milestone":
		return db.MaxBatchInsertSize(new(issues_model.Milestone))
	case "label":
		return db.MaxBatchInsertSize(new(issues_model.Label))
	case "release":
		return db.MaxBatchInsertSize(new(repo_model.Release))
	case "pullrequest":
		return db.MaxBatchInsertSize(new(issues_model.PullRequest))
	}
	// conservative fallback for unknown table kinds
	return 10
}

// CreateRepo creates a repository (or reuses an existing one when
// opts.MigrateToRepoID is set) and clones the git data into it.
func (g *GiteaLocalUploader) CreateRepo(repo *base.Repository, opts base.MigrateOptions) error {
	owner, err := user_model.GetUserByName(g.ctx, g.repoOwner)
	if err != nil {
		return err
	}

	var r *repo_model.Repository
	if opts.MigrateToRepoID <= 0 {
		r, err = repo_service.CreateRepositoryDirectly(g.ctx, g.doer, owner, repo_service.CreateRepoOptions{
			Name:           g.repoName,
			Description:    repo.Description,
			OriginalURL:    repo.OriginalURL,
			GitServiceType: opts.GitServiceType,
			IsPrivate:      opts.Private || setting.Repository.ForcePrivate,
			IsMirror:       opts.Mirror,
			Status:         repo_model.RepositoryBeingMigrated,
		})
	} else {
		r, err = repo_model.GetRepositoryByID(g.ctx, opts.MigrateToRepoID)
	}
	if err != nil {
		return err
	}
	r.DefaultBranch = repo.DefaultBranch
	r.Description = repo.Description

	r, err = repo_service.MigrateRepositoryGitData(g.ctx, owner, r, base.MigrateOptions{
		RepoName:       g.repoName,
		Description:    repo.Description,
		OriginalURL:    repo.OriginalURL,
		GitServiceType: opts.GitServiceType,
		Mirror:         repo.IsMirror,
		LFS:            opts.LFS,
		LFSEndpoint:    opts.LFSEndpoint,
		CloneAddr:      repo.CloneURL, // SECURITY: we will assume that this has already been checked
		Private:        repo.IsPrivate,
		Wiki:           opts.Wiki,
		Releases:       opts.Releases, // if didn't get releases, then sync them from tags
		MirrorInterval: opts.MirrorInterval,
	}, NewMigrationHTTPTransport())

	// NOTE: g.repo is recorded before the error check so later cleanup can
	// still reference the (possibly partially migrated) repository.
	g.sameApp = strings.HasPrefix(repo.OriginalURL, setting.AppURL)
	g.repo = r
	if err != nil {
		return err
	}
	g.gitRepo, err = gitrepo.OpenRepository(g.ctx, g.repo)
	if err != nil {
		return err
	}

	// detect object format from git repository and update to database
	objectFormat, err := g.gitRepo.GetObjectFormat()
	if err != nil {
		return err
	}
	g.repo.ObjectFormatName = objectFormat.Name()
	return repo_model.UpdateRepositoryCols(g.ctx, g.repo, "object_format_name")
}

// Close closes this uploader
func (g *GiteaLocalUploader) Close() {
	if g.gitRepo != nil {
		g.gitRepo.Close()
	}
}

// CreateTopics creates topics
func (g *GiteaLocalUploader) CreateTopics(topics ...string) error {
	// Ignore topics too long for the db (filtered in place, keeping order)
	c := 0
	for _, topic := range topics {
		if len(topic) > 50 {
			continue
		}

		topics[c] = topic
		c++
	}
	topics = topics[:c]
	return repo_model.SaveTopics(g.ctx, g.repo.ID, topics...)
}

// CreateMilestones creates milestones
func (g *GiteaLocalUploader) CreateMilestones(milestones ...*base.Milestone) error {
	mss := make([]*issues_model.Milestone, 0, len(milestones))
	for _, milestone := range milestones {
		var deadline timeutil.TimeStamp
		if milestone.Deadline != nil {
			deadline = timeutil.TimeStamp(milestone.Deadline.Unix())
		}
		if deadline == 0 {
			// sentinel "no deadline" date used by Gitea
			deadline = timeutil.TimeStamp(time.Date(9999, 1, 1, 0, 0, 0, 0, setting.DefaultUILocation).Unix())
		}

		// Fill in missing timestamps from whatever related time is available.
		if milestone.Created.IsZero() {
			if milestone.Updated != nil {
				milestone.Created = *milestone.Updated
			} else if milestone.Deadline != nil {
				milestone.Created = *milestone.Deadline
			} else {
				milestone.Created = time.Now()
			}
		}
		if milestone.Updated == nil || milestone.Updated.IsZero() {
			milestone.Updated = &milestone.Created
		}

		ms := issues_model.Milestone{
			RepoID:       g.repo.ID,
			Name:         milestone.Title,
			Content:      milestone.Description,
			IsClosed:     milestone.State == "closed",
			CreatedUnix:  timeutil.TimeStamp(milestone.Created.Unix()),
			UpdatedUnix:  timeutil.TimeStamp(milestone.Updated.Unix()),
			DeadlineUnix: deadline,
		}
		if ms.IsClosed && milestone.Closed != nil {
			ms.ClosedDateUnix = timeutil.TimeStamp(milestone.Closed.Unix())
		}
		mss = append(mss, &ms)
	}

	err := issues_model.InsertMilestones(g.ctx, mss...)
	if err != nil {
		return err
	}

	// Remember title -> id so issues/PRs can reference their milestone later.
	for _, ms := range mss {
		g.milestones[ms.Name] = ms.ID
	}
	return nil
}

// CreateLabels creates labels
func (g *GiteaLocalUploader) CreateLabels(labels ...*base.Label) error {
	lbs := make([]*issues_model.Label, 0, len(labels))
	for _, l := range labels {
		// Invalid colors are replaced with white rather than failing the
		// whole migration.
		if color, err := label.NormalizeColor(l.Color); err != nil {
			log.Warn("Invalid label color: #%s for label: %s in migration to %s/%s", l.Color, l.Name, g.repoOwner, g.repoName)
			l.Color = "#ffffff"
		} else {
			l.Color = color
		}

		lbs = append(lbs, &issues_model.Label{
			RepoID:      g.repo.ID,
			Name:        l.Name,
			Exclusive:   l.Exclusive,
			Description: l.Description,
			Color:       l.Color,
		})
	}

	err := issues_model.NewLabels(g.ctx, lbs...)
	if err != nil {
		return err
	}
	// Remember name -> label so issues/PRs can reference labels later.
	for _, lb := range lbs {
		g.labels[lb.Name] = lb
	}
	return nil
}

// CreateReleases creates releases
func (g *GiteaLocalUploader) CreateReleases(releases ...*base.Release) error {
	rels := make([]*repo_model.Release, 0, len(releases))
	for _, release := range releases {
		if release.Created.IsZero() {
			if !release.Published.IsZero() {
				release.Created = release.Published
			} else {
				release.Created = time.Now()
			}
		}

		// SECURITY: The TagName must be a valid git ref
		if release.TagName != "" && !git.IsValidRefPattern(release.TagName) {
			release.TagName = ""
		}

		// SECURITY: The TargetCommitish must be a valid git ref
		if release.TargetCommitish != "" && !git.IsValidRefPattern(release.TargetCommitish) {
			release.TargetCommitish = ""
		}

		rel := repo_model.Release{
			RepoID:       g.repo.ID,
			TagName:      release.TagName,
			LowerTagName: strings.ToLower(release.TagName),
			Target:       release.TargetCommitish,
			Title:        release.Name,
			Note:         release.Body,
			IsDraft:      release.Draft,
			IsPrerelease: release.Prerelease,
			IsTag:        false,
			CreatedUnix:  timeutil.TimeStamp(release.Created.Unix()),
		}

		if err := g.remapUser(release, &rel); err != nil {
			return err
		}

		// calc NumCommits if possible
		if rel.TagName != "" {
			commit, err := g.gitRepo.GetTagCommit(rel.TagName)
			// a missing tag is tolerated; any other error is fatal
			if !git.IsErrNotExist(err) {
				if err != nil {
					return fmt.Errorf("GetTagCommit[%v]: %w", rel.TagName, err)
				}
				rel.Sha1 = commit.ID.String()
				rel.NumCommits, err = commit.CommitsCount()
				if err != nil {
					return fmt.Errorf("CommitsCount: %w", err)
				}
			}
		}

		for _, asset := range release.Assets {
			if asset.Created.IsZero() {
				if !asset.Updated.IsZero() {
					asset.Created = asset.Updated
				} else {
					asset.Created = release.Created
				}
			}
			attach := repo_model.Attachment{
				UUID:          uuid.New().String(),
				Name:          asset.Name,
				DownloadCount: int64(*asset.DownloadCount),
				Size:          int64(*asset.Size),
				CreatedUnix:   timeutil.TimeStamp(asset.Created.Unix()),
			}

			// SECURITY: We cannot check the DownloadURL and DownloadFunc are safe here
			// ... we must assume that they are safe and simply download the attachment
			err := func() error {
				// asset.DownloadURL maybe a local file
				var rc io.ReadCloser
				var err error
				if asset.DownloadFunc != nil {
					rc, err = asset.DownloadFunc()
					if err != nil {
						return err
					}
				} else if asset.DownloadURL != nil {
					rc, err = uri.Open(*asset.DownloadURL)
					if err != nil {
						return err
					}
				}
				if rc == nil {
					return nil
				}
				_, err = storage.Attachments.Save(attach.RelativePath(), rc, int64(*asset.Size))
				rc.Close()
				return err
			}()
			if err != nil {
				return err
			}

			rel.Attachments = append(rel.Attachments, &attach)
		}

		rels = append(rels, &rel)
	}

	return repo_model.InsertReleases(g.ctx, rels...)
}

// SyncTags syncs releases with tags in the database
func (g *GiteaLocalUploader) SyncTags() error {
	return repo_module.SyncReleasesWithTags(g.ctx, g.repo, g.gitRepo)
}

// CreateIssues creates issues
func (g *GiteaLocalUploader) CreateIssues(issues ...*base.Issue) error {
	iss := make([]*issues_model.Issue, 0, len(issues))
	for _, issue := range issues {
		// Resolve label names against the labels created by CreateLabels;
		// unknown labels are silently dropped.
		var labels []*issues_model.Label
		for _, label := range issue.Labels {
			lb, ok := g.labels[label.Name]
			if ok {
				labels = append(labels, lb)
			}
		}

		milestoneID := g.milestones[issue.Milestone]

		// Fill in missing timestamps from the close time when available.
		if issue.Created.IsZero() {
			if issue.Closed != nil {
				issue.Created = *issue.Closed
			} else {
				issue.Created = time.Now()
			}
		}
		if issue.Updated.IsZero() {
			if issue.Closed != nil {
				issue.Updated = *issue.Closed
			} else {
				issue.Updated = time.Now()
			}
		}

		// SECURITY: issue.Ref needs to be a valid reference
		if !git.IsValidRefPattern(issue.Ref) {
			log.Warn("Invalid issue.Ref[%s] in issue #%d in %s/%s", issue.Ref, issue.Number, g.repoOwner, g.repoName)
			issue.Ref = ""
		}

		is := issues_model.Issue{
			RepoID:      g.repo.ID,
			Repo:        g.repo,
			Index:       issue.Number,
			Title:       base_module.TruncateString(issue.Title, 255),
			Content:     issue.Content,
			Ref:         issue.Ref,
			IsClosed:    issue.State == "closed",
			IsLocked:    issue.IsLocked,
			MilestoneID: milestoneID,
			Labels:      labels,
			CreatedUnix: timeutil.TimeStamp(issue.Created.Unix()),
			UpdatedUnix: timeutil.TimeStamp(issue.Updated.Unix()),
		}

		if err := g.remapUser(issue, &is); err != nil {
			return err
		}

		if issue.Closed != nil {
			is.ClosedUnix = timeutil.TimeStamp(issue.Closed.Unix())
		}
		// add reactions
		for _, reaction := range issue.Reactions {
			res := issues_model.Reaction{
				Type:        reaction.Content,
				CreatedUnix: timeutil.TimeStampNow(),
			}
			if err := g.remapUser(reaction, &res); err != nil {
				return err
			}
			is.Reactions = append(is.Reactions, &res)
		}
		iss = append(iss, &is)
	}

	if len(iss) > 0 {
		if err := issues_model.InsertIssues(g.ctx, iss...); err != nil {
			return err
		}

		// Remember index -> issue so comments can be attached later.
		for _, is := range iss {
			g.issues[is.Index] = is
		}
	}

	return nil
}

// CreateComments creates comments of issues
func (g *GiteaLocalUploader) CreateComments(comments ...*base.Comment) error {
	cms := make([]*issues_model.Comment, 0, len(comments))
	for _, comment := range comments {
		var issue *issues_model.Issue
		issue, ok := g.issues[comment.IssueIndex]
		if !ok {
			return fmt.Errorf("comment references non existent IssueIndex %d", comment.IssueIndex)
		}

		if comment.Created.IsZero() {
			comment.Created = time.Unix(int64(issue.CreatedUnix), 0)
		}
		if comment.Updated.IsZero() {
			comment.Updated = comment.Created
		}
		if comment.CommentType == "" {
			// if type field is missing, then assume a normal comment
			comment.CommentType = issues_model.CommentTypeComment.String()
		}
		cm := issues_model.Comment{
			IssueID:     issue.ID,
			Type:        issues_model.AsCommentType(comment.CommentType),
			Content:     comment.Content,
			CreatedUnix: timeutil.TimeStamp(comment.Created.Unix()),
			UpdatedUnix: timeutil.TimeStamp(comment.Updated.Unix()),
		}

		// Event-style comments carry their payload in Meta rather than in
		// Content; map the known kinds onto the model's dedicated fields.
		switch cm.Type {
		case issues_model.CommentTypeReopen:
			cm.Content = ""
		case issues_model.CommentTypeClose:
			cm.Content = ""
		case issues_model.CommentTypeAssignees:
			if assigneeID, ok := comment.Meta["AssigneeID"].(int); ok {
				cm.AssigneeID = int64(assigneeID)
			}
			if comment.Meta["RemovedAssigneeID"] != nil {
				cm.RemovedAssignee = true
			}
		case issues_model.CommentTypeChangeTitle:
			if comment.Meta["OldTitle"] != nil {
				cm.OldTitle = fmt.Sprint(comment.Meta["OldTitle"])
			}
			if comment.Meta["NewTitle"] != nil {
				cm.NewTitle = fmt.Sprint(comment.Meta["NewTitle"])
			}
		case issues_model.CommentTypeChangeTargetBranch:
			if comment.Meta["OldRef"] != nil && comment.Meta["NewRef"] != nil {
				cm.OldRef = fmt.Sprint(comment.Meta["OldRef"])
				cm.NewRef = fmt.Sprint(comment.Meta["NewRef"])
				cm.Content = ""
			}
		case issues_model.CommentTypeMergePull:
			cm.Content = ""
		case issues_model.CommentTypePRScheduledToAutoMerge, issues_model.CommentTypePRUnScheduledToAutoMerge:
			cm.Content = ""
		default:
		}

		if err := g.remapUser(comment, &cm); err != nil {
			return err
		}

		// add reactions
		for _, reaction := range comment.Reactions {
			res := issues_model.Reaction{
				Type:        reaction.Content,
				CreatedUnix: timeutil.TimeStampNow(),
			}
			if err := g.remapUser(reaction, &res); err != nil {
				return err
			}
			cm.Reactions = append(cm.Reactions, &res)
		}

		cms = append(cms, &cm)
	}

	if len(cms) == 0 {
		return nil
	}
	return issues_model.InsertIssueComments(g.ctx, cms)
}

// CreatePullRequests creates pull requests
func (g *GiteaLocalUploader) CreatePullRequests(prs ...*base.PullRequest) error {
	gprs := make([]*issues_model.PullRequest, 0, len(prs))
	for _, pr := range prs {
		gpr, err := g.newPullRequest(pr)
		if err != nil {
			return err
		}

		if err := g.remapUser(pr, gpr.Issue); err != nil {
			return err
		}

		gprs = append(gprs, gpr)
	}
	if err := issues_model.InsertPullRequests(g.ctx, gprs...); err != nil {
		return err
	}
	for _, pr := range gprs {
		g.issues[pr.Issue.Index] = pr.Issue
		pull.AddToTaskQueue(g.ctx, pr)
	}
	return nil
}

func (g *GiteaLocalUploader) updateGitForPullRequest(pr *base.PullRequest) (head string, err error) {
	// SECURITY: this pr must have been must have been ensured safe
	if !pr.EnsuredSafe {
		log.Error("PR #%d in %s/%s has not been checked for safety.", pr.Number, g.repoOwner, g.repoName)
		return "", fmt.Errorf("the PR[%d] was not checked for safety", pr.Number)
	}

	// Anonymous function to download the patch file (allows us to use defer)
	err = func() error {
		// if the patchURL is empty there is nothing to download
		if pr.PatchURL == "" {
			return nil
		}

		// SECURITY: We will assume that the pr.PatchURL has been checked
		// pr.PatchURL maybe a local file - but note EnsureSafe should be 
asserting that this safe + ret, err := uri.Open(pr.PatchURL) // TODO: This probably needs to use the downloader as there may be rate limiting issues here + if err != nil { + return err + } + defer ret.Close() + + pullDir := filepath.Join(g.repo.RepoPath(), "pulls") + if err = os.MkdirAll(pullDir, os.ModePerm); err != nil { + return err + } + + f, err := os.Create(filepath.Join(pullDir, fmt.Sprintf("%d.patch", pr.Number))) + if err != nil { + return err + } + defer f.Close() + + // TODO: Should there be limits on the size of this file? + _, err = io.Copy(f, ret) + + return err + }() + if err != nil { + return "", err + } + + head = "unknown repository" + if pr.IsForkPullRequest() && pr.State != "closed" { + // OK we want to fetch the current head as a branch from its CloneURL + + // 1. Is there a head clone URL available? + // 2. Is there a head ref available? + if pr.Head.CloneURL == "" || pr.Head.Ref == "" { + return head, nil + } + + // 3. We need to create a remote for this clone url + // ... maybe we already have a name for this remote + remote, ok := g.prHeadCache[pr.Head.CloneURL+":"] + if !ok { + // ... let's try ownername as a reasonable name + remote = pr.Head.OwnerName + if !git.IsValidRefPattern(remote) { + // ... let's try something less nice + remote = "head-pr-" + strconv.FormatInt(pr.Number, 10) + } + // ... now add the remote + err := g.gitRepo.AddRemote(remote, pr.Head.CloneURL, true) + if err != nil { + log.Error("PR #%d in %s/%s AddRemote[%s] failed: %v", pr.Number, g.repoOwner, g.repoName, remote, err) + } else { + g.prHeadCache[pr.Head.CloneURL+":"] = remote + ok = true + } + } + if !ok { + return head, nil + } + + // 4. Check if we already have this ref? + localRef, ok := g.prHeadCache[pr.Head.CloneURL+":"+pr.Head.Ref] + if !ok { + // ... We would normally name this migrated branch as <OwnerName>/<HeadRef> but we need to ensure that is safe + localRef = git.SanitizeRefPattern(pr.Head.OwnerName + "/" + pr.Head.Ref) + + // ... 
Now we must assert that this does not exist + if g.gitRepo.IsBranchExist(localRef) { + localRef = "head-pr-" + strconv.FormatInt(pr.Number, 10) + "/" + localRef + i := 0 + for g.gitRepo.IsBranchExist(localRef) { + if i > 5 { + // ... We tried, we really tried but this is just a seriously unfriendly repo + return head, nil + } + // OK just try some uuids! + localRef = git.SanitizeRefPattern("head-pr-" + strconv.FormatInt(pr.Number, 10) + uuid.New().String()) + i++ + } + } + + fetchArg := pr.Head.Ref + ":" + git.BranchPrefix + localRef + if strings.HasPrefix(fetchArg, "-") { + fetchArg = git.BranchPrefix + fetchArg + } + + _, _, err = git.NewCommand(g.ctx, "fetch", "--no-tags").AddDashesAndList(remote, fetchArg).RunStdString(&git.RunOpts{Dir: g.repo.RepoPath()}) + if err != nil { + log.Error("Fetch branch from %s failed: %v", pr.Head.CloneURL, err) + return head, nil + } + g.prHeadCache[pr.Head.CloneURL+":"+pr.Head.Ref] = localRef + head = localRef + } + + // 5. Now if pr.Head.SHA == "" we should recover this to the head of this branch + if pr.Head.SHA == "" { + headSha, err := g.gitRepo.GetBranchCommitID(localRef) + if err != nil { + log.Error("unable to get head SHA of local head for PR #%d from %s in %s/%s. 
Error: %v", pr.Number, pr.Head.Ref, g.repoOwner, g.repoName, err) + return head, nil + } + pr.Head.SHA = headSha + } + + _, _, err = git.NewCommand(g.ctx, "update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.repo.RepoPath()}) + if err != nil { + return "", err + } + + return head, nil + } + + if pr.Head.Ref != "" { + head = pr.Head.Ref + } + + // Ensure the closed PR SHA still points to an existing ref + if pr.Head.SHA == "" { + // The SHA is empty + log.Warn("Empty reference, no pull head for PR #%d in %s/%s", pr.Number, g.repoOwner, g.repoName) + } else { + _, _, err = git.NewCommand(g.ctx, "rev-list", "--quiet", "-1").AddDynamicArguments(pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.repo.RepoPath()}) + if err != nil { + // Git update-ref remove bad references with a relative path + log.Warn("Deprecated local head %s for PR #%d in %s/%s, removing %s", pr.Head.SHA, pr.Number, g.repoOwner, g.repoName, pr.GetGitRefName()) + } else { + // set head information + _, _, err = git.NewCommand(g.ctx, "update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.repo.RepoPath()}) + if err != nil { + log.Error("unable to set %s as the local head for PR #%d from %s in %s/%s. 
Error: %v", pr.Head.SHA, pr.Number, pr.Head.Ref, g.repoOwner, g.repoName, err) + } + } + } + + return head, nil +} + +func (g *GiteaLocalUploader) newPullRequest(pr *base.PullRequest) (*issues_model.PullRequest, error) { + var labels []*issues_model.Label + for _, label := range pr.Labels { + lb, ok := g.labels[label.Name] + if ok { + labels = append(labels, lb) + } + } + + milestoneID := g.milestones[pr.Milestone] + + head, err := g.updateGitForPullRequest(pr) + if err != nil { + return nil, fmt.Errorf("updateGitForPullRequest: %w", err) + } + + // Now we may need to fix the mergebase + if pr.Base.SHA == "" { + if pr.Base.Ref != "" && pr.Head.SHA != "" { + // A PR against a tag base does not make sense - therefore pr.Base.Ref must be a branch + // TODO: should we be checking for the refs/heads/ prefix on the pr.Base.Ref? (i.e. are these actually branches or refs) + pr.Base.SHA, _, err = g.gitRepo.GetMergeBase("", git.BranchPrefix+pr.Base.Ref, pr.Head.SHA) + if err != nil { + log.Error("Cannot determine the merge base for PR #%d in %s/%s. Error: %v", pr.Number, g.repoOwner, g.repoName, err) + } + } else { + log.Error("Cannot determine the merge base for PR #%d in %s/%s. 
Not enough information", pr.Number, g.repoOwner, g.repoName) + } + } + + if pr.Created.IsZero() { + if pr.Closed != nil { + pr.Created = *pr.Closed + } else if pr.MergedTime != nil { + pr.Created = *pr.MergedTime + } else { + pr.Created = time.Now() + } + } + if pr.Updated.IsZero() { + pr.Updated = pr.Created + } + + issue := issues_model.Issue{ + RepoID: g.repo.ID, + Repo: g.repo, + Title: pr.Title, + Index: pr.Number, + Content: pr.Content, + MilestoneID: milestoneID, + IsPull: true, + IsClosed: pr.State == "closed", + IsLocked: pr.IsLocked, + Labels: labels, + CreatedUnix: timeutil.TimeStamp(pr.Created.Unix()), + UpdatedUnix: timeutil.TimeStamp(pr.Updated.Unix()), + } + + if err := g.remapUser(pr, &issue); err != nil { + return nil, err + } + + // add reactions + for _, reaction := range pr.Reactions { + res := issues_model.Reaction{ + Type: reaction.Content, + CreatedUnix: timeutil.TimeStampNow(), + } + if err := g.remapUser(reaction, &res); err != nil { + return nil, err + } + issue.Reactions = append(issue.Reactions, &res) + } + + pullRequest := issues_model.PullRequest{ + HeadRepoID: g.repo.ID, + HeadBranch: head, + BaseRepoID: g.repo.ID, + BaseBranch: pr.Base.Ref, + MergeBase: pr.Base.SHA, + Index: pr.Number, + HasMerged: pr.Merged, + + Issue: &issue, + } + + if pullRequest.Issue.IsClosed && pr.Closed != nil { + pullRequest.Issue.ClosedUnix = timeutil.TimeStamp(pr.Closed.Unix()) + } + if pullRequest.HasMerged && pr.MergedTime != nil { + pullRequest.MergedUnix = timeutil.TimeStamp(pr.MergedTime.Unix()) + pullRequest.MergedCommitID = pr.MergeCommitSHA + pullRequest.MergerID = g.doer.ID + } + + // TODO: assignees + + return &pullRequest, nil +} + +func convertReviewState(state string) issues_model.ReviewType { + switch state { + case base.ReviewStatePending: + return issues_model.ReviewTypePending + case base.ReviewStateApproved: + return issues_model.ReviewTypeApprove + case base.ReviewStateChangesRequested: + return issues_model.ReviewTypeReject + case 
base.ReviewStateCommented: + return issues_model.ReviewTypeComment + case base.ReviewStateRequestReview: + return issues_model.ReviewTypeRequest + default: + return issues_model.ReviewTypePending + } +} + +// CreateReviews create pull request reviews of currently migrated issues +func (g *GiteaLocalUploader) CreateReviews(reviews ...*base.Review) error { + cms := make([]*issues_model.Review, 0, len(reviews)) + for _, review := range reviews { + var issue *issues_model.Issue + issue, ok := g.issues[review.IssueIndex] + if !ok { + return fmt.Errorf("review references non existent IssueIndex %d", review.IssueIndex) + } + if review.CreatedAt.IsZero() { + review.CreatedAt = time.Unix(int64(issue.CreatedUnix), 0) + } + + cm := issues_model.Review{ + Type: convertReviewState(review.State), + IssueID: issue.ID, + Content: review.Content, + Official: review.Official, + CreatedUnix: timeutil.TimeStamp(review.CreatedAt.Unix()), + UpdatedUnix: timeutil.TimeStamp(review.CreatedAt.Unix()), + } + + if err := g.remapUser(review, &cm); err != nil { + return err + } + + cms = append(cms, &cm) + + // get pr + pr, ok := g.prCache[issue.ID] + if !ok { + var err error + pr, err = issues_model.GetPullRequestByIssueIDWithNoAttributes(g.ctx, issue.ID) + if err != nil { + return err + } + g.prCache[issue.ID] = pr + } + if pr.MergeBase == "" { + // No mergebase -> no basis for any patches + log.Warn("PR #%d in %s/%s: does not have a merge base, all review comments will be ignored", pr.Index, g.repoOwner, g.repoName) + continue + } + + headCommitID, err := g.gitRepo.GetRefCommitID(pr.GetGitRefName()) + if err != nil { + log.Warn("PR #%d GetRefCommitID[%s] in %s/%s: %v, all review comments will be ignored", pr.Index, pr.GetGitRefName(), g.repoOwner, g.repoName, err) + continue + } + + for _, comment := range review.Comments { + // Skip code comment if it doesn't have a diff it is commenting on. 
+ if comment.DiffHunk == "" { + continue + } + + line := comment.Line + if line != 0 { + comment.Position = 1 + } else if comment.DiffHunk != "" { + _, _, line, _ = git.ParseDiffHunkString(comment.DiffHunk) + } + + // SECURITY: The TreePath must be cleaned! use relative path + comment.TreePath = util.PathJoinRel(comment.TreePath) + + var patch string + reader, writer := io.Pipe() + defer func() { + _ = reader.Close() + _ = writer.Close() + }() + go func(comment *base.ReviewComment) { + if err := git.GetRepoRawDiffForFile(g.gitRepo, pr.MergeBase, headCommitID, git.RawDiffNormal, comment.TreePath, writer); err != nil { + // We should ignore the error since the commit maybe removed when force push to the pull request + log.Warn("GetRepoRawDiffForFile failed when migrating [%s, %s, %s, %s]: %v", g.gitRepo.Path, pr.MergeBase, headCommitID, comment.TreePath, err) + } + _ = writer.Close() + }(comment) + + patch, _ = git.CutDiffAroundLine(reader, int64((&issues_model.Comment{Line: int64(line + comment.Position - 1)}).UnsignedLine()), line < 0, setting.UI.CodeCommentLines) + + if comment.CreatedAt.IsZero() { + comment.CreatedAt = review.CreatedAt + } + if comment.UpdatedAt.IsZero() { + comment.UpdatedAt = comment.CreatedAt + } + + objectFormat := git.ObjectFormatFromName(g.repo.ObjectFormatName) + if !objectFormat.IsValid(comment.CommitID) { + log.Warn("Invalid comment CommitID[%s] on comment[%d] in PR #%d of %s/%s replaced with %s", comment.CommitID, pr.Index, g.repoOwner, g.repoName, headCommitID) + comment.CommitID = headCommitID + } + + c := issues_model.Comment{ + Type: issues_model.CommentTypeCode, + IssueID: issue.ID, + Content: comment.Content, + Line: int64(line + comment.Position - 1), + TreePath: comment.TreePath, + CommitSHA: comment.CommitID, + Patch: patch, + CreatedUnix: timeutil.TimeStamp(comment.CreatedAt.Unix()), + UpdatedUnix: timeutil.TimeStamp(comment.UpdatedAt.Unix()), + } + + if err := g.remapUser(review, &c); err != nil { + return err + } + + 
cm.Comments = append(cm.Comments, &c) + } + } + + return issues_model.InsertReviews(g.ctx, cms) +} + +// Rollback when migrating failed, this will rollback all the changes. +func (g *GiteaLocalUploader) Rollback() error { + if g.repo != nil && g.repo.ID > 0 { + g.gitRepo.Close() + + // do not delete the repository, otherwise the end users won't be able to see the last error message + } + return nil +} + +// Finish when migrating success, this will do some status update things. +func (g *GiteaLocalUploader) Finish() error { + if g.repo == nil || g.repo.ID <= 0 { + return ErrRepoNotCreated + } + + // update issue_index + if err := issues_model.RecalculateIssueIndexForRepo(g.ctx, g.repo.ID); err != nil { + return err + } + + if err := models.UpdateRepoStats(g.ctx, g.repo.ID); err != nil { + return err + } + + g.repo.Status = repo_model.RepositoryReady + return repo_model.UpdateRepositoryCols(g.ctx, g.repo, "status") +} + +func (g *GiteaLocalUploader) remapUser(source user_model.ExternalUserMigrated, target user_model.ExternalUserRemappable) error { + var userID int64 + var err error + if g.sameApp { + userID, err = g.remapLocalUser(source) + } else { + userID, err = g.remapExternalUser(source) + } + if err != nil { + return err + } + + if userID > 0 { + return target.RemapExternalUser("", 0, userID) + } + return target.RemapExternalUser(source.GetExternalName(), source.GetExternalID(), user_model.GhostUserID) +} + +func (g *GiteaLocalUploader) remapLocalUser(source user_model.ExternalUserMigrated) (int64, error) { + userid, ok := g.userMap[source.GetExternalID()] + if !ok { + name, err := user_model.GetUserNameByID(g.ctx, source.GetExternalID()) + if err != nil { + return 0, err + } + // let's not reuse an ID when the user was deleted or has a different user name + if name != source.GetExternalName() { + userid = 0 + } else { + userid = source.GetExternalID() + } + g.userMap[source.GetExternalID()] = userid + } + return userid, nil +} + +func (g *GiteaLocalUploader) 
remapExternalUser(source user_model.ExternalUserMigrated) (userid int64, err error) { + userid, ok := g.userMap[source.GetExternalID()] + if !ok { + userid, err = user_model.GetUserIDByExternalUserID(g.ctx, g.gitServiceType.Name(), fmt.Sprintf("%d", source.GetExternalID())) + if err != nil { + log.Error("GetUserIDByExternalUserID: %v", err) + return 0, err + } + g.userMap[source.GetExternalID()] = userid + } + return userid, nil +} diff --git a/services/migrations/gitea_uploader_test.go b/services/migrations/gitea_uploader_test.go new file mode 100644 index 0000000..ad193b2 --- /dev/null +++ b/services/migrations/gitea_uploader_test.go @@ -0,0 +1,519 @@ +// Copyright 2019 The Gitea Authors. All rights reserved. +// Copyright 2018 Jonas Franz. All rights reserved. +// SPDX-License-Identifier: MIT + +package migrations + +import ( + "context" + "fmt" + "os" + "path/filepath" + "strconv" + "testing" + "time" + + "code.gitea.io/gitea/models/db" + issues_model "code.gitea.io/gitea/models/issues" + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/gitrepo" + "code.gitea.io/gitea/modules/graceful" + "code.gitea.io/gitea/modules/log" + base "code.gitea.io/gitea/modules/migration" + "code.gitea.io/gitea/modules/optional" + "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/test" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestGiteaUploadRepo(t *testing.T) { + // FIXME: Since no accesskey or user/password will trigger rate limit of github, just skip + t.Skip() + + unittest.PrepareTestEnv(t) + + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1}) + + var ( + ctx = context.Background() + downloader = NewGithubDownloaderV3(ctx, "https://github.com", "", "", "", "go-xorm", "builder") + repoName = "builder-" + time.Now().Format("2006-01-02-15-04-05") + 
uploader = NewGiteaLocalUploader(graceful.GetManager().HammerContext(), user, user.Name, repoName) + ) + + err := migrateRepository(db.DefaultContext, user, downloader, uploader, base.MigrateOptions{ + CloneAddr: "https://github.com/go-xorm/builder", + RepoName: repoName, + AuthUsername: "", + + Wiki: true, + Issues: true, + Milestones: true, + Labels: true, + Releases: true, + Comments: true, + PullRequests: true, + Private: true, + Mirror: false, + }, nil) + require.NoError(t, err) + + repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerID: user.ID, Name: repoName}) + assert.True(t, repo.HasWiki()) + assert.EqualValues(t, repo_model.RepositoryReady, repo.Status) + + milestones, err := db.Find[issues_model.Milestone](db.DefaultContext, issues_model.FindMilestoneOptions{ + RepoID: repo.ID, + IsClosed: optional.Some(false), + }) + require.NoError(t, err) + assert.Len(t, milestones, 1) + + milestones, err = db.Find[issues_model.Milestone](db.DefaultContext, issues_model.FindMilestoneOptions{ + RepoID: repo.ID, + IsClosed: optional.Some(true), + }) + require.NoError(t, err) + assert.Empty(t, milestones) + + labels, err := issues_model.GetLabelsByRepoID(ctx, repo.ID, "", db.ListOptions{}) + require.NoError(t, err) + assert.Len(t, labels, 12) + + releases, err := db.Find[repo_model.Release](db.DefaultContext, repo_model.FindReleasesOptions{ + ListOptions: db.ListOptions{ + PageSize: 10, + Page: 0, + }, + IncludeTags: true, + RepoID: repo.ID, + }) + require.NoError(t, err) + assert.Len(t, releases, 8) + + releases, err = db.Find[repo_model.Release](db.DefaultContext, repo_model.FindReleasesOptions{ + ListOptions: db.ListOptions{ + PageSize: 10, + Page: 0, + }, + IncludeTags: false, + RepoID: repo.ID, + }) + require.NoError(t, err) + assert.Len(t, releases, 1) + + issues, err := issues_model.Issues(db.DefaultContext, &issues_model.IssuesOptions{ + RepoIDs: []int64{repo.ID}, + IsPull: optional.Some(false), + SortType: "oldest", + }) + require.NoError(t, 
err) + assert.Len(t, issues, 15) + require.NoError(t, issues[0].LoadDiscussComments(db.DefaultContext)) + assert.Empty(t, issues[0].Comments) + + pulls, _, err := issues_model.PullRequests(db.DefaultContext, repo.ID, &issues_model.PullRequestsOptions{ + SortType: "oldest", + }) + require.NoError(t, err) + assert.Len(t, pulls, 30) + require.NoError(t, pulls[0].LoadIssue(db.DefaultContext)) + require.NoError(t, pulls[0].Issue.LoadDiscussComments(db.DefaultContext)) + assert.Len(t, pulls[0].Issue.Comments, 2) +} + +func TestGiteaUploadRemapLocalUser(t *testing.T) { + unittest.PrepareTestEnv(t) + doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1}) + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}) + + repoName := "migrated" + uploader := NewGiteaLocalUploader(context.Background(), doer, doer.Name, repoName) + // call remapLocalUser + uploader.sameApp = true + + externalID := int64(1234567) + externalName := "username" + source := base.Release{ + PublisherID: externalID, + PublisherName: externalName, + } + + // + // The externalID does not match any existing user, everything + // belongs to the Ghost user + // + target := repo_model.Release{} + uploader.userMap = make(map[int64]int64) + err := uploader.remapUser(&source, &target) + require.NoError(t, err) + assert.EqualValues(t, user_model.GhostUserID, target.GetUserID()) + + // + // The externalID matches a known user but the name does not match, + // everything belongs to the Ghost user + // + source.PublisherID = user.ID + target = repo_model.Release{} + uploader.userMap = make(map[int64]int64) + err = uploader.remapUser(&source, &target) + require.NoError(t, err) + assert.EqualValues(t, user_model.GhostUserID, target.GetUserID()) + + // + // The externalID and externalName match an existing user, everything + // belongs to the existing user + // + source.PublisherName = user.Name + target = repo_model.Release{} + uploader.userMap = make(map[int64]int64) + err = 
uploader.remapUser(&source, &target) + require.NoError(t, err) + assert.EqualValues(t, user.ID, target.GetUserID()) +} + +func TestGiteaUploadRemapExternalUser(t *testing.T) { + unittest.PrepareTestEnv(t) + doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1}) + + repoName := "migrated" + uploader := NewGiteaLocalUploader(context.Background(), doer, doer.Name, repoName) + uploader.gitServiceType = structs.GiteaService + // call remapExternalUser + uploader.sameApp = false + + externalID := int64(1234567) + externalName := "username" + source := base.Release{ + PublisherID: externalID, + PublisherName: externalName, + } + + // + // When there is no user linked to the external ID, the migrated data is authored + // by the Ghost user + // + uploader.userMap = make(map[int64]int64) + target := repo_model.Release{} + err := uploader.remapUser(&source, &target) + require.NoError(t, err) + assert.EqualValues(t, user_model.GhostUserID, target.GetUserID()) + + // + // Link the external ID to an existing user + // + linkedUser := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}) + externalLoginUser := &user_model.ExternalLoginUser{ + ExternalID: strconv.FormatInt(externalID, 10), + UserID: linkedUser.ID, + LoginSourceID: 0, + Provider: structs.GiteaService.Name(), + } + err = user_model.LinkExternalToUser(db.DefaultContext, linkedUser, externalLoginUser) + require.NoError(t, err) + + // + // When a user is linked to the external ID, it becomes the author of + // the migrated data + // + uploader.userMap = make(map[int64]int64) + target = repo_model.Release{} + err = uploader.remapUser(&source, &target) + require.NoError(t, err) + assert.EqualValues(t, linkedUser.ID, target.GetUserID()) +} + +func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) { + unittest.PrepareTestEnv(t) + + // + // fromRepo master + // + fromRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}) + baseRef := "master" + require.NoError(t, 
git.InitRepository(git.DefaultContext, fromRepo.RepoPath(), false, fromRepo.ObjectFormatName)) + err := git.NewCommand(git.DefaultContext, "symbolic-ref").AddDynamicArguments("HEAD", git.BranchPrefix+baseRef).Run(&git.RunOpts{Dir: fromRepo.RepoPath()}) + require.NoError(t, err) + require.NoError(t, os.WriteFile(filepath.Join(fromRepo.RepoPath(), "README.md"), []byte(fmt.Sprintf("# Testing Repository\n\nOriginally created in: %s", fromRepo.RepoPath())), 0o644)) + require.NoError(t, git.AddChanges(fromRepo.RepoPath(), true)) + signature := git.Signature{ + Email: "test@example.com", + Name: "test", + When: time.Now(), + } + require.NoError(t, git.CommitChanges(fromRepo.RepoPath(), git.CommitChangesOptions{ + Committer: &signature, + Author: &signature, + Message: "Initial Commit", + })) + fromGitRepo, err := gitrepo.OpenRepository(git.DefaultContext, fromRepo) + require.NoError(t, err) + defer fromGitRepo.Close() + baseSHA, err := fromGitRepo.GetBranchCommitID(baseRef) + require.NoError(t, err) + + // + // fromRepo branch1 + // + headRef := "branch1" + _, _, err = git.NewCommand(git.DefaultContext, "checkout", "-b").AddDynamicArguments(headRef).RunStdString(&git.RunOpts{Dir: fromRepo.RepoPath()}) + require.NoError(t, err) + require.NoError(t, os.WriteFile(filepath.Join(fromRepo.RepoPath(), "README.md"), []byte("SOMETHING"), 0o644)) + require.NoError(t, git.AddChanges(fromRepo.RepoPath(), true)) + signature.When = time.Now() + require.NoError(t, git.CommitChanges(fromRepo.RepoPath(), git.CommitChangesOptions{ + Committer: &signature, + Author: &signature, + Message: "Pull request", + })) + require.NoError(t, err) + headSHA, err := fromGitRepo.GetBranchCommitID(headRef) + require.NoError(t, err) + + fromRepoOwner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: fromRepo.OwnerID}) + + // + // forkRepo branch2 + // + forkHeadRef := "branch2" + forkRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 8}) + require.NoError(t, 
git.CloneWithArgs(git.DefaultContext, nil, fromRepo.RepoPath(), forkRepo.RepoPath(), git.CloneRepoOptions{ + Branch: headRef, + })) + _, _, err = git.NewCommand(git.DefaultContext, "checkout", "-b").AddDynamicArguments(forkHeadRef).RunStdString(&git.RunOpts{Dir: forkRepo.RepoPath()}) + require.NoError(t, err) + require.NoError(t, os.WriteFile(filepath.Join(forkRepo.RepoPath(), "README.md"), []byte(fmt.Sprintf("# branch2 %s", forkRepo.RepoPath())), 0o644)) + require.NoError(t, git.AddChanges(forkRepo.RepoPath(), true)) + require.NoError(t, git.CommitChanges(forkRepo.RepoPath(), git.CommitChangesOptions{ + Committer: &signature, + Author: &signature, + Message: "branch2 commit", + })) + forkGitRepo, err := gitrepo.OpenRepository(git.DefaultContext, forkRepo) + require.NoError(t, err) + defer forkGitRepo.Close() + forkHeadSHA, err := forkGitRepo.GetBranchCommitID(forkHeadRef) + require.NoError(t, err) + + toRepoName := "migrated" + uploader := NewGiteaLocalUploader(context.Background(), fromRepoOwner, fromRepoOwner.Name, toRepoName) + uploader.gitServiceType = structs.GiteaService + require.NoError(t, uploader.CreateRepo(&base.Repository{ + Description: "description", + OriginalURL: fromRepo.RepoPath(), + CloneURL: fromRepo.RepoPath(), + IsPrivate: false, + IsMirror: true, + }, base.MigrateOptions{ + GitServiceType: structs.GiteaService, + Private: false, + Mirror: true, + })) + + for _, testCase := range []struct { + name string + head string + logFilter []string + logFiltered []bool + pr base.PullRequest + }{ + { + name: "fork, good Head.SHA", + head: fmt.Sprintf("%s/%s", forkRepo.OwnerName, forkHeadRef), + pr: base.PullRequest{ + PatchURL: "", + Number: 1, + State: "open", + Base: base.PullRequestBranch{ + CloneURL: fromRepo.RepoPath(), + Ref: baseRef, + SHA: baseSHA, + RepoName: fromRepo.Name, + OwnerName: fromRepo.OwnerName, + }, + Head: base.PullRequestBranch{ + CloneURL: forkRepo.RepoPath(), + Ref: forkHeadRef, + SHA: forkHeadSHA, + RepoName: forkRepo.Name, + 
OwnerName: forkRepo.OwnerName, + }, + }, + }, + { + name: "fork, invalid Head.Ref", + head: "unknown repository", + pr: base.PullRequest{ + PatchURL: "", + Number: 1, + State: "open", + Base: base.PullRequestBranch{ + CloneURL: fromRepo.RepoPath(), + Ref: baseRef, + SHA: baseSHA, + RepoName: fromRepo.Name, + OwnerName: fromRepo.OwnerName, + }, + Head: base.PullRequestBranch{ + CloneURL: forkRepo.RepoPath(), + Ref: "INVALID", + SHA: forkHeadSHA, + RepoName: forkRepo.Name, + OwnerName: forkRepo.OwnerName, + }, + }, + logFilter: []string{"Fetch branch from"}, + logFiltered: []bool{true}, + }, + { + name: "invalid fork CloneURL", + head: "unknown repository", + pr: base.PullRequest{ + PatchURL: "", + Number: 1, + State: "open", + Base: base.PullRequestBranch{ + CloneURL: fromRepo.RepoPath(), + Ref: baseRef, + SHA: baseSHA, + RepoName: fromRepo.Name, + OwnerName: fromRepo.OwnerName, + }, + Head: base.PullRequestBranch{ + CloneURL: "UNLIKELY", + Ref: forkHeadRef, + SHA: forkHeadSHA, + RepoName: forkRepo.Name, + OwnerName: "WRONG", + }, + }, + logFilter: []string{"AddRemote"}, + logFiltered: []bool{true}, + }, + { + name: "no fork, good Head.SHA", + head: headRef, + pr: base.PullRequest{ + PatchURL: "", + Number: 1, + State: "open", + Base: base.PullRequestBranch{ + CloneURL: fromRepo.RepoPath(), + Ref: baseRef, + SHA: baseSHA, + RepoName: fromRepo.Name, + OwnerName: fromRepo.OwnerName, + }, + Head: base.PullRequestBranch{ + CloneURL: fromRepo.RepoPath(), + Ref: headRef, + SHA: headSHA, + RepoName: fromRepo.Name, + OwnerName: fromRepo.OwnerName, + }, + }, + }, + { + name: "no fork, empty Head.SHA", + head: headRef, + pr: base.PullRequest{ + PatchURL: "", + Number: 1, + State: "open", + Base: base.PullRequestBranch{ + CloneURL: fromRepo.RepoPath(), + Ref: baseRef, + SHA: baseSHA, + RepoName: fromRepo.Name, + OwnerName: fromRepo.OwnerName, + }, + Head: base.PullRequestBranch{ + CloneURL: fromRepo.RepoPath(), + Ref: headRef, + SHA: "", + RepoName: fromRepo.Name, + OwnerName: 
fromRepo.OwnerName, + }, + }, + logFilter: []string{"Empty reference", "Cannot remove local head"}, + logFiltered: []bool{true, false}, + }, + { + name: "no fork, invalid Head.SHA", + head: headRef, + pr: base.PullRequest{ + PatchURL: "", + Number: 1, + State: "open", + Base: base.PullRequestBranch{ + CloneURL: fromRepo.RepoPath(), + Ref: baseRef, + SHA: baseSHA, + RepoName: fromRepo.Name, + OwnerName: fromRepo.OwnerName, + }, + Head: base.PullRequestBranch{ + CloneURL: fromRepo.RepoPath(), + Ref: headRef, + SHA: "brokenSHA", + RepoName: fromRepo.Name, + OwnerName: fromRepo.OwnerName, + }, + }, + logFilter: []string{"Deprecated local head"}, + logFiltered: []bool{true}, + }, + { + name: "no fork, not found Head.SHA", + head: headRef, + pr: base.PullRequest{ + PatchURL: "", + Number: 1, + State: "open", + Base: base.PullRequestBranch{ + CloneURL: fromRepo.RepoPath(), + Ref: baseRef, + SHA: baseSHA, + RepoName: fromRepo.Name, + OwnerName: fromRepo.OwnerName, + }, + Head: base.PullRequestBranch{ + CloneURL: fromRepo.RepoPath(), + Ref: headRef, + SHA: "2697b352310fcd01cbd1f3dbd43b894080027f68", + RepoName: fromRepo.Name, + OwnerName: fromRepo.OwnerName, + }, + }, + logFilter: []string{"Deprecated local head", "Cannot remove local head"}, + logFiltered: []bool{true, false}, + }, + } { + t.Run(testCase.name, func(t *testing.T) { + stopMark := fmt.Sprintf(">>>>>>>>>>>>>STOP: %s<<<<<<<<<<<<<<<", testCase.name) + + logChecker, cleanup := test.NewLogChecker(log.DEFAULT, log.INFO) + logChecker.Filter(testCase.logFilter...).StopMark(stopMark) + defer cleanup() + + testCase.pr.EnsuredSafe = true + + head, err := uploader.updateGitForPullRequest(&testCase.pr) + require.NoError(t, err) + assert.EqualValues(t, testCase.head, head) + + log.Info(stopMark) + + logFiltered, logStopped := logChecker.Check(5 * time.Second) + assert.True(t, logStopped) + if len(testCase.logFilter) > 0 { + assert.EqualValues(t, testCase.logFiltered, logFiltered, "for log message filters: %v", 
testCase.logFilter) + } + }) + } +} diff --git a/services/migrations/github.go b/services/migrations/github.go new file mode 100644 index 0000000..54d3859 --- /dev/null +++ b/services/migrations/github.go @@ -0,0 +1,885 @@ +// Copyright 2019 The Gitea Authors. All rights reserved. +// Copyright 2018 Jonas Franz. All rights reserved. +// SPDX-License-Identifier: MIT + +package migrations + +import ( + "context" + "fmt" + "io" + "net/http" + "net/url" + "strconv" + "strings" + "time" + + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/log" + base "code.gitea.io/gitea/modules/migration" + "code.gitea.io/gitea/modules/proxy" + "code.gitea.io/gitea/modules/structs" + + "github.com/google/go-github/v64/github" + "golang.org/x/oauth2" +) + +var ( + _ base.Downloader = &GithubDownloaderV3{} + _ base.DownloaderFactory = &GithubDownloaderV3Factory{} + // GithubLimitRateRemaining limit to wait for new rate to apply + GithubLimitRateRemaining = 0 +) + +func init() { + RegisterDownloaderFactory(&GithubDownloaderV3Factory{}) +} + +// GithubDownloaderV3Factory defines a github downloader v3 factory +type GithubDownloaderV3Factory struct{} + +// New returns a Downloader related to this factory according MigrateOptions +func (f *GithubDownloaderV3Factory) New(ctx context.Context, opts base.MigrateOptions) (base.Downloader, error) { + u, err := url.Parse(opts.CloneAddr) + if err != nil { + return nil, err + } + + // some users are using the github redirect url for migration + if u.Host == "www.github.com" { + u.Host = "github.com" + } + + baseURL := u.Scheme + "://" + u.Host + fields := strings.Split(u.Path, "/") + oldOwner := fields[1] + oldName := strings.TrimSuffix(fields[2], ".git") + + log.Trace("Create github downloader BaseURL: %s %s/%s", baseURL, oldOwner, oldName) + + return NewGithubDownloaderV3(ctx, baseURL, opts.AuthUsername, opts.AuthPassword, opts.AuthToken, oldOwner, oldName), nil +} + +// GitServiceType returns the type of git service +func (f 
*GithubDownloaderV3Factory) GitServiceType() structs.GitServiceType { + return structs.GithubService +} + +// GithubDownloaderV3 implements a Downloader interface to get repository information +// from github via APIv3 +type GithubDownloaderV3 struct { + base.NullDownloader + ctx context.Context + clients []*github.Client + baseURL string + repoOwner string + repoName string + userName string + password string + rates []*github.Rate + curClientIdx int + maxPerPage int + SkipReactions bool + SkipReviews bool +} + +// NewGithubDownloaderV3 creates a github Downloader via github v3 API +func NewGithubDownloaderV3(ctx context.Context, baseURL, userName, password, token, repoOwner, repoName string) *GithubDownloaderV3 { + downloader := GithubDownloaderV3{ + userName: userName, + baseURL: baseURL, + password: password, + ctx: ctx, + repoOwner: repoOwner, + repoName: repoName, + maxPerPage: 100, + } + + if token != "" { + tokens := strings.Split(token, ",") + for _, token := range tokens { + token = strings.TrimSpace(token) + ts := oauth2.StaticTokenSource( + &oauth2.Token{AccessToken: token}, + ) + client := &http.Client{ + Transport: &oauth2.Transport{ + Base: NewMigrationHTTPTransport(), + Source: oauth2.ReuseTokenSource(nil, ts), + }, + } + + downloader.addClient(client, baseURL) + } + } else { + transport := NewMigrationHTTPTransport() + transport.Proxy = func(req *http.Request) (*url.URL, error) { + req.SetBasicAuth(userName, password) + return proxy.Proxy()(req) + } + client := &http.Client{ + Transport: transport, + } + downloader.addClient(client, baseURL) + } + return &downloader +} + +// String implements Stringer +func (g *GithubDownloaderV3) String() string { + return fmt.Sprintf("migration from github server %s %s/%s", g.baseURL, g.repoOwner, g.repoName) +} + +func (g *GithubDownloaderV3) LogString() string { + if g == nil { + return "<GithubDownloaderV3 nil>" + } + return fmt.Sprintf("<GithubDownloaderV3 %s %s/%s>", g.baseURL, g.repoOwner, g.repoName) +} + 
+func (g *GithubDownloaderV3) addClient(client *http.Client, baseURL string) { + githubClient := github.NewClient(client) + if baseURL != "https://github.com" { + githubClient, _ = github.NewClient(client).WithEnterpriseURLs(baseURL, baseURL) + } + g.clients = append(g.clients, githubClient) + g.rates = append(g.rates, nil) +} + +// SetContext set context +func (g *GithubDownloaderV3) SetContext(ctx context.Context) { + g.ctx = ctx +} + +func (g *GithubDownloaderV3) waitAndPickClient() { + var recentIdx int + var maxRemaining int + for i := 0; i < len(g.clients); i++ { + if g.rates[i] != nil && g.rates[i].Remaining > maxRemaining { + maxRemaining = g.rates[i].Remaining + recentIdx = i + } + } + g.curClientIdx = recentIdx // if no max remain, it will always pick the first client. + + for g.rates[g.curClientIdx] != nil && g.rates[g.curClientIdx].Remaining <= GithubLimitRateRemaining { + timer := time.NewTimer(time.Until(g.rates[g.curClientIdx].Reset.Time)) + select { + case <-g.ctx.Done(): + timer.Stop() + return + case <-timer.C: + } + + err := g.RefreshRate() + if err != nil { + log.Error("g.getClient().RateLimit.Get: %s", err) + } + } +} + +// RefreshRate update the current rate (doesn't count in rate limit) +func (g *GithubDownloaderV3) RefreshRate() error { + rates, _, err := g.getClient().RateLimit.Get(g.ctx) + if err != nil { + // if rate limit is not enabled, ignore it + if strings.Contains(err.Error(), "404") { + g.setRate(nil) + return nil + } + return err + } + + g.setRate(rates.GetCore()) + return nil +} + +func (g *GithubDownloaderV3) getClient() *github.Client { + return g.clients[g.curClientIdx] +} + +func (g *GithubDownloaderV3) setRate(rate *github.Rate) { + g.rates[g.curClientIdx] = rate +} + +// GetRepoInfo returns a repository information +func (g *GithubDownloaderV3) GetRepoInfo() (*base.Repository, error) { + g.waitAndPickClient() + gr, resp, err := g.getClient().Repositories.Get(g.ctx, g.repoOwner, g.repoName) + if err != nil { + return nil, 
err + } + g.setRate(&resp.Rate) + + // convert github repo to stand Repo + return &base.Repository{ + Owner: g.repoOwner, + Name: gr.GetName(), + IsPrivate: gr.GetPrivate(), + Description: gr.GetDescription(), + OriginalURL: gr.GetHTMLURL(), + CloneURL: gr.GetCloneURL(), + DefaultBranch: gr.GetDefaultBranch(), + }, nil +} + +// GetTopics return github topics +func (g *GithubDownloaderV3) GetTopics() ([]string, error) { + g.waitAndPickClient() + r, resp, err := g.getClient().Repositories.Get(g.ctx, g.repoOwner, g.repoName) + if err != nil { + return nil, err + } + g.setRate(&resp.Rate) + return r.Topics, nil +} + +// GetMilestones returns milestones +func (g *GithubDownloaderV3) GetMilestones() ([]*base.Milestone, error) { + perPage := g.maxPerPage + milestones := make([]*base.Milestone, 0, perPage) + for i := 1; ; i++ { + g.waitAndPickClient() + ms, resp, err := g.getClient().Issues.ListMilestones(g.ctx, g.repoOwner, g.repoName, + &github.MilestoneListOptions{ + State: "all", + ListOptions: github.ListOptions{ + Page: i, + PerPage: perPage, + }, + }) + if err != nil { + return nil, err + } + g.setRate(&resp.Rate) + + for _, m := range ms { + state := "open" + if m.State != nil { + state = *m.State + } + milestones = append(milestones, &base.Milestone{ + Title: m.GetTitle(), + Description: m.GetDescription(), + Deadline: m.DueOn.GetTime(), + State: state, + Created: m.GetCreatedAt().Time, + Updated: m.UpdatedAt.GetTime(), + Closed: m.ClosedAt.GetTime(), + }) + } + if len(ms) < perPage { + break + } + } + return milestones, nil +} + +func convertGithubLabel(label *github.Label) *base.Label { + return &base.Label{ + Name: label.GetName(), + Color: label.GetColor(), + Description: label.GetDescription(), + } +} + +// GetLabels returns labels +func (g *GithubDownloaderV3) GetLabels() ([]*base.Label, error) { + perPage := g.maxPerPage + labels := make([]*base.Label, 0, perPage) + for i := 1; ; i++ { + g.waitAndPickClient() + ls, resp, err := 
g.getClient().Issues.ListLabels(g.ctx, g.repoOwner, g.repoName, + &github.ListOptions{ + Page: i, + PerPage: perPage, + }) + if err != nil { + return nil, err + } + g.setRate(&resp.Rate) + + for _, label := range ls { + labels = append(labels, convertGithubLabel(label)) + } + if len(ls) < perPage { + break + } + } + return labels, nil +} + +func (g *GithubDownloaderV3) convertGithubRelease(rel *github.RepositoryRelease) *base.Release { + // GitHub allows committish to be a reference. + // In this case, we need to remove the prefix, i.e. convert "refs/heads/main" to "main". + targetCommitish := strings.TrimPrefix(rel.GetTargetCommitish(), git.BranchPrefix) + + r := &base.Release{ + Name: rel.GetName(), + TagName: rel.GetTagName(), + TargetCommitish: targetCommitish, + Draft: rel.GetDraft(), + Prerelease: rel.GetPrerelease(), + Created: rel.GetCreatedAt().Time, + PublisherID: rel.GetAuthor().GetID(), + PublisherName: rel.GetAuthor().GetLogin(), + PublisherEmail: rel.GetAuthor().GetEmail(), + Body: rel.GetBody(), + } + + if rel.PublishedAt != nil { + r.Published = rel.PublishedAt.Time + } + + httpClient := NewMigrationHTTPClient() + + for _, asset := range rel.Assets { + assetID := *asset.ID // Don't optimize this, for closure we need a local variable + r.Assets = append(r.Assets, &base.ReleaseAsset{ + ID: asset.GetID(), + Name: asset.GetName(), + ContentType: asset.ContentType, + Size: asset.Size, + DownloadCount: asset.DownloadCount, + Created: asset.CreatedAt.Time, + Updated: asset.UpdatedAt.Time, + DownloadFunc: func() (io.ReadCloser, error) { + g.waitAndPickClient() + readCloser, redirectURL, err := g.getClient().Repositories.DownloadReleaseAsset(g.ctx, g.repoOwner, g.repoName, assetID, nil) + if err != nil { + return nil, err + } + if err := g.RefreshRate(); err != nil { + log.Error("g.getClient().RateLimits: %s", err) + } + + if readCloser != nil { + return readCloser, nil + } + + if redirectURL == "" { + return nil, fmt.Errorf("no release asset found for %d", 
assetID) + } + + // Prevent open redirect + if !hasBaseURL(redirectURL, g.baseURL) && + !hasBaseURL(redirectURL, "https://objects.githubusercontent.com/") { + WarnAndNotice("Unexpected AssetURL for assetID[%d] in %s: %s", asset.GetID(), g, redirectURL) + + return io.NopCloser(strings.NewReader(redirectURL)), nil + } + + g.waitAndPickClient() + req, err := http.NewRequestWithContext(g.ctx, "GET", redirectURL, nil) + if err != nil { + return nil, err + } + resp, err := httpClient.Do(req) + err1 := g.RefreshRate() + if err1 != nil { + log.Error("g.RefreshRate(): %s", err1) + } + if err != nil { + return nil, err + } + return resp.Body, nil + }, + }) + } + return r +} + +// GetReleases returns releases +func (g *GithubDownloaderV3) GetReleases() ([]*base.Release, error) { + perPage := g.maxPerPage + releases := make([]*base.Release, 0, perPage) + for i := 1; ; i++ { + g.waitAndPickClient() + ls, resp, err := g.getClient().Repositories.ListReleases(g.ctx, g.repoOwner, g.repoName, + &github.ListOptions{ + Page: i, + PerPage: perPage, + }) + if err != nil { + return nil, err + } + g.setRate(&resp.Rate) + + for _, release := range ls { + releases = append(releases, g.convertGithubRelease(release)) + } + if len(ls) < perPage { + break + } + } + return releases, nil +} + +// GetIssues returns issues according start and limit +func (g *GithubDownloaderV3) GetIssues(page, perPage int) ([]*base.Issue, bool, error) { + if perPage > g.maxPerPage { + perPage = g.maxPerPage + } + opt := &github.IssueListByRepoOptions{ + Sort: "created", + Direction: "asc", + State: "all", + ListOptions: github.ListOptions{ + PerPage: perPage, + Page: page, + }, + } + + allIssues := make([]*base.Issue, 0, perPage) + g.waitAndPickClient() + issues, resp, err := g.getClient().Issues.ListByRepo(g.ctx, g.repoOwner, g.repoName, opt) + if err != nil { + return nil, false, fmt.Errorf("error while listing repos: %w", err) + } + log.Trace("Request get issues %d/%d, but in fact get %d", perPage, page, 
len(issues)) + g.setRate(&resp.Rate) + for _, issue := range issues { + if issue.IsPullRequest() { + continue + } + + labels := make([]*base.Label, 0, len(issue.Labels)) + for _, l := range issue.Labels { + labels = append(labels, convertGithubLabel(l)) + } + + // get reactions + var reactions []*base.Reaction + if !g.SkipReactions { + for i := 1; ; i++ { + g.waitAndPickClient() + res, resp, err := g.getClient().Reactions.ListIssueReactions(g.ctx, g.repoOwner, g.repoName, issue.GetNumber(), &github.ListOptions{ + Page: i, + PerPage: perPage, + }) + if err != nil { + return nil, false, err + } + g.setRate(&resp.Rate) + if len(res) == 0 { + break + } + for _, reaction := range res { + reactions = append(reactions, &base.Reaction{ + UserID: reaction.User.GetID(), + UserName: reaction.User.GetLogin(), + Content: reaction.GetContent(), + }) + } + } + } + + var assignees []string + for i := range issue.Assignees { + assignees = append(assignees, issue.Assignees[i].GetLogin()) + } + + allIssues = append(allIssues, &base.Issue{ + Title: *issue.Title, + Number: int64(*issue.Number), + PosterID: issue.GetUser().GetID(), + PosterName: issue.GetUser().GetLogin(), + PosterEmail: issue.GetUser().GetEmail(), + Content: issue.GetBody(), + Milestone: issue.GetMilestone().GetTitle(), + State: issue.GetState(), + Created: issue.GetCreatedAt().Time, + Updated: issue.GetUpdatedAt().Time, + Labels: labels, + Reactions: reactions, + Closed: issue.ClosedAt.GetTime(), + IsLocked: issue.GetLocked(), + Assignees: assignees, + ForeignIndex: int64(*issue.Number), + }) + } + + return allIssues, len(issues) < perPage, nil +} + +// SupportGetRepoComments return true if it supports get repo comments +func (g *GithubDownloaderV3) SupportGetRepoComments() bool { + return true +} + +// GetComments returns comments according issueNumber +func (g *GithubDownloaderV3) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) { + comments, err := g.getComments(commentable) + return 
comments, false, err +} + +func (g *GithubDownloaderV3) getComments(commentable base.Commentable) ([]*base.Comment, error) { + var ( + allComments = make([]*base.Comment, 0, g.maxPerPage) + created = "created" + asc = "asc" + ) + opt := &github.IssueListCommentsOptions{ + Sort: &created, + Direction: &asc, + ListOptions: github.ListOptions{ + PerPage: g.maxPerPage, + }, + } + for { + g.waitAndPickClient() + comments, resp, err := g.getClient().Issues.ListComments(g.ctx, g.repoOwner, g.repoName, int(commentable.GetForeignIndex()), opt) + if err != nil { + return nil, fmt.Errorf("error while listing repos: %w", err) + } + g.setRate(&resp.Rate) + for _, comment := range comments { + // get reactions + var reactions []*base.Reaction + if !g.SkipReactions { + for i := 1; ; i++ { + g.waitAndPickClient() + res, resp, err := g.getClient().Reactions.ListIssueCommentReactions(g.ctx, g.repoOwner, g.repoName, comment.GetID(), &github.ListOptions{ + Page: i, + PerPage: g.maxPerPage, + }) + if err != nil { + return nil, err + } + g.setRate(&resp.Rate) + if len(res) == 0 { + break + } + for _, reaction := range res { + reactions = append(reactions, &base.Reaction{ + UserID: reaction.User.GetID(), + UserName: reaction.User.GetLogin(), + Content: reaction.GetContent(), + }) + } + } + } + + allComments = append(allComments, &base.Comment{ + IssueIndex: commentable.GetLocalIndex(), + Index: comment.GetID(), + PosterID: comment.GetUser().GetID(), + PosterName: comment.GetUser().GetLogin(), + PosterEmail: comment.GetUser().GetEmail(), + Content: comment.GetBody(), + Created: comment.GetCreatedAt().Time, + Updated: comment.GetUpdatedAt().Time, + Reactions: reactions, + }) + } + if resp.NextPage == 0 { + break + } + opt.Page = resp.NextPage + } + return allComments, nil +} + +// GetAllComments returns repository comments according page and perPageSize +func (g *GithubDownloaderV3) GetAllComments(page, perPage int) ([]*base.Comment, bool, error) { + var ( + allComments = 
make([]*base.Comment, 0, perPage) + created = "created" + asc = "asc" + ) + if perPage > g.maxPerPage { + perPage = g.maxPerPage + } + opt := &github.IssueListCommentsOptions{ + Sort: &created, + Direction: &asc, + ListOptions: github.ListOptions{ + Page: page, + PerPage: perPage, + }, + } + + g.waitAndPickClient() + comments, resp, err := g.getClient().Issues.ListComments(g.ctx, g.repoOwner, g.repoName, 0, opt) + if err != nil { + return nil, false, fmt.Errorf("error while listing repos: %w", err) + } + isEnd := resp.NextPage == 0 + + log.Trace("Request get comments %d/%d, but in fact get %d, next page is %d", perPage, page, len(comments), resp.NextPage) + g.setRate(&resp.Rate) + for _, comment := range comments { + // get reactions + var reactions []*base.Reaction + if !g.SkipReactions { + for i := 1; ; i++ { + g.waitAndPickClient() + res, resp, err := g.getClient().Reactions.ListIssueCommentReactions(g.ctx, g.repoOwner, g.repoName, comment.GetID(), &github.ListOptions{ + Page: i, + PerPage: g.maxPerPage, + }) + if err != nil { + return nil, false, err + } + g.setRate(&resp.Rate) + if len(res) == 0 { + break + } + for _, reaction := range res { + reactions = append(reactions, &base.Reaction{ + UserID: reaction.User.GetID(), + UserName: reaction.User.GetLogin(), + Content: reaction.GetContent(), + }) + } + } + } + idx := strings.LastIndex(*comment.IssueURL, "/") + issueIndex, _ := strconv.ParseInt((*comment.IssueURL)[idx+1:], 10, 64) + allComments = append(allComments, &base.Comment{ + IssueIndex: issueIndex, + Index: comment.GetID(), + PosterID: comment.GetUser().GetID(), + PosterName: comment.GetUser().GetLogin(), + PosterEmail: comment.GetUser().GetEmail(), + Content: comment.GetBody(), + Created: comment.GetCreatedAt().Time, + Updated: comment.GetUpdatedAt().Time, + Reactions: reactions, + }) + } + + return allComments, isEnd, nil +} + +// GetPullRequests returns pull requests according page and perPage +func (g *GithubDownloaderV3) GetPullRequests(page, 
perPage int) ([]*base.PullRequest, bool, error) { + if perPage > g.maxPerPage { + perPage = g.maxPerPage + } + opt := &github.PullRequestListOptions{ + Sort: "created", + Direction: "asc", + State: "all", + ListOptions: github.ListOptions{ + PerPage: perPage, + Page: page, + }, + } + allPRs := make([]*base.PullRequest, 0, perPage) + g.waitAndPickClient() + prs, resp, err := g.getClient().PullRequests.List(g.ctx, g.repoOwner, g.repoName, opt) + if err != nil { + return nil, false, fmt.Errorf("error while listing repos: %w", err) + } + log.Trace("Request get pull requests %d/%d, but in fact get %d", perPage, page, len(prs)) + g.setRate(&resp.Rate) + for _, pr := range prs { + labels := make([]*base.Label, 0, len(pr.Labels)) + for _, l := range pr.Labels { + labels = append(labels, convertGithubLabel(l)) + } + + // get reactions + var reactions []*base.Reaction + if !g.SkipReactions { + for i := 1; ; i++ { + g.waitAndPickClient() + res, resp, err := g.getClient().Reactions.ListIssueReactions(g.ctx, g.repoOwner, g.repoName, pr.GetNumber(), &github.ListOptions{ + Page: i, + PerPage: perPage, + }) + if err != nil { + return nil, false, err + } + g.setRate(&resp.Rate) + if len(res) == 0 { + break + } + for _, reaction := range res { + reactions = append(reactions, &base.Reaction{ + UserID: reaction.User.GetID(), + UserName: reaction.User.GetLogin(), + Content: reaction.GetContent(), + }) + } + } + } + + // download patch and saved as tmp file + g.waitAndPickClient() + + allPRs = append(allPRs, &base.PullRequest{ + Title: pr.GetTitle(), + Number: int64(pr.GetNumber()), + PosterID: pr.GetUser().GetID(), + PosterName: pr.GetUser().GetLogin(), + PosterEmail: pr.GetUser().GetEmail(), + Content: pr.GetBody(), + Milestone: pr.GetMilestone().GetTitle(), + State: pr.GetState(), + Created: pr.GetCreatedAt().Time, + Updated: pr.GetUpdatedAt().Time, + Closed: pr.ClosedAt.GetTime(), + Labels: labels, + Merged: pr.MergedAt != nil, + MergeCommitSHA: pr.GetMergeCommitSHA(), + MergedTime: 
pr.MergedAt.GetTime(), + IsLocked: pr.ActiveLockReason != nil, + Head: base.PullRequestBranch{ + Ref: pr.GetHead().GetRef(), + SHA: pr.GetHead().GetSHA(), + OwnerName: pr.GetHead().GetUser().GetLogin(), + RepoName: pr.GetHead().GetRepo().GetName(), + CloneURL: pr.GetHead().GetRepo().GetCloneURL(), // see below for SECURITY related issues here + }, + Base: base.PullRequestBranch{ + Ref: pr.GetBase().GetRef(), + SHA: pr.GetBase().GetSHA(), + RepoName: pr.GetBase().GetRepo().GetName(), + OwnerName: pr.GetBase().GetUser().GetLogin(), + }, + PatchURL: pr.GetPatchURL(), // see below for SECURITY related issues here + Reactions: reactions, + ForeignIndex: int64(*pr.Number), + }) + + // SECURITY: Ensure that the PR is safe + _ = CheckAndEnsureSafePR(allPRs[len(allPRs)-1], g.baseURL, g) + } + + return allPRs, len(prs) < perPage, nil +} + +func convertGithubReview(r *github.PullRequestReview) *base.Review { + return &base.Review{ + ID: r.GetID(), + ReviewerID: r.GetUser().GetID(), + ReviewerName: r.GetUser().GetLogin(), + CommitID: r.GetCommitID(), + Content: r.GetBody(), + CreatedAt: r.GetSubmittedAt().Time, + State: r.GetState(), + } +} + +func (g *GithubDownloaderV3) convertGithubReviewComments(cs []*github.PullRequestComment) ([]*base.ReviewComment, error) { + rcs := make([]*base.ReviewComment, 0, len(cs)) + for _, c := range cs { + // get reactions + var reactions []*base.Reaction + if !g.SkipReactions { + for i := 1; ; i++ { + g.waitAndPickClient() + res, resp, err := g.getClient().Reactions.ListPullRequestCommentReactions(g.ctx, g.repoOwner, g.repoName, c.GetID(), &github.ListOptions{ + Page: i, + PerPage: g.maxPerPage, + }) + if err != nil { + return nil, err + } + g.setRate(&resp.Rate) + if len(res) == 0 { + break + } + for _, reaction := range res { + reactions = append(reactions, &base.Reaction{ + UserID: reaction.User.GetID(), + UserName: reaction.User.GetLogin(), + Content: reaction.GetContent(), + }) + } + } + } + + rcs = append(rcs, &base.ReviewComment{ + ID: 
c.GetID(), + InReplyTo: c.GetInReplyTo(), + Content: c.GetBody(), + TreePath: c.GetPath(), + DiffHunk: c.GetDiffHunk(), + Position: c.GetPosition(), + CommitID: c.GetCommitID(), + PosterID: c.GetUser().GetID(), + Reactions: reactions, + CreatedAt: c.GetCreatedAt().Time, + UpdatedAt: c.GetUpdatedAt().Time, + }) + } + return rcs, nil +} + +// GetReviews returns pull requests review +func (g *GithubDownloaderV3) GetReviews(reviewable base.Reviewable) ([]*base.Review, error) { + allReviews := make([]*base.Review, 0, g.maxPerPage) + if g.SkipReviews { + return allReviews, nil + } + opt := &github.ListOptions{ + PerPage: g.maxPerPage, + } + // Get approve/request change reviews + for { + g.waitAndPickClient() + reviews, resp, err := g.getClient().PullRequests.ListReviews(g.ctx, g.repoOwner, g.repoName, int(reviewable.GetForeignIndex()), opt) + if err != nil { + return nil, fmt.Errorf("error while listing repos: %w", err) + } + g.setRate(&resp.Rate) + for _, review := range reviews { + r := convertGithubReview(review) + r.IssueIndex = reviewable.GetLocalIndex() + // retrieve all review comments + opt2 := &github.ListOptions{ + PerPage: g.maxPerPage, + } + for { + g.waitAndPickClient() + reviewComments, resp, err := g.getClient().PullRequests.ListReviewComments(g.ctx, g.repoOwner, g.repoName, int(reviewable.GetForeignIndex()), review.GetID(), opt2) + if err != nil { + return nil, fmt.Errorf("error while listing repos: %w", err) + } + g.setRate(&resp.Rate) + + cs, err := g.convertGithubReviewComments(reviewComments) + if err != nil { + return nil, err + } + r.Comments = append(r.Comments, cs...) 
+ if resp.NextPage == 0 { + break + } + opt2.Page = resp.NextPage + } + allReviews = append(allReviews, r) + } + if resp.NextPage == 0 { + break + } + opt.Page = resp.NextPage + } + // Get requested reviews + for { + g.waitAndPickClient() + reviewers, resp, err := g.getClient().PullRequests.ListReviewers(g.ctx, g.repoOwner, g.repoName, int(reviewable.GetForeignIndex()), opt) + if err != nil { + return nil, fmt.Errorf("error while listing repos: %w", err) + } + g.setRate(&resp.Rate) + for _, user := range reviewers.Users { + r := &base.Review{ + ReviewerID: user.GetID(), + ReviewerName: user.GetLogin(), + State: base.ReviewStateRequestReview, + IssueIndex: reviewable.GetLocalIndex(), + } + allReviews = append(allReviews, r) + } + // TODO: Handle Team requests + if resp.NextPage == 0 { + break + } + opt.Page = resp.NextPage + } + return allReviews, nil +} diff --git a/services/migrations/github_test.go b/services/migrations/github_test.go new file mode 100644 index 0000000..a2134f8 --- /dev/null +++ b/services/migrations/github_test.go @@ -0,0 +1,432 @@ +// Copyright 2019 The Gitea Authors. All rights reserved. +// Copyright 2018 Jonas Franz. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package migrations + +import ( + "context" + "os" + "testing" + "time" + + base "code.gitea.io/gitea/modules/migration" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestGitHubDownloadRepo(t *testing.T) { + GithubLimitRateRemaining = 3 // Wait at 3 remaining since we could have 3 CI in // + token := os.Getenv("GITHUB_READ_TOKEN") + if token == "" { + t.Skip("Skipping GitHub migration test because GITHUB_READ_TOKEN is empty") + } + downloader := NewGithubDownloaderV3(context.Background(), "https://github.com", "", "", token, "go-gitea", "test_repo") + err := downloader.RefreshRate() + require.NoError(t, err) + + repo, err := downloader.GetRepoInfo() + require.NoError(t, err) + assertRepositoryEqual(t, &base.Repository{ + Name: "test_repo", + Owner: "go-gitea", + Description: "Test repository for testing migration from github to gitea", + CloneURL: "https://github.com/go-gitea/test_repo.git", + OriginalURL: "https://github.com/go-gitea/test_repo", + DefaultBranch: "master", + }, repo) + + topics, err := downloader.GetTopics() + require.NoError(t, err) + assert.Contains(t, topics, "gitea") + + milestones, err := downloader.GetMilestones() + require.NoError(t, err) + assertMilestonesEqual(t, []*base.Milestone{ + { + Title: "1.0.0", + Description: "Milestone 1.0.0", + Deadline: timePtr(time.Date(2019, 11, 11, 8, 0, 0, 0, time.UTC)), + Created: time.Date(2019, 11, 12, 19, 37, 8, 0, time.UTC), + Updated: timePtr(time.Date(2019, 11, 12, 21, 56, 17, 0, time.UTC)), + Closed: timePtr(time.Date(2019, 11, 12, 19, 45, 49, 0, time.UTC)), + State: "closed", + }, + { + Title: "1.1.0", + Description: "Milestone 1.1.0", + Deadline: timePtr(time.Date(2019, 11, 12, 8, 0, 0, 0, time.UTC)), + Created: time.Date(2019, 11, 12, 19, 37, 25, 0, time.UTC), + Updated: timePtr(time.Date(2019, 11, 12, 21, 39, 27, 0, time.UTC)), + Closed: timePtr(time.Date(2019, 11, 12, 19, 45, 46, 0, time.UTC)), + State: "closed", + 
}, + }, milestones) + + labels, err := downloader.GetLabels() + require.NoError(t, err) + assertLabelsEqual(t, []*base.Label{ + { + Name: "bug", + Color: "d73a4a", + Description: "Something isn't working", + }, + { + Name: "documentation", + Color: "0075ca", + Description: "Improvements or additions to documentation", + }, + { + Name: "duplicate", + Color: "cfd3d7", + Description: "This issue or pull request already exists", + }, + { + Name: "enhancement", + Color: "a2eeef", + Description: "New feature or request", + }, + { + Name: "good first issue", + Color: "7057ff", + Description: "Good for newcomers", + }, + { + Name: "help wanted", + Color: "008672", + Description: "Extra attention is needed", + }, + { + Name: "invalid", + Color: "e4e669", + Description: "This doesn't seem right", + }, + { + Name: "question", + Color: "d876e3", + Description: "Further information is requested", + }, + { + Name: "wontfix", + Color: "ffffff", + Description: "This will not be worked on", + }, + }, labels) + + releases, err := downloader.GetReleases() + require.NoError(t, err) + assertReleasesEqual(t, []*base.Release{ + { + TagName: "v0.9.99", + TargetCommitish: "master", + Name: "First Release", + Body: "A test release", + Created: time.Date(2019, 11, 9, 16, 49, 21, 0, time.UTC), + Published: time.Date(2019, 11, 12, 20, 12, 10, 0, time.UTC), + PublisherID: 1669571, + PublisherName: "mrsdizzie", + }, + }, releases) + + // downloader.GetIssues() + issues, isEnd, err := downloader.GetIssues(1, 2) + require.NoError(t, err) + assert.False(t, isEnd) + assertIssuesEqual(t, []*base.Issue{ + { + Number: 1, + Title: "Please add an animated gif icon to the merge button", + Content: "I just want the merge button to hurt my eyes a little. 
\xF0\x9F\x98\x9D ", + Milestone: "1.0.0", + PosterID: 18600385, + PosterName: "guillep2k", + State: "closed", + Created: time.Date(2019, 11, 9, 17, 0, 29, 0, time.UTC), + Updated: time.Date(2019, 11, 12, 20, 29, 53, 0, time.UTC), + Labels: []*base.Label{ + { + Name: "bug", + Color: "d73a4a", + Description: "Something isn't working", + }, + { + Name: "good first issue", + Color: "7057ff", + Description: "Good for newcomers", + }, + }, + Reactions: []*base.Reaction{ + { + UserID: 1669571, + UserName: "mrsdizzie", + Content: "+1", + }, + }, + Closed: timePtr(time.Date(2019, 11, 12, 20, 22, 22, 0, time.UTC)), + }, + { + Number: 2, + Title: "Test issue", + Content: "This is test issue 2, do not touch!", + Milestone: "1.1.0", + PosterID: 1669571, + PosterName: "mrsdizzie", + State: "closed", + Created: time.Date(2019, 11, 12, 21, 0, 6, 0, time.UTC), + Updated: time.Date(2019, 11, 12, 22, 7, 14, 0, time.UTC), + Labels: []*base.Label{ + { + Name: "duplicate", + Color: "cfd3d7", + Description: "This issue or pull request already exists", + }, + }, + Reactions: []*base.Reaction{ + { + UserID: 1669571, + UserName: "mrsdizzie", + Content: "heart", + }, + { + UserID: 1669571, + UserName: "mrsdizzie", + Content: "laugh", + }, + { + UserID: 1669571, + UserName: "mrsdizzie", + Content: "-1", + }, + { + UserID: 1669571, + UserName: "mrsdizzie", + Content: "confused", + }, + { + UserID: 1669571, + UserName: "mrsdizzie", + Content: "hooray", + }, + { + UserID: 1669571, + UserName: "mrsdizzie", + Content: "+1", + }, + }, + Closed: timePtr(time.Date(2019, 11, 12, 21, 1, 31, 0, time.UTC)), + }, + }, issues) + + // downloader.GetComments() + comments, _, err := downloader.GetComments(&base.Issue{Number: 2, ForeignIndex: 2}) + require.NoError(t, err) + assertCommentsEqual(t, []*base.Comment{ + { + IssueIndex: 2, + PosterID: 1669571, + PosterName: "mrsdizzie", + Created: time.Date(2019, 11, 12, 21, 0, 13, 0, time.UTC), + Updated: time.Date(2019, 11, 12, 21, 0, 13, 0, time.UTC), + Content: 
"This is a comment", + Reactions: []*base.Reaction{ + { + UserID: 1669571, + UserName: "mrsdizzie", + Content: "+1", + }, + }, + }, + { + IssueIndex: 2, + PosterID: 1669571, + PosterName: "mrsdizzie", + Created: time.Date(2019, 11, 12, 22, 7, 14, 0, time.UTC), + Updated: time.Date(2019, 11, 12, 22, 7, 14, 0, time.UTC), + Content: "A second comment", + Reactions: nil, + }, + }, comments) + + // downloader.GetPullRequests() + prs, _, err := downloader.GetPullRequests(1, 2) + require.NoError(t, err) + assertPullRequestsEqual(t, []*base.PullRequest{ + { + Number: 3, + Title: "Update README.md", + Content: "add warning to readme", + Milestone: "1.1.0", + PosterID: 1669571, + PosterName: "mrsdizzie", + State: "closed", + Created: time.Date(2019, 11, 12, 21, 21, 43, 0, time.UTC), + Updated: time.Date(2019, 11, 12, 21, 39, 28, 0, time.UTC), + Labels: []*base.Label{ + { + Name: "documentation", + Color: "0075ca", + Description: "Improvements or additions to documentation", + }, + }, + PatchURL: "https://github.com/go-gitea/test_repo/pull/3.patch", + Head: base.PullRequestBranch{ + Ref: "master", + CloneURL: "https://github.com/mrsdizzie/test_repo.git", + SHA: "076160cf0b039f13e5eff19619932d181269414b", + RepoName: "test_repo", + + OwnerName: "mrsdizzie", + }, + Base: base.PullRequestBranch{ + Ref: "master", + SHA: "72866af952e98d02a73003501836074b286a78f6", + OwnerName: "go-gitea", + RepoName: "test_repo", + }, + Closed: timePtr(time.Date(2019, 11, 12, 21, 39, 27, 0, time.UTC)), + Merged: true, + MergedTime: timePtr(time.Date(2019, 11, 12, 21, 39, 27, 0, time.UTC)), + MergeCommitSHA: "f32b0a9dfd09a60f616f29158f772cedd89942d2", + ForeignIndex: 3, + }, + { + Number: 4, + Title: "Test branch", + Content: "do not merge this PR", + Milestone: "1.0.0", + PosterID: 1669571, + PosterName: "mrsdizzie", + State: "open", + Created: time.Date(2019, 11, 12, 21, 54, 18, 0, time.UTC), + Updated: time.Date(2020, 1, 4, 11, 30, 1, 0, time.UTC), + Labels: []*base.Label{ + { + Name: "bug", + 
Color: "d73a4a", + Description: "Something isn't working", + }, + }, + PatchURL: "https://github.com/go-gitea/test_repo/pull/4.patch", + Head: base.PullRequestBranch{ + Ref: "test-branch", + SHA: "2be9101c543658591222acbee3eb799edfc3853d", + RepoName: "test_repo", + OwnerName: "mrsdizzie", + CloneURL: "https://github.com/mrsdizzie/test_repo.git", + }, + Base: base.PullRequestBranch{ + Ref: "master", + SHA: "f32b0a9dfd09a60f616f29158f772cedd89942d2", + OwnerName: "go-gitea", + RepoName: "test_repo", + }, + Merged: false, + MergeCommitSHA: "565d1208f5fffdc1c5ae1a2436491eb9a5e4ebae", + Reactions: []*base.Reaction{ + { + UserID: 81045, + UserName: "lunny", + Content: "heart", + }, + { + UserID: 81045, + UserName: "lunny", + Content: "+1", + }, + }, + ForeignIndex: 4, + }, + }, prs) + + reviews, err := downloader.GetReviews(&base.PullRequest{Number: 3, ForeignIndex: 3}) + require.NoError(t, err) + assertReviewsEqual(t, []*base.Review{ + { + ID: 315859956, + IssueIndex: 3, + ReviewerID: 42128690, + ReviewerName: "jolheiser", + CommitID: "076160cf0b039f13e5eff19619932d181269414b", + CreatedAt: time.Date(2019, 11, 12, 21, 35, 24, 0, time.UTC), + State: base.ReviewStateApproved, + }, + { + ID: 315860062, + IssueIndex: 3, + ReviewerID: 1824502, + ReviewerName: "zeripath", + CommitID: "076160cf0b039f13e5eff19619932d181269414b", + CreatedAt: time.Date(2019, 11, 12, 21, 35, 36, 0, time.UTC), + State: base.ReviewStateApproved, + }, + { + ID: 315861440, + IssueIndex: 3, + ReviewerID: 165205, + ReviewerName: "lafriks", + CommitID: "076160cf0b039f13e5eff19619932d181269414b", + CreatedAt: time.Date(2019, 11, 12, 21, 38, 0, 0, time.UTC), + State: base.ReviewStateApproved, + }, + }, reviews) + + reviews, err = downloader.GetReviews(&base.PullRequest{Number: 4, ForeignIndex: 4}) + require.NoError(t, err) + assertReviewsEqual(t, []*base.Review{ + { + ID: 338338740, + IssueIndex: 4, + ReviewerID: 81045, + ReviewerName: "lunny", + CommitID: "2be9101c543658591222acbee3eb799edfc3853d", + 
CreatedAt: time.Date(2020, 1, 4, 5, 33, 18, 0, time.UTC), + State: base.ReviewStateApproved, + Comments: []*base.ReviewComment{ + { + ID: 363017488, + Content: "This is a good pull request.", + TreePath: "README.md", + DiffHunk: "@@ -1,2 +1,4 @@\n # test_repo\n Test repository for testing migration from github to gitea\n+", + Position: 3, + CommitID: "2be9101c543658591222acbee3eb799edfc3853d", + PosterID: 81045, + CreatedAt: time.Date(2020, 1, 4, 5, 33, 6, 0, time.UTC), + UpdatedAt: time.Date(2020, 1, 4, 5, 33, 18, 0, time.UTC), + }, + }, + }, + { + ID: 338339651, + IssueIndex: 4, + ReviewerID: 81045, + ReviewerName: "lunny", + CommitID: "2be9101c543658591222acbee3eb799edfc3853d", + CreatedAt: time.Date(2020, 1, 4, 6, 7, 6, 0, time.UTC), + State: base.ReviewStateChangesRequested, + Content: "Don't add more reviews", + }, + { + ID: 338349019, + IssueIndex: 4, + ReviewerID: 81045, + ReviewerName: "lunny", + CommitID: "2be9101c543658591222acbee3eb799edfc3853d", + CreatedAt: time.Date(2020, 1, 4, 11, 21, 41, 0, time.UTC), + State: base.ReviewStateCommented, + Comments: []*base.ReviewComment{ + { + ID: 363029944, + Content: "test a single comment.", + TreePath: "LICENSE", + DiffHunk: "@@ -19,3 +19,5 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n SOFTWARE.\n+", + Position: 4, + CommitID: "2be9101c543658591222acbee3eb799edfc3853d", + PosterID: 81045, + CreatedAt: time.Date(2020, 1, 4, 11, 21, 41, 0, time.UTC), + UpdatedAt: time.Date(2020, 1, 4, 11, 21, 41, 0, time.UTC), + }, + }, + }, + }, reviews) +} diff --git a/services/migrations/gitlab.go b/services/migrations/gitlab.go new file mode 100644 index 0000000..1639a34 --- /dev/null +++ b/services/migrations/gitlab.go @@ -0,0 +1,784 @@ +// Copyright 2019 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package migrations + +import ( + "context" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "path" + "regexp" + "strings" + "time" + + issues_model "code.gitea.io/gitea/models/issues" + "code.gitea.io/gitea/modules/container" + "code.gitea.io/gitea/modules/log" + base "code.gitea.io/gitea/modules/migration" + "code.gitea.io/gitea/modules/structs" + + "github.com/xanzy/go-gitlab" +) + +var ( + _ base.Downloader = &GitlabDownloader{} + _ base.DownloaderFactory = &GitlabDownloaderFactory{} +) + +func init() { + RegisterDownloaderFactory(&GitlabDownloaderFactory{}) +} + +// GitlabDownloaderFactory defines a gitlab downloader factory +type GitlabDownloaderFactory struct{} + +// New returns a Downloader related to this factory according MigrateOptions +func (f *GitlabDownloaderFactory) New(ctx context.Context, opts base.MigrateOptions) (base.Downloader, error) { + u, err := url.Parse(opts.CloneAddr) + if err != nil { + return nil, err + } + + baseURL := u.Scheme + "://" + u.Host + repoNameSpace := strings.TrimPrefix(u.Path, "/") + repoNameSpace = strings.TrimSuffix(repoNameSpace, ".git") + + log.Trace("Create gitlab downloader. 
BaseURL: %s RepoName: %s", baseURL, repoNameSpace) + + return NewGitlabDownloader(ctx, baseURL, repoNameSpace, opts.AuthUsername, opts.AuthPassword, opts.AuthToken) +} + +// GitServiceType returns the type of git service +func (f *GitlabDownloaderFactory) GitServiceType() structs.GitServiceType { + return structs.GitlabService +} + +type gitlabIIDResolver struct { + maxIssueIID int64 + frozen bool +} + +func (r *gitlabIIDResolver) recordIssueIID(issueIID int) { + if r.frozen { + panic("cannot record issue IID after pull request IID generation has started") + } + r.maxIssueIID = max(r.maxIssueIID, int64(issueIID)) +} + +func (r *gitlabIIDResolver) generatePullRequestNumber(mrIID int) int64 { + r.frozen = true + return r.maxIssueIID + int64(mrIID) +} + +// GitlabDownloader implements a Downloader interface to get repository information +// from gitlab via go-gitlab +// - issueCount is incremented in GetIssues() to ensure PR and Issue numbers do not overlap, +// because Gitlab has individual Issue and Pull Request numbers. 
+type GitlabDownloader struct { + base.NullDownloader + ctx context.Context + client *gitlab.Client + baseURL string + repoID int + repoName string + iidResolver gitlabIIDResolver + maxPerPage int +} + +// NewGitlabDownloader creates a gitlab Downloader via gitlab API +// +// Use either a username/password, personal token entered into the username field, or anonymous/public access +// Note: Public access only allows very basic access +func NewGitlabDownloader(ctx context.Context, baseURL, repoPath, username, password, token string) (*GitlabDownloader, error) { + gitlabClient, err := gitlab.NewClient(token, gitlab.WithBaseURL(baseURL), gitlab.WithHTTPClient(NewMigrationHTTPClient())) + // Only use basic auth if token is blank and password is NOT + // Basic auth will fail with empty strings, but empty token will allow anonymous public API usage + if token == "" && password != "" { + gitlabClient, err = gitlab.NewBasicAuthClient(username, password, gitlab.WithBaseURL(baseURL), gitlab.WithHTTPClient(NewMigrationHTTPClient())) + } + + if err != nil { + log.Trace("Error logging into gitlab: %v", err) + return nil, err + } + + // split namespace and subdirectory + pathParts := strings.Split(strings.Trim(repoPath, "/"), "/") + var resp *gitlab.Response + u, _ := url.Parse(baseURL) + for len(pathParts) >= 2 { + _, resp, err = gitlabClient.Version.GetVersion() + if err == nil || resp != nil && resp.StatusCode == http.StatusUnauthorized { + err = nil // if no authentication given, this still should work + break + } + + u.Path = path.Join(u.Path, pathParts[0]) + baseURL = u.String() + pathParts = pathParts[1:] + _ = gitlab.WithBaseURL(baseURL)(gitlabClient) + repoPath = strings.Join(pathParts, "/") + } + if err != nil { + log.Trace("Error could not get gitlab version: %v", err) + return nil, err + } + + log.Trace("gitlab downloader: use BaseURL: '%s' and RepoPath: '%s'", baseURL, repoPath) + + // Grab and store project/repo ID here, due to issues using the URL escaped path + 
gr, _, err := gitlabClient.Projects.GetProject(repoPath, nil, nil, gitlab.WithContext(ctx)) + if err != nil { + log.Trace("Error retrieving project: %v", err) + return nil, err + } + + if gr == nil { + log.Trace("Error getting project, project is nil") + return nil, errors.New("Error getting project, project is nil") + } + + return &GitlabDownloader{ + ctx: ctx, + client: gitlabClient, + baseURL: baseURL, + repoID: gr.ID, + repoName: gr.Name, + maxPerPage: 100, + }, nil +} + +// String implements Stringer +func (g *GitlabDownloader) String() string { + return fmt.Sprintf("migration from gitlab server %s [%d]/%s", g.baseURL, g.repoID, g.repoName) +} + +func (g *GitlabDownloader) LogString() string { + if g == nil { + return "<GitlabDownloader nil>" + } + return fmt.Sprintf("<GitlabDownloader %s [%d]/%s>", g.baseURL, g.repoID, g.repoName) +} + +// SetContext set context +func (g *GitlabDownloader) SetContext(ctx context.Context) { + g.ctx = ctx +} + +// GetRepoInfo returns a repository information +func (g *GitlabDownloader) GetRepoInfo() (*base.Repository, error) { + gr, _, err := g.client.Projects.GetProject(g.repoID, nil, nil, gitlab.WithContext(g.ctx)) + if err != nil { + return nil, err + } + + var private bool + switch gr.Visibility { + case gitlab.InternalVisibility: + private = true + case gitlab.PrivateVisibility: + private = true + } + + var owner string + if gr.Owner == nil { + log.Trace("gr.Owner is nil, trying to get owner from Namespace") + if gr.Namespace != nil && gr.Namespace.Kind == "user" { + owner = gr.Namespace.Path + } + } else { + owner = gr.Owner.Username + } + + // convert gitlab repo to stand Repo + return &base.Repository{ + Owner: owner, + Name: gr.Name, + IsPrivate: private, + Description: gr.Description, + OriginalURL: gr.WebURL, + CloneURL: gr.HTTPURLToRepo, + DefaultBranch: gr.DefaultBranch, + }, nil +} + +// GetTopics return gitlab topics +func (g *GitlabDownloader) GetTopics() ([]string, error) { + gr, _, err := 
g.client.Projects.GetProject(g.repoID, nil, nil, gitlab.WithContext(g.ctx))
	if err != nil {
		return nil, err
	}
	return gr.TagList, err
}

// GetMilestones returns milestones
func (g *GitlabDownloader) GetMilestones() ([]*base.Milestone, error) {
	perPage := g.maxPerPage
	state := "all"
	milestones := make([]*base.Milestone, 0, perPage)
	for i := 1; ; i++ {
		ms, _, err := g.client.Milestones.ListMilestones(g.repoID, &gitlab.ListMilestonesOptions{
			State: &state,
			ListOptions: gitlab.ListOptions{
				Page:    i,
				PerPage: perPage,
			},
		}, nil, gitlab.WithContext(g.ctx))
		if err != nil {
			return nil, err
		}

		for _, m := range ms {
			desc := m.Description
			state := "open"
			var closedAt *time.Time
			if m.State != "" {
				state = m.State
				if state == "closed" {
					// GitLab does not expose a dedicated closed timestamp; the
					// last update time is the best available approximation.
					closedAt = m.UpdatedAt
				}
			}

			var deadline *time.Time
			if m.DueDate != nil {
				deadlineParsed, err := time.Parse("2006-01-02", m.DueDate.String())
				if err != nil {
					log.Trace("Error parsing Milestone DueDate time")
					deadline = nil
				} else {
					deadline = &deadlineParsed
				}
			}

			milestones = append(milestones, &base.Milestone{
				Title:       m.Title,
				Description: desc,
				Deadline:    deadline,
				State:       state,
				Created:     *m.CreatedAt,
				Updated:     m.UpdatedAt,
				Closed:      closedAt,
			})
		}
		if len(ms) < perPage {
			break
		}
	}
	return milestones, nil
}

// normalizeColor converts a CSS-style color ("#abc" or "#AABBCC") into a
// lowercase 6-digit hex string, returning "" for anything it cannot normalize.
func (g *GitlabDownloader) normalizeColor(val string) string {
	val = strings.TrimLeft(val, "#")
	val = strings.ToLower(val)
	if len(val) == 3 {
		c := []rune(val)
		val = fmt.Sprintf("%c%c%c%c%c%c", c[0], c[0], c[1], c[1], c[2], c[2])
	}
	if len(val) != 6 {
		return ""
	}
	return val
}

// GetLabels returns labels
func (g *GitlabDownloader) GetLabels() ([]*base.Label, error) {
	perPage := g.maxPerPage
	labels := make([]*base.Label, 0, perPage)
	for i := 1; ; i++ {
		ls, _, err := g.client.Labels.ListLabels(g.repoID, &gitlab.ListLabelsOptions{ListOptions: gitlab.ListOptions{
			Page:    i,
			PerPage: perPage,
		}}, nil, gitlab.WithContext(g.ctx))
		if err != nil {
			return nil, err
		}
		for _, label := range ls {
			// GitLab scoped labels ("scope::name") become Gitea exclusive
			// labels ("scope/name").
			baseLabel := &base.Label{
				Name:        strings.Replace(label.Name, "::", "/", 1),
				Color:       g.normalizeColor(label.Color),
				Description: label.Description,
				Exclusive:   strings.Contains(label.Name, "::"),
			}
			labels = append(labels, baseLabel)
		}
		if len(ls) < perPage {
			break
		}
	}
	return labels, nil
}

// convertGitlabRelease converts a GitLab release (including its asset links)
// into the intermediate migration format.
func (g *GitlabDownloader) convertGitlabRelease(rel *gitlab.Release) *base.Release {
	var zero int
	r := &base.Release{
		TagName:         rel.TagName,
		TargetCommitish: rel.Commit.ID,
		Name:            rel.Name,
		Body:            rel.Description,
		Created:         *rel.CreatedAt,
		PublisherID:     int64(rel.Author.ID),
		PublisherName:   rel.Author.Username,
	}

	httpClient := NewMigrationHTTPClient()

	for k, asset := range rel.Assets.Links {
		assetID := asset.ID // Don't optimize this, for closure we need a local variable
		// Assets.Links and Assets.Sources are independent lists: indexing
		// Sources with the Links index would panic when a release carries more
		// user-defined links than generated sources, so guard the lookup.
		var contentType *string
		if k < len(rel.Assets.Sources) {
			contentType = &rel.Assets.Sources[k].Format
		}
		r.Assets = append(r.Assets, &base.ReleaseAsset{
			ID:            int64(asset.ID),
			Name:          asset.Name,
			ContentType:   contentType,
			Size:          &zero,
			DownloadCount: &zero,
			DownloadFunc: func() (io.ReadCloser, error) {
				link, _, err := g.client.ReleaseLinks.GetReleaseLink(g.repoID, rel.TagName, assetID, gitlab.WithContext(g.ctx))
				if err != nil {
					return nil, err
				}

				if !hasBaseURL(link.URL, g.baseURL) {
					WarnAndNotice("Unexpected AssetURL for assetID[%d] in %s: %s", assetID, g, link.URL)
					return io.NopCloser(strings.NewReader(link.URL)), nil
				}

				req, err := http.NewRequest("GET", link.URL, nil)
				if err != nil {
					return nil, err
				}
				req = req.WithContext(g.ctx)
				resp, err := httpClient.Do(req)
				if err != nil {
					return nil, err
				}

				// resp.Body is closed by the uploader
				return resp.Body, nil
			},
		})
	}
	return r
}

// GetReleases returns releases
func (g *GitlabDownloader) GetReleases() ([]*base.Release, error) {
	perPage := g.maxPerPage
	releases :=
make([]*base.Release, 0, perPage)
	for i := 1; ; i++ {
		ls, _, err := g.client.Releases.ListReleases(g.repoID, &gitlab.ListReleasesOptions{
			ListOptions: gitlab.ListOptions{
				Page:    i,
				PerPage: perPage,
			},
		}, nil, gitlab.WithContext(g.ctx))
		if err != nil {
			return nil, err
		}

		for _, release := range ls {
			releases = append(releases, g.convertGitlabRelease(release))
		}
		if len(ls) < perPage {
			break
		}
	}
	return releases, nil
}

// gitlabIssueContext distinguishes issues from merge requests when fetching
// comments, since both flow through the same Commentable interface.
type gitlabIssueContext struct {
	IsMergeRequest bool
}

// GetIssues returns issues according start and limit
//
// Note: issue label description and colors are not supported by the go-gitlab library at this time
func (g *GitlabDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, error) {
	state := "all"
	sort := "asc"

	if perPage > g.maxPerPage {
		perPage = g.maxPerPage
	}

	opt := &gitlab.ListProjectIssuesOptions{
		State: &state,
		Sort:  &sort,
		ListOptions: gitlab.ListOptions{
			PerPage: perPage,
			Page:    page,
		},
	}

	allIssues := make([]*base.Issue, 0, perPage)

	issues, _, err := g.client.Issues.ListProjectIssues(g.repoID, opt, nil, gitlab.WithContext(g.ctx))
	if err != nil {
		return nil, false, fmt.Errorf("error while listing issues: %w", err)
	}
	for _, issue := range issues {
		labels := make([]*base.Label, 0, len(issue.Labels))
		for _, l := range issue.Labels {
			labels = append(labels, &base.Label{
				Name: strings.Replace(l, "::", "/", 1),
			})
		}

		var milestone string
		if issue.Milestone != nil {
			milestone = issue.Milestone.Title
		}

		// Collect every page of award emoji (reactions) for this issue.
		var reactions []*gitlab.AwardEmoji
		for awardPage := 1; ; awardPage++ {
			awards, _, err := g.client.AwardEmoji.ListIssueAwardEmoji(g.repoID, issue.IID, &gitlab.ListAwardEmojiOptions{Page: awardPage, PerPage: perPage}, gitlab.WithContext(g.ctx))
			if err != nil {
				return nil, false, fmt.Errorf("error while listing issue awards: %w", err)
			}

			reactions = append(reactions, awards...)

			if len(awards) < perPage {
				break
			}
		}

		allIssues = append(allIssues, &base.Issue{
			Title:        issue.Title,
			Number:       int64(issue.IID),
			PosterID:     int64(issue.Author.ID),
			PosterName:   issue.Author.Username,
			Content:      issue.Description,
			Milestone:    milestone,
			State:        issue.State,
			Created:      *issue.CreatedAt,
			Labels:       labels,
			Reactions:    g.awardsToReactions(reactions),
			Closed:       issue.ClosedAt,
			IsLocked:     issue.DiscussionLocked,
			Updated:      *issue.UpdatedAt,
			ForeignIndex: int64(issue.IID),
			Context:      gitlabIssueContext{IsMergeRequest: false},
		})

		// record the issue IID, to be used in GetPullRequests()
		g.iidResolver.recordIssueIID(issue.IID)
	}

	return allIssues, len(issues) < perPage, nil
}

// GetComments returns comments according issueNumber
// TODO: figure out how to transfer comment reactions
func (g *GitlabDownloader) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) {
	context, ok := commentable.GetContext().(gitlabIssueContext)
	if !ok {
		return nil, false, fmt.Errorf("unexpected context: %+v", commentable.GetContext())
	}

	allComments := make([]*base.Comment, 0, g.maxPerPage)

	page := 1

	for {
		var comments []*gitlab.Discussion
		var resp *gitlab.Response
		var err error
		if context.IsMergeRequest {
			comments, resp, err = g.client.Discussions.ListMergeRequestDiscussions(g.repoID, int(commentable.GetForeignIndex()), &gitlab.ListMergeRequestDiscussionsOptions{
				Page:    page,
				PerPage: g.maxPerPage,
			}, nil, gitlab.WithContext(g.ctx))
		} else {
			comments, resp, err = g.client.Discussions.ListIssueDiscussions(g.repoID, int(commentable.GetForeignIndex()), &gitlab.ListIssueDiscussionsOptions{
				Page:    page,
				PerPage: g.maxPerPage,
			}, nil, gitlab.WithContext(g.ctx))
		}

		if err != nil {
			return nil, false, fmt.Errorf("error while listing comments: %v %w", g.repoID, err)
		}
		for _, comment := range comments {
			for _, note := range comment.Notes {
				allComments =
append(allComments, g.convertNoteToComment(commentable.GetLocalIndex(), note)) + } + } + if resp.NextPage == 0 { + break + } + page = resp.NextPage + } + + page = 1 + for { + var stateEvents []*gitlab.StateEvent + var resp *gitlab.Response + var err error + if context.IsMergeRequest { + stateEvents, resp, err = g.client.ResourceStateEvents.ListMergeStateEvents(g.repoID, int(commentable.GetForeignIndex()), &gitlab.ListStateEventsOptions{ + ListOptions: gitlab.ListOptions{ + Page: page, + PerPage: g.maxPerPage, + }, + }, nil, gitlab.WithContext(g.ctx)) + } else { + stateEvents, resp, err = g.client.ResourceStateEvents.ListIssueStateEvents(g.repoID, int(commentable.GetForeignIndex()), &gitlab.ListStateEventsOptions{ + ListOptions: gitlab.ListOptions{ + Page: page, + PerPage: g.maxPerPage, + }, + }, nil, gitlab.WithContext(g.ctx)) + } + if err != nil { + return nil, false, fmt.Errorf("error while listing state events: %v %w", g.repoID, err) + } + + for _, stateEvent := range stateEvents { + comment := &base.Comment{ + IssueIndex: commentable.GetLocalIndex(), + Index: int64(stateEvent.ID), + PosterID: int64(stateEvent.User.ID), + PosterName: stateEvent.User.Username, + Content: "", + Created: *stateEvent.CreatedAt, + } + switch stateEvent.State { + case gitlab.ClosedEventType: + comment.CommentType = issues_model.CommentTypeClose.String() + case gitlab.MergedEventType: + comment.CommentType = issues_model.CommentTypeMergePull.String() + case gitlab.ReopenedEventType: + comment.CommentType = issues_model.CommentTypeReopen.String() + default: + // Ignore other event types + continue + } + allComments = append(allComments, comment) + } + + if resp.NextPage == 0 { + break + } + page = resp.NextPage + } + + return allComments, true, nil +} + +var targetBranchChangeRegexp = regexp.MustCompile("^changed target branch from `(.*?)` to `(.*?)`$") + +func (g *GitlabDownloader) convertNoteToComment(localIndex int64, note *gitlab.Note) *base.Comment { + comment := &base.Comment{ + 
IssueIndex: localIndex, + Index: int64(note.ID), + PosterID: int64(note.Author.ID), + PosterName: note.Author.Username, + PosterEmail: note.Author.Email, + Content: note.Body, + Created: *note.CreatedAt, + Meta: map[string]any{}, + } + + // Try to find the underlying event of system notes. + if note.System { + if match := targetBranchChangeRegexp.FindStringSubmatch(note.Body); match != nil { + comment.CommentType = issues_model.CommentTypeChangeTargetBranch.String() + comment.Meta["OldRef"] = match[1] + comment.Meta["NewRef"] = match[2] + } else if strings.HasPrefix(note.Body, "enabled an automatic merge") { + comment.CommentType = issues_model.CommentTypePRScheduledToAutoMerge.String() + } else if note.Body == "canceled the automatic merge" { + comment.CommentType = issues_model.CommentTypePRUnScheduledToAutoMerge.String() + } + } + + return comment +} + +// GetPullRequests returns pull requests according page and perPage +func (g *GitlabDownloader) GetPullRequests(page, perPage int) ([]*base.PullRequest, bool, error) { + if perPage > g.maxPerPage { + perPage = g.maxPerPage + } + + view := "simple" + opt := &gitlab.ListProjectMergeRequestsOptions{ + ListOptions: gitlab.ListOptions{ + PerPage: perPage, + Page: page, + }, + View: &view, + } + + allPRs := make([]*base.PullRequest, 0, perPage) + + prs, _, err := g.client.MergeRequests.ListProjectMergeRequests(g.repoID, opt, nil, gitlab.WithContext(g.ctx)) + if err != nil { + return nil, false, fmt.Errorf("error while listing merge requests: %w", err) + } + for _, simplePR := range prs { + // Load merge request again by itself, as not all fields are populated in the ListProjectMergeRequests endpoint. 
+ // See https://gitlab.com/gitlab-org/gitlab/-/issues/29620 + pr, _, err := g.client.MergeRequests.GetMergeRequest(g.repoID, simplePR.IID, nil) + if err != nil { + return nil, false, fmt.Errorf("error while loading merge request: %w", err) + } + + labels := make([]*base.Label, 0, len(pr.Labels)) + for _, l := range pr.Labels { + labels = append(labels, &base.Label{ + Name: strings.Replace(l, "::", "/", 1), + }) + } + + var merged bool + if pr.State == "merged" { + merged = true + pr.State = "closed" + } + + mergeTime := pr.MergedAt + if merged && pr.MergedAt == nil { + mergeTime = pr.UpdatedAt + } + + closeTime := pr.ClosedAt + if merged && pr.ClosedAt == nil { + closeTime = pr.UpdatedAt + } + + mergeCommitSHA := pr.MergeCommitSHA + if mergeCommitSHA == "" { + mergeCommitSHA = pr.SquashCommitSHA + } + + var locked bool + if pr.State == "locked" { + locked = true + } + + var milestone string + if pr.Milestone != nil { + milestone = pr.Milestone.Title + } + + var reactions []*gitlab.AwardEmoji + awardPage := 1 + for { + awards, _, err := g.client.AwardEmoji.ListMergeRequestAwardEmoji(g.repoID, pr.IID, &gitlab.ListAwardEmojiOptions{Page: awardPage, PerPage: perPage}, gitlab.WithContext(g.ctx)) + if err != nil { + return nil, false, fmt.Errorf("error while listing merge requests awards: %w", err) + } + + reactions = append(reactions, awards...) 
+ + if len(awards) < perPage { + break + } + + awardPage++ + } + + // Generate new PR Numbers by the known Issue Numbers, because they share the same number space in Gitea, but they are independent in Gitlab + newPRNumber := g.iidResolver.generatePullRequestNumber(pr.IID) + + allPRs = append(allPRs, &base.PullRequest{ + Title: pr.Title, + Number: newPRNumber, + PosterName: pr.Author.Username, + PosterID: int64(pr.Author.ID), + Content: pr.Description, + Milestone: milestone, + State: pr.State, + Created: *pr.CreatedAt, + Closed: closeTime, + Labels: labels, + Merged: merged, + MergeCommitSHA: mergeCommitSHA, + MergedTime: mergeTime, + IsLocked: locked, + Reactions: g.awardsToReactions(reactions), + Head: base.PullRequestBranch{ + Ref: pr.SourceBranch, + SHA: pr.SHA, + RepoName: g.repoName, + OwnerName: pr.Author.Username, + CloneURL: pr.WebURL, + }, + Base: base.PullRequestBranch{ + Ref: pr.TargetBranch, + SHA: pr.DiffRefs.BaseSha, + RepoName: g.repoName, + OwnerName: pr.Author.Username, + }, + PatchURL: pr.WebURL + ".patch", + ForeignIndex: int64(pr.IID), + Context: gitlabIssueContext{IsMergeRequest: true}, + }) + + // SECURITY: Ensure that the PR is safe + _ = CheckAndEnsureSafePR(allPRs[len(allPRs)-1], g.baseURL, g) + } + + return allPRs, len(prs) < perPage, nil +} + +// GetReviews returns pull requests review +func (g *GitlabDownloader) GetReviews(reviewable base.Reviewable) ([]*base.Review, error) { + approvals, resp, err := g.client.MergeRequestApprovals.GetConfiguration(g.repoID, int(reviewable.GetForeignIndex()), gitlab.WithContext(g.ctx)) + if err != nil { + if resp != nil && resp.StatusCode == http.StatusNotFound { + log.Error(fmt.Sprintf("GitlabDownloader: while migrating a error occurred: '%s'", err.Error())) + return []*base.Review{}, nil + } + return nil, err + } + + var createdAt time.Time + if approvals.CreatedAt != nil { + createdAt = *approvals.CreatedAt + } else if approvals.UpdatedAt != nil { + createdAt = *approvals.UpdatedAt + } else { + 
createdAt = time.Now() + } + + reviews := make([]*base.Review, 0, len(approvals.ApprovedBy)) + for _, user := range approvals.ApprovedBy { + reviews = append(reviews, &base.Review{ + IssueIndex: reviewable.GetLocalIndex(), + ReviewerID: int64(user.User.ID), + ReviewerName: user.User.Username, + CreatedAt: createdAt, + // All we get are approvals + State: base.ReviewStateApproved, + }) + } + + return reviews, nil +} + +func (g *GitlabDownloader) awardsToReactions(awards []*gitlab.AwardEmoji) []*base.Reaction { + result := make([]*base.Reaction, 0, len(awards)) + uniqCheck := make(container.Set[string]) + for _, award := range awards { + uid := fmt.Sprintf("%s%d", award.Name, award.User.ID) + if uniqCheck.Add(uid) { + result = append(result, &base.Reaction{ + UserID: int64(award.User.ID), + UserName: award.User.Username, + Content: award.Name, + }) + } + } + return result +} diff --git a/services/migrations/gitlab_test.go b/services/migrations/gitlab_test.go new file mode 100644 index 0000000..39edba3 --- /dev/null +++ b/services/migrations/gitlab_test.go @@ -0,0 +1,646 @@ +// Copyright 2019 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package migrations + +import ( + "context" + "fmt" + "net/http" + "net/http/httptest" + "os" + "strconv" + "testing" + "time" + + "code.gitea.io/gitea/models/unittest" + "code.gitea.io/gitea/modules/json" + base "code.gitea.io/gitea/modules/migration" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/xanzy/go-gitlab" +) + +func TestGitlabDownloadRepo(t *testing.T) { + // If a GitLab access token is provided, this test will make HTTP requests to the live gitlab.com instance. + // When doing so, the responses from gitlab.com will be saved as test data files. + // If no access token is available, those cached responses will be used instead. 
+ gitlabPersonalAccessToken := os.Getenv("GITLAB_READ_TOKEN") + fixturePath := "./testdata/gitlab/full_download" + server := unittest.NewMockWebServer(t, "https://gitlab.com", fixturePath, gitlabPersonalAccessToken != "") + defer server.Close() + + downloader, err := NewGitlabDownloader(context.Background(), server.URL, "forgejo/test_repo", "", "", gitlabPersonalAccessToken) + if err != nil { + t.Fatalf("NewGitlabDownloader is nil: %v", err) + } + repo, err := downloader.GetRepoInfo() + require.NoError(t, err) + // Repo Owner is blank in Gitlab Group repos + assertRepositoryEqual(t, &base.Repository{ + Name: "test_repo", + Owner: "", + Description: "Test repository for testing migration from gitlab to forgejo", + CloneURL: server.URL + "/forgejo/test_repo.git", + OriginalURL: server.URL + "/forgejo/test_repo", + DefaultBranch: "master", + }, repo) + + topics, err := downloader.GetTopics() + require.NoError(t, err) + assert.Len(t, topics, 2) + assert.EqualValues(t, []string{"migration", "test"}, topics) + + milestones, err := downloader.GetMilestones() + require.NoError(t, err) + assertMilestonesEqual(t, []*base.Milestone{ + { + Title: "1.0.0", + Created: time.Date(2024, 9, 3, 13, 53, 8, 516000000, time.UTC), + Updated: timePtr(time.Date(2024, 9, 3, 20, 3, 57, 786000000, time.UTC)), + Closed: timePtr(time.Date(2024, 9, 3, 20, 3, 57, 786000000, time.UTC)), + State: "closed", + }, + { + Title: "1.1.0", + Created: time.Date(2024, 9, 3, 13, 52, 48, 414000000, time.UTC), + Updated: timePtr(time.Date(2024, 9, 3, 14, 52, 14, 93000000, time.UTC)), + State: "active", + }, + }, milestones) + + labels, err := downloader.GetLabels() + require.NoError(t, err) + assertLabelsEqual(t, []*base.Label{ + { + Name: "bug", + Color: "d9534f", + }, + { + Name: "confirmed", + Color: "d9534f", + }, + { + Name: "critical", + Color: "d9534f", + }, + { + Name: "discussion", + Color: "428bca", + }, + { + Name: "documentation", + Color: "f0ad4e", + }, + { + Name: "duplicate", + Color: "7f8c8d", 
+ }, + { + Name: "enhancement", + Color: "5cb85c", + }, + { + Name: "suggestion", + Color: "428bca", + }, + { + Name: "support", + Color: "f0ad4e", + }, + { + Name: "test-scope/label0", + Color: "6699cc", + Description: "scoped label", + Exclusive: true, + }, + { + Name: "test-scope/label1", + Color: "dc143c", + Exclusive: true, + }, + }, labels) + + releases, err := downloader.GetReleases() + require.NoError(t, err) + assertReleasesEqual(t, []*base.Release{ + { + TagName: "v0.9.99", + TargetCommitish: "0720a3ec57c1f843568298117b874319e7deee75", + Name: "First Release", + Body: "A test release", + Created: time.Date(2024, 9, 3, 15, 1, 1, 513000000, time.UTC), + PublisherID: 548513, + PublisherName: "mkobel", + }, + }, releases) + + issues, isEnd, err := downloader.GetIssues(1, 2) + require.NoError(t, err) + assert.False(t, isEnd) + assertIssuesEqual(t, []*base.Issue{ + { + Number: 1, + Title: "Please add an animated gif icon to the merge button", + Content: "I just want the merge button to hurt my eyes a little. 
:stuck_out_tongue_closed_eyes:", + Milestone: "1.0.0", + PosterID: 548513, + PosterName: "mkobel", + State: "closed", + Created: time.Date(2024, 9, 3, 14, 42, 34, 924000000, time.UTC), + Updated: time.Date(2024, 9, 3, 14, 48, 43, 756000000, time.UTC), + Labels: []*base.Label{ + { + Name: "bug", + }, + { + Name: "discussion", + }, + }, + Reactions: []*base.Reaction{ + { + UserID: 548513, + UserName: "mkobel", + Content: "thumbsup", + }, + { + UserID: 548513, + UserName: "mkobel", + Content: "open_mouth", + }, + }, + Closed: timePtr(time.Date(2024, 9, 3, 14, 43, 10, 708000000, time.UTC)), + }, + { + Number: 2, + Title: "Test issue", + Content: "This is test issue 2, do not touch!", + Milestone: "1.0.0", + PosterID: 548513, + PosterName: "mkobel", + State: "closed", + Created: time.Date(2024, 9, 3, 14, 42, 35, 371000000, time.UTC), + Updated: time.Date(2024, 9, 3, 20, 3, 43, 536000000, time.UTC), + Labels: []*base.Label{ + { + Name: "duplicate", + }, + }, + Reactions: []*base.Reaction{ + { + UserID: 548513, + UserName: "mkobel", + Content: "thumbsup", + }, + { + UserID: 548513, + UserName: "mkobel", + Content: "thumbsdown", + }, + { + UserID: 548513, + UserName: "mkobel", + Content: "laughing", + }, + { + UserID: 548513, + UserName: "mkobel", + Content: "tada", + }, + { + UserID: 548513, + UserName: "mkobel", + Content: "confused", + }, + { + UserID: 548513, + UserName: "mkobel", + Content: "hearts", + }, + }, + Closed: timePtr(time.Date(2024, 9, 3, 14, 43, 10, 906000000, time.UTC)), + }, + }, issues) + + comments, _, err := downloader.GetComments(&base.Issue{ + Number: 2, + ForeignIndex: 2, + Context: gitlabIssueContext{IsMergeRequest: false}, + }) + require.NoError(t, err) + assertCommentsEqual(t, []*base.Comment{ + { + IssueIndex: 2, + PosterID: 548513, + PosterName: "mkobel", + Created: time.Date(2024, 9, 3, 14, 45, 20, 848000000, time.UTC), + Content: "This is a comment", + Reactions: nil, + }, + { + IssueIndex: 2, + PosterID: 548513, + PosterName: "mkobel", + 
Created: time.Date(2024, 9, 3, 14, 45, 30, 59000000, time.UTC), + Content: "A second comment", + Reactions: nil, + }, + { + IssueIndex: 2, + PosterID: 548513, + PosterName: "mkobel", + Created: time.Date(2024, 9, 3, 14, 43, 10, 947000000, time.UTC), + Content: "", + Reactions: nil, + CommentType: "close", + }, + }, comments) + + prs, _, err := downloader.GetPullRequests(1, 1) + require.NoError(t, err) + assertPullRequestsEqual(t, []*base.PullRequest{ + { + Number: 3, + Title: "Test branch", + Content: "do not merge this PR", + Milestone: "1.1.0", + PosterID: 2005797, + PosterName: "oliverpool", + State: "opened", + Created: time.Date(2024, 9, 3, 7, 57, 19, 866000000, time.UTC), + Labels: []*base.Label{ + { + Name: "test-scope/label0", + }, + { + Name: "test-scope/label1", + }, + }, + Reactions: []*base.Reaction{{ + UserID: 548513, + UserName: "mkobel", + Content: "thumbsup", + }, { + UserID: 548513, + UserName: "mkobel", + Content: "tada", + }}, + PatchURL: server.URL + "/forgejo/test_repo/-/merge_requests/1.patch", + Head: base.PullRequestBranch{ + Ref: "feat/test", + CloneURL: server.URL + "/forgejo/test_repo/-/merge_requests/1", + SHA: "9f733b96b98a4175276edf6a2e1231489c3bdd23", + RepoName: "test_repo", + OwnerName: "oliverpool", + }, + Base: base.PullRequestBranch{ + Ref: "master", + SHA: "c59c9b451acca9d106cc19d61d87afe3fbbb8b83", + OwnerName: "oliverpool", + RepoName: "test_repo", + }, + Closed: nil, + Merged: false, + MergedTime: nil, + MergeCommitSHA: "", + ForeignIndex: 2, + Context: gitlabIssueContext{IsMergeRequest: true}, + }, + }, prs) + + rvs, err := downloader.GetReviews(&base.PullRequest{Number: 1, ForeignIndex: 1}) + require.NoError(t, err) + assertReviewsEqual(t, []*base.Review{ + { + IssueIndex: 1, + ReviewerID: 548513, + ReviewerName: "mkobel", + CreatedAt: time.Date(2024, 9, 3, 7, 57, 19, 86600000, time.UTC), + State: "APPROVED", + }, + }, rvs) +} + +func TestGitlabSkippedIssueNumber(t *testing.T) { + // If a GitLab access token is provided, 
this test will make HTTP requests to the live gitlab.com instance. + // When doing so, the responses from gitlab.com will be saved as test data files. + // If no access token is available, those cached responses will be used instead. + gitlabPersonalAccessToken := os.Getenv("GITLAB_READ_TOKEN") + fixturePath := "./testdata/gitlab/skipped_issue_number" + server := unittest.NewMockWebServer(t, "https://gitlab.com", fixturePath, gitlabPersonalAccessToken != "") + defer server.Close() + + downloader, err := NewGitlabDownloader(context.Background(), server.URL, "troyengel/archbuild", "", "", gitlabPersonalAccessToken) + if err != nil { + t.Fatalf("NewGitlabDownloader is nil: %v", err) + } + repo, err := downloader.GetRepoInfo() + require.NoError(t, err) + assertRepositoryEqual(t, &base.Repository{ + Name: "archbuild", + Owner: "troyengel", + Description: "Arch packaging and build files", + CloneURL: server.URL + "/troyengel/archbuild.git", + OriginalURL: server.URL + "/troyengel/archbuild", + DefaultBranch: "master", + }, repo) + + issues, isEnd, err := downloader.GetIssues(1, 10) + require.NoError(t, err) + assert.True(t, isEnd) + + // the only issue in this repository has number 2 + assert.Len(t, issues, 1) + assert.EqualValues(t, 2, issues[0].Number) + assert.EqualValues(t, "vpn unlimited errors", issues[0].Title) + + prs, _, err := downloader.GetPullRequests(1, 10) + require.NoError(t, err) + // the only merge request in this repository has number 1, + // but we offset it by the maximum issue number so it becomes + // pull request 3 in Forgejo + assert.Len(t, prs, 1) + assert.EqualValues(t, 3, prs[0].Number) + assert.EqualValues(t, "Review", prs[0].Title) +} + +func gitlabClientMockSetup(t *testing.T) (*http.ServeMux, *httptest.Server, *gitlab.Client) { + // mux is the HTTP request multiplexer used with the test server. + mux := http.NewServeMux() + + // server is a test HTTP server used to provide mock API responses. 
+ server := httptest.NewServer(mux) + + // client is the Gitlab client being tested. + client, err := gitlab.NewClient("", gitlab.WithBaseURL(server.URL)) + if err != nil { + server.Close() + t.Fatalf("Failed to create client: %v", err) + } + + return mux, server, client +} + +func gitlabClientMockTeardown(server *httptest.Server) { + server.Close() +} + +type reviewTestCase struct { + repoID, prID, reviewerID int + reviewerName string + createdAt, updatedAt *time.Time + expectedCreatedAt time.Time +} + +func convertTestCase(t reviewTestCase) (func(w http.ResponseWriter, r *http.Request), base.Review) { + var updatedAtField string + if t.updatedAt == nil { + updatedAtField = "" + } else { + updatedAtField = `"updated_at": "` + t.updatedAt.Format(time.RFC3339) + `",` + } + + var createdAtField string + if t.createdAt == nil { + createdAtField = "" + } else { + createdAtField = `"created_at": "` + t.createdAt.Format(time.RFC3339) + `",` + } + + handler := func(w http.ResponseWriter, r *http.Request) { + fmt.Fprint(w, ` +{ + "id": 5, + "iid": `+strconv.Itoa(t.prID)+`, + "project_id": `+strconv.Itoa(t.repoID)+`, + "title": "Approvals API", + "description": "Test", + "state": "opened", + `+createdAtField+` + `+updatedAtField+` + "merge_status": "cannot_be_merged", + "approvals_required": 2, + "approvals_left": 1, + "approved_by": [ + { + "user": { + "name": "Administrator", + "username": "`+t.reviewerName+`", + "id": `+strconv.Itoa(t.reviewerID)+`, + "state": "active", + "avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon", + "web_url": "http://localhost:3000/root" + } + } + ] +}`) + } + review := base.Review{ + IssueIndex: int64(t.prID), + ReviewerID: int64(t.reviewerID), + ReviewerName: t.reviewerName, + CreatedAt: t.expectedCreatedAt, + State: "APPROVED", + } + + return handler, review +} + +func TestGitlabGetReviews(t *testing.T) { + mux, server, client := gitlabClientMockSetup(t) + defer 
gitlabClientMockTeardown(server) + + repoID := 1324 + + downloader := &GitlabDownloader{ + ctx: context.Background(), + client: client, + repoID: repoID, + } + + createdAt := time.Date(2020, 4, 19, 19, 24, 21, 0, time.UTC) + + for _, testCase := range []reviewTestCase{ + { + repoID: repoID, + prID: 1, + reviewerID: 801, + reviewerName: "someone1", + createdAt: nil, + updatedAt: &createdAt, + expectedCreatedAt: createdAt, + }, + { + repoID: repoID, + prID: 2, + reviewerID: 802, + reviewerName: "someone2", + createdAt: &createdAt, + updatedAt: nil, + expectedCreatedAt: createdAt, + }, + { + repoID: repoID, + prID: 3, + reviewerID: 803, + reviewerName: "someone3", + createdAt: nil, + updatedAt: nil, + expectedCreatedAt: time.Now(), + }, + } { + mock, review := convertTestCase(testCase) + mux.HandleFunc(fmt.Sprintf("/api/v4/projects/%d/merge_requests/%d/approvals", testCase.repoID, testCase.prID), mock) + + id := int64(testCase.prID) + rvs, err := downloader.GetReviews(&base.Issue{Number: id, ForeignIndex: id}) + require.NoError(t, err) + assertReviewsEqual(t, []*base.Review{&review}, rvs) + } +} + +func TestAwardsToReactions(t *testing.T) { + downloader := &GitlabDownloader{} + // yes gitlab can have duplicated reactions (https://gitlab.com/jaywink/socialhome/-/issues/24) + testResponse := ` +[ + { + "name": "thumbsup", + "user": { + "id": 1241334, + "username": "lafriks" + } + }, + { + "name": "thumbsup", + "user": { + "id": 1241334, + "username": "lafriks" + } + }, + { + "name": "thumbsup", + "user": { + "id": 4575606, + "username": "real6543" + } + } +] +` + var awards []*gitlab.AwardEmoji + require.NoError(t, json.Unmarshal([]byte(testResponse), &awards)) + + reactions := downloader.awardsToReactions(awards) + assert.EqualValues(t, []*base.Reaction{ + { + UserName: "lafriks", + UserID: 1241334, + Content: "thumbsup", + }, + { + UserName: "real6543", + UserID: 4575606, + Content: "thumbsup", + }, + }, reactions) +} + +func TestNoteToComment(t *testing.T) { + 
downloader := &GitlabDownloader{} + + now := time.Now() + makeTestNote := func(id int, body string, system bool) gitlab.Note { + return gitlab.Note{ + ID: id, + Author: struct { + ID int `json:"id"` + Username string `json:"username"` + Email string `json:"email"` + Name string `json:"name"` + State string `json:"state"` + AvatarURL string `json:"avatar_url"` + WebURL string `json:"web_url"` + }{ + ID: 72, + Email: "test@example.com", + Username: "test", + }, + Body: body, + CreatedAt: &now, + System: system, + } + } + notes := []gitlab.Note{ + makeTestNote(1, "This is a regular comment", false), + makeTestNote(2, "enabled an automatic merge for abcd1234", true), + makeTestNote(3, "changed target branch from `master` to `main`", true), + makeTestNote(4, "canceled the automatic merge", true), + } + comments := []base.Comment{{ + IssueIndex: 17, + Index: 1, + PosterID: 72, + PosterName: "test", + PosterEmail: "test@example.com", + CommentType: "", + Content: "This is a regular comment", + Created: now, + Meta: map[string]any{}, + }, { + IssueIndex: 17, + Index: 2, + PosterID: 72, + PosterName: "test", + PosterEmail: "test@example.com", + CommentType: "pull_scheduled_merge", + Content: "enabled an automatic merge for abcd1234", + Created: now, + Meta: map[string]any{}, + }, { + IssueIndex: 17, + Index: 3, + PosterID: 72, + PosterName: "test", + PosterEmail: "test@example.com", + CommentType: "change_target_branch", + Content: "changed target branch from `master` to `main`", + Created: now, + Meta: map[string]any{ + "OldRef": "master", + "NewRef": "main", + }, + }, { + IssueIndex: 17, + Index: 4, + PosterID: 72, + PosterName: "test", + PosterEmail: "test@example.com", + CommentType: "pull_cancel_scheduled_merge", + Content: "canceled the automatic merge", + Created: now, + Meta: map[string]any{}, + }} + + for i, note := range notes { + actualComment := *downloader.convertNoteToComment(17, ¬e) + assert.EqualValues(t, actualComment, comments[i]) + } +} + +func 
TestGitlabIIDResolver(t *testing.T) { + r := gitlabIIDResolver{} + r.recordIssueIID(1) + r.recordIssueIID(2) + r.recordIssueIID(3) + r.recordIssueIID(2) + assert.EqualValues(t, 4, r.generatePullRequestNumber(1)) + assert.EqualValues(t, 13, r.generatePullRequestNumber(10)) + + assert.Panics(t, func() { + r := gitlabIIDResolver{} + r.recordIssueIID(1) + assert.EqualValues(t, 2, r.generatePullRequestNumber(1)) + r.recordIssueIID(3) // the generation procedure has been started, it shouldn't accept any new issue IID, so it panics + }) +} diff --git a/services/migrations/gogs.go b/services/migrations/gogs.go new file mode 100644 index 0000000..b31d05f --- /dev/null +++ b/services/migrations/gogs.go @@ -0,0 +1,330 @@ +// Copyright 2019 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package migrations + +import ( + "context" + "fmt" + "net/http" + "net/url" + "strings" + "time" + + "code.gitea.io/gitea/modules/log" + base "code.gitea.io/gitea/modules/migration" + "code.gitea.io/gitea/modules/proxy" + "code.gitea.io/gitea/modules/structs" + + "github.com/gogs/go-gogs-client" +) + +var ( + _ base.Downloader = &GogsDownloader{} + _ base.DownloaderFactory = &GogsDownloaderFactory{} +) + +func init() { + RegisterDownloaderFactory(&GogsDownloaderFactory{}) +} + +// GogsDownloaderFactory defines a gogs downloader factory +type GogsDownloaderFactory struct{} + +// New returns a Downloader related to this factory according MigrateOptions +func (f *GogsDownloaderFactory) New(ctx context.Context, opts base.MigrateOptions) (base.Downloader, error) { + u, err := url.Parse(opts.CloneAddr) + if err != nil { + return nil, err + } + + repoNameSpace := strings.TrimSuffix(u.Path, ".git") + repoNameSpace = strings.Trim(repoNameSpace, "/") + + fields := strings.Split(repoNameSpace, "/") + numFields := len(fields) + if numFields < 2 { + return nil, fmt.Errorf("invalid path: %s", repoNameSpace) + } + + repoOwner := fields[numFields-2] + repoName := 
fields[numFields-1] + + u.Path = "" + u = u.JoinPath(fields[:numFields-2]...) + baseURL := u.String() + + log.Trace("Create gogs downloader. BaseURL: %s RepoOwner: %s RepoName: %s", baseURL, repoOwner, repoName) + return NewGogsDownloader(ctx, baseURL, opts.AuthUsername, opts.AuthPassword, opts.AuthToken, repoOwner, repoName), nil +} + +// GitServiceType returns the type of git service +func (f *GogsDownloaderFactory) GitServiceType() structs.GitServiceType { + return structs.GogsService +} + +// GogsDownloader implements a Downloader interface to get repository information +// from gogs via API +type GogsDownloader struct { + base.NullDownloader + ctx context.Context + client *gogs.Client + baseURL string + repoOwner string + repoName string + userName string + password string + openIssuesFinished bool + openIssuesPages int + transport http.RoundTripper +} + +// String implements Stringer +func (g *GogsDownloader) String() string { + return fmt.Sprintf("migration from gogs server %s %s/%s", g.baseURL, g.repoOwner, g.repoName) +} + +func (g *GogsDownloader) LogString() string { + if g == nil { + return "<GogsDownloader nil>" + } + return fmt.Sprintf("<GogsDownloader %s %s/%s>", g.baseURL, g.repoOwner, g.repoName) +} + +// SetContext set context +func (g *GogsDownloader) SetContext(ctx context.Context) { + g.ctx = ctx +} + +// NewGogsDownloader creates a gogs Downloader via gogs API +func NewGogsDownloader(ctx context.Context, baseURL, userName, password, token, repoOwner, repoName string) *GogsDownloader { + downloader := GogsDownloader{ + ctx: ctx, + baseURL: baseURL, + userName: userName, + password: password, + repoOwner: repoOwner, + repoName: repoName, + } + + var client *gogs.Client + if len(token) != 0 { + client = gogs.NewClient(baseURL, token) + downloader.userName = token + } else { + transport := NewMigrationHTTPTransport() + transport.Proxy = func(req *http.Request) (*url.URL, error) { + req.SetBasicAuth(userName, password) + return proxy.Proxy()(req) + 
} + downloader.transport = transport + + client = gogs.NewClient(baseURL, "") + client.SetHTTPClient(&http.Client{ + Transport: &downloader, + }) + } + + downloader.client = client + return &downloader +} + +// RoundTrip wraps the provided request within this downloader's context and passes it to our internal http.Transport. +// This implements http.RoundTripper and makes the gogs client requests cancellable even though it is not cancellable itself +func (g *GogsDownloader) RoundTrip(req *http.Request) (*http.Response, error) { + return g.transport.RoundTrip(req.WithContext(g.ctx)) +} + +// GetRepoInfo returns a repository information +func (g *GogsDownloader) GetRepoInfo() (*base.Repository, error) { + gr, err := g.client.GetRepo(g.repoOwner, g.repoName) + if err != nil { + return nil, err + } + + // convert gogs repo to stand Repo + return &base.Repository{ + Owner: g.repoOwner, + Name: g.repoName, + IsPrivate: gr.Private, + Description: gr.Description, + CloneURL: gr.CloneURL, + OriginalURL: gr.HTMLURL, + DefaultBranch: gr.DefaultBranch, + }, nil +} + +// GetMilestones returns milestones +func (g *GogsDownloader) GetMilestones() ([]*base.Milestone, error) { + perPage := 100 + milestones := make([]*base.Milestone, 0, perPage) + + ms, err := g.client.ListRepoMilestones(g.repoOwner, g.repoName) + if err != nil { + return nil, err + } + + for _, m := range ms { + milestones = append(milestones, &base.Milestone{ + Title: m.Title, + Description: m.Description, + Deadline: m.Deadline, + State: string(m.State), + Closed: m.Closed, + }) + } + + return milestones, nil +} + +// GetLabels returns labels +func (g *GogsDownloader) GetLabels() ([]*base.Label, error) { + perPage := 100 + labels := make([]*base.Label, 0, perPage) + ls, err := g.client.ListRepoLabels(g.repoOwner, g.repoName) + if err != nil { + return nil, err + } + + for _, label := range ls { + labels = append(labels, convertGogsLabel(label)) + } + + return labels, nil +} + +// GetIssues returns issues 
according start and limit, perPage is not supported +func (g *GogsDownloader) GetIssues(page, _ int) ([]*base.Issue, bool, error) { + var state string + if g.openIssuesFinished { + state = string(gogs.STATE_CLOSED) + page -= g.openIssuesPages + } else { + state = string(gogs.STATE_OPEN) + g.openIssuesPages = page + } + + issues, isEnd, err := g.getIssues(page, state) + if err != nil { + return nil, false, err + } + + if isEnd { + if g.openIssuesFinished { + return issues, true, nil + } + g.openIssuesFinished = true + } + + return issues, false, nil +} + +func (g *GogsDownloader) getIssues(page int, state string) ([]*base.Issue, bool, error) { + allIssues := make([]*base.Issue, 0, 10) + + issues, err := g.client.ListRepoIssues(g.repoOwner, g.repoName, gogs.ListIssueOption{ + Page: page, + State: state, + }) + if err != nil { + return nil, false, fmt.Errorf("error while listing repos: %w", err) + } + + for _, issue := range issues { + if issue.PullRequest != nil { + continue + } + allIssues = append(allIssues, convertGogsIssue(issue)) + } + + return allIssues, len(issues) == 0, nil +} + +// GetComments returns comments according issueNumber +func (g *GogsDownloader) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) { + allComments := make([]*base.Comment, 0, 100) + + comments, err := g.client.ListIssueComments(g.repoOwner, g.repoName, commentable.GetForeignIndex()) + if err != nil { + return nil, false, fmt.Errorf("error while listing repos: %w", err) + } + for _, comment := range comments { + if len(comment.Body) == 0 || comment.Poster == nil { + continue + } + allComments = append(allComments, &base.Comment{ + IssueIndex: commentable.GetLocalIndex(), + Index: comment.ID, + PosterID: comment.Poster.ID, + PosterName: comment.Poster.Login, + PosterEmail: comment.Poster.Email, + Content: comment.Body, + Created: comment.Created, + Updated: comment.Updated, + }) + } + + return allComments, true, nil +} + +// GetTopics return repository topics 
+func (g *GogsDownloader) GetTopics() ([]string, error) { + return []string{}, nil +} + +// FormatCloneURL add authentication into remote URLs +func (g *GogsDownloader) FormatCloneURL(opts MigrateOptions, remoteAddr string) (string, error) { + if len(opts.AuthToken) > 0 || len(opts.AuthUsername) > 0 { + u, err := url.Parse(remoteAddr) + if err != nil { + return "", err + } + if len(opts.AuthToken) != 0 { + u.User = url.UserPassword(opts.AuthToken, "") + } else { + u.User = url.UserPassword(opts.AuthUsername, opts.AuthPassword) + } + return u.String(), nil + } + return remoteAddr, nil +} + +func convertGogsIssue(issue *gogs.Issue) *base.Issue { + var milestone string + if issue.Milestone != nil { + milestone = issue.Milestone.Title + } + labels := make([]*base.Label, 0, len(issue.Labels)) + for _, l := range issue.Labels { + labels = append(labels, convertGogsLabel(l)) + } + + var closed *time.Time + if issue.State == gogs.STATE_CLOSED { + // gogs client haven't provide closed, so we use updated instead + closed = &issue.Updated + } + + return &base.Issue{ + Title: issue.Title, + Number: issue.Index, + PosterID: issue.Poster.ID, + PosterName: issue.Poster.Login, + PosterEmail: issue.Poster.Email, + Content: issue.Body, + Milestone: milestone, + State: string(issue.State), + Created: issue.Created, + Updated: issue.Updated, + Labels: labels, + Closed: closed, + ForeignIndex: issue.Index, + } +} + +func convertGogsLabel(label *gogs.Label) *base.Label { + return &base.Label{ + Name: label.Name, + Color: label.Color, + } +} diff --git a/services/migrations/gogs_test.go b/services/migrations/gogs_test.go new file mode 100644 index 0000000..6c511a2 --- /dev/null +++ b/services/migrations/gogs_test.go @@ -0,0 +1,224 @@ +// Copyright 2019 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package migrations + +import ( + "context" + "net/http" + "os" + "testing" + "time" + + base "code.gitea.io/gitea/modules/migration" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestGogsDownloadRepo(t *testing.T) { + // Skip tests if Gogs token is not found + gogsPersonalAccessToken := os.Getenv("GOGS_READ_TOKEN") + if len(gogsPersonalAccessToken) == 0 { + t.Skip("skipped test because GOGS_READ_TOKEN was not in the environment") + } + + resp, err := http.Get("https://try.gogs.io/lunnytest/TESTREPO") + if err != nil || resp.StatusCode/100 != 2 { + // skip and don't run test + t.Skipf("visit test repo failed, ignored") + return + } + + downloader := NewGogsDownloader(context.Background(), "https://try.gogs.io", "", "", gogsPersonalAccessToken, "lunnytest", "TESTREPO") + repo, err := downloader.GetRepoInfo() + require.NoError(t, err) + + assertRepositoryEqual(t, &base.Repository{ + Name: "TESTREPO", + Owner: "lunnytest", + Description: "", + CloneURL: "https://try.gogs.io/lunnytest/TESTREPO.git", + OriginalURL: "https://try.gogs.io/lunnytest/TESTREPO", + DefaultBranch: "master", + }, repo) + + milestones, err := downloader.GetMilestones() + require.NoError(t, err) + assertMilestonesEqual(t, []*base.Milestone{ + { + Title: "1.0", + State: "open", + }, + }, milestones) + + labels, err := downloader.GetLabels() + require.NoError(t, err) + assertLabelsEqual(t, []*base.Label{ + { + Name: "bug", + Color: "ee0701", + }, + { + Name: "duplicate", + Color: "cccccc", + }, + { + Name: "enhancement", + Color: "84b6eb", + }, + { + Name: "help wanted", + Color: "128a0c", + }, + { + Name: "invalid", + Color: "e6e6e6", + }, + { + Name: "question", + Color: "cc317c", + }, + { + Name: "wontfix", + Color: "ffffff", + }, + }, labels) + + // downloader.GetIssues() + issues, isEnd, err := downloader.GetIssues(1, 8) + require.NoError(t, err) + assert.False(t, isEnd) + assertIssuesEqual(t, []*base.Issue{ + { + 
Number: 1, + PosterID: 5331, + PosterName: "lunny", + PosterEmail: "xiaolunwen@gmail.com", + Title: "test", + Content: "test", + Milestone: "", + State: "open", + Created: time.Date(2019, 6, 11, 8, 16, 44, 0, time.UTC), + Updated: time.Date(2019, 10, 26, 11, 7, 2, 0, time.UTC), + Labels: []*base.Label{ + { + Name: "bug", + Color: "ee0701", + }, + }, + }, + }, issues) + + // downloader.GetComments() + comments, _, err := downloader.GetComments(&base.Issue{Number: 1, ForeignIndex: 1}) + require.NoError(t, err) + assertCommentsEqual(t, []*base.Comment{ + { + IssueIndex: 1, + PosterID: 5331, + PosterName: "lunny", + PosterEmail: "xiaolunwen@gmail.com", + Created: time.Date(2019, 6, 11, 8, 19, 50, 0, time.UTC), + Updated: time.Date(2019, 6, 11, 8, 19, 50, 0, time.UTC), + Content: "1111", + }, + { + IssueIndex: 1, + PosterID: 15822, + PosterName: "clacplouf", + PosterEmail: "test1234@dbn.re", + Created: time.Date(2019, 10, 26, 11, 7, 2, 0, time.UTC), + Updated: time.Date(2019, 10, 26, 11, 7, 2, 0, time.UTC), + Content: "88888888", + }, + }, comments) + + // downloader.GetPullRequests() + _, _, err = downloader.GetPullRequests(1, 3) + require.Error(t, err) +} + +func TestGogsDownloaderFactory_New(t *testing.T) { + tests := []struct { + name string + args base.MigrateOptions + baseURL string + repoOwner string + repoName string + wantErr bool + }{ + { + name: "Gogs_at_root", + args: base.MigrateOptions{ + CloneAddr: "https://git.example.com/user/repo.git", + AuthUsername: "username", + AuthPassword: "password", + AuthToken: "authtoken", + }, + baseURL: "https://git.example.com/", + repoOwner: "user", + repoName: "repo", + wantErr: false, + }, + { + name: "Gogs_at_sub_path", + args: base.MigrateOptions{ + CloneAddr: "https://git.example.com/subpath/user/repo.git", + AuthUsername: "username", + AuthPassword: "password", + AuthToken: "authtoken", + }, + baseURL: "https://git.example.com/subpath", + repoOwner: "user", + repoName: "repo", + wantErr: false, + }, + { + name: 
"Gogs_at_2nd_sub_path", + args: base.MigrateOptions{ + CloneAddr: "https://git.example.com/sub1/sub2/user/repo.git", + AuthUsername: "username", + AuthPassword: "password", + AuthToken: "authtoken", + }, + baseURL: "https://git.example.com/sub1/sub2", + repoOwner: "user", + repoName: "repo", + wantErr: false, + }, + { + name: "Gogs_URL_too_short", + args: base.MigrateOptions{ + CloneAddr: "https://git.example.com/repo.git", + AuthUsername: "username", + AuthPassword: "password", + AuthToken: "authtoken", + }, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + f := &GogsDownloaderFactory{} + opts := base.MigrateOptions{ + CloneAddr: tt.args.CloneAddr, + AuthUsername: tt.args.AuthUsername, + AuthPassword: tt.args.AuthPassword, + AuthToken: tt.args.AuthToken, + } + got, err := f.New(context.Background(), opts) + if (err != nil) != tt.wantErr { + t.Errorf("GogsDownloaderFactory.New() error = %v, wantErr %v", err, tt.wantErr) + return + } else if err != nil { + return + } + + assert.IsType(t, &GogsDownloader{}, got) + assert.EqualValues(t, tt.baseURL, got.(*GogsDownloader).baseURL) + assert.EqualValues(t, tt.repoOwner, got.(*GogsDownloader).repoOwner) + assert.EqualValues(t, tt.repoName, got.(*GogsDownloader).repoName) + }) + } +} diff --git a/services/migrations/http_client.go b/services/migrations/http_client.go new file mode 100644 index 0000000..0b997e0 --- /dev/null +++ b/services/migrations/http_client.go @@ -0,0 +1,29 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT

package migrations

import (
	"crypto/tls"
	"net/http"

	"code.gitea.io/gitea/modules/hostmatcher"
	"code.gitea.io/gitea/modules/proxy"
	"code.gitea.io/gitea/modules/setting"
)

// NewMigrationHTTPClient returns a HTTP client for migration.
// It wraps NewMigrationHTTPTransport, so every request made through it is
// subject to the same proxy settings and host allow/block filtering.
func NewMigrationHTTPClient() *http.Client {
	return &http.Client{
		Transport: NewMigrationHTTPTransport(),
	}
}

// NewMigrationHTTPTransport returns a HTTP transport for migration.
// TLS certificate verification is disabled only when the administrator has
// set setting.Migrations.SkipTLSVerify; outgoing dials are filtered through
// the package-level allowList/blockList host matchers via hostmatcher.
func NewMigrationHTTPTransport() *http.Transport {
	return &http.Transport{
		TLSClientConfig: &tls.Config{InsecureSkipVerify: setting.Migrations.SkipTLSVerify},
		Proxy:           proxy.Proxy(),
		DialContext:     hostmatcher.NewDialContext("migration", allowList, blockList, setting.Proxy.ProxyURLFixed),
	}
}
diff --git a/services/migrations/main_test.go b/services/migrations/main_test.go
new file mode 100644
index 0000000..d0ec6a3
--- /dev/null
+++ b/services/migrations/main_test.go
@@ -0,0 +1,266 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// Copyright 2018 Jonas Franz. All rights reserved.
+// SPDX-License-Identifier: MIT + +package migrations + +import ( + "testing" + "time" + + "code.gitea.io/gitea/models/unittest" + base "code.gitea.io/gitea/modules/migration" + + "github.com/stretchr/testify/assert" +) + +func TestMain(m *testing.M) { + unittest.MainTest(m) +} + +func timePtr(t time.Time) *time.Time { + return &t +} + +func assertTimeEqual(t *testing.T, expected, actual time.Time) { + assert.Equal(t, expected.UTC(), actual.UTC()) +} + +func assertTimePtrEqual(t *testing.T, expected, actual *time.Time) { + if expected == nil { + assert.Nil(t, actual) + } else { + assert.NotNil(t, actual) + assertTimeEqual(t, *expected, *actual) + } +} + +func assertCommentEqual(t *testing.T, expected, actual *base.Comment) { + assert.Equal(t, expected.IssueIndex, actual.IssueIndex) + assert.Equal(t, expected.PosterID, actual.PosterID) + assert.Equal(t, expected.PosterName, actual.PosterName) + assert.Equal(t, expected.PosterEmail, actual.PosterEmail) + assertTimeEqual(t, expected.Created, actual.Created) + assertTimeEqual(t, expected.Updated, actual.Updated) + assert.Equal(t, expected.Content, actual.Content) + assertReactionsEqual(t, expected.Reactions, actual.Reactions) +} + +func assertCommentsEqual(t *testing.T, expected, actual []*base.Comment) { + if assert.Len(t, actual, len(expected)) { + for i := range expected { + assertCommentEqual(t, expected[i], actual[i]) + } + } +} + +func assertLabelEqual(t *testing.T, expected, actual *base.Label) { + assert.Equal(t, expected.Name, actual.Name) + assert.Equal(t, expected.Exclusive, actual.Exclusive) + assert.Equal(t, expected.Color, actual.Color) + assert.Equal(t, expected.Description, actual.Description) +} + +func assertLabelsEqual(t *testing.T, expected, actual []*base.Label) { + if assert.Len(t, actual, len(expected)) { + for i := range expected { + assertLabelEqual(t, expected[i], actual[i]) + } + } +} + +func assertMilestoneEqual(t *testing.T, expected, actual *base.Milestone) { + assert.Equal(t, 
expected.Title, actual.Title) + assert.Equal(t, expected.Description, actual.Description) + assertTimePtrEqual(t, expected.Deadline, actual.Deadline) + assertTimeEqual(t, expected.Created, actual.Created) + assertTimePtrEqual(t, expected.Updated, actual.Updated) + assertTimePtrEqual(t, expected.Closed, actual.Closed) + assert.Equal(t, expected.State, actual.State) +} + +func assertMilestonesEqual(t *testing.T, expected, actual []*base.Milestone) { + if assert.Len(t, actual, len(expected)) { + for i := range expected { + assertMilestoneEqual(t, expected[i], actual[i]) + } + } +} + +func assertIssueEqual(t *testing.T, expected, actual *base.Issue) { + assert.Equal(t, expected.Number, actual.Number) + assert.Equal(t, expected.PosterID, actual.PosterID) + assert.Equal(t, expected.PosterName, actual.PosterName) + assert.Equal(t, expected.PosterEmail, actual.PosterEmail) + assert.Equal(t, expected.Title, actual.Title) + assert.Equal(t, expected.Content, actual.Content) + assert.Equal(t, expected.Ref, actual.Ref) + assert.Equal(t, expected.Milestone, actual.Milestone) + assert.Equal(t, expected.State, actual.State) + assert.Equal(t, expected.IsLocked, actual.IsLocked) + assertTimeEqual(t, expected.Created, actual.Created) + assertTimeEqual(t, expected.Updated, actual.Updated) + assertTimePtrEqual(t, expected.Closed, actual.Closed) + assertLabelsEqual(t, expected.Labels, actual.Labels) + assertReactionsEqual(t, expected.Reactions, actual.Reactions) + assert.ElementsMatch(t, expected.Assignees, actual.Assignees) +} + +func assertIssuesEqual(t *testing.T, expected, actual []*base.Issue) { + if assert.Len(t, actual, len(expected)) { + for i := range expected { + assertIssueEqual(t, expected[i], actual[i]) + } + } +} + +func assertPullRequestEqual(t *testing.T, expected, actual *base.PullRequest) { + assert.Equal(t, expected.Number, actual.Number) + assert.Equal(t, expected.Title, actual.Title) + assert.Equal(t, expected.PosterID, actual.PosterID) + assert.Equal(t, 
expected.PosterName, actual.PosterName) + assert.Equal(t, expected.PosterEmail, actual.PosterEmail) + assert.Equal(t, expected.Content, actual.Content) + assert.Equal(t, expected.Milestone, actual.Milestone) + assert.Equal(t, expected.State, actual.State) + assertTimeEqual(t, expected.Created, actual.Created) + assertTimeEqual(t, expected.Updated, actual.Updated) + assertTimePtrEqual(t, expected.Closed, actual.Closed) + assertLabelsEqual(t, expected.Labels, actual.Labels) + assert.Equal(t, expected.PatchURL, actual.PatchURL) + assert.Equal(t, expected.Merged, actual.Merged) + assertTimePtrEqual(t, expected.MergedTime, actual.MergedTime) + assert.Equal(t, expected.MergeCommitSHA, actual.MergeCommitSHA) + assertPullRequestBranchEqual(t, expected.Head, actual.Head) + assertPullRequestBranchEqual(t, expected.Base, actual.Base) + assert.ElementsMatch(t, expected.Assignees, actual.Assignees) + assert.Equal(t, expected.IsLocked, actual.IsLocked) + assertReactionsEqual(t, expected.Reactions, actual.Reactions) +} + +func assertPullRequestsEqual(t *testing.T, expected, actual []*base.PullRequest) { + if assert.Len(t, actual, len(expected)) { + for i := range expected { + assertPullRequestEqual(t, expected[i], actual[i]) + } + } +} + +func assertPullRequestBranchEqual(t *testing.T, expected, actual base.PullRequestBranch) { + assert.Equal(t, expected.CloneURL, actual.CloneURL) + assert.Equal(t, expected.Ref, actual.Ref) + assert.Equal(t, expected.SHA, actual.SHA) + assert.Equal(t, expected.RepoName, actual.RepoName) + assert.Equal(t, expected.OwnerName, actual.OwnerName) +} + +func assertReactionEqual(t *testing.T, expected, actual *base.Reaction) { + assert.Equal(t, expected.UserID, actual.UserID) + assert.Equal(t, expected.UserName, actual.UserName) + assert.Equal(t, expected.Content, actual.Content) +} + +func assertReactionsEqual(t *testing.T, expected, actual []*base.Reaction) { + if assert.Len(t, actual, len(expected)) { + for i := range expected { + 
assertReactionEqual(t, expected[i], actual[i]) + } + } +} + +func assertReleaseAssetEqual(t *testing.T, expected, actual *base.ReleaseAsset) { + assert.Equal(t, expected.ID, actual.ID) + assert.Equal(t, expected.Name, actual.Name) + assert.Equal(t, expected.ContentType, actual.ContentType) + assert.Equal(t, expected.Size, actual.Size) + assert.Equal(t, expected.DownloadCount, actual.DownloadCount) + assertTimeEqual(t, expected.Created, actual.Created) + assertTimeEqual(t, expected.Updated, actual.Updated) + assert.Equal(t, expected.DownloadURL, actual.DownloadURL) +} + +func assertReleaseAssetsEqual(t *testing.T, expected, actual []*base.ReleaseAsset) { + if assert.Len(t, actual, len(expected)) { + for i := range expected { + assertReleaseAssetEqual(t, expected[i], actual[i]) + } + } +} + +func assertReleaseEqual(t *testing.T, expected, actual *base.Release) { + assert.Equal(t, expected.TagName, actual.TagName) + assert.Equal(t, expected.TargetCommitish, actual.TargetCommitish) + assert.Equal(t, expected.Name, actual.Name) + assert.Equal(t, expected.Body, actual.Body) + assert.Equal(t, expected.Draft, actual.Draft) + assert.Equal(t, expected.Prerelease, actual.Prerelease) + assert.Equal(t, expected.PublisherID, actual.PublisherID) + assert.Equal(t, expected.PublisherName, actual.PublisherName) + assert.Equal(t, expected.PublisherEmail, actual.PublisherEmail) + assertReleaseAssetsEqual(t, expected.Assets, actual.Assets) + assertTimeEqual(t, expected.Created, actual.Created) + assertTimeEqual(t, expected.Published, actual.Published) +} + +func assertReleasesEqual(t *testing.T, expected, actual []*base.Release) { + if assert.Len(t, actual, len(expected)) { + for i := range expected { + assertReleaseEqual(t, expected[i], actual[i]) + } + } +} + +func assertRepositoryEqual(t *testing.T, expected, actual *base.Repository) { + assert.Equal(t, expected.Name, actual.Name) + assert.Equal(t, expected.Owner, actual.Owner) + assert.Equal(t, expected.IsPrivate, actual.IsPrivate) 
+ assert.Equal(t, expected.IsMirror, actual.IsMirror) + assert.Equal(t, expected.Description, actual.Description) + assert.Equal(t, expected.CloneURL, actual.CloneURL) + assert.Equal(t, expected.OriginalURL, actual.OriginalURL) + assert.Equal(t, expected.DefaultBranch, actual.DefaultBranch) +} + +func assertReviewEqual(t *testing.T, expected, actual *base.Review) { + assert.Equal(t, expected.ID, actual.ID, "ID") + assert.Equal(t, expected.IssueIndex, actual.IssueIndex, "IsssueIndex") + assert.Equal(t, expected.ReviewerID, actual.ReviewerID, "ReviewerID") + assert.Equal(t, expected.ReviewerName, actual.ReviewerName, "ReviewerName") + assert.Equal(t, expected.Official, actual.Official, "Official") + assert.Equal(t, expected.CommitID, actual.CommitID, "CommitID") + assert.Equal(t, expected.Content, actual.Content, "Content") + assert.WithinDuration(t, expected.CreatedAt, actual.CreatedAt, 10*time.Second) + assert.Equal(t, expected.State, actual.State, "State") + assertReviewCommentsEqual(t, expected.Comments, actual.Comments) +} + +func assertReviewsEqual(t *testing.T, expected, actual []*base.Review) { + if assert.Len(t, actual, len(expected)) { + for i := range expected { + assertReviewEqual(t, expected[i], actual[i]) + } + } +} + +func assertReviewCommentEqual(t *testing.T, expected, actual *base.ReviewComment) { + assert.Equal(t, expected.ID, actual.ID) + assert.Equal(t, expected.InReplyTo, actual.InReplyTo) + assert.Equal(t, expected.Content, actual.Content) + assert.Equal(t, expected.TreePath, actual.TreePath) + assert.Equal(t, expected.DiffHunk, actual.DiffHunk) + assert.Equal(t, expected.Position, actual.Position) + assert.Equal(t, expected.Line, actual.Line) + assert.Equal(t, expected.CommitID, actual.CommitID) + assert.Equal(t, expected.PosterID, actual.PosterID) + assertReactionsEqual(t, expected.Reactions, actual.Reactions) + assertTimeEqual(t, expected.CreatedAt, actual.CreatedAt) + assertTimeEqual(t, expected.UpdatedAt, actual.UpdatedAt) +} + +func 
assertReviewCommentsEqual(t *testing.T, expected, actual []*base.ReviewComment) { + if assert.Len(t, actual, len(expected)) { + for i := range expected { + assertReviewCommentEqual(t, expected[i], actual[i]) + } + } +} diff --git a/services/migrations/migrate.go b/services/migrations/migrate.go new file mode 100644 index 0000000..29b815d --- /dev/null +++ b/services/migrations/migrate.go @@ -0,0 +1,510 @@ +// Copyright 2019 The Gitea Authors. All rights reserved. +// Copyright 2018 Jonas Franz. All rights reserved. +// SPDX-License-Identifier: MIT + +package migrations + +import ( + "context" + "fmt" + "net" + "net/url" + "path/filepath" + "strings" + + "code.gitea.io/gitea/models" + repo_model "code.gitea.io/gitea/models/repo" + system_model "code.gitea.io/gitea/models/system" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/hostmatcher" + "code.gitea.io/gitea/modules/log" + base "code.gitea.io/gitea/modules/migration" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/util" +) + +// MigrateOptions is equal to base.MigrateOptions +type MigrateOptions = base.MigrateOptions + +var ( + factories []base.DownloaderFactory + + allowList *hostmatcher.HostMatchList + blockList *hostmatcher.HostMatchList +) + +// RegisterDownloaderFactory registers a downloader factory +func RegisterDownloaderFactory(factory base.DownloaderFactory) { + factories = append(factories, factory) +} + +// IsMigrateURLAllowed checks if an URL is allowed to be migrated from +func IsMigrateURLAllowed(remoteURL string, doer *user_model.User) error { + // Remote address can be HTTP/HTTPS/Git URL or local path. 
+ u, err := url.Parse(remoteURL) + if err != nil { + return &models.ErrInvalidCloneAddr{IsURLError: true, Host: remoteURL} + } + + if u.Scheme == "file" || u.Scheme == "" { + if !doer.CanImportLocal() { + return &models.ErrInvalidCloneAddr{Host: "<LOCAL_FILESYSTEM>", IsPermissionDenied: true, LocalPath: true} + } + isAbs := filepath.IsAbs(u.Host + u.Path) + if !isAbs { + return &models.ErrInvalidCloneAddr{Host: "<LOCAL_FILESYSTEM>", IsInvalidPath: true, LocalPath: true} + } + isDir, err := util.IsDir(u.Host + u.Path) + if err != nil { + log.Error("Unable to check if %s is a directory: %v", u.Host+u.Path, err) + return err + } + if !isDir { + return &models.ErrInvalidCloneAddr{Host: "<LOCAL_FILESYSTEM>", IsInvalidPath: true, LocalPath: true} + } + + return nil + } + + if u.Scheme == "git" && u.Port() != "" && (strings.Contains(remoteURL, "%0d") || strings.Contains(remoteURL, "%0a")) { + return &models.ErrInvalidCloneAddr{Host: u.Host, IsURLError: true} + } + + if u.Opaque != "" || u.Scheme != "" && u.Scheme != "http" && u.Scheme != "https" && u.Scheme != "git" && u.Scheme != "ssh" { + return &models.ErrInvalidCloneAddr{Host: u.Host, IsProtocolInvalid: true, IsPermissionDenied: true, IsURLError: true} + } + + hostName, _, err := net.SplitHostPort(u.Host) + if err != nil { + // u.Host can be "host" or "host:port" + err = nil //nolint + hostName = u.Host + } + + // some users only use proxy, there is no DNS resolver. 
it's safe to ignore the LookupIP error
	addrList, _ := net.LookupIP(hostName)
	return checkByAllowBlockList(hostName, addrList)
}

// checkByAllowBlockList validates hostName and its resolved addresses against
// the package-level allow/block host match lists. The block list always wins;
// if an allow list is configured, the host (or one of its IPs) must also match it.
func checkByAllowBlockList(hostName string, addrList []net.IP) error {
	var ipAllowed bool
	var ipBlocked bool
	for _, addr := range addrList {
		ipAllowed = ipAllowed || allowList.MatchIPAddr(addr)
		ipBlocked = ipBlocked || blockList.MatchIPAddr(addr)
	}
	var blockedError error
	if blockList.MatchHostName(hostName) || ipBlocked {
		blockedError = &models.ErrInvalidCloneAddr{Host: hostName, IsPermissionDenied: true}
	}
	// if we have an allow-list, check the allow-list before return to get the more accurate error
	if !allowList.IsEmpty() {
		if !allowList.MatchHostName(hostName) && !ipAllowed {
			return &models.ErrInvalidCloneAddr{Host: hostName, IsPermissionDenied: true}
		}
	}
	// otherwise, we always follow the blocked list
	return blockedError
}

// MigrateRepository migrates a repository according to MigrateOptions.
// On failure the partially-created repository is rolled back and a system
// notice is recorded for the administrator.
func MigrateRepository(ctx context.Context, doer *user_model.User, ownerName string, opts base.MigrateOptions, messenger base.Messenger) (*repo_model.Repository, error) {
	err := IsMigrateURLAllowed(opts.CloneAddr, doer)
	if err != nil {
		return nil, err
	}
	if opts.LFS && len(opts.LFSEndpoint) > 0 {
		err := IsMigrateURLAllowed(opts.LFSEndpoint, doer)
		if err != nil {
			return nil, err
		}
	}
	downloader, err := newDownloader(ctx, ownerName, opts)
	if err != nil {
		return nil, err
	}

	uploader := NewGiteaLocalUploader(ctx, doer, ownerName, opts.RepoName)
	uploader.gitServiceType = opts.GitServiceType

	if err := migrateRepository(ctx, doer, downloader, uploader, opts, messenger); err != nil {
		if err1 := uploader.Rollback(); err1 != nil {
			log.Error("rollback failed: %v", err1)
		}
		// FIX: message misspelled "respotiry" and lacked a %v verb for err2
		// (log.Error takes a printf-style format string).
		if err2 := system_model.CreateRepositoryNotice(fmt.Sprintf("Migrate repository from %s failed: %v", opts.OriginalURL, err)); err2 != nil {
			log.Error("create repository notice failed: %v", err2)
		}
		return nil, err
	}
	return uploader.repo, nil
}

// getFactoryFromServiceType returns the registered downloader factory for
// serviceType, or nil when no factory matches.
func getFactoryFromServiceType(serviceType structs.GitServiceType) base.DownloaderFactory {
	for _, factory := range factories {
		if factory.GitServiceType() == serviceType {
			return factory
		}
	}
	return nil
}

// newDownloader picks a service-specific downloader for opts, falling back to
// a plain git downloader (git data + wiki only) when no factory matches, and
// wraps the result in a retry downloader when retries are configured.
func newDownloader(ctx context.Context, ownerName string, opts base.MigrateOptions) (base.Downloader, error) {
	var (
		downloader base.Downloader
		err        error
	)

	if factory := getFactoryFromServiceType(opts.GitServiceType); factory != nil {
		downloader, err = factory.New(ctx, opts)
		if err != nil {
			return nil, err
		}
	}

	if downloader == nil {
		// plain git migration cannot carry issues/PRs/etc., only the wiki
		opts.Wiki = true
		opts.Milestones = false
		opts.Labels = false
		opts.Releases = false
		opts.Comments = false
		opts.Issues = false
		opts.PullRequests = false
		downloader = NewPlainGitDownloader(ownerName, opts.RepoName, opts.CloneAddr)
		log.Trace("Will migrate from git: %s", opts.OriginalURL)
	}

	if setting.Migrations.MaxAttempts > 1 {
		downloader = base.NewRetryDownloader(ctx, downloader, setting.Migrations.MaxAttempts, setting.Migrations.RetryBackoff)
	}
	return downloader, nil
}

// migrateRepository will download information and then upload it to Uploader, this is a simple
// process for small repository.
// For a big repository, save all the data to disk
// before upload is better
func migrateRepository(_ context.Context, doer *user_model.User, downloader base.Downloader, uploader base.Uploader, opts base.MigrateOptions, messenger base.Messenger) error {
	if messenger == nil {
		messenger = base.NilMessenger
	}

	repo, err := downloader.GetRepoInfo()
	if err != nil {
		if !base.IsErrNotSupported(err) {
			return err
		}
		log.Info("migrating repo infos is not supported, ignored")
	}
	// NOTE(review): when GetRepoInfo returns an ErrNotSupported error, repo is nil
	// here and the assignments below would panic — confirm every downloader that
	// reports not-supported still returns a non-nil repo.
	repo.IsPrivate = opts.Private
	repo.IsMirror = opts.Mirror
	if opts.Description != "" {
		repo.Description = opts.Description
	}
	if repo.CloneURL, err = downloader.FormatCloneURL(opts, repo.CloneURL); err != nil {
		return err
	}

	// SECURITY: If the downloader is not a RepositoryRestorer then we need to recheck the CloneURL
	if _, ok := downloader.(*RepositoryRestorer); !ok {
		// Now the clone URL can be rewritten by the downloader so we must recheck
		if err := IsMigrateURLAllowed(repo.CloneURL, doer); err != nil {
			return err
		}

		// SECURITY: Ensure that we haven't been redirected from an external to a local filesystem
		// Now we know all of these must parse
		cloneAddrURL, _ := url.Parse(opts.CloneAddr)
		cloneURL, _ := url.Parse(repo.CloneURL)

		if cloneURL.Scheme == "file" || cloneURL.Scheme == "" {
			if cloneAddrURL.Scheme != "file" && cloneAddrURL.Scheme != "" {
				return fmt.Errorf("repo info has changed from external to local filesystem")
			}
		}

		// We don't actually need to check the OriginalURL as it isn't used anywhere
	}

	log.Trace("migrating git data from %s", repo.CloneURL)
	messenger("repo.migrate.migrating_git")
	if err = uploader.CreateRepo(repo, opts); err != nil {
		return err
	}
	defer uploader.Close()

	log.Trace("migrating topics")
	messenger("repo.migrate.migrating_topics")
	topics, err := downloader.GetTopics()
	if err != nil {
		if !base.IsErrNotSupported(err) {
			return err
		}
		log.Warn("migrating topics is not supported, ignored")
	}
	if len(topics) != 0 {
		if err = uploader.CreateTopics(topics...); err != nil {
			return err
		}
	}

	// Each optional unit below follows the same pattern: fetch everything from the
	// downloader, then insert in batches capped by the uploader's max batch size.
	if opts.Milestones {
		log.Trace("migrating milestones")
		messenger("repo.migrate.migrating_milestones")
		milestones, err := downloader.GetMilestones()
		if err != nil {
			if !base.IsErrNotSupported(err) {
				return err
			}
			log.Warn("migrating milestones is not supported, ignored")
		}
		msBatchSize := uploader.MaxBatchInsertSize("milestone")
		for len(milestones) > 0 {
			if len(milestones) < msBatchSize {
				msBatchSize = len(milestones)
			}

			if err := uploader.CreateMilestones(milestones[:msBatchSize]...); err != nil {
				return err
			}
			milestones = milestones[msBatchSize:]
		}
	}

	if opts.Labels {
		log.Trace("migrating labels")
		messenger("repo.migrate.migrating_labels")
		labels, err := downloader.GetLabels()
		if err != nil {
			if !base.IsErrNotSupported(err) {
				return err
			}
			log.Warn("migrating labels is not supported, ignored")
		}

		lbBatchSize := uploader.MaxBatchInsertSize("label")
		for len(labels) > 0 {
			if len(labels) < lbBatchSize {
				lbBatchSize = len(labels)
			}

			if err := uploader.CreateLabels(labels[:lbBatchSize]...); err != nil {
				return err
			}
			labels = labels[lbBatchSize:]
		}
	}

	if opts.Releases {
		log.Trace("migrating releases")
		messenger("repo.migrate.migrating_releases")
		releases, err := downloader.GetReleases()
		if err != nil {
			if !base.IsErrNotSupported(err) {
				return err
			}
			log.Warn("migrating releases is not supported, ignored")
		}

		relBatchSize := uploader.MaxBatchInsertSize("release")
		for len(releases) > 0 {
			if len(releases) < relBatchSize {
				relBatchSize = len(releases)
			}

			if err = uploader.CreateReleases(releases[:relBatchSize]...); err != nil {
				return err
			}
			releases = releases[relBatchSize:]
		}

		// Once all releases (if any) are inserted, sync any remaining non-release tags
		if err = uploader.SyncTags(); err != nil {
			return err
		}
	}

	var (
		commentBatchSize = uploader.MaxBatchInsertSize("comment")
		reviewBatchSize  = uploader.MaxBatchInsertSize("review")
	)

	// When the downloader can return every comment of the repository in one pass,
	// per-issue / per-PR comment fetching below is skipped in favour of the
	// GetAllComments loop at the end.
	supportAllComments := downloader.SupportGetRepoComments()

	if opts.Issues {
		log.Trace("migrating issues and comments")
		messenger("repo.migrate.migrating_issues")
		issueBatchSize := uploader.MaxBatchInsertSize("issue")

		for i := 1; ; i++ {
			issues, isEnd, err := downloader.GetIssues(i, issueBatchSize)
			if err != nil {
				if !base.IsErrNotSupported(err) {
					return err
				}
				log.Warn("migrating issues is not supported, ignored")
				break
			}

			if err := uploader.CreateIssues(issues...); err != nil {
				return err
			}

			if opts.Comments && !supportAllComments {
				allComments := make([]*base.Comment, 0, commentBatchSize)
				for _, issue := range issues {
					log.Trace("migrating issue %d's comments", issue.Number)
					comments, _, err := downloader.GetComments(issue)
					if err != nil {
						if !base.IsErrNotSupported(err) {
							return err
						}
						log.Warn("migrating comments is not supported, ignored")
					}

					allComments = append(allComments, comments...)

					if len(allComments) >= commentBatchSize {
						if err = uploader.CreateComments(allComments[:commentBatchSize]...); err != nil {
							return err
						}

						allComments = allComments[commentBatchSize:]
					}
				}

				if len(allComments) > 0 {
					if err = uploader.CreateComments(allComments...); err != nil {
						return err
					}
				}
			}

			if isEnd {
				break
			}
		}
	}

	if opts.PullRequests {
		log.Trace("migrating pull requests and comments")
		messenger("repo.migrate.migrating_pulls")
		prBatchSize := uploader.MaxBatchInsertSize("pullrequest")
		for i := 1; ; i++ {
			prs, isEnd, err := downloader.GetPullRequests(i, prBatchSize)
			if err != nil {
				if !base.IsErrNotSupported(err) {
					return err
				}
				log.Warn("migrating pull requests is not supported, ignored")
				break
			}

			if err := uploader.CreatePullRequests(prs...); err != nil {
				return err
			}

			if opts.Comments {
				if !supportAllComments {
					// plain comments
					allComments := make([]*base.Comment, 0, commentBatchSize)
					for _, pr := range prs {
						log.Trace("migrating pull request %d's comments", pr.Number)
						comments, _, err := downloader.GetComments(pr)
						if err != nil {
							if !base.IsErrNotSupported(err) {
								return err
							}
							log.Warn("migrating comments is not supported, ignored")
						}

						allComments = append(allComments, comments...)

						if len(allComments) >= commentBatchSize {
							if err = uploader.CreateComments(allComments[:commentBatchSize]...); err != nil {
								return err
							}
							allComments = allComments[commentBatchSize:]
						}
					}
					if len(allComments) > 0 {
						if err = uploader.CreateComments(allComments...); err != nil {
							return err
						}
					}
				}

				// migrate reviews
				allReviews := make([]*base.Review, 0, reviewBatchSize)
				for _, pr := range prs {
					reviews, err := downloader.GetReviews(pr)
					if err != nil {
						if !base.IsErrNotSupported(err) {
							return err
						}
						log.Warn("migrating reviews is not supported, ignored")
						break
					}

					allReviews = append(allReviews, reviews...)

					if len(allReviews) >= reviewBatchSize {
						if err = uploader.CreateReviews(allReviews[:reviewBatchSize]...); err != nil {
							return err
						}
						allReviews = allReviews[reviewBatchSize:]
					}
				}
				if len(allReviews) > 0 {
					if err = uploader.CreateReviews(allReviews...); err != nil {
						return err
					}
				}
			}

			if isEnd {
				break
			}
		}
	}

	if opts.Comments && supportAllComments {
		log.Trace("migrating comments")
		for i := 1; ; i++ {
			comments, isEnd, err := downloader.GetAllComments(i, commentBatchSize)
			if err != nil {
				return err
			}

			if err := uploader.CreateComments(comments...); err != nil {
				return err
			}

			if isEnd {
				break
			}
		}
	}

	return uploader.Finish()
}

// Init migrations service
func Init() error {
	// TODO: maybe we can deprecate these legacy ALLOWED_DOMAINS/ALLOW_LOCALNETWORKS/BLOCKED_DOMAINS, use ALLOWED_HOST_LIST/BLOCKED_HOST_LIST instead

	blockList = hostmatcher.ParseSimpleMatchList("migrations.BLOCKED_DOMAINS", setting.Migrations.BlockedDomains)

	allowList = hostmatcher.ParseSimpleMatchList("migrations.ALLOWED_DOMAINS/ALLOW_LOCALNETWORKS", setting.Migrations.AllowedDomains)
	if allowList.IsEmpty() {
		// the default policy is that migration module can access external hosts
		allowList.AppendBuiltin(hostmatcher.MatchBuiltinExternal)
	}
	if setting.Migrations.AllowLocalNetworks {
		allowList.AppendBuiltin(hostmatcher.MatchBuiltinPrivate)
		allowList.AppendBuiltin(hostmatcher.MatchBuiltinLoopback)
	}
	// TODO: at the moment, if ALLOW_LOCALNETWORKS=false, ALLOWED_DOMAINS=domain.com, and domain.com has IP 127.0.0.1, then it's still allowed.
	// if we want to block such case, the private&loopback should be added to the blockList when ALLOW_LOCALNETWORKS=false

	return nil
}
diff --git a/services/migrations/migrate_test.go b/services/migrations/migrate_test.go
new file mode 100644
index 0000000..109a092
--- /dev/null
+++ b/services/migrations/migrate_test.go
@@ -0,0 +1,115 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package migrations

import (
	"net"
	"path/filepath"
	"testing"

	"code.gitea.io/gitea/models/unittest"
	user_model "code.gitea.io/gitea/models/user"
	"code.gitea.io/gitea/modules/setting"

	"github.com/stretchr/testify/require"
)

// TestMigrateWhiteBlocklist exercises IsMigrateURLAllowed against the
// ALLOWED_DOMAINS / BLOCKED_DOMAINS / ALLOW_LOCALNETWORKS settings (re-applying
// Init after each settings change) and against local-path imports for admin,
// non-admin and AllowImportLocal users.
func TestMigrateWhiteBlocklist(t *testing.T) {
	require.NoError(t, unittest.PrepareTestDatabase())

	adminUser := unittest.AssertExistsAndLoadBean(t, &user_model.User{Name: "user1"})
	nonAdminUser := unittest.AssertExistsAndLoadBean(t, &user_model.User{Name: "user2"})

	setting.Migrations.AllowedDomains = "github.com"
	setting.Migrations.AllowLocalNetworks = false
	require.NoError(t, Init())

	err := IsMigrateURLAllowed("https://gitlab.com/gitlab/gitlab.git", nonAdminUser)
	require.Error(t, err)

	err = IsMigrateURLAllowed("https://github.com/go-gitea/gitea.git", nonAdminUser)
	require.NoError(t, err)

	// host matching is case-insensitive
	err = IsMigrateURLAllowed("https://gITHUb.com/go-gitea/gitea.git", nonAdminUser)
	require.NoError(t, err)

	setting.Migrations.AllowedDomains = ""
	setting.Migrations.BlockedDomains = "github.com"
	require.NoError(t, Init())

	err = IsMigrateURLAllowed("https://gitlab.com/gitlab/gitlab.git", nonAdminUser)
	require.NoError(t, err)

	err = IsMigrateURLAllowed("https://github.com/go-gitea/gitea.git", nonAdminUser)
	require.Error(t, err)

	err = IsMigrateURLAllowed("https://10.0.0.1/go-gitea/gitea.git", nonAdminUser)
	require.Error(t, err)

	setting.Migrations.AllowLocalNetworks = true
	require.NoError(t, Init())
	err = IsMigrateURLAllowed("https://10.0.0.1/go-gitea/gitea.git", nonAdminUser)
	require.NoError(t, err)

	// local path imports: gated by setting.ImportLocalPaths plus per-user rights
	old := setting.ImportLocalPaths
	setting.ImportLocalPaths = false

	err = IsMigrateURLAllowed("/home/foo/bar/goo", adminUser)
	require.Error(t, err)

	setting.ImportLocalPaths = true
	abs, err := filepath.Abs(".")
	require.NoError(t, err)

	err = IsMigrateURLAllowed(abs, adminUser)
	require.NoError(t, err)

	err = IsMigrateURLAllowed(abs, nonAdminUser)
	require.Error(t, err)

	nonAdminUser.AllowImportLocal = true
	err = IsMigrateURLAllowed(abs, nonAdminUser)
	require.NoError(t, err)

	setting.ImportLocalPaths = old
}

// TestAllowBlockList drives checkByAllowBlockList directly with fixed host
// names and IP lists, covering the default policy, local-network allowance,
// wildcard allow-lists and block-list precedence.
func TestAllowBlockList(t *testing.T) {
	// init re-applies the three migration settings and re-runs Init()
	init := func(allow, block string, local bool) {
		setting.Migrations.AllowedDomains = allow
		setting.Migrations.BlockedDomains = block
		setting.Migrations.AllowLocalNetworks = local
		require.NoError(t, Init())
	}

	// default, allow all external, block none, no local networks
	init("", "", false)
	require.NoError(t, checkByAllowBlockList("domain.com", []net.IP{net.ParseIP("1.2.3.4")}))
	require.Error(t, checkByAllowBlockList("domain.com", []net.IP{net.ParseIP("127.0.0.1")}))

	// allow all including local networks (it could lead to SSRF in production)
	init("", "", true)
	require.NoError(t, checkByAllowBlockList("domain.com", []net.IP{net.ParseIP("1.2.3.4")}))
	require.NoError(t, checkByAllowBlockList("domain.com", []net.IP{net.ParseIP("127.0.0.1")}))

	// allow wildcard, block some subdomains. if the domain name is allowed, then the local network check is skipped
	init("*.domain.com", "blocked.domain.com", false)
	require.NoError(t, checkByAllowBlockList("sub.domain.com", []net.IP{net.ParseIP("1.2.3.4")}))
	require.NoError(t, checkByAllowBlockList("sub.domain.com", []net.IP{net.ParseIP("127.0.0.1")}))
	require.Error(t, checkByAllowBlockList("blocked.domain.com", []net.IP{net.ParseIP("1.2.3.4")}))
	require.Error(t, checkByAllowBlockList("sub.other.com", []net.IP{net.ParseIP("1.2.3.4")}))

	// allow wildcard (it could lead to SSRF in production)
	init("*", "", false)
	require.NoError(t, checkByAllowBlockList("domain.com", []net.IP{net.ParseIP("1.2.3.4")}))
	require.NoError(t, checkByAllowBlockList("domain.com", []net.IP{net.ParseIP("127.0.0.1")}))

	// local network can still be blocked
	init("*", "127.0.0.*", false)
	require.NoError(t, checkByAllowBlockList("domain.com", []net.IP{net.ParseIP("1.2.3.4")}))
	require.Error(t, checkByAllowBlockList("domain.com", []net.IP{net.ParseIP("127.0.0.1")}))

	// reset
	init("", "", false)
}
diff --git a/services/migrations/onedev.go b/services/migrations/onedev.go
new file mode 100644
index 0000000..e2f7b77
--- /dev/null
+++ b/services/migrations/onedev.go
@@ -0,0 +1,634 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package migrations

import (
	"context"
	"fmt"
	"net/http"
	"net/url"
	"strconv"
	"strings"
	"time"

	"code.gitea.io/gitea/modules/json"
	"code.gitea.io/gitea/modules/log"
	base "code.gitea.io/gitea/modules/migration"
	"code.gitea.io/gitea/modules/structs"
)

var (
	_ base.Downloader        = &OneDevDownloader{}
	_ base.DownloaderFactory = &OneDevDownloaderFactory{}
)

func init() {
	RegisterDownloaderFactory(&OneDevDownloaderFactory{})
}

// OneDevDownloaderFactory defines a downloader factory
type OneDevDownloaderFactory struct{}

// New returns a downloader related to this factory according MigrateOptions.
// Accepted clone-address paths are "/projects/<name>" or "/<name>"; anything
// else is rejected.
func (f *OneDevDownloaderFactory) New(ctx context.Context, opts base.MigrateOptions) (base.Downloader, error) {
	u, err := url.Parse(opts.CloneAddr)
	if err != nil {
		return nil, err
	}

	var repoName string

	fields := strings.Split(strings.Trim(u.Path, "/"), "/")
	if len(fields) == 2 && fields[0] == "projects" {
		repoName = fields[1]
	} else if len(fields) == 1 {
		repoName = fields[0]
	} else {
		return nil, fmt.Errorf("invalid path: %s", u.Path)
	}

	// strip path/fragment so u can serve as the API base URL
	u.Path = ""
	u.Fragment = ""

	log.Trace("Create onedev downloader. BaseURL: %v RepoName: %s", u, repoName)

	return NewOneDevDownloader(ctx, u, opts.AuthUsername, opts.AuthPassword, repoName), nil
}

// GitServiceType returns the type of git service
func (f *OneDevDownloaderFactory) GitServiceType() structs.GitServiceType {
	return structs.OneDevService
}

// onedevUser is the subset of the OneDev user record this downloader consumes.
type onedevUser struct {
	ID    int64  `json:"id"`
	Name  string `json:"name"`
	Email string `json:"email"`
}

// OneDevDownloader implements a Downloader interface to get repository information
// from OneDev
type OneDevDownloader struct {
	base.NullDownloader
	ctx           context.Context
	client        *http.Client
	baseURL       *url.URL
	repoName      string
	repoID        int64 // resolved lazily by GetRepoInfo
	maxIssueIndex int64 // highest issue number seen; used to offset PR numbers
	userMap       map[int64]*onedevUser // cache for tryGetUser
	milestoneMap  map[int64]string      // milestone id -> title, filled by GetMilestones
}

// SetContext set context
func (d *OneDevDownloader) SetContext(ctx context.Context) {
	d.ctx = ctx
}

// NewOneDevDownloader creates a new downloader
func NewOneDevDownloader(ctx context.Context, baseURL *url.URL, username, password, repoName string) *OneDevDownloader {
	downloader := &OneDevDownloader{
		ctx:      ctx,
		baseURL:  baseURL,
		repoName: repoName,
		client: &http.Client{
			Transport: &http.Transport{
				// The Proxy hook is (ab)used only to inject basic auth into every
				// request; it always reports "no proxy". NOTE(review): unlike the
				// Codebase downloader, this ignores Gitea's proxy settings — confirm
				// that is intentional.
				Proxy: func(req *http.Request) (*url.URL, error) {
					if len(username) > 0 && len(password) > 0 {
						req.SetBasicAuth(username, password)
					}
					return nil, nil
				},
			},
		},
		userMap:      make(map[int64]*onedevUser),
		milestoneMap: make(map[int64]string),
	}

	return downloader
}

// String implements Stringer
func (d *OneDevDownloader) String() string {
	return fmt.Sprintf("migration from oneDev server %s [%d]/%s", d.baseURL, d.repoID, d.repoName)
}

// LogString implements log.ColoredIDValue-style identification; safe on a nil receiver.
func (d *OneDevDownloader) LogString() string {
	if d == nil {
		return "<OneDevDownloader nil>"
	}
	return fmt.Sprintf("<OneDevDownloader %s [%d]/%s>", d.baseURL, d.repoID, d.repoName)
}

// callAPI issues a GET against endpoint (relative to baseURL) with the given
// query parameters and JSON-decodes the body into result.
// NOTE(review): the HTTP status code is never checked, so non-2xx responses are
// decoded as if successful — confirm the OneDev API semantics make this safe.
func (d *OneDevDownloader) callAPI(endpoint string, parameter map[string]string, result any) error {
	u, err := d.baseURL.Parse(endpoint)
	if err != nil {
		return err
	}

	if parameter != nil {
		query := u.Query()
		for k, v := range parameter {
			query.Set(k, v)
		}
		u.RawQuery = query.Encode()
	}

	req, err := http.NewRequestWithContext(d.ctx, "GET", u.String(), nil)
	if err != nil {
		return err
	}

	resp, err := d.client.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	decoder := json.NewDecoder(resp.Body)
	return decoder.Decode(&result)
}

// GetRepoInfo returns repository information
func (d *OneDevDownloader) GetRepoInfo() (*base.Repository, error) {
	info := make([]struct {
		ID          int64  `json:"id"`
		Name        string `json:"name"`
		Description string `json:"description"`
	}, 0, 1)

	err := d.callAPI(
		"/api/projects",
		map[string]string{
			"query":  `"Name" is "` + d.repoName + `"`,
			"offset": "0",
			"count":  "1",
		},
		&info,
	)
	if err != nil {
		return nil, err
	}
	if len(info) != 1 {
		return nil, fmt.Errorf("Project %s not found", d.repoName)
	}

	// remember the numeric project id for all subsequent API calls
	d.repoID = info[0].ID

	cloneURL, err := d.baseURL.Parse(info[0].Name)
	if err != nil {
		return nil, err
	}
	originalURL, err := d.baseURL.Parse("/projects/" + info[0].Name)
	if err != nil {
		return nil, err
	}

	return &base.Repository{
		Name:        info[0].Name,
		Description: info[0].Description,
		CloneURL:    cloneURL.String(),
		OriginalURL: originalURL.String(),
	}, nil
}

// GetMilestones returns milestones
func (d *OneDevDownloader) GetMilestones() ([]*base.Milestone, error) {
	rawMilestones := make([]struct {
		ID          int64      `json:"id"`
		Name        string     `json:"name"`
		Description string     `json:"description"`
		DueDate     *time.Time `json:"dueDate"`
		Closed      bool       `json:"closed"`
	}, 0, 100)

	endpoint := fmt.Sprintf("/api/projects/%d/milestones", d.repoID)

	milestones := make([]*base.Milestone, 0, 100)
	offset := 0
	for {
		err := d.callAPI(
			endpoint,
			map[string]string{
				"offset": strconv.Itoa(offset),
				"count":  "100",
			},
			&rawMilestones,
		)
		if err != nil {
			return nil, err
		}
		if len(rawMilestones) == 0 {
			break
		}
		offset += 100

		for _, milestone := range rawMilestones {
			d.milestoneMap[milestone.ID] = milestone.Name
			// OneDev has no explicit "closed at"; reuse the due date as the closing
			// time for closed milestones, nil for open ones.
			closed := milestone.DueDate
			if !milestone.Closed {
				closed = nil
			}

			milestones = append(milestones, &base.Milestone{
				Title:       milestone.Name,
				Description: milestone.Description,
				Deadline:    milestone.DueDate,
				Closed:      closed,
			})
		}
	}
	return milestones, nil
}

// GetLabels returns labels. OneDev issue "Type" values are mapped onto this
// fixed label set rather than fetched from the server.
func (d *OneDevDownloader) GetLabels() ([]*base.Label, error) {
	return []*base.Label{
		{
			Name:  "Bug",
			Color: "f64e60",
		},
		{
			Name:  "Build Failure",
			Color: "f64e60",
		},
		{
			Name:  "Discussion",
			Color: "8950fc",
		},
		{
			Name:  "Improvement",
			Color: "1bc5bd",
		},
		{
			Name:  "New Feature",
			Color: "1bc5bd",
		},
		{
			Name:  "Support Request",
			Color: "8950fc",
		},
	}, nil
}

// onedevIssueContext distinguishes issues from pull requests when fetching
// comments for a Commentable.
type onedevIssueContext struct {
	IsPullRequest bool
}

// GetIssues returns issues
func (d *OneDevDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, error) {
	rawIssues := make([]struct {
		ID          int64     `json:"id"`
		Number      int64     `json:"number"`
		State       string    `json:"state"`
		Title       string    `json:"title"`
		Description string    `json:"description"`
		SubmitterID int64     `json:"submitterId"`
		SubmitDate  time.Time `json:"submitDate"`
	}, 0, perPage)

	err := d.callAPI(
		"/api/issues",
		map[string]string{
			"query":  `"Project" is "` + d.repoName + `"`,
			"offset": strconv.Itoa((page - 1) * perPage),
			"count":  strconv.Itoa(perPage),
		},
		&rawIssues,
	)
	if err != nil {
		return nil, false, err
	}

	issues := make([]*base.Issue, 0, len(rawIssues))
	for _, issue := range rawIssues {
		// the issue "Type" custom field becomes the single label
		fields := make([]struct {
			Name  string `json:"name"`
			Value string `json:"value"`
		}, 0, 10)
		err := d.callAPI(
			fmt.Sprintf("/api/issues/%d/fields", issue.ID),
			nil,
			&fields,
		)
		if err != nil {
			return nil, false, err
		}

		var label *base.Label
		for _, field := range fields {
			if field.Name == "Type" {
				label = &base.Label{Name: field.Value}
				break
			}
		}

		milestones := make([]struct {
			ID   int64  `json:"id"`
			Name string `json:"name"`
		}, 0, 10)
		err = d.callAPI(
			fmt.Sprintf("/api/issues/%d/milestones", issue.ID),
			nil,
			&milestones,
		)
		if err != nil {
			return nil, false, err
		}
		milestoneID := int64(0)
		if len(milestones) > 0 {
			milestoneID = milestones[0].ID
		}

		state := strings.ToLower(issue.State)
		if state == "released" {
			state = "closed"
		}
		poster := d.tryGetUser(issue.SubmitterID)
		// NOTE(review): when no "Type" field exists, Labels contains a single nil
		// entry — confirm the uploader tolerates that.
		issues = append(issues, &base.Issue{
			Title:        issue.Title,
			Number:       issue.Number,
			PosterName:   poster.Name,
			PosterEmail:  poster.Email,
			Content:      issue.Description,
			Milestone:    d.milestoneMap[milestoneID],
			State:        state,
			Created:      issue.SubmitDate,
			Updated:      issue.SubmitDate,
			Labels:       []*base.Label{label},
			ForeignIndex: issue.ID,
			Context:      onedevIssueContext{IsPullRequest: false},
		})

		// track the highest issue number so PR numbers can be offset past it
		if d.maxIssueIndex < issue.Number {
			d.maxIssueIndex = issue.Number
		}
	}

	return issues, len(issues) == 0, nil
}

// GetComments returns comments. Both explicit comments and "changes" carrying
// comment text are merged; the second return value is always true because the
// OneDev endpoints used here are not paginated.
func (d *OneDevDownloader) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) {
	context, ok := commentable.GetContext().(onedevIssueContext)
	if !ok {
		return nil, false, fmt.Errorf("unexpected context: %+v", commentable.GetContext())
	}

	rawComments := make([]struct {
		ID      int64     `json:"id"`
		Date    time.Time `json:"date"`
		UserID  int64     `json:"userId"`
		Content string    `json:"content"`
	}, 0, 100)

	var endpoint string
	if context.IsPullRequest {
		endpoint = fmt.Sprintf("/api/pull-requests/%d/comments", commentable.GetForeignIndex())
	} else {
		endpoint = fmt.Sprintf("/api/issues/%d/comments", commentable.GetForeignIndex())
	}

	err := d.callAPI(
		endpoint,
		nil,
		&rawComments,
	)
	if err != nil {
		return nil, false, err
	}

	rawChanges := make([]struct {
		Date   time.Time      `json:"date"`
		UserID int64          `json:"userId"`
		Data   map[string]any `json:"data"`
	}, 0, 100)

	if context.IsPullRequest {
		endpoint = fmt.Sprintf("/api/pull-requests/%d/changes", commentable.GetForeignIndex())
	} else {
		endpoint = fmt.Sprintf("/api/issues/%d/changes", commentable.GetForeignIndex())
	}

	err = d.callAPI(
		endpoint,
		nil,
		&rawChanges,
	)
	if err != nil {
		return nil, false, err
	}

	comments := make([]*base.Comment, 0, len(rawComments)+len(rawChanges))
	for _, comment := range rawComments {
		if len(comment.Content) == 0 {
			continue
		}
		poster := d.tryGetUser(comment.UserID)
		comments = append(comments, &base.Comment{
			IssueIndex:  commentable.GetLocalIndex(),
			Index:       comment.ID,
			PosterID:    poster.ID,
			PosterName:  poster.Name,
			PosterEmail: poster.Email,
			Content:     comment.Content,
			Created:     comment.Date,
			Updated:     comment.Date,
		})
	}
	for _, change := range rawChanges {
		// change events may carry text under "content" or "comment"; skip the rest
		contentV, ok := change.Data["content"]
		if !ok {
			contentV, ok = change.Data["comment"]
			if !ok {
				continue
			}
		}
		content, ok := contentV.(string)
		if !ok || len(content) == 0 {
			continue
		}

		poster := d.tryGetUser(change.UserID)
		comments = append(comments, &base.Comment{
			IssueIndex:  commentable.GetLocalIndex(),
			PosterID:    poster.ID,
			PosterName:  poster.Name,
			PosterEmail: poster.Email,
			Content:     content,
			Created:     change.Date,
			Updated:     change.Date,
		})
	}

	return comments, true, nil
}

// GetPullRequests returns pull requests
func (d *OneDevDownloader) GetPullRequests(page, perPage int) ([]*base.PullRequest, bool, error) {
	rawPullRequests := make([]struct {
		ID             int64     `json:"id"`
		Number         int64     `json:"number"`
		Title          string    `json:"title"`
		SubmitterID    int64     `json:"submitterId"`
		SubmitDate     time.Time `json:"submitDate"`
		Description    string    `json:"description"`
		TargetBranch   string    `json:"targetBranch"`
		SourceBranch   string    `json:"sourceBranch"`
		BaseCommitHash string    `json:"baseCommitHash"`
		CloseInfo      *struct {
			Date   *time.Time `json:"date"`
			Status string     `json:"status"`
		}
	}, 0, perPage)

	err := d.callAPI(
		"/api/pull-requests",
		map[string]string{
			"query":  `"Target Project" is "` + d.repoName + `"`,
			"offset": strconv.Itoa((page - 1) * perPage),
			"count":  strconv.Itoa(perPage),
		},
		&rawPullRequests,
	)
	if err != nil {
		return nil, false, err
	}

	pullRequests := make([]*base.PullRequest, 0, len(rawPullRequests))
	for _, pr := range rawPullRequests {
		// the merge preview supplies the head/base SHAs
		var mergePreview struct {
			TargetHeadCommitHash string `json:"targetHeadCommitHash"`
			HeadCommitHash       string `json:"headCommitHash"`
			MergeStrategy        string `json:"mergeStrategy"`
			MergeCommitHash      string `json:"mergeCommitHash"`
		}
		err := d.callAPI(
			fmt.Sprintf("/api/pull-requests/%d/merge-preview", pr.ID),
			nil,
			&mergePreview,
		)
		if err != nil {
			return nil, false, err
		}

		state := "open"
		merged := false
		var closeTime *time.Time
		var mergedTime *time.Time
		if pr.CloseInfo != nil {
			state = "closed"
			closeTime = pr.CloseInfo.Date
			if pr.CloseInfo.Status == "MERGED" { // "DISCARDED"
				merged = true
				mergedTime = pr.CloseInfo.Date
			}
		}
		poster := d.tryGetUser(pr.SubmitterID)

		// OneDev numbers issues and PRs independently; offset PR numbers past the
		// highest issue number seen so they don't collide in Gitea.
		number := pr.Number + d.maxIssueIndex
		pullRequests = append(pullRequests, &base.PullRequest{
			Title:      pr.Title,
			Number:     number,
			PosterName: poster.Name,
			PosterID:   poster.ID,
			Content:    pr.Description,
			State:      state,
			Created:    pr.SubmitDate,
			Updated:    pr.SubmitDate,
			Closed:     closeTime,
			Merged:     merged,
			MergedTime: mergedTime,
			Head: base.PullRequestBranch{
				Ref:      pr.SourceBranch,
				SHA:      mergePreview.HeadCommitHash,
				RepoName: d.repoName,
			},
			Base: base.PullRequestBranch{
				Ref:      pr.TargetBranch,
				SHA:      mergePreview.TargetHeadCommitHash,
				RepoName: d.repoName,
			},
			ForeignIndex: pr.ID,
			Context:      onedevIssueContext{IsPullRequest: true},
		})

		// SECURITY: Ensure that the PR is safe
		_ = CheckAndEnsureSafePR(pullRequests[len(pullRequests)-1], d.baseURL.String(), d)
	}

	return pullRequests, len(pullRequests) == 0, nil
}

// GetReviews returns pull requests reviews
func (d *OneDevDownloader) GetReviews(reviewable base.Reviewable) ([]*base.Review, error) {
	rawReviews := make([]struct {
		ID     int64 `json:"id"`
		UserID int64 `json:"userId"`
		Result *struct {
			Commit   string `json:"commit"`
			Approved bool   `json:"approved"`
			Comment  string `json:"comment"`
		}
	}, 0, 100)

	err := d.callAPI(
		fmt.Sprintf("/api/pull-requests/%d/reviews", reviewable.GetForeignIndex()),
		nil,
		&rawReviews,
	)
	if err != nil {
		return nil, err
	}

	reviews := make([]*base.Review, 0, len(rawReviews))
	for _, review := range rawReviews {
		// no Result -> still pending; a comment makes it "commented", approval wins
		state := base.ReviewStatePending
		content := ""
		if review.Result != nil {
			if len(review.Result.Comment) > 0 {
				state = base.ReviewStateCommented
				content = review.Result.Comment
			}
			if review.Result.Approved {
				state = base.ReviewStateApproved
			}
		}

		poster := d.tryGetUser(review.UserID)
		reviews = append(reviews, &base.Review{
			IssueIndex:   reviewable.GetLocalIndex(),
			ReviewerID:   poster.ID,
			ReviewerName: poster.Name,
			Content:      content,
			State:        state,
		})
	}

	return reviews, nil
}

// GetTopics return repository topics
func (d *OneDevDownloader) GetTopics() ([]string, error) {
	return []string{}, nil
}

// tryGetUser fetches (and caches) a OneDev user by id; on any API failure it
// falls back to a placeholder named "User <id>" so migration can continue.
func (d *OneDevDownloader) tryGetUser(userID int64) *onedevUser {
	user, ok := d.userMap[userID]
	if !ok {
		err := d.callAPI(
			fmt.Sprintf("/api/users/%d", userID),
			nil,
			&user,
		)
		if err != nil {
			user = &onedevUser{
				Name: fmt.Sprintf("User %d", userID),
			}
		}
		d.userMap[userID] = user
	}

	return user
}
diff --git a/services/migrations/onedev_test.go b/services/migrations/onedev_test.go
new file mode 100644
index 0000000..80c2613
--- /dev/null
+++ b/services/migrations/onedev_test.go
@@ -0,0 +1,149 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT + +package migrations + +import ( + "context" + "net/http" + "net/url" + "testing" + "time" + + base "code.gitea.io/gitea/modules/migration" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOneDevDownloadRepo(t *testing.T) { + resp, err := http.Get("https://code.onedev.io/projects/go-gitea-test_repo") + if err != nil || resp.StatusCode != http.StatusOK { + t.Skipf("Can't access test repo, skipping %s", t.Name()) + } + + u, _ := url.Parse("https://code.onedev.io") + downloader := NewOneDevDownloader(context.Background(), u, "", "", "go-gitea-test_repo") + if err != nil { + t.Fatalf("NewOneDevDownloader is nil: %v", err) + } + repo, err := downloader.GetRepoInfo() + require.NoError(t, err) + assertRepositoryEqual(t, &base.Repository{ + Name: "go-gitea-test_repo", + Owner: "", + Description: "Test repository for testing migration from OneDev to gitea", + CloneURL: "https://code.onedev.io/go-gitea-test_repo", + OriginalURL: "https://code.onedev.io/projects/go-gitea-test_repo", + }, repo) + + milestones, err := downloader.GetMilestones() + require.NoError(t, err) + deadline := time.Unix(1620086400, 0) + assertMilestonesEqual(t, []*base.Milestone{ + { + Title: "1.0.0", + Deadline: &deadline, + Closed: &deadline, + }, + { + Title: "1.1.0", + Description: "next things?", + }, + }, milestones) + + labels, err := downloader.GetLabels() + require.NoError(t, err) + assert.Len(t, labels, 6) + + issues, isEnd, err := downloader.GetIssues(1, 2) + require.NoError(t, err) + assert.False(t, isEnd) + assertIssuesEqual(t, []*base.Issue{ + { + Number: 4, + Title: "Hi there", + Content: "an issue not assigned to a milestone", + PosterName: "User 336", + State: "open", + Created: time.Unix(1628549776, 734000000), + Updated: time.Unix(1628549776, 734000000), + Labels: []*base.Label{ + { + Name: "Improvement", + }, + }, + ForeignIndex: 398, + Context: onedevIssueContext{IsPullRequest: false}, + }, + { + Number: 
3, + Title: "Add an awesome feature", + Content: "just another issue to test against", + PosterName: "User 336", + State: "open", + Milestone: "1.1.0", + Created: time.Unix(1628549749, 878000000), + Updated: time.Unix(1628549749, 878000000), + Labels: []*base.Label{ + { + Name: "New Feature", + }, + }, + ForeignIndex: 397, + Context: onedevIssueContext{IsPullRequest: false}, + }, + }, issues) + + comments, _, err := downloader.GetComments(&base.Issue{ + Number: 4, + ForeignIndex: 398, + Context: onedevIssueContext{IsPullRequest: false}, + }) + require.NoError(t, err) + assertCommentsEqual(t, []*base.Comment{ + { + IssueIndex: 4, + PosterName: "User 336", + Created: time.Unix(1628549791, 128000000), + Updated: time.Unix(1628549791, 128000000), + Content: "it has a comment\n\nEDIT: that got edited", + }, + }, comments) + + prs, _, err := downloader.GetPullRequests(1, 1) + require.NoError(t, err) + assertPullRequestsEqual(t, []*base.PullRequest{ + { + Number: 5, + Title: "Pull to add a new file", + Content: "just do some git stuff", + PosterName: "User 336", + State: "open", + Created: time.Unix(1628550076, 25000000), + Updated: time.Unix(1628550076, 25000000), + Head: base.PullRequestBranch{ + Ref: "branch-for-a-pull", + SHA: "343deffe3526b9bc84e873743ff7f6e6d8b827c0", + RepoName: "go-gitea-test_repo", + }, + Base: base.PullRequestBranch{ + Ref: "master", + SHA: "f32b0a9dfd09a60f616f29158f772cedd89942d2", + RepoName: "go-gitea-test_repo", + }, + ForeignIndex: 186, + Context: onedevIssueContext{IsPullRequest: true}, + }, + }, prs) + + rvs, err := downloader.GetReviews(&base.PullRequest{Number: 5, ForeignIndex: 186}) + require.NoError(t, err) + assertReviewsEqual(t, []*base.Review{ + { + IssueIndex: 5, + ReviewerName: "User 317", + State: "PENDING", + }, + }, rvs) +} diff --git a/services/migrations/restore.go b/services/migrations/restore.go new file mode 100644 index 0000000..fd337b2 --- /dev/null +++ b/services/migrations/restore.go @@ -0,0 +1,272 @@ +// Copyright 
2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package migrations
+
+import (
+	"context"
+	"fmt"
+	"os"
+	"path/filepath"
+	"strconv"
+
+	base "code.gitea.io/gitea/modules/migration"
+
+	"gopkg.in/yaml.v3"
+)
+
+// RepositoryRestorer implements a Downloader from the local directory
+type RepositoryRestorer struct {
+	base.NullDownloader
+	ctx        context.Context
+	baseDir    string
+	repoOwner  string
+	repoName   string
+	validation bool
+}
+
+// NewRepositoryRestorer creates a repository restorer which could restore repository from a dumped folder
+func NewRepositoryRestorer(ctx context.Context, baseDir, owner, repoName string, validation bool) (*RepositoryRestorer, error) {
+	baseDir, err := filepath.Abs(baseDir)
+	if err != nil {
+		return nil, err
+	}
+	return &RepositoryRestorer{
+		ctx:        ctx,
+		baseDir:    baseDir,
+		repoOwner:  owner,
+		repoName:   repoName,
+		validation: validation,
+	}, nil
+}
+
+// commentDir returns the directory holding per-issue comment dumps.
+func (r *RepositoryRestorer) commentDir() string {
+	return filepath.Join(r.baseDir, "comments")
+}
+
+// reviewDir returns the directory holding per-pull-request review dumps.
+func (r *RepositoryRestorer) reviewDir() string {
+	return filepath.Join(r.baseDir, "reviews")
+}
+
+// SetContext set context
+func (r *RepositoryRestorer) SetContext(ctx context.Context) {
+	r.ctx = ctx
+}
+
+// getRepoOptions reads repo.yml into a flat string map.
+func (r *RepositoryRestorer) getRepoOptions() (map[string]string, error) {
+	p := filepath.Join(r.baseDir, "repo.yml")
+	bs, err := os.ReadFile(p)
+	if err != nil {
+		return nil, err
+	}
+
+	opts := make(map[string]string)
+	err = yaml.Unmarshal(bs, &opts)
+	if err != nil {
+		return nil, err
+	}
+	return opts, nil
+}
+
+// GetRepoInfo returns a repository information
+func (r *RepositoryRestorer) GetRepoInfo() (*base.Repository, error) {
+	opts, err := r.getRepoOptions()
+	if err != nil {
+		return nil, err
+	}
+
+	isPrivate, _ := strconv.ParseBool(opts["is_private"])
+
+	return &base.Repository{
+		Owner:         r.repoOwner,
+		Name:          r.repoName,
+		IsPrivate:     isPrivate,
+		Description:   opts["description"],
+		OriginalURL:   opts["original_url"],
+		CloneURL:      filepath.Join(r.baseDir, "git"),
+		DefaultBranch: opts["default_branch"],
+	}, nil
+}
+
+// GetTopics return github topics
+func (r *RepositoryRestorer) GetTopics() ([]string, error) {
+	p := filepath.Join(r.baseDir, "topic.yml")
+
+	topics := struct {
+		Topics []string `yaml:"topics"`
+	}{}
+
+	bs, err := os.ReadFile(p)
+	if err != nil {
+		if os.IsNotExist(err) {
+			return nil, nil
+		}
+		return nil, err
+	}
+
+	err = yaml.Unmarshal(bs, &topics)
+	if err != nil {
+		return nil, err
+	}
+	return topics.Topics, nil
+}
+
+// GetMilestones returns milestones
+func (r *RepositoryRestorer) GetMilestones() ([]*base.Milestone, error) {
+	milestones := make([]*base.Milestone, 0, 10)
+	p := filepath.Join(r.baseDir, "milestone.yml")
+	err := base.Load(p, &milestones, r.validation)
+	if err != nil {
+		if os.IsNotExist(err) {
+			return nil, nil
+		}
+		return nil, err
+	}
+
+	return milestones, nil
+}
+
+// GetReleases returns releases
+func (r *RepositoryRestorer) GetReleases() ([]*base.Release, error) {
+	releases := make([]*base.Release, 0, 10)
+	p := filepath.Join(r.baseDir, "release.yml")
+	// os.ReadFile already reports a not-exist error, so a separate os.Stat
+	// would be a redundant syscall and a TOCTOU window; read directly.
+	bs, err := os.ReadFile(p)
+	if err != nil {
+		if os.IsNotExist(err) {
+			return nil, nil
+		}
+		return nil, err
+	}
+
+	if err := yaml.Unmarshal(bs, &releases); err != nil {
+		return nil, err
+	}
+	// Rewrite asset URLs to point at the dumped files on local disk.
+	for _, rel := range releases {
+		for _, asset := range rel.Assets {
+			if asset.DownloadURL != nil {
+				*asset.DownloadURL = "file://" + filepath.Join(r.baseDir, *asset.DownloadURL)
+			}
+		}
+	}
+	return releases, nil
+}
+
+// GetLabels returns labels
+func (r *RepositoryRestorer) GetLabels() ([]*base.Label, error) {
+	labels := make([]*base.Label, 0, 10)
+	p := filepath.Join(r.baseDir, "label.yml")
+	bs, err := os.ReadFile(p)
+	if err != nil {
+		if os.IsNotExist(err) {
+			return nil, nil
+		}
+		return nil, err
+	}
+
+	if err := yaml.Unmarshal(bs, &labels); err != nil {
+		return nil, err
+	}
+	return labels, nil
+}
+
+// GetIssues returns issues according start and limit
+func (r *RepositoryRestorer) GetIssues(page, perPage int) ([]*base.Issue, bool, error) {
+	issues := make([]*base.Issue, 0, 10)
+	p := filepath.Join(r.baseDir, "issue.yml")
+	err := base.Load(p, &issues, r.validation)
+	if err != nil {
+		if os.IsNotExist(err) {
+			return nil, true, nil
+		}
+		return nil, false, err
+	}
+	return issues, true, nil
+}
+
+// GetComments returns comments according issueNumber
+func (r *RepositoryRestorer) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) {
+	comments := make([]*base.Comment, 0, 10)
+	p := filepath.Join(r.commentDir(), fmt.Sprintf("%d.yml", commentable.GetForeignIndex()))
+	bs, err := os.ReadFile(p)
+	if err != nil {
+		if os.IsNotExist(err) {
+			return nil, false, nil
+		}
+		return nil, false, err
+	}
+
+	if err := yaml.Unmarshal(bs, &comments); err != nil {
+		return nil, false, err
+	}
+	return comments, false, nil
+}
+
+// GetPullRequests returns pull requests according page and perPage
+func (r *RepositoryRestorer) GetPullRequests(page, perPage int) ([]*base.PullRequest, bool, error) {
+	pulls := make([]*base.PullRequest, 0, 10)
+	p := filepath.Join(r.baseDir, "pull_request.yml")
+	bs, err := os.ReadFile(p)
+	if err != nil {
+		if os.IsNotExist(err) {
+			return nil, true, nil
+		}
+		return nil, false, err
+	}
+
+	if err := yaml.Unmarshal(bs, &pulls); err != nil {
+		return nil, false, err
+	}
+	for _, pr := range pulls {
+		pr.PatchURL = "file://" + filepath.Join(r.baseDir, pr.PatchURL)
+		CheckAndEnsureSafePR(pr, "", r)
+	}
+	return pulls, true, nil
+}
+
+// GetReviews returns pull requests review
+func (r *RepositoryRestorer) GetReviews(reviewable base.Reviewable) ([]*base.Review, error) {
+	reviews := make([]*base.Review, 0, 10)
+	p := filepath.Join(r.reviewDir(), fmt.Sprintf("%d.yml", reviewable.GetForeignIndex()))
+	bs, err := os.ReadFile(p)
+	if err != nil {
+		if os.IsNotExist(err) {
+			return nil, nil
+		}
+		return nil, err
+	}
+
+	if err := yaml.Unmarshal(bs, &reviews); err != nil {
+		return nil, err
+	}
+	return reviews, nil
+}
diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672
new file mode 100644
index 0000000..73532bf
--- /dev/null
+++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672
@@ -0,0 +1,17 @@
+X-Runtime: 0.155648
+Cache-Control: max-age=0, private, must-revalidate
+Strict-Transport-Security: max-age=31536000
+Gitlab-Lb: haproxy-main-41-lb-gprd
+Set-Cookie: _cfuvid=BI.nVv95qBu88KUbTZy0ZZJlRApJuj4nHeovyNu0YlU-1725394794027-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None
+Content-Security-Policy: default-src 'none'
+X-Frame-Options: SAMEORIGIN
+X-Gitlab-Meta: {"correlation_id":"6f22438486feec038cd6ea9f15b00ae5","version":"1"}
+Cf-Cache-Status: MISS
+Content-Type: application/json
+Etag: W/"b36bd4522b7e8b2509078271491fb972"
+Vary: Origin, Accept-Encoding
+X-Content-Type-Options: nosniff
+Referrer-Policy: strict-origin-when-cross-origin
+Gitlab-Sv: api-gke-us-east1-d
+
+{"id":61363672,"description":"Test repository for testing migration from gitlab to forgejo","name":"test_repo","name_with_namespace":"Forgejo / 
test_repo","path":"test_repo","path_with_namespace":"forgejo/test_repo","created_at":"2024-09-03T07:44:30.668Z","default_branch":"master","tag_list":["migration","test"],"topics":["migration","test"],"ssh_url_to_repo":"git@gitlab.com:forgejo/test_repo.git","http_url_to_repo":"https://gitlab.com/forgejo/test_repo.git","web_url":"https://gitlab.com/forgejo/test_repo","readme_url":"https://gitlab.com/forgejo/test_repo/-/blob/master/README.md","forks_count":0,"avatar_url":null,"star_count":0,"last_activity_at":"2024-09-03T20:03:18.187Z","namespace":{"id":64459497,"name":"Forgejo","path":"forgejo","kind":"group","full_path":"forgejo","parent_id":null,"avatar_url":"/uploads/-/system/group/avatar/64459497/73144-c883a242dec5299fbc06bbe3ee71d8c6.png","web_url":"https://gitlab.com/groups/forgejo"},"forked_from_project":{"id":15578026,"description":"Test repository for testing migration from gitlab to gitea","name":"test_repo","name_with_namespace":"gitea / test_repo","path":"test_repo","path_with_namespace":"gitea/test_repo","created_at":"2019-11-28T08:20:33.019Z","default_branch":"master","tag_list":["migration","test"],"topics":["migration","test"],"ssh_url_to_repo":"git@gitlab.com:gitea/test_repo.git","http_url_to_repo":"https://gitlab.com/gitea/test_repo.git","web_url":"https://gitlab.com/gitea/test_repo","readme_url":"https://gitlab.com/gitea/test_repo/-/blob/master/README.md","forks_count":2,"avatar_url":null,"star_count":0,"last_activity_at":"2024-09-03T07:52:28.488Z","namespace":{"id":3181312,"name":"gitea","path":"gitea","kind":"group","full_path":"gitea","parent_id":null,"avatar_url":"/uploads/-/system/group/avatar/3181312/gitea.png","web_url":"https://gitlab.com/groups/gitea"}},"container_registry_image_prefix":"registry.gitlab.com/forgejo/test_repo","_links":{"self":"https://gitlab.com/api/v4/projects/61363672","issues":"https://gitlab.com/api/v4/projects/61363672/issues","merge_requests":"https://gitlab.com/api/v4/projects/61363672/merge_requests","repo_branches"
:"https://gitlab.com/api/v4/projects/61363672/repository/branches","labels":"https://gitlab.com/api/v4/projects/61363672/labels","events":"https://gitlab.com/api/v4/projects/61363672/events","members":"https://gitlab.com/api/v4/projects/61363672/members","cluster_agents":"https://gitlab.com/api/v4/projects/61363672/cluster_agents"},"packages_enabled":true,"empty_repo":false,"archived":false,"visibility":"public","resolve_outdated_diff_discussions":false,"container_expiration_policy":{"cadence":"1d","enabled":false,"keep_n":10,"older_than":"90d","name_regex":".*","name_regex_keep":null,"next_run_at":"2024-09-04T07:44:30.699Z"},"repository_object_format":"sha1","issues_enabled":true,"merge_requests_enabled":true,"wiki_enabled":true,"jobs_enabled":true,"snippets_enabled":true,"container_registry_enabled":true,"service_desk_enabled":true,"service_desk_address":"contact-project+forgejo-test-repo-61363672-issue-@incoming.gitlab.com","can_create_merge_request_in":true,"issues_access_level":"enabled","repository_access_level":"enabled","merge_requests_access_level":"enabled","forking_access_level":"enabled","wiki_access_level":"enabled","builds_access_level":"enabled","snippets_access_level":"enabled","pages_access_level":"enabled","analytics_access_level":"enabled","container_registry_access_level":"enabled","security_and_compliance_access_level":"private","releases_access_level":"enabled","environments_access_level":"enabled","feature_flags_access_level":"enabled","infrastructure_access_level":"enabled","monitor_access_level":"enabled","model_experiments_access_level":"enabled","model_registry_access_level":"enabled","emails_disabled":false,"emails_enabled":true,"shared_runners_enabled":true,"lfs_enabled":true,"creator_id":2005797,"mr_default_target_self":false,"import_url":null,"import_type":null,"import_status":"finished","import_error":null,"open_issues_count":0,"description_html":"\u003cp data-sourcepos=\"1:1-1:60\" dir=\"auto\"\u003eTest repository for testing 
migration from gitlab to forgejo\u003c/p\u003e","updated_at":"2024-09-03T20:03:18.187Z","ci_default_git_depth":50,"ci_forward_deployment_enabled":true,"ci_forward_deployment_rollback_allowed":true,"ci_job_token_scope_enabled":false,"ci_separated_caches":true,"ci_allow_fork_pipelines_to_run_in_parent_project":true,"ci_id_token_sub_claim_components":["project_path","ref_type","ref"],"build_git_strategy":"fetch","keep_latest_artifact":true,"restrict_user_defined_variables":false,"ci_pipeline_variables_minimum_override_role":"maintainer","runners_token":null,"runner_token_expiration_interval":null,"group_runners_enabled":true,"auto_cancel_pending_pipelines":"enabled","build_timeout":3600,"auto_devops_enabled":false,"auto_devops_deploy_strategy":"continuous","ci_push_repository_for_job_token_allowed":false,"ci_config_path":null,"public_jobs":true,"shared_with_groups":[],"only_allow_merge_if_pipeline_succeeds":false,"allow_merge_on_skipped_pipeline":null,"request_access_enabled":true,"only_allow_merge_if_all_discussions_are_resolved":false,"remove_source_branch_after_merge":true,"printing_merge_request_link_enabled":true,"merge_method":"merge","squash_option":"default_off","enforce_auth_checks_on_uploads":true,"suggestion_commit_message":null,"merge_commit_template":null,"squash_commit_template":null,"issue_branch_template":null,"warn_about_potentially_unwanted_characters":true,"autoclose_referenced_issues":true,"external_authorization_classification_label":"","requirements_enabled":false,"requirements_access_level":"enabled","security_and_compliance_enabled":true,"pre_receive_secret_detection_enabled":false,"compliance_frameworks":[],"permissions":{"project_access":{"access_level":40,"notification_level":3},"group_access":null}}
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F1%2Faward_emoji%3Fpage=1&per_page=2 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F1%2Faward_emoji%3Fpage=1&per_page=2 new file mode 100644 index 0000000..ce2eb62 --- /dev/null +++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F1%2Faward_emoji%3Fpage=1&per_page=2 @@ -0,0 +1,24 @@ +X-Total-Pages: 1 +X-Next-Page: +Vary: Origin, Accept-Encoding +X-Prev-Page: +Gitlab-Sv: api-gke-us-east1-b +Cache-Control: max-age=0, private, must-revalidate +X-Total: 2 +Strict-Transport-Security: max-age=31536000 +Cf-Cache-Status: MISS +Link: <https://gitlab.com/api/v4/projects/61363672/issues/1/award_emoji?id=61363672&issue_iid=1&page=1&per_page=2>; rel="first", <https://gitlab.com/api/v4/projects/61363672/issues/1/award_emoji?id=61363672&issue_iid=1&page=1&per_page=2>; rel="last" +X-Frame-Options: SAMEORIGIN +Etag: W/"9eaad78fd40f769d67d34daaf19cfbab" +X-Content-Type-Options: nosniff +X-Page: 1 +Referrer-Policy: strict-origin-when-cross-origin +Set-Cookie: _cfuvid=8x.5zI7i_tau_4nKnR1WNvq_Cb_48MmatAHtHqxalEA-1725394795846-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +Content-Type: application/json +Content-Security-Policy: default-src 'none' +X-Per-Page: 2 +X-Runtime: 0.062405 +X-Gitlab-Meta: {"correlation_id":"d7fc12667b2139b99804080170986c28","version":"1"} +Gitlab-Lb: haproxy-main-18-lb-gprd + +[{"id":28099429,"name":"thumbsup","user":{"id":548513,"username":"mkobel","name":"Moritz 
Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T19:56:19.487Z","updated_at":"2024-09-03T19:56:19.487Z","awardable_id":152568896,"awardable_type":"Issue","url":null},{"id":28099432,"name":"open_mouth","user":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T19:56:24.365Z","updated_at":"2024-09-03T19:56:24.365Z","awardable_id":152568896,"awardable_type":"Issue","url":null}]
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F1%2Faward_emoji%3Fpage=2&per_page=2 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F1%2Faward_emoji%3Fpage=2&per_page=2 new file mode 100644 index 0000000..7755d80 --- /dev/null +++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F1%2Faward_emoji%3Fpage=2&per_page=2 @@ -0,0 +1,26 @@ +X-Next-Page: +Accept-Ranges: bytes +X-Frame-Options: SAMEORIGIN +Strict-Transport-Security: max-age=31536000 +Content-Length: 2 +Link: <https://gitlab.com/api/v4/projects/61363672/issues/1/award_emoji?id=61363672&issue_iid=1&page=1&per_page=2>; rel="first", <https://gitlab.com/api/v4/projects/61363672/issues/1/award_emoji?id=61363672&issue_iid=1&page=1&per_page=2>; rel="last" +Cf-Cache-Status: MISS +X-Per-Page: 2 +Cache-Control: max-age=0, private, must-revalidate +Etag: W/"4f53cda18c2baa0c0354bb5f9a3ecbe5" +Vary: Origin, Accept-Encoding +Set-Cookie: _cfuvid=hSs90HRbG8m0_RpN8VaCLGaQcrBX1vjr5h0LpLouZrg-1725394796397-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +X-Gitlab-Meta: {"correlation_id":"7ecc8cd91d20fdae3efed851c53b3009","version":"1"} +X-Total: 2 +Gitlab-Lb: haproxy-main-55-lb-gprd +X-Page: 2 +X-Runtime: 0.059820 +Referrer-Policy: strict-origin-when-cross-origin +X-Prev-Page: +X-Total-Pages: 1 +Gitlab-Sv: api-gke-us-east1-c +Content-Type: application/json +Content-Security-Policy: default-src 'none' +X-Content-Type-Options: nosniff + +[]
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=1&per_page=2 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=1&per_page=2 new file mode 100644 index 0000000..539ef68 --- /dev/null +++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=1&per_page=2 @@ -0,0 +1,24 @@ +X-Content-Type-Options: nosniff +X-Runtime: 0.217878 +Etag: W/"5cff9c25fad9db0de0442f8a50af76ed" +Vary: Origin, Accept-Encoding +Cf-Cache-Status: MISS +Strict-Transport-Security: max-age=31536000 +Gitlab-Lb: haproxy-main-11-lb-gprd +Gitlab-Sv: api-gke-us-east1-d +Set-Cookie: _cfuvid=0ssSfnfiXaFlJe_DdQ9NOfPlga.fQbgnLjSEwGIfEzk-1725394796812-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +X-Frame-Options: SAMEORIGIN +X-Prev-Page: +Referrer-Policy: strict-origin-when-cross-origin +X-Next-Page: 2 +X-Page: 1 +X-Gitlab-Meta: {"correlation_id":"379af21d1624cba7375460437671af6c","version":"1"} +Content-Security-Policy: default-src 'none' +Link: <https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji?id=61363672&issue_iid=2&page=2&per_page=2>; rel="next", <https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji?id=61363672&issue_iid=2&page=1&per_page=2>; rel="first", <https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji?id=61363672&issue_iid=2&page=3&per_page=2>; rel="last" +Content-Type: application/json +X-Per-Page: 2 +X-Total: 6 +X-Total-Pages: 3 +Cache-Control: max-age=0, private, must-revalidate + +[{"id":28092934,"name":"thumbsup","user":{"id":548513,"username":"mkobel","name":"Moritz 
Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T14:45:50.310Z","updated_at":"2024-09-03T14:45:50.310Z","awardable_id":152568900,"awardable_type":"Issue","url":null},{"id":28092936,"name":"thumbsdown","user":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T14:45:51.174Z","updated_at":"2024-09-03T14:45:51.174Z","awardable_id":152568900,"awardable_type":"Issue","url":null}]
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=2&per_page=2 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=2&per_page=2 new file mode 100644 index 0000000..60c54f2 --- /dev/null +++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=2&per_page=2 @@ -0,0 +1,24 @@ +Cache-Control: max-age=0, private, must-revalidate +X-Total-Pages: 3 +Vary: Origin, Accept-Encoding +X-Gitlab-Meta: {"correlation_id":"9bea6a0d3bfa187c0276b05afba166c4","version":"1"} +X-Runtime: 0.086090 +X-Total: 6 +Referrer-Policy: strict-origin-when-cross-origin +Gitlab-Sv: api-gke-us-east1-b +Content-Security-Policy: default-src 'none' +X-Frame-Options: SAMEORIGIN +X-Prev-Page: 1 +Strict-Transport-Security: max-age=31536000 +Gitlab-Lb: haproxy-main-36-lb-gprd +X-Content-Type-Options: nosniff +X-Page: 2 +Set-Cookie: _cfuvid=ByaUDcdLuj9lg2l.wzIwOZ66jeGSBhcxPeVwYI6iJ0I-1725394797065-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +X-Per-Page: 2 +Content-Type: application/json +Etag: W/"1b260e111b955c4b5b99834b5445d047" +Link: <https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji?id=61363672&issue_iid=2&page=1&per_page=2>; rel="prev", <https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji?id=61363672&issue_iid=2&page=3&per_page=2>; rel="next", <https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji?id=61363672&issue_iid=2&page=1&per_page=2>; rel="first", <https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji?id=61363672&issue_iid=2&page=3&per_page=2>; rel="last" +X-Next-Page: 3 +Cf-Cache-Status: MISS + +[{"id":28092944,"name":"laughing","user":{"id":548513,"username":"mkobel","name":"Moritz 
Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T14:46:00.936Z","updated_at":"2024-09-03T14:46:00.936Z","awardable_id":152568900,"awardable_type":"Issue","url":null},{"id":28092948,"name":"tada","user":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T14:46:09.593Z","updated_at":"2024-09-03T14:46:09.593Z","awardable_id":152568900,"awardable_type":"Issue","url":null}]
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=3&per_page=2 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=3&per_page=2 new file mode 100644 index 0000000..e3018fa --- /dev/null +++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=3&per_page=2 @@ -0,0 +1,24 @@ +X-Per-Page: 2 +X-Runtime: 0.064070 +X-Content-Type-Options: nosniff +X-Prev-Page: 2 +X-Page: 3 +Vary: Origin, Accept-Encoding +X-Total: 6 +Link: <https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji?id=61363672&issue_iid=2&page=2&per_page=2>; rel="prev", <https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji?id=61363672&issue_iid=2&page=1&per_page=2>; rel="first", <https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji?id=61363672&issue_iid=2&page=3&per_page=2>; rel="last" +X-Frame-Options: SAMEORIGIN +X-Gitlab-Meta: {"correlation_id":"db9cabb4c4399ec8680e56916a5f9ca2","version":"1"} +X-Next-Page: +X-Total-Pages: 3 +Strict-Transport-Security: max-age=31536000 +Content-Security-Policy: default-src 'none' +Content-Type: application/json +Etag: W/"578a2e92e9d4f9fb1c21c89b9e13eb0e" +Gitlab-Lb: haproxy-main-17-lb-gprd +Cf-Cache-Status: MISS +Referrer-Policy: strict-origin-when-cross-origin +Gitlab-Sv: api-gke-us-east1-d +Set-Cookie: _cfuvid=Upv78tZEcC_Ry_GNFdw5Ms5eMI9FkehWT5RF0a2i7d0-1725394797546-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +Cache-Control: max-age=0, private, must-revalidate + +[{"id":28092953,"name":"confused","user":{"id":548513,"username":"mkobel","name":"Moritz 
Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T14:46:18.191Z","updated_at":"2024-09-03T14:46:18.191Z","awardable_id":152568900,"awardable_type":"Issue","url":null},{"id":28092962,"name":"hearts","user":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T14:46:35.367Z","updated_at":"2024-09-03T14:46:35.367Z","awardable_id":152568900,"awardable_type":"Issue","url":null}]
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=4&per_page=2 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=4&per_page=2 new file mode 100644 index 0000000..b7dd2a5 --- /dev/null +++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Faward_emoji%3Fpage=4&per_page=2 @@ -0,0 +1,26 @@ +X-Runtime: 0.059461 +X-Total: 6 +Gitlab-Lb: haproxy-main-16-lb-gprd +Set-Cookie: _cfuvid=yVbakY3C4M4Kdnt7wIM2OYjNHbX8d6djf5tCk3NWtfw-1725394797782-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +Content-Type: application/json +Cache-Control: max-age=0, private, must-revalidate +X-Page: 4 +X-Per-Page: 2 +Gitlab-Sv: api-gke-us-east1-c +X-Next-Page: +Strict-Transport-Security: max-age=31536000 +Referrer-Policy: strict-origin-when-cross-origin +Content-Length: 2 +Vary: Origin, Accept-Encoding +X-Content-Type-Options: nosniff +Etag: W/"4f53cda18c2baa0c0354bb5f9a3ecbe5" +Cf-Cache-Status: MISS +X-Prev-Page: +Accept-Ranges: bytes +Content-Security-Policy: default-src 'none' +X-Frame-Options: SAMEORIGIN +X-Gitlab-Meta: {"correlation_id":"b494fe1273622e61d5b9171bcb8be8f8","version":"1"} +Link: <https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji?id=61363672&issue_iid=2&page=1&per_page=2>; rel="first", <https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji?id=61363672&issue_iid=2&page=3&per_page=2>; rel="last" +X-Total-Pages: 3 + +[]
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Fdiscussions%3Fpage=1&per_page=100 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Fdiscussions%3Fpage=1&per_page=100 new file mode 100644 index 0000000..7acaddf --- /dev/null +++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Fdiscussions%3Fpage=1&per_page=100 @@ -0,0 +1,24 @@ +X-Runtime: 0.145197 +X-Total-Pages: 1 +Strict-Transport-Security: max-age=31536000 +Vary: Origin, Accept-Encoding +X-Prev-Page: +X-Frame-Options: SAMEORIGIN +X-Total: 2 +Gitlab-Lb: haproxy-main-52-lb-gprd +Gitlab-Sv: api-gke-us-east1-c +Content-Security-Policy: default-src 'none' +Etag: W/"7f9e8aa5e56c4a23a0ac1fe1e32ea1cf" +Cache-Control: max-age=0, private, must-revalidate +X-Content-Type-Options: nosniff +Referrer-Policy: strict-origin-when-cross-origin +Cf-Cache-Status: MISS +X-Next-Page: +X-Page: 1 +Link: <https://gitlab.com/api/v4/projects/61363672/issues/2/discussions?id=61363672¬eable_id=2&page=1&per_page=100>; rel="first", <https://gitlab.com/api/v4/projects/61363672/issues/2/discussions?id=61363672¬eable_id=2&page=1&per_page=100>; rel="last" +X-Gitlab-Meta: {"correlation_id":"e2dd8497292356efa5150a6c5ecd61b5","version":"1"} +Content-Type: application/json +X-Per-Page: 100 +Set-Cookie: _cfuvid=zB07q9Xq11k5SlfuxWW17Ez7DHpyfygT7b4L.VixX.I-1725394798110-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None + +[{"id":"8d6017e7426130502cd94fff207224b8a98efabc","individual_note":true,"notes":[{"id":2087994191,"type":null,"body":"This is a comment","attachment":null,"author":{"id":548513,"username":"mkobel","name":"Moritz 
Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T14:45:20.848Z","updated_at":"2024-09-03T14:45:46.592Z","system":false,"noteable_id":152568900,"noteable_type":"Issue","project_id":61363672,"resolvable":false,"confidential":false,"internal":false,"imported":false,"imported_from":"none","noteable_iid":2,"commands_changes":{}}]},{"id":"c721de2d3f2f0fe9a40005228f50d8c8d8131581","individual_note":true,"notes":[{"id":2087994632,"type":null,"body":"A second comment","attachment":null,"author":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T14:45:30.059Z","updated_at":"2024-09-03T14:45:30.059Z","system":false,"noteable_id":152568900,"noteable_type":"Issue","project_id":61363672,"resolvable":false,"confidential":false,"internal":false,"imported":false,"imported_from":"none","noteable_iid":2,"commands_changes":{}}]}]
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Fresource_state_events%3Fpage=1&per_page=100 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Fresource_state_events%3Fpage=1&per_page=100 new file mode 100644 index 0000000..ef8cac0 --- /dev/null +++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%2F2%2Fresource_state_events%3Fpage=1&per_page=100 @@ -0,0 +1,24 @@ +Cache-Control: max-age=0, private, must-revalidate +X-Content-Type-Options: nosniff +X-Next-Page: +Gitlab-Sv: api-gke-us-east1-d +Cf-Cache-Status: MISS +Content-Type: application/json +Strict-Transport-Security: max-age=31536000 +X-Total-Pages: 1 +Referrer-Policy: strict-origin-when-cross-origin +Content-Security-Policy: default-src 'none' +Set-Cookie: _cfuvid=FG.klkpkCkFafn4bGe91EcTgDxILPZT9lIAALQsMguo-1725394798392-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +X-Frame-Options: SAMEORIGIN +X-Prev-Page: +Link: <https://gitlab.com/api/v4/projects/61363672/issues/2/resource_state_events?eventable_id=2&id=61363672&page=1&per_page=100>; rel="first", <https://gitlab.com/api/v4/projects/61363672/issues/2/resource_state_events?eventable_id=2&id=61363672&page=1&per_page=100>; rel="last" +X-Runtime: 0.103796 +X-Total: 1 +Etag: W/"7461fc73e919f707da29f7080cbbf5a5" +Vary: Origin, Accept-Encoding +X-Gitlab-Meta: {"correlation_id":"aacea0eebb5d187d57ce369f9bd57a96","version":"1"} +X-Page: 1 +X-Per-Page: 100 +Gitlab-Lb: haproxy-main-02-lb-gprd + +[{"id":241837962,"user":{"id":548513,"username":"mkobel","name":"Moritz 
Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T14:43:10.947Z","resource_type":"Issue","resource_id":152568900,"state":"closed"}]
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%3Fpage=1&per_page=2&sort=asc&state=all b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%3Fpage=1&per_page=2&sort=asc&state=all new file mode 100644 index 0000000..4222407 --- /dev/null +++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fissues%3Fpage=1&per_page=2&sort=asc&state=all @@ -0,0 +1,24 @@ +X-Total-Pages: 1 +Cache-Control: max-age=0, private, must-revalidate +X-Runtime: 0.200064 +Etag: W/"d8fb18a73522276c6ef2dcd41f54a48c" +Link: <https://gitlab.com/api/v4/projects/61363672/issues?id=61363672&order_by=created_at&page=1&per_page=2&sort=asc&state=all&with_labels_details=false>; rel="first", <https://gitlab.com/api/v4/projects/61363672/issues?id=61363672&order_by=created_at&page=1&per_page=2&sort=asc&state=all&with_labels_details=false>; rel="last" +Strict-Transport-Security: max-age=31536000 +Cf-Cache-Status: MISS +X-Gitlab-Meta: {"correlation_id":"e93266a7fd0f8392c302d86788f1915d","version":"1"} +X-Per-Page: 2 +X-Total: 2 +Content-Type: application/json +Vary: Origin, Accept-Encoding +X-Next-Page: +Referrer-Policy: strict-origin-when-cross-origin +Gitlab-Lb: haproxy-main-48-lb-gprd +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Prev-Page: +Gitlab-Sv: api-gke-us-east1-b +Set-Cookie: _cfuvid=dJlDovqc76Ccf_kb3CEsWZMasfjw9wsdzsdIUd.IMiQ-1725394795593-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +Content-Security-Policy: default-src 'none' +X-Page: 1 + +[{"id":152568896,"iid":1,"project_id":61363672,"title":"Please add an animated gif icon to the merge button","description":"I just want the merge button to hurt my eyes a little. 
:stuck_out_tongue_closed_eyes:","state":"closed","created_at":"2024-09-03T14:42:34.924Z","updated_at":"2024-09-03T14:48:43.756Z","closed_at":"2024-09-03T14:43:10.708Z","closed_by":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"labels":["bug","discussion"],"milestone":{"id":4711993,"iid":2,"project_id":61363672,"title":"1.0.0","description":"","state":"closed","created_at":"2024-09-03T13:53:08.516Z","updated_at":"2024-09-03T20:03:57.786Z","due_date":null,"start_date":null,"expired":false,"web_url":"https://gitlab.com/forgejo/test_repo/-/milestones/2"},"assignees":[],"author":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"type":"ISSUE","assignee":null,"user_notes_count":0,"merge_requests_count":0,"upvotes":1,"downvotes":0,"due_date":null,"confidential":false,"discussion_locked":null,"issue_type":"issue","web_url":"https://gitlab.com/forgejo/test_repo/-/issues/1","time_stats":{"time_estimate":0,"total_time_spent":0,"human_time_estimate":null,"human_total_time_spent":null},"task_completion_status":{"count":0,"completed_count":0},"blocking_issues_count":0,"has_tasks":true,"task_status":"0 of 0 checklist items 
completed","_links":{"self":"https://gitlab.com/api/v4/projects/61363672/issues/1","notes":"https://gitlab.com/api/v4/projects/61363672/issues/1/notes","award_emoji":"https://gitlab.com/api/v4/projects/61363672/issues/1/award_emoji","project":"https://gitlab.com/api/v4/projects/61363672","closed_as_duplicate_of":null},"references":{"short":"#1","relative":"#1","full":"forgejo/test_repo#1"},"severity":"UNKNOWN","moved_to_id":null,"imported":false,"imported_from":"none","service_desk_reply_to":null},{"id":152568900,"iid":2,"project_id":61363672,"title":"Test issue","description":"This is test issue 2, do not touch!","state":"closed","created_at":"2024-09-03T14:42:35.371Z","updated_at":"2024-09-03T20:03:43.536Z","closed_at":"2024-09-03T14:43:10.906Z","closed_by":{"id":548513,"username":"mkobel","name":"Moritz Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"labels":["duplicate"],"milestone":{"id":4711993,"iid":2,"project_id":61363672,"title":"1.0.0","description":"","state":"closed","created_at":"2024-09-03T13:53:08.516Z","updated_at":"2024-09-03T20:03:57.786Z","due_date":null,"start_date":null,"expired":false,"web_url":"https://gitlab.com/forgejo/test_repo/-/milestones/2"},"assignees":[],"author":{"id":548513,"username":"mkobel","name":"Moritz 
Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"type":"ISSUE","assignee":null,"user_notes_count":2,"merge_requests_count":0,"upvotes":1,"downvotes":1,"due_date":null,"confidential":false,"discussion_locked":null,"issue_type":"issue","web_url":"https://gitlab.com/forgejo/test_repo/-/issues/2","time_stats":{"time_estimate":0,"total_time_spent":0,"human_time_estimate":null,"human_total_time_spent":null},"task_completion_status":{"count":0,"completed_count":0},"blocking_issues_count":0,"has_tasks":true,"task_status":"0 of 0 checklist items completed","_links":{"self":"https://gitlab.com/api/v4/projects/61363672/issues/2","notes":"https://gitlab.com/api/v4/projects/61363672/issues/2/notes","award_emoji":"https://gitlab.com/api/v4/projects/61363672/issues/2/award_emoji","project":"https://gitlab.com/api/v4/projects/61363672","closed_as_duplicate_of":null},"references":{"short":"#2","relative":"#2","full":"forgejo/test_repo#2"},"severity":"UNKNOWN","moved_to_id":null,"imported":false,"imported_from":"none","service_desk_reply_to":null}]
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Flabels%3Fpage=1&per_page=100 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Flabels%3Fpage=1&per_page=100 new file mode 100644 index 0000000..7070f55 --- /dev/null +++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Flabels%3Fpage=1&per_page=100 @@ -0,0 +1,24 @@ +X-Runtime: 0.134818 +Gitlab-Lb: haproxy-main-57-lb-gprd +X-Total: 11 +X-Total-Pages: 1 +Content-Security-Policy: default-src 'none' +X-Prev-Page: +Etag: W/"91f61a44ed534ef7d26e391dbef8dc0a" +Gitlab-Sv: api-gke-us-east1-b +Vary: Origin, Accept-Encoding +Referrer-Policy: strict-origin-when-cross-origin +Link: <https://gitlab.com/api/v4/projects/61363672/labels?id=61363672&include_ancestor_groups=true&page=1&per_page=100&with_counts=false>; rel="first", <https://gitlab.com/api/v4/projects/61363672/labels?id=61363672&include_ancestor_groups=true&page=1&per_page=100&with_counts=false>; rel="last" +X-Frame-Options: SAMEORIGIN +X-Gitlab-Meta: {"correlation_id":"25e616938688ad5e6ab58382f3e39c16","version":"1"} +X-Next-Page: +X-Page: 1 +Set-Cookie: _cfuvid=hdkQYZmgtcCpfA24UkICU4IGbz73Cpnd9.1NfpCL96Y-1725394794621-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +Content-Type: application/json +Cache-Control: max-age=0, private, must-revalidate +Cf-Cache-Status: MISS +X-Content-Type-Options: nosniff +X-Per-Page: 100 +Strict-Transport-Security: max-age=31536000 + 
+[{"id":36554072,"name":"bug","description":null,"description_html":"","text_color":"#FFFFFF","color":"#d9534f","subscribed":false,"priority":null,"is_project_label":true},{"id":36554074,"name":"confirmed","description":null,"description_html":"","text_color":"#FFFFFF","color":"#d9534f","subscribed":false,"priority":null,"is_project_label":true},{"id":36554073,"name":"critical","description":null,"description_html":"","text_color":"#FFFFFF","color":"#d9534f","subscribed":false,"priority":null,"is_project_label":true},{"id":36554077,"name":"discussion","description":null,"description_html":"","text_color":"#FFFFFF","color":"#428bca","subscribed":false,"priority":null,"is_project_label":true},{"id":36554075,"name":"documentation","description":null,"description_html":"","text_color":"#1F1E24","color":"#f0ad4e","subscribed":false,"priority":null,"is_project_label":true},{"id":36556606,"name":"duplicate","description":"","description_html":"","text_color":"#FFFFFF","color":"#7F8C8D","subscribed":false,"priority":null,"is_project_label":true},{"id":36554079,"name":"enhancement","description":null,"description_html":"","text_color":"#FFFFFF","color":"#5cb85c","subscribed":false,"priority":null,"is_project_label":true},{"id":36554078,"name":"suggestion","description":null,"description_html":"","text_color":"#FFFFFF","color":"#428bca","subscribed":false,"priority":null,"is_project_label":true},{"id":36554076,"name":"support","description":null,"description_html":"","text_color":"#1F1E24","color":"#f0ad4e","subscribed":false,"priority":null,"is_project_label":true},{"id":36554080,"name":"test-scope::label0","description":"scoped label","description_html":"scoped label","text_color":"#FFFFFF","color":"#6699cc","subscribed":false,"priority":null,"is_project_label":true},{"id":36554094,"name":"test-scope::label1","description":"","description_html":"","text_color":"#FFFFFF","color":"#dc143c","subscribed":false,"priority":null,"is_project_label":true}]
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1 new file mode 100644 index 0000000..2903724 --- /dev/null +++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1 @@ -0,0 +1,17 @@ +X-Content-Type-Options: nosniff +X-Runtime: 0.132332 +Strict-Transport-Security: max-age=31536000 +Set-Cookie: _cfuvid=dCpqfgALGbwKdCAsAe6oT5DVCj6oBwrnU5y2Jd40KPs-1725394799000-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +X-Frame-Options: SAMEORIGIN +Referrer-Policy: strict-origin-when-cross-origin +Gitlab-Lb: haproxy-main-11-lb-gprd +Content-Security-Policy: default-src 'none' +Etag: W/"8b6a8cc6f36ac5289783c7654f292212" +Vary: Origin, Accept-Encoding +X-Gitlab-Meta: {"correlation_id":"bef818a29fa7cfc1f075ef0925e63404","version":"1"} +Gitlab-Sv: api-gke-us-east1-d +Content-Type: application/json +Cache-Control: max-age=0, private, must-revalidate +Cf-Cache-Status: MISS + +{"id":324657888,"iid":1,"project_id":61363672,"title":"Test branch","description":"do not merge this 
PR","state":"opened","created_at":"2024-09-03T07:57:19.866Z","updated_at":"2024-09-03T18:50:21.065Z","merged_by":null,"merge_user":null,"merged_at":null,"closed_by":null,"closed_at":null,"target_branch":"master","source_branch":"feat/test","user_notes_count":0,"upvotes":1,"downvotes":0,"author":{"id":2005797,"username":"oliverpool","name":"oliverpool","state":"active","locked":false,"avatar_url":"https://gitlab.com/uploads/-/system/user/avatar/2005797/avatar.png","web_url":"https://gitlab.com/oliverpool"},"assignees":[],"assignee":null,"reviewers":[],"source_project_id":61363672,"target_project_id":61363672,"labels":["test-scope::label0","test-scope::label1"],"draft":false,"imported":false,"imported_from":"none","work_in_progress":false,"milestone":{"id":4711991,"iid":1,"project_id":61363672,"title":"1.1.0","description":"","state":"active","created_at":"2024-09-03T13:52:48.414Z","updated_at":"2024-09-03T14:52:14.093Z","due_date":null,"start_date":null,"expired":false,"web_url":"https://gitlab.com/forgejo/test_repo/-/milestones/1"},"merge_when_pipeline_succeeds":false,"merge_status":"can_be_merged","detailed_merge_status":"mergeable","sha":"9f733b96b98a4175276edf6a2e1231489c3bdd23","merge_commit_sha":null,"squash_commit_sha":null,"discussion_locked":null,"should_remove_source_branch":null,"force_remove_source_branch":true,"prepared_at":"2024-09-03T08:15:46.361Z","reference":"!1","references":{"short":"!1","relative":"!1","full":"forgejo/test_repo!1"},"web_url":"https://gitlab.com/forgejo/test_repo/-/merge_requests/1","time_stats":{"time_estimate":0,"total_time_spent":0,"human_time_estimate":null,"human_total_time_spent":null},"squash":false,"squash_on_merge":false,"task_completion_status":{"count":0,"completed_count":0},"has_conflicts":false,"blocking_discussions_resolved":true,"approvals_before_merge":null,"subscribed":true,"changes_count":"1","latest_build_started_at":null,"latest_build_finished_at":null,"first_deployed_to_production_at":null,"pipeline":null,"head
_pipeline":null,"diff_refs":{"base_sha":"c59c9b451acca9d106cc19d61d87afe3fbbb8b83","head_sha":"9f733b96b98a4175276edf6a2e1231489c3bdd23","start_sha":"c59c9b451acca9d106cc19d61d87afe3fbbb8b83"},"merge_error":null,"first_contribution":true,"user":{"can_merge":true}}
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Fapprovals b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Fapprovals new file mode 100644 index 0000000..df85ea4 --- /dev/null +++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Fapprovals @@ -0,0 +1,17 @@ +Gitlab-Sv: api-gke-us-east1-d +Set-Cookie: _cfuvid=c8dYhAX7c7Kj.9kgrISTCaOoMKuKV0amVHZbY28k_vc-1725394800394-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +Content-Security-Policy: default-src 'none' +X-Frame-Options: SAMEORIGIN +X-Gitlab-Meta: {"correlation_id":"1bfdf6ff862f2719b5ff0fa43d4b1f68","version":"1"} +Referrer-Policy: strict-origin-when-cross-origin +Cf-Cache-Status: MISS +Cache-Control: max-age=0, private, must-revalidate +X-Runtime: 0.141568 +Strict-Transport-Security: max-age=31536000 +Gitlab-Lb: haproxy-main-26-lb-gprd +Content-Type: application/json +Etag: W/"90fb650b1668940dd7ccac3869a3a2bd" +Vary: Origin, Accept-Encoding +X-Content-Type-Options: nosniff + +{"id":324657888,"iid":1,"project_id":61363672,"title":"Test branch","description":"do not merge this PR","state":"opened","created_at":"2024-09-03T07:57:19.866Z","updated_at":"2024-09-03T18:50:21.065Z","merge_status":"can_be_merged","approved":true,"approvals_required":0,"approvals_left":0,"require_password_to_approve":false,"approved_by":[{"user":{"id":548513,"username":"mkobel","name":"Moritz 
Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"}}],"suggested_approvers":[],"approvers":[],"approver_groups":[],"user_has_approved":true,"user_can_approve":false,"approval_rules_left":[],"has_approval_rules":false,"merge_request_approvers_available":false,"multiple_approval_rules_available":false,"invalid_approvers_rules":[]}
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=1&per_page=1 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=1&per_page=1 new file mode 100644 index 0000000..7e50312 --- /dev/null +++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=1&per_page=1 @@ -0,0 +1,24 @@ +X-Gitlab-Meta: {"correlation_id":"46af78321ea2674ac3e1e56243baabf6","version":"1"} +Gitlab-Lb: haproxy-main-27-lb-gprd +Vary: Origin, Accept-Encoding +X-Total-Pages: 2 +Strict-Transport-Security: max-age=31536000 +Content-Security-Policy: default-src 'none' +X-Content-Type-Options: nosniff +X-Page: 1 +X-Runtime: 0.071781 +Cf-Cache-Status: MISS +Link: <https://gitlab.com/api/v4/projects/61363672/merge_requests/1/award_emoji?id=61363672&merge_request_iid=1&page=2&per_page=1>; rel="next", <https://gitlab.com/api/v4/projects/61363672/merge_requests/1/award_emoji?id=61363672&merge_request_iid=1&page=1&per_page=1>; rel="first", <https://gitlab.com/api/v4/projects/61363672/merge_requests/1/award_emoji?id=61363672&merge_request_iid=1&page=2&per_page=1>; rel="last" +Etag: W/"a08d29f7fa018b5a6f30ae6de1035350" +X-Prev-Page: +X-Total: 2 +Content-Type: application/json +X-Frame-Options: SAMEORIGIN +X-Next-Page: 2 +X-Per-Page: 1 +Referrer-Policy: strict-origin-when-cross-origin +Gitlab-Sv: api-gke-us-east1-b +Set-Cookie: _cfuvid=PKNy4TeWDnd8j772wQMiBZpmFpOjDfu9JcpnUSyVULU-1725394799568-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +Cache-Control: max-age=0, private, must-revalidate + +[{"id":28098492,"name":"thumbsup","user":{"id":548513,"username":"mkobel","name":"Moritz 
Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T18:49:58.072Z","updated_at":"2024-09-03T18:49:58.072Z","awardable_id":324657888,"awardable_type":"MergeRequest","url":null}]
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=2&per_page=1 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=2&per_page=1 new file mode 100644 index 0000000..f33a33c --- /dev/null +++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=2&per_page=1 @@ -0,0 +1,24 @@ +Etag: W/"9d4f10c73db7508f9f63f83f4f3e9dd2" +Link: <https://gitlab.com/api/v4/projects/61363672/merge_requests/1/award_emoji?id=61363672&merge_request_iid=1&page=1&per_page=1>; rel="prev", <https://gitlab.com/api/v4/projects/61363672/merge_requests/1/award_emoji?id=61363672&merge_request_iid=1&page=1&per_page=1>; rel="first", <https://gitlab.com/api/v4/projects/61363672/merge_requests/1/award_emoji?id=61363672&merge_request_iid=1&page=2&per_page=1>; rel="last" +X-Runtime: 0.070580 +Gitlab-Sv: api-gke-us-east1-c +Content-Type: application/json +Cf-Cache-Status: MISS +Vary: Origin, Accept-Encoding +X-Frame-Options: SAMEORIGIN +X-Prev-Page: 1 +Gitlab-Lb: haproxy-main-58-lb-gprd +Cache-Control: max-age=0, private, must-revalidate +X-Total: 2 +X-Total-Pages: 2 +Strict-Transport-Security: max-age=31536000 +Referrer-Policy: strict-origin-when-cross-origin +X-Gitlab-Meta: {"correlation_id":"c39c59a22f48b51fcdbe4d7121983045","version":"1"} +X-Next-Page: +X-Per-Page: 1 +Set-Cookie: _cfuvid=ocsAYkwqggUMC09s009R.yWb7q3OTyWzwjV73iFeOAM-1725394799827-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +Content-Security-Policy: default-src 'none' +X-Content-Type-Options: nosniff +X-Page: 2 + +[{"id":28098494,"name":"tada","user":{"id":548513,"username":"mkobel","name":"Moritz 
Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"created_at":"2024-09-03T18:50:02.028Z","updated_at":"2024-09-03T18:50:02.028Z","awardable_id":324657888,"awardable_type":"MergeRequest","url":null}]
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=3&per_page=1 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=3&per_page=1 new file mode 100644 index 0000000..783ea3b --- /dev/null +++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=3&per_page=1 @@ -0,0 +1,26 @@ +Content-Length: 2 +X-Next-Page: +X-Per-Page: 1 +X-Runtime: 0.069736 +Link: <https://gitlab.com/api/v4/projects/61363672/merge_requests/1/award_emoji?id=61363672&merge_request_iid=1&page=1&per_page=1>; rel="first", <https://gitlab.com/api/v4/projects/61363672/merge_requests/1/award_emoji?id=61363672&merge_request_iid=1&page=2&per_page=1>; rel="last" +X-Total-Pages: 2 +X-Content-Type-Options: nosniff +X-Gitlab-Meta: {"correlation_id":"4a199f75df6e91c7bb25ce7f0ae5ba87","version":"1"} +Cf-Cache-Status: MISS +Strict-Transport-Security: max-age=31536000 +Referrer-Policy: strict-origin-when-cross-origin +X-Prev-Page: +Content-Type: application/json +Etag: W/"4f53cda18c2baa0c0354bb5f9a3ecbe5" +Set-Cookie: _cfuvid=LKsdyXLErarfZPBo25O7PYiKWcvrF92MfU4i57.1wVw-1725394800092-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +Content-Security-Policy: default-src 'none' +Accept-Ranges: bytes +X-Frame-Options: SAMEORIGIN +Gitlab-Lb: haproxy-main-12-lb-gprd +Gitlab-Sv: api-gke-us-east1-b +Cache-Control: max-age=0, private, must-revalidate +Vary: Origin, Accept-Encoding +X-Page: 3 +X-Total: 2 + +[]
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F2%2Fapprovals b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F2%2Fapprovals new file mode 100644 index 0000000..8025baa --- /dev/null +++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%2F2%2Fapprovals @@ -0,0 +1,16 @@ +Content-Type: application/json +Cache-Control: no-cache +X-Runtime: 0.050861 +Cf-Cache-Status: MISS +Content-Length: 27 +Strict-Transport-Security: max-age=31536000 +X-Content-Type-Options: nosniff +Set-Cookie: _cfuvid=dOl9pLwVdWdrfHK2_lQ8ilTg21PZJf8ErnJ6hi2V6LQ-1725394529656-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +Content-Security-Policy: default-src 'none' +X-Gitlab-Meta: {"correlation_id":"8b1408168090614939be8b301aaf8ec1","version":"1"} +Referrer-Policy: strict-origin-when-cross-origin +Gitlab-Lb: haproxy-main-42-lb-gprd +Vary: Origin, Accept-Encoding +Gitlab-Sv: api-gke-us-east1-b + +{"message":"404 Not found"}
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%3Fpage=1&per_page=1&view=simple b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%3Fpage=1&per_page=1&view=simple new file mode 100644 index 0000000..1ad6255 --- /dev/null +++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmerge_requests%3Fpage=1&per_page=1&view=simple @@ -0,0 +1,24 @@ +Content-Security-Policy: default-src 'none' +X-Prev-Page: +Set-Cookie: _cfuvid=7GL5tIuTakQp9CVUUSpwUwMYssAGhn7PgI8tTqNnmz0-1725394798686-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +X-Gitlab-Meta: {"correlation_id":"7b65fd9c80614af0ef38989ba51e5c29","version":"1"} +Gitlab-Lb: haproxy-main-30-lb-gprd +Etag: W/"8a9c7ac19d2c07896e0e68bc7725d52c" +X-Content-Type-Options: nosniff +Strict-Transport-Security: max-age=31536000 +Gitlab-Sv: api-gke-us-east1-b +X-Page: 1 +X-Total: 1 +Cache-Control: max-age=0, private, must-revalidate +Link: <https://gitlab.com/api/v4/projects/61363672/merge_requests?id=61363672&order_by=created_at&page=1&per_page=1&sort=desc&state=all&view=simple&with_labels_details=false&with_merge_status_recheck=false>; rel="first", <https://gitlab.com/api/v4/projects/61363672/merge_requests?id=61363672&order_by=created_at&page=1&per_page=1&sort=desc&state=all&view=simple&with_labels_details=false&with_merge_status_recheck=false>; rel="last" +X-Per-Page: 1 +Referrer-Policy: strict-origin-when-cross-origin +Cf-Cache-Status: MISS +Content-Type: application/json +X-Total-Pages: 1 +Vary: Origin, Accept-Encoding +X-Frame-Options: SAMEORIGIN +X-Runtime: 0.123283 +X-Next-Page: + +[{"id":324657888,"iid":1,"project_id":61363672,"title":"Test branch","description":"do not merge this 
PR","state":"opened","created_at":"2024-09-03T07:57:19.866Z","updated_at":"2024-09-03T18:50:21.065Z","web_url":"https://gitlab.com/forgejo/test_repo/-/merge_requests/1"}]
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmilestones%3Fpage=1&per_page=100&state=all b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmilestones%3Fpage=1&per_page=100&state=all new file mode 100644 index 0000000..4795569 --- /dev/null +++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Fmilestones%3Fpage=1&per_page=100&state=all @@ -0,0 +1,24 @@ +X-Total: 2 +Set-Cookie: _cfuvid=uwwcVHMnVqsf5dOVdmePMl8w9SEvmr1muvo7QttWeKI-1725394794295-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +Content-Security-Policy: default-src 'none' +Etag: W/"a42f286b703ec341ad7f117b273a51ad" +Link: <https://gitlab.com/api/v4/projects/61363672/milestones?id=61363672&include_ancestors=false&page=1&per_page=100&state=all>; rel="first", <https://gitlab.com/api/v4/projects/61363672/milestones?id=61363672&include_ancestors=false&page=1&per_page=100&state=all>; rel="last" +Vary: Origin, Accept-Encoding +X-Content-Type-Options: nosniff +X-Gitlab-Meta: {"correlation_id":"ed978cae0ea2bf9ac4b1f46fddfdf982","version":"1"} +X-Per-Page: 100 +Cache-Control: max-age=0, private, must-revalidate +Cf-Cache-Status: MISS +Content-Type: application/json +X-Next-Page: +X-Page: 1 +Strict-Transport-Security: max-age=31536000 +Gitlab-Sv: api-gke-us-east1-c +X-Frame-Options: SAMEORIGIN +X-Prev-Page: +Referrer-Policy: strict-origin-when-cross-origin +Gitlab-Lb: haproxy-main-34-lb-gprd +X-Runtime: 0.069266 +X-Total-Pages: 1 + 
+[{"id":4711993,"iid":2,"project_id":61363672,"title":"1.0.0","description":"","state":"closed","created_at":"2024-09-03T13:53:08.516Z","updated_at":"2024-09-03T20:03:57.786Z","due_date":null,"start_date":null,"expired":false,"web_url":"https://gitlab.com/forgejo/test_repo/-/milestones/2"},{"id":4711991,"iid":1,"project_id":61363672,"title":"1.1.0","description":"","state":"active","created_at":"2024-09-03T13:52:48.414Z","updated_at":"2024-09-03T14:52:14.093Z","due_date":null,"start_date":null,"expired":false,"web_url":"https://gitlab.com/forgejo/test_repo/-/milestones/1"}]
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Freleases%3Fpage=1&per_page=100 b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Freleases%3Fpage=1&per_page=100 new file mode 100644 index 0000000..e0dcec2 --- /dev/null +++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2F61363672%2Freleases%3Fpage=1&per_page=100 @@ -0,0 +1,24 @@ +X-Total-Pages: 1 +Referrer-Policy: strict-origin-when-cross-origin +X-Total: 1 +X-Frame-Options: SAMEORIGIN +X-Prev-Page: +X-Content-Type-Options: nosniff +Strict-Transport-Security: max-age=31536000 +Link: <https://gitlab.com/api/v4/projects/61363672/releases?id=61363672&order_by=released_at&page=1&per_page=100&sort=desc>; rel="first", <https://gitlab.com/api/v4/projects/61363672/releases?id=61363672&order_by=released_at&page=1&per_page=100&sort=desc>; rel="last" +Vary: Origin, Accept-Encoding +X-Per-Page: 100 +Set-Cookie: _cfuvid=oZA4jh0EzL5.ONTRYvxi4IryznOCXhUFgv3_ILSeCaA-1725394795215-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +Cache-Control: max-age=0, private, must-revalidate +X-Next-Page: +Gitlab-Sv: api-gke-us-east1-c +Cf-Cache-Status: MISS +X-Gitlab-Meta: {"correlation_id":"3ddca8834bb2582c7864327265a18732","version":"1"} +Gitlab-Lb: haproxy-main-37-lb-gprd +Etag: W/"0dca592238578abf637a888d6aa33e06" +X-Page: 1 +X-Runtime: 0.099990 +Content-Type: application/json +Content-Security-Policy: default-src 'none' + +[{"name":"First Release","tag_name":"v0.9.99","description":"A test release","created_at":"2024-09-03T15:01:01.513Z","released_at":"2024-09-03T15:01:01.000Z","upcoming_release":false,"author":{"id":548513,"username":"mkobel","name":"Moritz 
Kobel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/eae1be6324682816aedc885acbf5858719b40956e0278edabe5c0db7cbc95f3b?s=80\u0026d=identicon","web_url":"https://gitlab.com/mkobel"},"commit":{"id":"0720a3ec57c1f843568298117b874319e7deee75","short_id":"0720a3ec","created_at":"2019-11-28T08:49:16.000+00:00","parent_ids":["93ea21ce45d35690c35e80961d239645139e872c"],"title":"Add new file","message":"Add new file","author_name":"Lauris BH","author_email":"lauris@nix.lv","authored_date":"2019-11-28T08:49:16.000+00:00","committer_name":"Lauris BH","committer_email":"lauris@nix.lv","committed_date":"2019-11-28T08:49:16.000+00:00","trailers":{},"extended_trailers":{},"web_url":"https://gitlab.com/forgejo/test_repo/-/commit/0720a3ec57c1f843568298117b874319e7deee75"},"commit_path":"/forgejo/test_repo/-/commit/0720a3ec57c1f843568298117b874319e7deee75","tag_path":"/forgejo/test_repo/-/tags/v0.9.99","assets":{"count":4,"sources":[{"format":"zip","url":"https://gitlab.com/forgejo/test_repo/-/archive/v0.9.99/test_repo-v0.9.99.zip"},{"format":"tar.gz","url":"https://gitlab.com/forgejo/test_repo/-/archive/v0.9.99/test_repo-v0.9.99.tar.gz"},{"format":"tar.bz2","url":"https://gitlab.com/forgejo/test_repo/-/archive/v0.9.99/test_repo-v0.9.99.tar.bz2"},{"format":"tar","url":"https://gitlab.com/forgejo/test_repo/-/archive/v0.9.99/test_repo-v0.9.99.tar"}],"links":[]},"evidences":[{"sha":"e30c1d21d05ff0c73436ee1e97b3ef12a1d6d33d0dcd","filepath":"https://gitlab.com/forgejo/test_repo/-/releases/v0.9.99/evidences/9608487.json","collected_at":"2024-09-03T15:01:02.963Z"}],"_links":{"closed_issues_url":"https://gitlab.com/forgejo/test_repo/-/issues?release_tag=v0.9.99\u0026scope=all\u0026state=closed","closed_merge_requests_url":"https://gitlab.com/forgejo/test_repo/-/merge_requests?release_tag=v0.9.99\u0026scope=all\u0026state=closed","edit_url":"https://gitlab.com/forgejo/test_repo/-/releases/v0.9.99/edit","merged_merge_requests_url":"https://gitlab.com/forgejo/t
est_repo/-/merge_requests?release_tag=v0.9.99\u0026scope=all\u0026state=merged","opened_issues_url":"https://gitlab.com/forgejo/test_repo/-/issues?release_tag=v0.9.99\u0026scope=all\u0026state=opened","opened_merge_requests_url":"https://gitlab.com/forgejo/test_repo/-/merge_requests?release_tag=v0.9.99\u0026scope=all\u0026state=opened","self":"https://gitlab.com/forgejo/test_repo/-/releases/v0.9.99"}}]
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2Fforgejo%252Ftest_repo b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2Fforgejo%252Ftest_repo new file mode 100644 index 0000000..53c925a --- /dev/null +++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2Fforgejo%252Ftest_repo @@ -0,0 +1,17 @@ +Content-Security-Policy: default-src 'none' +Etag: W/"b36bd4522b7e8b2509078271491fb972" +X-Runtime: 0.182246 +Set-Cookie: _cfuvid=wk6gVgcAYZqUygBPZ8pK6j22vOlbZuagLq74bgkySCs-1725394793303-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +Content-Type: application/json +X-Content-Type-Options: nosniff +Gitlab-Sv: api-gke-us-east1-c +Gitlab-Lb: haproxy-main-58-lb-gprd +Cache-Control: max-age=0, private, must-revalidate +Strict-Transport-Security: max-age=31536000 +Referrer-Policy: strict-origin-when-cross-origin +Vary: Origin, Accept-Encoding +X-Frame-Options: SAMEORIGIN +X-Gitlab-Meta: {"correlation_id":"43c0c955821005b625f1707ecac8d4d8","version":"1"} +Cf-Cache-Status: MISS + +{"id":61363672,"description":"Test repository for testing migration from gitlab to forgejo","name":"test_repo","name_with_namespace":"Forgejo / 
test_repo","path":"test_repo","path_with_namespace":"forgejo/test_repo","created_at":"2024-09-03T07:44:30.668Z","default_branch":"master","tag_list":["migration","test"],"topics":["migration","test"],"ssh_url_to_repo":"git@gitlab.com:forgejo/test_repo.git","http_url_to_repo":"https://gitlab.com/forgejo/test_repo.git","web_url":"https://gitlab.com/forgejo/test_repo","readme_url":"https://gitlab.com/forgejo/test_repo/-/blob/master/README.md","forks_count":0,"avatar_url":null,"star_count":0,"last_activity_at":"2024-09-03T20:03:18.187Z","namespace":{"id":64459497,"name":"Forgejo","path":"forgejo","kind":"group","full_path":"forgejo","parent_id":null,"avatar_url":"/uploads/-/system/group/avatar/64459497/73144-c883a242dec5299fbc06bbe3ee71d8c6.png","web_url":"https://gitlab.com/groups/forgejo"},"forked_from_project":{"id":15578026,"description":"Test repository for testing migration from gitlab to gitea","name":"test_repo","name_with_namespace":"gitea / test_repo","path":"test_repo","path_with_namespace":"gitea/test_repo","created_at":"2019-11-28T08:20:33.019Z","default_branch":"master","tag_list":["migration","test"],"topics":["migration","test"],"ssh_url_to_repo":"git@gitlab.com:gitea/test_repo.git","http_url_to_repo":"https://gitlab.com/gitea/test_repo.git","web_url":"https://gitlab.com/gitea/test_repo","readme_url":"https://gitlab.com/gitea/test_repo/-/blob/master/README.md","forks_count":2,"avatar_url":null,"star_count":0,"last_activity_at":"2024-09-03T07:52:28.488Z","namespace":{"id":3181312,"name":"gitea","path":"gitea","kind":"group","full_path":"gitea","parent_id":null,"avatar_url":"/uploads/-/system/group/avatar/3181312/gitea.png","web_url":"https://gitlab.com/groups/gitea"}},"container_registry_image_prefix":"registry.gitlab.com/forgejo/test_repo","_links":{"self":"https://gitlab.com/api/v4/projects/61363672","issues":"https://gitlab.com/api/v4/projects/61363672/issues","merge_requests":"https://gitlab.com/api/v4/projects/61363672/merge_requests","repo_branches"
:"https://gitlab.com/api/v4/projects/61363672/repository/branches","labels":"https://gitlab.com/api/v4/projects/61363672/labels","events":"https://gitlab.com/api/v4/projects/61363672/events","members":"https://gitlab.com/api/v4/projects/61363672/members","cluster_agents":"https://gitlab.com/api/v4/projects/61363672/cluster_agents"},"packages_enabled":true,"empty_repo":false,"archived":false,"visibility":"public","resolve_outdated_diff_discussions":false,"container_expiration_policy":{"cadence":"1d","enabled":false,"keep_n":10,"older_than":"90d","name_regex":".*","name_regex_keep":null,"next_run_at":"2024-09-04T07:44:30.699Z"},"repository_object_format":"sha1","issues_enabled":true,"merge_requests_enabled":true,"wiki_enabled":true,"jobs_enabled":true,"snippets_enabled":true,"container_registry_enabled":true,"service_desk_enabled":true,"service_desk_address":"contact-project+forgejo-test-repo-61363672-issue-@incoming.gitlab.com","can_create_merge_request_in":true,"issues_access_level":"enabled","repository_access_level":"enabled","merge_requests_access_level":"enabled","forking_access_level":"enabled","wiki_access_level":"enabled","builds_access_level":"enabled","snippets_access_level":"enabled","pages_access_level":"enabled","analytics_access_level":"enabled","container_registry_access_level":"enabled","security_and_compliance_access_level":"private","releases_access_level":"enabled","environments_access_level":"enabled","feature_flags_access_level":"enabled","infrastructure_access_level":"enabled","monitor_access_level":"enabled","model_experiments_access_level":"enabled","model_registry_access_level":"enabled","emails_disabled":false,"emails_enabled":true,"shared_runners_enabled":true,"lfs_enabled":true,"creator_id":2005797,"mr_default_target_self":false,"import_url":null,"import_type":null,"import_status":"finished","import_error":null,"open_issues_count":0,"description_html":"\u003cp data-sourcepos=\"1:1-1:60\" dir=\"auto\"\u003eTest repository for testing 
migration from gitlab to forgejo\u003c/p\u003e","updated_at":"2024-09-03T20:03:18.187Z","ci_default_git_depth":50,"ci_forward_deployment_enabled":true,"ci_forward_deployment_rollback_allowed":true,"ci_job_token_scope_enabled":false,"ci_separated_caches":true,"ci_allow_fork_pipelines_to_run_in_parent_project":true,"ci_id_token_sub_claim_components":["project_path","ref_type","ref"],"build_git_strategy":"fetch","keep_latest_artifact":true,"restrict_user_defined_variables":false,"ci_pipeline_variables_minimum_override_role":"maintainer","runners_token":null,"runner_token_expiration_interval":null,"group_runners_enabled":true,"auto_cancel_pending_pipelines":"enabled","build_timeout":3600,"auto_devops_enabled":false,"auto_devops_deploy_strategy":"continuous","ci_push_repository_for_job_token_allowed":false,"ci_config_path":null,"public_jobs":true,"shared_with_groups":[],"only_allow_merge_if_pipeline_succeeds":false,"allow_merge_on_skipped_pipeline":null,"request_access_enabled":true,"only_allow_merge_if_all_discussions_are_resolved":false,"remove_source_branch_after_merge":true,"printing_merge_request_link_enabled":true,"merge_method":"merge","squash_option":"default_off","enforce_auth_checks_on_uploads":true,"suggestion_commit_message":null,"merge_commit_template":null,"squash_commit_template":null,"issue_branch_template":null,"warn_about_potentially_unwanted_characters":true,"autoclose_referenced_issues":true,"external_authorization_classification_label":"","requirements_enabled":false,"requirements_access_level":"enabled","security_and_compliance_enabled":true,"pre_receive_secret_detection_enabled":false,"compliance_frameworks":[],"permissions":{"project_access":{"access_level":40,"notification_level":3},"group_access":null}}
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2Fgitea%252Ftest_repo b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2Fgitea%252Ftest_repo new file mode 100644 index 0000000..96f1ea8 --- /dev/null +++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fprojects%2Fgitea%252Ftest_repo @@ -0,0 +1,17 @@ +Referrer-Policy: strict-origin-when-cross-origin +Gitlab-Lb: haproxy-main-51-lb-gprd +Cf-Cache-Status: MISS +Etag: W/"8db4917b3be5f4ca0d101a702179b75a" +X-Content-Type-Options: nosniff +Strict-Transport-Security: max-age=31536000 +Gitlab-Sv: api-gke-us-east1-b +Content-Type: application/json +Cache-Control: max-age=0, private, must-revalidate +X-Gitlab-Meta: {"correlation_id":"9b3859cf6d73ce5de261a56d286072a5","version":"1"} +X-Runtime: 0.119487 +Content-Security-Policy: default-src 'none' +Vary: Origin, Accept-Encoding +Set-Cookie: _cfuvid=Cmc.ycVkdwA_tBvmR2tOVLQ5B.khzzU39ZUxgf4RNlw-1710504204838-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +X-Frame-Options: SAMEORIGIN + +{"id":15578026,"description":"Test repository for testing migration from gitlab to gitea","name":"test_repo","name_with_namespace":"gitea / 
test_repo","path":"test_repo","path_with_namespace":"gitea/test_repo","created_at":"2019-11-28T08:20:33.019Z","default_branch":"master","tag_list":["migration","test"],"topics":["migration","test"],"ssh_url_to_repo":"git@gitlab.com:gitea/test_repo.git","http_url_to_repo":"https://gitlab.com/gitea/test_repo.git","web_url":"https://gitlab.com/gitea/test_repo","readme_url":"https://gitlab.com/gitea/test_repo/-/blob/master/README.md","forks_count":1,"avatar_url":null,"star_count":0,"last_activity_at":"2020-04-19T19:46:04.527Z","namespace":{"id":3181312,"name":"gitea","path":"gitea","kind":"group","full_path":"gitea","parent_id":null,"avatar_url":"/uploads/-/system/group/avatar/3181312/gitea.png","web_url":"https://gitlab.com/groups/gitea"},"container_registry_image_prefix":"registry.gitlab.com/gitea/test_repo","_links":{"self":"https://gitlab.com/api/v4/projects/15578026","issues":"https://gitlab.com/api/v4/projects/15578026/issues","merge_requests":"https://gitlab.com/api/v4/projects/15578026/merge_requests","repo_branches":"https://gitlab.com/api/v4/projects/15578026/repository/branches","labels":"https://gitlab.com/api/v4/projects/15578026/labels","events":"https://gitlab.com/api/v4/projects/15578026/events","members":"https://gitlab.com/api/v4/projects/15578026/members","cluster_agents":"https://gitlab.com/api/v4/projects/15578026/cluster_agents"},"packages_enabled":true,"empty_repo":false,"archived":false,"visibility":"public","resolve_outdated_diff_discussions":false,"repository_object_format":"sha1","issues_enabled":true,"merge_requests_enabled":true,"wiki_enabled":true,"jobs_enabled":true,"snippets_enabled":true,"container_registry_enabled":true,"service_desk_enabled":true,"can_create_merge_request_in":true,"issues_access_level":"enabled","repository_access_level":"enabled","merge_requests_access_level":"enabled","forking_access_level":"enabled","wiki_access_level":"enabled","builds_access_level":"enabled","snippets_access_level":"enabled","pages_access_level":"
enabled","analytics_access_level":"enabled","container_registry_access_level":"enabled","security_and_compliance_access_level":"private","releases_access_level":"enabled","environments_access_level":"enabled","feature_flags_access_level":"enabled","infrastructure_access_level":"enabled","monitor_access_level":"enabled","model_experiments_access_level":"enabled","model_registry_access_level":"enabled","emails_disabled":false,"emails_enabled":true,"shared_runners_enabled":true,"lfs_enabled":true,"creator_id":1241334,"import_status":"none","open_issues_count":0,"description_html":"\u003cp data-sourcepos=\"1:1-1:58\" dir=\"auto\"\u003eTest repository for testing migration from gitlab to gitea\u003c/p\u003e","updated_at":"2024-01-11T01:23:21.057Z","ci_config_path":null,"public_jobs":true,"shared_with_groups":[],"only_allow_merge_if_pipeline_succeeds":false,"allow_merge_on_skipped_pipeline":null,"request_access_enabled":true,"only_allow_merge_if_all_discussions_are_resolved":false,"remove_source_branch_after_merge":true,"printing_merge_request_link_enabled":true,"merge_method":"ff","squash_option":"default_off","enforce_auth_checks_on_uploads":true,"suggestion_commit_message":null,"merge_commit_template":null,"squash_commit_template":null,"issue_branch_template":null,"warn_about_potentially_unwanted_characters":true,"autoclose_referenced_issues":true,"external_authorization_classification_label":"","requirements_enabled":false,"requirements_access_level":"enabled","security_and_compliance_enabled":false,"compliance_frameworks":[],"permissions":{"project_access":null,"group_access":null}}
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fversion b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fversion new file mode 100644 index 0000000..8b3dd5b --- /dev/null +++ b/services/migrations/testdata/gitlab/full_download/GET_%2Fapi%2Fv4%2Fversion @@ -0,0 +1,17 @@ +Content-Type: application/json +Cache-Control: max-age=0, private, must-revalidate +Etag: W/"a27b6b3c661f4ee7a68e5b905f5291fb" +Vary: Origin, Accept-Encoding +X-Gitlab-Meta: {"correlation_id":"10488cc696aabdc48229039f2c9e4ebd","version":"1"} +Gitlab-Sv: api-gke-us-east1-d +Cf-Cache-Status: MISS +Strict-Transport-Security: max-age=31536000 +X-Frame-Options: SAMEORIGIN +X-Runtime: 0.034189 +Referrer-Policy: strict-origin-when-cross-origin +Set-Cookie: _cfuvid=hbFjaLVJudhzz6Sqg5QnViD.eikToNruD.b1oEG5xrc-1725394792940-0.0.1.1-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +Content-Security-Policy: default-src 'none' +Gitlab-Lb: haproxy-main-56-lb-gprd +X-Content-Type-Options: nosniff + +{"version":"17.4.0-pre","revision":"8c6dcc9e627","kas":{"enabled":true,"externalUrl":"wss://kas.gitlab.com","version":"17.4.0+a2ca345cd681ef39094623d8f4b6ed65996de57d"},"enterprise":true}
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996 b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996 new file mode 100644 index 0000000..db8d596 --- /dev/null +++ b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996 @@ -0,0 +1,22 @@ +X-Runtime: 0.088022 +Strict-Transport-Security: max-age=31536000 +Ratelimit-Observed: 3 +Cache-Control: max-age=0, private, must-revalidate +Etag: W/"03ce4f6ce1c1e8c5a31df8a44cf2fbdd" +Gitlab-Lb: haproxy-main-11-lb-gprd +Content-Security-Policy: default-src 'none' +Ratelimit-Limit: 2000 +X-Gitlab-Meta: {"correlation_id":"b57b226f741f9140a1fea54f65cb5cfd","version":"1"} +Referrer-Policy: strict-origin-when-cross-origin +Ratelimit-Remaining: 1997 +Ratelimit-Resettime: Thu, 30 Nov 2023 08:24:53 GMT +Set-Cookie: _cfuvid=V0ToiOTUW0XbtWq7BirwVNfL1_YP1POMrLBnDSEWS0M-1701332633965-0-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +Gitlab-Sv: localhost +Content-Type: application/json +Vary: Origin, Accept-Encoding +Ratelimit-Reset: 1701332693 +Cf-Cache-Status: MISS + +{"id":6590996,"description":"Arch packaging and build files","name":"archbuild","name_with_namespace":"Troy Engel / archbuild","path":"archbuild","path_with_namespace":"troyengel/archbuild","created_at":"2018-06-03T22:53:17.388Z","default_branch":"master","tag_list":[],"topics":[],"ssh_url_to_repo":"git@gitlab.com:troyengel/archbuild.git","http_url_to_repo":"https://gitlab.com/troyengel/archbuild.git","web_url":"https://gitlab.com/troyengel/archbuild","readme_url":"https://gitlab.com/troyengel/archbuild/-/blob/master/README.md","forks_count":0,"avatar_url":null,"star_count":0,"last_activity_at":"2020-12-13T18:09:32.071Z","namespace":{"id":1452515,"name":"Troy 
Engel","path":"troyengel","kind":"user","full_path":"troyengel","parent_id":null,"avatar_url":"https://secure.gravatar.com/avatar/b226c267929f1bcfcc446e75a025591c?s=80\u0026d=identicon","web_url":"https://gitlab.com/troyengel"},"container_registry_image_prefix":"registry.gitlab.com/troyengel/archbuild","_links":{"self":"https://gitlab.com/api/v4/projects/6590996","issues":"https://gitlab.com/api/v4/projects/6590996/issues","merge_requests":"https://gitlab.com/api/v4/projects/6590996/merge_requests","repo_branches":"https://gitlab.com/api/v4/projects/6590996/repository/branches","labels":"https://gitlab.com/api/v4/projects/6590996/labels","events":"https://gitlab.com/api/v4/projects/6590996/events","members":"https://gitlab.com/api/v4/projects/6590996/members","cluster_agents":"https://gitlab.com/api/v4/projects/6590996/cluster_agents"},"packages_enabled":null,"empty_repo":false,"archived":true,"visibility":"public","owner":{"id":1215848,"username":"troyengel","name":"Troy Engel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/b226c267929f1bcfcc446e75a025591c?s=80\u0026d=identicon","web_url":"https://gitlab.com/troyengel"},"resolve_outdated_diff_discussions":false,"issues_enabled":true,"merge_requests_enabled":true,"wiki_enabled":true,"jobs_enabled":true,"snippets_enabled":true,"container_registry_enabled":true,"service_desk_enabled":true,"can_create_merge_request_in":false,"issues_access_level":"enabled","repository_access_level":"enabled","merge_requests_access_level":"enabled","forking_access_level":"enabled","wiki_access_level":"enabled","builds_access_level":"enabled","snippets_access_level":"enabled","pages_access_level":"enabled","analytics_access_level":"enabled","container_registry_access_level":"enabled","security_and_compliance_access_level":"private","releases_access_level":"enabled","environments_access_level":"enabled","feature_flags_access_level":"enabled","infrastructure_access_level":"enabled","monitor_access_level":"
enabled","model_experiments_access_level":"enabled","emails_disabled":false,"emails_enabled":true,"shared_runners_enabled":true,"lfs_enabled":false,"creator_id":1215848,"import_status":"finished","open_issues_count":0,"description_html":"\u003cp data-sourcepos=\"1:1-1:30\" dir=\"auto\"\u003eArch packaging and build files\u003c/p\u003e","updated_at":"2022-07-13T21:32:12.624Z","ci_config_path":null,"public_jobs":true,"shared_with_groups":[],"only_allow_merge_if_pipeline_succeeds":false,"allow_merge_on_skipped_pipeline":null,"request_access_enabled":false,"only_allow_merge_if_all_discussions_are_resolved":false,"remove_source_branch_after_merge":null,"printing_merge_request_link_enabled":true,"merge_method":"merge","squash_option":"default_off","enforce_auth_checks_on_uploads":true,"suggestion_commit_message":null,"merge_commit_template":null,"squash_commit_template":null,"issue_branch_template":null,"autoclose_referenced_issues":true,"external_authorization_classification_label":"","requirements_enabled":false,"requirements_access_level":"enabled","security_and_compliance_enabled":false,"compliance_frameworks":[],"permissions":{"project_access":null,"group_access":null}}
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fissues%2F2%2Faward_emoji%3Fpage=1&per_page=10 b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fissues%2F2%2Faward_emoji%3Fpage=1&per_page=10 new file mode 100644 index 0000000..8f829d0 --- /dev/null +++ b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fissues%2F2%2Faward_emoji%3Fpage=1&per_page=10 @@ -0,0 +1,31 @@ +Gitlab-Sv: localhost +X-Content-Type-Options: nosniff +Gitlab-Lb: haproxy-main-25-lb-gprd +X-Total-Pages: 1 +Referrer-Policy: strict-origin-when-cross-origin +Ratelimit-Observed: 5 +Ratelimit-Remaining: 1995 +Content-Security-Policy: default-src 'none' +X-Gitlab-Meta: {"correlation_id":"eeab46d836341bd4cb18e3d2e82abf97","version":"1"} +Ratelimit-Limit: 2000 +Accept-Ranges: bytes +Content-Type: application/json +X-Page: 1 +X-Frame-Options: SAMEORIGIN +X-Prev-Page: +Cf-Cache-Status: MISS +X-Total: 0 +Ratelimit-Resettime: Thu, 30 Nov 2023 08:24:54 GMT +Link: <https://gitlab.com/api/v4/projects/6590996/issues/2/award_emoji?id=6590996&issue_iid=2&page=1&per_page=10>; rel="first", <https://gitlab.com/api/v4/projects/6590996/issues/2/award_emoji?id=6590996&issue_iid=2&page=1&per_page=10>; rel="last" +X-Per-Page: 10 +Set-Cookie: _cfuvid=c5HuTPxOuSXdHSuVrXQALS.uV7WvAYfc5Mc_143EAB8-1701332634513-0-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +Content-Length: 2 +Vary: Origin, Accept-Encoding +Cache-Control: max-age=0, private, must-revalidate +Etag: W/"4f53cda18c2baa0c0354bb5f9a3ecbe5" +X-Runtime: 0.069269 +Strict-Transport-Security: max-age=31536000 +Ratelimit-Reset: 1701332694 +X-Next-Page: + +[]
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fissues%3Fpage=1&per_page=10&sort=asc&state=all b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fissues%3Fpage=1&per_page=10&sort=asc&state=all new file mode 100644 index 0000000..99133d5 --- /dev/null +++ b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fissues%3Fpage=1&per_page=10&sort=asc&state=all @@ -0,0 +1,29 @@ +Link: <https://gitlab.com/api/v4/projects/6590996/issues?id=6590996&order_by=created_at&page=1&per_page=10&sort=asc&state=all&with_labels_details=false>; rel="first", <https://gitlab.com/api/v4/projects/6590996/issues?id=6590996&order_by=created_at&page=1&per_page=10&sort=asc&state=all&with_labels_details=false>; rel="last" +Ratelimit-Observed: 4 +Ratelimit-Remaining: 1996 +Gitlab-Lb: haproxy-main-04-lb-gprd +Vary: Origin, Accept-Encoding +Content-Security-Policy: default-src 'none' +X-Next-Page: +Ratelimit-Reset: 1701332694 +Etag: W/"f50a70d0fc1465a289d231f80806ced7" +X-Gitlab-Meta: {"correlation_id":"47afd74254dd7946d2b2bded87448c60","version":"1"} +X-Page: 1 +X-Prev-Page: +Referrer-Policy: strict-origin-when-cross-origin +Ratelimit-Resettime: Thu, 30 Nov 2023 08:24:54 GMT +Cf-Cache-Status: MISS +X-Total: 1 +X-Total-Pages: 1 +Strict-Transport-Security: max-age=31536000 +Content-Type: application/json +X-Frame-Options: SAMEORIGIN +Ratelimit-Limit: 2000 +Gitlab-Sv: localhost +Set-Cookie: _cfuvid=YDWTZ5VoSuLBDZgKsBnXMyYxz.0rHJ9TBYXv5zBj24Q-1701332634294-0-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +Cache-Control: max-age=0, private, must-revalidate +X-Content-Type-Options: nosniff +X-Per-Page: 10 +X-Runtime: 0.179458 + +[{"id":11201348,"iid":2,"project_id":6590996,"title":"vpn unlimited errors","description":"updated version to 2.8.0, build and tried running `vpnu-arch`:\n\n```\nvpn-unlimited: 
/usr/lib/libcurl.so.3: no version information available (required by /usr/lib/libvpnu_rpc.so.1)\nvpn-unlimited: /usr/lib/libssl.so.1.0.0: no version information available (required by /usr/lib/libvpnu_enc.so.1)\nvpn-unlimited: symbol lookup error: /usr/lib/libvpnu_rpc.so.1: undefined symbol: _ZNK4Json5Value8asStringEv\n```\n","state":"closed","created_at":"2016-03-26T16:41:12.000Z","updated_at":"2016-03-27T12:19:27.000Z","closed_at":null,"closed_by":null,"labels":[],"milestone":null,"assignees":[],"author":{"id":10273,"username":"brauliobo","name":"BrĂ¡ulio Bhavamitra","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/cd3fcb7a417c8acb989fc320b604a2a8?s=80\u0026d=identicon","web_url":"https://gitlab.com/brauliobo"},"type":"ISSUE","assignee":null,"user_notes_count":1,"merge_requests_count":0,"upvotes":0,"downvotes":0,"due_date":null,"confidential":false,"discussion_locked":null,"issue_type":"issue","web_url":"https://gitlab.com/troyengel/archbuild/-/issues/2","time_stats":{"time_estimate":0,"total_time_spent":0,"human_time_estimate":null,"human_total_time_spent":null},"task_completion_status":{"count":0,"completed_count":0},"blocking_issues_count":0,"has_tasks":true,"task_status":"0 of 0 checklist items completed","_links":{"self":"https://gitlab.com/api/v4/projects/6590996/issues/2","notes":"https://gitlab.com/api/v4/projects/6590996/issues/2/notes","award_emoji":"https://gitlab.com/api/v4/projects/6590996/issues/2/award_emoji","project":"https://gitlab.com/api/v4/projects/6590996","closed_as_duplicate_of":null},"references":{"short":"#2","relative":"#2","full":"troyengel/archbuild#2"},"severity":"UNKNOWN","moved_to_id":null,"service_desk_reply_to":null}]
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fmerge_requests%2F1 b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fmerge_requests%2F1 new file mode 100644 index 0000000..18e8a85 --- /dev/null +++ b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fmerge_requests%2F1 @@ -0,0 +1,22 @@ +Ratelimit-Observed: 7 +Set-Cookie: _cfuvid=_b9GQEo3CBPMs9QmGE89dBdOmbSTfnYjZlzValULQPs-1701332635000-0-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +Strict-Transport-Security: max-age=31536000 +Ratelimit-Resettime: Thu, 30 Nov 2023 08:24:54 GMT +Gitlab-Lb: haproxy-main-50-lb-gprd +Gitlab-Sv: localhost +X-Gitlab-Meta: {"correlation_id":"da44cd0303a4e62cc52ed8de3b2adf14","version":"1"} +Referrer-Policy: strict-origin-when-cross-origin +Ratelimit-Remaining: 1993 +Etag: W/"f6299e7e884cb8df8109256c086eb4e7" +X-Runtime: 0.107573 +Content-Type: application/json +Ratelimit-Reset: 1701332694 +X-Frame-Options: SAMEORIGIN +Cache-Control: max-age=0, private, must-revalidate +X-Content-Type-Options: nosniff +Ratelimit-Limit: 2000 +Cf-Cache-Status: MISS +Content-Security-Policy: default-src 'none' +Vary: Origin, Accept-Encoding + +{"id":10518914,"iid":1,"project_id":6590996,"title":"Review","description":"*Created by: cgtx*\n\n### remove patch from makedepends\n- patch is in base-devel\n- The group base-devel is assumed to be already installed when building with makepkg. Members of \"base-devel\" should not be included in makedepends arrays.\n- https://wiki.archlinux.org/index.php/Pkgbuild#makedepends\n### remove python2 from makedepends\n- python2 is a dependency of python2-setuptools. 
It is redundant to list it again.\n- You do not need to list packages that your software depends on if other packages your software depends on already have those packages listed in their dependency.\n- https://wiki.archlinux.org/index.php/Pkgbuild#depends\n### more simple find/delete command\n- just because\n","state":"merged","created_at":"2014-12-12T15:01:32.000Z","updated_at":"2014-12-12T15:28:38.000Z","merged_by":null,"merge_user":null,"merged_at":null,"closed_by":null,"closed_at":null,"target_branch":"master","source_branch":"cgtx:review","user_notes_count":1,"upvotes":0,"downvotes":0,"author":{"id":1215848,"username":"troyengel","name":"Troy Engel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/b226c267929f1bcfcc446e75a025591c?s=80\u0026d=identicon","web_url":"https://gitlab.com/troyengel"},"assignees":[],"assignee":null,"reviewers":[],"source_project_id":6590996,"target_project_id":6590996,"labels":[],"draft":false,"work_in_progress":false,"milestone":null,"merge_when_pipeline_succeeds":false,"merge_status":"cannot_be_merged","detailed_merge_status":"not_open","sha":"9006fee398299beed8f5d5086f8e6008ffc02280","merge_commit_sha":null,"squash_commit_sha":null,"discussion_locked":null,"should_remove_source_branch":null,"force_remove_source_branch":null,"prepared_at":"2014-12-12T15:01:32.000Z","reference":"!1","references":{"short":"!1","relative":"!1","full":"troyengel/archbuild!1"},"web_url":"https://gitlab.com/troyengel/archbuild/-/merge_requests/1","time_stats":{"time_estimate":0,"total_time_spent":0,"human_time_estimate":null,"human_total_time_spent":null},"squash":false,"squash_on_merge":false,"task_completion_status":{"count":0,"completed_count":0},"has_conflicts":true,"blocking_discussions_resolved":true,"approvals_before_merge":null,"subscribed":false,"changes_count":"1","latest_build_started_at":null,"latest_build_finished_at":null,"first_deployed_to_production_at":null,"pipeline":null,"head_pipeline":null,"diff_refs":{"
base_sha":"6edcf8fc09f6c44213c892f5108d34a5255a47e1","head_sha":"9006fee398299beed8f5d5086f8e6008ffc02280","start_sha":"6edcf8fc09f6c44213c892f5108d34a5255a47e1"},"merge_error":null,"first_contribution":false,"user":{"can_merge":false}}
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=1&per_page=10 b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=1&per_page=10 new file mode 100644 index 0000000..d6f8dd4 --- /dev/null +++ b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fmerge_requests%2F1%2Faward_emoji%3Fpage=1&per_page=10 @@ -0,0 +1,31 @@ +Link: <https://gitlab.com/api/v4/projects/6590996/merge_requests/1/award_emoji?id=6590996&merge_request_iid=1&page=1&per_page=10>; rel="first", <https://gitlab.com/api/v4/projects/6590996/merge_requests/1/award_emoji?id=6590996&merge_request_iid=1&page=1&per_page=10>; rel="last" +Set-Cookie: _cfuvid=qK29tijoyp0AdVoHf9Lqjc8Y28h4jplJDW9hOFLfq28-1701332635229-0-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +Cache-Control: max-age=0, private, must-revalidate +Etag: W/"4f53cda18c2baa0c0354bb5f9a3ecbe5" +Ratelimit-Observed: 8 +Gitlab-Sv: localhost +Content-Length: 2 +Gitlab-Lb: haproxy-main-16-lb-gprd +X-Total: 0 +Ratelimit-Remaining: 1992 +Ratelimit-Reset: 1701332695 +Ratelimit-Limit: 2000 +Vary: Origin, Accept-Encoding +X-Frame-Options: SAMEORIGIN +Content-Type: application/json +X-Content-Type-Options: nosniff +X-Next-Page: +X-Page: 1 +Strict-Transport-Security: max-age=31536000 +Accept-Ranges: bytes +Content-Security-Policy: default-src 'none' +X-Per-Page: 10 +X-Total-Pages: 1 +Referrer-Policy: strict-origin-when-cross-origin +Ratelimit-Resettime: Thu, 30 Nov 2023 08:24:55 GMT +Cf-Cache-Status: MISS +X-Gitlab-Meta: {"correlation_id":"eb59d63fed23cdbec69308570cc49c3e","version":"1"} +X-Runtime: 0.065972 +X-Prev-Page: + +[]
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fmerge_requests%3Fpage=1&per_page=10&view=simple b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fmerge_requests%3Fpage=1&per_page=10&view=simple new file mode 100644 index 0000000..5339392 --- /dev/null +++ b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2F6590996%2Fmerge_requests%3Fpage=1&per_page=10&view=simple @@ -0,0 +1,29 @@ +Vary: Origin, Accept-Encoding +Strict-Transport-Security: max-age=31536000 +Gitlab-Sv: localhost +X-Content-Type-Options: nosniff +X-Prev-Page: +Ratelimit-Reset: 1701332694 +Cache-Control: max-age=0, private, must-revalidate +Ratelimit-Limit: 2000 +Referrer-Policy: strict-origin-when-cross-origin +Ratelimit-Observed: 6 +Ratelimit-Resettime: Thu, 30 Nov 2023 08:24:54 GMT +Cf-Cache-Status: MISS +Content-Type: application/json +Content-Security-Policy: default-src 'none' +Etag: W/"1a50811aa3cccb2e6a404a976422a83a" +X-Total: 1 +Ratelimit-Remaining: 1994 +Set-Cookie: _cfuvid=u.zumTkG1ayCnh_OwrT9Q1Fl3MXV9Gh98W.ma4WN2Xs-1701332634745-0-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +Link: <https://gitlab.com/api/v4/projects/6590996/merge_requests?id=6590996&order_by=created_at&page=1&per_page=10&sort=desc&state=all&view=simple&with_labels_details=false&with_merge_status_recheck=false>; rel="first", <https://gitlab.com/api/v4/projects/6590996/merge_requests?id=6590996&order_by=created_at&page=1&per_page=10&sort=desc&state=all&view=simple&with_labels_details=false&with_merge_status_recheck=false>; rel="last" +X-Frame-Options: SAMEORIGIN +X-Page: 1 +X-Total-Pages: 1 +Gitlab-Lb: haproxy-main-05-lb-gprd +X-Gitlab-Meta: {"correlation_id":"907f9e1f94131ea7a6d1405100a8cc4b","version":"1"} +X-Next-Page: +X-Per-Page: 10 +X-Runtime: 0.078413 + 
+[{"id":10518914,"iid":1,"project_id":6590996,"title":"Review","description":"*Created by: cgtx*\n\n### remove patch from makedepends\n- patch is in base-devel\n- The group base-devel is assumed to be already installed when building with makepkg. Members of \"base-devel\" should not be included in makedepends arrays.\n- https://wiki.archlinux.org/index.php/Pkgbuild#makedepends\n### remove python2 from makedepends\n- python2 is a dependency of python2-setuptools. It is redundant to list it again.\n- You do not need to list packages that your software depends on if other packages your software depends on already have those packages listed in their dependency.\n- https://wiki.archlinux.org/index.php/Pkgbuild#depends\n### more simple find/delete command\n- just because\n","state":"merged","created_at":"2014-12-12T15:01:32.000Z","updated_at":"2014-12-12T15:28:38.000Z","web_url":"https://gitlab.com/troyengel/archbuild/-/merge_requests/1"}]
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2Ftroyengel%252Farchbuild b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2Ftroyengel%252Farchbuild new file mode 100644 index 0000000..a8c2882 --- /dev/null +++ b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fprojects%2Ftroyengel%252Farchbuild @@ -0,0 +1,22 @@ +Ratelimit-Resettime: Thu, 30 Nov 2023 08:24:53 GMT +Gitlab-Lb: haproxy-main-41-lb-gprd +Cache-Control: max-age=0, private, must-revalidate +Referrer-Policy: strict-origin-when-cross-origin +Cf-Cache-Status: MISS +X-Content-Type-Options: nosniff +Set-Cookie: _cfuvid=r78xThY2IPR6QvHnea1t_L7DbvuQp4.HWOiG1cKTWUg-1701332633720-0-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +Ratelimit-Limit: 2000 +Strict-Transport-Security: max-age=31536000 +Vary: Origin, Accept-Encoding +X-Gitlab-Meta: {"correlation_id":"4c3e0f8b5858454b6e138ecae9902a8d","version":"1"} +X-Runtime: 0.097047 +Ratelimit-Observed: 2 +Ratelimit-Remaining: 1998 +X-Frame-Options: SAMEORIGIN +Content-Security-Policy: default-src 'none' +Etag: W/"03ce4f6ce1c1e8c5a31df8a44cf2fbdd" +Content-Type: application/json +Gitlab-Sv: localhost +Ratelimit-Reset: 1701332693 + +{"id":6590996,"description":"Arch packaging and build files","name":"archbuild","name_with_namespace":"Troy Engel / archbuild","path":"archbuild","path_with_namespace":"troyengel/archbuild","created_at":"2018-06-03T22:53:17.388Z","default_branch":"master","tag_list":[],"topics":[],"ssh_url_to_repo":"git@gitlab.com:troyengel/archbuild.git","http_url_to_repo":"https://gitlab.com/troyengel/archbuild.git","web_url":"https://gitlab.com/troyengel/archbuild","readme_url":"https://gitlab.com/troyengel/archbuild/-/blob/master/README.md","forks_count":0,"avatar_url":null,"star_count":0,"last_activity_at":"2020-12-13T18:09:32.071Z","namespace":{"id":1452515,"name":"Troy 
Engel","path":"troyengel","kind":"user","full_path":"troyengel","parent_id":null,"avatar_url":"https://secure.gravatar.com/avatar/b226c267929f1bcfcc446e75a025591c?s=80\u0026d=identicon","web_url":"https://gitlab.com/troyengel"},"container_registry_image_prefix":"registry.gitlab.com/troyengel/archbuild","_links":{"self":"https://gitlab.com/api/v4/projects/6590996","issues":"https://gitlab.com/api/v4/projects/6590996/issues","merge_requests":"https://gitlab.com/api/v4/projects/6590996/merge_requests","repo_branches":"https://gitlab.com/api/v4/projects/6590996/repository/branches","labels":"https://gitlab.com/api/v4/projects/6590996/labels","events":"https://gitlab.com/api/v4/projects/6590996/events","members":"https://gitlab.com/api/v4/projects/6590996/members","cluster_agents":"https://gitlab.com/api/v4/projects/6590996/cluster_agents"},"packages_enabled":null,"empty_repo":false,"archived":true,"visibility":"public","owner":{"id":1215848,"username":"troyengel","name":"Troy Engel","state":"active","locked":false,"avatar_url":"https://secure.gravatar.com/avatar/b226c267929f1bcfcc446e75a025591c?s=80\u0026d=identicon","web_url":"https://gitlab.com/troyengel"},"resolve_outdated_diff_discussions":false,"issues_enabled":true,"merge_requests_enabled":true,"wiki_enabled":true,"jobs_enabled":true,"snippets_enabled":true,"container_registry_enabled":true,"service_desk_enabled":true,"can_create_merge_request_in":false,"issues_access_level":"enabled","repository_access_level":"enabled","merge_requests_access_level":"enabled","forking_access_level":"enabled","wiki_access_level":"enabled","builds_access_level":"enabled","snippets_access_level":"enabled","pages_access_level":"enabled","analytics_access_level":"enabled","container_registry_access_level":"enabled","security_and_compliance_access_level":"private","releases_access_level":"enabled","environments_access_level":"enabled","feature_flags_access_level":"enabled","infrastructure_access_level":"enabled","monitor_access_level":"
enabled","model_experiments_access_level":"enabled","emails_disabled":false,"emails_enabled":true,"shared_runners_enabled":true,"lfs_enabled":false,"creator_id":1215848,"import_status":"finished","open_issues_count":0,"description_html":"\u003cp data-sourcepos=\"1:1-1:30\" dir=\"auto\"\u003eArch packaging and build files\u003c/p\u003e","updated_at":"2022-07-13T21:32:12.624Z","ci_config_path":null,"public_jobs":true,"shared_with_groups":[],"only_allow_merge_if_pipeline_succeeds":false,"allow_merge_on_skipped_pipeline":null,"request_access_enabled":false,"only_allow_merge_if_all_discussions_are_resolved":false,"remove_source_branch_after_merge":null,"printing_merge_request_link_enabled":true,"merge_method":"merge","squash_option":"default_off","enforce_auth_checks_on_uploads":true,"suggestion_commit_message":null,"merge_commit_template":null,"squash_commit_template":null,"issue_branch_template":null,"autoclose_referenced_issues":true,"external_authorization_classification_label":"","requirements_enabled":false,"requirements_access_level":"enabled","security_and_compliance_enabled":false,"compliance_frameworks":[],"permissions":{"project_access":null,"group_access":null}}
\ No newline at end of file diff --git a/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fversion b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fversion new file mode 100644 index 0000000..eb6df2f --- /dev/null +++ b/services/migrations/testdata/gitlab/skipped_issue_number/GET_%2Fapi%2Fv4%2Fversion @@ -0,0 +1,22 @@ +Ratelimit-Observed: 1 +X-Gitlab-Meta: {"correlation_id":"aa75720bd9c597c7f2f886a4042d1f80","version":"1"} +Etag: W/"4e5c0a031c3aacb6ba0a3c19e67d7592" +X-Content-Type-Options: nosniff +Ratelimit-Limit: 2000 +Ratelimit-Resettime: Thu, 30 Nov 2023 08:24:53 GMT +X-Runtime: 0.039899 +Ratelimit-Remaining: 1999 +Set-Cookie: _cfuvid=7OAEitQ3J0BOxrXk2pMBApFg1KFnz5aBVqOY7mHwLRk-1701332633452-0-604800000; path=/; domain=.gitlab.com; HttpOnly; Secure; SameSite=None +Content-Security-Policy: default-src 'none' +Gitlab-Sv: localhost +Cf-Cache-Status: MISS +Vary: Origin, Accept-Encoding +X-Frame-Options: SAMEORIGIN +Cache-Control: max-age=0, private, must-revalidate +Strict-Transport-Security: max-age=31536000 +Referrer-Policy: strict-origin-when-cross-origin +Ratelimit-Reset: 1701332693 +Gitlab-Lb: haproxy-main-39-lb-gprd +Content-Type: application/json + +{"version":"16.7.0-pre","revision":"acd848a9228","kas":{"enabled":true,"externalUrl":"wss://kas.gitlab.com","version":"v16.7.0-rc2"},"enterprise":true}
\ No newline at end of file diff --git a/services/migrations/update.go b/services/migrations/update.go new file mode 100644 index 0000000..4a49206 --- /dev/null +++ b/services/migrations/update.go @@ -0,0 +1,77 @@ +// Copyright 2019 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package migrations + +import ( + "context" + + "code.gitea.io/gitea/models/db" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/services/externalaccount" +) + +// UpdateMigrationPosterID updates all migrated repositories' issues and comments posterID +func UpdateMigrationPosterID(ctx context.Context) error { + for _, gitService := range structs.SupportedFullGitService { + select { + case <-ctx.Done(): + log.Warn("UpdateMigrationPosterID aborted before %s", gitService.Name()) + return db.ErrCancelledf("during UpdateMigrationPosterID before %s", gitService.Name()) + default: + } + if err := updateMigrationPosterIDByGitService(ctx, gitService); err != nil { + log.Error("updateMigrationPosterIDByGitService failed: %v", err) + } + } + return nil +} + +func updateMigrationPosterIDByGitService(ctx context.Context, tp structs.GitServiceType) error { + provider := tp.Name() + if len(provider) == 0 { + return nil + } + + const batchSize = 100 + for page := 0; ; page++ { + select { + case <-ctx.Done(): + log.Warn("UpdateMigrationPosterIDByGitService(%s) cancelled", tp.Name()) + return nil + default: + } + + users, err := db.Find[user_model.ExternalLoginUser](ctx, user_model.FindExternalUserOptions{ + ListOptions: db.ListOptions{ + PageSize: batchSize, + Page: page, + }, + Provider: provider, + OrderBy: "login_source_id ASC, external_id ASC", + }) + if err != nil { + return err + } + + for _, user := range users { + select { + case <-ctx.Done(): + log.Warn("UpdateMigrationPosterIDByGitService(%s) cancelled", tp.Name()) + return nil + default: + } + externalUserID := 
user.ExternalID + if err := externalaccount.UpdateMigrationsByType(ctx, tp, externalUserID, user.UserID); err != nil { + log.Error("UpdateMigrationsByType type %s external user id %v to local user id %v failed: %v", tp.Name(), user.ExternalID, user.UserID, err) + } + } + + if len(users) < batchSize { + break + } + } + return nil +} |