Diffstat (limited to 'services')
272 files changed, 5541 insertions, 4150 deletions
diff --git a/services/actions/auth.go b/services/actions/auth.go
index 1ef21f6e0e..12a8fba53f 100644
--- a/services/actions/auth.go
+++ b/services/actions/auth.go
@@ -4,6 +4,7 @@
 package actions
 
 import (
+	"errors"
 	"fmt"
 	"net/http"
 	"strings"
@@ -80,7 +81,7 @@ func ParseAuthorizationToken(req *http.Request) (int64, error) {
 	parts := strings.SplitN(h, " ", 2)
 	if len(parts) != 2 {
 		log.Error("split token failed: %s", h)
-		return 0, fmt.Errorf("split token failed")
+		return 0, errors.New("split token failed")
 	}
 
 	return TokenToTaskID(parts[1])
@@ -100,7 +101,7 @@ func TokenToTaskID(token string) (int64, error) {
 
 	c, ok := parsedToken.Claims.(*actionsClaims)
 	if !parsedToken.Valid || !ok {
-		return 0, fmt.Errorf("invalid token claim")
+		return 0, errors.New("invalid token claim")
 	}
 
 	return c.TaskID, nil
diff --git a/services/actions/auth_test.go b/services/actions/auth_test.go
index 85e7409105..38d0ba7f82 100644
--- a/services/actions/auth_test.go
+++ b/services/actions/auth_test.go
@@ -18,7 +18,7 @@ func TestCreateAuthorizationToken(t *testing.T) {
 	var taskID int64 = 23
 	token, err := CreateAuthorizationToken(taskID, 1, 2)
 	assert.NoError(t, err)
-	assert.NotEqual(t, "", token)
+	assert.NotEmpty(t, token)
 	claims := jwt.MapClaims{}
 	_, err = jwt.ParseWithClaims(token, claims, func(t *jwt.Token) (any, error) {
 		return setting.GetGeneralTokenSigningSecret(), nil
@@ -44,7 +44,7 @@ func TestParseAuthorizationToken(t *testing.T) {
 	var taskID int64 = 23
 	token, err := CreateAuthorizationToken(taskID, 1, 2)
 	assert.NoError(t, err)
-	assert.NotEqual(t, "", token)
+	assert.NotEmpty(t, token)
 	headers := http.Header{}
 	headers.Set("Authorization", "Bearer "+token)
 	rTaskID, err := ParseAuthorizationToken(&http.Request{
diff --git a/services/actions/cleanup.go b/services/actions/cleanup.go
index 23d6e3a49d..d0cc63e538 100644
--- a/services/actions/cleanup.go
+++ b/services/actions/cleanup.go
@@ -5,12 +5,14 @@ package actions
 
 import (
 	"context"
+	"errors"
 	"fmt"
 	"time"
 
 	actions_model "code.gitea.io/gitea/models/actions"
 	"code.gitea.io/gitea/models/db"
 	actions_module "code.gitea.io/gitea/modules/actions"
+	"code.gitea.io/gitea/modules/container"
 	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/setting"
 	"code.gitea.io/gitea/modules/storage"
@@ -27,7 +29,7 @@ func Cleanup(ctx context.Context) error {
 	}
 
 	// clean up old logs
-	if err := CleanupLogs(ctx); err != nil {
+	if err := CleanupExpiredLogs(ctx); err != nil {
 		return fmt.Errorf("cleanup logs: %w", err)
 	}
 
@@ -98,8 +100,15 @@ func cleanNeedDeleteArtifacts(taskCtx context.Context) error {
 
 const deleteLogBatchSize = 100
 
-// CleanupLogs removes logs which are older than the configured retention time
-func CleanupLogs(ctx context.Context) error {
+func removeTaskLog(ctx context.Context, task *actions_model.ActionTask) {
+	if err := actions_module.RemoveLogs(ctx, task.LogInStorage, task.LogFilename); err != nil {
+		log.Error("Failed to remove log %s (in storage %v) of task %v: %v", task.LogFilename, task.LogInStorage, task.ID, err)
+		// do not return error here, go on
+	}
+}
+
+// CleanupExpiredLogs removes logs which are older than the configured retention time
+func CleanupExpiredLogs(ctx context.Context) error {
 	olderThan := timeutil.TimeStampNow().AddDuration(-time.Duration(setting.Actions.LogRetentionDays) * 24 * time.Hour)
 
 	count := 0
@@ -109,10 +118,7 @@ func CleanupLogs(ctx context.Context) error {
 			return fmt.Errorf("find old tasks: %w", err)
 		}
 		for _, task := range tasks {
-			if err := actions_module.RemoveLogs(ctx, task.LogInStorage, task.LogFilename); err != nil {
-				log.Error("Failed to remove log %s (in storage %v) of task %v: %v", task.LogFilename, task.LogInStorage, task.ID, err)
-				// do not return error here, go on
-			}
+			removeTaskLog(ctx, task)
 			task.LogIndexes = nil // clear log indexes since it's a heavy field
 			task.LogExpired = true
 			if err := actions_model.UpdateTask(ctx, task, "log_indexes", "log_expired"); err != nil {
@@ -148,3 +154,107 @@ func CleanupEphemeralRunners(ctx context.Context) error {
 	log.Info("Removed %d runners", affected)
 	return nil
 }
+
+// CleanupEphemeralRunnersByPickedTaskOfRepo removes all ephemeral runners that have active/finished tasks on the given repository
+func CleanupEphemeralRunnersByPickedTaskOfRepo(ctx context.Context, repoID int64) error {
+	subQuery := builder.Select("`action_runner`.id").
+		From(builder.Select("*").From("`action_runner`"), "`action_runner`"). // mysql needs this redundant subquery
+		Join("INNER", "`action_task`", "`action_task`.`runner_id` = `action_runner`.`id`").
+		Where(builder.And(builder.Eq{"`action_runner`.`ephemeral`": true}, builder.Eq{"`action_task`.`repo_id`": repoID}))
+	b := builder.Delete(builder.In("id", subQuery)).From("`action_runner`")
+	res, err := db.GetEngine(ctx).Exec(b)
+	if err != nil {
+		return fmt.Errorf("find runners: %w", err)
+	}
+	affected, _ := res.RowsAffected()
+	log.Info("Removed %d runners", affected)
+	return nil
+}
+
+// DeleteRun deletes workflow run, including all logs and artifacts.
+func DeleteRun(ctx context.Context, run *actions_model.ActionRun) error {
+	if !run.Status.IsDone() {
+		return errors.New("run is not done")
+	}
+
+	repoID := run.RepoID
+
+	jobs, err := actions_model.GetRunJobsByRunID(ctx, run.ID)
+	if err != nil {
+		return err
+	}
+	jobIDs := container.FilterSlice(jobs, func(j *actions_model.ActionRunJob) (int64, bool) {
+		return j.ID, true
+	})
+	tasks := make(actions_model.TaskList, 0)
+	if len(jobIDs) > 0 {
+		if err := db.GetEngine(ctx).Where("repo_id = ?", repoID).In("job_id", jobIDs).Find(&tasks); err != nil {
+			return err
+		}
+	}
+
+	artifacts, err := db.Find[actions_model.ActionArtifact](ctx, actions_model.FindArtifactsOptions{
+		RepoID: repoID,
+		RunID:  run.ID,
+	})
+	if err != nil {
+		return err
+	}
+
+	var recordsToDelete []any
+
+	recordsToDelete = append(recordsToDelete, &actions_model.ActionRun{
+		RepoID: repoID,
+		ID:     run.ID,
+	})
+	recordsToDelete = append(recordsToDelete, &actions_model.ActionRunJob{
+		RepoID: repoID,
+		RunID:  run.ID,
+	})
+	for _, tas := range tasks {
+		recordsToDelete = append(recordsToDelete, &actions_model.ActionTask{
+			RepoID: repoID,
+			ID:     tas.ID,
+		})
+		recordsToDelete = append(recordsToDelete, &actions_model.ActionTaskStep{
+			RepoID: repoID,
+			TaskID: tas.ID,
+		})
+		recordsToDelete = append(recordsToDelete, &actions_model.ActionTaskOutput{
+			TaskID: tas.ID,
+		})
+	}
+	recordsToDelete = append(recordsToDelete, &actions_model.ActionArtifact{
+		RepoID: repoID,
+		RunID:  run.ID,
+	})
+
+	if err := db.WithTx(ctx, func(ctx context.Context) error {
+		// TODO: Deleting task records could break current ephemeral runner implementation. This is a temporary workaround suggested by ChristopherHX.
+		// Since you delete potentially the only task an ephemeral act_runner has ever run, please delete the affected runners first.
+		// one of
+		// call cleanup ephemeral runners first
+		// delete affected ephemeral act_runners
+		// I would make ephemeral runners fully delete directly before formally finishing the task
+		//
+		// See also: https://github.com/go-gitea/gitea/pull/34337#issuecomment-2862222788
+		if err := CleanupEphemeralRunners(ctx); err != nil {
+			return err
+		}
+		return db.DeleteBeans(ctx, recordsToDelete...)
+	}); err != nil {
+		return err
+	}
+
+	// Delete files on storage
+	for _, tas := range tasks {
+		removeTaskLog(ctx, tas)
+	}
+	for _, art := range artifacts {
+		if err := storage.ActionsArtifacts.Delete(art.StoragePath); err != nil {
+			log.Error("remove artifact file %q: %v", art.StoragePath, err)
+		}
+	}
+
+	return nil
+}
diff --git a/services/actions/clear_tasks.go b/services/actions/clear_tasks.go
index 2aeb0e8c96..274c04aa57 100644
--- a/services/actions/clear_tasks.go
+++ b/services/actions/clear_tasks.go
@@ -42,6 +42,10 @@ func notifyWorkflowJobStatusUpdate(ctx context.Context, jobs []*actions_model.Ac
 			_ = job.LoadAttributes(ctx)
 			notify_service.WorkflowJobStatusUpdate(ctx, job.Run.Repo, job.Run.TriggerUser, job, nil)
 		}
+		if len(jobs) > 0 {
+			job := jobs[0]
+			notify_service.WorkflowRunStatusUpdate(ctx, job.Run.Repo, job.Run.TriggerUser, job.Run)
+		}
 	}
 }
 
@@ -123,7 +127,7 @@ func CancelAbandonedJobs(ctx context.Context) error {
 		}
 		CreateCommitStatus(ctx, job)
 		if updated {
-			_ = job.LoadAttributes(ctx)
+			NotifyWorkflowRunStatusUpdateWithReload(ctx, job)
 			notify_service.WorkflowJobStatusUpdate(ctx, job.Run.Repo, job.Run.TriggerUser, job, nil)
 		}
 	}
diff --git a/services/actions/commit_status.go b/services/actions/commit_status.go
index 94ab89a3b7..ef241e5091 100644
--- a/services/actions/commit_status.go
+++ b/services/actions/commit_status.go
@@ -5,6 +5,7 @@ package actions
 
 import (
 	"context"
+	"errors"
 	"fmt"
 	"path"
 
@@ -13,9 +14,9 @@ import (
 	git_model "code.gitea.io/gitea/models/git"
 	user_model "code.gitea.io/gitea/models/user"
 	actions_module "code.gitea.io/gitea/modules/actions"
+	"code.gitea.io/gitea/modules/commitstatus"
 	git "code.gitea.io/gitea/modules/git"
 	"code.gitea.io/gitea/modules/log"
-	api "code.gitea.io/gitea/modules/structs"
 	webhook_module "code.gitea.io/gitea/modules/webhook"
 	commitstatus_service "code.gitea.io/gitea/services/repository/commitstatus"
 
@@ -51,7 +52,7 @@ func createCommitStatus(ctx context.Context, job *actions_model.ActionRunJob) er
 			return fmt.Errorf("GetPushEventPayload: %w", err)
 		}
 		if payload.HeadCommit == nil {
-			return fmt.Errorf("head commit is missing in event payload")
+			return errors.New("head commit is missing in event payload")
 		}
 		sha = payload.HeadCommit.ID
 	case // pull_request
@@ -71,9 +72,9 @@ func createCommitStatus(ctx context.Context, job *actions_model.ActionRunJob) er
 			return fmt.Errorf("GetPullRequestEventPayload: %w", err)
 		}
 		if payload.PullRequest == nil {
-			return fmt.Errorf("pull request is missing in event payload")
+			return errors.New("pull request is missing in event payload")
 		} else if payload.PullRequest.Head == nil {
-			return fmt.Errorf("head of pull request is missing in event payload")
+			return errors.New("head of pull request is missing in event payload")
 		}
 		sha = payload.PullRequest.Head.Sha
 	case webhook_module.HookEventRelease:
@@ -91,7 +92,7 @@ func createCommitStatus(ctx context.Context, job *actions_model.ActionRunJob) er
 	}
 	ctxname := fmt.Sprintf("%s / %s (%s)", runName, job.Name, event)
 	state := toCommitStatus(job.Status)
-	if statuses, _, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptionsAll); err == nil {
+	if statuses, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptionsAll); err == nil {
 		for _, v := range statuses {
 			if v.Context == ctxname {
 				if v.State == state {
@@ -146,16 +147,18 @@ func createCommitStatus(ctx context.Context, job *actions_model.ActionRunJob) er
 	return commitstatus_service.CreateCommitStatus(ctx, repo, creator, commitID.String(), &status)
 }
 
-func toCommitStatus(status actions_model.Status) api.CommitStatusState {
+func toCommitStatus(status actions_model.Status) commitstatus.CommitStatusState {
 	switch status {
-	case actions_model.StatusSuccess, actions_model.StatusSkipped:
-		return api.CommitStatusSuccess
+	case actions_model.StatusSuccess:
+		return commitstatus.CommitStatusSuccess
 	case actions_model.StatusFailure, actions_model.StatusCancelled:
-		return api.CommitStatusFailure
+		return commitstatus.CommitStatusFailure
 	case actions_model.StatusWaiting, actions_model.StatusBlocked, actions_model.StatusRunning:
-		return api.CommitStatusPending
+		return commitstatus.CommitStatusPending
+	case actions_model.StatusSkipped:
+		return commitstatus.CommitStatusSkipped
 	default:
-		return api.CommitStatusError
+		return commitstatus.CommitStatusError
 	}
 }
diff --git a/services/actions/context.go b/services/actions/context.go
index d14728fae4..b6de429ccf 100644
--- a/services/actions/context.go
+++ b/services/actions/context.go
@@ -6,6 +6,7 @@ package actions
 import (
 	"context"
 	"fmt"
+	"strconv"
 
 	actions_model "code.gitea.io/gitea/models/actions"
 	"code.gitea.io/gitea/models/db"
@@ -14,11 +15,16 @@ import (
 	"code.gitea.io/gitea/modules/git"
 	"code.gitea.io/gitea/modules/json"
 	"code.gitea.io/gitea/modules/setting"
+	"code.gitea.io/gitea/modules/util"
+
+	"github.com/nektos/act/pkg/model"
 )
 
+type GiteaContext map[string]any
+
 // GenerateGiteaContext generate the gitea context without token and gitea_runtime_token
 // job can be nil when generating a context for parsing workflow-level expressions
-func GenerateGiteaContext(run *actions_model.ActionRun, job *actions_model.ActionRunJob) map[string]any {
+func GenerateGiteaContext(run *actions_model.ActionRun, job *actions_model.ActionRunJob) GiteaContext {
 	event := map[string]any{}
 	_ = json.Unmarshal([]byte(run.EventPayload), &event)
 
@@ -41,7 +47,7 @@ func GenerateGiteaContext(run *actions_model.ActionRun, job *actions_model.Actio
 
 	refName := git.RefName(ref)
 
-	gitContext := map[string]any{
+	gitContext := GiteaContext{
 		// standard contexts, see https://docs.github.com/en/actions/learn-github-actions/contexts#github-context
 		"action": "", // string, The name of the action currently running, or the id of a step. GitHub removes special characters, and uses the name __run when the current step runs a script without an id. If you use the same action more than once in the same job, the name will include a suffix with the sequence number with underscore before it. For example, the first script you run will have the name __run, and the second script will be named __run_2. Similarly, the second invocation of actions/checkout will be actionscheckout2.
 		"action_path": "", // string, The path where an action is located. This property is only supported in composite actions. You can use this path to access files located in the same repository as the action.
@@ -68,7 +74,7 @@ func GenerateGiteaContext(run *actions_model.ActionRun, job *actions_model.Actio
 		"repositoryUrl": run.Repo.HTMLURL(), // string, The Git URL to the repository. For example, git://github.com/codertocat/hello-world.git.
"retention_days": "", // string, The number of days that workflow run logs and artifacts are kept. "run_id": "", // string, A unique number for each workflow run within a repository. This number does not change if you re-run the workflow run. - "run_number": fmt.Sprint(run.Index), // string, A unique number for each run of a particular workflow in a repository. This number begins at 1 for the workflow's first run, and increments with each new run. This number does not change if you re-run the workflow run. + "run_number": strconv.FormatInt(run.Index, 10), // string, A unique number for each run of a particular workflow in a repository. This number begins at 1 for the workflow's first run, and increments with each new run. This number does not change if you re-run the workflow run. "run_attempt": "", // string, A unique number for each attempt of a particular workflow run in a repository. This number begins at 1 for the workflow run's first attempt, and increments with each re-run. "secret_source": "Actions", // string, The source of a secret used in a workflow. Possible values are None, Actions, Dependabot, or Codespaces. "server_url": setting.AppURL, // string, The URL of the GitHub server. For example: https://github.com. @@ -83,8 +89,8 @@ func GenerateGiteaContext(run *actions_model.ActionRun, job *actions_model.Actio if job != nil { gitContext["job"] = job.JobID - gitContext["run_id"] = fmt.Sprint(job.RunID) - gitContext["run_attempt"] = fmt.Sprint(job.Attempt) + gitContext["run_id"] = strconv.FormatInt(job.RunID, 10) + gitContext["run_attempt"] = strconv.FormatInt(job.Attempt, 10) } return gitContext @@ -159,3 +165,37 @@ func mergeTwoOutputs(o1, o2 map[string]string) map[string]string { } return ret } + +func (g *GiteaContext) ToGitHubContext() *model.GithubContext { + return &model.GithubContext{ + Event: util.GetMapValueOrDefault(*g, "event", map[string]any(nil)), + EventPath: util.GetMapValueOrDefault(*g, "event_path", ""), + Workflow: util.GetMapValueOrDefault(*g, "workflow", ""), + RunID: util.GetMapValueOrDefault(*g, "run_id", ""), + RunNumber: util.GetMapValueOrDefault(*g, "run_number", ""), + Actor: util.GetMapValueOrDefault(*g, "actor", ""), + Repository: util.GetMapValueOrDefault(*g, "repository", ""), + EventName: util.GetMapValueOrDefault(*g, "event_name", ""), + Sha: util.GetMapValueOrDefault(*g, "sha", ""), + Ref: util.GetMapValueOrDefault(*g, "ref", ""), + RefName: util.GetMapValueOrDefault(*g, "ref_name", ""), + RefType: util.GetMapValueOrDefault(*g, "ref_type", ""), + HeadRef: util.GetMapValueOrDefault(*g, "head_ref", ""), + BaseRef: util.GetMapValueOrDefault(*g, "base_ref", ""), + Token: "", // deliberately omitted for security + Workspace: util.GetMapValueOrDefault(*g, "workspace", ""), + Action: util.GetMapValueOrDefault(*g, "action", ""), + ActionPath: util.GetMapValueOrDefault(*g, "action_path", ""), + ActionRef: util.GetMapValueOrDefault(*g, "action_ref", ""), + ActionRepository: util.GetMapValueOrDefault(*g, "action_repository", ""), + Job: util.GetMapValueOrDefault(*g, "job", ""), + JobName: "", // not present in GiteaContext + RepositoryOwner: util.GetMapValueOrDefault(*g, "repository_owner", ""), + RetentionDays: util.GetMapValueOrDefault(*g, "retention_days", ""), + RunnerPerflog: "", // not present in GiteaContext + RunnerTrackingID: "", // not present in GiteaContext + ServerURL: util.GetMapValueOrDefault(*g, "server_url", ""), + APIURL: util.GetMapValueOrDefault(*g, "api_url", ""), + GraphQLURL: util.GetMapValueOrDefault(*g, "graphql_url", ""), + } +} 
diff --git a/services/actions/interface.go b/services/actions/interface.go index d4fa782fec..a054c38e4f 100644 --- a/services/actions/interface.go +++ b/services/actions/interface.go @@ -25,4 +25,16 @@ type API interface { UpdateVariable(*context.APIContext) // GetRegistrationToken get registration token GetRegistrationToken(*context.APIContext) + // CreateRegistrationToken get registration token + CreateRegistrationToken(*context.APIContext) + // ListRunners list runners + ListRunners(*context.APIContext) + // GetRunner get a runner + GetRunner(*context.APIContext) + // DeleteRunner delete runner + DeleteRunner(*context.APIContext) + // ListWorkflowJobs list jobs + ListWorkflowJobs(*context.APIContext) + // ListWorkflowRuns list runs + ListWorkflowRuns(*context.APIContext) } diff --git a/services/actions/job_emitter.go b/services/actions/job_emitter.go index c11bb5875f..47c9f59094 100644 --- a/services/actions/job_emitter.go +++ b/services/actions/job_emitter.go @@ -11,6 +11,7 @@ import ( actions_model "code.gitea.io/gitea/models/actions" "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/modules/graceful" + "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/queue" notify_service "code.gitea.io/gitea/services/notify" @@ -78,9 +79,30 @@ func checkJobsOfRun(ctx context.Context, runID int64) error { _ = job.LoadAttributes(ctx) notify_service.WorkflowJobStatusUpdate(ctx, job.Run.Repo, job.Run.TriggerUser, job, nil) } + if len(jobs) > 0 { + runUpdated := true + for _, job := range jobs { + if !job.Status.IsDone() { + runUpdated = false + break + } + } + if runUpdated { + NotifyWorkflowRunStatusUpdateWithReload(ctx, jobs[0]) + } + } return nil } +func NotifyWorkflowRunStatusUpdateWithReload(ctx context.Context, job *actions_model.ActionRunJob) { + job.Run = nil + if err := job.LoadAttributes(ctx); err != nil { + log.Error("LoadAttributes: %v", err) + return + } + notify_service.WorkflowRunStatusUpdate(ctx, job.Run.Repo, job.Run.TriggerUser, job.Run) +} + type jobStatusResolver struct { statuses map[int64]actions_model.Status needs map[int64][]int64 diff --git a/services/actions/notifier.go b/services/actions/notifier.go index 831cde3523..110e330f68 100644 --- a/services/actions/notifier.go +++ b/services/actions/notifier.go @@ -6,13 +6,16 @@ package actions import ( "context" + actions_model "code.gitea.io/gitea/models/actions" issues_model "code.gitea.io/gitea/models/issues" + "code.gitea.io/gitea/models/organization" packages_model "code.gitea.io/gitea/models/packages" perm_model "code.gitea.io/gitea/models/perm" access_model "code.gitea.io/gitea/models/perm/access" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" @@ -260,11 +263,6 @@ func (n *actionsNotifier) CreateIssueComment(ctx context.Context, doer *user_mod func (n *actionsNotifier) UpdateComment(ctx context.Context, doer *user_model.User, c *issues_model.Comment, oldContent string) { ctx = withMethod(ctx, "UpdateComment") - if err := c.LoadIssue(ctx); err != nil { - log.Error("LoadIssue: %v", err) - return - } - if c.Issue.IsPull { notifyIssueCommentChange(ctx, doer, c, oldContent, webhook_module.HookEventPullRequestComment, api.HookIssueCommentEdited) return @@ -275,11 +273,6 @@ func (n *actionsNotifier) UpdateComment(ctx context.Context, doer *user_model.Us func (n *actionsNotifier) 
@@ -275,11 +273,6 @@ func (n *actionsNotifier) DeleteComment(ctx context.Context, doer *user_model.User, comment *issues_model.Comment) {
 	ctx = withMethod(ctx, "DeleteComment")
 
-	if err := comment.LoadIssue(ctx); err != nil {
-		log.Error("LoadIssue: %v", err)
-		return
-	}
-
 	if comment.Issue.IsPull {
 		notifyIssueCommentChange(ctx, doer, comment, "", webhook_module.HookEventPullRequestComment, api.HookIssueCommentDeleted)
 		return
@@ -288,6 +281,7 @@ func (n *actionsNotifier) DeleteComment(ctx context.Context, doer *user_model.Us
 }
 
 func notifyIssueCommentChange(ctx context.Context, doer *user_model.User, comment *issues_model.Comment, oldContent string, event webhook_module.HookEventType, action api.HookIssueCommentAction) {
+	comment.Issue = nil // force issue to be loaded
 	if err := comment.LoadIssue(ctx); err != nil {
 		log.Error("LoadIssue: %v", err)
 		return
@@ -762,3 +756,41 @@ func (n *actionsNotifier) MigrateRepository(ctx context.Context, doer, u *user_m
 		Sender: convert.ToUser(ctx, doer, nil),
 	}).Notify(ctx)
 }
+
+func (n *actionsNotifier) WorkflowRunStatusUpdate(ctx context.Context, repo *repo_model.Repository, sender *user_model.User, run *actions_model.ActionRun) {
+	ctx = withMethod(ctx, "WorkflowRunStatusUpdate")
+
+	var org *api.Organization
+	if repo.Owner.IsOrganization() {
+		org = convert.ToOrganization(ctx, organization.OrgFromUser(repo.Owner))
+	}
+
+	status := convert.ToWorkflowRunAction(run.Status)
+
+	gitRepo, err := gitrepo.OpenRepository(ctx, repo)
+	if err != nil {
+		log.Error("OpenRepository: %v", err)
+		return
+	}
+	defer gitRepo.Close()
+
+	convertedWorkflow, err := convert.GetActionWorkflow(ctx, gitRepo, repo, run.WorkflowID)
+	if err != nil {
+		log.Error("GetActionWorkflow: %v", err)
+		return
+	}
+	convertedRun, err := convert.ToActionWorkflowRun(ctx, repo, run)
+	if err != nil {
+		log.Error("ToActionWorkflowRun: %v", err)
+		return
+	}
+
+	newNotifyInput(repo, sender, webhook_module.HookEventWorkflowRun).WithPayload(&api.WorkflowRunPayload{
+		Action:       status,
+		Workflow:     convertedWorkflow,
+		WorkflowRun:  convertedRun,
+		Organization: org,
+		Repo:         convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
+		Sender:       convert.ToUser(ctx, sender, nil),
+	}).Notify(ctx)
+}
diff --git a/services/actions/notifier_helper.go b/services/actions/notifier_helper.go
index d179134798..b8bc20cdbb 100644
--- a/services/actions/notifier_helper.go
+++ b/services/actions/notifier_helper.go
@@ -33,7 +33,9 @@ import (
 	"github.com/nektos/act/pkg/model"
 )
 
-var methodCtxKey struct{}
+type methodCtxKeyType struct{}
+
+var methodCtxKey methodCtxKeyType
 
 // withMethod sets the notification method that this context currently executes.
 // Used for debugging/ troubleshooting purposes.
@@ -44,8 +46,7 @@ func withMethod(ctx context.Context, method string) context.Context {
 			return ctx
 		}
 	}
-	// FIXME: review the use of this nolint directive
-	return context.WithValue(ctx, methodCtxKey, method) //nolint:staticcheck
+	return context.WithValue(ctx, methodCtxKey, method)
 }
 
 // getMethod gets the notification method that this context currently executes.
@@ -103,7 +104,7 @@ func (input *notifyInput) WithPayload(payload api.Payloader) *notifyInput {
 func (input *notifyInput) WithPullRequest(pr *issues_model.PullRequest) *notifyInput {
 	input.PullRequest = pr
 	if input.Ref == "" {
-		input.Ref = git.RefName(pr.GetGitRefName())
+		input.Ref = git.RefName(pr.GetGitHeadRefName())
 	}
 	return input
 }
@@ -178,7 +179,7 @@ func notify(ctx context.Context, input *notifyInput) error {
 		return fmt.Errorf("gitRepo.GetCommit: %w", err)
 	}
 
-	if skipWorkflows(input, commit) {
+	if skipWorkflows(ctx, input, commit) {
 		return nil
 	}
 
@@ -243,7 +244,7 @@ func notify(ctx context.Context, input *notifyInput) error {
 	return handleWorkflows(ctx, detectedWorkflows, commit, input, ref.String())
 }
 
-func skipWorkflows(input *notifyInput, commit *git.Commit) bool {
+func skipWorkflows(ctx context.Context, input *notifyInput, commit *git.Commit) bool {
 	// skip workflow runs with a configured skip-ci string in commit message or pr title if the event is push or pull_request(_sync)
 	// https://docs.github.com/en/actions/managing-workflow-runs/skipping-workflow-runs
 	skipWorkflowEvents := []webhook_module.HookEventType{
@@ -263,6 +264,27 @@ func skipWorkflows(input *notifyInput, commit *git.Commit) bool {
 			}
 		}
 	}
+	if input.Event == webhook_module.HookEventWorkflowRun {
+		wrun, ok := input.Payload.(*api.WorkflowRunPayload)
+		for i := 0; i < 5 && ok && wrun.WorkflowRun != nil; i++ {
+			if wrun.WorkflowRun.Event != "workflow_run" {
+				return false
+			}
+			r, err := actions_model.GetRunByRepoAndID(ctx, input.Repo.ID, wrun.WorkflowRun.ID)
+			if err != nil {
+				log.Error("GetRunByRepoAndID: %v", err)
+				return true
+			}
+			wrun, err = r.GetWorkflowRunEventPayload()
+			if err != nil {
+				log.Error("GetWorkflowRunEventPayload: %v", err)
+				return true
+			}
+		}
+		// skip workflow runs events exceeding the maxiumum of 5 recursive events
+		log.Debug("repo %s: skipped workflow_run because of recursive event of 5", input.Repo.RepoPath())
+		return true
+	}
 	return false
 }
 
@@ -302,9 +324,11 @@ func handleWorkflows(
 		run := &actions_model.ActionRun{
 			Title:             strings.SplitN(commit.CommitMessage, "\n", 2)[0],
 			RepoID:            input.Repo.ID,
+			Repo:              input.Repo,
 			OwnerID:           input.Repo.OwnerID,
 			WorkflowID:        dwf.EntryName,
 			TriggerUserID:     input.Doer.ID,
+			TriggerUser:       input.Doer,
 			Ref:               ref,
 			CommitSHA:         commit.ID.String(),
 			IsForkPullRequest: isForkPullRequest,
@@ -333,12 +357,18 @@ func handleWorkflows(
 			continue
 		}
 
-		jobs, err := jobparser.Parse(dwf.Content, jobparser.WithVars(vars))
+		giteaCtx := GenerateGiteaContext(run, nil)
+
+		jobs, err := jobparser.Parse(dwf.Content, jobparser.WithVars(vars), jobparser.WithGitContext(giteaCtx.ToGitHubContext()))
 		if err != nil {
 			log.Error("jobparser.Parse: %v", err)
 			continue
 		}
 
+		if len(jobs) > 0 && jobs[0].RunName != "" {
+			run.Title = jobs[0].RunName
+		}
+
 		// cancel running jobs if the event is push or pull_request_sync
 		if run.Event == webhook_module.HookEventPush ||
 			run.Event == webhook_module.HookEventPullRequestSync {
@@ -364,6 +394,15 @@ func handleWorkflows(
 			continue
 		}
 		CreateCommitStatus(ctx, alljobs...)
+		if len(alljobs) > 0 {
+			job := alljobs[0]
+			err := job.LoadRun(ctx)
+			if err != nil {
+				log.Error("LoadRun: %v", err)
+				continue
+			}
+			notify_service.WorkflowRunStatusUpdate(ctx, job.Run.Repo, job.Run.TriggerUser, job.Run)
+		}
 		for _, job := range alljobs {
 			notify_service.WorkflowJobStatusUpdate(ctx, input.Repo, input.Doer, job, nil)
 		}
@@ -508,9 +547,11 @@ func handleSchedules(
 		run := &actions_model.ActionSchedule{
 			Title:         strings.SplitN(commit.CommitMessage, "\n", 2)[0],
 			RepoID:        input.Repo.ID,
+			Repo:          input.Repo,
 			OwnerID:       input.Repo.OwnerID,
 			WorkflowID:    dwf.EntryName,
 			TriggerUserID: user_model.ActionsUserID,
+			TriggerUser:   user_model.NewActionsUser(),
 			Ref:           ref,
 			CommitSHA:     commit.ID.String(),
 			Event:         input.Event,
@@ -518,6 +559,25 @@ func handleSchedules(
 			Specs:         schedules,
 			Content:       dwf.Content,
 		}
+
+		vars, err := actions_model.GetVariablesOfRun(ctx, run.ToActionRun())
+		if err != nil {
+			log.Error("GetVariablesOfRun: %v", err)
+			continue
+		}
+
+		giteaCtx := GenerateGiteaContext(run.ToActionRun(), nil)
+
+		jobs, err := jobparser.Parse(dwf.Content, jobparser.WithVars(vars), jobparser.WithGitContext(giteaCtx.ToGitHubContext()))
+		if err != nil {
+			log.Error("jobparser.Parse: %v", err)
+			continue
+		}
+
+		if len(jobs) > 0 && jobs[0].RunName != "" {
+			run.Title = jobs[0].RunName
+		}
+
 		crons = append(crons, run)
 	}
 
diff --git a/services/actions/schedule_tasks.go b/services/actions/schedule_tasks.go
index a30b166063..c029c5a1a2 100644
--- a/services/actions/schedule_tasks.go
+++ b/services/actions/schedule_tasks.go
@@ -157,6 +157,7 @@ func CreateScheduleTask(ctx context.Context, cron *actions_model.ActionSchedule)
 	if err != nil {
 		log.Error("LoadAttributes: %v", err)
 	}
+	notify_service.WorkflowRunStatusUpdate(ctx, run.Repo, run.TriggerUser, run)
 	for _, job := range allJobs {
 		notify_service.WorkflowJobStatusUpdate(ctx, run.Repo, run.TriggerUser, job, nil)
 	}
diff --git a/services/actions/task.go b/services/actions/task.go
index 9c8198206a..6a547c1c12 100644
--- a/services/actions/task.go
+++ b/services/actions/task.go
@@ -5,6 +5,7 @@ package actions
 
 import (
 	"context"
+	"errors"
 	"fmt"
 
 	actions_model "code.gitea.io/gitea/models/actions"
@@ -39,7 +40,7 @@ func PickTask(ctx context.Context, runner *actions_model.ActionRunner) (*runnerv
 			if err != nil {
 				return nil, false, err
 			}
-			return nil, false, fmt.Errorf("runner has been removed")
+			return nil, false, errors.New("runner has been removed")
 		}
 	}
 
diff --git a/services/actions/workflow.go b/services/actions/workflow.go
index dc8a1dd349..233e22b5dd 100644
--- a/services/actions/workflow.go
+++ b/services/actions/workflow.go
@@ -5,9 +5,6 @@ package actions
 
 import (
 	"fmt"
-	"net/http"
-	"net/url"
-	"path"
 	"strings"
 
 	actions_model "code.gitea.io/gitea/models/actions"
@@ -31,61 +28,8 @@ import (
 	"github.com/nektos/act/pkg/model"
 )
 
-func getActionWorkflowPath(commit *git.Commit) string {
-	paths := []string{".gitea/workflows", ".github/workflows"}
-	for _, treePath := range paths {
-		if _, err := commit.SubTree(treePath); err == nil {
-			return treePath
-		}
-	}
-	return ""
-}
-
-func getActionWorkflowEntry(ctx *context.APIContext, commit *git.Commit, folder string, entry *git.TreeEntry) *api.ActionWorkflow {
-	cfgUnit := ctx.Repo.Repository.MustGetUnit(ctx, unit.TypeActions)
-	cfg := cfgUnit.ActionsConfig()
-
-	defaultBranch, _ := commit.GetBranchName()
-
-	workflowURL := fmt.Sprintf("%s/actions/workflows/%s", ctx.Repo.Repository.APIURL(), url.PathEscape(entry.Name()))
-	workflowRepoURL := fmt.Sprintf("%s/src/branch/%s/%s/%s", ctx.Repo.Repository.HTMLURL(ctx),
 util.PathEscapeSegments(defaultBranch), util.PathEscapeSegments(folder), url.PathEscape(entry.Name()))
-	badgeURL := fmt.Sprintf("%s/actions/workflows/%s/badge.svg?branch=%s", ctx.Repo.Repository.HTMLURL(ctx), url.PathEscape(entry.Name()), url.QueryEscape(ctx.Repo.Repository.DefaultBranch))
-
-	// See https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#get-a-workflow
-	// State types:
-	// - active
-	// - deleted
-	// - disabled_fork
-	// - disabled_inactivity
-	// - disabled_manually
-	state := "active"
-	if cfg.IsWorkflowDisabled(entry.Name()) {
-		state = "disabled_manually"
-	}
-
-	// The CreatedAt and UpdatedAt fields currently reflect the timestamp of the latest commit, which can later be refined
-	// by retrieving the first and last commits for the file history. The first commit would indicate the creation date,
-	// while the last commit would represent the modification date. The DeletedAt could be determined by identifying
-	// the last commit where the file existed. However, this implementation has not been done here yet, as it would likely
-	// cause a significant performance degradation.
-	createdAt := commit.Author.When
-	updatedAt := commit.Author.When
-
-	return &api.ActionWorkflow{
-		ID:        entry.Name(),
-		Name:      entry.Name(),
-		Path:      path.Join(folder, entry.Name()),
-		State:     state,
-		CreatedAt: createdAt,
-		UpdatedAt: updatedAt,
-		URL:       workflowURL,
-		HTMLURL:   workflowRepoURL,
-		BadgeURL:  badgeURL,
-	}
-}
-
 func EnableOrDisableWorkflow(ctx *context.APIContext, workflowID string, isEnable bool) error {
-	workflow, err := GetActionWorkflow(ctx, workflowID)
+	workflow, err := convert.GetActionWorkflow(ctx, ctx.Repo.GitRepo, ctx.Repo.Repository, workflowID)
 	if err != nil {
 		return err
 	}
@@ -102,44 +46,6 @@ func EnableOrDisableWorkflow(ctx *context.APIContext, workflowID string, isEnabl
 	return repo_model.UpdateRepoUnit(ctx, cfgUnit)
 }
 
-func ListActionWorkflows(ctx *context.APIContext) ([]*api.ActionWorkflow, error) {
-	defaultBranchCommit, err := ctx.Repo.GitRepo.GetBranchCommit(ctx.Repo.Repository.DefaultBranch)
-	if err != nil {
-		ctx.APIErrorInternal(err)
-		return nil, err
-	}
-
-	entries, err := actions.ListWorkflows(defaultBranchCommit)
-	if err != nil {
-		ctx.APIError(http.StatusNotFound, err.Error())
-		return nil, err
-	}
-
-	folder := getActionWorkflowPath(defaultBranchCommit)
-
-	workflows := make([]*api.ActionWorkflow, len(entries))
-	for i, entry := range entries {
-		workflows[i] = getActionWorkflowEntry(ctx, defaultBranchCommit, folder, entry)
-	}
-
-	return workflows, nil
-}
-
-func GetActionWorkflow(ctx *context.APIContext, workflowID string) (*api.ActionWorkflow, error) {
-	entries, err := ListActionWorkflows(ctx)
-	if err != nil {
-		return nil, err
-	}
-
-	for _, entry := range entries {
-		if entry.Name == workflowID {
-			return entry, nil
-		}
-	}
-
-	return nil, util.NewNotExistErrorf("workflow %q not found", workflowID)
-}
-
 func DispatchActionWorkflow(ctx reqctx.RequestContext, doer *user_model.User, repo *repo_model.Repository, gitRepo *git.Repository, workflowID, ref string, processInputs func(model *model.WorkflowDispatch, inputs map[string]any) error) error {
 	if workflowID == "" {
 		return util.ErrorWrapLocale(
@@ -185,29 +91,62 @@ func DispatchActionWorkflow(ctx reqctx.RequestContext, doer *user_model.User, re
 	}
 
 	// get workflow entry from runTargetCommit
-	entries, err := actions.ListWorkflows(runTargetCommit)
+	_, entries, err := actions.ListWorkflows(runTargetCommit)
 	if err != nil {
 		return err
 	}
 
 	// find workflow from commit
 	var workflows []*jobparser.SingleWorkflow
-	for _, entry := range entries {
-		if entry.Name() != workflowID {
-			continue
-		}
+	var entry *git.TreeEntry
 
-		content, err := actions.GetContentFromEntry(entry)
-		if err != nil {
-			return err
-		}
-		workflows, err = jobparser.Parse(content)
-		if err != nil {
-			return err
+	run := &actions_model.ActionRun{
+		Title:             strings.SplitN(runTargetCommit.CommitMessage, "\n", 2)[0],
+		RepoID:            repo.ID,
+		Repo:              repo,
+		OwnerID:           repo.OwnerID,
+		WorkflowID:        workflowID,
+		TriggerUserID:     doer.ID,
+		TriggerUser:       doer,
+		Ref:               string(refName),
+		CommitSHA:         runTargetCommit.ID.String(),
+		IsForkPullRequest: false,
+		Event:             "workflow_dispatch",
+		TriggerEvent:      "workflow_dispatch",
+		Status:            actions_model.StatusWaiting,
+	}
+
+	for _, e := range entries {
+		if e.Name() != workflowID {
+			continue
 		}
+		entry = e
 		break
 	}
 
+	if entry == nil {
+		return util.ErrorWrapLocale(
+			util.NewNotExistErrorf("workflow %q doesn't exist", workflowID),
+			"actions.workflow.not_found", workflowID,
+		)
+	}
+
+	content, err := actions.GetContentFromEntry(entry)
+	if err != nil {
+		return err
+	}
+
+	giteaCtx := GenerateGiteaContext(run, nil)
+
+	workflows, err = jobparser.Parse(content, jobparser.WithGitContext(giteaCtx.ToGitHubContext()))
+	if err != nil {
+		return err
+	}
+
+	if len(workflows) > 0 && workflows[0].RunName != "" {
+		run.Title = workflows[0].RunName
+	}
+
 	if len(workflows) == 0 {
 		return util.ErrorWrapLocale(
 			util.NewNotExistErrorf("workflow %q doesn't exist", workflowID),
@@ -236,25 +175,12 @@ func DispatchActionWorkflow(ctx reqctx.RequestContext, doer *user_model.User, re
 		Inputs:     inputsWithDefaults,
 		Sender:     convert.ToUserWithAccessMode(ctx, doer, perm.AccessModeNone),
 	}
+
 	var eventPayload []byte
 	if eventPayload, err = workflowDispatchPayload.JSONPayload(); err != nil {
 		return fmt.Errorf("JSONPayload: %w", err)
 	}
-
-	run := &actions_model.ActionRun{
-		Title:             strings.SplitN(runTargetCommit.CommitMessage, "\n", 2)[0],
-		RepoID:            repo.ID,
-		OwnerID:           repo.OwnerID,
-		WorkflowID:        workflowID,
-		TriggerUserID:     doer.ID,
-		Ref:               string(refName),
-		CommitSHA:         runTargetCommit.ID.String(),
-		IsForkPullRequest: false,
-		Event:             "workflow_dispatch",
-		TriggerEvent:      "workflow_dispatch",
-		EventPayload:      string(eventPayload),
-		Status:            actions_model.StatusWaiting,
-	}
+	run.EventPayload = string(eventPayload)
 
 	// cancel running jobs of the same workflow
 	if err := CancelPreviousJobs(
@@ -277,9 +203,17 @@ func DispatchActionWorkflow(ctx reqctx.RequestContext, doer *user_model.User, re
 		log.Error("FindRunJobs: %v", err)
 	}
 	CreateCommitStatus(ctx, allJobs...)
Error: %w", err) } - pull_service.AddToTaskQueue(ctx, pr) + // Mark existing reviews as stale when PR content changes (same as regular GitHub flow) + if oldHeadCommitID != opts.NewCommitIDs[i] { + if err := issues_model.MarkReviewsAsStale(ctx, pr.IssueID); err != nil { + log.Error("MarkReviewsAsStale: %v", err) + } + + // Dismiss all approval reviews if protected branch rule item enabled + pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch) + if err != nil { + log.Error("GetFirstMatchProtectedBranchRule: %v", err) + } + if pb != nil && pb.DismissStaleApprovals { + if err := pull_service.DismissApprovalReviews(ctx, pusher, pr); err != nil { + log.Error("DismissApprovalReviews: %v", err) + } + } + + // Mark reviews for the new commit as not stale + if err := issues_model.MarkReviewsAsNotStale(ctx, pr.IssueID, opts.NewCommitIDs[i]); err != nil { + log.Error("MarkReviewsAsNotStale: %v", err) + } + } + + pull_service.StartPullRequestCheckImmediately(ctx, pr) err = pr.LoadIssue(ctx) if err != nil { return nil, fmt.Errorf("failed to load pull issue. Error: %w", err) @@ -219,7 +260,7 @@ func ProcReceive(ctx context.Context, repo *repo_model.Repository, gitRepo *git. results = append(results, private.HookProcReceiveRefResult{ OldOID: oldCommitID, NewOID: opts.NewCommitIDs[i], - Ref: pr.GetGitRefName(), + Ref: pr.GetGitHeadRefName(), OriginalRef: opts.RefFullNames[i], IsForcePush: isForcePush, IsCreatePR: false, diff --git a/services/agit/agit_test.go b/services/agit/agit_test.go new file mode 100644 index 0000000000..feaf7dca9b --- /dev/null +++ b/services/agit/agit_test.go @@ -0,0 +1,16 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package agit + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestParseAgitPushOptionValue(t *testing.T) { + assert.Equal(t, "a", parseAgitPushOptionValue("a")) + assert.Equal(t, "a", parseAgitPushOptionValue("{base64}YQ==")) + assert.Equal(t, "{base64}invalid value", parseAgitPushOptionValue("{base64}invalid value")) +} diff --git a/services/asymkey/commit.go b/services/asymkey/commit.go index df29133972..54ef052a50 100644 --- a/services/asymkey/commit.go +++ b/services/asymkey/commit.go @@ -5,61 +5,63 @@ package asymkey import ( "context" + "fmt" + "slices" "strings" asymkey_model "code.gitea.io/gitea/models/asymkey" "code.gitea.io/gitea/models/db" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/cache" + "code.gitea.io/gitea/modules/cachegroup" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" + "github.com/42wim/sshsig" "github.com/ProtonMail/go-crypto/openpgp/packet" ) // ParseCommitWithSignature check if signature is good against keystore. func ParseCommitWithSignature(ctx context.Context, c *git.Commit) *asymkey_model.CommitVerification { - var committer *user_model.User - if c.Committer != nil { - var err error - // Find Committer account - committer, err = user_model.GetUserByEmail(ctx, c.Committer.Email) // This finds the user by primary email or activated email so commit will not be valid if email is not - if err != nil { // Skipping not user for committer - committer = &user_model.User{ - Name: c.Committer.Name, - Email: c.Committer.Email, - } - // We can expect this to often be an ErrUserNotExist. in the case - // it is not, however, it is important to log it. 
- if !user_model.IsErrUserNotExist(err) { - log.Error("GetUserByEmail: %v", err) - return &asymkey_model.CommitVerification{ - CommittingUser: committer, - Verified: false, - Reason: "gpg.error.no_committer_account", - } - } + committer, err := user_model.GetUserByEmail(ctx, c.Committer.Email) + if err != nil && !user_model.IsErrUserNotExist(err) { + log.Error("GetUserByEmail: %v", err) + return &asymkey_model.CommitVerification{ + Verified: false, + Reason: "gpg.error.no_committer_account", // this error is not right, but such error should seldom happen } } - return ParseCommitWithSignatureCommitter(ctx, c, committer) } +// ParseCommitWithSignatureCommitter parses a commit's GPG or SSH signature. +// The caller guarantees that the committer user is related to the commit by checking its activated email addresses or no-reply address. +// If the commit is singed by an instance key, then committer can be nil. +// If the signature exists, even if committer is nil, the returned CommittingUser will be a non-nil fake user (e.g.: instance key) func ParseCommitWithSignatureCommitter(ctx context.Context, c *git.Commit, committer *user_model.User) *asymkey_model.CommitVerification { - // If no signature just report the committer + // If no signature, just report the committer if c.Signature == nil { return &asymkey_model.CommitVerification{ CommittingUser: committer, - Verified: false, // Default value - Reason: "gpg.error.not_signed_commit", // Default value + Verified: false, + Reason: "gpg.error.not_signed_commit", + } + } + // to support instance key, we need a fake committer user (not really needed, but legacy code accesses the committer without nil-check) + if committer == nil { + committer = &user_model.User{ + Name: c.Committer.Name, + Email: c.Committer.Email, } } - - // If this a SSH signature handle it differently if strings.HasPrefix(c.Signature.Signature, "-----BEGIN SSH SIGNATURE-----") { - return asymkey_model.ParseCommitWithSSHSignature(ctx, c, committer) + return parseCommitWithSSHSignature(ctx, c, committer) } + return parseCommitWithGPGSignature(ctx, c, committer) +} +func parseCommitWithGPGSignature(ctx context.Context, c *git.Commit, committer *user_model.User) *asymkey_model.CommitVerification { // Parsing signature sig, err := asymkey_model.ExtractSignature(c.Signature.Signature) if err != nil { // Skipping failed to extract sign @@ -113,20 +115,11 @@ func ParseCommitWithSignatureCommitter(ctx context.Context, c *git.Commit, commi } } - committerEmailAddresses, _ := user_model.GetEmailAddresses(ctx, committer.ID) - activated := false - for _, e := range committerEmailAddresses { - if e.IsActivated && strings.EqualFold(e.Email, c.Committer.Email) { - activated = true - break - } - } - for _, k := range keys { // Pre-check (& optimization) that emails attached to key can be attached to the committer email and can validate canValidate := false email := "" - if k.Verified && activated { + if k.Verified { canValidate = true email = c.Committer.Email } @@ -160,7 +153,7 @@ func ParseCommitWithSignatureCommitter(ctx context.Context, c *git.Commit, commi } if err := gpgSettings.LoadPublicKeyContent(); err != nil { log.Error("Error getting default signing key: %s %v", gpgSettings.KeyID, err) - } else if commitVerification := VerifyWithGPGSettings(ctx, &gpgSettings, sig, c.Signature.Payload, committer, keyID); commitVerification != nil { + } else if commitVerification := verifyWithGPGSettings(ctx, &gpgSettings, sig, c.Signature.Payload, committer, keyID); commitVerification != nil { if 
 			if commitVerification.Reason == asymkey_model.BadSignature {
 				defaultReason = asymkey_model.BadSignature
 			} else {
@@ -175,7 +168,7 @@ func ParseCommitWithSignatureCommitter(ctx context.Context, c *git.Commit, commi
 	} else if defaultGPGSettings == nil {
 		log.Warn("Unable to get defaultGPGSettings for unattached commit: %s", c.ID.String())
 	} else if defaultGPGSettings.Sign {
-		if commitVerification := VerifyWithGPGSettings(ctx, defaultGPGSettings, sig, c.Signature.Payload, committer, keyID); commitVerification != nil {
+		if commitVerification := verifyWithGPGSettings(ctx, defaultGPGSettings, sig, c.Signature.Payload, committer, keyID); commitVerification != nil {
 			if commitVerification.Reason == asymkey_model.BadSignature {
 				defaultReason = asymkey_model.BadSignature
 			} else {
@@ -207,18 +200,17 @@ func checkKeyEmails(ctx context.Context, email string, keys ...*asymkey_model.GP
 		}
 		if key.Verified && key.OwnerID != 0 {
 			if uid != key.OwnerID {
-				userEmails, _ = user_model.GetEmailAddresses(ctx, key.OwnerID)
+				userEmails, _ = cache.GetWithContextCache(ctx, cachegroup.UserEmailAddresses, key.OwnerID, user_model.GetEmailAddresses)
 				uid = key.OwnerID
-				user = &user_model.User{ID: uid}
-				_, _ = user_model.GetUser(ctx, user)
+				user, _ = cache.GetWithContextCache(ctx, cachegroup.User, uid, user_model.GetUserByID)
 			}
 			for _, e := range userEmails {
 				if e.IsActivated && (email == "" || strings.EqualFold(e.Email, email)) {
 					return true, e.Email
 				}
 			}
-			if user.KeepEmailPrivate && strings.EqualFold(email, user.GetEmail()) {
-				return true, user.GetEmail()
+			if user != nil && strings.EqualFold(email, user.GetPlaceholderEmail()) {
+				return true, user.GetPlaceholderEmail()
 			}
 		}
 	}
@@ -229,10 +221,7 @@ func HashAndVerifyForKeyID(ctx context.Context, sig *packet.Signature, payload s
 	if keyID == "" {
 		return nil
 	}
-	keys, err := db.Find[asymkey_model.GPGKey](ctx, asymkey_model.FindGPGKeyOptions{
-		KeyID:          keyID,
-		IncludeSubKeys: true,
-	})
+	keys, err := cache.GetWithContextCache(ctx, cachegroup.GPGKeyWithSubKeys, keyID, asymkey_model.FindGPGKeyWithSubKeys)
 	if err != nil {
 		log.Error("GetGPGKeysByKeyID: %v", err)
 		return &asymkey_model.CommitVerification{
@@ -247,10 +236,7 @@ func HashAndVerifyForKeyID(ctx context.Context, sig *packet.Signature, payload s
 	for _, key := range keys {
 		var primaryKeys []*asymkey_model.GPGKey
 		if key.PrimaryKeyID != "" {
-			primaryKeys, err = db.Find[asymkey_model.GPGKey](ctx, asymkey_model.FindGPGKeyOptions{
-				KeyID:          key.PrimaryKeyID,
-				IncludeSubKeys: true,
-			})
+			primaryKeys, err = cache.GetWithContextCache(ctx, cachegroup.GPGKeyWithSubKeys, key.PrimaryKeyID, asymkey_model.FindGPGKeyWithSubKeys)
 			if err != nil {
 				log.Error("GetGPGKeysByKeyID: %v", err)
 				return &asymkey_model.CommitVerification{
@@ -270,8 +256,8 @@ func HashAndVerifyForKeyID(ctx context.Context, sig *packet.Signature, payload s
 			Name:  name,
 			Email: email,
 		}
-		if key.OwnerID != 0 {
-			owner, err := user_model.GetUserByID(ctx, key.OwnerID)
+		if key.OwnerID > 0 {
+			owner, err := cache.GetWithContextCache(ctx, cachegroup.User, key.OwnerID, user_model.GetUserByID)
 			if err == nil {
 				signer = owner
 			} else if !user_model.IsErrUserNotExist(err) {
@@ -297,7 +283,7 @@ func HashAndVerifyForKeyID(ctx context.Context, sig *packet.Signature, payload s
 	}
 }
 
-func VerifyWithGPGSettings(ctx context.Context, gpgSettings *git.GPGSettings, sig *packet.Signature, payload string, committer *user_model.User, keyID string) *asymkey_model.CommitVerification {
+func verifyWithGPGSettings(ctx context.Context, gpgSettings *git.GPGSettings, sig *packet.Signature, payload string, committer *user_model.User, keyID string) *asymkey_model.CommitVerification {
 	// First try to find the key in the db
 	if commitVerification := HashAndVerifyForKeyID(ctx, sig, payload, committer, gpgSettings.KeyID, gpgSettings.Name, gpgSettings.Email); commitVerification != nil {
 		return commitVerification
@@ -361,3 +347,104 @@ func VerifyWithGPGSettings(ctx context.Context, gpgSettings *git.GPGSettings, si
 	}
 	return nil
 }
+
+func verifySSHCommitVerificationByInstanceKey(c *git.Commit, committerUser, signerUser *user_model.User, committerGitEmail, publicKeyContent string) *asymkey_model.CommitVerification {
+	fingerprint, err := asymkey_model.CalcFingerprint(publicKeyContent)
+	if err != nil {
+		log.Error("Error calculating the fingerprint public key %q, err: %v", publicKeyContent, err)
+		return nil
+	}
+	sshPubKey := &asymkey_model.PublicKey{
+		Verified:    true,
+		Content:     publicKeyContent,
+		Fingerprint: fingerprint,
+		HasUsed:     true,
+	}
+	return verifySSHCommitVerification(c.Signature.Signature, c.Signature.Payload, sshPubKey, committerUser, signerUser, committerGitEmail)
+}
+
+// parseCommitWithSSHSignature check if signature is good against keystore.
+func parseCommitWithSSHSignature(ctx context.Context, c *git.Commit, committerUser *user_model.User) *asymkey_model.CommitVerification {
+	// Now try to associate the signature with the committer, if present
+	if committerUser.ID != 0 {
+		keys, err := db.Find[asymkey_model.PublicKey](ctx, asymkey_model.FindPublicKeyOptions{
+			OwnerID:    committerUser.ID,
+			NotKeytype: asymkey_model.KeyTypePrincipal,
+		})
+		if err != nil { // Skipping failed to get ssh keys of user
+			log.Error("ListPublicKeys: %v", err)
+			return &asymkey_model.CommitVerification{
+				CommittingUser: committerUser,
+				Verified:       false,
+				Reason:         "gpg.error.failed_retrieval_gpg_keys",
+			}
+		}
+
+		for _, k := range keys {
+			if k.Verified {
+				commitVerification := verifySSHCommitVerification(c.Signature.Signature, c.Signature.Payload, k, committerUser, committerUser, c.Committer.Email)
+				if commitVerification != nil {
+					return commitVerification
+				}
+			}
+		}
+	}
+
+	// Try the pre-set trusted keys (for key-rotation purpose)
+	// At the moment, we still use the SigningName&SigningEmail for the rotated keys.
+	// Maybe in the future we can extend the key format to "ssh-xxx .... old-user@example.com" to support different signer emails.
+	for _, k := range setting.Repository.Signing.TrustedSSHKeys {
+		signerUser := &user_model.User{
+			Name:  setting.Repository.Signing.SigningName,
+			Email: setting.Repository.Signing.SigningEmail,
+		}
+		commitVerification := verifySSHCommitVerificationByInstanceKey(c, committerUser, signerUser, c.Committer.Email, k)
+		if commitVerification != nil && commitVerification.Verified {
+			return commitVerification
+		}
+	}
+
+	// Try the configured instance-wide SSH public key
+	if setting.Repository.Signing.SigningFormat == git.SigningKeyFormatSSH && !slices.Contains([]string{"", "default", "none"}, setting.Repository.Signing.SigningKey) {
+		gpgSettings := git.GPGSettings{
+			Sign:   true,
+			KeyID:  setting.Repository.Signing.SigningKey,
+			Name:   setting.Repository.Signing.SigningName,
+			Email:  setting.Repository.Signing.SigningEmail,
+			Format: setting.Repository.Signing.SigningFormat,
+		}
+		signerUser := &user_model.User{
+			Name:  gpgSettings.Name,
+			Email: gpgSettings.Email,
+		}
+		if err := gpgSettings.LoadPublicKeyContent(); err != nil {
+			log.Error("Error getting instance-wide SSH signing key %q, err: %v", gpgSettings.KeyID, err)
+		} else {
+			commitVerification := verifySSHCommitVerificationByInstanceKey(c, committerUser, signerUser, gpgSettings.Email, gpgSettings.PublicKeyContent)
+			if commitVerification != nil && commitVerification.Verified {
+				return commitVerification
+			}
+		}
+	}
+
+	return &asymkey_model.CommitVerification{
+		CommittingUser: committerUser,
+		Verified:       false,
+		Reason:         asymkey_model.NoKeyFound,
+	}
+}
+
+func verifySSHCommitVerification(sig, payload string, k *asymkey_model.PublicKey, committer, signer *user_model.User, email string) *asymkey_model.CommitVerification {
+	if err := sshsig.Verify(strings.NewReader(payload), []byte(sig), []byte(k.Content), "git"); err != nil {
+		return nil
+	}
+
+	return &asymkey_model.CommitVerification{ // Everything is ok
+		CommittingUser: committer,
+		Verified:       true,
+		Reason:         fmt.Sprintf("%s / %s", signer.Name, k.Fingerprint),
+		SigningUser:    signer,
+		SigningSSHKey:  k,
+		SigningEmail:   email,
+	}
+}
diff --git a/services/asymkey/commit_test.go b/services/asymkey/commit_test.go
new file mode 100644
index 0000000000..6edba1e90a
--- /dev/null
+++ b/services/asymkey/commit_test.go
@@ -0,0 +1,99 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package asymkey
+
+import (
+	"strings"
+	"testing"
+
+	asymkey_model "code.gitea.io/gitea/models/asymkey"
+	"code.gitea.io/gitea/models/db"
+	"code.gitea.io/gitea/models/unittest"
+	user_model "code.gitea.io/gitea/models/user"
+	"code.gitea.io/gitea/modules/git"
+	"code.gitea.io/gitea/modules/setting"
+	"code.gitea.io/gitea/modules/test"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func TestParseCommitWithSSHSignature(t *testing.T) {
+	assert.NoError(t, unittest.PrepareTestDatabase())
+
+	// Here we only need to do some tests that "tests/integration/gpg_ssh_git_test.go" doesn't cover
+
+	// -----BEGIN OPENSSH PRIVATE KEY-----
+	// b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW
+	// QyNTUxOQAAACC6T6zF0oPak8dOIzzT1kXB7LrcsVo04SKc3GjuvMllZwAAAJgy08upMtPL
+	// qQAAAAtzc2gtZWQyNTUxOQAAACC6T6zF0oPak8dOIzzT1kXB7LrcsVo04SKc3GjuvMllZw
+	// AAAEDWqPHTH51xb4hy1y1f1VeWL/2A9Q0b6atOyv5fx8x5prpPrMXSg9qTx04jPNPWRcHs
+	// utyxWjThIpzcaO68yWVnAAAAEXVzZXIyQGV4YW1wbGUuY29tAQIDBA==
+	// -----END OPENSSH PRIVATE KEY-----
+	sshPubKey, err := asymkey_model.AddPublicKey(t.Context(), 999, "user-ssh-key-any-name", "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAILpPrMXSg9qTx04jPNPWRcHsutyxWjThIpzcaO68yWVn", 0)
+	require.NoError(t, err)
+	_, err = db.GetEngine(t.Context()).ID(sshPubKey.ID).Cols("verified").Update(&asymkey_model.PublicKey{Verified: true})
+	require.NoError(t, err)
+
+	t.Run("UserSSHKey", func(t *testing.T) {
+		commit, err := git.CommitFromReader(nil, git.Sha1ObjectFormat.EmptyObjectID(), strings.NewReader(`tree a3b1fad553e0f9a2b4a58327bebde36c7da75aa2
+author user2 <user2@example.com> 1752194028 -0700
+committer user2 <user2@example.com> 1752194028 -0700
+gpgsig -----BEGIN SSH SIGNATURE-----
+ U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAguk+sxdKD2pPHTiM809ZFwey63L
+ FaNOEinNxo7rzJZWcAAAADZ2l0AAAAAAAAAAZzaGE1MTIAAABTAAAAC3NzaC1lZDI1NTE5
+ AAAAQBfX+6mcKZBnXckwHcBFqRuXMD3vTKi1yv5wgrqIxTyr2LWB97xxmO92cvjsr0POQ2
+ 2YA7mQS510Cg2s1uU1XAk=
+ -----END SSH SIGNATURE-----
+
+init project
+`))
+		require.NoError(t, err)
+
+		// the committingUser is guaranteed by the caller, parseCommitWithSSHSignature doesn't do any more checks
+		committingUser := &user_model.User{ID: 999, Name: "user-x"}
+		ret := parseCommitWithSSHSignature(t.Context(), commit, committingUser)
+		require.NotNil(t, ret)
+		assert.True(t, ret.Verified)
+		assert.Equal(t, committingUser.Name+" / "+sshPubKey.Fingerprint, ret.Reason)
+		assert.False(t, ret.Warning)
+		assert.Equal(t, committingUser, ret.SigningUser)
+		assert.Equal(t, committingUser, ret.CommittingUser)
+		assert.Equal(t, sshPubKey.ID, ret.SigningSSHKey.ID)
+	})
+
+	t.Run("TrustedSSHKey", func(t *testing.T) {
+		defer test.MockVariableValue(&setting.Repository.Signing.SigningName, "gitea")()
+		defer test.MockVariableValue(&setting.Repository.Signing.SigningEmail, "gitea@fake.local")()
+		defer test.MockVariableValue(&setting.Repository.Signing.TrustedSSHKeys, []string{"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIH6Y4idVaW3E+bLw1uqoAfJD7o5Siu+HqS51E9oQLPE9"})()
+
+		commit, err := git.CommitFromReader(nil, git.Sha1ObjectFormat.EmptyObjectID(), strings.NewReader(`tree 9a93ffa76e8b72bdb6431910b3a506fa2b39f42e
+author User Two <user2@example.com> 1749230009 +0200
+committer User Two <user2@example.com> 1749230009 +0200
+gpgsig -----BEGIN SSH SIGNATURE-----
+ U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAgfpjiJ1VpbcT5svDW6qgB8kPujl
+ KK74epLnUT2hAs8T0AAAADZ2l0AAAAAAAAAAZzaGE1MTIAAABTAAAAC3NzaC1lZDI1NTE5
AAAAQDX2t2iHuuLxEWHLJetYXKsgayv3c43r0pJNfAzdLN55Q65pC5M7rG6++gT2bxcpOu + Y6EXbpLqia9sunEF3+LQY= + -----END SSH SIGNATURE----- + +Initial commit with signed file +`)) + require.NoError(t, err) + committingUser := &user_model.User{ + ID: 2, + Name: "User Two", + Email: "user2@example.com", + } + ret := parseCommitWithSSHSignature(t.Context(), commit, committingUser) + require.NotNil(t, ret) + assert.True(t, ret.Verified) + assert.False(t, ret.Warning) + assert.Equal(t, committingUser, ret.CommittingUser) + if assert.NotNil(t, ret.SigningUser) { + assert.Equal(t, "gitea", ret.SigningUser.Name) + assert.Equal(t, "gitea@fake.local", ret.SigningUser.Email) + } + }) +} diff --git a/services/asymkey/deploy_key.go b/services/asymkey/deploy_key.go index 9e5a6d6292..04023f9ffb 100644 --- a/services/asymkey/deploy_key.go +++ b/services/asymkey/deploy_key.go @@ -49,28 +49,21 @@ func deleteDeployKeyFromDB(ctx context.Context, key *asymkey_model.DeployKey) er // DeleteDeployKey deletes deploy key from its repository authorized_keys file if needed. // Permissions check should be done outside. func DeleteDeployKey(ctx context.Context, repo *repo_model.Repository, id int64) error { - dbCtx, committer, err := db.TxContext(ctx) - if err != nil { - return err - } - defer committer.Close() - - key, err := asymkey_model.GetDeployKeyByID(ctx, id) - if err != nil { - if asymkey_model.IsErrDeployKeyNotExist(err) { - return nil + if err := db.WithTx(ctx, func(ctx context.Context) error { + key, err := asymkey_model.GetDeployKeyByID(ctx, id) + if err != nil { + if asymkey_model.IsErrDeployKeyNotExist(err) { + return nil + } + return fmt.Errorf("GetDeployKeyByID: %w", err) } - return fmt.Errorf("GetDeployKeyByID: %w", err) - } - if key.RepoID != repo.ID { - return fmt.Errorf("deploy key %d does not belong to repository %d", id, repo.ID) - } + if key.RepoID != repo.ID { + return fmt.Errorf("deploy key %d does not belong to repository %d", id, repo.ID) + } - if err := deleteDeployKeyFromDB(dbCtx, key); err != nil { - return err - } - if err := committer.Commit(); err != nil { + return deleteDeployKeyFromDB(ctx, key) + }); err != nil { return err } diff --git a/services/asymkey/sign.go b/services/asymkey/sign.go index 2216bca54a..f94462ea46 100644 --- a/services/asymkey/sign.go +++ b/services/asymkey/sign.go @@ -6,6 +6,7 @@ package asymkey import ( "context" "fmt" + "os" "strings" asymkey_model "code.gitea.io/gitea/models/asymkey" @@ -85,9 +86,9 @@ func IsErrWontSign(err error) bool { } // SigningKey returns the KeyID and git Signature for the repo -func SigningKey(ctx context.Context, repoPath string) (string, *git.Signature) { +func SigningKey(ctx context.Context, repoPath string) (*git.SigningKey, *git.Signature) { if setting.Repository.Signing.SigningKey == "none" { - return "", nil + return nil, nil } if setting.Repository.Signing.SigningKey == "default" || setting.Repository.Signing.SigningKey == "" { @@ -95,53 +96,77 @@ func SigningKey(ctx context.Context, repoPath string) (string, *git.Signature) { value, _, _ := git.NewCommand("config", "--get", "commit.gpgsign").RunStdString(ctx, &git.RunOpts{Dir: repoPath}) sign, valid := git.ParseBool(strings.TrimSpace(value)) if !sign || !valid { - return "", nil + return nil, nil } + format, _, _ := git.NewCommand("config", "--default", git.SigningKeyFormatOpenPGP, "--get", "gpg.format").RunStdString(ctx, &git.RunOpts{Dir: repoPath}) signingKey, _, _ := git.NewCommand("config", "--get", "user.signingkey").RunStdString(ctx, &git.RunOpts{Dir: repoPath}) signingName, _, _ := 
git.NewCommand("config", "--get", "user.name").RunStdString(ctx, &git.RunOpts{Dir: repoPath}) signingEmail, _, _ := git.NewCommand("config", "--get", "user.email").RunStdString(ctx, &git.RunOpts{Dir: repoPath}) - return strings.TrimSpace(signingKey), &git.Signature{ - Name: strings.TrimSpace(signingName), - Email: strings.TrimSpace(signingEmail), + + if strings.TrimSpace(signingKey) == "" { + return nil, nil } + + return &git.SigningKey{ + KeyID: strings.TrimSpace(signingKey), + Format: strings.TrimSpace(format), + }, &git.Signature{ + Name: strings.TrimSpace(signingName), + Email: strings.TrimSpace(signingEmail), + } } - return setting.Repository.Signing.SigningKey, &git.Signature{ - Name: setting.Repository.Signing.SigningName, - Email: setting.Repository.Signing.SigningEmail, + if setting.Repository.Signing.SigningKey == "" { + return nil, nil } + + return &git.SigningKey{ + KeyID: setting.Repository.Signing.SigningKey, + Format: setting.Repository.Signing.SigningFormat, + }, &git.Signature{ + Name: setting.Repository.Signing.SigningName, + Email: setting.Repository.Signing.SigningEmail, + } } // PublicSigningKey gets the public signing key within a provided repository directory -func PublicSigningKey(ctx context.Context, repoPath string) (string, error) { +func PublicSigningKey(ctx context.Context, repoPath string) (content, format string, err error) { signingKey, _ := SigningKey(ctx, repoPath) - if signingKey == "" { - return "", nil + if signingKey == nil { + return "", "", nil + } + if signingKey.Format == git.SigningKeyFormatSSH { + content, err := os.ReadFile(signingKey.KeyID) + if err != nil { + log.Error("Unable to read SSH public key file in %s: %s, %v", repoPath, signingKey, err) + return "", signingKey.Format, err + } + return string(content), signingKey.Format, nil } content, stderr, err := process.GetManager().ExecDir(ctx, -1, repoPath, - "gpg --export -a", "gpg", "--export", "-a", signingKey) + "gpg --export -a", "gpg", "--export", "-a", signingKey.KeyID) if err != nil { log.Error("Unable to get default signing key in %s: %s, %s, %v", repoPath, signingKey, stderr, err) - return "", err + return "", signingKey.Format, err } - return content, nil + return content, signingKey.Format, nil } // SignInitialCommit determines if we should sign the initial commit to this repository -func SignInitialCommit(ctx context.Context, repoPath string, u *user_model.User) (bool, string, *git.Signature, error) { +func SignInitialCommit(ctx context.Context, repoPath string, u *user_model.User) (bool, *git.SigningKey, *git.Signature, error) { rules := signingModeFromStrings(setting.Repository.Signing.InitialCommit) signingKey, sig := SigningKey(ctx, repoPath) - if signingKey == "" { - return false, "", nil, &ErrWontSign{noKey} + if signingKey == nil { + return false, nil, nil, &ErrWontSign{noKey} } Loop: for _, rule := range rules { switch rule { case never: - return false, "", nil, &ErrWontSign{never} + return false, nil, nil, &ErrWontSign{never} case always: break Loop case pubkey: @@ -150,18 +175,18 @@ Loop: IncludeSubKeys: true, }) if err != nil { - return false, "", nil, err + return false, nil, nil, err } if len(keys) == 0 { - return false, "", nil, &ErrWontSign{pubkey} + return false, nil, nil, &ErrWontSign{pubkey} } case twofa: twofaModel, err := auth.GetTwoFactorByUID(ctx, u.ID) if err != nil && !auth.IsErrTwoFactorNotEnrolled(err) { - return false, "", nil, err + return false, nil, nil, err } if twofaModel == nil { - return false, "", nil, &ErrWontSign{twofa} + return false, nil, nil, 
&ErrWontSign{twofa} } } } @@ -169,19 +194,19 @@ Loop: } // SignWikiCommit determines if we should sign the commits to this repository wiki -func SignWikiCommit(ctx context.Context, repo *repo_model.Repository, u *user_model.User) (bool, string, *git.Signature, error) { +func SignWikiCommit(ctx context.Context, repo *repo_model.Repository, u *user_model.User) (bool, *git.SigningKey, *git.Signature, error) { repoWikiPath := repo.WikiPath() rules := signingModeFromStrings(setting.Repository.Signing.Wiki) signingKey, sig := SigningKey(ctx, repoWikiPath) - if signingKey == "" { - return false, "", nil, &ErrWontSign{noKey} + if signingKey == nil { + return false, nil, nil, &ErrWontSign{noKey} } Loop: for _, rule := range rules { switch rule { case never: - return false, "", nil, &ErrWontSign{never} + return false, nil, nil, &ErrWontSign{never} case always: break Loop case pubkey: @@ -190,35 +215,35 @@ Loop: IncludeSubKeys: true, }) if err != nil { - return false, "", nil, err + return false, nil, nil, err } if len(keys) == 0 { - return false, "", nil, &ErrWontSign{pubkey} + return false, nil, nil, &ErrWontSign{pubkey} } case twofa: twofaModel, err := auth.GetTwoFactorByUID(ctx, u.ID) if err != nil && !auth.IsErrTwoFactorNotEnrolled(err) { - return false, "", nil, err + return false, nil, nil, err } if twofaModel == nil { - return false, "", nil, &ErrWontSign{twofa} + return false, nil, nil, &ErrWontSign{twofa} } case parentSigned: gitRepo, err := gitrepo.OpenRepository(ctx, repo.WikiStorageRepo()) if err != nil { - return false, "", nil, err + return false, nil, nil, err } defer gitRepo.Close() commit, err := gitRepo.GetCommit("HEAD") if err != nil { - return false, "", nil, err + return false, nil, nil, err } if commit.Signature == nil { - return false, "", nil, &ErrWontSign{parentSigned} + return false, nil, nil, &ErrWontSign{parentSigned} } verification := ParseCommitWithSignature(ctx, commit) if !verification.Verified { - return false, "", nil, &ErrWontSign{parentSigned} + return false, nil, nil, &ErrWontSign{parentSigned} } } } @@ -226,18 +251,18 @@ Loop: } // SignCRUDAction determines if we should sign a CRUD commit to this repository -func SignCRUDAction(ctx context.Context, repoPath string, u *user_model.User, tmpBasePath, parentCommit string) (bool, string, *git.Signature, error) { +func SignCRUDAction(ctx context.Context, repoPath string, u *user_model.User, tmpBasePath, parentCommit string) (bool, *git.SigningKey, *git.Signature, error) { rules := signingModeFromStrings(setting.Repository.Signing.CRUDActions) signingKey, sig := SigningKey(ctx, repoPath) - if signingKey == "" { - return false, "", nil, &ErrWontSign{noKey} + if signingKey == nil { + return false, nil, nil, &ErrWontSign{noKey} } Loop: for _, rule := range rules { switch rule { case never: - return false, "", nil, &ErrWontSign{never} + return false, nil, nil, &ErrWontSign{never} case always: break Loop case pubkey: @@ -246,35 +271,35 @@ Loop: IncludeSubKeys: true, }) if err != nil { - return false, "", nil, err + return false, nil, nil, err } if len(keys) == 0 { - return false, "", nil, &ErrWontSign{pubkey} + return false, nil, nil, &ErrWontSign{pubkey} } case twofa: twofaModel, err := auth.GetTwoFactorByUID(ctx, u.ID) if err != nil && !auth.IsErrTwoFactorNotEnrolled(err) { - return false, "", nil, err + return false, nil, nil, err } if twofaModel == nil { - return false, "", nil, &ErrWontSign{twofa} + return false, nil, nil, &ErrWontSign{twofa} } case parentSigned: gitRepo, err := git.OpenRepository(ctx, tmpBasePath) if 
err != nil { - return false, "", nil, err + return false, nil, nil, err } defer gitRepo.Close() commit, err := gitRepo.GetCommit(parentCommit) if err != nil { - return false, "", nil, err + return false, nil, nil, err } if commit.Signature == nil { - return false, "", nil, &ErrWontSign{parentSigned} + return false, nil, nil, &ErrWontSign{parentSigned} } verification := ParseCommitWithSignature(ctx, commit) if !verification.Verified { - return false, "", nil, &ErrWontSign{parentSigned} + return false, nil, nil, &ErrWontSign{parentSigned} } } } @@ -282,16 +307,16 @@ Loop: } // SignMerge determines if we should sign a PR merge commit to the base repository -func SignMerge(ctx context.Context, pr *issues_model.PullRequest, u *user_model.User, tmpBasePath, baseCommit, headCommit string) (bool, string, *git.Signature, error) { +func SignMerge(ctx context.Context, pr *issues_model.PullRequest, u *user_model.User, tmpBasePath, baseCommit, headCommit string) (bool, *git.SigningKey, *git.Signature, error) { if err := pr.LoadBaseRepo(ctx); err != nil { log.Error("Unable to get Base Repo for pull request") - return false, "", nil, err + return false, nil, nil, err } repo := pr.BaseRepo signingKey, signer := SigningKey(ctx, repo.RepoPath()) - if signingKey == "" { - return false, "", nil, &ErrWontSign{noKey} + if signingKey == nil { + return false, nil, nil, &ErrWontSign{noKey} } rules := signingModeFromStrings(setting.Repository.Signing.Merges) @@ -302,7 +327,7 @@ Loop: for _, rule := range rules { switch rule { case never: - return false, "", nil, &ErrWontSign{never} + return false, nil, nil, &ErrWontSign{never} case always: break Loop case pubkey: @@ -311,91 +336,91 @@ Loop: IncludeSubKeys: true, }) if err != nil { - return false, "", nil, err + return false, nil, nil, err } if len(keys) == 0 { - return false, "", nil, &ErrWontSign{pubkey} + return false, nil, nil, &ErrWontSign{pubkey} } case twofa: twofaModel, err := auth.GetTwoFactorByUID(ctx, u.ID) if err != nil && !auth.IsErrTwoFactorNotEnrolled(err) { - return false, "", nil, err + return false, nil, nil, err } if twofaModel == nil { - return false, "", nil, &ErrWontSign{twofa} + return false, nil, nil, &ErrWontSign{twofa} } case approved: protectedBranch, err := git_model.GetFirstMatchProtectedBranchRule(ctx, repo.ID, pr.BaseBranch) if err != nil { - return false, "", nil, err + return false, nil, nil, err } if protectedBranch == nil { - return false, "", nil, &ErrWontSign{approved} + return false, nil, nil, &ErrWontSign{approved} } if issues_model.GetGrantedApprovalsCount(ctx, protectedBranch, pr) < 1 { - return false, "", nil, &ErrWontSign{approved} + return false, nil, nil, &ErrWontSign{approved} } case baseSigned: if gitRepo == nil { gitRepo, err = git.OpenRepository(ctx, tmpBasePath) if err != nil { - return false, "", nil, err + return false, nil, nil, err } defer gitRepo.Close() } commit, err := gitRepo.GetCommit(baseCommit) if err != nil { - return false, "", nil, err + return false, nil, nil, err } verification := ParseCommitWithSignature(ctx, commit) if !verification.Verified { - return false, "", nil, &ErrWontSign{baseSigned} + return false, nil, nil, &ErrWontSign{baseSigned} } case headSigned: if gitRepo == nil { gitRepo, err = git.OpenRepository(ctx, tmpBasePath) if err != nil { - return false, "", nil, err + return false, nil, nil, err } defer gitRepo.Close() } commit, err := gitRepo.GetCommit(headCommit) if err != nil { - return false, "", nil, err + return false, nil, nil, err } verification := ParseCommitWithSignature(ctx, 
commit) if !verification.Verified { - return false, "", nil, &ErrWontSign{headSigned} + return false, nil, nil, &ErrWontSign{headSigned} } case commitsSigned: if gitRepo == nil { gitRepo, err = git.OpenRepository(ctx, tmpBasePath) if err != nil { - return false, "", nil, err + return false, nil, nil, err } defer gitRepo.Close() } commit, err := gitRepo.GetCommit(headCommit) if err != nil { - return false, "", nil, err + return false, nil, nil, err } verification := ParseCommitWithSignature(ctx, commit) if !verification.Verified { - return false, "", nil, &ErrWontSign{commitsSigned} + return false, nil, nil, &ErrWontSign{commitsSigned} } // need to work out merge-base mergeBaseCommit, _, err := gitRepo.GetMergeBase("", baseCommit, headCommit) if err != nil { - return false, "", nil, err + return false, nil, nil, err } commitList, err := commit.CommitsBeforeUntil(mergeBaseCommit) if err != nil { - return false, "", nil, err + return false, nil, nil, err } for _, commit := range commitList { verification := ParseCommitWithSignature(ctx, commit) if !verification.Verified { - return false, "", nil, &ErrWontSign{commitsSigned} + return false, nil, nil, &ErrWontSign{commitsSigned} } } } diff --git a/services/asymkey/ssh_key.go b/services/asymkey/ssh_key.go index da57059d4b..01fa7ff15f 100644 --- a/services/asymkey/ssh_key.go +++ b/services/asymkey/ssh_key.go @@ -27,20 +27,9 @@ func DeletePublicKey(ctx context.Context, doer *user_model.User, id int64) (err } } - dbCtx, committer, err := db.TxContext(ctx) - if err != nil { - return err - } - defer committer.Close() - - if _, err = db.DeleteByID[asymkey_model.PublicKey](dbCtx, id); err != nil { - return err - } - - if err = committer.Commit(); err != nil { + if _, err = db.DeleteByID[asymkey_model.PublicKey](ctx, id); err != nil { return err } - committer.Close() if key.Type == asymkey_model.KeyTypePrincipal { return RewriteAllPrincipalKeys(ctx) diff --git a/services/asymkey/ssh_key_principals.go b/services/asymkey/ssh_key_principals.go index 5ed5cfa782..6493c1cc51 100644 --- a/services/asymkey/ssh_key_principals.go +++ b/services/asymkey/ssh_key_principals.go @@ -14,24 +14,6 @@ import ( // AddPrincipalKey adds new principal to database and authorized_principals file. func AddPrincipalKey(ctx context.Context, ownerID int64, content string, authSourceID int64) (*asymkey_model.PublicKey, error) { - dbCtx, committer, err := db.TxContext(ctx) - if err != nil { - return nil, err - } - defer committer.Close() - - // Principals cannot be duplicated. - has, err := db.GetEngine(dbCtx). - Where("content = ? AND type = ?", content, asymkey_model.KeyTypePrincipal). - Get(new(asymkey_model.PublicKey)) - if err != nil { - return nil, err - } else if has { - return nil, asymkey_model.ErrKeyAlreadyExist{ - Content: content, - } - } - key := &asymkey_model.PublicKey{ OwnerID: ownerID, Name: content, @@ -40,15 +22,27 @@ func AddPrincipalKey(ctx context.Context, ownerID int64, content string, authSou Type: asymkey_model.KeyTypePrincipal, LoginSourceID: authSourceID, } - if err = db.Insert(dbCtx, key); err != nil { - return nil, fmt.Errorf("addKey: %w", err) - } - if err = committer.Commit(); err != nil { + if err := db.WithTx(ctx, func(ctx context.Context) error { + // Principals cannot be duplicated. + has, err := db.GetEngine(ctx). + Where("content = ? AND type = ?", content, asymkey_model.KeyTypePrincipal). 
+ Get(new(asymkey_model.PublicKey)) + if err != nil { + return err + } else if has { + return asymkey_model.ErrKeyAlreadyExist{ + Content: content, + } + } + + if err = db.Insert(ctx, key); err != nil { + return fmt.Errorf("addKey: %w", err) + } + return nil + }); err != nil { return nil, err } - committer.Close() - return key, RewriteAllPrincipalKeys(ctx) } diff --git a/services/attachment/attachment_test.go b/services/attachment/attachment_test.go index 142bcfe629..65475836be 100644 --- a/services/attachment/attachment_test.go +++ b/services/attachment/attachment_test.go @@ -41,6 +41,6 @@ func TestUploadAttachment(t *testing.T) { attachment, err := repo_model.GetAttachmentByUUID(db.DefaultContext, attach.UUID) assert.NoError(t, err) - assert.EqualValues(t, user.ID, attachment.UploaderID) + assert.Equal(t, user.ID, attachment.UploaderID) assert.Equal(t, int64(0), attachment.DownloadCount) } diff --git a/services/auth/auth.go b/services/auth/auth.go index f7deeb4c50..fb6612290b 100644 --- a/services/auth/auth.go +++ b/services/auth/auth.go @@ -62,14 +62,14 @@ func (a *authPathDetector) isAPIPath() bool { // isAttachmentDownload check if request is a file download (GET) with URL to an attachment func (a *authPathDetector) isAttachmentDownload() bool { - return strings.HasPrefix(a.req.URL.Path, "/attachments/") && a.req.Method == "GET" + return strings.HasPrefix(a.req.URL.Path, "/attachments/") && a.req.Method == http.MethodGet } func (a *authPathDetector) isFeedRequest(req *http.Request) bool { if !setting.Other.EnableFeed { return false } - if req.Method != "GET" { + if req.Method != http.MethodGet { return false } return a.vars.feedPathRe.MatchString(req.URL.Path) || a.vars.feedRefPathRe.MatchString(req.URL.Path) diff --git a/services/auth/auth_test.go b/services/auth/auth_test.go index b8d3396163..c45f312c90 100644 --- a/services/auth/auth_test.go +++ b/services/auth/auth_test.go @@ -97,7 +97,7 @@ func Test_isGitRawOrLFSPath(t *testing.T) { defer test.MockVariableValue(&setting.LFS.StartServer)() for _, tt := range tests { t.Run(tt.path, func(t *testing.T) { - req, _ := http.NewRequest("POST", "http://localhost"+tt.path, nil) + req, _ := http.NewRequest(http.MethodPost, "http://localhost"+tt.path, nil) setting.LFS.StartServer = false assert.Equal(t, tt.want, newAuthPathDetector(req).isGitRawOrAttachOrLFSPath()) @@ -119,7 +119,7 @@ func Test_isGitRawOrLFSPath(t *testing.T) { } for _, tt := range lfsTests { t.Run(tt, func(t *testing.T) { - req, _ := http.NewRequest("POST", tt, nil) + req, _ := http.NewRequest(http.MethodPost, tt, nil) setting.LFS.StartServer = false got := newAuthPathDetector(req).isGitRawOrAttachOrLFSPath() assert.Equalf(t, setting.LFS.StartServer, got, "isGitOrLFSPath(%q) = %v, want %v, %v", tt, got, setting.LFS.StartServer, globalVars().gitRawOrAttachPathRe.MatchString(tt)) @@ -148,7 +148,7 @@ func Test_isFeedRequest(t *testing.T) { } for _, tt := range tests { t.Run(tt.path, func(t *testing.T) { - req, _ := http.NewRequest("GET", "http://localhost"+tt.path, nil) + req, _ := http.NewRequest(http.MethodGet, "http://localhost"+tt.path, nil) assert.Equal(t, tt.want, newAuthPathDetector(req).isFeedRequest(req)) }) } diff --git a/services/auth/basic.go b/services/auth/basic.go index e22b9e1eb7..6d147deeb1 100644 --- a/services/auth/basic.go +++ b/services/auth/basic.go @@ -7,12 +7,11 @@ package auth import ( "errors" "net/http" - "strings" actions_model "code.gitea.io/gitea/models/actions" auth_model "code.gitea.io/gitea/models/auth" user_model 
"code.gitea.io/gitea/models/user" - "code.gitea.io/gitea/modules/base" + "code.gitea.io/gitea/modules/auth/httpauth" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" @@ -47,24 +46,22 @@ func (b *Basic) Name() string { // name/token on successful validation. // Returns nil if header is empty or validation fails. func (b *Basic) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) (*user_model.User, error) { - // Basic authentication should only fire on API, Feed, Download or on Git or LFSPaths + // Basic authentication should only fire on API, Feed, Download, Archives or on Git or LFSPaths // Not all feed (rss/atom) clients feature the ability to add cookies or headers, so we need to allow basic auth for feeds detector := newAuthPathDetector(req) - if !detector.isAPIPath() && !detector.isFeedRequest(req) && !detector.isContainerPath() && !detector.isAttachmentDownload() && !detector.isGitRawOrAttachOrLFSPath() { + if !detector.isAPIPath() && !detector.isFeedRequest(req) && !detector.isContainerPath() && !detector.isAttachmentDownload() && !detector.isArchivePath() && !detector.isGitRawOrAttachOrLFSPath() { return nil, nil } - baHead := req.Header.Get("Authorization") - if len(baHead) == 0 { + authHeader := req.Header.Get("Authorization") + if authHeader == "" { return nil, nil } - - auths := strings.SplitN(baHead, " ", 2) - if len(auths) != 2 || (strings.ToLower(auths[0]) != "basic") { + parsed, ok := httpauth.ParseAuthorizationHeader(authHeader) + if !ok || parsed.BasicAuth == nil { return nil, nil } - - uname, passwd, _ := base.BasicAuthDecode(auths[1]) + uname, passwd := parsed.BasicAuth.Username, parsed.BasicAuth.Password // Check if username or password is a token isUsernameToken := len(passwd) == 0 || passwd == "x-oauth-basic" @@ -142,14 +139,14 @@ func (b *Basic) Verify(req *http.Request, w http.ResponseWriter, store DataStore return nil, err } - if skipper, ok := source.Cfg.(LocalTwoFASkipper); !ok || !skipper.IsSkipLocalTwoFA() { - // Check if the user has webAuthn registration + if !source.TwoFactorShouldSkip() { + // Check if the user has WebAuthn registration hasWebAuthn, err := auth_model.HasWebAuthnRegistrationsByUID(req.Context(), u.ID) if err != nil { return nil, err } if hasWebAuthn { - return nil, errors.New("Basic authorization is not allowed while webAuthn enrolled") + return nil, errors.New("basic authorization is not allowed while WebAuthn enrolled") } if err := validateTOTP(req, u); err != nil { diff --git a/services/auth/httpsign.go b/services/auth/httpsign.go index 83a36bef23..25e96ff32d 100644 --- a/services/auth/httpsign.go +++ b/services/auth/httpsign.go @@ -134,7 +134,7 @@ func VerifyCert(r *http.Request) (*asymkey_model.PublicKey, error) { // Check if it's really a ssh certificate cert, ok := pk.(*ssh.Certificate) if !ok { - return nil, fmt.Errorf("no certificate found") + return nil, errors.New("no certificate found") } c := &ssh.CertChecker{ @@ -153,7 +153,7 @@ func VerifyCert(r *http.Request) (*asymkey_model.PublicKey, error) { // check the CA of the cert if !c.IsUserAuthority(cert.SignatureKey) { - return nil, fmt.Errorf("CA check failed") + return nil, errors.New("CA check failed") } // Create a verifier @@ -191,7 +191,7 @@ func VerifyCert(r *http.Request) (*asymkey_model.PublicKey, error) { } // No public key matching a principal in the certificate is registered in gitea - return nil, fmt.Errorf("no valid principal found") + return nil, errors.New("no valid 
principal found") } // doVerify iterates across the provided public keys attempting the verify the current request against each key in turn diff --git a/services/auth/interface.go b/services/auth/interface.go index 275b4dd56c..c4bed2b640 100644 --- a/services/auth/interface.go +++ b/services/auth/interface.go @@ -20,7 +20,7 @@ type SessionStore session.Store // Method represents an authentication method (plugin) for HTTP requests. type Method interface { // Verify tries to verify the authentication data contained in the request. - // If verification is successful returns either an existing user object (with id > 0) + // If verification succeeds, it returns either an existing user object (with id > 0) // or a new user object (with id = 0) populated with the information that was found // in the authentication data (username or email). // Second argument returns err if verification fails, otherwise @@ -35,11 +35,6 @@ type PasswordAuthenticator interface { Authenticate(ctx context.Context, user *user_model.User, login, password string) (*user_model.User, error) } -// LocalTwoFASkipper represents a source of authentication that can skip local 2fa -type LocalTwoFASkipper interface { - IsSkipLocalTwoFA() bool -} - // SynchronizableSource represents a source that can synchronize users type SynchronizableSource interface { Sync(ctx context.Context, updateExisting bool) error diff --git a/services/auth/oauth2.go b/services/auth/oauth2.go index 66cc686809..7df6f4638e 100644 --- a/services/auth/oauth2.go +++ b/services/auth/oauth2.go @@ -13,6 +13,7 @@ import ( actions_model "code.gitea.io/gitea/models/actions" auth_model "code.gitea.io/gitea/models/auth" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/auth/httpauth" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" @@ -97,9 +98,9 @@ func parseToken(req *http.Request) (string, bool) { // check header token if auHead := req.Header.Get("Authorization"); auHead != "" { - auths := strings.Fields(auHead) - if len(auths) == 2 && (auths[0] == "token" || strings.ToLower(auths[0]) == "bearer") { - return auths[1], true + parsed, ok := httpauth.ParseAuthorizationHeader(auHead) + if ok && parsed.BearerToken != nil { + return parsed.BearerToken.Token, true } } return "", false diff --git a/services/auth/oauth2_test.go b/services/auth/oauth2_test.go index 9edf18d58e..f003742a94 100644 --- a/services/auth/oauth2_test.go +++ b/services/auth/oauth2_test.go @@ -26,7 +26,7 @@ func TestUserIDFromToken(t *testing.T) { o := OAuth2{} uid := o.userIDFromToken(t.Context(), token, ds) - assert.Equal(t, int64(user_model.ActionsUserID), uid) + assert.Equal(t, user_model.ActionsUserID, uid) assert.Equal(t, true, ds["IsActionsToken"]) assert.Equal(t, ds["ActionsTaskID"], int64(RunningTaskID)) }) diff --git a/services/auth/source/db/source.go b/services/auth/source/db/source.go index bb2270cbd6..90baa61f5b 100644 --- a/services/auth/source/db/source.go +++ b/services/auth/source/db/source.go @@ -11,7 +11,9 @@ import ( ) // Source is a password authentication service -type Source struct{} +type Source struct { + auth.ConfigBase `json:"-"` +} // FromDB fills up an OAuth2Config from serialized format. 
func (source *Source) FromDB(bs []byte) error { diff --git a/services/auth/source/ldap/assert_interface_test.go b/services/auth/source/ldap/assert_interface_test.go index 33347687dc..8e8accd668 100644 --- a/services/auth/source/ldap/assert_interface_test.go +++ b/services/auth/source/ldap/assert_interface_test.go @@ -15,13 +15,11 @@ import ( type sourceInterface interface { auth.PasswordAuthenticator auth.SynchronizableSource - auth.LocalTwoFASkipper auth_model.SSHKeyProvider auth_model.Config auth_model.SkipVerifiable auth_model.HasTLSer auth_model.UseTLSer - auth_model.SourceSettable } var _ (sourceInterface) = &ldap.Source{} diff --git a/services/auth/source/ldap/source.go b/services/auth/source/ldap/source.go index 963cdba7c2..d49dbc45ce 100644 --- a/services/auth/source/ldap/source.go +++ b/services/auth/source/ldap/source.go @@ -24,6 +24,8 @@ import ( // Source Basic LDAP authentication service type Source struct { + auth.ConfigBase `json:"-"` + Name string // canonical name (ie. corporate.ad) Host string // LDAP host Port int // port number @@ -54,9 +56,6 @@ type Source struct { GroupTeamMap string // Map LDAP groups to teams GroupTeamMapRemoval bool // Remove user from teams which are synchronized and user is not a member of the corresponding LDAP group UserUID string // User Attribute listed in Group - SkipLocalTwoFA bool `json:",omitempty"` // Skip Local 2fa for users authenticated with this source - - authSource *auth.Source // reference to the authSource } // FromDB fills up a LDAPConfig from serialized format. @@ -109,11 +108,6 @@ func (source *Source) ProvidesSSHKeys() bool { return strings.TrimSpace(source.AttributeSSHPublicKey) != "" } -// SetAuthSource sets the related AuthSource -func (source *Source) SetAuthSource(authSource *auth.Source) { - source.authSource = authSource -} - func init() { auth.RegisterTypeConfig(auth.LDAP, &Source{}) auth.RegisterTypeConfig(auth.DLDAP, &Source{}) diff --git a/services/auth/source/ldap/source_authenticate.go b/services/auth/source/ldap/source_authenticate.go index 6a6c60cd40..6005a4744a 100644 --- a/services/auth/source/ldap/source_authenticate.go +++ b/services/auth/source/ldap/source_authenticate.go @@ -5,7 +5,6 @@ package ldap import ( "context" - "fmt" "strings" asymkey_model "code.gitea.io/gitea/models/asymkey" @@ -26,7 +25,7 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u if user != nil { loginName = user.LoginName } - sr := source.SearchEntry(loginName, password, source.authSource.Type == auth.DLDAP) + sr := source.SearchEntry(loginName, password, source.AuthSource.Type == auth.DLDAP) if sr == nil { // User not in LDAP, do nothing return nil, user_model.ErrUserNotExist{Name: loginName} @@ -41,7 +40,7 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u sr.Username = userName } if sr.Mail == "" { - sr.Mail = fmt.Sprintf("%s@localhost.local", sr.Username) + sr.Mail = sr.Username + "@localhost.local" } isAttributeSSHPublicKeySet := strings.TrimSpace(source.AttributeSSHPublicKey) != "" @@ -59,7 +58,7 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u opts := &user_service.UpdateOptions{} if source.AdminFilter != "" && user.IsAdmin != sr.IsAdmin { // Change existing admin flag only if AdminFilter option is set - opts.IsAdmin = optional.Some(sr.IsAdmin) + opts.IsAdmin = user_service.UpdateOptionFieldFromSync(sr.IsAdmin) } if !sr.IsAdmin && source.RestrictedFilter != "" && user.IsRestricted != sr.IsRestricted { // Change existing 
restricted flag only if RestrictedFilter option is set @@ -74,7 +73,7 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u } if user != nil { - if isAttributeSSHPublicKeySet && asymkey_model.SynchronizePublicKeys(ctx, user, source.authSource, sr.SSHPublicKey) { + if isAttributeSSHPublicKeySet && asymkey_model.SynchronizePublicKeys(ctx, user, source.AuthSource, sr.SSHPublicKey) { if err := asymkey_service.RewriteAllPublicKeys(ctx); err != nil { return user, err } @@ -85,8 +84,8 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u Name: sr.Username, FullName: composeFullName(sr.Name, sr.Surname, sr.Username), Email: sr.Mail, - LoginType: source.authSource.Type, - LoginSource: source.authSource.ID, + LoginType: source.AuthSource.Type, + LoginSource: source.AuthSource.ID, LoginName: userName, IsAdmin: sr.IsAdmin, } @@ -100,7 +99,7 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u return user, err } - if isAttributeSSHPublicKeySet && asymkey_model.AddPublicKeysBySource(ctx, user, source.authSource, sr.SSHPublicKey) { + if isAttributeSSHPublicKeySet && asymkey_model.AddPublicKeysBySource(ctx, user, source.AuthSource, sr.SSHPublicKey) { if err := asymkey_service.RewriteAllPublicKeys(ctx); err != nil { return user, err } @@ -124,8 +123,3 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u return user, nil } - -// IsSkipLocalTwoFA returns if this source should skip local 2fa for password authentication -func (source *Source) IsSkipLocalTwoFA() bool { - return source.SkipLocalTwoFA -} diff --git a/services/auth/source/ldap/source_search.go b/services/auth/source/ldap/source_search.go index fa2c45ce4a..f6c032492f 100644 --- a/services/auth/source/ldap/source_search.go +++ b/services/auth/source/ldap/source_search.go @@ -117,10 +117,10 @@ func dial(source *Source) (*ldap.Conn, error) { } if source.SecurityProtocol == SecurityProtocolLDAPS { - return ldap.DialTLS("tcp", net.JoinHostPort(source.Host, strconv.Itoa(source.Port)), tlsConfig) //nolint:staticcheck + return ldap.DialTLS("tcp", net.JoinHostPort(source.Host, strconv.Itoa(source.Port)), tlsConfig) //nolint:staticcheck // DialTLS is deprecated } - conn, err := ldap.Dial("tcp", net.JoinHostPort(source.Host, strconv.Itoa(source.Port))) //nolint:staticcheck + conn, err := ldap.Dial("tcp", net.JoinHostPort(source.Host, strconv.Itoa(source.Port))) //nolint:staticcheck // Dial is deprecated if err != nil { return nil, fmt.Errorf("error during Dial: %w", err) } @@ -241,7 +241,7 @@ func (source *Source) listLdapGroupMemberships(l *ldap.Conn, uid string, applyGr } func (source *Source) getUserAttributeListedInGroup(entry *ldap.Entry) string { - if strings.ToLower(source.UserUID) == "dn" { + if strings.EqualFold(source.UserUID, "dn") { return entry.DN } diff --git a/services/auth/source/ldap/source_sync.go b/services/auth/source/ldap/source_sync.go index e817bf1fa9..7b401c5c96 100644 --- a/services/auth/source/ldap/source_sync.go +++ b/services/auth/source/ldap/source_sync.go @@ -5,7 +5,6 @@ package ldap import ( "context" - "fmt" "strings" asymkey_model "code.gitea.io/gitea/models/asymkey" @@ -23,21 +22,21 @@ import ( // Sync causes this ldap source to synchronize its users with the db func (source *Source) Sync(ctx context.Context, updateExisting bool) error { - log.Trace("Doing: SyncExternalUsers[%s]", source.authSource.Name) + log.Trace("Doing: SyncExternalUsers[%s]", source.AuthSource.Name) isAttributeSSHPublicKeySet := 
strings.TrimSpace(source.AttributeSSHPublicKey) != "" var sshKeysNeedUpdate bool // Find all users with this login type - FIXME: Should this be an iterator? - users, err := user_model.GetUsersBySource(ctx, source.authSource) + users, err := user_model.GetUsersBySource(ctx, source.AuthSource) if err != nil { log.Error("SyncExternalUsers: %v", err) return err } select { case <-ctx.Done(): - log.Warn("SyncExternalUsers: Cancelled before update of %s", source.authSource.Name) - return db.ErrCancelledf("Before update of %s", source.authSource.Name) + log.Warn("SyncExternalUsers: Cancelled before update of %s", source.AuthSource.Name) + return db.ErrCancelledf("Before update of %s", source.AuthSource.Name) default: } @@ -52,7 +51,7 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { sr, err := source.SearchEntries() if err != nil { - log.Error("SyncExternalUsers LDAP source failure [%s], skipped", source.authSource.Name) + log.Error("SyncExternalUsers LDAP source failure [%s], skipped", source.AuthSource.Name) return nil } @@ -75,7 +74,7 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { for _, su := range sr { select { case <-ctx.Done(): - log.Warn("SyncExternalUsers: Cancelled at update of %s before completed update of users", source.authSource.Name) + log.Warn("SyncExternalUsers: Cancelled at update of %s before completed update of users", source.AuthSource.Name) // Rewrite authorized_keys file if LDAP Public SSH Key attribute is set and any key was added or removed if sshKeysNeedUpdate { err = asymkey_service.RewriteAllPublicKeys(ctx) @@ -83,7 +82,7 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { log.Error("RewriteAllPublicKeys: %v", err) } } - return db.ErrCancelledf("During update of %s before completed update of users", source.authSource.Name) + return db.ErrCancelledf("During update of %s before completed update of users", source.AuthSource.Name) default: } if su.Username == "" && su.Mail == "" { @@ -106,20 +105,20 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { } if su.Mail == "" { - su.Mail = fmt.Sprintf("%s@localhost.local", su.Username) + su.Mail = su.Username + "@localhost.local" } fullName := composeFullName(su.Name, su.Surname, su.Username) // If no existing user found, create one if usr == nil { - log.Trace("SyncExternalUsers[%s]: Creating user %s", source.authSource.Name, su.Username) + log.Trace("SyncExternalUsers[%s]: Creating user %s", source.AuthSource.Name, su.Username) usr = &user_model.User{ LowerName: su.LowerName, Name: su.Username, FullName: fullName, - LoginType: source.authSource.Type, - LoginSource: source.authSource.ID, + LoginType: source.AuthSource.Type, + LoginSource: source.AuthSource.ID, LoginName: su.Username, Email: su.Mail, IsAdmin: su.IsAdmin, @@ -131,12 +130,12 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { err = user_model.CreateUser(ctx, usr, &user_model.Meta{}, overwriteDefault) if err != nil { - log.Error("SyncExternalUsers[%s]: Error creating user %s: %v", source.authSource.Name, su.Username, err) + log.Error("SyncExternalUsers[%s]: Error creating user %s: %v", source.AuthSource.Name, su.Username, err) } if err == nil && isAttributeSSHPublicKeySet { - log.Trace("SyncExternalUsers[%s]: Adding LDAP Public SSH Keys for user %s", source.authSource.Name, usr.Name) - if asymkey_model.AddPublicKeysBySource(ctx, usr, source.authSource, su.SSHPublicKey) { + log.Trace("SyncExternalUsers[%s]: Adding LDAP 
Public SSH Keys for user %s", source.AuthSource.Name, usr.Name) + if asymkey_model.AddPublicKeysBySource(ctx, usr, source.AuthSource, su.SSHPublicKey) { sshKeysNeedUpdate = true } } @@ -146,7 +145,7 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { } } else if updateExisting { // Synchronize SSH Public Key if that attribute is set - if isAttributeSSHPublicKeySet && asymkey_model.SynchronizePublicKeys(ctx, usr, source.authSource, su.SSHPublicKey) { + if isAttributeSSHPublicKeySet && asymkey_model.SynchronizePublicKeys(ctx, usr, source.AuthSource, su.SSHPublicKey) { sshKeysNeedUpdate = true } @@ -156,14 +155,14 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { !strings.EqualFold(usr.Email, su.Mail) || usr.FullName != fullName || !usr.IsActive { - log.Trace("SyncExternalUsers[%s]: Updating user %s", source.authSource.Name, usr.Name) + log.Trace("SyncExternalUsers[%s]: Updating user %s", source.AuthSource.Name, usr.Name) opts := &user_service.UpdateOptions{ FullName: optional.Some(fullName), IsActive: optional.Some(true), } if source.AdminFilter != "" { - opts.IsAdmin = optional.Some(su.IsAdmin) + opts.IsAdmin = user_service.UpdateOptionFieldFromSync(su.IsAdmin) } // Change existing restricted flag only if RestrictedFilter option is set if !su.IsAdmin && source.RestrictedFilter != "" { @@ -171,16 +170,17 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { } if err := user_service.UpdateUser(ctx, usr, opts); err != nil { - log.Error("SyncExternalUsers[%s]: Error updating user %s: %v", source.authSource.Name, usr.Name, err) + log.Error("SyncExternalUsers[%s]: Error updating user %s: %v", source.AuthSource.Name, usr.Name, err) } if err := user_service.ReplacePrimaryEmailAddress(ctx, usr, su.Mail); err != nil { - log.Error("SyncExternalUsers[%s]: Error updating user %s primary email %s: %v", source.authSource.Name, usr.Name, su.Mail, err) + log.Error("SyncExternalUsers[%s]: Error updating user %s primary email %s: %v", source.AuthSource.Name, usr.Name, su.Mail, err) } } - if usr.IsUploadAvatarChanged(su.Avatar) { - if err == nil && source.AttributeAvatar != "" { + if source.AttributeAvatar != "" { + if len(su.Avatar) > 0 && usr.IsUploadAvatarChanged(su.Avatar) { + log.Trace("SyncExternalUsers[%s]: Uploading new avatar for %s", source.AuthSource.Name, usr.Name) _ = user_service.UploadAvatar(ctx, usr, su.Avatar) } } @@ -203,8 +203,8 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { select { case <-ctx.Done(): - log.Warn("SyncExternalUsers: Cancelled during update of %s before delete users", source.authSource.Name) - return db.ErrCancelledf("During update of %s before delete users", source.authSource.Name) + log.Warn("SyncExternalUsers: Cancelled during update of %s before delete users", source.AuthSource.Name) + return db.ErrCancelledf("During update of %s before delete users", source.AuthSource.Name) default: } @@ -215,13 +215,13 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { continue } - log.Trace("SyncExternalUsers[%s]: Deactivating user %s", source.authSource.Name, usr.Name) + log.Trace("SyncExternalUsers[%s]: Deactivating user %s", source.AuthSource.Name, usr.Name) opts := &user_service.UpdateOptions{ IsActive: optional.Some(false), } if err := user_service.UpdateUser(ctx, usr, opts); err != nil { - log.Error("SyncExternalUsers[%s]: Error deactivating user %s: %v", source.authSource.Name, usr.Name, err) + log.Error("SyncExternalUsers[%s]: Error 
deactivating user %s: %v", source.AuthSource.Name, usr.Name, err) } } } diff --git a/services/auth/source/oauth2/assert_interface_test.go b/services/auth/source/oauth2/assert_interface_test.go index 56fe0e4aa8..d870ac1dcd 100644 --- a/services/auth/source/oauth2/assert_interface_test.go +++ b/services/auth/source/oauth2/assert_interface_test.go @@ -14,7 +14,6 @@ import ( type sourceInterface interface { auth_model.Config - auth_model.SourceSettable auth_model.RegisterableSource auth.PasswordAuthenticator } diff --git a/services/auth/source/oauth2/providers.go b/services/auth/source/oauth2/providers.go index f2c1bb4894..75ed41ba66 100644 --- a/services/auth/source/oauth2/providers.go +++ b/services/auth/source/oauth2/providers.go @@ -27,6 +27,7 @@ type Provider interface { DisplayName() string IconHTML(size int) template.HTML CustomURLSettings() *CustomURLSettings + SupportSSHPublicKey() bool } // GothProviderCreator provides a function to create a goth.Provider diff --git a/services/auth/source/oauth2/providers_base.go b/services/auth/source/oauth2/providers_base.go index 9d4ab106e5..d34597d6d9 100644 --- a/services/auth/source/oauth2/providers_base.go +++ b/services/auth/source/oauth2/providers_base.go @@ -14,6 +14,13 @@ import ( type BaseProvider struct { name string displayName string + + // TODO: maybe some providers also support SSH public keys, then they can set this to true + supportSSHPublicKey bool +} + +func (b *BaseProvider) SupportSSHPublicKey() bool { + return b.supportSSHPublicKey } // Name provides the technical name for this provider diff --git a/services/auth/source/oauth2/providers_openid.go b/services/auth/source/oauth2/providers_openid.go index 285876d5ac..e86dc48232 100644 --- a/services/auth/source/oauth2/providers_openid.go +++ b/services/auth/source/oauth2/providers_openid.go @@ -17,6 +17,10 @@ import ( // OpenIDProvider is a GothProvider for OpenID type OpenIDProvider struct{} +func (o *OpenIDProvider) SupportSSHPublicKey() bool { + return true +} + // Name provides the technical name for this provider func (o *OpenIDProvider) Name() string { return "openidConnect" diff --git a/services/auth/source/oauth2/source.go b/services/auth/source/oauth2/source.go index 3454c9ad55..00d89b3481 100644 --- a/services/auth/source/oauth2/source.go +++ b/services/auth/source/oauth2/source.go @@ -10,6 +10,8 @@ import ( // Source holds configuration for the OAuth2 login source. type Source struct { + auth.ConfigBase `json:"-"` + Provider string ClientID string ClientSecret string @@ -25,10 +27,9 @@ type Source struct { GroupTeamMap string GroupTeamMapRemoval bool RestrictedGroup string - SkipLocalTwoFA bool `json:",omitempty"` - // reference to the authSource - authSource *auth.Source + SSHPublicKeyClaimName string + FullNameClaimName string } // FromDB fills up an OAuth2Config from serialized format. 
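Each auth source config in this change (db, ldap, oauth2, pam, smtp, sspi) drops its private authSource field and SetAuthSource method in favour of an embedded auth.ConfigBase tagged json:"-", so source.AuthSource is available uniformly. The real ConfigBase lives in models/auth and is not shown in this diff; the sketch below assumes a minimal shape (an exported AuthSource field plus a shared setter) purely to illustrate the embedding pattern.

package authsketch

// Source stands in for models/auth.Source; only the fields used here are shown.
type Source struct {
    ID   int64
    Name string
}

// ConfigBase carries the back-reference every source config needs.
// The json:"-" tag keeps it out of the serialized config blob.
type ConfigBase struct {
    AuthSource *Source `json:"-"`
}

// SetAuthSource is written once here instead of once per source type.
func (c *ConfigBase) SetAuthSource(s *Source) { c.AuthSource = s }

// OAuth2Config is an illustrative config embedding ConfigBase, mirroring the
// oauth2.Source struct above.
type OAuth2Config struct {
    ConfigBase `json:"-"`

    Provider string
    ClientID string
}

func example() {
    cfg := &OAuth2Config{Provider: "openidConnect", ClientID: "gitea"}
    cfg.SetAuthSource(&Source{ID: 12, Name: "fake"})
    _ = cfg.AuthSource.Name // promoted through the embedded ConfigBase
}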
@@ -41,11 +42,6 @@ func (source *Source) ToDB() ([]byte, error) { return json.Marshal(source) } -// SetAuthSource sets the related AuthSource -func (source *Source) SetAuthSource(authSource *auth.Source) { - source.authSource = authSource -} - func init() { auth.RegisterTypeConfig(auth.OAuth2, &Source{}) } diff --git a/services/auth/source/oauth2/source_callout.go b/services/auth/source/oauth2/source_callout.go index 8d70bee248..f09d25c772 100644 --- a/services/auth/source/oauth2/source_callout.go +++ b/services/auth/source/oauth2/source_callout.go @@ -13,7 +13,7 @@ import ( // Callout redirects request/response pair to authenticate against the provider func (source *Source) Callout(request *http.Request, response http.ResponseWriter) error { // not sure if goth is thread safe (?) when using multiple providers - request.Header.Set(ProviderHeaderKey, source.authSource.Name) + request.Header.Set(ProviderHeaderKey, source.AuthSource.Name) // don't use the default gothic begin handler to prevent issues when some error occurs // normally the gothic library will write some custom stuff to the response instead of our own nice error page @@ -33,7 +33,7 @@ func (source *Source) Callout(request *http.Request, response http.ResponseWrite // this will trigger a new authentication request, but because we save it in the session we can use that func (source *Source) Callback(request *http.Request, response http.ResponseWriter) (goth.User, error) { // not sure if goth is thread safe (?) when using multiple providers - request.Header.Set(ProviderHeaderKey, source.authSource.Name) + request.Header.Set(ProviderHeaderKey, source.AuthSource.Name) gothRWMutex.RLock() defer gothRWMutex.RUnlock() diff --git a/services/auth/source/oauth2/source_register.go b/services/auth/source/oauth2/source_register.go index 82a36acaa6..12da56c11b 100644 --- a/services/auth/source/oauth2/source_register.go +++ b/services/auth/source/oauth2/source_register.go @@ -9,13 +9,13 @@ import ( // RegisterSource causes an OAuth2 configuration to be registered func (source *Source) RegisterSource() error { - err := RegisterProviderWithGothic(source.authSource.Name, source) - return wrapOpenIDConnectInitializeError(err, source.authSource.Name, source) + err := RegisterProviderWithGothic(source.AuthSource.Name, source) + return wrapOpenIDConnectInitializeError(err, source.AuthSource.Name, source) } // UnregisterSource causes an OAuth2 configuration to be unregistered func (source *Source) UnregisterSource() error { - RemoveProviderFromGothic(source.authSource.Name) + RemoveProviderFromGothic(source.AuthSource.Name) return nil } diff --git a/services/auth/source/oauth2/source_sync.go b/services/auth/source/oauth2/source_sync.go index 5e30313c8f..c2e3dfb1a8 100644 --- a/services/auth/source/oauth2/source_sync.go +++ b/services/auth/source/oauth2/source_sync.go @@ -18,27 +18,27 @@ import ( // Sync causes this OAuth2 source to synchronize its users with the db. 
func (source *Source) Sync(ctx context.Context, updateExisting bool) error { - log.Trace("Doing: SyncExternalUsers[%s] %d", source.authSource.Name, source.authSource.ID) + log.Trace("Doing: SyncExternalUsers[%s] %d", source.AuthSource.Name, source.AuthSource.ID) if !updateExisting { - log.Info("SyncExternalUsers[%s] not running since updateExisting is false", source.authSource.Name) + log.Info("SyncExternalUsers[%s] not running since updateExisting is false", source.AuthSource.Name) return nil } - provider, err := createProvider(source.authSource.Name, source) + provider, err := createProvider(source.AuthSource.Name, source) if err != nil { return err } if !provider.RefreshTokenAvailable() { - log.Trace("SyncExternalUsers[%s] provider doesn't support refresh tokens, can't synchronize", source.authSource.Name) + log.Trace("SyncExternalUsers[%s] provider doesn't support refresh tokens, can't synchronize", source.AuthSource.Name) return nil } opts := user_model.FindExternalUserOptions{ HasRefreshToken: true, Expired: true, - LoginSourceID: source.authSource.ID, + LoginSourceID: source.AuthSource.ID, } return user_model.IterateExternalLogin(ctx, opts, func(ctx context.Context, u *user_model.ExternalLoginUser) error { @@ -77,7 +77,7 @@ func (source *Source) refresh(ctx context.Context, provider goth.Provider, u *us // recognizes them as a valid user, they will be able to login // via their provider and reactivate their account. if shouldDisable { - log.Info("SyncExternalUsers[%s] disabling user %d", source.authSource.Name, user.ID) + log.Info("SyncExternalUsers[%s] disabling user %d", source.AuthSource.Name, user.ID) return db.WithTx(ctx, func(ctx context.Context) error { if hasUser { diff --git a/services/auth/source/oauth2/source_sync_test.go b/services/auth/source/oauth2/source_sync_test.go index aacb4286a1..2927f3634b 100644 --- a/services/auth/source/oauth2/source_sync_test.go +++ b/services/auth/source/oauth2/source_sync_test.go @@ -18,19 +18,21 @@ func TestSource(t *testing.T) { source := &Source{ Provider: "fake", - authSource: &auth.Source{ - ID: 12, - Type: auth.OAuth2, - Name: "fake", - IsActive: true, - IsSyncEnabled: true, + ConfigBase: auth.ConfigBase{ + AuthSource: &auth.Source{ + ID: 12, + Type: auth.OAuth2, + Name: "fake", + IsActive: true, + IsSyncEnabled: true, + }, }, } user := &user_model.User{ LoginName: "external", LoginType: auth.OAuth2, - LoginSource: source.authSource.ID, + LoginSource: source.AuthSource.ID, Name: "test", Email: "external@example.com", } @@ -47,7 +49,7 @@ func TestSource(t *testing.T) { err = user_model.LinkExternalToUser(t.Context(), user, e) assert.NoError(t, err) - provider, err := createProvider(source.authSource.Name, source) + provider, err := createProvider(source.AuthSource.Name, source) assert.NoError(t, err) t.Run("refresh", func(t *testing.T) { @@ -88,8 +90,8 @@ func TestSource(t *testing.T) { ok, err := user_model.GetExternalLogin(t.Context(), e) assert.NoError(t, err) assert.True(t, ok) - assert.Equal(t, "", e.RefreshToken) - assert.Equal(t, "", e.AccessToken) + assert.Empty(t, e.RefreshToken) + assert.Empty(t, e.AccessToken) u, err := user_model.GetUserByID(t.Context(), user.ID) assert.NoError(t, err) diff --git a/services/auth/source/oauth2/store.go b/services/auth/source/oauth2/store.go index 90fa965602..7b6b26edc8 100644 --- a/services/auth/source/oauth2/store.go +++ b/services/auth/source/oauth2/store.go @@ -11,7 +11,6 @@ import ( "code.gitea.io/gitea/modules/log" session_module "code.gitea.io/gitea/modules/session" - chiSession 
"gitea.com/go-chi/session" "github.com/gorilla/sessions" ) @@ -35,11 +34,11 @@ func (st *SessionsStore) New(r *http.Request, name string) (*sessions.Session, e // getOrNew gets the session from the chi-session if it exists. Override permits the overriding of an unexpected object. func (st *SessionsStore) getOrNew(r *http.Request, name string, override bool) (*sessions.Session, error) { - chiStore := chiSession.GetSession(r) + store := session_module.GetContextSession(r) session := sessions.NewSession(st, name) - rawData := chiStore.Get(name) + rawData := store.Get(name) if rawData != nil { oldSession, ok := rawData.(*sessions.Session) if ok { @@ -56,21 +55,21 @@ func (st *SessionsStore) getOrNew(r *http.Request, name string, override bool) ( } session.IsNew = override - session.ID = chiStore.ID() // Simply copy the session id from the chi store + session.ID = store.ID() // Simply copy the session id from the chi store - return session, chiStore.Set(name, session) + return session, store.Set(name, session) } // Save should persist session to the underlying store implementation. func (st *SessionsStore) Save(r *http.Request, w http.ResponseWriter, session *sessions.Session) error { - chiStore := chiSession.GetSession(r) + store := session_module.GetContextSession(r) if session.IsNew { _, _ = session_module.RegenerateSession(w, r) session.IsNew = false } - if err := chiStore.Set(session.Name(), session); err != nil { + if err := store.Set(session.Name(), session); err != nil { return err } @@ -83,7 +82,7 @@ func (st *SessionsStore) Save(r *http.Request, w http.ResponseWriter, session *s } } - return chiStore.Release() + return store.Release() } type sizeWriter struct { diff --git a/services/auth/source/oauth2/urlmapping.go b/services/auth/source/oauth2/urlmapping.go index d0442d58a8..b9f445caa7 100644 --- a/services/auth/source/oauth2/urlmapping.go +++ b/services/auth/source/oauth2/urlmapping.go @@ -14,11 +14,11 @@ type CustomURLMapping struct { // CustomURLSettings describes the urls values and availability to use when customizing OAuth2 provider URLs type CustomURLSettings struct { - AuthURL Attribute `json:",omitempty"` - TokenURL Attribute `json:",omitempty"` - ProfileURL Attribute `json:",omitempty"` - EmailURL Attribute `json:",omitempty"` - Tenant Attribute `json:",omitempty"` + AuthURL Attribute + TokenURL Attribute + ProfileURL Attribute + EmailURL Attribute + Tenant Attribute } // Attribute describes the availability, and required status for a custom url configuration diff --git a/services/auth/source/pam/assert_interface_test.go b/services/auth/source/pam/assert_interface_test.go index 8e7648b8d3..908d097d96 100644 --- a/services/auth/source/pam/assert_interface_test.go +++ b/services/auth/source/pam/assert_interface_test.go @@ -15,7 +15,6 @@ import ( type sourceInterface interface { auth.PasswordAuthenticator auth_model.Config - auth_model.SourceSettable } var _ (sourceInterface) = &pam.Source{} diff --git a/services/auth/source/pam/source.go b/services/auth/source/pam/source.go index 96b182e185..d1db6db9b7 100644 --- a/services/auth/source/pam/source.go +++ b/services/auth/source/pam/source.go @@ -17,12 +17,10 @@ import ( // Source holds configuration for the PAM login source. type Source struct { - ServiceName string // pam service (e.g. 
system-auth) - EmailDomain string - SkipLocalTwoFA bool `json:",omitempty"` // Skip Local 2fa for users authenticated with this source + auth.ConfigBase `json:"-"` - // reference to the authSource - authSource *auth.Source + ServiceName string // pam service (e.g. system-auth) + EmailDomain string } // FromDB fills up a PAMConfig from serialized format. @@ -35,11 +33,6 @@ func (source *Source) ToDB() ([]byte, error) { return json.Marshal(source) } -// SetAuthSource sets the related AuthSource -func (source *Source) SetAuthSource(authSource *auth.Source) { - source.authSource = authSource -} - func init() { auth.RegisterTypeConfig(auth.PAM, &Source{}) } diff --git a/services/auth/source/pam/source_authenticate.go b/services/auth/source/pam/source_authenticate.go index 6fd02dc29f..db7c6aab96 100644 --- a/services/auth/source/pam/source_authenticate.go +++ b/services/auth/source/pam/source_authenticate.go @@ -56,7 +56,7 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u Email: email, Passwd: password, LoginType: auth.PAM, - LoginSource: source.authSource.ID, + LoginSource: source.AuthSource.ID, LoginName: userName, // This is what the user typed in } overwriteDefault := &user_model.CreateUserOverwriteOptions{ @@ -69,8 +69,3 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u return user, nil } - -// IsSkipLocalTwoFA returns if this source should skip local 2fa for password authentication -func (source *Source) IsSkipLocalTwoFA() bool { - return source.SkipLocalTwoFA -} diff --git a/services/auth/source/smtp/assert_interface_test.go b/services/auth/source/smtp/assert_interface_test.go index 6c9cde66e1..56edad0c71 100644 --- a/services/auth/source/smtp/assert_interface_test.go +++ b/services/auth/source/smtp/assert_interface_test.go @@ -18,7 +18,6 @@ type sourceInterface interface { auth_model.SkipVerifiable auth_model.HasTLSer auth_model.UseTLSer - auth_model.SourceSettable } var _ (sourceInterface) = &smtp.Source{} diff --git a/services/auth/source/smtp/source.go b/services/auth/source/smtp/source.go index 2a648e421e..2ae81ad4f2 100644 --- a/services/auth/source/smtp/source.go +++ b/services/auth/source/smtp/source.go @@ -17,6 +17,8 @@ import ( // Source holds configuration for the SMTP login source. type Source struct { + auth.ConfigBase `json:"-"` + Auth string Host string Port int @@ -25,10 +27,6 @@ type Source struct { SkipVerify bool HeloHostname string DisableHelo bool - SkipLocalTwoFA bool `json:",omitempty"` - - // reference to the authSource - authSource *auth.Source } // FromDB fills up an SMTPConfig from serialized format. 
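Since FromDB and ToDB round-trip these configs through JSON, the json:"-" tag on the embedded ConfigBase is what keeps the runtime *auth.Source reference out of the blob stored in the database. A small self-contained illustration with stand-in types (not the real Gitea ones):

package main

import (
    "encoding/json"
    "fmt"
)

type AuthSource struct{ ID int64 }

type ConfigBase struct {
    AuthSource *AuthSource `json:"-"` // never serialized
}

type SMTPConfigSketch struct {
    ConfigBase `json:"-"`

    Host string
    Port int
}

func main() {
    cfg := SMTPConfigSketch{Host: "smtp.example.com", Port: 465}
    cfg.AuthSource = &AuthSource{ID: 7}

    // ToDB equivalent: only the real settings reach the stored blob.
    blob, _ := json.Marshal(cfg)
    fmt.Println(string(blob)) // {"Host":"smtp.example.com","Port":465}

    // FromDB equivalent: AuthSource stays nil after unmarshalling and is
    // re-attached later via the embedded ConfigBase.
    var loaded SMTPConfigSketch
    _ = json.Unmarshal(blob, &loaded)
    fmt.Println(loaded.AuthSource == nil) // true
}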
@@ -56,11 +54,6 @@ func (source *Source) UseTLS() bool { return source.ForceSMTPS || source.Port == 465 } -// SetAuthSource sets the related AuthSource -func (source *Source) SetAuthSource(authSource *auth.Source) { - source.authSource = authSource -} - func init() { auth.RegisterTypeConfig(auth.SMTP, &Source{}) } diff --git a/services/auth/source/smtp/source_authenticate.go b/services/auth/source/smtp/source_authenticate.go index b2e94933a6..b8e668f5f9 100644 --- a/services/auth/source/smtp/source_authenticate.go +++ b/services/auth/source/smtp/source_authenticate.go @@ -72,7 +72,7 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u Email: userName, Passwd: password, LoginType: auth_model.SMTP, - LoginSource: source.authSource.ID, + LoginSource: source.AuthSource.ID, LoginName: userName, } overwriteDefault := &user_model.CreateUserOverwriteOptions{ @@ -85,8 +85,3 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u return user, nil } - -// IsSkipLocalTwoFA returns if this source should skip local 2fa for password authentication -func (source *Source) IsSkipLocalTwoFA() bool { - return source.SkipLocalTwoFA -} diff --git a/services/auth/source/sspi/source.go b/services/auth/source/sspi/source.go index bdd6ef451c..3b7b5cb033 100644 --- a/services/auth/source/sspi/source.go +++ b/services/auth/source/sspi/source.go @@ -17,6 +17,8 @@ import ( // Source holds configuration for SSPI single sign-on. type Source struct { + auth.ConfigBase `json:"-"` + AutoCreateUsers bool AutoActivateUsers bool StripDomainNames bool diff --git a/services/automerge/automerge.go b/services/automerge/automerge.go index 62d560ff94..a60883b4cc 100644 --- a/services/automerge/automerge.go +++ b/services/automerge/automerge.go @@ -22,23 +22,21 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/process" "code.gitea.io/gitea/modules/queue" + "code.gitea.io/gitea/services/automergequeue" notify_service "code.gitea.io/gitea/services/notify" pull_service "code.gitea.io/gitea/services/pull" repo_service "code.gitea.io/gitea/services/repository" ) -// prAutoMergeQueue represents a queue to handle update pull request tests -var prAutoMergeQueue *queue.WorkerPoolQueue[string] - // Init runs the task queue to that handles auto merges func Init() error { notify_service.RegisterNotifier(NewNotifier()) - prAutoMergeQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "pr_auto_merge", handler) - if prAutoMergeQueue == nil { - return fmt.Errorf("unable to create pr_auto_merge queue") + automergequeue.AutoMergeQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "pr_auto_merge", handler) + if automergequeue.AutoMergeQueue == nil { + return errors.New("unable to create pr_auto_merge queue") } - go graceful.GetManager().RunWithCancel(prAutoMergeQueue) + go graceful.GetManager().RunWithCancel(automergequeue.AutoMergeQueue) return nil } @@ -56,24 +54,23 @@ func handler(items ...string) []string { return nil } -func addToQueue(pr *issues_model.PullRequest, sha string) { - log.Trace("Adding pullID: %d to the pull requests patch checking queue with sha %s", pr.ID, sha) - if err := prAutoMergeQueue.Push(fmt.Sprintf("%d_%s", pr.ID, sha)); err != nil { - log.Error("Error adding pullID: %d to the pull requests patch checking queue %v", pr.ID, err) - } -} - // ScheduleAutoMerge if schedule is false and no error, pull can be merged directly func ScheduleAutoMerge(ctx context.Context, doer *user_model.User, pull 
*issues_model.PullRequest, style repo_model.MergeStyle, message string, deleteBranchAfterMerge bool) (scheduled bool, err error) { err = db.WithTx(ctx, func(ctx context.Context) error { if err := pull_model.ScheduleAutoMerge(ctx, doer, pull.ID, style, message, deleteBranchAfterMerge); err != nil { return err } - scheduled = true - _, err = issues_model.CreateAutoMergeComment(ctx, issues_model.CommentTypePRScheduledToAutoMerge, pull, doer) return err }) + // Old code made "scheduled" to be true after "ScheduleAutoMerge", but it's not right: + // If the transaction rolls back, then the pull request is not scheduled to auto merge. + // So we should only set "scheduled" to true if there is no error. + scheduled = err == nil + if scheduled { + log.Trace("Pull request [%d] scheduled for auto merge with style [%s] and message [%s]", pull.ID, style, message) + automergequeue.StartPRCheckAndAutoMerge(ctx, pull) + } return scheduled, err } @@ -99,38 +96,12 @@ func StartPRCheckAndAutoMergeBySHA(ctx context.Context, sha string, repo *repo_m } for _, pr := range pulls { - addToQueue(pr, sha) + automergequeue.AddToQueue(pr, sha) } return nil } -// StartPRCheckAndAutoMerge start an automerge check and auto merge task for a pull request -func StartPRCheckAndAutoMerge(ctx context.Context, pull *issues_model.PullRequest) { - if pull == nil || pull.HasMerged || !pull.CanAutoMerge() { - return - } - - if err := pull.LoadBaseRepo(ctx); err != nil { - log.Error("LoadBaseRepo: %v", err) - return - } - - gitRepo, err := gitrepo.OpenRepository(ctx, pull.BaseRepo) - if err != nil { - log.Error("OpenRepository: %v", err) - return - } - defer gitRepo.Close() - commitID, err := gitRepo.GetRefCommitID(pull.GetGitRefName()) - if err != nil { - log.Error("GetRefCommitID: %v", err) - return - } - - addToQueue(pull, commitID) -} - func getPullRequestsByHeadSHA(ctx context.Context, sha string, repo *repo_model.Repository, filter func(*issues_model.PullRequest) bool) (map[int64]*issues_model.PullRequest, error) { gitRepo, err := gitrepo.OpenRepository(ctx, repo) if err != nil { @@ -216,7 +187,7 @@ func handlePullRequestAutoMerge(pullID int64, sha string) { } defer baseGitRepo.Close() - headCommitID, err := baseGitRepo.GetRefCommitID(pr.GetGitRefName()) + headCommitID, err := baseGitRepo.GetRefCommitID(pr.GetGitHeadRefName()) if err != nil { log.Error("GetRefCommitID: %v", err) return @@ -254,7 +225,7 @@ func handlePullRequestAutoMerge(pullID int64, sha string) { return } case issues_model.PullRequestFlowAGit: - headBranchExist := gitrepo.IsReferenceExist(ctx, pr.BaseRepo, pr.GetGitRefName()) + headBranchExist := gitrepo.IsReferenceExist(ctx, pr.BaseRepo, pr.GetGitHeadRefName()) if !headBranchExist { log.Warn("Head branch of auto merge %-v does not exist [HeadRepoID: %d, Branch(Agit): %s]", pr, pr.HeadRepoID, pr.HeadBranch) return @@ -289,7 +260,7 @@ func handlePullRequestAutoMerge(pullID int64, sha string) { } if err := pull_service.CheckPullMergeable(ctx, doer, &perm, pr, pull_service.MergeCheckTypeGeneral, false); err != nil { - if errors.Is(err, pull_service.ErrUserNotAllowedToMerge) { + if errors.Is(err, pull_service.ErrNotReadyToMerge) { log.Info("%-v was scheduled to automerge by an unauthorized user", pr) return } diff --git a/services/automerge/notify.go b/services/automerge/notify.go index b6bbca333b..8a1bb5fc90 100644 --- a/services/automerge/notify.go +++ b/services/automerge/notify.go @@ -12,6 +12,7 @@ import ( user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/log" 
"code.gitea.io/gitea/modules/repository" + "code.gitea.io/gitea/services/automergequeue" notify_service "code.gitea.io/gitea/services/notify" ) @@ -45,7 +46,7 @@ func (n *automergeNotifier) PullReviewDismiss(ctx context.Context, doer *user_mo return } // as reviews could have blocked a pending automerge let's recheck - StartPRCheckAndAutoMerge(ctx, review.Issue.PullRequest) + automergequeue.StartPRCheckAndAutoMerge(ctx, review.Issue.PullRequest) } func (n *automergeNotifier) CreateCommitStatus(ctx context.Context, repo *repo_model.Repository, commit *repository.PushCommit, sender *user_model.User, status *git_model.CommitStatus) { diff --git a/services/automergequeue/automergequeue.go b/services/automergequeue/automergequeue.go new file mode 100644 index 0000000000..fa9c04da87 --- /dev/null +++ b/services/automergequeue/automergequeue.go @@ -0,0 +1,49 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package automergequeue + +import ( + "context" + "fmt" + + issues_model "code.gitea.io/gitea/models/issues" + "code.gitea.io/gitea/modules/gitrepo" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/queue" +) + +var AutoMergeQueue *queue.WorkerPoolQueue[string] + +var AddToQueue = func(pr *issues_model.PullRequest, sha string) { + log.Trace("Adding pullID: %d to the pull requests patch checking queue with sha %s", pr.ID, sha) + if err := AutoMergeQueue.Push(fmt.Sprintf("%d_%s", pr.ID, sha)); err != nil { + log.Error("Error adding pullID: %d to the pull requests patch checking queue %v", pr.ID, err) + } +} + +// StartPRCheckAndAutoMerge start an automerge check and auto merge task for a pull request +func StartPRCheckAndAutoMerge(ctx context.Context, pull *issues_model.PullRequest) { + if pull == nil || pull.HasMerged || !pull.CanAutoMerge() { + return + } + + if err := pull.LoadBaseRepo(ctx); err != nil { + log.Error("LoadBaseRepo: %v", err) + return + } + + gitRepo, err := gitrepo.OpenRepository(ctx, pull.BaseRepo) + if err != nil { + log.Error("OpenRepository: %v", err) + return + } + defer gitRepo.Close() + commitID, err := gitRepo.GetRefCommitID(pull.GetGitHeadRefName()) + if err != nil { + log.Error("GetRefCommitID: %v", err) + return + } + + AddToQueue(pull, commitID) +} diff --git a/services/context/access_log.go b/services/context/access_log.go index 925e4a3056..caade113a7 100644 --- a/services/context/access_log.go +++ b/services/context/access_log.go @@ -5,7 +5,6 @@ package context import ( "bytes" - "fmt" "net" "net/http" "strings" @@ -47,7 +46,7 @@ func parseRequestIDFromRequestHeader(req *http.Request) string { } } if len(requestID) > maxRequestIDByteLength { - requestID = fmt.Sprintf("%s...", requestID[:maxRequestIDByteLength]) + requestID = requestID[:maxRequestIDByteLength] + "..." 
} return requestID } diff --git a/services/context/access_log_test.go b/services/context/access_log_test.go index c40ef9acd1..139a6eb217 100644 --- a/services/context/access_log_test.go +++ b/services/context/access_log_test.go @@ -59,7 +59,7 @@ func TestAccessLogger(t *testing.T) { recorder.logger = mockLogger req := &http.Request{ RemoteAddr: "remote-addr", - Method: "GET", + Method: http.MethodGet, Proto: "https", URL: &url.URL{Path: "/path"}, } diff --git a/services/context/api.go b/services/context/api.go index 10fad419ba..ab50a360f4 100644 --- a/services/context/api.go +++ b/services/context/api.go @@ -9,11 +9,14 @@ import ( "fmt" "net/http" "net/url" + "slices" + "strconv" "strings" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/cache" + "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/httpcache" "code.gitea.io/gitea/modules/log" @@ -168,7 +171,7 @@ func genAPILinks(curURL *url.URL, total, pageSize, curPage int) []string { if paginater.HasNext() { u := *curURL queries := u.Query() - queries.Set("page", fmt.Sprintf("%d", paginater.Next())) + queries.Set("page", strconv.Itoa(paginater.Next())) u.RawQuery = queries.Encode() links = append(links, fmt.Sprintf("<%s%s>; rel=\"next\"", setting.AppURL, u.RequestURI()[1:])) @@ -176,7 +179,7 @@ func genAPILinks(curURL *url.URL, total, pageSize, curPage int) []string { if !paginater.IsLast() { u := *curURL queries := u.Query() - queries.Set("page", fmt.Sprintf("%d", paginater.TotalPages())) + queries.Set("page", strconv.Itoa(paginater.TotalPages())) u.RawQuery = queries.Encode() links = append(links, fmt.Sprintf("<%s%s>; rel=\"last\"", setting.AppURL, u.RequestURI()[1:])) @@ -192,7 +195,7 @@ func genAPILinks(curURL *url.URL, total, pageSize, curPage int) []string { if paginater.HasPrevious() { u := *curURL queries := u.Query() - queries.Set("page", fmt.Sprintf("%d", paginater.Previous())) + queries.Set("page", strconv.Itoa(paginater.Previous())) u.RawQuery = queries.Encode() links = append(links, fmt.Sprintf("<%s%s>; rel=\"prev\"", setting.AppURL, u.RequestURI()[1:])) @@ -225,7 +228,7 @@ func APIContexter() func(http.Handler) http.Handler { ctx.SetContextValue(apiContextKey, ctx) // If request sends files, parse them here otherwise the Query() can't be parsed and the CsrfToken will be invalid. 
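genAPILinks above now formats page numbers with strconv.Itoa instead of fmt.Sprintf, but the emitted header entries keep the same RFC 5988 Link shape. An illustrative reconstruction of one entry, with made-up URL values:

    package main

    import "fmt"

    func main() {
        appURL := "https://gitea.example.com/"              // assumed setting.AppURL value
        requestURI := "api/v1/repos/search?page=2&limit=20" // RequestURI with the leading "/" stripped

        // Mirrors the fmt.Sprintf("<%s%s>; rel=\"next\"", ...) call in genAPILinks.
        link := fmt.Sprintf("<%s%s>; rel=%q", appURL, requestURI, "next")
        fmt.Println(link)
        // <https://gitea.example.com/api/v1/repos/search?page=2&limit=20>; rel="next"
    }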
- if ctx.Req.Method == "POST" && strings.Contains(ctx.Req.Header.Get("Content-Type"), "multipart/form-data") { + if ctx.Req.Method == http.MethodPost && strings.Contains(ctx.Req.Header.Get("Content-Type"), "multipart/form-data") { if err := ctx.Req.ParseMultipartForm(setting.Attachment.MaxSize << 20); err != nil && !strings.Contains(err.Error(), "EOF") { // 32MB max size ctx.APIErrorInternal(err) return @@ -243,8 +246,8 @@ func APIContexter() func(http.Handler) http.Handler { // APIErrorNotFound handles 404s for APIContext // String will replace message, errors will be added to a slice func (ctx *APIContext) APIErrorNotFound(objs ...any) { - message := ctx.Locale.TrString("error.not_found") - var errors []string + var message string + var errs []string for _, obj := range objs { // Ignore nil if obj == nil { @@ -252,16 +255,15 @@ func (ctx *APIContext) APIErrorNotFound(objs ...any) { } if err, ok := obj.(error); ok { - errors = append(errors, err.Error()) + errs = append(errs, err.Error()) } else { message = obj.(string) } } - ctx.JSON(http.StatusNotFound, map[string]any{ - "message": message, + "message": util.IfZero(message, "not found"), // do not use locale in API "url": setting.API.SwaggerURL, - "errors": errors, + "errors": errs, }) } @@ -297,39 +299,27 @@ func RepoRefForAPI(next http.Handler) http.Handler { } if ctx.Repo.GitRepo == nil { - ctx.APIErrorInternal(fmt.Errorf("no open git repo")) - return + panic("no GitRepo, forgot to call the middleware?") // it is a programming error } - refName, _, _ := getRefNameLegacy(ctx.Base, ctx.Repo, ctx.PathParam("*"), ctx.FormTrim("ref")) + refName, refType, _ := getRefNameLegacy(ctx.Base, ctx.Repo, ctx.PathParam("*"), ctx.FormTrim("ref")) var err error - - if gitrepo.IsBranchExist(ctx, ctx.Repo.Repository, refName) { + switch refType { + case git.RefTypeBranch: ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetBranchCommit(refName) - if err != nil { - ctx.APIErrorInternal(err) - return - } - ctx.Repo.CommitID = ctx.Repo.Commit.ID.String() - } else if gitrepo.IsTagExist(ctx, ctx.Repo.Repository, refName) { + case git.RefTypeTag: ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetTagCommit(refName) - if err != nil { - ctx.APIErrorInternal(err) - return - } - ctx.Repo.CommitID = ctx.Repo.Commit.ID.String() - } else if len(refName) == ctx.Repo.GetObjectFormat().FullLength() { - ctx.Repo.CommitID = refName + case git.RefTypeCommit: ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetCommit(refName) - if err != nil { - ctx.APIErrorNotFound("GetCommit", err) - return - } - } else { - ctx.APIErrorNotFound(fmt.Errorf("not exist: '%s'", ctx.PathParam("*"))) + } + if ctx.Repo.Commit == nil || errors.Is(err, util.ErrNotExist) { + ctx.APIErrorNotFound("unable to find a git ref") + return + } else if err != nil { + ctx.APIErrorInternal(err) return } - + ctx.Repo.CommitID = ctx.Repo.Commit.ID.String() next.ServeHTTP(w, req) }) } @@ -375,11 +365,5 @@ func (ctx *APIContext) IsUserRepoAdmin() bool { // IsUserRepoWriter returns true if current user has "write" privilege in current repo func (ctx *APIContext) IsUserRepoWriter(unitTypes []unit.Type) bool { - for _, unitType := range unitTypes { - if ctx.Repo.CanWrite(unitType) { - return true - } - } - - return false + return slices.ContainsFunc(unitTypes, ctx.Repo.CanWrite) } diff --git a/services/context/api_test.go b/services/context/api_test.go index 911a49949e..87d74004db 100644 --- a/services/context/api_test.go +++ b/services/context/api_test.go @@ -45,6 +45,6 @@ func TestGenAPILinks(t *testing.T) { links := genAPILinks(u, 100, 
20, curPage) - assert.EqualValues(t, links, response) + assert.Equal(t, links, response) } } diff --git a/services/context/base.go b/services/context/base.go index 3701668bf6..28d6656fd1 100644 --- a/services/context/base.go +++ b/services/context/base.go @@ -8,6 +8,7 @@ import ( "html/template" "io" "net/http" + "strconv" "strings" "code.gitea.io/gitea/modules/httplib" @@ -53,7 +54,7 @@ func (b *Base) AppendAccessControlExposeHeaders(names ...string) { // SetTotalCountHeader set "X-Total-Count" header func (b *Base) SetTotalCountHeader(total int64) { - b.RespHeader().Set("X-Total-Count", fmt.Sprint(total)) + b.RespHeader().Set("X-Total-Count", strconv.FormatInt(total, 10)) b.AppendAccessControlExposeHeaders("X-Total-Count") } @@ -82,6 +83,7 @@ func (b *Base) RespHeader() http.Header { } // HTTPError returned an error to web browser +// FIXME: many calls to this HTTPError are not right: it shouldn't expose err.Error() directly, it doesn't accept more than one content func (b *Base) HTTPError(status int, contents ...string) { v := http.StatusText(status) if len(contents) > 0 { diff --git a/services/context/base_form.go b/services/context/base_form.go index 5b8cae9e99..81fd7cd328 100644 --- a/services/context/base_form.go +++ b/services/context/base_form.go @@ -12,6 +12,8 @@ import ( ) // FormString returns the first value matching the provided key in the form as a string +// It works the same as http.Request.FormValue: +// try urlencoded request body first, then query string, then multipart form body func (b *Base) FormString(key string, def ...string) string { s := b.Req.FormValue(key) if s == "" { @@ -20,7 +22,7 @@ func (b *Base) FormString(key string, def ...string) string { return s } -// FormStrings returns a string slice for the provided key from the form +// FormStrings returns a values for the key in the form (including query parameters), similar to FormString func (b *Base) FormStrings(key string) []string { if b.Req.Form == nil { if err := b.Req.ParseMultipartForm(32 << 20); err != nil { diff --git a/services/context/base_test.go b/services/context/base_test.go index b936b76f58..2a4f86dddf 100644 --- a/services/context/base_test.go +++ b/services/context/base_test.go @@ -15,7 +15,7 @@ import ( func TestRedirect(t *testing.T) { setting.IsInTesting = true - req, _ := http.NewRequest("GET", "/", nil) + req, _ := http.NewRequest(http.MethodGet, "/", nil) cases := []struct { url string @@ -36,7 +36,7 @@ func TestRedirect(t *testing.T) { assert.Equal(t, c.keep, has, "url = %q", c.url) } - req, _ = http.NewRequest("GET", "/", nil) + req, _ = http.NewRequest(http.MethodGet, "/", nil) resp := httptest.NewRecorder() req.Header.Add("HX-Request", "true") b := NewBaseContextForTest(resp, req) diff --git a/services/context/context.go b/services/context/context.go index 79bc5da920..32ec260aab 100644 --- a/services/context/context.go +++ b/services/context/context.go @@ -23,6 +23,7 @@ import ( "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/modules/translation" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/modules/web/middleware" web_types "code.gitea.io/gitea/modules/web/types" @@ -184,7 +185,7 @@ func Contexter() func(next http.Handler) http.Handler { }) // If request sends files, parse them here otherwise the Query() can't be parsed and the CsrfToken will be invalid. 
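The new doc comment on FormString spells out the lookup order it inherits from http.Request.FormValue: urlencoded request body first, then the query string, then the multipart body. A quick stdlib-only check of that precedence:

    package main

    import (
        "fmt"
        "net/http"
        "strings"
    )

    func main() {
        // Same key in both the query string and the urlencoded body.
        req, _ := http.NewRequest(http.MethodPost, "/submit?name=from-query",
            strings.NewReader("name=from-body"))
        req.Header.Set("Content-Type", "application/x-www-form-urlencoded")

        // FormValue parses the form on first use; the body value wins.
        fmt.Println(req.FormValue("name")) // from-body
    }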
- if ctx.Req.Method == "POST" && strings.Contains(ctx.Req.Header.Get("Content-Type"), "multipart/form-data") { + if ctx.Req.Method == http.MethodPost && strings.Contains(ctx.Req.Header.Get("Content-Type"), "multipart/form-data") { if err := ctx.Req.ParseMultipartForm(setting.Attachment.MaxSize << 20); err != nil && !strings.Contains(err.Error(), "EOF") { // 32MB max size ctx.ServerError("ParseMultipartForm", err) return @@ -196,6 +197,8 @@ func Contexter() func(next http.Handler) http.Handler { ctx.Data["SystemConfig"] = setting.Config() + ctx.Data["ShowTwoFactorRequiredMessage"] = ctx.DoerNeedTwoFactorAuth() + // FIXME: do we really always need these setting? There should be someway to have to avoid having to always set these ctx.Data["DisableMigrations"] = setting.Repository.DisableMigrations ctx.Data["DisableStars"] = setting.Repository.DisableStars @@ -209,6 +212,13 @@ func Contexter() func(next http.Handler) http.Handler { } } +func (ctx *Context) DoerNeedTwoFactorAuth() bool { + if !setting.TwoFactorAuthEnforced { + return false + } + return ctx.Session.Get(session.KeyUserHasTwoFactorAuth) == false +} + // HasError returns true if error occurs in form validation. // Attention: this function changes ctx.Data and ctx.Flash // If HasError is called, then before Redirect, the error message should be stored by ctx.Flash.Error(ctx.GetErrMsg()) again. @@ -252,3 +262,11 @@ func (ctx *Context) JSONError(msg any) { panic(fmt.Sprintf("unsupported type: %T", msg)) } } + +func (ctx *Context) JSONErrorNotFound(optMsg ...string) { + msg := util.OptionalArg(optMsg) + if msg == "" { + msg = ctx.Locale.TrString("error.not_found") + } + ctx.JSON(http.StatusNotFound, map[string]any{"errorMessage": msg, "renderFormat": "text"}) +} diff --git a/services/context/context_response.go b/services/context/context_response.go index 61b432395a..3f64fc7352 100644 --- a/services/context/context_response.go +++ b/services/context/context_response.go @@ -92,7 +92,7 @@ func (ctx *Context) HTML(status int, name templates.TplName) { } // JSONTemplate renders the template as JSON response -// keep in mind that the template is processed in HTML context, so JSON-things should be handled carefully, eg: by JSEscape +// keep in mind that the template is processed in HTML context, so JSON things should be handled carefully, e.g.: use JSEscape func (ctx *Context) JSONTemplate(tmpl templates.TplName) { t, err := ctx.Render.TemplateLookup(string(tmpl), nil) if err != nil { @@ -150,7 +150,7 @@ func (ctx *Context) notFoundInternal(logMsg string, logErr error) { ctx.Data["IsRepo"] = ctx.Repo.Repository != nil ctx.Data["Title"] = "Page Not Found" - ctx.HTML(http.StatusNotFound, templates.TplName("status/404")) + ctx.HTML(http.StatusNotFound, "status/404") } // ServerError displays a 500 (Internal Server Error) page and prints the given error, if any. 
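DoerNeedTwoFactorAuth above checks `ctx.Session.Get(session.KeyUserHasTwoFactorAuth) == false` rather than negating the value: Session.Get returns an untyped any, so only an explicitly stored false triggers the message, while a missing key (nil) does not. The distinction in isolation:

    package main

    import "fmt"

    func main() {
        var stored any // what a session store hands back for the key

        // Key never set for this session: nil is not equal to false.
        fmt.Println(stored == false) // false -> no "two-factor required" message

        // Login flow recorded that the user has no 2FA enrolled.
        stored = false
        fmt.Println(stored == false) // true -> message is shown when enforcement is on
    }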
diff --git a/services/context/org.go b/services/context/org.go index 992a48afa0..1cd8923178 100644 --- a/services/context/org.go +++ b/services/context/org.go @@ -182,7 +182,7 @@ func OrgAssignment(opts OrgAssignmentOptions) func(ctx *Context) { return } for _, team := range teams { - if team.IncludesAllRepositories && team.AccessMode >= perm.AccessModeAdmin { + if team.IncludesAllRepositories && team.HasAdminAccess() { shouldSeeAllTeams = true break } @@ -208,7 +208,7 @@ func OrgAssignment(opts OrgAssignmentOptions) func(ctx *Context) { if len(teamName) > 0 { teamExists := false for _, team := range ctx.Org.Teams { - if team.LowerName == strings.ToLower(teamName) { + if strings.EqualFold(team.LowerName, teamName) { teamExists = true ctx.Org.Team = team ctx.Org.IsTeamMember = true @@ -228,7 +228,7 @@ func OrgAssignment(opts OrgAssignmentOptions) func(ctx *Context) { return } - ctx.Org.IsTeamAdmin = ctx.Org.Team.IsOwnerTeam() || ctx.Org.Team.AccessMode >= perm.AccessModeAdmin + ctx.Org.IsTeamAdmin = ctx.Org.Team.IsOwnerTeam() || ctx.Org.Team.HasAdminAccess() ctx.Data["IsTeamAdmin"] = ctx.Org.IsTeamAdmin if opts.RequireTeamAdmin && !ctx.Org.IsTeamAdmin { ctx.NotFound(err) diff --git a/services/context/package.go b/services/context/package.go index 64bb4f3ecd..8b722932b1 100644 --- a/services/context/package.go +++ b/services/context/package.go @@ -93,7 +93,7 @@ func packageAssignment(ctx *packageAssignmentCtx, errCb func(int, any)) *Package } func determineAccessMode(ctx *Base, pkg *Package, doer *user_model.User) (perm.AccessMode, error) { - if setting.Service.RequireSignInView && (doer == nil || doer.IsGhost()) { + if setting.Service.RequireSignInViewStrict && (doer == nil || doer.IsGhost()) { return perm.AccessModeNone, nil } diff --git a/services/context/pagination.go b/services/context/pagination.go index 25a9298e01..2a9805db05 100644 --- a/services/context/pagination.go +++ b/services/context/pagination.go @@ -33,8 +33,8 @@ func (p *Pagination) WithCurRows(n int) *Pagination { return p } -func (p *Pagination) AddParamFromRequest(req *http.Request) { - for key, values := range req.URL.Query() { +func (p *Pagination) AddParamFromQuery(q url.Values) { + for key, values := range q { if key == "page" || len(values) == 0 || (len(values) == 1 && values[0] == "") { continue } @@ -45,6 +45,10 @@ func (p *Pagination) AddParamFromRequest(req *http.Request) { } } +func (p *Pagination) AddParamFromRequest(req *http.Request) { + p.AddParamFromQuery(req.URL.Query()) +} + // GetParams returns the configured URL params func (p *Pagination) GetParams() template.URL { return template.URL(strings.Join(p.urlParams, "&")) diff --git a/services/context/permission.go b/services/context/permission.go index 7055f798da..c0a5a98724 100644 --- a/services/context/permission.go +++ b/services/context/permission.go @@ -5,6 +5,7 @@ package context import ( "net/http" + "slices" auth_model "code.gitea.io/gitea/models/auth" repo_model "code.gitea.io/gitea/models/repo" @@ -34,10 +35,8 @@ func CanWriteToBranch() func(ctx *Context) { // RequireUnitWriter returns a middleware for requiring repository write to one of the unit permission func RequireUnitWriter(unitTypes ...unit.Type) func(ctx *Context) { return func(ctx *Context) { - for _, unitType := range unitTypes { - if ctx.Repo.CanWrite(unitType) { - return - } + if slices.ContainsFunc(unitTypes, ctx.Repo.CanWrite) { + return } ctx.NotFound(nil) } diff --git a/services/context/private.go b/services/context/private.go index 51857da8fe..d20e49f588 100644 --- 
a/services/context/private.go +++ b/services/context/private.go @@ -5,7 +5,6 @@ package context import ( "context" - "fmt" "net/http" "time" @@ -29,7 +28,6 @@ func init() { }) } -// Deadline is part of the interface for context.Context and we pass this to the request context func (ctx *PrivateContext) Deadline() (deadline time.Time, ok bool) { if ctx.Override != nil { return ctx.Override.Deadline() @@ -37,7 +35,6 @@ func (ctx *PrivateContext) Deadline() (deadline time.Time, ok bool) { return ctx.Base.Deadline() } -// Done is part of the interface for context.Context and we pass this to the request context func (ctx *PrivateContext) Done() <-chan struct{} { if ctx.Override != nil { return ctx.Override.Done() @@ -45,7 +42,6 @@ func (ctx *PrivateContext) Done() <-chan struct{} { return ctx.Base.Done() } -// Err is part of the interface for context.Context and we pass this to the request context func (ctx *PrivateContext) Err() error { if ctx.Override != nil { return ctx.Override.Err() @@ -53,14 +49,14 @@ func (ctx *PrivateContext) Err() error { return ctx.Base.Err() } -var privateContextKey any = "default_private_context" +type privateContextKeyType struct{} + +var privateContextKey privateContextKeyType -// GetPrivateContext returns a context for Private routes func GetPrivateContext(req *http.Request) *PrivateContext { return req.Context().Value(privateContextKey).(*PrivateContext) } -// PrivateContexter returns apicontext as middleware func PrivateContexter() func(http.Handler) http.Handler { return func(next http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { @@ -82,7 +78,7 @@ func OverrideContext() func(http.Handler) http.Handler { // We now need to override the request context as the base for our work because even if the request is cancelled we have to continue this work ctx := GetPrivateContext(req) var finished func() - ctx.Override, _, finished = process.GetManager().AddTypedContext(graceful.GetManager().HammerContext(), fmt.Sprintf("PrivateContext: %s", ctx.Req.RequestURI), process.RequestProcessType, true) + ctx.Override, _, finished = process.GetManager().AddTypedContext(graceful.GetManager().HammerContext(), "PrivateContext: "+ctx.Req.RequestURI, process.RequestProcessType, true) defer finished() next.ServeHTTP(ctx.Resp, ctx.Req) }) diff --git a/services/context/repo.go b/services/context/repo.go index 6eccd1312a..afc6de9b16 100644 --- a/services/context/repo.go +++ b/services/context/repo.go @@ -71,11 +71,6 @@ func (r *Repository) CanWriteToBranch(ctx context.Context, user *user_model.User return issues_model.CanMaintainerWriteToBranch(ctx, r.Permission, branch, user) } -// CanEnableEditor returns true if repository is editable and user has proper access level. -func (r *Repository) CanEnableEditor(ctx context.Context, user *user_model.User) bool { - return r.RefFullName.IsBranch() && r.CanWriteToBranch(ctx, user, r.BranchName) && r.Repository.CanEnableEditor() && !r.Repository.IsArchived -} - // CanCreateBranch returns true if repository is editable and user has proper access level. 
func (r *Repository) CanCreateBranch() bool { return r.Permission.CanWrite(unit_model.TypeCode) && r.Repository.CanCreateBranch() @@ -94,58 +89,100 @@ func RepoMustNotBeArchived() func(ctx *Context) { } } -// CanCommitToBranchResults represents the results of CanCommitToBranch -type CanCommitToBranchResults struct { - CanCommitToBranch bool - EditorEnabled bool - UserCanPush bool - RequireSigned bool - WillSign bool - SigningKey string - WontSignReason string +type CommitFormOptions struct { + NeedFork bool + + TargetRepo *repo_model.Repository + TargetFormAction string + WillSubmitToFork bool + CanCommitToBranch bool + UserCanPush bool + RequireSigned bool + WillSign bool + SigningKey *git.SigningKey + WontSignReason string + CanCreatePullRequest bool + CanCreateBasePullRequest bool } -// CanCommitToBranch returns true if repository is editable and user has proper access level -// -// and branch is not protected for push -func (r *Repository) CanCommitToBranch(ctx context.Context, doer *user_model.User) (CanCommitToBranchResults, error) { - protectedBranch, err := git_model.GetFirstMatchProtectedBranchRule(ctx, r.Repository.ID, r.BranchName) +func PrepareCommitFormOptions(ctx *Context, doer *user_model.User, targetRepo *repo_model.Repository, doerRepoPerm access_model.Permission, refName git.RefName) (*CommitFormOptions, error) { + if !refName.IsBranch() { + // it shouldn't happen because middleware already checks + return nil, util.NewInvalidArgumentErrorf("ref %q is not a branch", refName) + } + + originRepo := targetRepo + branchName := refName.ShortName() + // TODO: CanMaintainerWriteToBranch is a bad name, but it really does what "CanWriteToBranch" does + if !issues_model.CanMaintainerWriteToBranch(ctx, doerRepoPerm, branchName, doer) { + targetRepo = repo_model.GetForkedRepo(ctx, doer.ID, targetRepo.ID) + if targetRepo == nil { + return &CommitFormOptions{NeedFork: true}, nil + } + // now, we get our own forked repo; it must be writable by us. 
+ } + submitToForkedRepo := targetRepo.ID != originRepo.ID + err := targetRepo.GetBaseRepo(ctx) + if err != nil { + return nil, err + } + + protectedBranch, err := git_model.GetFirstMatchProtectedBranchRule(ctx, targetRepo.ID, branchName) if err != nil { - return CanCommitToBranchResults{}, err + return nil, err } - userCanPush := true - requireSigned := false + canPushWithProtection := true + protectionRequireSigned := false if protectedBranch != nil { - protectedBranch.Repo = r.Repository - userCanPush = protectedBranch.CanUserPush(ctx, doer) - requireSigned = protectedBranch.RequireSignedCommits + protectedBranch.Repo = targetRepo + canPushWithProtection = protectedBranch.CanUserPush(ctx, doer) + protectionRequireSigned = protectedBranch.RequireSignedCommits } - sign, keyID, _, err := asymkey_service.SignCRUDAction(ctx, r.Repository.RepoPath(), doer, r.Repository.RepoPath(), git.BranchPrefix+r.BranchName) - - canCommit := r.CanEnableEditor(ctx, doer) && userCanPush - if requireSigned { - canCommit = canCommit && sign - } + willSign, signKeyID, _, err := asymkey_service.SignCRUDAction(ctx, targetRepo.RepoPath(), doer, targetRepo.RepoPath(), refName.String()) wontSignReason := "" - if err != nil { - if asymkey_service.IsErrWontSign(err) { - wontSignReason = string(err.(*asymkey_service.ErrWontSign).Reason) - err = nil - } else { - wontSignReason = "error" - } + if asymkey_service.IsErrWontSign(err) { + wontSignReason = string(err.(*asymkey_service.ErrWontSign).Reason) + } else if err != nil { + return nil, err } - return CanCommitToBranchResults{ - CanCommitToBranch: canCommit, - EditorEnabled: r.CanEnableEditor(ctx, doer), - UserCanPush: userCanPush, - RequireSigned: requireSigned, - WillSign: sign, - SigningKey: keyID, + canCommitToBranch := !submitToForkedRepo /* same repo */ && targetRepo.CanEnableEditor() && canPushWithProtection + if protectionRequireSigned { + canCommitToBranch = canCommitToBranch && willSign + } + + canCreateBasePullRequest := targetRepo.BaseRepo != nil && targetRepo.BaseRepo.UnitEnabled(ctx, unit_model.TypePullRequests) + canCreatePullRequest := targetRepo.UnitEnabled(ctx, unit_model.TypePullRequests) || canCreateBasePullRequest + + opts := &CommitFormOptions{ + TargetRepo: targetRepo, + WillSubmitToFork: submitToForkedRepo, + CanCommitToBranch: canCommitToBranch, + UserCanPush: canPushWithProtection, + RequireSigned: protectionRequireSigned, + WillSign: willSign, + SigningKey: signKeyID, WontSignReason: wontSignReason, - }, err + + CanCreatePullRequest: canCreatePullRequest, + CanCreateBasePullRequest: canCreateBasePullRequest, + } + editorAction := ctx.PathParam("editor_action") + editorPathParamRemaining := util.PathEscapeSegments(branchName) + "/" + util.PathEscapeSegments(ctx.Repo.TreePath) + if submitToForkedRepo { + // there is only "default branch" in forked repo, we will use "from_base_branch" to get a new branch from base repo + editorPathParamRemaining = util.PathEscapeSegments(targetRepo.DefaultBranch) + "/" + util.PathEscapeSegments(ctx.Repo.TreePath) + "?from_base_branch=" + url.QueryEscape(branchName) + } + if editorAction == "_cherrypick" { + opts.TargetFormAction = targetRepo.Link() + "/" + editorAction + "/" + ctx.PathParam("sha") + "/" + editorPathParamRemaining + } else { + opts.TargetFormAction = targetRepo.Link() + "/" + editorAction + "/" + editorPathParamRemaining + } + if ctx.Req.URL.RawQuery != "" { + opts.TargetFormAction += util.Iif(strings.Contains(opts.TargetFormAction, "?"), "&", "?") + ctx.Req.URL.RawQuery + } + return opts, nil } 
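For the fork case, PrepareCommitFormOptions points the editor form at the user's fork and carries the original branch along as from_base_branch. With hypothetical values (fork owner2/repo-fork, base branch feature, file docs/README.md, the _edit action) and ignoring the path-segment escaping the real code does via util.PathEscapeSegments, the assembled action looks like this:

    package main

    import (
        "fmt"
        "net/url"
    )

    func main() {
        forkLink := "/owner2/repo-fork" // hypothetical fork of the target repo
        defaultBranch, baseBranch := "main", "feature"
        treePath := "docs/README.md"

        // Mirrors the submitToForkedRepo branch of the function above:
        // edit on the fork's default branch, remember which base branch the change targets.
        action := forkLink + "/_edit/" + defaultBranch + "/" + treePath +
            "?from_base_branch=" + url.QueryEscape(baseBranch)
        fmt.Println(action)
        // /owner2/repo-fork/_edit/main/docs/README.md?from_base_branch=feature
    }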
// CanUseTimetracker returns whether a user can use the timetracker. @@ -328,7 +365,9 @@ func RedirectToRepo(ctx *Base, redirectRepoID int64) { if ctx.Req.URL.RawQuery != "" { redirectPath += "?" + ctx.Req.URL.RawQuery } - ctx.Redirect(path.Join(setting.AppSubURL, redirectPath), http.StatusTemporaryRedirect) + // Git client needs a 301 redirect by default to follow the new location + // It's not documentated in git documentation, but it's the behavior of git client + ctx.Redirect(path.Join(setting.AppSubURL, redirectPath), http.StatusMovedPermanently) } func repoAssignment(ctx *Context, repo *repo_model.Repository) { @@ -338,13 +377,17 @@ func repoAssignment(ctx *Context, repo *repo_model.Repository) { return } - ctx.Repo.Permission, err = access_model.GetUserRepoPermission(ctx, repo, ctx.Doer) - if err != nil { - ctx.ServerError("GetUserRepoPermission", err) - return + if ctx.DoerNeedTwoFactorAuth() { + ctx.Repo.Permission = access_model.PermissionNoAccess() + } else { + ctx.Repo.Permission, err = access_model.GetUserRepoPermission(ctx, repo, ctx.Doer) + if err != nil { + ctx.ServerError("GetUserRepoPermission", err) + return + } } - if !ctx.Repo.Permission.HasAnyUnitAccessOrEveryoneAccess() && !canWriteAsMaintainer(ctx) { + if !ctx.Repo.Permission.HasAnyUnitAccessOrPublicAccess() && !canWriteAsMaintainer(ctx) { if ctx.FormString("go-get") == "1" { EarlyResponseForGoGetMeta(ctx) return @@ -386,7 +429,7 @@ func RepoAssignment(ctx *Context) { } // Check if the user is the same as the repository owner - if ctx.IsSigned && ctx.Doer.LowerName == strings.ToLower(userName) { + if ctx.IsSigned && strings.EqualFold(ctx.Doer.LowerName, userName) { ctx.Repo.Owner = ctx.Doer } else { ctx.Repo.Owner, err = user_model.GetUserByName(ctx, userName) @@ -667,12 +710,6 @@ func RepoAssignment(ctx *Context) { const headRefName = "HEAD" -func RepoRef() func(*Context) { - // old code does: return RepoRefByType(git.RefTypeBranch) - // in most cases, it is an abuse, so we just disable it completely and fix the abuses one by one (if there is anything wrong) - return nil -} - func getRefNameFromPath(repo *Repository, path string, isExist func(string) bool) string { refName := "" parts := strings.Split(path, "/") @@ -795,8 +832,8 @@ func RepoRefByType(detectRefType git.RefType) func(*Context) { return func(ctx *Context) { var err error refType := detectRefType - if ctx.Repo.Repository.IsBeingCreated() { - return // no git repo, so do nothing, users will see a "migrating" UI provided by "migrate/migrating.tmpl" + if ctx.Repo.Repository.IsBeingCreated() || ctx.Repo.Repository.IsBroken() { + return // no git repo, so do nothing, users will see a "migrating" UI provided by "migrate/migrating.tmpl", or empty repo guide } // Empty repository does not have reference information. 
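The owner-name comparison in RepoAssignment above switches from `LowerName == strings.ToLower(x)` to strings.EqualFold (the same change appears in org.go and user.go): EqualFold does rune-by-rune Unicode simple case folding without allocating a lowered copy. For example:

    package main

    import (
        "fmt"
        "strings"
    )

    func main() {
        fmt.Println(strings.EqualFold("Gitea-Admin", "gitea-admin")) // true
        fmt.Println(strings.EqualFold("straße", "STRASSE"))          // false: simple folding, not full case mapping
    }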
if ctx.Repo.Repository.IsEmpty { @@ -815,9 +852,9 @@ func RepoRefByType(detectRefType git.RefType) func(*Context) { if reqPath == "" { refShortName = ctx.Repo.Repository.DefaultBranch if !gitrepo.IsBranchExist(ctx, ctx.Repo.Repository, refShortName) { - brs, _, err := ctx.Repo.GitRepo.GetBranches(0, 1) + brs, _, err := ctx.Repo.GitRepo.GetBranchNames(0, 1) if err == nil && len(brs) != 0 { - refShortName = brs[0].Name + refShortName = brs[0] } else if len(brs) == 0 { log.Error("No branches in non-empty repository %s", ctx.Repo.GitRepo.Path) } else { @@ -936,6 +973,15 @@ func RepoRefByType(detectRefType git.RefType) func(*Context) { ctx.ServerError("GetCommitsCount", err) return } + if ctx.Repo.RefFullName.IsTag() { + rel, err := repo_model.GetRelease(ctx, ctx.Repo.Repository.ID, ctx.Repo.RefFullName.TagName()) + if err == nil && rel.NumCommits <= 0 { + rel.NumCommits = ctx.Repo.CommitsCount + if err := repo_model.UpdateReleaseNumCommits(ctx, rel); err != nil { + log.Error("UpdateReleaseNumCommits", err) + } + } + } ctx.Data["CommitsCount"] = ctx.Repo.CommitsCount ctx.Repo.GitRepo.LastCommitCache = git.NewLastCommitCache(ctx.Repo.CommitsCount, ctx.Repo.Repository.FullName(), ctx.Repo.GitRepo, cache.GetCache()) } diff --git a/services/context/upload/upload.go b/services/context/upload/upload.go index da4370a433..23707950d4 100644 --- a/services/context/upload/upload.go +++ b/services/context/upload/upload.go @@ -11,7 +11,9 @@ import ( "regexp" "strings" + repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/reqctx" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/services/context" ) @@ -39,7 +41,7 @@ func Verify(buf []byte, fileName, allowedTypesStr string) error { allowedTypesStr = strings.ReplaceAll(allowedTypesStr, "|", ",") // compat for old config format allowedTypes := []string{} - for _, entry := range strings.Split(allowedTypesStr, ",") { + for entry := range strings.SplitSeq(allowedTypesStr, ",") { entry = strings.ToLower(strings.TrimSpace(entry)) if entry != "" { allowedTypes = append(allowedTypes, entry) @@ -87,14 +89,15 @@ func Verify(buf []byte, fileName, allowedTypesStr string) error { // AddUploadContext renders template values for dropzone func AddUploadContext(ctx *context.Context, uploadType string) { - if uploadType == "release" { + switch uploadType { + case "release": ctx.Data["UploadUrl"] = ctx.Repo.RepoLink + "/releases/attachments" ctx.Data["UploadRemoveUrl"] = ctx.Repo.RepoLink + "/releases/attachments/remove" ctx.Data["UploadLinkUrl"] = ctx.Repo.RepoLink + "/releases/attachments" ctx.Data["UploadAccepts"] = strings.ReplaceAll(setting.Repository.Release.AllowedTypes, "|", ",") ctx.Data["UploadMaxFiles"] = setting.Attachment.MaxFiles ctx.Data["UploadMaxSize"] = setting.Attachment.MaxSize - } else if uploadType == "comment" { + case "comment": ctx.Data["UploadUrl"] = ctx.Repo.RepoLink + "/issues/attachments" ctx.Data["UploadRemoveUrl"] = ctx.Repo.RepoLink + "/issues/attachments/remove" if len(ctx.PathParam("index")) > 0 { @@ -105,12 +108,17 @@ func AddUploadContext(ctx *context.Context, uploadType string) { ctx.Data["UploadAccepts"] = strings.ReplaceAll(setting.Attachment.AllowedTypes, "|", ",") ctx.Data["UploadMaxFiles"] = setting.Attachment.MaxFiles ctx.Data["UploadMaxSize"] = setting.Attachment.MaxSize - } else if uploadType == "repo" { - ctx.Data["UploadUrl"] = ctx.Repo.RepoLink + "/upload-file" - ctx.Data["UploadRemoveUrl"] = ctx.Repo.RepoLink + "/upload-remove" - ctx.Data["UploadLinkUrl"] = 
ctx.Repo.RepoLink + "/upload-file" - ctx.Data["UploadAccepts"] = strings.ReplaceAll(setting.Repository.Upload.AllowedTypes, "|", ",") - ctx.Data["UploadMaxFiles"] = setting.Repository.Upload.MaxFiles - ctx.Data["UploadMaxSize"] = setting.Repository.Upload.FileMaxSize + default: + setting.PanicInDevOrTesting("Invalid upload type: %s", uploadType) } } + +func AddUploadContextForRepo(ctx reqctx.RequestContext, repo *repo_model.Repository) { + ctxData, repoLink := ctx.GetData(), repo.Link() + ctxData["UploadUrl"] = repoLink + "/upload-file" + ctxData["UploadRemoveUrl"] = repoLink + "/upload-remove" + ctxData["UploadLinkUrl"] = repoLink + "/upload-file" + ctxData["UploadAccepts"] = strings.ReplaceAll(setting.Repository.Upload.AllowedTypes, "|", ",") + ctxData["UploadMaxFiles"] = setting.Repository.Upload.MaxFiles + ctxData["UploadMaxSize"] = setting.Repository.Upload.FileMaxSize +} diff --git a/services/context/user.go b/services/context/user.go index c09ded8339..f1a3035ee9 100644 --- a/services/context/user.go +++ b/services/context/user.go @@ -61,7 +61,7 @@ func UserAssignmentAPI() func(ctx *APIContext) { func userAssignment(ctx *Base, doer *user_model.User, errCb func(int, any)) (contextUser *user_model.User) { username := ctx.PathParam("username") - if doer != nil && doer.LowerName == strings.ToLower(username) { + if doer != nil && strings.EqualFold(doer.LowerName, username) { contextUser = doer } else { var err error diff --git a/services/contexttest/context_tests.go b/services/contexttest/context_tests.go index c895de3569..44d9f4a70f 100644 --- a/services/contexttest/context_tests.go +++ b/services/contexttest/context_tests.go @@ -49,7 +49,7 @@ func mockRequest(t *testing.T, reqPath string) *http.Request { type MockContextOption struct { Render context.Render - SessionStore *session.MockStore + SessionStore session.Store } // MockContext mock context for unit tests @@ -170,10 +170,19 @@ func LoadUser(t *testing.T, ctx gocontext.Context, userID int64) { // LoadGitRepo load a git repo into a test context. Requires that ctx.Repo has // already been populated. 
-func LoadGitRepo(t *testing.T, ctx *context.Context) { - assert.NoError(t, ctx.Repo.Repository.LoadOwner(ctx)) +func LoadGitRepo(t *testing.T, ctx gocontext.Context) { + var repo *context.Repository + switch ctx := any(ctx).(type) { + case *context.Context: + repo = ctx.Repo + case *context.APIContext: + repo = ctx.Repo + default: + assert.FailNow(t, "context is not *context.Context or *context.APIContext") + } + assert.NoError(t, repo.Repository.LoadOwner(ctx)) var err error - ctx.Repo.GitRepo, err = gitrepo.OpenRepository(ctx, ctx.Repo.Repository) + repo.GitRepo, err = gitrepo.OpenRepository(ctx, repo.Repository) assert.NoError(t, err) } diff --git a/services/convert/convert.go b/services/convert/convert.go index ac2680766c..0de3822140 100644 --- a/services/convert/convert.go +++ b/services/convert/convert.go @@ -5,8 +5,11 @@ package convert import ( + "bytes" "context" "fmt" + "net/url" + "path" "strconv" "strings" "time" @@ -14,6 +17,7 @@ import ( actions_model "code.gitea.io/gitea/models/actions" asymkey_model "code.gitea.io/gitea/models/asymkey" "code.gitea.io/gitea/models/auth" + "code.gitea.io/gitea/models/db" git_model "code.gitea.io/gitea/models/git" issues_model "code.gitea.io/gitea/models/issues" "code.gitea.io/gitea/models/organization" @@ -22,6 +26,7 @@ import ( repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/actions" "code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" @@ -30,6 +35,9 @@ import ( "code.gitea.io/gitea/modules/util" asymkey_service "code.gitea.io/gitea/services/asymkey" "code.gitea.io/gitea/services/gitdiff" + + runnerv1 "code.gitea.io/actions-proto-go/runner/v1" + "github.com/nektos/act/pkg/model" ) // ToEmail convert models.EmailAddress to api.Email @@ -141,7 +149,7 @@ func ToBranchProtection(ctx context.Context, bp *git_model.ProtectedBranch, repo mergeWhitelistUsernames := getWhitelistEntities(readers, bp.MergeWhitelistUserIDs) approvalsWhitelistUsernames := getWhitelistEntities(readers, bp.ApprovalsWhitelistUserIDs) - teamReaders, err := organization.OrgFromUser(repo.Owner).TeamsWithAccessToRepo(ctx, repo.ID, perm.AccessModeRead) + teamReaders, err := organization.GetTeamsWithAccessToAnyRepoUnit(ctx, repo.Owner.ID, repo.ID, perm.AccessModeRead, unit.TypeCode, unit.TypePullRequests) if err != nil { log.Error("Repo.Owner.TeamsWithAccessToRepo: %v", err) } @@ -195,13 +203,22 @@ func ToBranchProtection(ctx context.Context, bp *git_model.ProtectedBranch, repo // ToTag convert a git.Tag to an api.Tag func ToTag(repo *repo_model.Repository, t *git.Tag) *api.Tag { + tarballURL := util.URLJoin(repo.HTMLURL(), "archive", t.Name+".tar.gz") + zipballURL := util.URLJoin(repo.HTMLURL(), "archive", t.Name+".zip") + + // Archive URLs are "" if the download feature is disabled + if setting.Repository.DisableDownloadSourceArchives { + tarballURL = "" + zipballURL = "" + } + return &api.Tag{ Name: t.Name, Message: strings.TrimSpace(t.Message), ID: t.ID.String(), Commit: ToCommitMeta(repo, t), - ZipballURL: util.URLJoin(repo.HTMLURL(), "archive", t.Name+".zip"), - TarballURL: util.URLJoin(repo.HTMLURL(), "archive", t.Name+".tar.gz"), + ZipballURL: zipballURL, + TarballURL: tarballURL, } } @@ -230,6 +247,242 @@ func ToActionTask(ctx context.Context, t *actions_model.ActionTask) (*api.Action }, nil } +func ToActionWorkflowRun(ctx context.Context, repo *repo_model.Repository, run *actions_model.ActionRun) 
(*api.ActionWorkflowRun, error) { + err := run.LoadAttributes(ctx) + if err != nil { + return nil, err + } + status, conclusion := ToActionsStatus(run.Status) + return &api.ActionWorkflowRun{ + ID: run.ID, + URL: fmt.Sprintf("%s/actions/runs/%d", repo.APIURL(), run.ID), + HTMLURL: run.HTMLURL(), + RunNumber: run.Index, + StartedAt: run.Started.AsLocalTime(), + CompletedAt: run.Stopped.AsLocalTime(), + Event: string(run.Event), + DisplayTitle: run.Title, + HeadBranch: git.RefName(run.Ref).BranchName(), + HeadSha: run.CommitSHA, + Status: status, + Conclusion: conclusion, + Path: fmt.Sprintf("%s@%s", run.WorkflowID, run.Ref), + Repository: ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeNone}), + TriggerActor: ToUser(ctx, run.TriggerUser, nil), + // We do not have a way to get a different User for the actor than the trigger user + Actor: ToUser(ctx, run.TriggerUser, nil), + }, nil +} + +func ToWorkflowRunAction(status actions_model.Status) string { + var action string + switch status { + case actions_model.StatusWaiting, actions_model.StatusBlocked: + action = "requested" + case actions_model.StatusRunning: + action = "in_progress" + } + if status.IsDone() { + action = "completed" + } + return action +} + +func ToActionsStatus(status actions_model.Status) (string, string) { + var action string + var conclusion string + switch status { + // This is a naming conflict of the webhook between Gitea and GitHub Actions + case actions_model.StatusWaiting: + action = "queued" + case actions_model.StatusBlocked: + action = "waiting" + case actions_model.StatusRunning: + action = "in_progress" + } + if status.IsDone() { + action = "completed" + switch status { + case actions_model.StatusSuccess: + conclusion = "success" + case actions_model.StatusCancelled: + conclusion = "cancelled" + case actions_model.StatusFailure: + conclusion = "failure" + case actions_model.StatusSkipped: + conclusion = "skipped" + } + } + return action, conclusion +} + +// ToActionWorkflowJob convert a actions_model.ActionRunJob to an api.ActionWorkflowJob +// task is optional and can be nil +func ToActionWorkflowJob(ctx context.Context, repo *repo_model.Repository, task *actions_model.ActionTask, job *actions_model.ActionRunJob) (*api.ActionWorkflowJob, error) { + err := job.LoadAttributes(ctx) + if err != nil { + return nil, err + } + + jobIndex := 0 + jobs, err := actions_model.GetRunJobsByRunID(ctx, job.RunID) + if err != nil { + return nil, err + } + for i, j := range jobs { + if j.ID == job.ID { + jobIndex = i + break + } + } + + status, conclusion := ToActionsStatus(job.Status) + var runnerID int64 + var runnerName string + var steps []*api.ActionWorkflowStep + + if job.TaskID != 0 { + if task == nil { + task, _, err = db.GetByID[actions_model.ActionTask](ctx, job.TaskID) + if err != nil { + return nil, err + } + } + + runnerID = task.RunnerID + if runner, ok, _ := db.GetByID[actions_model.ActionRunner](ctx, runnerID); ok { + runnerName = runner.Name + } + for i, step := range task.Steps { + stepStatus, stepConclusion := ToActionsStatus(job.Status) + steps = append(steps, &api.ActionWorkflowStep{ + Name: step.Name, + Number: int64(i), + Status: stepStatus, + Conclusion: stepConclusion, + StartedAt: step.Started.AsTime().UTC(), + CompletedAt: step.Stopped.AsTime().UTC(), + }) + } + } + + return &api.ActionWorkflowJob{ + ID: job.ID, + // missing api endpoint for this location + URL: fmt.Sprintf("%s/actions/jobs/%d", repo.APIURL(), job.ID), + HTMLURL: fmt.Sprintf("%s/jobs/%d", job.Run.HTMLURL(), 
jobIndex), + RunID: job.RunID, + // Missing api endpoint for this location, artifacts are available under a nested url + RunURL: fmt.Sprintf("%s/actions/runs/%d", repo.APIURL(), job.RunID), + Name: job.Name, + Labels: job.RunsOn, + RunAttempt: job.Attempt, + HeadSha: job.Run.CommitSHA, + HeadBranch: git.RefName(job.Run.Ref).BranchName(), + Status: status, + Conclusion: conclusion, + RunnerID: runnerID, + RunnerName: runnerName, + Steps: steps, + CreatedAt: job.Created.AsTime().UTC(), + StartedAt: job.Started.AsTime().UTC(), + CompletedAt: job.Stopped.AsTime().UTC(), + }, nil +} + +func getActionWorkflowEntry(ctx context.Context, repo *repo_model.Repository, commit *git.Commit, folder string, entry *git.TreeEntry) *api.ActionWorkflow { + cfgUnit := repo.MustGetUnit(ctx, unit.TypeActions) + cfg := cfgUnit.ActionsConfig() + + defaultBranch, _ := commit.GetBranchName() + + workflowURL := fmt.Sprintf("%s/actions/workflows/%s", repo.APIURL(), util.PathEscapeSegments(entry.Name())) + workflowRepoURL := fmt.Sprintf("%s/src/branch/%s/%s/%s", repo.HTMLURL(ctx), util.PathEscapeSegments(defaultBranch), util.PathEscapeSegments(folder), util.PathEscapeSegments(entry.Name())) + badgeURL := fmt.Sprintf("%s/actions/workflows/%s/badge.svg?branch=%s", repo.HTMLURL(ctx), util.PathEscapeSegments(entry.Name()), url.QueryEscape(repo.DefaultBranch)) + + // See https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#get-a-workflow + // State types: + // - active + // - deleted + // - disabled_fork + // - disabled_inactivity + // - disabled_manually + state := "active" + if cfg.IsWorkflowDisabled(entry.Name()) { + state = "disabled_manually" + } + + // The CreatedAt and UpdatedAt fields currently reflect the timestamp of the latest commit, which can later be refined + // by retrieving the first and last commits for the file history. The first commit would indicate the creation date, + // while the last commit would represent the modification date. The DeletedAt could be determined by identifying + // the last commit where the file existed. However, this implementation has not been done here yet, as it would likely + // cause a significant performance degradation. 
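ToActionsStatus, used by both ToActionWorkflowRun and ToActionWorkflowJob above, maps Gitea's internal run status onto the GitHub-style status/conclusion pair. Restated as a plain lookup derived from that switch, with string names standing in for the Gitea status constants:

    package main

    import "fmt"

    func main() {
        // Gitea status -> {status, conclusion}, per ToActionsStatus.
        mapping := map[string][2]string{
            "waiting":   {"queued", ""}, // the naming conflict noted in the code: Gitea "waiting" is GitHub "queued"
            "blocked":   {"waiting", ""},
            "running":   {"in_progress", ""},
            "success":   {"completed", "success"},
            "cancelled": {"completed", "cancelled"},
            "failure":   {"completed", "failure"},
            "skipped":   {"completed", "skipped"},
        }
        fmt.Println(mapping["running"], mapping["failure"])
    }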
+ createdAt := commit.Author.When + updatedAt := commit.Author.When + + content, err := actions.GetContentFromEntry(entry) + name := entry.Name() + if err == nil { + workflow, err := model.ReadWorkflow(bytes.NewReader(content)) + if err == nil { + // Only use the name when specified in the workflow file + if workflow.Name != "" { + name = workflow.Name + } + } else { + log.Error("getActionWorkflowEntry: Failed to parse workflow: %v", err) + } + } else { + log.Error("getActionWorkflowEntry: Failed to get content from entry: %v", err) + } + + return &api.ActionWorkflow{ + ID: entry.Name(), + Name: name, + Path: path.Join(folder, entry.Name()), + State: state, + CreatedAt: createdAt, + UpdatedAt: updatedAt, + URL: workflowURL, + HTMLURL: workflowRepoURL, + BadgeURL: badgeURL, + } +} + +func ListActionWorkflows(ctx context.Context, gitrepo *git.Repository, repo *repo_model.Repository) ([]*api.ActionWorkflow, error) { + defaultBranchCommit, err := gitrepo.GetBranchCommit(repo.DefaultBranch) + if err != nil { + return nil, err + } + + folder, entries, err := actions.ListWorkflows(defaultBranchCommit) + if err != nil { + return nil, err + } + + workflows := make([]*api.ActionWorkflow, len(entries)) + for i, entry := range entries { + workflows[i] = getActionWorkflowEntry(ctx, repo, defaultBranchCommit, folder, entry) + } + + return workflows, nil +} + +func GetActionWorkflow(ctx context.Context, gitrepo *git.Repository, repo *repo_model.Repository, workflowID string) (*api.ActionWorkflow, error) { + entries, err := ListActionWorkflows(ctx, gitrepo, repo) + if err != nil { + return nil, err + } + + for _, entry := range entries { + if entry.ID == workflowID { + return entry, nil + } + } + + return nil, util.NewNotExistErrorf("workflow %q not found", workflowID) +} + // ToActionArtifact convert a actions_model.ActionArtifact to an api.ActionArtifact func ToActionArtifact(repo *repo_model.Repository, art *actions_model.ActionArtifact) (*api.ActionArtifact, error) { url := fmt.Sprintf("%s/actions/artifacts/%d", repo.APIURL(), art.ID) @@ -252,6 +505,30 @@ func ToActionArtifact(repo *repo_model.Repository, art *actions_model.ActionArti }, nil } +func ToActionRunner(ctx context.Context, runner *actions_model.ActionRunner) *api.ActionRunner { + status := runner.Status() + apiStatus := "offline" + if runner.IsOnline() { + apiStatus = "online" + } + labels := make([]*api.ActionRunnerLabel, len(runner.AgentLabels)) + for i, label := range runner.AgentLabels { + labels[i] = &api.ActionRunnerLabel{ + ID: int64(i), + Name: label, + Type: "custom", + } + } + return &api.ActionRunner{ + ID: runner.ID, + Name: runner.Name, + Status: apiStatus, + Busy: status == runnerv1.RunnerStatus_RUNNER_STATUS_ACTIVE, + Ephemeral: runner.Ephemeral, + Labels: labels, + } +} + // ToVerification convert a git.Commit.Signature to an api.PayloadCommitVerification func ToVerification(ctx context.Context, c *git.Commit) *api.PayloadCommitVerification { verif := asymkey_service.ParseCommitWithSignature(ctx, c) @@ -281,6 +558,7 @@ func ToPublicKey(apiLink string, key *asymkey_model.PublicKey) *api.PublicKey { Title: key.Name, Fingerprint: key.Fingerprint, Created: key.CreatedUnix.AsTime(), + Updated: key.UpdatedUnix.AsTime(), } } @@ -449,7 +727,7 @@ func ToTagProtection(ctx context.Context, pt *git_model.ProtectedTag, repo *repo whitelistUsernames := getWhitelistEntities(readers, pt.AllowlistUserIDs) - teamReaders, err := organization.OrgFromUser(repo.Owner).TeamsWithAccessToRepo(ctx, repo.ID, perm.AccessModeRead) + teamReaders, err := 
organization.GetTeamsWithAccessToAnyRepoUnit(ctx, repo.Owner.ID, repo.ID, perm.AccessModeRead, unit.TypeCode, unit.TypePullRequests) if err != nil { log.Error("Repo.Owner.TeamsWithAccessToRepo: %v", err) } diff --git a/services/convert/git_commit_test.go b/services/convert/git_commit_test.go index 73cb5e8c71..ad1cc0eca3 100644 --- a/services/convert/git_commit_test.go +++ b/services/convert/git_commit_test.go @@ -33,7 +33,7 @@ func TestToCommitMeta(t *testing.T) { commitMeta := ToCommitMeta(headRepo, tag) assert.NotNil(t, commitMeta) - assert.EqualValues(t, &api.CommitMeta{ + assert.Equal(t, &api.CommitMeta{ SHA: sha1.EmptyObjectID().String(), URL: util.URLJoin(headRepo.APIURL(), "git/commits", sha1.EmptyObjectID().String()), Created: time.Unix(0, 0), diff --git a/services/convert/pull.go b/services/convert/pull.go index 928534ce5e..4acbd880dc 100644 --- a/services/convert/pull.go +++ b/services/convert/pull.go @@ -14,6 +14,7 @@ import ( repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/cache" + "code.gitea.io/gitea/modules/cachegroup" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/log" @@ -28,8 +29,8 @@ import ( // Optional - Merger func ToAPIPullRequest(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User) *api.PullRequest { var ( - baseBranch *git.Branch - headBranch *git.Branch + baseBranch string + headBranch string baseCommit *git.Commit err error ) @@ -60,14 +61,14 @@ func ToAPIPullRequest(ctx context.Context, pr *issues_model.PullRequest, doer *u doerID = doer.ID } - const repoDoerPermCacheKey = "repo_doer_perm_cache" - p, err := cache.GetWithContextCache(ctx, repoDoerPermCacheKey, fmt.Sprintf("%d_%d", pr.BaseRepoID, doerID), - func() (access_model.Permission, error) { + repoUserPerm, err := cache.GetWithContextCache(ctx, cachegroup.RepoUserPermission, fmt.Sprintf("%d-%d", pr.BaseRepoID, doerID), + func(ctx context.Context, _ string) (access_model.Permission, error) { return access_model.GetUserRepoPermission(ctx, pr.BaseRepo, doer) - }) + }, + ) if err != nil { log.Error("GetUserRepoPermission[%d]: %v", pr.BaseRepoID, err) - p.AccessMode = perm.AccessModeNone + repoUserPerm.AccessMode = perm.AccessModeNone } apiPullRequest := &api.PullRequest{ @@ -107,11 +108,11 @@ func ToAPIPullRequest(ctx context.Context, pr *issues_model.PullRequest, doer *u Name: pr.BaseBranch, Ref: pr.BaseBranch, RepoID: pr.BaseRepoID, - Repository: ToRepo(ctx, pr.BaseRepo, p), + Repository: ToRepo(ctx, pr.BaseRepo, repoUserPerm), }, Head: &api.PRBranchInfo{ Name: pr.HeadBranch, - Ref: fmt.Sprintf("%s%d/head", git.PullPrefix, pr.Index), + Ref: pr.GetGitHeadRefName(), RepoID: -1, }, } @@ -150,16 +151,16 @@ func ToAPIPullRequest(ctx context.Context, pr *issues_model.PullRequest, doer *u } defer gitRepo.Close() - baseBranch, err = gitRepo.GetBranch(pr.BaseBranch) - if err != nil && !git.IsErrBranchNotExist(err) { + exist, err := git_model.IsBranchExist(ctx, pr.BaseRepoID, pr.BaseBranch) + if err != nil { log.Error("GetBranch[%s]: %v", pr.BaseBranch, err) return nil } - if err == nil { - baseCommit, err = baseBranch.GetCommit() + if exist { + baseCommit, err = gitRepo.GetBranchCommit(pr.BaseBranch) if err != nil && !git.IsErrNotExist(err) { - log.Error("GetCommit[%s]: %v", baseBranch.Name, err) + log.Error("GetCommit[%s]: %v", baseBranch, err) return nil } @@ -169,16 +170,9 @@ func ToAPIPullRequest(ctx context.Context, pr *issues_model.PullRequest, doer *u } if pr.Flow == 
issues_model.PullRequestFlowAGit { - gitRepo, err := gitrepo.OpenRepository(ctx, pr.BaseRepo) - if err != nil { - log.Error("OpenRepository[%s]: %v", pr.GetGitRefName(), err) - return nil - } - defer gitRepo.Close() - - apiPullRequest.Head.Sha, err = gitRepo.GetRefCommitID(pr.GetGitRefName()) + apiPullRequest.Head.Sha, err = gitRepo.GetRefCommitID(pr.GetGitHeadRefName()) if err != nil { - log.Error("GetRefCommitID[%s]: %v", pr.GetGitRefName(), err) + log.Error("GetRefCommitID[%s]: %v", pr.GetGitHeadRefName(), err) return nil } apiPullRequest.Head.RepoID = pr.BaseRepoID @@ -203,8 +197,8 @@ func ToAPIPullRequest(ctx context.Context, pr *issues_model.PullRequest, doer *u } defer headGitRepo.Close() - headBranch, err = headGitRepo.GetBranch(pr.HeadBranch) - if err != nil && !git.IsErrBranchNotExist(err) { + exist, err = git_model.IsBranchExist(ctx, pr.HeadRepoID, pr.HeadBranch) + if err != nil { log.Error("GetBranch[%s]: %v", pr.HeadBranch, err) return nil } @@ -215,7 +209,7 @@ func ToAPIPullRequest(ctx context.Context, pr *issues_model.PullRequest, doer *u endCommitID string ) - if git.IsErrBranchNotExist(err) { + if !exist { headCommitID, err := headGitRepo.GetRefCommitID(apiPullRequest.Head.Ref) if err != nil && !git.IsErrNotExist(err) { log.Error("GetCommit[%s]: %v", pr.HeadBranch, err) @@ -226,9 +220,9 @@ func ToAPIPullRequest(ctx context.Context, pr *issues_model.PullRequest, doer *u endCommitID = headCommitID } } else { - commit, err := headBranch.GetCommit() + commit, err := headGitRepo.GetBranchCommit(pr.HeadBranch) if err != nil && !git.IsErrNotExist(err) { - log.Error("GetCommit[%s]: %v", headBranch.Name, err) + log.Error("GetCommit[%s]: %v", headBranch, err) return nil } if err == nil { @@ -389,7 +383,7 @@ func ToAPIPullRequests(ctx context.Context, baseRepo *repo_model.Repository, prs }, Head: &api.PRBranchInfo{ Name: pr.HeadBranch, - Ref: fmt.Sprintf("%s%d/head", git.PullPrefix, pr.Index), + Ref: pr.GetGitHeadRefName(), RepoID: -1, }, } @@ -422,72 +416,43 @@ func ToAPIPullRequests(ctx context.Context, baseRepo *repo_model.Repository, prs return nil, err } } - if baseBranch != nil { apiPullRequest.Base.Sha = baseBranch.CommitID } + if pr.HeadRepoID == pr.BaseRepoID { + apiPullRequest.Head.Repository = apiPullRequest.Base.Repository + } - if pr.Flow == issues_model.PullRequestFlowAGit { - apiPullRequest.Head.Sha, err = gitRepo.GetRefCommitID(pr.GetGitRefName()) + // pull request head branch, both repository and branch could not exist + if pr.HeadRepo != nil { + apiPullRequest.Head.RepoID = pr.HeadRepo.ID + exist, err := git_model.IsBranchExist(ctx, pr.HeadRepo.ID, pr.HeadBranch) if err != nil { - log.Error("GetRefCommitID[%s]: %v", pr.GetGitRefName(), err) + log.Error("IsBranchExist[%d]: %v", pr.HeadRepo.ID, err) return nil, err } - apiPullRequest.Head.RepoID = pr.BaseRepoID - apiPullRequest.Head.Repository = apiPullRequest.Base.Repository - apiPullRequest.Head.Name = "" - } - - var headGitRepo *git.Repository - if pr.HeadRepo != nil && pr.Flow == issues_model.PullRequestFlowGithub { - if pr.HeadRepoID == pr.BaseRepoID { - apiPullRequest.Head.RepoID = pr.HeadRepo.ID - apiPullRequest.Head.Repository = apiRepo - headGitRepo = gitRepo - } else { + if exist { + apiPullRequest.Head.Ref = pr.HeadBranch + } + if pr.HeadRepoID != pr.BaseRepoID { p, err := access_model.GetUserRepoPermission(ctx, pr.HeadRepo, doer) if err != nil { log.Error("GetUserRepoPermission[%d]: %v", pr.HeadRepoID, err) p.AccessMode = perm.AccessModeNone } - - apiPullRequest.Head.RepoID = pr.HeadRepo.ID 
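The conversion above no longer materializes *git.Branch values: it asks the database whether the branch still exists via git_model.IsBranchExist and reads the tip from the already-open repository, falling back to the refs/pull/<index>/head ref (pr.GetGitHeadRefName) when the branch is gone. A hedged condensation of that flow, assuming the same imports as pull.go above; the helper name is hypothetical:

// resolveHeadSha prefers the live head branch tip and falls back to the pull head ref.
func resolveHeadSha(ctx context.Context, gitRepo *git.Repository, pr *issues_model.PullRequest) (string, error) {
	exist, err := git_model.IsBranchExist(ctx, pr.HeadRepoID, pr.HeadBranch)
	if err != nil {
		return "", err
	}
	if exist {
		commit, err := gitRepo.GetBranchCommit(pr.HeadBranch)
		if err == nil {
			return commit.ID.String(), nil
		}
		if !git.IsErrNotExist(err) {
			return "", err
		}
	}
	// branch (or its commit) is gone: use the ref kept for every pull request head
	return gitRepo.GetRefCommitID(pr.GetGitHeadRefName())
}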
apiPullRequest.Head.Repository = ToRepo(ctx, pr.HeadRepo, p) - - headGitRepo, err = gitrepo.OpenRepository(ctx, pr.HeadRepo) - if err != nil { - log.Error("OpenRepository[%s]: %v", pr.HeadRepo.RepoPath(), err) - return nil, err - } - defer headGitRepo.Close() - } - - headBranch, err := headGitRepo.GetBranch(pr.HeadBranch) - if err != nil && !git.IsErrBranchNotExist(err) { - log.Error("GetBranch[%s]: %v", pr.HeadBranch, err) - return nil, err } + } + if apiPullRequest.Head.Ref == "" { + apiPullRequest.Head.Ref = pr.GetGitHeadRefName() + } - if git.IsErrBranchNotExist(err) { - headCommitID, err := headGitRepo.GetRefCommitID(apiPullRequest.Head.Ref) - if err != nil && !git.IsErrNotExist(err) { - log.Error("GetCommit[%s]: %v", pr.HeadBranch, err) - return nil, err - } - if err == nil { - apiPullRequest.Head.Sha = headCommitID - } - } else { - commit, err := headBranch.GetCommit() - if err != nil && !git.IsErrNotExist(err) { - log.Error("GetCommit[%s]: %v", headBranch.Name, err) - return nil, err - } - if err == nil { - apiPullRequest.Head.Ref = pr.HeadBranch - apiPullRequest.Head.Sha = commit.ID.String() - } - } + if pr.Flow == issues_model.PullRequestFlowAGit { + apiPullRequest.Head.Name = "" + } + apiPullRequest.Head.Sha, err = gitRepo.GetRefCommitID(pr.GetGitHeadRefName()) + if err != nil { + log.Error("GetRefCommitID[%s]: %v", pr.GetGitHeadRefName(), err) } if len(apiPullRequest.Head.Sha) == 0 && len(apiPullRequest.Head.Ref) != 0 { diff --git a/services/convert/pull_review_test.go b/services/convert/pull_review_test.go index a1296fafd4..d0a077ab24 100644 --- a/services/convert/pull_review_test.go +++ b/services/convert/pull_review_test.go @@ -19,8 +19,8 @@ func Test_ToPullReview(t *testing.T) { reviewer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}) review := unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: 6}) - assert.EqualValues(t, reviewer.ID, review.ReviewerID) - assert.EqualValues(t, issues_model.ReviewTypePending, review.Type) + assert.Equal(t, reviewer.ID, review.ReviewerID) + assert.Equal(t, issues_model.ReviewTypePending, review.Type) reviewList := []*issues_model.Review{review} diff --git a/services/convert/pull_test.go b/services/convert/pull_test.go index e069fa4a68..dfbe24d184 100644 --- a/services/convert/pull_test.go +++ b/services/convert/pull_test.go @@ -27,7 +27,7 @@ func TestPullRequest_APIFormat(t *testing.T) { assert.NoError(t, pr.LoadIssue(db.DefaultContext)) apiPullRequest := ToAPIPullRequest(git.DefaultContext, pr, nil) assert.NotNil(t, apiPullRequest) - assert.EqualValues(t, &structs.PRBranchInfo{ + assert.Equal(t, &structs.PRBranchInfo{ Name: "branch1", Ref: "refs/pull/2/head", Sha: "4a357436d925b5c974181ff12a994538ddc5a269", @@ -46,4 +46,11 @@ func TestPullRequest_APIFormat(t *testing.T) { assert.NotNil(t, apiPullRequest) assert.Nil(t, apiPullRequest.Head.Repository) assert.EqualValues(t, -1, apiPullRequest.Head.RepoID) + + apiPullRequests, err := ToAPIPullRequests(git.DefaultContext, pr.BaseRepo, []*issues_model.PullRequest{pr}, nil) + assert.NoError(t, err) + assert.Len(t, apiPullRequests, 1) + assert.NotNil(t, apiPullRequests[0]) + assert.Nil(t, apiPullRequests[0].Head.Repository) + assert.EqualValues(t, -1, apiPullRequests[0].Head.RepoID) } diff --git a/services/convert/release_test.go b/services/convert/release_test.go index 201b27e16d..bb618c9ca3 100644 --- a/services/convert/release_test.go +++ b/services/convert/release_test.go @@ -23,6 +23,6 @@ func TestRelease_ToRelease(t *testing.T) { apiRelease := 
ToAPIRelease(db.DefaultContext, repo1, release1) assert.NotNil(t, apiRelease) assert.EqualValues(t, 1, apiRelease.ID) - assert.EqualValues(t, "https://try.gitea.io/api/v1/repos/user2/repo1/releases/1", apiRelease.URL) - assert.EqualValues(t, "https://try.gitea.io/api/v1/repos/user2/repo1/releases/1/assets", apiRelease.UploadURL) + assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/releases/1", apiRelease.URL) + assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/releases/1/assets", apiRelease.UploadURL) } diff --git a/services/convert/repository.go b/services/convert/repository.go index 7dfdfd2179..a364591bb8 100644 --- a/services/convert/repository.go +++ b/services/convert/repository.go @@ -98,6 +98,8 @@ func innerToRepo(ctx context.Context, repo *repo_model.Repository, permissionInR allowSquash := false allowFastForwardOnly := false allowRebaseUpdate := false + allowManualMerge := true + autodetectManualMerge := false defaultDeleteBranchAfterMerge := false defaultMergeStyle := repo_model.MergeStyleMerge defaultAllowMaintainerEdit := false @@ -111,6 +113,8 @@ func innerToRepo(ctx context.Context, repo *repo_model.Repository, permissionInR allowSquash = config.AllowSquash allowFastForwardOnly = config.AllowFastForwardOnly allowRebaseUpdate = config.AllowRebaseUpdate + allowManualMerge = config.AllowManualMerge + autodetectManualMerge = config.AutodetectManualMerge defaultDeleteBranchAfterMerge = config.DefaultDeleteBranchAfterMerge defaultMergeStyle = config.GetDefaultMergeStyle() defaultAllowMaintainerEdit = config.DefaultAllowMaintainerEdit @@ -235,6 +239,8 @@ func innerToRepo(ctx context.Context, repo *repo_model.Repository, permissionInR AllowSquash: allowSquash, AllowFastForwardOnly: allowFastForwardOnly, AllowRebaseUpdate: allowRebaseUpdate, + AllowManualMerge: allowManualMerge, + AutodetectManualMerge: autodetectManualMerge, DefaultDeleteBranchAfterMerge: defaultDeleteBranchAfterMerge, DefaultMergeStyle: string(defaultMergeStyle), DefaultAllowMaintainerEdit: defaultAllowMaintainerEdit, @@ -245,7 +251,7 @@ func innerToRepo(ctx context.Context, repo *repo_model.Repository, permissionInR RepoTransfer: transfer, Topics: util.SliceNilAsEmpty(repo.Topics), ObjectFormatName: repo.ObjectFormatName, - Licenses: repoLicenses.StringList(), + Licenses: util.SliceNilAsEmpty(repoLicenses.StringList()), } } diff --git a/services/convert/status.go b/services/convert/status.go index 6cef63c1cd..b4864a0307 100644 --- a/services/convert/status.go +++ b/services/convert/status.go @@ -5,6 +5,7 @@ package convert import ( "context" + "net/url" git_model "code.gitea.io/gitea/models/git" user_model "code.gitea.io/gitea/models/user" @@ -32,34 +33,29 @@ func ToCommitStatus(ctx context.Context, status *git_model.CommitStatus) *api.Co return apiStatus } +func ToCommitStatuses(ctx context.Context, statuses []*git_model.CommitStatus) []*api.CommitStatus { + apiStatuses := make([]*api.CommitStatus, len(statuses)) + for i, status := range statuses { + apiStatuses[i] = ToCommitStatus(ctx, status) + } + return apiStatuses +} + // ToCombinedStatus converts List of CommitStatus to a CombinedStatus func ToCombinedStatus(ctx context.Context, statuses []*git_model.CommitStatus, repo *api.Repository) *api.CombinedStatus { if len(statuses) == 0 { return nil } - retStatus := &api.CombinedStatus{ - SHA: statuses[0].SHA, + combinedStatus := git_model.CalcCommitStatus(statuses) + + return &api.CombinedStatus{ + State: combinedStatus.State, + Statuses: ToCommitStatuses(ctx, statuses), + SHA: 
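Wrapping the license list in util.SliceNilAsEmpty makes the API field serialize as [] instead of null when no licenses were detected. A stdlib-only sketch of the idea (the real helper lives in modules/util):

package main

import (
	"encoding/json"
	"fmt"
)

// sliceNilAsEmpty mirrors util.SliceNilAsEmpty: a nil slice marshals to JSON null,
// an empty non-nil slice marshals to [].
func sliceNilAsEmpty[T any](s []T) []T {
	if s == nil {
		return []T{}
	}
	return s
}

func main() {
	var licenses []string
	rawNil, _ := json.Marshal(licenses)
	rawEmpty, _ := json.Marshal(sliceNilAsEmpty(licenses))
	fmt.Println(string(rawNil), string(rawEmpty)) // null []
}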
combinedStatus.SHA, TotalCount: len(statuses), Repository: repo, - URL: "", + CommitURL: repo.URL + "/commits/" + url.PathEscape(combinedStatus.SHA), + URL: repo.URL + "/commits/" + url.PathEscape(combinedStatus.SHA) + "/status", } - - retStatus.Statuses = make([]*api.CommitStatus, 0, len(statuses)) - for _, status := range statuses { - retStatus.Statuses = append(retStatus.Statuses, ToCommitStatus(ctx, status)) - if retStatus.State == "" || status.State.NoBetterThan(retStatus.State) { - retStatus.State = status.State - } - } - // According to https://docs.github.com/en/rest/commits/statuses?apiVersion=2022-11-28#get-the-combined-status-for-a-specific-reference - // > Additionally, a combined state is returned. The state is one of: - // > failure if any of the contexts report as error or failure - // > pending if there are no statuses or a context is pending - // > success if the latest status for all contexts is success - if retStatus.State.IsError() { - retStatus.State = api.CommitStatusFailure - } - - return retStatus } diff --git a/services/convert/user_test.go b/services/convert/user_test.go index 4b1effc7aa..199d500732 100644 --- a/services/convert/user_test.go +++ b/services/convert/user_test.go @@ -30,11 +30,11 @@ func TestUser_ToUser(t *testing.T) { apiUser = toUser(db.DefaultContext, user1, false, false) assert.False(t, apiUser.IsAdmin) - assert.EqualValues(t, api.VisibleTypePublic.String(), apiUser.Visibility) + assert.Equal(t, api.VisibleTypePublic.String(), apiUser.Visibility) user31 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 31, IsAdmin: false, Visibility: api.VisibleTypePrivate}) apiUser = toUser(db.DefaultContext, user31, true, true) assert.False(t, apiUser.IsAdmin) - assert.EqualValues(t, api.VisibleTypePrivate.String(), apiUser.Visibility) + assert.Equal(t, api.VisibleTypePrivate.String(), apiUser.Visibility) } diff --git a/services/convert/utils_test.go b/services/convert/utils_test.go index a8363ec6bd..7965624e2b 100644 --- a/services/convert/utils_test.go +++ b/services/convert/utils_test.go @@ -10,10 +10,10 @@ import ( ) func TestToCorrectPageSize(t *testing.T) { - assert.EqualValues(t, 30, ToCorrectPageSize(0)) - assert.EqualValues(t, 30, ToCorrectPageSize(-10)) - assert.EqualValues(t, 20, ToCorrectPageSize(20)) - assert.EqualValues(t, 50, ToCorrectPageSize(100)) + assert.Equal(t, 30, ToCorrectPageSize(0)) + assert.Equal(t, 30, ToCorrectPageSize(-10)) + assert.Equal(t, 20, ToCorrectPageSize(20)) + assert.Equal(t, 50, ToCorrectPageSize(100)) } func TestToGitServiceType(t *testing.T) { diff --git a/services/cron/tasks_extended.go b/services/cron/tasks_extended.go index 0018c5facc..3747111984 100644 --- a/services/cron/tasks_extended.go +++ b/services/cron/tasks_extended.go @@ -171,34 +171,35 @@ func registerDeleteOldSystemNotices() { }) } +type GCLFSConfig struct { + BaseConfig + OlderThan time.Duration + LastUpdatedMoreThanAgo time.Duration + NumberToCheckPerRepo int64 + ProportionToCheckPerRepo float64 +} + func registerGCLFS() { if !setting.LFS.StartServer { return } - type GCLFSConfig struct { - OlderThanConfig - LastUpdatedMoreThanAgo time.Duration - NumberToCheckPerRepo int64 - ProportionToCheckPerRepo float64 - } RegisterTaskFatal("gc_lfs", &GCLFSConfig{ - OlderThanConfig: OlderThanConfig{ - BaseConfig: BaseConfig{ - Enabled: false, - RunAtStart: false, - Schedule: "@every 24h", - }, - // Only attempt to garbage collect lfs meta objects older than a week as the order of git lfs upload - // and git object upload is not necessarily guaranteed. 
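ToCombinedStatus now delegates the state roll-up to git_model.CalcCommitStatus and the per-status conversion to the new ToCommitStatuses helper, instead of folding over the list inline, and both URL and CommitURL are built from the repository URL with url.PathEscape. The roll-up rule the removed loop implemented (and which CalcCommitStatus is assumed to preserve) follows the GitHub combined-status semantics; a stdlib-only restatement:

package main

import "fmt"

type commitStatusState string

const (
	statusSuccess commitStatusState = "success"
	statusPending commitStatusState = "pending"
	statusFailure commitStatusState = "failure"
	statusError   commitStatusState = "error"
)

// calcCombinedState: any error/failure makes the combined state failure,
// otherwise any pending makes it pending, otherwise success.
func calcCombinedState(states []commitStatusState) commitStatusState {
	if len(states) == 0 {
		return statusPending
	}
	combined := statusSuccess
	for _, s := range states {
		switch s {
		case statusError, statusFailure:
			return statusFailure
		case statusPending:
			combined = statusPending
		}
	}
	return combined
}

func main() {
	fmt.Println(calcCombinedState([]commitStatusState{statusSuccess, statusPending})) // pending
	fmt.Println(calcCombinedState([]commitStatusState{statusSuccess, statusError}))   // failure
}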
It's possible to imagine a situation whereby - // an LFS object is uploaded but the git branch is not uploaded immediately, or there are some rapid - // changes in new branches that might lead to lfs objects becoming temporarily unassociated with git - // objects. - // - // It is likely that a week is potentially excessive but it should definitely be enough that any - // unassociated LFS object is genuinely unassociated. - OlderThan: 24 * time.Hour * 7, + BaseConfig: BaseConfig{ + Enabled: false, + RunAtStart: false, + Schedule: "@every 24h", }, + // Only attempt to garbage collect lfs meta objects older than a week as the order of git lfs upload + // and git object upload is not necessarily guaranteed. It's possible to imagine a situation whereby + // an LFS object is uploaded but the git branch is not uploaded immediately, or there are some rapid + // changes in new branches that might lead to lfs objects becoming temporarily unassociated with git + // objects. + // + // It is likely that a week is potentially excessive but it should definitely be enough that any + // unassociated LFS object is genuinely unassociated. + OlderThan: 24 * time.Hour * 7, + // Only GC things that haven't been looked at in the past 3 days LastUpdatedMoreThanAgo: 24 * time.Hour * 3, NumberToCheckPerRepo: 100, diff --git a/services/cron/tasks_extended_test.go b/services/cron/tasks_extended_test.go new file mode 100644 index 0000000000..a3d40630a3 --- /dev/null +++ b/services/cron/tasks_extended_test.go @@ -0,0 +1,51 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package cron + +import ( + "testing" + "time" + + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/test" + + "github.com/stretchr/testify/assert" +) + +func Test_GCLFSConfig(t *testing.T) { + cfg, err := setting.NewConfigProviderFromData(` +[cron.gc_lfs] +ENABLED = true +RUN_AT_START = true +SCHEDULE = "@every 2h" +OLDER_THAN = "1h" +LAST_UPDATED_MORE_THAN_AGO = "7h" +NUMBER_TO_CHECK_PER_REPO = 10 +PROPORTION_TO_CHECK_PER_REPO = 0.1 +`) + assert.NoError(t, err) + defer test.MockVariableValue(&setting.CfgProvider, cfg)() + + config := &GCLFSConfig{ + BaseConfig: BaseConfig{ + Enabled: false, + RunAtStart: false, + Schedule: "@every 24h", + }, + OlderThan: 24 * time.Hour * 7, + LastUpdatedMoreThanAgo: 24 * time.Hour * 3, + NumberToCheckPerRepo: 100, + ProportionToCheckPerRepo: 0.6, + } + + _, err = setting.GetCronSettings("gc_lfs", config) + assert.NoError(t, err) + assert.True(t, config.Enabled) + assert.True(t, config.RunAtStart) + assert.Equal(t, "@every 2h", config.Schedule) + assert.Equal(t, 1*time.Hour, config.OlderThan) + assert.Equal(t, 7*time.Hour, config.LastUpdatedMoreThanAgo) + assert.Equal(t, int64(10), config.NumberToCheckPerRepo) + assert.InDelta(t, 0.1, config.ProportionToCheckPerRepo, 0.001) +} diff --git a/services/doctor/actions.go b/services/doctor/actions.go index 7c44fb8392..28e26c88eb 100644 --- a/services/doctor/actions.go +++ b/services/doctor/actions.go @@ -19,7 +19,7 @@ func disableMirrorActionsUnit(ctx context.Context, logger log.Logger, autofix bo var reposToFix []*repo_model.Repository for page := 1; ; page++ { - repos, _, err := repo_model.SearchRepository(ctx, &repo_model.SearchRepoOptions{ + repos, _, err := repo_model.SearchRepository(ctx, repo_model.SearchRepoOptions{ ListOptions: db.ListOptions{ PageSize: repo_model.RepositoryListDefaultPageSize, Page: page, diff --git a/services/doctor/authorizedkeys.go b/services/doctor/authorizedkeys.go index 
8d6fc9cb5e..46e7099dce 100644 --- a/services/doctor/authorizedkeys.go +++ b/services/doctor/authorizedkeys.go @@ -7,6 +7,7 @@ import ( "bufio" "bytes" "context" + "errors" "fmt" "os" "path/filepath" @@ -78,7 +79,7 @@ func checkAuthorizedKeys(ctx context.Context, logger log.Logger, autofix bool) e fPath, "gitea admin regenerate keys", "gitea doctor --run authorized-keys --fix") - return fmt.Errorf(`authorized_keys is out of date and should be regenerated with "gitea admin regenerate keys" or "gitea doctor --run authorized-keys --fix"`) + return errors.New(`authorized_keys is out of date and should be regenerated with "gitea admin regenerate keys" or "gitea doctor --run authorized-keys --fix"`) } logger.Warn("authorized_keys is out of date. Attempting rewrite...") err = asymkey_service.RewriteAllPublicKeys(ctx) diff --git a/services/doctor/dbconsistency.go b/services/doctor/dbconsistency.go index 62326ed07c..d5a133d8b2 100644 --- a/services/doctor/dbconsistency.go +++ b/services/doctor/dbconsistency.go @@ -15,6 +15,7 @@ import ( secret_model "code.gitea.io/gitea/models/secret" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" + issue_service "code.gitea.io/gitea/services/issue" ) type consistencyCheck struct { @@ -93,7 +94,7 @@ func prepareDBConsistencyChecks() []consistencyCheck { // find issues without existing repository Name: "Orphaned Issues without existing repository", Counter: issues_model.CountOrphanedIssues, - Fixer: asFixer(issues_model.DeleteOrphanedIssues), + Fixer: asFixer(issue_service.DeleteOrphanedIssues), }, // find releases without existing repository genericOrphanCheck("Orphaned Releases without existing repository", diff --git a/services/doctor/fix16961_test.go b/services/doctor/fix16961_test.go index 498ed9c8d5..11a128620c 100644 --- a/services/doctor/fix16961_test.go +++ b/services/doctor/fix16961_test.go @@ -19,12 +19,6 @@ func Test_fixUnitConfig_16961(t *testing.T) { wantErr bool }{ { - name: "empty", - bs: "", - wantFixed: true, - wantErr: false, - }, - { name: "normal: {}", bs: "{}", wantFixed: false, @@ -221,7 +215,7 @@ func Test_fixPullRequestsConfig_16961(t *testing.T) { if gotFixed != tt.wantFixed { t.Errorf("fixPullRequestsConfig_16961() = %v, want %v", gotFixed, tt.wantFixed) } - assert.EqualValues(t, &tt.expected, cfg) + assert.Equal(t, &tt.expected, cfg) }) } } @@ -265,7 +259,7 @@ func Test_fixIssuesConfig_16961(t *testing.T) { if gotFixed != tt.wantFixed { t.Errorf("fixIssuesConfig_16961() = %v, want %v", gotFixed, tt.wantFixed) } - assert.EqualValues(t, &tt.expected, cfg) + assert.Equal(t, &tt.expected, cfg) }) } } diff --git a/services/doctor/lfs.go b/services/doctor/lfs.go index 5f110b8f97..a90f394450 100644 --- a/services/doctor/lfs.go +++ b/services/doctor/lfs.go @@ -5,7 +5,7 @@ package doctor import ( "context" - "fmt" + "errors" "time" "code.gitea.io/gitea/modules/log" @@ -27,7 +27,7 @@ func init() { func garbageCollectLFSCheck(ctx context.Context, logger log.Logger, autofix bool) error { if !setting.LFS.StartServer { - return fmt.Errorf("LFS support is disabled") + return errors.New("LFS support is disabled") } if err := repository.GarbageCollectLFSMetaObjects(ctx, repository.GarbageCollectLFSMetaObjectsOptions{ diff --git a/services/doctor/mergebase.go b/services/doctor/mergebase.go index 482bcd0a46..cbd8aa59fd 100644 --- a/services/doctor/mergebase.go +++ b/services/doctor/mergebase.go @@ -42,7 +42,7 @@ func checkPRMergeBase(ctx context.Context, logger log.Logger, autofix bool) erro if !pr.HasMerged { var err error - 
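Several doctor checks above switch from fmt.Errorf to errors.New for constant messages; fmt.Errorf only earns its keep when it formats arguments or wraps a cause with %w so errors.Is/errors.As keep working. A small stdlib example of the distinction:

package main

import (
	"errors"
	"fmt"
	"os"
)

// constant message: errors.New is enough (and avoids a needless format pass)
var errLFSDisabled = errors.New("LFS support is disabled")

func openConfig(path string) error {
	f, err := os.Open(path)
	if err != nil {
		// dynamic message wrapping the cause: %w keeps the error chain intact
		return fmt.Errorf("open config %q: %w", path, err)
	}
	defer f.Close()
	return nil
}

func main() {
	err := openConfig("/nonexistent/app.ini")
	fmt.Println(errors.Is(err, os.ErrNotExist), errors.Is(errLFSDisabled, os.ErrNotExist)) // true false
}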
pr.MergeBase, _, err = git.NewCommand("merge-base").AddDashesAndList(pr.BaseBranch, pr.GetGitRefName()).RunStdString(ctx, &git.RunOpts{Dir: repoPath}) + pr.MergeBase, _, err = git.NewCommand("merge-base").AddDashesAndList(pr.BaseBranch, pr.GetGitHeadRefName()).RunStdString(ctx, &git.RunOpts{Dir: repoPath}) if err != nil { var err2 error pr.MergeBase, _, err2 = git.NewCommand("rev-parse").AddDynamicArguments(git.BranchPrefix+pr.BaseBranch).RunStdString(ctx, &git.RunOpts{Dir: repoPath}) @@ -63,7 +63,7 @@ func checkPRMergeBase(ctx context.Context, logger log.Logger, autofix bool) erro } refs := append([]string{}, parents[1:]...) - refs = append(refs, pr.GetGitRefName()) + refs = append(refs, pr.GetGitHeadRefName()) cmd := git.NewCommand("merge-base").AddDashesAndList(refs...) pr.MergeBase, _, err = cmd.RunStdString(ctx, &git.RunOpts{Dir: repoPath}) if err != nil { diff --git a/services/doctor/paths.go b/services/doctor/paths.go index 3f62d587ab..4214c36b1a 100644 --- a/services/doctor/paths.go +++ b/services/doctor/paths.go @@ -99,15 +99,14 @@ func checkConfigurationFiles(ctx context.Context, logger log.Logger, autofix boo func isWritableDir(path string) error { // There's no platform-independent way of checking if a directory is writable // https://stackoverflow.com/questions/20026320/how-to-tell-if-folder-exists-and-is-writable - tmpFile, err := os.CreateTemp(path, "doctors-order") if err != nil { return err } if err := os.Remove(tmpFile.Name()); err != nil { - fmt.Printf("Warning: can't remove temporary file: '%s'\n", tmpFile.Name()) //nolint:forbidigo + log.Warn("can't remove temporary file: %q", tmpFile.Name()) } - tmpFile.Close() + _ = tmpFile.Close() return nil } diff --git a/services/doctor/repository.go b/services/doctor/repository.go index 6c33426636..359c4a17e0 100644 --- a/services/doctor/repository.go +++ b/services/doctor/repository.go @@ -7,7 +7,6 @@ import ( "context" "code.gitea.io/gitea/models/db" - user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/storage" repo_service "code.gitea.io/gitea/services/repository" @@ -39,7 +38,6 @@ func deleteOrphanedRepos(ctx context.Context) (int64, error) { batchSize := db.MaxBatchInsertSize("repository") e := db.GetEngine(ctx) var deleted int64 - adminUser := &user_model.User{IsAdmin: true} for { select { @@ -60,7 +58,7 @@ func deleteOrphanedRepos(ctx context.Context) (int64, error) { } for _, id := range ids { - if err := repo_service.DeleteRepositoryDirectly(ctx, adminUser, id, true); err != nil { + if err := repo_service.DeleteRepositoryDirectly(ctx, id, true); err != nil { return deleted, err } deleted++ diff --git a/services/doctor/storage.go b/services/doctor/storage.go index 3f3b562c37..77fc6d65df 100644 --- a/services/doctor/storage.go +++ b/services/doctor/storage.go @@ -121,7 +121,7 @@ func checkStorage(opts *checkStorageOptions) func(ctx context.Context, logger lo storer: storage.LFS, isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) { // The oid of an LFS stored object is the name but with all the path.Separators removed - oid := strings.ReplaceAll(path, "/", "") + oid := strings.ReplaceAll(strings.ReplaceAll(path, "\\", ""), "/", "") exists, err := git.ExistsLFSObject(ctx, oid) return !exists, err }, diff --git a/services/externalaccount/link.go b/services/externalaccount/link.go deleted file mode 100644 index d6e2ea7e94..0000000000 --- a/services/externalaccount/link.go +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright 2021 The Gitea Authors. 
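The LFS orphan check above recovers the object ID by stripping path separators from the storage path, and the fix also removes backslashes so Windows-style paths are handled. A stdlib-only illustration, assuming the usual sharded layout where the oid is split across the leading directories:

package main

import (
	"fmt"
	"strings"
)

// oidFromStoragePath mirrors the doctor check: removing every separator from the
// sharded storage path restores the oid, regardless of the separator style.
func oidFromStoragePath(path string) string {
	return strings.ReplaceAll(strings.ReplaceAll(path, "\\", ""), "/", "")
}

func main() {
	// hypothetical sharded path for the oid "4bf5122f34"
	fmt.Println(oidFromStoragePath("4b/f5/122f34")) // 4bf5122f34
	fmt.Println(oidFromStoragePath(`4b\f5\122f34`)) // 4bf5122f34
}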
All rights reserved. -// SPDX-License-Identifier: MIT - -package externalaccount - -import ( - "context" - "fmt" - - user_model "code.gitea.io/gitea/models/user" - - "github.com/markbates/goth" -) - -// Store represents a thing that stores things -type Store interface { - Get(any) any - Set(any, any) error - Release() error -} - -// LinkAccountFromStore links the provided user with a stored external user -func LinkAccountFromStore(ctx context.Context, store Store, user *user_model.User) error { - gothUser := store.Get("linkAccountGothUser") - if gothUser == nil { - return fmt.Errorf("not in LinkAccount session") - } - - return LinkAccountToUser(ctx, user, gothUser.(goth.User)) -} diff --git a/services/externalaccount/user.go b/services/externalaccount/user.go index b53e33654a..1eddc4a5df 100644 --- a/services/externalaccount/user.go +++ b/services/externalaccount/user.go @@ -8,7 +8,6 @@ import ( "strconv" "strings" - "code.gitea.io/gitea/models/auth" issues_model "code.gitea.io/gitea/models/issues" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" @@ -17,15 +16,11 @@ import ( "github.com/markbates/goth" ) -func toExternalLoginUser(ctx context.Context, user *user_model.User, gothUser goth.User) (*user_model.ExternalLoginUser, error) { - authSource, err := auth.GetActiveOAuth2SourceByName(ctx, gothUser.Provider) - if err != nil { - return nil, err - } +func toExternalLoginUser(authSourceID int64, user *user_model.User, gothUser goth.User) *user_model.ExternalLoginUser { return &user_model.ExternalLoginUser{ ExternalID: gothUser.UserID, UserID: user.ID, - LoginSourceID: authSource.ID, + LoginSourceID: authSourceID, RawData: gothUser.RawData, Provider: gothUser.Provider, Email: gothUser.Email, @@ -40,15 +35,12 @@ func toExternalLoginUser(ctx context.Context, user *user_model.User, gothUser go AccessTokenSecret: gothUser.AccessTokenSecret, RefreshToken: gothUser.RefreshToken, ExpiresAt: gothUser.ExpiresAt, - }, nil + } } // LinkAccountToUser link the gothUser to the user -func LinkAccountToUser(ctx context.Context, user *user_model.User, gothUser goth.User) error { - externalLoginUser, err := toExternalLoginUser(ctx, user, gothUser) - if err != nil { - return err - } +func LinkAccountToUser(ctx context.Context, authSourceID int64, user *user_model.User, gothUser goth.User) error { + externalLoginUser := toExternalLoginUser(authSourceID, user, gothUser) if err := user_model.LinkExternalToUser(ctx, user, externalLoginUser); err != nil { return err @@ -72,12 +64,8 @@ func LinkAccountToUser(ctx context.Context, user *user_model.User, gothUser goth } // EnsureLinkExternalToUser link the gothUser to the user -func EnsureLinkExternalToUser(ctx context.Context, user *user_model.User, gothUser goth.User) error { - externalLoginUser, err := toExternalLoginUser(ctx, user, gothUser) - if err != nil { - return err - } - +func EnsureLinkExternalToUser(ctx context.Context, authSourceID int64, user *user_model.User, gothUser goth.User) error { + externalLoginUser := toExternalLoginUser(authSourceID, user, gothUser) return user_model.EnsureLinkExternalToUser(ctx, externalLoginUser) } diff --git a/services/feed/feed.go b/services/feed/feed.go index 214e9b5765..1dbd2e0e26 100644 --- a/services/feed/feed.go +++ b/services/feed/feed.go @@ -6,6 +6,7 @@ package feed import ( "context" "fmt" + "strings" activities_model "code.gitea.io/gitea/models/activities" "code.gitea.io/gitea/models/db" @@ -13,15 +14,10 @@ import ( repo_model "code.gitea.io/gitea/models/repo" 
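With services/externalaccount/link.go removed, callers no longer link through a session store; LinkAccountToUser and EnsureLinkExternalToUser now take the auth source ID directly instead of re-resolving the OAuth2 source by provider name. A hedged sketch of a caller under the new signature, assuming the usual auth/user model, externalaccount and goth imports; the helper name is hypothetical:

// linkExternal resolves the auth source once in the caller and passes only its ID down.
func linkExternal(ctx context.Context, authSource *auth_model.Source, user *user_model.User, gothUser goth.User) error {
	return externalaccount.LinkAccountToUser(ctx, authSource.ID, user, gothUser)
}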
"code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" - "code.gitea.io/gitea/modules/cache" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" ) -func userFeedCacheKey(userID int64) string { - return fmt.Sprintf("user_feed_%d", userID) -} - func GetFeedsForDashboard(ctx context.Context, opts activities_model.GetFeedsOptions) (activities_model.ActionList, int, error) { opts.DontCount = opts.RequestedTeam == nil && opts.Date == "" results, cnt, err := activities_model.GetFeeds(ctx, opts) @@ -39,7 +35,18 @@ func GetFeeds(ctx context.Context, opts activities_model.GetFeedsOptions) (activ // * Organization action: UserID=100 (the repo's org), ActUserID=1 // * Watcher action: UserID=20 (a user who is watching a repo), ActUserID=1 func notifyWatchers(ctx context.Context, act *activities_model.Action, watchers []*repo_model.Watch, permCode, permIssue, permPR []bool) error { - // Add feed for actioner. + // MySQL has TEXT length limit 65535. + // Sometimes the content is "field1|field2|field3", sometimes the content is JSON (ActionMirrorSyncPush, ActionCommitRepo, ActionPushTag, etc...) + if left, right := util.EllipsisDisplayStringX(act.Content, 65535); right != "" { + if strings.HasPrefix(act.Content, `{"`) && strings.HasSuffix(act.Content, `}`) { + // FIXME: at the moment we can do nothing if the content is JSON and it is too long + act.Content = "{}" + } else { + act.Content = left + } + } + + // Add feed for actor. act.UserID = act.ActUserID if err := db.Insert(ctx, act); err != nil { return fmt.Errorf("insert new actioner: %w", err) @@ -75,24 +82,18 @@ func notifyWatchers(ctx context.Context, act *activities_model.Action, watchers if !permPR[i] { continue } + default: } if err := db.Insert(ctx, act); err != nil { return fmt.Errorf("insert new action: %w", err) } - - total, err := activities_model.CountUserFeeds(ctx, act.UserID) - if err != nil { - return fmt.Errorf("count user feeds: %w", err) - } - - _ = cache.GetCache().Put(userFeedCacheKey(act.UserID), fmt.Sprintf("%d", total), setting.CacheService.TTLSeconds()) } return nil } -// NotifyWatchersActions creates batch of actions for every watcher. +// NotifyWatchers creates batch of actions for every watcher. 
func NotifyWatchers(ctx context.Context, acts ...*activities_model.Action) error { return db.WithTx(ctx, func(ctx context.Context) error { if len(acts) == 0 { diff --git a/services/feed/feed_test.go b/services/feed/feed_test.go index 243bc046b0..705d42a2eb 100644 --- a/services/feed/feed_test.go +++ b/services/feed/feed_test.go @@ -30,7 +30,7 @@ func TestGetFeeds(t *testing.T) { assert.NoError(t, err) if assert.Len(t, actions, 1) { assert.EqualValues(t, 1, actions[0].ID) - assert.EqualValues(t, user.ID, actions[0].UserID) + assert.Equal(t, user.ID, actions[0].UserID) } assert.Equal(t, int64(1), count) @@ -107,7 +107,7 @@ func TestGetFeeds2(t *testing.T) { assert.Len(t, actions, 1) if assert.Len(t, actions, 1) { assert.EqualValues(t, 2, actions[0].ID) - assert.EqualValues(t, org.ID, actions[0].UserID) + assert.Equal(t, org.ID, actions[0].UserID) } assert.Equal(t, int64(1), count) @@ -147,7 +147,7 @@ func TestRepoActions(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}) _ = db.TruncateBeans(db.DefaultContext, &activities_model.Action{}) - for i := 0; i < 3; i++ { + for i := range 3 { _ = db.Insert(db.DefaultContext, &activities_model.Action{ UserID: 2 + int64(i), ActUserID: 2, diff --git a/services/forms/auth_form.go b/services/forms/auth_form.go index c9f3182b3a..886110236c 100644 --- a/services/forms/auth_form.go +++ b/services/forms/auth_form.go @@ -14,47 +14,58 @@ import ( // AuthenticationForm form for authentication type AuthenticationForm struct { - ID int64 - Type int `binding:"Range(2,7)"` - Name string `binding:"Required;MaxSize(30)"` - Host string - Port int - BindDN string - BindPassword string - UserBase string - UserDN string - AttributeUsername string - AttributeName string - AttributeSurname string - AttributeMail string - AttributeSSHPublicKey string - AttributeAvatar string - AttributesInBind bool - UsePagedSearch bool - SearchPageSize int - Filter string - AdminFilter string - GroupsEnabled bool - GroupDN string - GroupFilter string - GroupMemberUID string - UserUID string - RestrictedFilter string - AllowDeactivateAll bool - IsActive bool - IsSyncEnabled bool - SMTPAuth string - SMTPHost string - SMTPPort int - AllowedDomains string - SecurityProtocol int `binding:"Range(0,2)"` - TLS bool - SkipVerify bool - HeloHostname string - DisableHelo bool - ForceSMTPS bool - PAMServiceName string - PAMEmailDomain string + ID int64 + Type int `binding:"Range(2,7)"` + Name string `binding:"Required;MaxSize(30)"` + TwoFactorPolicy string + IsActive bool + IsSyncEnabled bool + + // LDAP + Host string + Port int + BindDN string + BindPassword string + UserBase string + UserDN string + AttributeUsername string + AttributeName string + AttributeSurname string + AttributeMail string + AttributeSSHPublicKey string + AttributeAvatar string + AttributesInBind bool + UsePagedSearch bool + SearchPageSize int + Filter string + AdminFilter string + GroupsEnabled bool + GroupDN string + GroupFilter string + GroupMemberUID string + UserUID string + RestrictedFilter string + AllowDeactivateAll bool + GroupTeamMap string `binding:"ValidGroupTeamMap"` + GroupTeamMapRemoval bool + + // SMTP + SMTPAuth string + SMTPHost string + SMTPPort int + AllowedDomains string + SecurityProtocol int `binding:"Range(0,2)"` + TLS bool + SkipVerify bool + HeloHostname string + DisableHelo bool + ForceSMTPS bool + + // PAM + PAMServiceName string + PAMEmailDomain string + + // Oauth2 & OIDC Oauth2Provider string Oauth2Key string 
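The feed test above adopts Go 1.22's range-over-integer loop, which several other hunks in this change use in place of the classic three-clause counter. A minimal example of the two equivalent spellings, with and without the index:

package main

import "fmt"

func main() {
	// Go 1.22+: ranging over an integer iterates 0..n-1
	for i := range 3 {
		fmt.Println("watcher offset", i)
	}
	// when the index is unused, the variable can be dropped entirely
	for range 2 {
		fmt.Println("tick")
	}
}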
Oauth2Secret string @@ -74,14 +85,15 @@ type AuthenticationForm struct { Oauth2RestrictedGroup string Oauth2GroupTeamMap string `binding:"ValidGroupTeamMap"` Oauth2GroupTeamMapRemoval bool - SkipLocalTwoFA bool - SSPIAutoCreateUsers bool - SSPIAutoActivateUsers bool - SSPIStripDomainNames bool - SSPISeparatorReplacement string `binding:"AlphaDashDot;MaxSize(5)"` - SSPIDefaultLanguage string - GroupTeamMap string `binding:"ValidGroupTeamMap"` - GroupTeamMapRemoval bool + Oauth2SSHPublicKeyClaimName string + Oauth2FullNameClaimName string + + // SSPI + SSPIAutoCreateUsers bool + SSPIAutoActivateUsers bool + SSPIStripDomainNames bool + SSPISeparatorReplacement string `binding:"AlphaDashDot;MaxSize(5)"` + SSPIDefaultLanguage string } // Validate validates fields diff --git a/services/forms/org.go b/services/forms/org.go index db182f7e96..2ac18ef25c 100644 --- a/services/forms/org.go +++ b/services/forms/org.go @@ -36,7 +36,6 @@ func (f *CreateOrgForm) Validate(req *http.Request, errs binding.Errors) binding // UpdateOrgSettingForm form for updating organization settings type UpdateOrgSettingForm struct { - Name string `binding:"Required;Username;MaxSize(40)" locale:"org.org_name_holder"` FullName string `binding:"MaxSize(100)"` Email string `binding:"MaxSize(255)"` Description string `binding:"MaxSize(255)"` @@ -53,6 +52,11 @@ func (f *UpdateOrgSettingForm) Validate(req *http.Request, errs binding.Errors) return middleware.Validate(errs, ctx.Data, f, ctx.Locale) } +type RenameOrgForm struct { + OrgName string `binding:"Required"` + NewOrgName string `binding:"Required;Username;MaxSize(40)" locale:"org.org_name_holder"` +} + // ___________ // \__ ___/___ _____ _____ // | |_/ __ \\__ \ / \ diff --git a/services/forms/repo_form.go b/services/forms/repo_form.go index 1366d30b1f..cb267f891c 100644 --- a/services/forms/repo_form.go +++ b/services/forms/repo_form.go @@ -10,7 +10,6 @@ import ( issues_model "code.gitea.io/gitea/models/issues" project_model "code.gitea.io/gitea/models/project" - "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/web/middleware" "code.gitea.io/gitea/services/context" @@ -110,17 +109,14 @@ type RepoSettingForm struct { EnablePrune bool // Advanced settings - EnableCode bool - DefaultCodeEveryoneAccess string + EnableCode bool - EnableWiki bool - EnableExternalWiki bool - DefaultWikiBranch string - DefaultWikiEveryoneAccess string - ExternalWikiURL string + EnableWiki bool + EnableExternalWiki bool + DefaultWikiBranch string + ExternalWikiURL string EnableIssues bool - DefaultIssuesEveryoneAccess string EnableExternalTracker bool ExternalTrackerURL string TrackerURLFormat string @@ -237,10 +233,12 @@ type WebhookForm struct { Release bool Package bool Status bool + WorkflowRun bool WorkflowJob bool Active bool BranchFilter string `binding:"GlobPattern"` AuthorizationHeader string + Secret string } // PushOnly if the hook will be triggered when push @@ -263,7 +261,6 @@ type NewWebhookForm struct { PayloadURL string `binding:"Required;ValidUrl"` HTTPMethod string `binding:"Required;In(POST,GET)"` ContentType int `binding:"Required"` - Secret string WebhookForm } @@ -277,7 +274,6 @@ func (f *NewWebhookForm) Validate(req *http.Request, errs binding.Errors) bindin type NewGogshookForm struct { PayloadURL string `binding:"Required;ValidUrl"` ContentType int `binding:"Required"` - Secret string WebhookForm } @@ -476,22 +472,6 @@ func (i *IssueLockForm) Validate(req *http.Request, errs binding.Errors) binding return 
middleware.Validate(errs, ctx.Data, i, ctx.Locale) } -// HasValidReason checks to make sure that the reason submitted in -// the form matches any of the values in the config -func (i IssueLockForm) HasValidReason() bool { - if strings.TrimSpace(i.Reason) == "" { - return true - } - - for _, v := range setting.Repository.Issue.LockReasons { - if v == i.Reason { - return true - } - } - - return false -} - // CreateProjectForm form for creating a project type CreateProjectForm struct { Title string `binding:"Required;MaxSize(100)"` @@ -522,12 +502,13 @@ func (f *CreateMilestoneForm) Validate(req *http.Request, errs binding.Errors) b // CreateLabelForm form for creating label type CreateLabelForm struct { - ID int64 - Title string `binding:"Required;MaxSize(50)" locale:"repo.issues.label_title"` - Exclusive bool `form:"exclusive"` - IsArchived bool `form:"is_archived"` - Description string `binding:"MaxSize(200)" locale:"repo.issues.label_description"` - Color string `binding:"Required;MaxSize(7)" locale:"repo.issues.label_color"` + ID int64 + Title string `binding:"Required;MaxSize(50)" locale:"repo.issues.label_title"` + Exclusive bool `form:"exclusive"` + ExclusiveOrder int `form:"exclusive_order"` + IsArchived bool `form:"is_archived"` + Description string `binding:"MaxSize(200)" locale:"repo.issues.label_description"` + Color string `binding:"Required;MaxSize(7)" locale:"repo.issues.label_color"` } // Validate validates the fields @@ -698,129 +679,6 @@ func (f *NewWikiForm) Validate(req *http.Request, errs binding.Errors) binding.E return middleware.Validate(errs, ctx.Data, f, ctx.Locale) } -// ___________ .___.__ __ -// \_ _____/ __| _/|__|/ |_ -// | __)_ / __ | | \ __\ -// | \/ /_/ | | || | -// /_______ /\____ | |__||__| -// \/ \/ - -// EditRepoFileForm form for changing repository file -type EditRepoFileForm struct { - TreePath string `binding:"Required;MaxSize(500)"` - Content string - CommitSummary string `binding:"MaxSize(100)"` - CommitMessage string - CommitChoice string `binding:"Required;MaxSize(50)"` - NewBranchName string `binding:"GitRefName;MaxSize(100)"` - LastCommit string - Signoff bool - CommitEmail string -} - -// Validate validates the fields -func (f *EditRepoFileForm) Validate(req *http.Request, errs binding.Errors) binding.Errors { - ctx := context.GetValidateContext(req) - return middleware.Validate(errs, ctx.Data, f, ctx.Locale) -} - -// EditPreviewDiffForm form for changing preview diff -type EditPreviewDiffForm struct { - Content string -} - -// Validate validates the fields -func (f *EditPreviewDiffForm) Validate(req *http.Request, errs binding.Errors) binding.Errors { - ctx := context.GetValidateContext(req) - return middleware.Validate(errs, ctx.Data, f, ctx.Locale) -} - -// _________ .__ __________.__ __ -// \_ ___ \| |__ __________________ ___.__. 
\______ \__| ____ | | __ -// / \ \/| | \_/ __ \_ __ \_ __ < | | | ___/ |/ ___\| |/ / -// \ \___| Y \ ___/| | \/| | \/\___ | | | | \ \___| < -// \______ /___| /\___ >__| |__| / ____| |____| |__|\___ >__|_ \ -// \/ \/ \/ \/ \/ \/ - -// CherryPickForm form for changing repository file -type CherryPickForm struct { - CommitSummary string `binding:"MaxSize(100)"` - CommitMessage string - CommitChoice string `binding:"Required;MaxSize(50)"` - NewBranchName string `binding:"GitRefName;MaxSize(100)"` - LastCommit string - Revert bool - Signoff bool - CommitEmail string -} - -// Validate validates the fields -func (f *CherryPickForm) Validate(req *http.Request, errs binding.Errors) binding.Errors { - ctx := context.GetValidateContext(req) - return middleware.Validate(errs, ctx.Data, f, ctx.Locale) -} - -// ____ ___ .__ .___ -// | | \______ | | _________ __| _/ -// | | /\____ \| | / _ \__ \ / __ | -// | | / | |_> > |_( <_> ) __ \_/ /_/ | -// |______/ | __/|____/\____(____ /\____ | -// |__| \/ \/ -// - -// UploadRepoFileForm form for uploading repository file -type UploadRepoFileForm struct { - TreePath string `binding:"MaxSize(500)"` - CommitSummary string `binding:"MaxSize(100)"` - CommitMessage string - CommitChoice string `binding:"Required;MaxSize(50)"` - NewBranchName string `binding:"GitRefName;MaxSize(100)"` - Files []string - Signoff bool - CommitEmail string -} - -// Validate validates the fields -func (f *UploadRepoFileForm) Validate(req *http.Request, errs binding.Errors) binding.Errors { - ctx := context.GetValidateContext(req) - return middleware.Validate(errs, ctx.Data, f, ctx.Locale) -} - -// RemoveUploadFileForm form for removing uploaded file -type RemoveUploadFileForm struct { - File string `binding:"Required;MaxSize(50)"` -} - -// Validate validates the fields -func (f *RemoveUploadFileForm) Validate(req *http.Request, errs binding.Errors) binding.Errors { - ctx := context.GetValidateContext(req) - return middleware.Validate(errs, ctx.Data, f, ctx.Locale) -} - -// ________ .__ __ -// \______ \ ____ | | _____/ |_ ____ -// | | \_/ __ \| | _/ __ \ __\/ __ \ -// | ` \ ___/| |_\ ___/| | \ ___/ -// /_______ /\___ >____/\___ >__| \___ > -// \/ \/ \/ \/ - -// DeleteRepoFileForm form for deleting repository file -type DeleteRepoFileForm struct { - CommitSummary string `binding:"MaxSize(100)"` - CommitMessage string - CommitChoice string `binding:"Required;MaxSize(50)"` - NewBranchName string `binding:"GitRefName;MaxSize(100)"` - LastCommit string - Signoff bool - CommitEmail string -} - -// Validate validates the fields -func (f *DeleteRepoFileForm) Validate(req *http.Request, errs binding.Errors) binding.Errors { - ctx := context.GetValidateContext(req) - return middleware.Validate(errs, ctx.Data, f, ctx.Locale) -} - // ___________.__ ___________ __ // \__ ___/|__| _____ ____ \__ ___/___________ ____ | | __ ___________ // | | | |/ \_/ __ \ | | \_ __ \__ \ _/ ___\| |/ // __ \_ __ \ diff --git a/services/forms/repo_form_editor.go b/services/forms/repo_form_editor.go new file mode 100644 index 0000000000..3ad2eae75d --- /dev/null +++ b/services/forms/repo_form_editor.go @@ -0,0 +1,57 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package forms + +import ( + "net/http" + + "code.gitea.io/gitea/modules/optional" + "code.gitea.io/gitea/modules/web/middleware" + "code.gitea.io/gitea/services/context" + + "gitea.com/go-chi/binding" +) + +type CommitCommonForm struct { + TreePath string `binding:"MaxSize(500)"` + CommitSummary string `binding:"MaxSize(100)"` + CommitMessage string + CommitChoice string `binding:"Required;MaxSize(50)"` + NewBranchName string `binding:"GitRefName;MaxSize(100)"` + LastCommit string + Signoff bool + CommitEmail string +} + +func (f *CommitCommonForm) Validate(req *http.Request, errs binding.Errors) binding.Errors { + ctx := context.GetValidateContext(req) + return middleware.Validate(errs, ctx.Data, f, ctx.Locale) +} + +type CommitCommonFormInterface interface { + GetCommitCommonForm() *CommitCommonForm +} + +func (f *CommitCommonForm) GetCommitCommonForm() *CommitCommonForm { + return f +} + +type EditRepoFileForm struct { + CommitCommonForm + Content optional.Option[string] +} + +type DeleteRepoFileForm struct { + CommitCommonForm +} + +type UploadRepoFileForm struct { + CommitCommonForm + Files []string +} + +type CherryPickForm struct { + CommitCommonForm + Revert bool +} diff --git a/services/forms/repo_form_test.go b/services/forms/repo_form_test.go index 2c5a8e2c0f..a0c67fe0f8 100644 --- a/services/forms/repo_form_test.go +++ b/services/forms/repo_form_test.go @@ -6,8 +6,6 @@ package forms import ( "testing" - "code.gitea.io/gitea/modules/setting" - "github.com/stretchr/testify/assert" ) @@ -39,26 +37,3 @@ func TestSubmitReviewForm_IsEmpty(t *testing.T) { assert.Equal(t, v.expected, v.form.HasEmptyContent()) } } - -func TestIssueLock_HasValidReason(t *testing.T) { - // Init settings - _ = setting.Repository - - cases := []struct { - form IssueLockForm - expected bool - }{ - {IssueLockForm{""}, true}, // an empty reason is accepted - {IssueLockForm{"Off-topic"}, true}, - {IssueLockForm{"Too heated"}, true}, - {IssueLockForm{"Spam"}, true}, - {IssueLockForm{"Resolved"}, true}, - - {IssueLockForm{"ZZZZ"}, false}, - {IssueLockForm{"I want to lock this issue"}, false}, - } - - for _, v := range cases { - assert.Equal(t, v.expected, v.form.HasValidReason()) - } -} diff --git a/services/forms/user_form_test.go b/services/forms/user_form_test.go index b4120f20ed..09e9ec0f65 100644 --- a/services/forms/user_form_test.go +++ b/services/forms/user_form_test.go @@ -7,6 +7,7 @@ import ( "testing" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/test" "github.com/gobwas/glob" "github.com/stretchr/testify/assert" @@ -26,12 +27,7 @@ func TestRegisterForm_IsDomainAllowed_Empty(t *testing.T) { } func TestRegisterForm_IsDomainAllowed_InvalidEmail(t *testing.T) { - oldService := setting.Service - defer func() { - setting.Service = oldService - }() - - setting.Service.EmailDomainAllowList = []glob.Glob{glob.MustCompile("gitea.io")} + defer test.MockVariableValue(&setting.Service.EmailDomainAllowList, []glob.Glob{glob.MustCompile("gitea.io")})() tt := []struct { email string @@ -48,12 +44,7 @@ func TestRegisterForm_IsDomainAllowed_InvalidEmail(t *testing.T) { } func TestRegisterForm_IsDomainAllowed_AllowedEmail(t *testing.T) { - oldService := setting.Service - defer func() { - setting.Service = oldService - }() - - setting.Service.EmailDomainAllowList = []glob.Glob{glob.MustCompile("gitea.io"), glob.MustCompile("*.allow")} + defer test.MockVariableValue(&setting.Service.EmailDomainAllowList, []glob.Glob{glob.MustCompile("gitea.io"), 
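The new repo_form_editor.go consolidates the editor forms around an embedded CommitCommonForm, so handlers that only need the shared commit fields can accept any concrete form through CommitCommonFormInterface. A hedged sketch of such a handler, assuming the forms package shown above; the helper name is hypothetical:

// resolveCommitMessage works for EditRepoFileForm, DeleteRepoFileForm, UploadRepoFileForm
// and CherryPickForm alike, because each embeds CommitCommonForm and therefore
// satisfies CommitCommonFormInterface.
func resolveCommitMessage(f forms.CommitCommonFormInterface, defaultSummary string) string {
	common := f.GetCommitCommonForm()
	if common.CommitSummary != "" {
		return common.CommitSummary + "\n\n" + common.CommitMessage
	}
	return defaultSummary
}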
glob.MustCompile("*.allow")})() tt := []struct { email string @@ -76,13 +67,7 @@ func TestRegisterForm_IsDomainAllowed_AllowedEmail(t *testing.T) { } func TestRegisterForm_IsDomainAllowed_BlockedEmail(t *testing.T) { - oldService := setting.Service - defer func() { - setting.Service = oldService - }() - - setting.Service.EmailDomainAllowList = nil - setting.Service.EmailDomainBlockList = []glob.Glob{glob.MustCompile("gitea.io"), glob.MustCompile("*.block")} + defer test.MockVariableValue(&setting.Service.EmailDomainBlockList, []glob.Glob{glob.MustCompile("gitea.io"), glob.MustCompile("*.block")})() tt := []struct { email string diff --git a/services/git/commit.go b/services/git/commit.go index 8ab8f3d369..e4755ef93d 100644 --- a/services/git/commit.go +++ b/services/git/commit.go @@ -17,7 +17,7 @@ import ( ) // ParseCommitsWithSignature checks if signaute of commits are corresponding to users gpg keys. -func ParseCommitsWithSignature(ctx context.Context, oldCommits []*user_model.UserCommit, repoTrustModel repo_model.TrustModelType, isOwnerMemberCollaborator func(*user_model.User) (bool, error)) ([]*asymkey_model.SignCommit, error) { +func ParseCommitsWithSignature(ctx context.Context, repo *repo_model.Repository, oldCommits []*user_model.UserCommit, repoTrustModel repo_model.TrustModelType) ([]*asymkey_model.SignCommit, error) { newCommits := make([]*asymkey_model.SignCommit, 0, len(oldCommits)) keyMap := map[string]bool{} @@ -34,17 +34,14 @@ func ParseCommitsWithSignature(ctx context.Context, oldCommits []*user_model.Use } for _, c := range oldCommits { - committer, ok := emailUsers[c.Committer.Email] - if !ok && c.Committer != nil { - committer = &user_model.User{ - Name: c.Committer.Name, - Email: c.Committer.Email, - } - } - + committerUser := emailUsers.GetByEmail(c.Committer.Email) // FIXME: why ValidateCommitsWithEmails uses "Author", but ParseCommitsWithSignature uses "Committer"? 
signCommit := &asymkey_model.SignCommit{ UserCommit: c, - Verification: asymkey_service.ParseCommitWithSignatureCommitter(ctx, c.Commit, committer), + Verification: asymkey_service.ParseCommitWithSignatureCommitter(ctx, c.Commit, committerUser), + } + + isOwnerMemberCollaborator := func(user *user_model.User) (bool, error) { + return repo_model.IsOwnerMemberCollaborator(ctx, repo, user.ID) } _ = asymkey_model.CalculateTrustStatus(signCommit.Verification, repoTrustModel, isOwnerMemberCollaborator, &keyMap) @@ -62,11 +59,9 @@ func ConvertFromGitCommit(ctx context.Context, commits []*git.Commit, repo *repo } signedCommits, err := ParseCommitsWithSignature( ctx, + repo, validatedCommits, repo.GetTrustModel(), - func(user *user_model.User) (bool, error) { - return repo_model.IsOwnerMemberCollaborator(ctx, repo, user.ID) - }, ) if err != nil { return nil, err @@ -82,7 +77,7 @@ func ParseCommitsWithStatus(ctx context.Context, oldCommits []*asymkey_model.Sig commit := &git_model.SignCommitWithStatuses{ SignCommit: c, } - statuses, _, err := git_model.GetLatestCommitStatus(ctx, repo.ID, commit.ID.String(), db.ListOptions{}) + statuses, err := git_model.GetLatestCommitStatus(ctx, repo.ID, commit.ID.String(), db.ListOptionsAll) if err != nil { return nil, err } diff --git a/services/gitdiff/csv.go b/services/gitdiff/csv.go index 8db73c56a3..c10ee14490 100644 --- a/services/gitdiff/csv.go +++ b/services/gitdiff/csv.go @@ -134,7 +134,7 @@ func createCsvDiffSingle(reader *csv.Reader, celltype TableDiffCellType) ([]*Tab return nil, err } cells := make([]*TableDiffCell, len(row)) - for j := 0; j < len(row); j++ { + for j := range row { if celltype == TableDiffCellDel { cells[j] = &TableDiffCell{LeftCell: row[j], Type: celltype} } else { @@ -365,11 +365,11 @@ func getColumnMapping(baseCSVReader, headCSVReader *csvReader) ([]int, []int) { } // Loops through the baseRow and see if there is a match in the head row - for i := 0; i < len(baseRow); i++ { + for i := range baseRow { base2HeadColMap[i] = unmappedColumn baseCell, err := getCell(baseRow, i) if err == nil { - for j := 0; j < len(headRow); j++ { + for j := range headRow { if head2BaseColMap[j] == -1 { headCell, err := getCell(headRow, j) if err == nil && baseCell == headCell { @@ -390,7 +390,7 @@ func getColumnMapping(baseCSVReader, headCSVReader *csvReader) ([]int, []int) { // tryMapColumnsByContent tries to map missing columns by the content of the first lines. func tryMapColumnsByContent(baseCSVReader *csvReader, base2HeadColMap []int, headCSVReader *csvReader, head2BaseColMap []int) { - for i := 0; i < len(base2HeadColMap); i++ { + for i := range base2HeadColMap { headStart := 0 for base2HeadColMap[i] == unmappedColumn && headStart < len(head2BaseColMap) { if head2BaseColMap[headStart] == unmappedColumn { @@ -424,7 +424,7 @@ func getCell(row []string, column int) (string, error) { // countUnmappedColumns returns the count of unmapped columns. 
func countUnmappedColumns(mapping []int) int { count := 0 - for i := 0; i < len(mapping); i++ { + for i := range mapping { if mapping[i] == unmappedColumn { count++ } diff --git a/services/gitdiff/git_diff_tree.go b/services/gitdiff/git_diff_tree.go index 035210a31d..ed94bfbfe4 100644 --- a/services/gitdiff/git_diff_tree.go +++ b/services/gitdiff/git_diff_tree.go @@ -6,6 +6,7 @@ package gitdiff import ( "bufio" "context" + "errors" "fmt" "io" "strconv" @@ -71,7 +72,7 @@ func runGitDiffTree(ctx context.Context, gitRepo *git.Repository, useMergeBase b func validateGitDiffTreeArguments(gitRepo *git.Repository, useMergeBase bool, baseSha, headSha string) (shouldUseMergeBase bool, resolvedBaseSha, resolvedHeadSha string, err error) { // if the head is empty its an error if headSha == "" { - return false, "", "", fmt.Errorf("headSha is empty") + return false, "", "", errors.New("headSha is empty") } // if the head commit doesn't exist its and error @@ -207,7 +208,7 @@ func parseGitDiffTreeLine(line string) (*DiffTreeRecord, error) { func statusFromLetter(rawStatus string) (status string, score uint8, err error) { if len(rawStatus) < 1 { - return "", 0, fmt.Errorf("empty status letter") + return "", 0, errors.New("empty status letter") } switch rawStatus[0] { case 'A': @@ -235,7 +236,7 @@ func statusFromLetter(rawStatus string) (status string, score uint8, err error) func tryParseStatusScore(rawStatus string) (uint8, error) { if len(rawStatus) < 2 { - return 0, fmt.Errorf("status score missing") + return 0, errors.New("status score missing") } score, err := strconv.ParseUint(rawStatus[1:], 10, 8) diff --git a/services/gitdiff/gitdiff.go b/services/gitdiff/gitdiff.go index b9781cf8d0..7c99e049d5 100644 --- a/services/gitdiff/gitdiff.go +++ b/services/gitdiff/gitdiff.go @@ -25,6 +25,7 @@ import ( "code.gitea.io/gitea/modules/analyze" "code.gitea.io/gitea/modules/charset" "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/git/attribute" "code.gitea.io/gitea/modules/highlight" "code.gitea.io/gitea/modules/lfs" "code.gitea.io/gitea/modules/log" @@ -178,7 +179,7 @@ func (d *DiffLine) GetExpandDirection() DiffLineExpandDirection { } func getDiffLineSectionInfo(treePath, line string, lastLeftIdx, lastRightIdx int) *DiffLineSectionInfo { - leftLine, leftHunk, rightLine, righHunk := git.ParseDiffHunkString(line) + leftLine, leftHunk, rightLine, rightHunk := git.ParseDiffHunkString(line) return &DiffLineSectionInfo{ Path: treePath, @@ -187,7 +188,7 @@ func getDiffLineSectionInfo(treePath, line string, lastLeftIdx, lastRightIdx int LeftIdx: leftLine, RightIdx: rightLine, LeftHunkSize: leftHunk, - RightHunkSize: righHunk, + RightHunkSize: rightHunk, } } @@ -213,51 +214,6 @@ func (diffSection *DiffSection) GetLine(idx int) *DiffLine { return diffSection.Lines[idx] } -// GetLine gets a specific line by type (add or del) and file line number -// This algorithm is not quite right. 
-// Actually now we have "Match" field, it is always right, so use it instead in new GetLine -func (diffSection *DiffSection) getLineLegacy(lineType DiffLineType, idx int) *DiffLine { //nolint:unused - var ( - difference = 0 - addCount = 0 - delCount = 0 - matchDiffLine *DiffLine - ) - -LOOP: - for _, diffLine := range diffSection.Lines { - switch diffLine.Type { - case DiffLineAdd: - addCount++ - case DiffLineDel: - delCount++ - default: - if matchDiffLine != nil { - break LOOP - } - difference = diffLine.RightIdx - diffLine.LeftIdx - addCount = 0 - delCount = 0 - } - - switch lineType { - case DiffLineDel: - if diffLine.RightIdx == 0 && diffLine.LeftIdx == idx-difference { - matchDiffLine = diffLine - } - case DiffLineAdd: - if diffLine.LeftIdx == 0 && diffLine.RightIdx == idx+difference { - matchDiffLine = diffLine - } - } - } - - if addCount == delCount { - return matchDiffLine - } - return nil -} - func defaultDiffMatchPatch() *diffmatchpatch.DiffMatchPatch { dmp := diffmatchpatch.New() dmp.DiffEditCost = 100 @@ -334,7 +290,7 @@ func (diffSection *DiffSection) GetComputedInlineDiffFor(diffLine *DiffLine, loc // try to find equivalent diff line. ignore, otherwise switch diffLine.Type { case DiffLineSection: - return getLineContent(diffLine.Content[1:], locale) + return getLineContent(diffLine.Content, locale) case DiffLineAdd: compareDiffLine := diffSection.GetLine(diffLine.Match) return diffSection.getDiffLineForRender(DiffLineAdd, compareDiffLine, diffLine, locale) @@ -539,10 +495,7 @@ func ParsePatch(ctx context.Context, maxLines, maxLineCharacters, maxFiles int, // OK let's set a reasonable buffer size. // This should be at least the size of maxLineCharacters or 4096 whichever is larger. - readerSize := maxLineCharacters - if readerSize < 4096 { - readerSize = 4096 - } + readerSize := max(maxLineCharacters, 4096) input := bufio.NewReaderSize(reader, readerSize) line, err := input.ReadString('\n') @@ -903,6 +856,7 @@ func parseHunks(ctx context.Context, curFile *DiffFile, maxLines, maxLineCharact lastLeftIdx = -1 curFile.Sections = append(curFile.Sections, curSection) + // FIXME: the "-1" can't be right, these "line idx" are all 1-based, maybe there are other bugs that covers this bug. lineSectionInfo := getDiffLineSectionInfo(curFile.Name, line, leftLine-1, rightLine-1) diffLine := &DiffLine{ Type: DiffLineSection, @@ -1231,36 +1185,35 @@ func GetDiffForAPI(ctx context.Context, gitRepo *git.Repository, opts *DiffOptio return diff, err } -func GetDiffForRender(ctx context.Context, gitRepo *git.Repository, opts *DiffOptions, files ...string) (*Diff, error) { +func GetDiffForRender(ctx context.Context, repoLink string, gitRepo *git.Repository, opts *DiffOptions, files ...string) (*Diff, error) { diff, beforeCommit, afterCommit, err := getDiffBasic(ctx, gitRepo, opts, files...) 
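The buffer sizing later in this hunk (readerSize := max(maxLineCharacters, 4096)) uses the built-in max introduced in Go 1.21 instead of an if-statement. A tiny example of the min/max builtins on ordered types:

package main

import "fmt"

func main() {
	maxLineCharacters := 1024
	readerSize := max(maxLineCharacters, 4096) // built-in max, Go 1.21+
	fmt.Println(readerSize, min(3.5, 2.0))     // 4096 2
}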
if err != nil { return nil, err } - checker, deferrable := gitRepo.CheckAttributeReader(opts.AfterCommitID) - defer deferrable() + checker, err := attribute.NewBatchChecker(gitRepo, opts.AfterCommitID, []string{attribute.LinguistVendored, attribute.LinguistGenerated, attribute.LinguistLanguage, attribute.GitlabLanguage, attribute.Diff}) + if err != nil { + return nil, err + } + defer checker.Close() for _, diffFile := range diff.Files { isVendored := optional.None[bool]() isGenerated := optional.None[bool]() - if checker != nil { - attrs, err := checker.CheckPath(diffFile.Name) - if err == nil { - isVendored = git.AttributeToBool(attrs, git.AttributeLinguistVendored) - isGenerated = git.AttributeToBool(attrs, git.AttributeLinguistGenerated) - - language := git.TryReadLanguageAttribute(attrs) - if language.Has() { - diffFile.Language = language.Value() - } - } else { - checker = nil // CheckPath fails, it's not impossible to "check" anymore + attrDiff := optional.None[string]() + attrs, err := checker.CheckPath(diffFile.Name) + if err == nil { + isVendored, isGenerated = attrs.GetVendored(), attrs.GetGenerated() + language := attrs.GetLanguage() + if language.Has() { + diffFile.Language = language.Value() } + attrDiff = attrs.Get(attribute.Diff).ToString() } // Populate Submodule URLs if diffFile.SubmoduleDiffInfo != nil { - diffFile.SubmoduleDiffInfo.PopulateURL(diffFile, beforeCommit, afterCommit) + diffFile.SubmoduleDiffInfo.PopulateURL(repoLink, diffFile, beforeCommit, afterCommit) } if !isVendored.Has() { @@ -1277,7 +1230,8 @@ func GetDiffForRender(ctx context.Context, gitRepo *git.Repository, opts *DiffOp diffFile.Sections = append(diffFile.Sections, tailSection) } - if !setting.Git.DisableDiffHighlight { + shouldFullFileHighlight := !setting.Git.DisableDiffHighlight && attrDiff.Value() == "" + if shouldFullFileHighlight { if limitedContent.LeftContent != nil && limitedContent.LeftContent.buf.Len() < MaxDiffHighlightEntireFileSize { diffFile.highlightedLeftLines = highlightCodeLines(diffFile, true /* left */, limitedContent.LeftContent.buf.String()) } @@ -1339,10 +1293,13 @@ func GetDiffShortStat(gitRepo *git.Repository, beforeCommitID, afterCommitID str // SyncUserSpecificDiff inserts user-specific data such as which files the user has already viewed on the given diff // Additionally, the database is updated asynchronously if files have changed since the last review -func SyncUserSpecificDiff(ctx context.Context, userID int64, pull *issues_model.PullRequest, gitRepo *git.Repository, diff *Diff, opts *DiffOptions, files ...string) error { +func SyncUserSpecificDiff(ctx context.Context, userID int64, pull *issues_model.PullRequest, gitRepo *git.Repository, diff *Diff, opts *DiffOptions) (*pull_model.ReviewState, error) { review, err := pull_model.GetNewestReviewState(ctx, userID, pull.ID) - if err != nil || review == nil || review.UpdatedFiles == nil { - return err + if err != nil { + return nil, err + } + if review == nil || len(review.UpdatedFiles) == 0 { + return review, nil } latestCommit := opts.AfterCommitID @@ -1350,13 +1307,13 @@ func SyncUserSpecificDiff(ctx context.Context, userID int64, pull *issues_model. latestCommit = pull.HeadBranch // opts.AfterCommitID is preferred because it handles PRs from forks correctly and the branch name doesn't } - changedFiles, err := gitRepo.GetFilesChangedBetween(review.CommitSHA, latestCommit) + changedFiles, errIgnored := gitRepo.GetFilesChangedBetween(review.CommitSHA, latestCommit) // There are way too many possible errors. 
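// Hedged sketch, not part of the patch: how the attribute.NewBatchChecker API
// introduced above is driven. It relies only on calls visible in this hunk
// (NewBatchChecker, Close, CheckPath, GetLanguage); the package and function
// names of the sketch itself are assumptions.
package sketch

import (
	"fmt"

	"code.gitea.io/gitea/modules/git"
	"code.gitea.io/gitea/modules/git/attribute"
)

func printFileLanguage(gitRepo *git.Repository, commitID, path string) error {
	// one checker instance serves every path checked for the given commit
	checker, err := attribute.NewBatchChecker(gitRepo, commitID, []string{attribute.LinguistLanguage})
	if err != nil {
		return err
	}
	defer checker.Close()

	attrs, err := checker.CheckPath(path)
	if err != nil {
		return err // GetDiffForRender above instead falls back to "no attribute"
	}
	if language := attrs.GetLanguage(); language.Has() {
		fmt.Println(path, "->", language.Value())
	}
	return nil
}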
// Examples are various git errors such as the commit the review was based on was gc'ed and hence doesn't exist anymore as well as unrecoverable errors where we should serve a 500 response // Due to the current architecture and physical limitation of needing to compare explicit error messages, we can only choose one approach without the code getting ugly // For SOME of the errors such as the gc'ed commit, it would be best to mark all files as changed // But as that does not work for all potential errors, we simply mark all files as unchanged and drop the error which always works, even if not as good as possible - if err != nil { + if errIgnored != nil { log.Error("Could not get changed files between %s and %s for pull request %d in repo with path %s. Assuming no changes. Error: %w", review.CommitSHA, latestCommit, pull.Index, gitRepo.Path, err) } @@ -1395,11 +1352,11 @@ outer: err := pull_model.UpdateReviewState(ctx, review.UserID, review.PullID, review.CommitSHA, filesChangedSinceLastDiff) if err != nil { log.Warn("Could not update review for user %d, pull %d, commit %s and the changed files %v: %v", review.UserID, review.PullID, review.CommitSHA, filesChangedSinceLastDiff, err) - return err + return nil, err } } - return nil + return review, nil } // CommentAsDiff returns c.Patch as *Diff diff --git a/services/gitdiff/gitdiff_test.go b/services/gitdiff/gitdiff_test.go index 71394b1915..b84530043a 100644 --- a/services/gitdiff/gitdiff_test.go +++ b/services/gitdiff/gitdiff_test.go @@ -416,7 +416,7 @@ index 0000000..6bb8f39 ` diffBuilder.WriteString(diff) - for i := 0; i < 35; i++ { + for i := range 35 { diffBuilder.WriteString("+line" + strconv.Itoa(i) + "\n") } diff = diffBuilder.String() @@ -453,11 +453,11 @@ index 0000000..6bb8f39 diffBuilder.Reset() diffBuilder.WriteString(diff) - for i := 0; i < 33; i++ { + for i := range 33 { diffBuilder.WriteString("+line" + strconv.Itoa(i) + "\n") } diffBuilder.WriteString("+line33") - for i := 0; i < 512; i++ { + for range 512 { diffBuilder.WriteString("0123456789ABCDEF") } diffBuilder.WriteByte('\n') diff --git a/services/gitdiff/highlightdiff.go b/services/gitdiff/highlightdiff.go index 6e18651d83..e8be063e69 100644 --- a/services/gitdiff/highlightdiff.go +++ b/services/gitdiff/highlightdiff.go @@ -14,13 +14,14 @@ import ( // token is a html tag or entity, eg: "<span ...>", "</span>", "<" func extractHTMLToken(s string) (before, token, after string, valid bool) { for pos1 := 0; pos1 < len(s); pos1++ { - if s[pos1] == '<' { + switch s[pos1] { + case '<': pos2 := strings.IndexByte(s[pos1:], '>') if pos2 == -1 { return "", "", s, false } return s[:pos1], s[pos1 : pos1+pos2+1], s[pos1+pos2+1:], true - } else if s[pos1] == '&' { + case '&': pos2 := strings.IndexByte(s[pos1:], ';') if pos2 == -1 { return "", "", s, false diff --git a/services/gitdiff/highlightdiff_test.go b/services/gitdiff/highlightdiff_test.go index c2584dc622..aebe38ae7c 100644 --- a/services/gitdiff/highlightdiff_test.go +++ b/services/gitdiff/highlightdiff_test.go @@ -38,15 +38,15 @@ func TestDiffWithHighlight(t *testing.T) { hcd.placeholderTokenMap['O'], hcd.placeholderTokenMap['C'] = "<span>", "</span>" assert.Equal(t, "<span></span>", string(hcd.recoverOneDiff("OC"))) assert.Equal(t, "<span></span>", string(hcd.recoverOneDiff("O"))) - assert.Equal(t, "", string(hcd.recoverOneDiff("C"))) + assert.Empty(t, string(hcd.recoverOneDiff("C"))) }) } func TestDiffWithHighlightPlaceholder(t *testing.T) { hcd := newHighlightCodeDiff() output := hcd.diffLineWithHighlight(DiffLineDel, 
"a='\U00100000'", "a='\U0010FFFD''") - assert.Equal(t, "", hcd.placeholderTokenMap[0x00100000]) - assert.Equal(t, "", hcd.placeholderTokenMap[0x0010FFFD]) + assert.Empty(t, hcd.placeholderTokenMap[0x00100000]) + assert.Empty(t, hcd.placeholderTokenMap[0x0010FFFD]) expected := fmt.Sprintf(`a='<span class="removed-code">%s</span>'`, "\U00100000") assert.Equal(t, expected, string(output)) diff --git a/services/gitdiff/submodule.go b/services/gitdiff/submodule.go index 02ca666544..4347743e3d 100644 --- a/services/gitdiff/submodule.go +++ b/services/gitdiff/submodule.go @@ -20,7 +20,7 @@ type SubmoduleDiffInfo struct { PreviousRefID string } -func (si *SubmoduleDiffInfo) PopulateURL(diffFile *DiffFile, leftCommit, rightCommit *git.Commit) { +func (si *SubmoduleDiffInfo) PopulateURL(repoLink string, diffFile *DiffFile, leftCommit, rightCommit *git.Commit) { si.SubmoduleName = diffFile.Name submoduleCommit := rightCommit // If the submodule is added or updated, check at the right commit if diffFile.IsDeleted { @@ -30,18 +30,19 @@ func (si *SubmoduleDiffInfo) PopulateURL(diffFile *DiffFile, leftCommit, rightCo return } - submodule, err := submoduleCommit.GetSubModule(diffFile.GetDiffFileName()) + submoduleFullPath := diffFile.GetDiffFileName() + submodule, err := submoduleCommit.GetSubModule(submoduleFullPath) if err != nil { - log.Error("Unable to PopulateURL for submodule %q: GetSubModule: %v", diffFile.GetDiffFileName(), err) + log.Error("Unable to PopulateURL for submodule %q: GetSubModule: %v", submoduleFullPath, err) return // ignore the error, do not cause 500 errors for end users } if submodule != nil { - si.SubmoduleFile = git.NewCommitSubmoduleFile(submodule.URL, submoduleCommit.ID.String()) + si.SubmoduleFile = git.NewCommitSubmoduleFile(repoLink, submoduleFullPath, submodule.URL, submoduleCommit.ID.String()) } } func (si *SubmoduleDiffInfo) CommitRefIDLinkHTML(ctx context.Context, commitID string) template.HTML { - webLink := si.SubmoduleFile.SubmoduleWebLink(ctx, commitID) + webLink := si.SubmoduleFile.SubmoduleWebLinkTree(ctx, commitID) if webLink == nil { return htmlutil.HTMLFormat("%s", base.ShortSha(commitID)) } @@ -49,7 +50,7 @@ func (si *SubmoduleDiffInfo) CommitRefIDLinkHTML(ctx context.Context, commitID s } func (si *SubmoduleDiffInfo) CompareRefIDLinkHTML(ctx context.Context) template.HTML { - webLink := si.SubmoduleFile.SubmoduleWebLink(ctx, si.PreviousRefID, si.NewRefID) + webLink := si.SubmoduleFile.SubmoduleWebLinkCompare(ctx, si.PreviousRefID, si.NewRefID) if webLink == nil { return htmlutil.HTMLFormat("%s...%s", base.ShortSha(si.PreviousRefID), base.ShortSha(si.NewRefID)) } @@ -57,7 +58,7 @@ func (si *SubmoduleDiffInfo) CompareRefIDLinkHTML(ctx context.Context) template. 
} func (si *SubmoduleDiffInfo) SubmoduleRepoLinkHTML(ctx context.Context) template.HTML { - webLink := si.SubmoduleFile.SubmoduleWebLink(ctx) + webLink := si.SubmoduleFile.SubmoduleWebLinkTree(ctx) if webLink == nil { return htmlutil.HTMLFormat("%s", si.SubmoduleName) } diff --git a/services/gitdiff/submodule_test.go b/services/gitdiff/submodule_test.go index 3047b23103..c793969f0e 100644 --- a/services/gitdiff/submodule_test.go +++ b/services/gitdiff/submodule_test.go @@ -203,7 +203,6 @@ index 0000000..68972a9 } for _, testcase := range tests { - testcase := testcase t.Run(testcase.name, func(t *testing.T) { diff, err := ParsePatch(db.DefaultContext, setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, strings.NewReader(testcase.gitdiff), "") assert.NoError(t, err) @@ -228,7 +227,7 @@ func TestSubmoduleInfo(t *testing.T) { assert.EqualValues(t, "aaaa...bbbb", sdi.CompareRefIDLinkHTML(ctx)) assert.EqualValues(t, "name", sdi.SubmoduleRepoLinkHTML(ctx)) - sdi.SubmoduleFile = git.NewCommitSubmoduleFile("https://github.com/owner/repo", "1234") + sdi.SubmoduleFile = git.NewCommitSubmoduleFile("/any/repo-link", "fullpath", "https://github.com/owner/repo", "1234") assert.EqualValues(t, `<a href="https://github.com/owner/repo/tree/1111">1111</a>`, sdi.CommitRefIDLinkHTML(ctx, "1111")) assert.EqualValues(t, `<a href="https://github.com/owner/repo/compare/aaaa...bbbb">aaaa...bbbb</a>`, sdi.CompareRefIDLinkHTML(ctx)) assert.EqualValues(t, `<a href="https://github.com/owner/repo">name</a>`, sdi.SubmoduleRepoLinkHTML(ctx)) diff --git a/services/issue/assignee.go b/services/issue/assignee.go index c7e2495568..ba9c91e0ed 100644 --- a/services/issue/assignee.go +++ b/services/issue/assignee.go @@ -54,6 +54,8 @@ func ToggleAssigneeWithNotify(ctx context.Context, issue *issues_model.Issue, do if err != nil { return false, nil, err } + issue.AssigneeID = assigneeID + issue.Assignee = assignee notify_service.IssueChangeAssignee(ctx, doer, issue, assignee, removed, comment) @@ -302,7 +304,7 @@ func CanDoerChangeReviewRequests(ctx context.Context, doer *user_model.User, rep // If the repo's owner is an organization, members of teams with read permission on pull requests can change reviewers if repo.Owner.IsOrganization() { - teams, err := organization.GetTeamsWithAccessToRepo(ctx, repo.OwnerID, repo.ID, perm.AccessModeRead) + teams, err := organization.GetTeamsWithAccessToAnyRepoUnit(ctx, repo.OwnerID, repo.ID, perm.AccessModeRead, unit.TypePullRequests) if err != nil { log.Error("GetTeamsWithAccessToRepo: %v", err) return false diff --git a/services/issue/comments.go b/services/issue/comments.go index 46f92f7cd2..9442701029 100644 --- a/services/issue/comments.go +++ b/services/issue/comments.go @@ -5,6 +5,7 @@ package issue import ( "context" + "errors" "fmt" "code.gitea.io/gitea/models/db" @@ -22,7 +23,7 @@ import ( // CreateRefComment creates a commit reference comment to issue. 
func CreateRefComment(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, issue *issues_model.Issue, content, commitSHA string) error { if len(commitSHA) == 0 { - return fmt.Errorf("cannot create reference with empty commit SHA") + return errors.New("cannot create reference with empty commit SHA") } if user_model.IsUserBlockedBy(ctx, doer, issue.PosterID, repo.OwnerID) { @@ -79,6 +80,12 @@ func CreateIssueComment(ctx context.Context, doer *user_model.User, repo *repo_m return nil, err } + // reload issue to ensure it has the latest data, especially the number of comments + issue, err = issues_model.GetIssueByID(ctx, issue.ID) + if err != nil { + return nil, err + } + notify_service.CreateIssueComment(ctx, doer, repo, issue, comment, mentions) return comment, nil diff --git a/services/issue/issue.go b/services/issue/issue.go index 455a1ec297..f03be3e18f 100644 --- a/services/issue/issue.go +++ b/services/issue/issue.go @@ -190,13 +190,17 @@ func DeleteIssue(ctx context.Context, doer *user_model.User, gitRepo *git.Reposi } // delete entries in database - if err := deleteIssue(ctx, issue); err != nil { + attachmentPaths, err := deleteIssue(ctx, issue) + if err != nil { return err } + for _, attachmentPath := range attachmentPaths { + system_model.RemoveStorageWithNotice(ctx, storage.Attachments, "Delete issue attachment", attachmentPath) + } // delete pull request related git data if issue.IsPull && gitRepo != nil { - if err := gitRepo.RemoveReference(fmt.Sprintf("%s%d/head", git.PullPrefix, issue.PullRequest.Index)); err != nil { + if err := gitRepo.RemoveReference(issue.PullRequest.GetGitHeadRefName()); err != nil { return err } } @@ -256,45 +260,45 @@ func GetRefEndNamesAndURLs(issues []*issues_model.Issue, repoLink string) (map[i } // deleteIssue deletes the issue -func deleteIssue(ctx context.Context, issue *issues_model.Issue) error { +func deleteIssue(ctx context.Context, issue *issues_model.Issue) ([]string, error) { ctx, committer, err := db.TxContext(ctx) if err != nil { - return err + return nil, err } defer committer.Close() - e := db.GetEngine(ctx) - if _, err := e.ID(issue.ID).NoAutoCondition().Delete(issue); err != nil { - return err + if _, err := db.GetEngine(ctx).ID(issue.ID).NoAutoCondition().Delete(issue); err != nil { + return nil, err } // update the total issue numbers if err := repo_model.UpdateRepoIssueNumbers(ctx, issue.RepoID, issue.IsPull, false); err != nil { - return err + return nil, err } // if the issue is closed, update the closed issue numbers if issue.IsClosed { if err := repo_model.UpdateRepoIssueNumbers(ctx, issue.RepoID, issue.IsPull, true); err != nil { - return err + return nil, err } } if err := issues_model.UpdateMilestoneCounters(ctx, issue.MilestoneID); err != nil { - return fmt.Errorf("error updating counters for milestone id %d: %w", + return nil, fmt.Errorf("error updating counters for milestone id %d: %w", issue.MilestoneID, err) } if err := activities_model.DeleteIssueActions(ctx, issue.RepoID, issue.ID, issue.Index); err != nil { - return err + return nil, err } // find attachments related to this issue and remove them - if err := issue.LoadAttributes(ctx); err != nil { - return err + if err := issue.LoadAttachments(ctx); err != nil { + return nil, err } + var attachmentPaths []string for i := range issue.Attachments { - system_model.RemoveStorageWithNotice(ctx, storage.Attachments, "Delete issue attachment", issue.Attachments[i].RelativePath()) + attachmentPaths = append(attachmentPaths, 
issue.Attachments[i].RelativePath()) } // delete all database data still assigned to this issue @@ -318,8 +322,68 @@ func deleteIssue(ctx context.Context, issue *issues_model.Issue) error { &issues_model.Comment{DependentIssueID: issue.ID}, &issues_model.IssuePin{IssueID: issue.ID}, ); err != nil { + return nil, err + } + + if err := committer.Commit(); err != nil { + return nil, err + } + return attachmentPaths, nil +} + +// DeleteOrphanedIssues delete issues without a repo +func DeleteOrphanedIssues(ctx context.Context) error { + var attachmentPaths []string + err := db.WithTx(ctx, func(ctx context.Context) error { + repoIDs, err := issues_model.GetOrphanedIssueRepoIDs(ctx) + if err != nil { + return err + } + for i := range repoIDs { + paths, err := DeleteIssuesByRepoID(ctx, repoIDs[i]) + if err != nil { + return err + } + attachmentPaths = append(attachmentPaths, paths...) + } + return nil + }) + if err != nil { return err } - return committer.Commit() + // Remove issue attachment files. + for i := range attachmentPaths { + system_model.RemoveStorageWithNotice(ctx, storage.Attachments, "Delete issue attachment", attachmentPaths[i]) + } + return nil +} + +// DeleteIssuesByRepoID deletes issues by repositories id +func DeleteIssuesByRepoID(ctx context.Context, repoID int64) (attachmentPaths []string, err error) { + for { + issues := make([]*issues_model.Issue, 0, db.DefaultMaxInSize) + if err := db.GetEngine(ctx). + Where("repo_id = ?", repoID). + OrderBy("id"). + Limit(db.DefaultMaxInSize). + Find(&issues); err != nil { + return nil, err + } + + if len(issues) == 0 { + break + } + + for _, issue := range issues { + issueAttachPaths, err := deleteIssue(ctx, issue) + if err != nil { + return nil, fmt.Errorf("deleteIssue [issue_id: %d]: %w", issue.ID, err) + } + + attachmentPaths = append(attachmentPaths, issueAttachPaths...) 
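// Hedged sketch, not part of the patch: the pattern the deleteIssue refactor
// around here follows. Rows are removed inside one transaction while storage
// paths are only collected; the files are removed after the commit succeeds,
// so a rolled-back delete never loses attachment files. The function name is
// illustrative; db.WithTx, RemoveStorageWithNotice and storage.Attachments are
// the helpers the patch itself uses.
package sketch

import (
	"context"

	"code.gitea.io/gitea/models/db"
	system_model "code.gitea.io/gitea/models/system"
	"code.gitea.io/gitea/modules/storage"
)

func deleteWithDeferredCleanup(ctx context.Context) error {
	var attachmentPaths []string
	if err := db.WithTx(ctx, func(ctx context.Context) error {
		// delete database rows here; append each attachment's RelativePath()
		// to attachmentPaths instead of removing the file immediately
		return nil
	}); err != nil {
		return err
	}
	for _, p := range attachmentPaths { // only runs after a successful commit
		system_model.RemoveStorageWithNotice(ctx, storage.Attachments, "Delete issue attachment", p)
	}
	return nil
}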
+ } + } + + return attachmentPaths, err } diff --git a/services/issue/issue_test.go b/services/issue/issue_test.go index 8806cec0e7..bad0d65d1e 100644 --- a/services/issue/issue_test.go +++ b/services/issue/issue_test.go @@ -24,8 +24,8 @@ func TestGetRefEndNamesAndURLs(t *testing.T) { repoLink := "/foo/bar" endNames, urls := GetRefEndNamesAndURLs(issues, repoLink) - assert.EqualValues(t, map[int64]string{1: "branch1", 2: "tag1", 3: "c0ffee"}, endNames) - assert.EqualValues(t, map[int64]string{ + assert.Equal(t, map[int64]string{1: "branch1", 2: "tag1", 3: "c0ffee"}, endNames) + assert.Equal(t, map[int64]string{ 1: repoLink + "/src/branch/branch1", 2: repoLink + "/src/tag/tag1", 3: repoLink + "/src/commit/c0ffee", @@ -44,7 +44,7 @@ func TestIssue_DeleteIssue(t *testing.T) { ID: issueIDs[2], } - err = deleteIssue(db.DefaultContext, issue) + _, err = deleteIssue(db.DefaultContext, issue) assert.NoError(t, err) issueIDs, err = issues_model.GetIssueIDsByRepoID(db.DefaultContext, 1) assert.NoError(t, err) @@ -55,7 +55,7 @@ func TestIssue_DeleteIssue(t *testing.T) { assert.NoError(t, err) issue, err = issues_model.GetIssueByID(db.DefaultContext, 4) assert.NoError(t, err) - err = deleteIssue(db.DefaultContext, issue) + _, err = deleteIssue(db.DefaultContext, issue) assert.NoError(t, err) assert.Len(t, attachments, 2) for i := range attachments { @@ -78,7 +78,7 @@ func TestIssue_DeleteIssue(t *testing.T) { assert.NoError(t, err) assert.False(t, left) - err = deleteIssue(db.DefaultContext, issue2) + _, err = deleteIssue(db.DefaultContext, issue2) assert.NoError(t, err) left, err = issues_model.IssueNoDependenciesLeft(db.DefaultContext, issue1) assert.NoError(t, err) diff --git a/services/issue/label.go b/services/issue/label.go index 6b8070d8aa..e30983df37 100644 --- a/services/issue/label.go +++ b/services/issue/label.go @@ -46,32 +46,24 @@ func AddLabels(ctx context.Context, issue *issues_model.Issue, doer *user_model. // RemoveLabel removes a label from issue by given ID. 
func RemoveLabel(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, label *issues_model.Label) error { - dbCtx, committer, err := db.TxContext(ctx) - if err != nil { - return err - } - defer committer.Close() - - if err := issue.LoadRepo(dbCtx); err != nil { - return err - } - - perm, err := access_model.GetUserRepoPermission(dbCtx, issue.Repo, doer) - if err != nil { - return err - } - if !perm.CanWriteIssuesOrPulls(issue.IsPull) { - if label.OrgID > 0 { - return issues_model.ErrOrgLabelNotExist{} + if err := db.WithTx(ctx, func(ctx context.Context) error { + if err := issue.LoadRepo(ctx); err != nil { + return err } - return issues_model.ErrRepoLabelNotExist{} - } - if err := issues_model.DeleteIssueLabel(dbCtx, issue, label, doer); err != nil { - return err - } + perm, err := access_model.GetUserRepoPermission(ctx, issue.Repo, doer) + if err != nil { + return err + } + if !perm.CanWriteIssuesOrPulls(issue.IsPull) { + if label.OrgID > 0 { + return issues_model.ErrOrgLabelNotExist{} + } + return issues_model.ErrRepoLabelNotExist{} + } - if err := committer.Commit(); err != nil { + return issues_model.DeleteIssueLabel(ctx, issue, label, doer) + }); err != nil { return err } diff --git a/services/issue/milestone.go b/services/issue/milestone.go index beb6f131a9..05aefad752 100644 --- a/services/issue/milestone.go +++ b/services/issue/milestone.go @@ -5,6 +5,7 @@ package issue import ( "context" + "errors" "fmt" "code.gitea.io/gitea/models/db" @@ -21,7 +22,7 @@ func changeMilestoneAssign(ctx context.Context, doer *user_model.User, issue *is return fmt.Errorf("HasMilestoneByRepoID: %w", err) } if !has { - return fmt.Errorf("HasMilestoneByRepoID: issue doesn't exist") + return errors.New("HasMilestoneByRepoID: issue doesn't exist") } } @@ -68,21 +69,12 @@ func changeMilestoneAssign(ctx context.Context, doer *user_model.User, issue *is // ChangeMilestoneAssign changes assignment of milestone for issue. 
func ChangeMilestoneAssign(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, oldMilestoneID int64) (err error) { - dbCtx, committer, err := db.TxContext(ctx) - if err != nil { + if err := db.WithTx(ctx, func(dbCtx context.Context) error { + return changeMilestoneAssign(dbCtx, doer, issue, oldMilestoneID) + }); err != nil { return err } - defer committer.Close() - - if err = changeMilestoneAssign(dbCtx, doer, issue, oldMilestoneID); err != nil { - return err - } - - if err = committer.Commit(); err != nil { - return fmt.Errorf("Commit: %w", err) - } notify_service.IssueChangeMilestone(ctx, doer, issue, oldMilestoneID) - return nil } diff --git a/services/issue/pull.go b/services/issue/pull.go index bd19c25436..512fdf78e8 100644 --- a/services/issue/pull.go +++ b/services/issue/pull.go @@ -48,10 +48,6 @@ func IsCodeOwnerFile(f string) bool { } func PullRequestCodeOwnersReview(ctx context.Context, pr *issues_model.PullRequest) ([]*ReviewRequestNotifier, error) { - return PullRequestCodeOwnersReviewSpecialCommits(ctx, pr, "", "") // no commit is provided, then it uses PR's base&head branch -} - -func PullRequestCodeOwnersReviewSpecialCommits(ctx context.Context, pr *issues_model.PullRequest, startCommitID, endCommitID string) ([]*ReviewRequestNotifier, error) { if err := pr.LoadIssue(ctx); err != nil { return nil, err } @@ -100,19 +96,15 @@ func PullRequestCodeOwnersReviewSpecialCommits(ctx context.Context, pr *issues_m return nil, nil } - if startCommitID == "" && endCommitID == "" { - // get the mergebase - mergeBase, err := getMergeBase(repo, pr, git.BranchPrefix+pr.BaseBranch, pr.GetGitRefName()) - if err != nil { - return nil, err - } - startCommitID = mergeBase - endCommitID = pr.GetGitRefName() + // get the mergebase + mergeBase, err := getMergeBase(repo, pr, git.BranchPrefix+pr.BaseBranch, pr.GetGitHeadRefName()) + if err != nil { + return nil, err } // https://github.com/go-gitea/gitea/issues/29763, we need to get the files changed // between the merge base and the head commit but not the base branch and the head commit - changedFiles, err := repo.GetFilesChangedBetween(startCommitID, endCommitID) + changedFiles, err := repo.GetFilesChangedBetween(mergeBase, pr.GetGitHeadRefName()) if err != nil { return nil, err } @@ -138,13 +130,31 @@ func PullRequestCodeOwnersReviewSpecialCommits(ctx context.Context, pr *issues_m return nil, err } + // load all reviews from database + latestReivews, _, err := issues_model.GetReviewsByIssueID(ctx, pr.IssueID) + if err != nil { + return nil, err + } + + contain := func(list issues_model.ReviewList, u *user_model.User) bool { + for _, review := range list { + if review.ReviewerTeamID == 0 && review.ReviewerID == u.ID { + return true + } + } + return false + } + for _, u := range uniqUsers { - if u.ID != issue.Poster.ID { + if u.ID != issue.Poster.ID && !contain(latestReivews, u) { comment, err := issues_model.AddReviewRequest(ctx, issue, u, issue.Poster) if err != nil { log.Warn("Failed add assignee user: %s to PR review: %s#%d, error: %s", u.Name, pr.BaseRepo.Name, pr.ID, err) return nil, err } + if comment == nil { // comment maybe nil if review type is ReviewTypeRequest + continue + } notifiers = append(notifiers, &ReviewRequestNotifier{ Comment: comment, IsAdd: true, @@ -152,12 +162,16 @@ func PullRequestCodeOwnersReviewSpecialCommits(ctx context.Context, pr *issues_m }) } } + for _, t := range uniqTeams { comment, err := issues_model.AddTeamReviewRequest(ctx, issue, t, issue.Poster) if err != nil { log.Warn("Failed add 
assignee team: %s to PR review: %s#%d, error: %s", t.Name, pr.BaseRepo.Name, pr.ID, err) return nil, err } + if comment == nil { // comment maybe nil if review type is ReviewTypeRequest + continue + } notifiers = append(notifiers, &ReviewRequestNotifier{ Comment: comment, IsAdd: true, diff --git a/services/issue/status.go b/services/issue/status.go index e18b891175..fa59df93ba 100644 --- a/services/issue/status.go +++ b/services/issue/status.go @@ -15,30 +15,24 @@ import ( // CloseIssue close an issue. func CloseIssue(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, commitID string) error { - dbCtx, committer, err := db.TxContext(ctx) - if err != nil { - return err - } - defer committer.Close() - - comment, err := issues_model.CloseIssue(dbCtx, issue, doer) - if err != nil { - if issues_model.IsErrDependenciesLeft(err) { - if err := issues_model.FinishIssueStopwatchIfPossible(dbCtx, doer, issue); err != nil { - log.Error("Unable to stop stopwatch for issue[%d]#%d: %v", issue.ID, issue.Index, err) + var comment *issues_model.Comment + if err := db.WithTx(ctx, func(ctx context.Context) error { + var err error + comment, err = issues_model.CloseIssue(ctx, issue, doer) + if err != nil { + if issues_model.IsErrDependenciesLeft(err) { + if _, err := issues_model.FinishIssueStopwatch(ctx, doer, issue); err != nil { + log.Error("Unable to stop stopwatch for issue[%d]#%d: %v", issue.ID, issue.Index, err) + } } + return err } - return err - } - if err := issues_model.FinishIssueStopwatchIfPossible(dbCtx, doer, issue); err != nil { + _, err = issues_model.FinishIssueStopwatch(ctx, doer, issue) return err - } - - if err := committer.Commit(); err != nil { + }); err != nil { return err } - committer.Close() notify_service.IssueChangeStatus(ctx, doer, commitID, issue, comment, true) diff --git a/services/issue/suggestion_test.go b/services/issue/suggestion_test.go index 84cfd520ac..a5b39d27bb 100644 --- a/services/issue/suggestion_test.go +++ b/services/issue/suggestion_test.go @@ -51,7 +51,7 @@ func Test_Suggestion(t *testing.T) { for _, issue := range issues { issueIndexes = append(issueIndexes, issue.Index) } - assert.EqualValues(t, testCase.expectedIndexes, issueIndexes) + assert.Equal(t, testCase.expectedIndexes, issueIndexes) }) } } diff --git a/services/lfs/locks.go b/services/lfs/locks.go index 1d464f4a66..264001f0f9 100644 --- a/services/lfs/locks.go +++ b/services/lfs/locks.go @@ -74,10 +74,7 @@ func GetListLockHandler(ctx *context.Context) { } ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType) - cursor := ctx.FormInt("cursor") - if cursor < 0 { - cursor = 0 - } + cursor := max(ctx.FormInt("cursor"), 0) limit := ctx.FormInt("limit") if limit > setting.LFS.LocksPagingNum && setting.LFS.LocksPagingNum > 0 { limit = setting.LFS.LocksPagingNum @@ -239,10 +236,7 @@ func VerifyLockHandler(ctx *context.Context) { ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType) - cursor := ctx.FormInt("cursor") - if cursor < 0 { - cursor = 0 - } + cursor := max(ctx.FormInt("cursor"), 0) limit := ctx.FormInt("limit") if limit > setting.LFS.LocksPagingNum && setting.LFS.LocksPagingNum > 0 { limit = setting.LFS.LocksPagingNum diff --git a/services/lfs/server.go b/services/lfs/server.go index c4866edaab..c9d9f164bf 100644 --- a/services/lfs/server.go +++ b/services/lfs/server.go @@ -11,6 +11,7 @@ import ( "errors" "fmt" "io" + "maps" "net/http" "net/url" "path" @@ -26,6 +27,7 @@ import ( repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" user_model 
"code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/auth/httpauth" "code.gitea.io/gitea/modules/json" lfs_module "code.gitea.io/gitea/modules/lfs" "code.gitea.io/gitea/modules/log" @@ -41,6 +43,7 @@ type requestContext struct { User string Repo string Authorization string + Method string } // Claims is a JWT Token Claims @@ -164,11 +167,12 @@ func BatchHandler(ctx *context.Context) { } var isUpload bool - if br.Operation == "upload" { + switch br.Operation { + case "upload": isUpload = true - } else if br.Operation == "download" { + case "download": isUpload = false - } else { + default: log.Trace("Attempt to BATCH with invalid operation: %s", br.Operation) writeStatus(ctx, http.StatusBadRequest) return @@ -201,7 +205,7 @@ func BatchHandler(ctx *context.Context) { exists, err := contentStore.Exists(p) if err != nil { - log.Error("Unable to check if LFS OID[%s] exist. Error: %v", p.Oid, rc.User, rc.Repo, err) + log.Error("Unable to check if LFS object with ID '%s' exists for %s/%s. Error: %v", p.Oid, rc.User, rc.Repo, err) writeStatus(ctx, http.StatusInternalServerError) return } @@ -394,6 +398,7 @@ func getRequestContext(ctx *context.Context) *requestContext { User: ctx.PathParam("username"), Repo: strings.TrimSuffix(ctx.PathParam("reponame"), ".git"), Authorization: ctx.Req.Header.Get("Authorization"), + Method: ctx.Req.Method, } } @@ -462,7 +467,7 @@ func buildObjectResponse(rc *requestContext, pointer lfs_module.Pointer, downloa var link *lfs_module.Link if setting.LFS.Storage.ServeDirect() { // If we have a signed url (S3, object storage), redirect to this directly. - u, err := storage.LFS.URL(pointer.RelativePath(), pointer.Oid, nil) + u, err := storage.LFS.URL(pointer.RelativePath(), pointer.Oid, rc.Method, nil) if u != nil && err == nil { // Presigned url does not need the Authorization header // https://github.com/go-gitea/gitea/issues/21525 @@ -479,9 +484,7 @@ func buildObjectResponse(rc *requestContext, pointer lfs_module.Pointer, downloa rep.Actions["upload"] = &lfs_module.Link{Href: rc.UploadLink(pointer), Header: header} verifyHeader := make(map[string]string) - for key, value := range header { - verifyHeader[key] = value - } + maps.Copy(verifyHeader, header) // This is only needed to workaround https://github.com/git-lfs/git-lfs/issues/3662 verifyHeader["Accept"] = lfs_module.AcceptHeader @@ -571,15 +574,15 @@ func handleLFSToken(ctx stdCtx.Context, tokenSHA string, target *repo_model.Repo claims, claimsOk := token.Claims.(*Claims) if !token.Valid || !claimsOk { - return nil, fmt.Errorf("invalid token claim") + return nil, errors.New("invalid token claim") } if claims.RepoID != target.ID { - return nil, fmt.Errorf("invalid token claim") + return nil, errors.New("invalid token claim") } if mode == perm_model.AccessModeWrite && claims.Op != "upload" { - return nil, fmt.Errorf("invalid token claim") + return nil, errors.New("invalid token claim") } u, err := user_model.GetUserByID(ctx, claims.UserID) @@ -592,21 +595,13 @@ func handleLFSToken(ctx stdCtx.Context, tokenSHA string, target *repo_model.Repo func parseToken(ctx stdCtx.Context, authorization string, target *repo_model.Repository, mode perm_model.AccessMode) (*user_model.User, error) { if authorization == "" { - return nil, fmt.Errorf("no token") - } - - parts := strings.SplitN(authorization, " ", 2) - if len(parts) != 2 { - return nil, fmt.Errorf("no token") + return nil, errors.New("no token") } - tokenSHA := parts[1] - switch strings.ToLower(parts[0]) { - case "bearer": - fallthrough - case "token": - 
return handleLFSToken(ctx, tokenSHA, target, mode) + parsed, ok := httpauth.ParseAuthorizationHeader(authorization) + if !ok || parsed.BearerToken == nil { + return nil, errors.New("token not found") } - return nil, fmt.Errorf("token not found") + return handleLFSToken(ctx, parsed.BearerToken.Token, target, mode) } func requireAuth(ctx *context.Context) { diff --git a/services/mailer/mail.go b/services/mailer/mail.go index f7e5b0c9f0..d81b6d10af 100644 --- a/services/mailer/mail.go +++ b/services/mailer/mail.go @@ -8,13 +8,14 @@ import ( "bytes" "context" "encoding/base64" + "errors" "fmt" "html/template" "io" "mime" "regexp" "strings" - texttmpl "text/template" + "sync/atomic" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" @@ -22,6 +23,7 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/storage" + "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/modules/typesniffer" sender_service "code.gitea.io/gitea/services/mailer/sender" @@ -30,11 +32,13 @@ import ( const mailMaxSubjectRunes = 256 // There's no actual limit for subject in RFC 5322 -var ( - bodyTemplates *template.Template - subjectTemplates *texttmpl.Template - subjectRemoveSpaces = regexp.MustCompile(`[\s]+`) -) +var loadedTemplates atomic.Pointer[templates.MailTemplates] + +var subjectRemoveSpaces = regexp.MustCompile(`[\s]+`) + +func LoadedTemplates() *templates.MailTemplates { + return loadedTemplates.Load() +} // SendTestMail sends a test mail func SendTestMail(email string) error { @@ -117,7 +121,7 @@ func (b64embedder *mailAttachmentBase64Embedder) AttachmentSrcToBase64DataURI(ct attachmentUUID, ok = strings.CutPrefix(parsedSrc.RepoSubPath, "/attachments/") } if !ok { - return "", fmt.Errorf("not an attachment") + return "", errors.New("not an attachment") } } attachment, err := repo_model.GetAttachmentByUUID(ctx, attachmentUUID) @@ -126,10 +130,10 @@ func (b64embedder *mailAttachmentBase64Embedder) AttachmentSrcToBase64DataURI(ct } if attachment.RepoID != b64embedder.repo.ID { - return "", fmt.Errorf("attachment does not belong to the repository") + return "", errors.New("attachment does not belong to the repository") } if attachment.Size+b64embedder.estimateSize > b64embedder.maxSize { - return "", fmt.Errorf("total embedded images exceed max limit") + return "", errors.New("total embedded images exceed max limit") } fr, err := storage.Attachments.Open(attachment.RelativePath()) @@ -146,7 +150,7 @@ func (b64embedder *mailAttachmentBase64Embedder) AttachmentSrcToBase64DataURI(ct mimeType := typesniffer.DetectContentType(content) if !mimeType.IsImage() { - return "", fmt.Errorf("not an image") + return "", errors.New("not an image") } encoded := base64.StdEncoding.EncodeToString(content) @@ -170,3 +174,41 @@ func fromDisplayName(u *user_model.User) string { } return u.GetCompleteName() } + +func generateMetadataHeaders(repo *repo_model.Repository) map[string]string { + return map[string]string{ + // https://datatracker.ietf.org/doc/html/rfc2919 + "List-ID": fmt.Sprintf("%s <%s.%s.%s>", repo.FullName(), repo.Name, repo.OwnerName, setting.Domain), + + // https://datatracker.ietf.org/doc/html/rfc2369 + "List-Archive": fmt.Sprintf("<%s>", repo.HTMLURL()), + + "X-Mailer": "Gitea", + + "X-Gitea-Repository": repo.Name, + "X-Gitea-Repository-Path": repo.FullName(), + "X-Gitea-Repository-Link": repo.HTMLURL(), + + "X-GitLab-Project": repo.Name, + "X-GitLab-Project-Path": repo.FullName(), + } +} + +func 
generateSenderRecipientHeaders(doer, recipient *user_model.User) map[string]string { + return map[string]string{ + "X-Gitea-Sender": doer.Name, + "X-Gitea-Recipient": recipient.Name, + "X-Gitea-Recipient-Address": recipient.Email, + "X-GitHub-Sender": doer.Name, + "X-GitHub-Recipient": recipient.Name, + "X-GitHub-Recipient-Address": recipient.Email, + } +} + +func generateReasonHeaders(reason string) map[string]string { + return map[string]string{ + "X-Gitea-Reason": reason, + "X-GitHub-Reason": reason, + "X-GitLab-NotificationReason": reason, + } +} diff --git a/services/mailer/mail_issue_common.go b/services/mailer/mail_issue_common.go index ebfd52162c..a34d8a68c9 100644 --- a/services/mailer/mail_issue_common.go +++ b/services/mailer/mail_issue_common.go @@ -7,6 +7,7 @@ import ( "bytes" "context" "fmt" + "maps" "strconv" "strings" "time" @@ -29,7 +30,7 @@ import ( // Many e-mail service providers have limitations on the size of the email body, it's usually from 10MB to 25MB const maxEmailBodySize = 9_000_000 -func fallbackMailSubject(issue *issues_model.Issue) string { +func fallbackIssueMailSubject(issue *issues_model.Issue) string { return fmt.Sprintf("[%s] %s (#%d)", issue.Repo.FullName(), issue.Title, issue.Index) } @@ -86,7 +87,7 @@ func composeIssueCommentMessages(ctx context.Context, comment *mailComment, lang if actName != "new" { prefix = "Re: " } - fallback = prefix + fallbackMailSubject(comment.Issue) + fallback = prefix + fallbackIssueMailSubject(comment.Issue) if comment.Comment != nil && comment.Comment.Review != nil { reviewComments = make([]*issues_model.Comment, 0, 10) @@ -119,7 +120,7 @@ func composeIssueCommentMessages(ctx context.Context, comment *mailComment, lang } var mailSubject bytes.Buffer - if err := subjectTemplates.ExecuteTemplate(&mailSubject, tplName, mailMeta); err == nil { + if err := LoadedTemplates().SubjectTemplates.ExecuteTemplate(&mailSubject, tplName, mailMeta); err == nil { subject = sanitizeSubject(mailSubject.String()) if subject == "" { subject = fallback @@ -134,7 +135,7 @@ func composeIssueCommentMessages(ctx context.Context, comment *mailComment, lang var mailBody bytes.Buffer - if err := bodyTemplates.ExecuteTemplate(&mailBody, tplName, mailMeta); err != nil { + if err := LoadedTemplates().BodyTemplates.ExecuteTemplate(&mailBody, tplName, mailMeta); err != nil { log.Error("ExecuteTemplate [%s]: %v", tplName+"/body", err) } @@ -202,7 +203,7 @@ func composeIssueCommentMessages(ctx context.Context, comment *mailComment, lang msg.SetHeader("References", references...) msg.SetHeader("List-Unsubscribe", listUnsubscribe...) - for key, value := range generateAdditionalHeaders(comment, actType, recipient) { + for key, value := range generateAdditionalHeadersForIssue(comment, actType, recipient) { msg.SetHeader(key, value) } @@ -260,14 +261,14 @@ func actionToTemplate(issue *issues_model.Issue, actionType activities_model.Act } template = typeName + "/" + name - ok := bodyTemplates.Lookup(template) != nil + ok := LoadedTemplates().BodyTemplates.Lookup(template) != nil if !ok && typeName != "issue" { template = "issue/" + name - ok = bodyTemplates.Lookup(template) != nil + ok = LoadedTemplates().BodyTemplates.Lookup(template) != nil } if !ok { template = typeName + "/default" - ok = bodyTemplates.Lookup(template) != nil + ok = LoadedTemplates().BodyTemplates.Lookup(template) != nil } if !ok { template = "issue/default" @@ -302,35 +303,18 @@ func generateMessageIDForIssue(issue *issues_model.Issue, comment *issues_model. 
return fmt.Sprintf("<%s/%s/%d%s@%s>", issue.Repo.FullName(), path, issue.Index, extra, setting.Domain) } -func generateAdditionalHeaders(ctx *mailComment, reason string, recipient *user_model.User) map[string]string { +func generateAdditionalHeadersForIssue(ctx *mailComment, reason string, recipient *user_model.User) map[string]string { repo := ctx.Issue.Repo - return map[string]string{ - // https://datatracker.ietf.org/doc/html/rfc2919 - "List-ID": fmt.Sprintf("%s <%s.%s.%s>", repo.FullName(), repo.Name, repo.OwnerName, setting.Domain), - - // https://datatracker.ietf.org/doc/html/rfc2369 - "List-Archive": fmt.Sprintf("<%s>", repo.HTMLURL()), - - "X-Mailer": "Gitea", - "X-Gitea-Reason": reason, - "X-Gitea-Sender": ctx.Doer.Name, - "X-Gitea-Recipient": recipient.Name, - "X-Gitea-Recipient-Address": recipient.Email, - "X-Gitea-Repository": repo.Name, - "X-Gitea-Repository-Path": repo.FullName(), - "X-Gitea-Repository-Link": repo.HTMLURL(), - "X-Gitea-Issue-ID": strconv.FormatInt(ctx.Issue.Index, 10), - "X-Gitea-Issue-Link": ctx.Issue.HTMLURL(), - - "X-GitHub-Reason": reason, - "X-GitHub-Sender": ctx.Doer.Name, - "X-GitHub-Recipient": recipient.Name, - "X-GitHub-Recipient-Address": recipient.Email, - - "X-GitLab-NotificationReason": reason, - "X-GitLab-Project": repo.Name, - "X-GitLab-Project-Path": repo.FullName(), - "X-GitLab-Issue-IID": strconv.FormatInt(ctx.Issue.Index, 10), - } + issueID := strconv.FormatInt(ctx.Issue.Index, 10) + headers := generateMetadataHeaders(repo) + + maps.Copy(headers, generateSenderRecipientHeaders(ctx.Doer, recipient)) + maps.Copy(headers, generateReasonHeaders(reason)) + + headers["X-Gitea-Issue-ID"] = issueID + headers["X-Gitea-Issue-Link"] = ctx.Issue.HTMLURL() + headers["X-GitLab-Issue-IID"] = issueID + + return headers } diff --git a/services/mailer/mail_release.go b/services/mailer/mail_release.go index bfff73c39c..fd97fb5312 100644 --- a/services/mailer/mail_release.go +++ b/services/mailer/mail_release.go @@ -79,7 +79,7 @@ func mailNewRelease(ctx context.Context, lang string, tos []*user_model.User, re var mailBody bytes.Buffer - if err := bodyTemplates.ExecuteTemplate(&mailBody, string(tplNewReleaseMail), mailMeta); err != nil { + if err := LoadedTemplates().BodyTemplates.ExecuteTemplate(&mailBody, string(tplNewReleaseMail), mailMeta); err != nil { log.Error("ExecuteTemplate [%s]: %v", string(tplNewReleaseMail)+"/body", err) return } diff --git a/services/mailer/mail_repo.go b/services/mailer/mail_repo.go index b6b2d5ca07..1ec7995ab9 100644 --- a/services/mailer/mail_repo.go +++ b/services/mailer/mail_repo.go @@ -78,7 +78,7 @@ func sendRepoTransferNotifyMailPerLang(lang string, newOwner, doer *user_model.U "Destination": destination, } - if err := bodyTemplates.ExecuteTemplate(&content, string(mailRepoTransferNotify), data); err != nil { + if err := LoadedTemplates().BodyTemplates.ExecuteTemplate(&content, string(mailRepoTransferNotify), data); err != nil { return err } diff --git a/services/mailer/mail_team_invite.go b/services/mailer/mail_team_invite.go index 1fbade7e23..034dc14e3d 100644 --- a/services/mailer/mail_team_invite.go +++ b/services/mailer/mail_team_invite.go @@ -6,6 +6,7 @@ package mailer import ( "bytes" "context" + "errors" "fmt" "net/url" @@ -38,10 +39,10 @@ func MailTeamInvite(ctx context.Context, inviter *user_model.User, team *org_mod if err != nil && !user_model.IsErrUserNotExist(err) { return err } else if user != nil && user.ProhibitLogin { - return fmt.Errorf("login is prohibited for the invited user") + return 
errors.New("login is prohibited for the invited user") } - inviteRedirect := url.QueryEscape(fmt.Sprintf("/org/invite/%s", invite.Token)) + inviteRedirect := url.QueryEscape("/org/invite/" + invite.Token) inviteURL := fmt.Sprintf("%suser/sign_up?redirect_to=%s", setting.AppURL, inviteRedirect) if (err == nil && user != nil) || setting.Service.DisableRegistration || setting.Service.AllowOnlyExternalRegistration { @@ -61,7 +62,7 @@ func MailTeamInvite(ctx context.Context, inviter *user_model.User, team *org_mod } var mailBody bytes.Buffer - if err := bodyTemplates.ExecuteTemplate(&mailBody, string(tplTeamInviteMail), mailMeta); err != nil { + if err := LoadedTemplates().BodyTemplates.ExecuteTemplate(&mailBody, string(tplTeamInviteMail), mailMeta); err != nil { log.Error("ExecuteTemplate [%s]: %v", string(tplTeamInviteMail)+"/body", err) return err } diff --git a/services/mailer/mail_test.go b/services/mailer/mail_test.go index 6aced705f3..24f5d39d50 100644 --- a/services/mailer/mail_test.go +++ b/services/mailer/mail_test.go @@ -16,6 +16,7 @@ import ( "testing" texttmpl "text/template" + actions_model "code.gitea.io/gitea/models/actions" activities_model "code.gitea.io/gitea/models/activities" "code.gitea.io/gitea/models/db" issues_model "code.gitea.io/gitea/models/issues" @@ -25,6 +26,7 @@ import ( "code.gitea.io/gitea/modules/markup" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/storage" + "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/modules/test" "code.gitea.io/gitea/services/attachment" sender_service "code.gitea.io/gitea/services/mailer/sender" @@ -95,6 +97,13 @@ func prepareMailerBase64Test(t *testing.T) (doer *user_model.User, repo *repo_mo return user, repo, issue, att1, att2 } +func prepareMailTemplates(name, subjectTmpl, bodyTmpl string) { + loadedTemplates.Store(&templates.MailTemplates{ + SubjectTemplates: texttmpl.Must(texttmpl.New(name).Parse(subjectTmpl)), + BodyTemplates: template.Must(template.New(name).Parse(bodyTmpl)), + }) +} + func TestComposeIssueComment(t *testing.T) { doer, _, issue, comment := prepareMailerTest(t) @@ -107,8 +116,7 @@ func TestComposeIssueComment(t *testing.T) { setting.IncomingEmail.Enabled = true defer func() { setting.IncomingEmail.Enabled = false }() - subjectTemplates = texttmpl.Must(texttmpl.New("issue/comment").Parse(subjectTpl)) - bodyTemplates = template.Must(template.New("issue/comment").Parse(bodyTpl)) + prepareMailTemplates("issue/comment", subjectTpl, bodyTpl) recipients := []*user_model.User{{Name: "Test", Email: "test@gitea.com"}, {Name: "Test2", Email: "test2@gitea.com"}} msgs, err := composeIssueCommentMessages(t.Context(), &mailComment{ @@ -153,8 +161,7 @@ func TestComposeIssueComment(t *testing.T) { func TestMailMentionsComment(t *testing.T) { doer, _, issue, comment := prepareMailerTest(t) comment.Poster = doer - subjectTemplates = texttmpl.Must(texttmpl.New("issue/comment").Parse(subjectTpl)) - bodyTemplates = template.Must(template.New("issue/comment").Parse(bodyTpl)) + prepareMailTemplates("issue/comment", subjectTpl, bodyTpl) mails := 0 defer test.MockVariableValue(&SendAsync, func(msgs ...*sender_service.Message) { @@ -169,9 +176,7 @@ func TestMailMentionsComment(t *testing.T) { func TestComposeIssueMessage(t *testing.T) { doer, _, issue, _ := prepareMailerTest(t) - subjectTemplates = texttmpl.Must(texttmpl.New("issue/new").Parse(subjectTpl)) - bodyTemplates = template.Must(template.New("issue/new").Parse(bodyTpl)) - + prepareMailTemplates("issue/new", subjectTpl, bodyTpl) recipients := 
[]*user_model.User{{Name: "Test", Email: "test@gitea.com"}, {Name: "Test2", Email: "test2@gitea.com"}} msgs, err := composeIssueCommentMessages(t.Context(), &mailComment{ Issue: issue, Doer: doer, ActionType: activities_model.ActionCreateIssue, @@ -200,15 +205,14 @@ func TestTemplateSelection(t *testing.T) { doer, repo, issue, comment := prepareMailerTest(t) recipients := []*user_model.User{{Name: "Test", Email: "test@gitea.com"}} - subjectTemplates = texttmpl.Must(texttmpl.New("issue/default").Parse("issue/default/subject")) - texttmpl.Must(subjectTemplates.New("issue/new").Parse("issue/new/subject")) - texttmpl.Must(subjectTemplates.New("pull/comment").Parse("pull/comment/subject")) - texttmpl.Must(subjectTemplates.New("issue/close").Parse("")) // Must default to fallback subject + prepareMailTemplates("issue/default", "issue/default/subject", "issue/default/body") - bodyTemplates = template.Must(template.New("issue/default").Parse("issue/default/body")) - template.Must(bodyTemplates.New("issue/new").Parse("issue/new/body")) - template.Must(bodyTemplates.New("pull/comment").Parse("pull/comment/body")) - template.Must(bodyTemplates.New("issue/close").Parse("issue/close/body")) + texttmpl.Must(LoadedTemplates().SubjectTemplates.New("issue/new").Parse("issue/new/subject")) + texttmpl.Must(LoadedTemplates().SubjectTemplates.New("pull/comment").Parse("pull/comment/subject")) + texttmpl.Must(LoadedTemplates().SubjectTemplates.New("issue/close").Parse("")) // Must default to a fallback subject + template.Must(LoadedTemplates().BodyTemplates.New("issue/new").Parse("issue/new/body")) + template.Must(LoadedTemplates().BodyTemplates.New("pull/comment").Parse("pull/comment/body")) + template.Must(LoadedTemplates().BodyTemplates.New("issue/close").Parse("issue/close/body")) expect := func(t *testing.T, msg *sender_service.Message, expSubject, expBody string) { subject := msg.ToMessage().GetGenHeader("Subject") @@ -253,9 +257,7 @@ func TestTemplateServices(t *testing.T) { expect := func(t *testing.T, issue *issues_model.Issue, comment *issues_model.Comment, doer *user_model.User, actionType activities_model.ActionType, fromMention bool, tplSubject, tplBody, expSubject, expBody string, ) { - subjectTemplates = texttmpl.Must(texttmpl.New("issue/default").Parse(tplSubject)) - bodyTemplates = template.Must(template.New("issue/default").Parse(tplBody)) - + prepareMailTemplates("issue/default", tplSubject, tplBody) recipients := []*user_model.User{{Name: "Test", Email: "test@gitea.com"}} msg := testComposeIssueCommentMessage(t, &mailComment{ Issue: issue, Doer: doer, ActionType: actionType, @@ -297,13 +299,13 @@ func testComposeIssueCommentMessage(t *testing.T, ctx *mailComment, recipients [ return msgs[0] } -func TestGenerateAdditionalHeaders(t *testing.T) { +func TestGenerateAdditionalHeadersForIssue(t *testing.T) { doer, _, issue, _ := prepareMailerTest(t) comment := &mailComment{Issue: issue, Doer: doer} recipient := &user_model.User{Name: "test", Email: "test@gitea.com"} - headers := generateAdditionalHeaders(comment, "dummy-reason", recipient) + headers := generateAdditionalHeadersForIssue(comment, "dummy-reason", recipient) expected := map[string]string{ "List-ID": "user2/repo1 <repo1.user2.localhost>", @@ -440,6 +442,16 @@ func TestGenerateMessageIDForRelease(t *testing.T) { assert.Equal(t, "<owner/repo/releases/1@localhost>", msgID) } +func TestGenerateMessageIDForActionsWorkflowRunStatusEmail(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + repo := 
unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2}) + run := unittest.AssertExistsAndLoadBean(t, &actions_model.ActionRun{ID: 795, RepoID: repo.ID}) + assert.NoError(t, run.LoadAttributes(db.DefaultContext)) + msgID := generateMessageIDForActionsWorkflowRunStatusEmail(repo, run) + assert.Equal(t, "<user2/repo2/actions/runs/191@localhost>", msgID) +} + func TestFromDisplayName(t *testing.T) { tmpl, err := texttmpl.New("mailFrom").Parse("{{ .DisplayName }}") assert.NoError(t, err) @@ -467,7 +479,7 @@ func TestFromDisplayName(t *testing.T) { t.Run(tc.userDisplayName, func(t *testing.T) { user := &user_model.User{FullName: tc.userDisplayName, Name: "tmp"} got := fromDisplayName(user) - assert.EqualValues(t, tc.fromDisplayName, got) + assert.Equal(t, tc.fromDisplayName, got) }) } @@ -484,7 +496,7 @@ func TestFromDisplayName(t *testing.T) { setting.Domain = oldDomain }() - assert.EqualValues(t, "Mister X (by Code IT on [code.it])", fromDisplayName(&user_model.User{FullName: "Mister X", Name: "tmp"})) + assert.Equal(t, "Mister X (by Code IT on [code.it])", fromDisplayName(&user_model.User{FullName: "Mister X", Name: "tmp"})) }) } @@ -512,8 +524,7 @@ func TestEmbedBase64Images(t *testing.T) { att2ImgBase64 := fmt.Sprintf(`<img src="%s"/>`, att2Base64) t.Run("ComposeMessage", func(t *testing.T) { - subjectTemplates = texttmpl.Must(texttmpl.New("issue/new").Parse(subjectTpl)) - bodyTemplates = template.Must(template.New("issue/new").Parse(bodyTpl)) + prepareMailTemplates("issue/new", subjectTpl, bodyTpl) issue.Content = fmt.Sprintf(`MSG-BEFORE <image src="attachments/%s"> MSG-AFTER`, att1.UUID) require.NoError(t, issues_model.UpdateIssueCols(t.Context(), issue, "content")) @@ -528,7 +539,7 @@ func TestEmbedBase64Images(t *testing.T) { require.NoError(t, err) mailBody := msgs[0].Body - assert.Regexp(t, `MSG-BEFORE <a[^>]+><img src="data:image/png;base64,iVBORw0KGgo="/></a> MSG-AFTER`, mailBody) + assert.Regexp(t, `MSG-BEFORE <a[^>]+><img src="data:image/png;base64,iVBORw0KGgo=".*/></a> MSG-AFTER`, mailBody) }) t.Run("EmbedInstanceImageSkipExternalImage", func(t *testing.T) { diff --git a/services/mailer/mail_user.go b/services/mailer/mail_user.go index 5a200a5fa7..68df81f6a3 100644 --- a/services/mailer/mail_user.go +++ b/services/mailer/mail_user.go @@ -39,7 +39,7 @@ func sendUserMail(language string, u *user_model.User, tpl templates.TplName, co var content bytes.Buffer - if err := bodyTemplates.ExecuteTemplate(&content, string(tpl), data); err != nil { + if err := LoadedTemplates().BodyTemplates.ExecuteTemplate(&content, string(tpl), data); err != nil { log.Error("Template: %v", err) return } @@ -90,7 +90,7 @@ func SendActivateEmailMail(u *user_model.User, email string) { var content bytes.Buffer - if err := bodyTemplates.ExecuteTemplate(&content, string(mailAuthActivateEmail), data); err != nil { + if err := LoadedTemplates().BodyTemplates.ExecuteTemplate(&content, string(mailAuthActivateEmail), data); err != nil { log.Error("Template: %v", err) return } @@ -118,7 +118,7 @@ func SendRegisterNotifyMail(u *user_model.User) { var content bytes.Buffer - if err := bodyTemplates.ExecuteTemplate(&content, string(mailAuthRegisterNotify), data); err != nil { + if err := LoadedTemplates().BodyTemplates.ExecuteTemplate(&content, string(mailAuthRegisterNotify), data); err != nil { log.Error("Template: %v", err) return } @@ -149,7 +149,7 @@ func SendCollaboratorMail(u, doer *user_model.User, repo *repo_model.Repository) var content bytes.Buffer - if err := 
bodyTemplates.ExecuteTemplate(&content, string(mailNotifyCollaborator), data); err != nil { + if err := LoadedTemplates().BodyTemplates.ExecuteTemplate(&content, string(mailNotifyCollaborator), data); err != nil { log.Error("Template: %v", err) return } diff --git a/services/mailer/mail_workflow_run.go b/services/mailer/mail_workflow_run.go new file mode 100644 index 0000000000..29b3abda8e --- /dev/null +++ b/services/mailer/mail_workflow_run.go @@ -0,0 +1,165 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package mailer + +import ( + "bytes" + "context" + "fmt" + "sort" + + actions_model "code.gitea.io/gitea/models/actions" + repo_model "code.gitea.io/gitea/models/repo" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/base" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/translation" + "code.gitea.io/gitea/services/convert" + sender_service "code.gitea.io/gitea/services/mailer/sender" +) + +const tplWorkflowRun = "notify/workflow_run" + +type convertedWorkflowJob struct { + HTMLURL string + Status actions_model.Status + Name string + Attempt int64 +} + +func generateMessageIDForActionsWorkflowRunStatusEmail(repo *repo_model.Repository, run *actions_model.ActionRun) string { + return fmt.Sprintf("<%s/actions/runs/%d@%s>", repo.FullName(), run.Index, setting.Domain) +} + +func composeAndSendActionsWorkflowRunStatusEmail(ctx context.Context, repo *repo_model.Repository, run *actions_model.ActionRun, sender *user_model.User, recipients []*user_model.User) { + subject := "Run" + switch run.Status { + case actions_model.StatusFailure: + subject += " failed" + case actions_model.StatusCancelled: + subject += " cancelled" + case actions_model.StatusSuccess: + subject += " succeeded" + } + subject = fmt.Sprintf("%s: %s (%s)", subject, run.WorkflowID, base.ShortSha(run.CommitSHA)) + displayName := fromDisplayName(sender) + messageID := generateMessageIDForActionsWorkflowRunStatusEmail(repo, run) + metadataHeaders := generateMetadataHeaders(repo) + + jobs, err := actions_model.GetRunJobsByRunID(ctx, run.ID) + if err != nil { + log.Error("GetRunJobsByRunID: %v", err) + return + } + sort.SliceStable(jobs, func(i, j int) bool { + si, sj := jobs[i].Status, jobs[j].Status + /* + If both i and j are/are not success, leave it to si < sj. + If i is success and j is not, since the desired is j goes "smaller" and i goes "bigger", this func should return false. + If j is success and i is not, since the desired is i goes "smaller" and j goes "bigger", this func should return true. 
+ */ + if si.IsSuccess() != sj.IsSuccess() { + return !si.IsSuccess() + } + return si < sj + }) + + convertedJobs := make([]convertedWorkflowJob, 0, len(jobs)) + for _, job := range jobs { + converted0, err := convert.ToActionWorkflowJob(ctx, repo, nil, job) + if err != nil { + log.Error("convert.ToActionWorkflowJob: %v", err) + continue + } + convertedJobs = append(convertedJobs, convertedWorkflowJob{ + HTMLURL: converted0.HTMLURL, + Name: converted0.Name, + Status: job.Status, + Attempt: converted0.RunAttempt, + }) + } + + langMap := make(map[string][]*user_model.User) + for _, user := range recipients { + langMap[user.Language] = append(langMap[user.Language], user) + } + for lang, tos := range langMap { + locale := translation.NewLocale(lang) + var runStatusText string + switch run.Status { + case actions_model.StatusSuccess: + runStatusText = "All jobs have succeeded" + case actions_model.StatusFailure: + runStatusText = "All jobs have failed" + for _, job := range jobs { + if !job.Status.IsFailure() { + runStatusText = "Some jobs were not successful" + break + } + } + case actions_model.StatusCancelled: + runStatusText = "All jobs have been cancelled" + } + var mailBody bytes.Buffer + if err := LoadedTemplates().BodyTemplates.ExecuteTemplate(&mailBody, tplWorkflowRun, map[string]any{ + "Subject": subject, + "Repo": repo, + "Run": run, + "RunStatusText": runStatusText, + "Jobs": convertedJobs, + "locale": locale, + }); err != nil { + log.Error("ExecuteTemplate [%s]: %v", tplWorkflowRun, err) + return + } + msgs := make([]*sender_service.Message, 0, len(tos)) + for _, rec := range tos { + msg := sender_service.NewMessageFrom( + rec.Email, + displayName, + setting.MailService.FromEmail, + subject, + mailBody.String(), + ) + msg.Info = subject + for k, v := range generateSenderRecipientHeaders(sender, rec) { + msg.SetHeader(k, v) + } + for k, v := range metadataHeaders { + msg.SetHeader(k, v) + } + msg.SetHeader("Message-ID", messageID) + msgs = append(msgs, msg) + } + SendAsync(msgs...) 
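// Hedged sketch, not part of the patch: the ordering produced by the
// sort.SliceStable comparator in composeAndSendActionsWorkflowRunStatusEmail
// above. Jobs that did not succeed are listed before successful ones
// (presumably so failures lead the email body); within each group the raw
// status values sort ascending, and stability keeps the original job order
// for ties. The simplified status type below is an assumption for the demo.
package main

import (
	"fmt"
	"sort"
)

type status int

const (
	statusSuccess status = iota + 1
	statusFailure
	statusCancelled
)

func (s status) isSuccess() bool { return s == statusSuccess }

func main() {
	jobs := []status{statusSuccess, statusCancelled, statusFailure, statusSuccess}
	sort.SliceStable(jobs, func(i, j int) bool {
		si, sj := jobs[i], jobs[j]
		if si.isSuccess() != sj.isSuccess() {
			return !si.isSuccess() // non-successful jobs sort first
		}
		return si < sj
	})
	fmt.Println(jobs) // [2 3 1 1]: failure, cancelled, then the successes
}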
+ } +} + +func MailActionsTrigger(ctx context.Context, sender *user_model.User, repo *repo_model.Repository, run *actions_model.ActionRun) { + if setting.MailService == nil { + return + } + if run.Status.IsSkipped() { + return + } + + recipients := make([]*user_model.User, 0) + + if !sender.IsGiteaActions() && !sender.IsGhost() && sender.IsMailable() { + notifyPref, err := user_model.GetUserSetting(ctx, sender.ID, + user_model.SettingsKeyEmailNotificationGiteaActions, user_model.SettingEmailNotificationGiteaActionsFailureOnly) + if err != nil { + log.Error("GetUserSetting: %v", err) + return + } + if notifyPref == user_model.SettingEmailNotificationGiteaActionsAll || !run.Status.IsSuccess() && notifyPref != user_model.SettingEmailNotificationGiteaActionsDisabled { + recipients = append(recipients, sender) + } + } + + if len(recipients) > 0 { + composeAndSendActionsWorkflowRunStatusEmail(ctx, repo, run, sender, recipients) + } +} diff --git a/services/mailer/mailer.go b/services/mailer/mailer.go index bcd4facca9..db00aac4f1 100644 --- a/services/mailer/mailer.go +++ b/services/mailer/mailer.go @@ -43,7 +43,7 @@ func NewContext(ctx context.Context) { sender = &sender_service.SMTPSender{} } - subjectTemplates, bodyTemplates = templates.Mailer(ctx) + templates.LoadMailTemplates(ctx, &loadedTemplates) mailQueue = queue.CreateSimpleQueue(graceful.GetManager().ShutdownContext(), "mail", func(items ...*sender_service.Message) []*sender_service.Message { for _, msg := range items { diff --git a/services/mailer/notify.go b/services/mailer/notify.go index a27177e8f5..c008685e13 100644 --- a/services/mailer/notify.go +++ b/services/mailer/notify.go @@ -7,6 +7,7 @@ import ( "context" "fmt" + actions_model "code.gitea.io/gitea/models/actions" activities_model "code.gitea.io/gitea/models/activities" issues_model "code.gitea.io/gitea/models/issues" repo_model "code.gitea.io/gitea/models/repo" @@ -31,15 +32,16 @@ func (m *mailNotifier) CreateIssueComment(ctx context.Context, doer *user_model. 
issue *issues_model.Issue, comment *issues_model.Comment, mentions []*user_model.User, ) { var act activities_model.ActionType - if comment.Type == issues_model.CommentTypeClose { + switch comment.Type { + case issues_model.CommentTypeClose: act = activities_model.ActionCloseIssue - } else if comment.Type == issues_model.CommentTypeReopen { + case issues_model.CommentTypeReopen: act = activities_model.ActionReopenIssue - } else if comment.Type == issues_model.CommentTypeComment { + case issues_model.CommentTypeComment: act = activities_model.ActionCommentIssue - } else if comment.Type == issues_model.CommentTypeCode { + case issues_model.CommentTypeCode: act = activities_model.ActionCommentIssue - } else if comment.Type == issues_model.CommentTypePullRequestPush { + case issues_model.CommentTypePullRequestPush: act = 0 } @@ -95,11 +97,12 @@ func (m *mailNotifier) NewPullRequest(ctx context.Context, pr *issues_model.Pull func (m *mailNotifier) PullRequestReview(ctx context.Context, pr *issues_model.PullRequest, r *issues_model.Review, comment *issues_model.Comment, mentions []*user_model.User) { var act activities_model.ActionType - if comment.Type == issues_model.CommentTypeClose { + switch comment.Type { + case issues_model.CommentTypeClose: act = activities_model.ActionCloseIssue - } else if comment.Type == issues_model.CommentTypeReopen { + case issues_model.CommentTypeReopen: act = activities_model.ActionReopenIssue - } else if comment.Type == issues_model.CommentTypeComment { + case issues_model.CommentTypeComment: act = activities_model.ActionCommentPull } if err := MailParticipantsComment(ctx, comment, act, pr.Issue, mentions); err != nil { @@ -203,3 +206,10 @@ func (m *mailNotifier) RepoPendingTransfer(ctx context.Context, doer, newOwner * log.Error("SendRepoTransferNotifyMail: %v", err) } } + +func (m *mailNotifier) WorkflowRunStatusUpdate(ctx context.Context, repo *repo_model.Repository, sender *user_model.User, run *actions_model.ActionRun) { + if !run.Status.IsDone() { + return + } + MailActionsTrigger(ctx, sender, repo, run) +} diff --git a/services/mailer/sender/message_test.go b/services/mailer/sender/message_test.go index 63d0bc349a..ae153ebf05 100644 --- a/services/mailer/sender/message_test.go +++ b/services/mailer/sender/message_test.go @@ -108,9 +108,9 @@ func extractMailHeaderAndContent(t *testing.T, mail string) (map[string]string, } content := strings.TrimSpace("boundary=" + parts[1]) - hParts := strings.Split(parts[0], "\n") + hParts := strings.SplitSeq(parts[0], "\n") - for _, hPart := range hParts { + for hPart := range hParts { parts := strings.SplitN(hPart, ":", 2) hk := strings.TrimSpace(parts[0]) if hk != "" { diff --git a/services/mailer/sender/smtp.go b/services/mailer/sender/smtp.go index c53c3da997..8dc1b40b74 100644 --- a/services/mailer/sender/smtp.go +++ b/services/mailer/sender/smtp.go @@ -5,6 +5,7 @@ package sender import ( "crypto/tls" + "errors" "fmt" "io" "net" @@ -99,7 +100,7 @@ func (s *SMTPSender) Send(from string, to []string, msg io.WriterTo) error { canAuth, options := client.Extension("AUTH") if len(opts.User) > 0 { if !canAuth { - return fmt.Errorf("SMTP server does not support AUTH, but credentials provided") + return errors.New("SMTP server does not support AUTH, but credentials provided") } var auth smtp.Auth diff --git a/services/mailer/sender/smtp_auth.go b/services/mailer/sender/smtp_auth.go index 260b12437b..c60e0dbfbb 100644 --- a/services/mailer/sender/smtp_auth.go +++ b/services/mailer/sender/smtp_auth.go @@ -4,6 +4,7 @@ package 
sender import ( + "errors" "fmt" "github.com/Azure/go-ntlmssp" @@ -60,7 +61,7 @@ func (a *ntlmAuth) Start(server *smtp.ServerInfo) (string, []byte, error) { func (a *ntlmAuth) Next(fromServer []byte, more bool) ([]byte, error) { if more { if len(fromServer) == 0 { - return nil, fmt.Errorf("ntlm ChallengeMessage is empty") + return nil, errors.New("ntlm ChallengeMessage is empty") } authenticateMessage, err := ntlmssp.ProcessChallenge(fromServer, a.username, a.password, a.domainNeeded) return authenticateMessage, err diff --git a/services/markup/renderhelper_codepreview.go b/services/markup/renderhelper_codepreview.go index d638af7ff0..fa1eb824a2 100644 --- a/services/markup/renderhelper_codepreview.go +++ b/services/markup/renderhelper_codepreview.go @@ -6,7 +6,7 @@ package markup import ( "bufio" "context" - "fmt" + "errors" "html/template" "strings" @@ -14,13 +14,13 @@ import ( "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" "code.gitea.io/gitea/modules/charset" + "code.gitea.io/gitea/modules/git/languagestats" "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/indexer/code" "code.gitea.io/gitea/modules/markup" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" gitea_context "code.gitea.io/gitea/services/context" - "code.gitea.io/gitea/services/repository/files" ) func renderRepoFileCodePreview(ctx context.Context, opts markup.RenderCodePreviewOptions) (template.HTML, error) { @@ -38,7 +38,7 @@ func renderRepoFileCodePreview(ctx context.Context, opts markup.RenderCodePrevie webCtx := gitea_context.GetWebContext(ctx) if webCtx == nil { - return "", fmt.Errorf("context is not a web context") + return "", errors.New("context is not a web context") } doer := webCtx.Doer @@ -61,14 +61,14 @@ func renderRepoFileCodePreview(ctx context.Context, opts markup.RenderCodePrevie return "", err } - language, _ := files.TryGetContentLanguage(gitRepo, opts.CommitID, opts.FilePath) + language, _ := languagestats.GetFileLanguage(ctx, gitRepo, opts.CommitID, opts.FilePath) blob, err := commit.GetBlobByPath(opts.FilePath) if err != nil { return "", err } if blob.Size() > setting.UI.MaxDisplayFileSize { - return "", fmt.Errorf("file is too large") + return "", errors.New("file is too large") } dataRc, err := blob.DataAsync() diff --git a/services/markup/renderhelper_issueicontitle.go b/services/markup/renderhelper_issueicontitle.go index fd8f9d43fa..27b5595fa9 100644 --- a/services/markup/renderhelper_issueicontitle.go +++ b/services/markup/renderhelper_issueicontitle.go @@ -5,6 +5,7 @@ package markup import ( "context" + "errors" "fmt" "html/template" @@ -20,7 +21,7 @@ import ( func renderRepoIssueIconTitle(ctx context.Context, opts markup.RenderIssueIconTitleOptions) (_ template.HTML, err error) { webCtx := gitea_context.GetWebContext(ctx) if webCtx == nil { - return "", fmt.Errorf("context is not a web context") + return "", errors.New("context is not a web context") } textIssueIndex := fmt.Sprintf("(#%d)", opts.IssueIndex) diff --git a/services/markup/renderhelper_mention_test.go b/services/markup/renderhelper_mention_test.go index d05fbb6fba..d54ab13a48 100644 --- a/services/markup/renderhelper_mention_test.go +++ b/services/markup/renderhelper_mention_test.go @@ -37,7 +37,7 @@ func TestRenderHelperMention(t *testing.T) { assert.False(t, FormalRenderHelperFuncs().IsUsernameMentionable(t.Context(), userNoSuch)) // when using web context, use user.IsUserVisibleToViewer to check - req, err := http.NewRequest("GET", "/", nil) + req, err := 
http.NewRequest(http.MethodGet, "/", nil) assert.NoError(t, err) base := gitea_context.NewBaseContextForTest(httptest.NewRecorder(), req) giteaCtx := gitea_context.NewWebContext(base, &contexttest.MockRender{}, nil) diff --git a/services/migrations/codebase.go b/services/migrations/codebase.go index 880dd21497..240c7bcdc9 100644 --- a/services/migrations/codebase.go +++ b/services/migrations/codebase.go @@ -134,7 +134,7 @@ func (d *CodebaseDownloader) callAPI(ctx context.Context, endpoint string, param u.RawQuery = query.Encode() } - req, err := http.NewRequestWithContext(ctx, "GET", u.String(), nil) + req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil) if err != nil { return err } diff --git a/services/migrations/codecommit.go b/services/migrations/codecommit.go index c45f9e5943..d08b2e6d4a 100644 --- a/services/migrations/codecommit.go +++ b/services/migrations/codecommit.go @@ -5,7 +5,7 @@ package migrations import ( "context" - "fmt" + "errors" "net/url" "strconv" "strings" @@ -42,13 +42,13 @@ func (c *CodeCommitDownloaderFactory) New(ctx context.Context, opts base.Migrate hostElems := strings.Split(u.Host, ".") if len(hostElems) != 4 { - return nil, fmt.Errorf("cannot get the region from clone URL") + return nil, errors.New("cannot get the region from clone URL") } region := hostElems[1] pathElems := strings.Split(u.Path, "/") if len(pathElems) == 0 { - return nil, fmt.Errorf("cannot get the repo name from clone URL") + return nil, errors.New("cannot get the repo name from clone URL") } repoName := pathElems[len(pathElems)-1] @@ -155,10 +155,7 @@ func (c *CodeCommitDownloader) GetPullRequests(ctx context.Context, page, perPag } startIndex := (page - 1) * perPage - endIndex := page * perPage - if endIndex > len(allPullRequestIDs) { - endIndex = len(allPullRequestIDs) - } + endIndex := min(page*perPage, len(allPullRequestIDs)) batch := allPullRequestIDs[startIndex:endIndex] prs := make([]*base.PullRequest, 0, len(batch)) @@ -180,11 +177,15 @@ func (c *CodeCommitDownloader) GetPullRequests(ctx context.Context, page, perPag continue } target := orig.PullRequestTargets[0] + description := "" + if orig.Description != nil { + description = *orig.Description + } pr := &base.PullRequest{ Number: number, Title: *orig.Title, PosterName: c.getUsernameFromARN(*orig.AuthorArn), - Content: *orig.Description, + Content: description, State: "open", Created: *orig.CreationDate, Updated: *orig.LastActivityDate, @@ -206,6 +207,10 @@ func (c *CodeCommitDownloader) GetPullRequests(ctx context.Context, page, perPag pr.State = "closed" pr.Closed = orig.LastActivityDate } + if pr.Merged { + pr.MergeCommitSHA = *target.MergeMetadata.MergeCommitId + pr.MergedTime = orig.LastActivityDate + } _ = CheckAndEnsureSafePR(pr, c.baseURL, c) prs = append(prs, pr) diff --git a/services/migrations/dump.go b/services/migrations/dump.go index b4ca1e41e0..8edd567b08 100644 --- a/services/migrations/dump.go +++ b/services/migrations/dump.go @@ -488,7 +488,7 @@ func (g *RepositoryDumper) handlePullRequest(ctx context.Context, pr *base.PullR if pr.Head.CloneURL == "" || pr.Head.Ref == "" { // Set head information if pr.Head.SHA is available if pr.Head.SHA != "" { - _, _, err = git.NewCommand("update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(ctx, &git.RunOpts{Dir: g.gitPath()}) + _, _, err = git.NewCommand("update-ref", "--no-deref").AddDynamicArguments(pr.GetGitHeadRefName(), pr.Head.SHA).RunStdString(ctx, &git.RunOpts{Dir: g.gitPath()}) if err != nil { 
log.Error("PR #%d in %s/%s unable to update-ref for pr HEAD: %v", pr.Number, g.repoOwner, g.repoName, err) } @@ -518,7 +518,7 @@ func (g *RepositoryDumper) handlePullRequest(ctx context.Context, pr *base.PullR if !ok { // Set head information if pr.Head.SHA is available if pr.Head.SHA != "" { - _, _, err = git.NewCommand("update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(ctx, &git.RunOpts{Dir: g.gitPath()}) + _, _, err = git.NewCommand("update-ref", "--no-deref").AddDynamicArguments(pr.GetGitHeadRefName(), pr.Head.SHA).RunStdString(ctx, &git.RunOpts{Dir: g.gitPath()}) if err != nil { log.Error("PR #%d in %s/%s unable to update-ref for pr HEAD: %v", pr.Number, g.repoOwner, g.repoName, err) } @@ -577,7 +577,7 @@ func (g *RepositoryDumper) handlePullRequest(ctx context.Context, pr *base.PullR pr.Head.SHA = headSha } if pr.Head.SHA != "" { - _, _, err = git.NewCommand("update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(ctx, &git.RunOpts{Dir: g.gitPath()}) + _, _, err = git.NewCommand("update-ref", "--no-deref").AddDynamicArguments(pr.GetGitHeadRefName(), pr.Head.SHA).RunStdString(ctx, &git.RunOpts{Dir: g.gitPath()}) if err != nil { log.Error("unable to set %s as the local head for PR #%d from %s in %s/%s. Error: %v", pr.Head.SHA, pr.Number, pr.Head.Ref, g.repoOwner, g.repoName, err) } diff --git a/services/migrations/error.go b/services/migrations/error.go index c7d912f50b..9b470149bf 100644 --- a/services/migrations/error.go +++ b/services/migrations/error.go @@ -7,7 +7,7 @@ package migrations import ( "errors" - "github.com/google/go-github/v61/github" + "github.com/google/go-github/v71/github" ) // ErrRepoNotCreated returns the error that repository not created diff --git a/services/migrations/gitea_downloader.go b/services/migrations/gitea_downloader.go index f92f318293..5d48d2f003 100644 --- a/services/migrations/gitea_downloader.go +++ b/services/migrations/gitea_downloader.go @@ -298,7 +298,7 @@ func (g *GiteaDownloader) convertGiteaRelease(rel *gitea_sdk.Release) *base.Rele } // FIXME: for a private download? 
- req, err := http.NewRequest("GET", assetDownloadURL, nil) + req, err := http.NewRequest(http.MethodGet, assetDownloadURL, nil) if err != nil { return nil, err } diff --git a/services/migrations/gitea_downloader_test.go b/services/migrations/gitea_downloader_test.go index da56120065..bb1760e889 100644 --- a/services/migrations/gitea_downloader_test.go +++ b/services/migrations/gitea_downloader_test.go @@ -47,7 +47,7 @@ func TestGiteaDownloadRepo(t *testing.T) { topics, err := downloader.GetTopics(ctx) assert.NoError(t, err) sort.Strings(topics) - assert.EqualValues(t, []string{"ci", "gitea", "migration", "test"}, topics) + assert.Equal(t, []string{"ci", "gitea", "migration", "test"}, topics) labels, err := downloader.GetLabels(ctx) assert.NoError(t, err) @@ -134,7 +134,7 @@ func TestGiteaDownloadRepo(t *testing.T) { assert.NoError(t, err) assert.True(t, isEnd) assert.Len(t, issues, 7) - assert.EqualValues(t, "open", issues[0].State) + assert.Equal(t, "open", issues[0].State) issues, isEnd, err = downloader.GetIssues(ctx, 3, 2) assert.NoError(t, err) diff --git a/services/migrations/gitea_uploader.go b/services/migrations/gitea_uploader.go index b17cc3ce41..75eb06d01f 100644 --- a/services/migrations/gitea_uploader.go +++ b/services/migrations/gitea_uploader.go @@ -107,7 +107,7 @@ func (g *GiteaLocalUploader) CreateRepo(ctx context.Context, repo *base.Reposito IsPrivate: opts.Private || setting.Repository.ForcePrivate, IsMirror: opts.Mirror, Status: repo_model.RepositoryBeingMigrated, - }) + }, false) } else { r, err = repo_model.GetRepositoryByID(ctx, opts.MigrateToRepoID) } @@ -148,7 +148,7 @@ func (g *GiteaLocalUploader) CreateRepo(ctx context.Context, repo *base.Reposito return err } g.repo.ObjectFormatName = objectFormat.Name() - return repo_model.UpdateRepositoryCols(ctx, g.repo, "object_format_name") + return repo_model.UpdateRepositoryColsNoAutoTime(ctx, g.repo, "object_format_name") } // Close closes this uploader @@ -556,7 +556,7 @@ func (g *GiteaLocalUploader) CreatePullRequests(ctx context.Context, prs ...*bas } for _, pr := range gprs { g.issues[pr.Issue.Index] = pr.Issue - pull.AddToTaskQueue(ctx, pr) + pull.StartPullRequestCheckImmediately(ctx, pr) } return nil } @@ -681,7 +681,7 @@ func (g *GiteaLocalUploader) updateGitForPullRequest(ctx context.Context, pr *ba pr.Head.SHA = headSha } - _, _, err = git.NewCommand("update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(ctx, &git.RunOpts{Dir: g.repo.RepoPath()}) + _, _, err = git.NewCommand("update-ref", "--no-deref").AddDynamicArguments(pr.GetGitHeadRefName(), pr.Head.SHA).RunStdString(ctx, &git.RunOpts{Dir: g.repo.RepoPath()}) if err != nil { return "", err } @@ -701,10 +701,10 @@ func (g *GiteaLocalUploader) updateGitForPullRequest(ctx context.Context, pr *ba _, _, err = git.NewCommand("rev-list", "--quiet", "-1").AddDynamicArguments(pr.Head.SHA).RunStdString(ctx, &git.RunOpts{Dir: g.repo.RepoPath()}) if err != nil { // Git update-ref remove bad references with a relative path - log.Warn("Deprecated local head %s for PR #%d in %s/%s, removing %s", pr.Head.SHA, pr.Number, g.repoOwner, g.repoName, pr.GetGitRefName()) + log.Warn("Deprecated local head %s for PR #%d in %s/%s, removing %s", pr.Head.SHA, pr.Number, g.repoOwner, g.repoName, pr.GetGitHeadRefName()) } else { // set head information - _, _, err = git.NewCommand("update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(ctx, &git.RunOpts{Dir: g.repo.RepoPath()}) + _, _, err = 
git.NewCommand("update-ref", "--no-deref").AddDynamicArguments(pr.GetGitHeadRefName(), pr.Head.SHA).RunStdString(ctx, &git.RunOpts{Dir: g.repo.RepoPath()}) if err != nil { log.Error("unable to set %s as the local head for PR #%d from %s in %s/%s. Error: %v", pr.Head.SHA, pr.Number, pr.Head.Ref, g.repoOwner, g.repoName, err) } @@ -765,7 +765,7 @@ func (g *GiteaLocalUploader) newPullRequest(ctx context.Context, pr *base.PullRe issue := issues_model.Issue{ RepoID: g.repo.ID, Repo: g.repo, - Title: prTitle, + Title: util.TruncateRunes(prTitle, 255), Index: pr.Number, Content: pr.Content, MilestoneID: milestoneID, @@ -880,9 +880,9 @@ func (g *GiteaLocalUploader) CreateReviews(ctx context.Context, reviews ...*base continue } - headCommitID, err := g.gitRepo.GetRefCommitID(pr.GetGitRefName()) + headCommitID, err := g.gitRepo.GetRefCommitID(pr.GetGitHeadRefName()) if err != nil { - log.Warn("PR #%d GetRefCommitID[%s] in %s/%s: %v, all review comments will be ignored", pr.Index, pr.GetGitRefName(), g.repoOwner, g.repoName, err) + log.Warn("PR #%d GetRefCommitID[%s] in %s/%s: %v, all review comments will be ignored", pr.Index, pr.GetGitHeadRefName(), g.repoOwner, g.repoName, err) continue } @@ -975,7 +975,7 @@ func (g *GiteaLocalUploader) Finish(ctx context.Context) error { } g.repo.Status = repo_model.RepositoryReady - return repo_model.UpdateRepositoryCols(ctx, g.repo, "status") + return repo_model.UpdateRepositoryColsWithAutoTime(ctx, g.repo, "status") } func (g *GiteaLocalUploader) remapUser(ctx context.Context, source user_model.ExternalUserMigrated, target user_model.ExternalUserRemappable) error { @@ -1017,7 +1017,7 @@ func (g *GiteaLocalUploader) remapLocalUser(ctx context.Context, source user_mod func (g *GiteaLocalUploader) remapExternalUser(ctx context.Context, source user_model.ExternalUserMigrated) (userid int64, err error) { userid, ok := g.userMap[source.GetExternalID()] if !ok { - userid, err = user_model.GetUserIDByExternalUserID(ctx, g.gitServiceType.Name(), fmt.Sprintf("%d", source.GetExternalID())) + userid, err = user_model.GetUserIDByExternalUserID(ctx, g.gitServiceType.Name(), strconv.FormatInt(source.GetExternalID(), 10)) if err != nil { log.Error("GetUserIDByExternalUserID: %v", err) return 0, err diff --git a/services/migrations/gitea_uploader_test.go b/services/migrations/gitea_uploader_test.go index f52d4157c8..1970c0550c 100644 --- a/services/migrations/gitea_uploader_test.go +++ b/services/migrations/gitea_uploader_test.go @@ -64,7 +64,7 @@ func TestGiteaUploadRepo(t *testing.T) { repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerID: user.ID, Name: repoName}) assert.True(t, repo.HasWiki()) - assert.EqualValues(t, repo_model.RepositoryReady, repo.Status) + assert.Equal(t, repo_model.RepositoryReady, repo.Status) milestones, err := db.Find[issues_model.Milestone](db.DefaultContext, issues_model.FindMilestoneOptions{ RepoID: repo.ID, @@ -152,7 +152,7 @@ func TestGiteaUploadRemapLocalUser(t *testing.T) { uploader.userMap = make(map[int64]int64) err := uploader.remapUser(ctx, &source, &target) assert.NoError(t, err) - assert.EqualValues(t, doer.ID, target.GetUserID()) + assert.Equal(t, doer.ID, target.GetUserID()) // // The externalID matches a known user but the name does not match, @@ -163,7 +163,7 @@ func TestGiteaUploadRemapLocalUser(t *testing.T) { uploader.userMap = make(map[int64]int64) err = uploader.remapUser(ctx, &source, &target) assert.NoError(t, err) - assert.EqualValues(t, doer.ID, target.GetUserID()) + assert.Equal(t, doer.ID, 
target.GetUserID()) // // The externalID and externalName match an existing user, everything @@ -174,7 +174,7 @@ func TestGiteaUploadRemapLocalUser(t *testing.T) { uploader.userMap = make(map[int64]int64) err = uploader.remapUser(ctx, &source, &target) assert.NoError(t, err) - assert.EqualValues(t, user.ID, target.GetUserID()) + assert.Equal(t, user.ID, target.GetUserID()) } func TestGiteaUploadRemapExternalUser(t *testing.T) { @@ -202,7 +202,7 @@ func TestGiteaUploadRemapExternalUser(t *testing.T) { target := repo_model.Release{} err := uploader.remapUser(ctx, &source, &target) assert.NoError(t, err) - assert.EqualValues(t, doer.ID, target.GetUserID()) + assert.Equal(t, doer.ID, target.GetUserID()) // // Link the external ID to an existing user @@ -225,7 +225,7 @@ func TestGiteaUploadRemapExternalUser(t *testing.T) { target = repo_model.Release{} err = uploader.remapUser(ctx, &source, &target) assert.NoError(t, err) - assert.EqualValues(t, linkedUser.ID, target.GetUserID()) + assert.Equal(t, linkedUser.ID, target.GetUserID()) } func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) { @@ -239,7 +239,7 @@ func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) { assert.NoError(t, git.InitRepository(git.DefaultContext, fromRepo.RepoPath(), false, fromRepo.ObjectFormatName)) err := git.NewCommand("symbolic-ref").AddDynamicArguments("HEAD", git.BranchPrefix+baseRef).Run(git.DefaultContext, &git.RunOpts{Dir: fromRepo.RepoPath()}) assert.NoError(t, err) - assert.NoError(t, os.WriteFile(filepath.Join(fromRepo.RepoPath(), "README.md"), []byte(fmt.Sprintf("# Testing Repository\n\nOriginally created in: %s", fromRepo.RepoPath())), 0o644)) + assert.NoError(t, os.WriteFile(filepath.Join(fromRepo.RepoPath(), "README.md"), []byte("# Testing Repository\n\nOriginally created in: "+fromRepo.RepoPath()), 0o644)) assert.NoError(t, git.AddChanges(fromRepo.RepoPath(), true)) signature := git.Signature{ Email: "test@example.com", @@ -287,7 +287,7 @@ func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) { })) _, _, err = git.NewCommand("checkout", "-b").AddDynamicArguments(forkHeadRef).RunStdString(git.DefaultContext, &git.RunOpts{Dir: forkRepo.RepoPath()}) assert.NoError(t, err) - assert.NoError(t, os.WriteFile(filepath.Join(forkRepo.RepoPath(), "README.md"), []byte(fmt.Sprintf("# branch2 %s", forkRepo.RepoPath())), 0o644)) + assert.NoError(t, os.WriteFile(filepath.Join(forkRepo.RepoPath(), "README.md"), []byte("# branch2 "+forkRepo.RepoPath()), 0o644)) assert.NoError(t, git.AddChanges(forkRepo.RepoPath(), true)) assert.NoError(t, git.CommitChanges(forkRepo.RepoPath(), git.CommitChangesOptions{ Committer: &signature, @@ -508,14 +508,14 @@ func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) { head, err := uploader.updateGitForPullRequest(ctx, &testCase.pr) assert.NoError(t, err) - assert.EqualValues(t, testCase.head, head) + assert.Equal(t, testCase.head, head) log.Info(stopMark) logFiltered, logStopped := logChecker.Check(5 * time.Second) assert.True(t, logStopped) if len(testCase.logFilter) > 0 { - assert.EqualValues(t, testCase.logFiltered, logFiltered, "for log message filters: %v", testCase.logFilter) + assert.Equal(t, testCase.logFiltered, logFiltered, "for log message filters: %v", testCase.logFilter) } }) } diff --git a/services/migrations/github.go b/services/migrations/github.go index b00d6ed27f..c6cd6ea173 100644 --- a/services/migrations/github.go +++ b/services/migrations/github.go @@ -20,7 +20,7 @@ import ( "code.gitea.io/gitea/modules/proxy" "code.gitea.io/gitea/modules/structs" - 
"github.com/google/go-github/v61/github" + "github.com/google/go-github/v71/github" "golang.org/x/oauth2" ) @@ -89,8 +89,8 @@ func NewGithubDownloaderV3(_ context.Context, baseURL, userName, password, token } if token != "" { - tokens := strings.Split(token, ",") - for _, token := range tokens { + tokens := strings.SplitSeq(token, ",") + for token := range tokens { token = strings.TrimSpace(token) ts := oauth2.StaticTokenSource( &oauth2.Token{AccessToken: token}, @@ -133,7 +133,7 @@ func (g *GithubDownloaderV3) LogString() string { func (g *GithubDownloaderV3) addClient(client *http.Client, baseURL string) { githubClient := github.NewClient(client) if baseURL != "https://github.com" { - githubClient, _ = github.NewClient(client).WithEnterpriseURLs(baseURL, baseURL) + githubClient, _ = githubClient.WithEnterpriseURLs(baseURL, baseURL) } g.clients = append(g.clients, githubClient) g.rates = append(g.rates, nil) @@ -322,7 +322,10 @@ func (g *GithubDownloaderV3) convertGithubRelease(ctx context.Context, rel *gith httpClient := NewMigrationHTTPClient() for _, asset := range rel.Assets { - assetID := *asset.ID // Don't optimize this, for closure we need a local variable + assetID := asset.GetID() // Don't optimize this, for closure we need a local variable TODO: no need to do so in new Golang + if assetID == 0 { + continue + } r.Assets = append(r.Assets, &base.ReleaseAsset{ ID: asset.GetID(), Name: asset.GetName(), @@ -358,7 +361,7 @@ func (g *GithubDownloaderV3) convertGithubRelease(ctx context.Context, rel *gith } g.waitAndPickClient(ctx) - req, err := http.NewRequestWithContext(ctx, "GET", redirectURL, nil) + req, err := http.NewRequestWithContext(ctx, http.MethodGet, redirectURL, nil) if err != nil { return nil, err } @@ -441,9 +444,11 @@ func (g *GithubDownloaderV3) GetIssues(ctx context.Context, page, perPage int) ( if !g.SkipReactions { for i := 1; ; i++ { g.waitAndPickClient(ctx) - res, resp, err := g.getClient().Reactions.ListIssueReactions(ctx, g.repoOwner, g.repoName, issue.GetNumber(), &github.ListOptions{ - Page: i, - PerPage: perPage, + res, resp, err := g.getClient().Reactions.ListIssueReactions(ctx, g.repoOwner, g.repoName, issue.GetNumber(), &github.ListReactionOptions{ + ListOptions: github.ListOptions{ + Page: i, + PerPage: perPage, + }, }) if err != nil { return nil, false, err @@ -527,9 +532,11 @@ func (g *GithubDownloaderV3) getComments(ctx context.Context, commentable base.C if !g.SkipReactions { for i := 1; ; i++ { g.waitAndPickClient(ctx) - res, resp, err := g.getClient().Reactions.ListIssueCommentReactions(ctx, g.repoOwner, g.repoName, comment.GetID(), &github.ListOptions{ - Page: i, - PerPage: g.maxPerPage, + res, resp, err := g.getClient().Reactions.ListIssueCommentReactions(ctx, g.repoOwner, g.repoName, comment.GetID(), &github.ListReactionOptions{ + ListOptions: github.ListOptions{ + Page: i, + PerPage: g.maxPerPage, + }, }) if err != nil { return nil, err @@ -602,9 +609,11 @@ func (g *GithubDownloaderV3) GetAllComments(ctx context.Context, page, perPage i if !g.SkipReactions { for i := 1; ; i++ { g.waitAndPickClient(ctx) - res, resp, err := g.getClient().Reactions.ListIssueCommentReactions(ctx, g.repoOwner, g.repoName, comment.GetID(), &github.ListOptions{ - Page: i, - PerPage: g.maxPerPage, + res, resp, err := g.getClient().Reactions.ListIssueCommentReactions(ctx, g.repoOwner, g.repoName, comment.GetID(), &github.ListReactionOptions{ + ListOptions: github.ListOptions{ + Page: i, + PerPage: g.maxPerPage, + }, }) if err != nil { return nil, false, err @@ -673,9 
+682,11 @@ func (g *GithubDownloaderV3) GetPullRequests(ctx context.Context, page, perPage if !g.SkipReactions { for i := 1; ; i++ { g.waitAndPickClient(ctx) - res, resp, err := g.getClient().Reactions.ListIssueReactions(ctx, g.repoOwner, g.repoName, pr.GetNumber(), &github.ListOptions{ - Page: i, - PerPage: perPage, + res, resp, err := g.getClient().Reactions.ListIssueReactions(ctx, g.repoOwner, g.repoName, pr.GetNumber(), &github.ListReactionOptions{ + ListOptions: github.ListOptions{ + Page: i, + PerPage: perPage, + }, }) if err != nil { return nil, false, err @@ -760,9 +771,11 @@ func (g *GithubDownloaderV3) convertGithubReviewComments(ctx context.Context, cs if !g.SkipReactions { for i := 1; ; i++ { g.waitAndPickClient(ctx) - res, resp, err := g.getClient().Reactions.ListPullRequestCommentReactions(ctx, g.repoOwner, g.repoName, c.GetID(), &github.ListOptions{ - Page: i, - PerPage: g.maxPerPage, + res, resp, err := g.getClient().Reactions.ListPullRequestCommentReactions(ctx, g.repoOwner, g.repoName, c.GetID(), &github.ListReactionOptions{ + ListOptions: github.ListOptions{ + Page: i, + PerPage: g.maxPerPage, + }, }) if err != nil { return nil, err @@ -872,3 +885,18 @@ func (g *GithubDownloaderV3) GetReviews(ctx context.Context, reviewable base.Rev } return allReviews, nil } + +// FormatCloneURL add authentication into remote URLs +func (g *GithubDownloaderV3) FormatCloneURL(opts MigrateOptions, remoteAddr string) (string, error) { + u, err := url.Parse(remoteAddr) + if err != nil { + return "", err + } + if len(opts.AuthToken) > 0 { + // "multiple tokens" are used to benefit more "API rate limit quota" + // git clone doesn't count for rate limits, so only use the first token. + // source: https://github.com/orgs/community/discussions/44515 + u.User = url.UserPassword("oauth2", strings.Split(opts.AuthToken, ",")[0]) + } + return u.String(), nil +} diff --git a/services/migrations/github_test.go b/services/migrations/github_test.go index 2625fb62ec..6d1a5378b9 100644 --- a/services/migrations/github_test.go +++ b/services/migrations/github_test.go @@ -12,6 +12,7 @@ import ( base "code.gitea.io/gitea/modules/migration" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestGitHubDownloadRepo(t *testing.T) { @@ -429,3 +430,36 @@ func TestGitHubDownloadRepo(t *testing.T) { }, }, reviews) } + +func TestGithubMultiToken(t *testing.T) { + testCases := []struct { + desc string + token string + expectedCloneURL string + }{ + { + desc: "Single Token", + token: "single_token", + expectedCloneURL: "https://oauth2:single_token@github.com", + }, + { + desc: "Multi Token", + token: "token1,token2", + expectedCloneURL: "https://oauth2:token1@github.com", + }, + } + factory := GithubDownloaderV3Factory{} + + for _, tC := range testCases { + t.Run(tC.desc, func(t *testing.T) { + opts := base.MigrateOptions{CloneAddr: "https://github.com/go-gitea/gitea", AuthToken: tC.token} + client, err := factory.New(t.Context(), opts) + require.NoError(t, err) + + cloneURL, err := client.FormatCloneURL(opts, "https://github.com") + require.NoError(t, err) + + assert.Equal(t, tC.expectedCloneURL, cloneURL) + }) + } +} diff --git a/services/migrations/gitlab.go b/services/migrations/gitlab.go index efc5b960cf..a19a04bc44 100644 --- a/services/migrations/gitlab.go +++ b/services/migrations/gitlab.go @@ -16,6 +16,7 @@ import ( "time" issues_model "code.gitea.io/gitea/models/issues" + "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/log" 
base "code.gitea.io/gitea/modules/migration" @@ -340,7 +341,7 @@ func (g *GitlabDownloader) convertGitlabRelease(ctx context.Context, rel *gitlab return io.NopCloser(strings.NewReader(link.URL)), nil } - req, err := http.NewRequest("GET", link.URL, nil) + req, err := http.NewRequest(http.MethodGet, link.URL, nil) if err != nil { return nil, err } @@ -535,11 +536,15 @@ func (g *GitlabDownloader) GetComments(ctx context.Context, commentable base.Com } for _, stateEvent := range stateEvents { + posterUserID, posterUsername := user.GhostUserID, user.GhostUserName + if stateEvent.User != nil { + posterUserID, posterUsername = int64(stateEvent.User.ID), stateEvent.User.Username + } comment := &base.Comment{ IssueIndex: commentable.GetLocalIndex(), Index: int64(stateEvent.ID), - PosterID: int64(stateEvent.User.ID), - PosterName: stateEvent.User.Username, + PosterID: posterUserID, + PosterName: posterUsername, Content: "", Created: *stateEvent.CreatedAt, } diff --git a/services/migrations/gitlab_test.go b/services/migrations/gitlab_test.go index 52f5827dfe..73a1b6a276 100644 --- a/services/migrations/gitlab_test.go +++ b/services/migrations/gitlab_test.go @@ -50,7 +50,7 @@ func TestGitlabDownloadRepo(t *testing.T) { topics, err := downloader.GetTopics(ctx) assert.NoError(t, err) assert.Len(t, topics, 2) - assert.EqualValues(t, []string{"migration", "test"}, topics) + assert.Equal(t, []string{"migration", "test"}, topics) milestones, err := downloader.GetMilestones(ctx) assert.NoError(t, err) @@ -501,7 +501,7 @@ func TestAwardsToReactions(t *testing.T) { assert.NoError(t, json.Unmarshal([]byte(testResponse), &awards)) reactions := downloader.awardsToReactions(awards) - assert.EqualValues(t, []*base.Reaction{ + assert.Equal(t, []*base.Reaction{ { UserName: "lafriks", UserID: 1241334, @@ -593,7 +593,7 @@ func TestNoteToComment(t *testing.T) { for i, note := range notes { actualComment := *downloader.convertNoteToComment(17, ¬e) - assert.EqualValues(t, actualComment, comments[i]) + assert.Equal(t, actualComment, comments[i]) } } diff --git a/services/migrations/migrate.go b/services/migrations/migrate.go index 5dda12286f..eba9c79df5 100644 --- a/services/migrations/migrate.go +++ b/services/migrations/migrate.go @@ -6,6 +6,7 @@ package migrations import ( "context" + "errors" "fmt" "net" "net/url" @@ -74,11 +75,9 @@ func IsMigrateURLAllowed(remoteURL string, doer *user_model.User) error { return &git.ErrInvalidCloneAddr{Host: u.Host, IsProtocolInvalid: true, IsPermissionDenied: true, IsURLError: true} } - hostName, _, err := net.SplitHostPort(u.Host) - if err != nil { - // u.Host can be "host" or "host:port" - err = nil //nolint - hostName = u.Host + hostName, _, errIgnored := net.SplitHostPort(u.Host) + if errIgnored != nil { + hostName = u.Host // u.Host can be "host" or "host:port" } // some users only use proxy, there is no DNS resolver. 
it's safe to ignore the LookupIP error @@ -211,7 +210,7 @@ func migrateRepository(ctx context.Context, doer *user_model.User, downloader ba if cloneURL.Scheme == "file" || cloneURL.Scheme == "" { if cloneAddrURL.Scheme != "file" && cloneAddrURL.Scheme != "" { - return fmt.Errorf("repo info has changed from external to local filesystem") + return errors.New("repo info has changed from external to local filesystem") } } diff --git a/services/migrations/onedev.go b/services/migrations/onedev.go index 4ce35dd12e..e052cba0cc 100644 --- a/services/migrations/onedev.go +++ b/services/migrations/onedev.go @@ -128,7 +128,7 @@ func (d *OneDevDownloader) callAPI(ctx context.Context, endpoint string, paramet u.RawQuery = query.Encode() } - req, err := http.NewRequestWithContext(ctx, "GET", u.String(), nil) + req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil) if err != nil { return err } diff --git a/services/mirror/mirror.go b/services/mirror/mirror.go index e029bbb1d6..7fb7fabb75 100644 --- a/services/mirror/mirror.go +++ b/services/mirror/mirror.go @@ -5,7 +5,7 @@ package mirror import ( "context" - "fmt" + "errors" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/modules/log" @@ -29,7 +29,7 @@ func doMirrorSync(ctx context.Context, req *SyncRequest) { } } -var errLimit = fmt.Errorf("reached limit") +var errLimit = errors.New("reached limit") // Update checks and updates mirror repositories. func Update(ctx context.Context, pullLimit, pushLimit int) error { @@ -68,7 +68,7 @@ func Update(ctx context.Context, pullLimit, pushLimit int) error { // Check we've not been cancelled select { case <-ctx.Done(): - return fmt.Errorf("aborted") + return errors.New("aborted") default: } diff --git a/services/mirror/mirror_pull.go b/services/mirror/mirror_pull.go index 658747e7c8..cb90af5894 100644 --- a/services/mirror/mirror_pull.go +++ b/services/mirror/mirror_pull.go @@ -71,7 +71,7 @@ func UpdateAddress(ctx context.Context, m *repo_model.Mirror, addr string) error // erase authentication before storing in database u.User = nil m.Repo.OriginalURL = u.String() - return repo_model.UpdateRepositoryCols(ctx, m.Repo, "original_url") + return repo_model.UpdateRepositoryColsNoAutoTime(ctx, m.Repo, "original_url") } // mirrorSyncResult contains information of a updated reference. @@ -235,6 +235,24 @@ func pruneBrokenReferences(ctx context.Context, return pruneErr } +// checkRecoverableSyncError takes an error message from a git fetch command and returns false if it should be a fatal/blocking error +func checkRecoverableSyncError(stderrMessage string) bool { + switch { + case strings.Contains(stderrMessage, "unable to resolve reference") && strings.Contains(stderrMessage, "reference broken"): + return true + case strings.Contains(stderrMessage, "remote error") && strings.Contains(stderrMessage, "not our ref"): + return true + case strings.Contains(stderrMessage, "cannot lock ref") && strings.Contains(stderrMessage, "but expected"): + return true + case strings.Contains(stderrMessage, "cannot lock ref") && strings.Contains(stderrMessage, "unable to resolve reference"): + return true + case strings.Contains(stderrMessage, "Unable to create") && strings.Contains(stderrMessage, ".lock"): + return true + default: + return false + } +} + // runSync returns true if sync finished without error. 
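// For instance (the messages are taken verbatim from Test_checkRecoverableSyncError
// further down in this patch), checkRecoverableSyncError above treats a broken-reference
// fetch error as recoverable, in which case runSync below attempts a prune and retries
// instead of failing, while an authentication failure stays fatal:
//
//	checkRecoverableSyncError("error: cannot lock ref 'refs/remotes/origin/foo': unable to resolve reference 'refs/remotes/origin/foo': reference broken") // true
//	checkRecoverableSyncError("fatal: Authentication failed for 'https://example.com/foo-does-not-exist/bar.git/'")                                        // false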
func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bool) { repoPath := m.Repo.RepoPath() @@ -275,7 +293,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo stdoutMessage := util.SanitizeCredentialURLs(stdout) // Now check if the error is a resolve reference due to broken reference - if strings.Contains(stderr, "unable to resolve reference") && strings.Contains(stderr, "reference broken") { + if checkRecoverableSyncError(stderr) { log.Warn("SyncMirrors [repo: %-v]: failed to update mirror repository due to broken references:\nStdout: %s\nStderr: %s\nErr: %v\nAttempting Prune", m.Repo, stdoutMessage, stderrMessage, err) err = nil @@ -324,6 +342,15 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo return nil, false } + if m.LFS && setting.LFS.StartServer { + log.Trace("SyncMirrors [repo: %-v]: syncing LFS objects...", m.Repo) + endpoint := lfs.DetermineEndpoint(remoteURL.String(), m.LFSEndpoint) + lfsClient := lfs.NewClient(endpoint, nil) + if err = repo_module.StoreMissingLfsObjectsInRepository(ctx, m.Repo, gitRepo, lfsClient); err != nil { + log.Error("SyncMirrors [repo: %-v]: failed to synchronize LFS objects for repository: %v", m.Repo, err) + } + } + log.Trace("SyncMirrors [repo: %-v]: syncing branches...", m.Repo) if _, err = repo_module.SyncRepoBranchesWithRepo(ctx, m.Repo, gitRepo, 0); err != nil { log.Error("SyncMirrors [repo: %-v]: failed to synchronize branches: %v", m.Repo, err) @@ -333,15 +360,6 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo if err = repo_module.SyncReleasesWithTags(ctx, m.Repo, gitRepo); err != nil { log.Error("SyncMirrors [repo: %-v]: failed to synchronize tags to releases: %v", m.Repo, err) } - - if m.LFS && setting.LFS.StartServer { - log.Trace("SyncMirrors [repo: %-v]: syncing LFS objects...", m.Repo) - endpoint := lfs.DetermineEndpoint(remoteURL.String(), m.LFSEndpoint) - lfsClient := lfs.NewClient(endpoint, nil) - if err = repo_module.StoreMissingLfsObjectsInRepository(ctx, m.Repo, gitRepo, lfsClient); err != nil { - log.Error("SyncMirrors [repo: %-v]: failed to synchronize LFS objects for repository: %v", m.Repo, err) - } - } gitRepo.Close() log.Trace("SyncMirrors [repo: %-v]: updating size of repository", m.Repo) @@ -368,7 +386,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo stdoutMessage := util.SanitizeCredentialURLs(stdout) // Now check if the error is a resolve reference due to broken reference - if strings.Contains(stderrMessage, "unable to resolve reference") && strings.Contains(stderrMessage, "reference broken") { + if checkRecoverableSyncError(stderrMessage) { log.Warn("SyncMirrors [repo: %-v Wiki]: failed to update mirror wiki repository due to broken references:\nStdout: %s\nStderr: %s\nErr: %v\nAttempting Prune", m.Repo, stdoutMessage, stderrMessage, err) err = nil @@ -419,7 +437,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo } for _, branch := range branches { - cache.Remove(m.Repo.GetCommitsCountCacheKey(branch.Name, true)) + cache.Remove(m.Repo.GetCommitsCountCacheKey(branch, true)) } m.UpdatedUnix = timeutil.TimeStampNow() @@ -635,7 +653,7 @@ func checkAndUpdateEmptyRepository(ctx context.Context, m *repo_model.Mirror, re } m.Repo.IsEmpty = false // Update the is empty and default_branch columns - if err := repo_model.UpdateRepositoryCols(ctx, m.Repo, "default_branch", "is_empty"); err != nil { + if err := 
repo_model.UpdateRepositoryColsWithAutoTime(ctx, m.Repo, "default_branch", "is_empty"); err != nil { log.Error("Failed to update default branch of repository %-v. Error: %v", m.Repo, err) desc := fmt.Sprintf("Failed to update default branch of repository '%s': %v", m.Repo.RepoPath(), err) if err = system_model.CreateRepositoryNotice(desc); err != nil { diff --git a/services/mirror/mirror_pull_test.go b/services/mirror/mirror_pull_test.go new file mode 100644 index 0000000000..97859be5b0 --- /dev/null +++ b/services/mirror/mirror_pull_test.go @@ -0,0 +1,94 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package mirror + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_parseRemoteUpdateOutput(t *testing.T) { + output := ` + * [new tag] v0.1.8 -> v0.1.8 + * [new branch] master -> origin/master + - [deleted] (none) -> origin/test1 + - [deleted] (none) -> tag1 + + f895a1e...957a993 test2 -> origin/test2 (forced update) + 957a993..a87ba5f test3 -> origin/test3 + * [new ref] refs/pull/26595/head -> refs/pull/26595/head + * [new ref] refs/pull/26595/merge -> refs/pull/26595/merge + e0639e38fb..6db2410489 refs/pull/25873/head -> refs/pull/25873/head + + 1c97ebc746...976d27d52f refs/pull/25873/merge -> refs/pull/25873/merge (forced update) +` + results := parseRemoteUpdateOutput(output, "origin") + assert.Len(t, results, 10) + assert.Equal(t, "refs/tags/v0.1.8", results[0].refName.String()) + assert.Equal(t, gitShortEmptySha, results[0].oldCommitID) + assert.Empty(t, results[0].newCommitID) + + assert.Equal(t, "refs/heads/master", results[1].refName.String()) + assert.Equal(t, gitShortEmptySha, results[1].oldCommitID) + assert.Empty(t, results[1].newCommitID) + + assert.Equal(t, "refs/heads/test1", results[2].refName.String()) + assert.Empty(t, results[2].oldCommitID) + assert.Equal(t, gitShortEmptySha, results[2].newCommitID) + + assert.Equal(t, "refs/tags/tag1", results[3].refName.String()) + assert.Empty(t, results[3].oldCommitID) + assert.Equal(t, gitShortEmptySha, results[3].newCommitID) + + assert.Equal(t, "refs/heads/test2", results[4].refName.String()) + assert.Equal(t, "f895a1e", results[4].oldCommitID) + assert.Equal(t, "957a993", results[4].newCommitID) + + assert.Equal(t, "refs/heads/test3", results[5].refName.String()) + assert.Equal(t, "957a993", results[5].oldCommitID) + assert.Equal(t, "a87ba5f", results[5].newCommitID) + + assert.Equal(t, "refs/pull/26595/head", results[6].refName.String()) + assert.Equal(t, gitShortEmptySha, results[6].oldCommitID) + assert.Empty(t, results[6].newCommitID) + + assert.Equal(t, "refs/pull/26595/merge", results[7].refName.String()) + assert.Equal(t, gitShortEmptySha, results[7].oldCommitID) + assert.Empty(t, results[7].newCommitID) + + assert.Equal(t, "refs/pull/25873/head", results[8].refName.String()) + assert.Equal(t, "e0639e38fb", results[8].oldCommitID) + assert.Equal(t, "6db2410489", results[8].newCommitID) + + assert.Equal(t, "refs/pull/25873/merge", results[9].refName.String()) + assert.Equal(t, "1c97ebc746", results[9].oldCommitID) + assert.Equal(t, "976d27d52f", results[9].newCommitID) +} + +func Test_checkRecoverableSyncError(t *testing.T) { + cases := []struct { + recoverable bool + message string + }{ + // A race condition in http git-fetch where certain refs were listed on the remote and are no longer there, would exit status 128 + {true, "fatal: remote error: upload-pack: not our ref 988881adc9fc3655077dc2d4d757d480b5ea0e11"}, + // A race condition where a 
local gc/prune removes a named ref during a git-fetch would exit status 1 + {true, "cannot lock ref 'refs/pull/123456/merge': unable to resolve reference 'refs/pull/134153/merge'"}, + // A race condition in http git-fetch where named refs were listed on the remote and are no longer there + {true, "error: cannot lock ref 'refs/remotes/origin/foo': unable to resolve reference 'refs/remotes/origin/foo': reference broken"}, + // A race condition in http git-fetch where named refs were force-pushed during the update, would exit status 128 + {true, "error: cannot lock ref 'refs/pull/123456/merge': is at 988881adc9fc3655077dc2d4d757d480b5ea0e11 but expected 7f894307ffc9553edbd0b671cab829786866f7b2"}, + // A race condition with other local git operations, such as git-maintenance, would exit status 128 (well, "Unable" the "U" is uppercase) + {true, "fatal: Unable to create '/data/gitea-repositories/foo-org/bar-repo.git/./objects/info/commit-graphs/commit-graph-chain.lock': File exists."}, + // Missing or unauthorized credentials, would exit status 128 + {false, "fatal: Authentication failed for 'https://example.com/foo-does-not-exist/bar.git/'"}, + // A non-existent remote repository, would exit status 128 + {false, "fatal: Could not read from remote repository."}, + // A non-functioning proxy, would exit status 128 + {false, "fatal: unable to access 'https://example.com/foo-does-not-exist/bar.git/': Failed to connect to configured-https-proxy port 1080 after 0 ms: Couldn't connect to server"}, + } + + for _, c := range cases { + assert.Equal(t, c.recoverable, checkRecoverableSyncError(c.message), "test case: %s", c.message) + } +} diff --git a/services/mirror/mirror_test.go b/services/mirror/mirror_test.go deleted file mode 100644 index 76632b6872..0000000000 --- a/services/mirror/mirror_test.go +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2023 The Gitea Authors. All rights reserved. 
-// SPDX-License-Identifier: MIT - -package mirror - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func Test_parseRemoteUpdateOutput(t *testing.T) { - output := ` - * [new tag] v0.1.8 -> v0.1.8 - * [new branch] master -> origin/master - - [deleted] (none) -> origin/test1 - - [deleted] (none) -> tag1 - + f895a1e...957a993 test2 -> origin/test2 (forced update) - 957a993..a87ba5f test3 -> origin/test3 - * [new ref] refs/pull/26595/head -> refs/pull/26595/head - * [new ref] refs/pull/26595/merge -> refs/pull/26595/merge - e0639e38fb..6db2410489 refs/pull/25873/head -> refs/pull/25873/head - + 1c97ebc746...976d27d52f refs/pull/25873/merge -> refs/pull/25873/merge (forced update) -` - results := parseRemoteUpdateOutput(output, "origin") - assert.Len(t, results, 10) - assert.EqualValues(t, "refs/tags/v0.1.8", results[0].refName.String()) - assert.EqualValues(t, gitShortEmptySha, results[0].oldCommitID) - assert.EqualValues(t, "", results[0].newCommitID) - - assert.EqualValues(t, "refs/heads/master", results[1].refName.String()) - assert.EqualValues(t, gitShortEmptySha, results[1].oldCommitID) - assert.EqualValues(t, "", results[1].newCommitID) - - assert.EqualValues(t, "refs/heads/test1", results[2].refName.String()) - assert.EqualValues(t, "", results[2].oldCommitID) - assert.EqualValues(t, gitShortEmptySha, results[2].newCommitID) - - assert.EqualValues(t, "refs/tags/tag1", results[3].refName.String()) - assert.EqualValues(t, "", results[3].oldCommitID) - assert.EqualValues(t, gitShortEmptySha, results[3].newCommitID) - - assert.EqualValues(t, "refs/heads/test2", results[4].refName.String()) - assert.EqualValues(t, "f895a1e", results[4].oldCommitID) - assert.EqualValues(t, "957a993", results[4].newCommitID) - - assert.EqualValues(t, "refs/heads/test3", results[5].refName.String()) - assert.EqualValues(t, "957a993", results[5].oldCommitID) - assert.EqualValues(t, "a87ba5f", results[5].newCommitID) - - assert.EqualValues(t, "refs/pull/26595/head", results[6].refName.String()) - assert.EqualValues(t, gitShortEmptySha, results[6].oldCommitID) - assert.EqualValues(t, "", results[6].newCommitID) - - assert.EqualValues(t, "refs/pull/26595/merge", results[7].refName.String()) - assert.EqualValues(t, gitShortEmptySha, results[7].oldCommitID) - assert.EqualValues(t, "", results[7].newCommitID) - - assert.EqualValues(t, "refs/pull/25873/head", results[8].refName.String()) - assert.EqualValues(t, "e0639e38fb", results[8].oldCommitID) - assert.EqualValues(t, "6db2410489", results[8].newCommitID) - - assert.EqualValues(t, "refs/pull/25873/merge", results[9].refName.String()) - assert.EqualValues(t, "1c97ebc746", results[9].oldCommitID) - assert.EqualValues(t, "976d27d52f", results[9].newCommitID) -} diff --git a/services/notify/notifier.go b/services/notify/notifier.go index 40428454be..875a70e564 100644 --- a/services/notify/notifier.go +++ b/services/notify/notifier.go @@ -79,5 +79,7 @@ type Notifier interface { CreateCommitStatus(ctx context.Context, repo *repo_model.Repository, commit *repository.PushCommit, sender *user_model.User, status *git_model.CommitStatus) + WorkflowRunStatusUpdate(ctx context.Context, repo *repo_model.Repository, sender *user_model.User, run *actions_model.ActionRun) + WorkflowJobStatusUpdate(ctx context.Context, repo *repo_model.Repository, sender *user_model.User, job *actions_model.ActionRunJob, task *actions_model.ActionTask) } diff --git a/services/notify/notify.go b/services/notify/notify.go index 9f8be4b577..2416cbd2e0 100644 --- 
a/services/notify/notify.go +++ b/services/notify/notify.go @@ -46,10 +46,25 @@ func DeleteWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model } } +func shouldSendCommentChangeNotification(ctx context.Context, comment *issues_model.Comment) bool { + if err := comment.LoadReview(ctx); err != nil { + log.Error("LoadReview: %v", err) + return false + } else if comment.Review != nil && comment.Review.Type == issues_model.ReviewTypePending { + // Pending review comments updating should not triggered + return false + } + return true +} + // CreateIssueComment notifies issue comment related message to notifiers func CreateIssueComment(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, issue *issues_model.Issue, comment *issues_model.Comment, mentions []*user_model.User, ) { + if !shouldSendCommentChangeNotification(ctx, comment) { + return + } + for _, notifier := range notifiers { notifier.CreateIssueComment(ctx, doer, repo, issue, comment, mentions) } @@ -156,6 +171,10 @@ func PullReviewDismiss(ctx context.Context, doer *user_model.User, review *issue // UpdateComment notifies update comment to notifiers func UpdateComment(ctx context.Context, doer *user_model.User, c *issues_model.Comment, oldContent string) { + if !shouldSendCommentChangeNotification(ctx, c) { + return + } + for _, notifier := range notifiers { notifier.UpdateComment(ctx, doer, c, oldContent) } @@ -163,6 +182,10 @@ func UpdateComment(ctx context.Context, doer *user_model.User, c *issues_model.C // DeleteComment notifies delete comment to notifiers func DeleteComment(ctx context.Context, doer *user_model.User, c *issues_model.Comment) { + if !shouldSendCommentChangeNotification(ctx, c) { + return + } + for _, notifier := range notifiers { notifier.DeleteComment(ctx, doer, c) } @@ -376,6 +399,12 @@ func CreateCommitStatus(ctx context.Context, repo *repo_model.Repository, commit } } +func WorkflowRunStatusUpdate(ctx context.Context, repo *repo_model.Repository, sender *user_model.User, run *actions_model.ActionRun) { + for _, notifier := range notifiers { + notifier.WorkflowRunStatusUpdate(ctx, repo, sender, run) + } +} + func WorkflowJobStatusUpdate(ctx context.Context, repo *repo_model.Repository, sender *user_model.User, job *actions_model.ActionRunJob, task *actions_model.ActionTask) { for _, notifier := range notifiers { notifier.WorkflowJobStatusUpdate(ctx, repo, sender, job, task) diff --git a/services/notify/null.go b/services/notify/null.go index 9c794a2342..c3085d7c9e 100644 --- a/services/notify/null.go +++ b/services/notify/null.go @@ -214,5 +214,8 @@ func (*NullNotifier) ChangeDefaultBranch(ctx context.Context, repo *repo_model.R func (*NullNotifier) CreateCommitStatus(ctx context.Context, repo *repo_model.Repository, commit *repository.PushCommit, sender *user_model.User, status *git_model.CommitStatus) { } +func (*NullNotifier) WorkflowRunStatusUpdate(ctx context.Context, repo *repo_model.Repository, sender *user_model.User, run *actions_model.ActionRun) { +} + func (*NullNotifier) WorkflowJobStatusUpdate(ctx context.Context, repo *repo_model.Repository, sender *user_model.User, job *actions_model.ActionRunJob, task *actions_model.ActionTask) { } diff --git a/services/oauth2_provider/access_token.go b/services/oauth2_provider/access_token.go index 5cb6fb64c5..5a190d8616 100644 --- a/services/oauth2_provider/access_token.go +++ b/services/oauth2_provider/access_token.go @@ -1,12 +1,13 @@ // Copyright 2024 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package oauth2_provider //nolint +package oauth2_provider import ( "context" "fmt" "slices" + "strconv" "strings" auth "code.gitea.io/gitea/models/auth" @@ -15,7 +16,9 @@ import ( user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" + api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/timeutil" + "code.gitea.io/gitea/modules/util" "github.com/golang-jwt/jwt/v5" ) @@ -82,7 +85,7 @@ func GrantAdditionalScopes(grantScopes string) auth.AccessTokenScope { } var accessScopes []string // the scopes for access control, but not for general information - for _, scope := range strings.Split(grantScopes, " ") { + for scope := range strings.SplitSeq(grantScopes, " ") { if scope != "" && !slices.Contains(generalScopesSupported, scope) { accessScopes = append(accessScopes, scope) } @@ -103,6 +106,20 @@ func GrantAdditionalScopes(grantScopes string) auth.AccessTokenScope { return auth.AccessTokenScopeAll } +func NewJwtRegisteredClaimsFromUser(clientID string, grantUserID int64, exp *jwt.NumericDate) jwt.RegisteredClaims { + // https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfig + // The issuer value returned MUST be identical to the Issuer URL that was used as the prefix to /.well-known/openid-configuration + // to retrieve the configuration information. This MUST also be identical to the "iss" Claim value in ID Tokens issued from this Issuer. + // * https://accounts.google.com/.well-known/openid-configuration + // * https://github.com/login/oauth/.well-known/openid-configuration + return jwt.RegisteredClaims{ + Issuer: strings.TrimSuffix(setting.AppURL, "/"), + Audience: []string{clientID}, + Subject: strconv.FormatInt(grantUserID, 10), + ExpiresAt: exp, + } +} + func NewAccessTokenResponse(ctx context.Context, grant *auth.OAuth2Grant, serverKey, clientKey JWTSigningKey) (*AccessTokenResponse, *AccessTokenError) { if setting.OAuth2.InvalidateRefreshTokens { if err := grant.IncreaseCounter(ctx); err != nil { @@ -173,13 +190,8 @@ func NewAccessTokenResponse(ctx context.Context, grant *auth.OAuth2Grant, server } idToken := &OIDCToken{ - RegisteredClaims: jwt.RegisteredClaims{ - ExpiresAt: jwt.NewNumericDate(expirationDate.AsTime()), - Issuer: setting.AppURL, - Audience: []string{app.ClientID}, - Subject: fmt.Sprint(grant.UserID), - }, - Nonce: grant.Nonce, + RegisteredClaims: NewJwtRegisteredClaimsFromUser(app.ClientID, grant.UserID, jwt.NewNumericDate(expirationDate.AsTime())), + Nonce: grant.Nonce, } if grant.ScopeContains("profile") { idToken.Name = user.DisplayName() @@ -230,12 +242,11 @@ func NewAccessTokenResponse(ctx context.Context, grant *auth.OAuth2Grant, server }, nil } -// returns a list of "org" and "org:team" strings, -// that the given user is a part of. +// GetOAuthGroupsForUser returns a list of "org" and "org:team" strings, that the given user is a part of. 
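// Worked example for NewJwtRegisteredClaimsFromUser above, using hypothetical values:
// with setting.AppURL = "https://gitea.example.com/", clientID = "abc123" and
// grantUserID = 42, the registered claims come out as
//
//	Issuer:    "https://gitea.example.com"   (AppURL with the trailing "/" trimmed,
//	                                          so it matches the /.well-known/openid-configuration prefix)
//	Audience:  []string{"abc123"}
//	Subject:   "42"                          (strconv.FormatInt of the grant's user ID)
//	ExpiresAt: whatever expiry the caller passes in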
func GetOAuthGroupsForUser(ctx context.Context, user *user_model.User, onlyPublicGroups bool) ([]string, error) { orgs, err := db.Find[org_model.Organization](ctx, org_model.FindOrgOptions{ - UserID: user.ID, - IncludePrivate: !onlyPublicGroups, + UserID: user.ID, + IncludeVisibility: util.Iif(onlyPublicGroups, api.VisibleTypePublic, api.VisibleTypePrivate), }) if err != nil { return nil, fmt.Errorf("GetUserOrgList: %w", err) diff --git a/services/oauth2_provider/additional_scopes_test.go b/services/oauth2_provider/additional_scopes_test.go index 2d4df7aea2..5f375346dc 100644 --- a/services/oauth2_provider/additional_scopes_test.go +++ b/services/oauth2_provider/additional_scopes_test.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package oauth2_provider //nolint +package oauth2_provider import ( "testing" diff --git a/services/oauth2_provider/init.go b/services/oauth2_provider/init.go index e5958099a6..c412bd6433 100644 --- a/services/oauth2_provider/init.go +++ b/services/oauth2_provider/init.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package oauth2_provider //nolint +package oauth2_provider import ( "context" diff --git a/services/oauth2_provider/jwtsigningkey.go b/services/oauth2_provider/jwtsigningkey.go index 6c668db463..03c7403f75 100644 --- a/services/oauth2_provider/jwtsigningkey.go +++ b/services/oauth2_provider/jwtsigningkey.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package oauth2_provider //nolint +package oauth2_provider import ( "crypto/ecdsa" @@ -31,7 +31,7 @@ type ErrInvalidAlgorithmType struct { } func (err ErrInvalidAlgorithmType) Error() string { - return fmt.Sprintf("JWT signing algorithm is not supported: %s", err.Algorithm) + return "JWT signing algorithm is not supported: " + err.Algorithm } // JWTSigningKey represents a algorithm/key pair to sign JWTs diff --git a/services/oauth2_provider/token.go b/services/oauth2_provider/token.go index b71b11906e..935c4cc01f 100644 --- a/services/oauth2_provider/token.go +++ b/services/oauth2_provider/token.go @@ -1,9 +1,10 @@ // Copyright 2021 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package oauth2_provider //nolint +package oauth2_provider import ( + "errors" "fmt" "time" @@ -44,12 +45,12 @@ func ParseToken(jwtToken string, signingKey JWTSigningKey) (*Token, error) { return nil, err } if !parsedToken.Valid { - return nil, fmt.Errorf("invalid token") + return nil, errors.New("invalid token") } var token *Token var ok bool if token, ok = parsedToken.Claims.(*Token); !ok || !parsedToken.Valid { - return nil, fmt.Errorf("invalid token") + return nil, errors.New("invalid token") } return token, nil } diff --git a/services/org/team.go b/services/org/team.go index ee3bd898ea..773bd11f49 100644 --- a/services/org/team.go +++ b/services/org/team.go @@ -54,39 +54,33 @@ func NewTeam(ctx context.Context, t *organization.Team) (err error) { return organization.ErrTeamAlreadyExist{OrgID: t.OrgID, Name: t.LowerName} } - ctx, committer, err := db.TxContext(ctx) - if err != nil { - return err - } - defer committer.Close() - - if err = db.Insert(ctx, t); err != nil { - return err - } - - // insert units for team - if len(t.Units) > 0 { - for _, unit := range t.Units { - unit.TeamID = t.ID - } - if err = db.Insert(ctx, &t.Units); err != nil { + return db.WithTx(ctx, func(ctx context.Context) error { + if err = db.Insert(ctx, t); err != nil { return err } - } - // Add all repositories to the team if it has access to all of them. - if t.IncludesAllRepositories { - err = repo_service.AddAllRepositoriesToTeam(ctx, t) - if err != nil { - return fmt.Errorf("addAllRepositories: %w", err) + // insert units for team + if len(t.Units) > 0 { + for _, unit := range t.Units { + unit.TeamID = t.ID + } + if err = db.Insert(ctx, &t.Units); err != nil { + return err + } + } + + // Add all repositories to the team if it has access to all of them. + if t.IncludesAllRepositories { + err = repo_service.AddAllRepositoriesToTeam(ctx, t) + if err != nil { + return fmt.Errorf("addAllRepositories: %w", err) + } } - } - // Update organization number of teams. - if _, err = db.Exec(ctx, "UPDATE `user` SET num_teams=num_teams+1 WHERE id = ?", t.OrgID); err != nil { + // Update organization number of teams. + _, err = db.Exec(ctx, "UPDATE `user` SET num_teams=num_teams+1 WHERE id = ?", t.OrgID) return err - } - return committer.Commit() + }) } // UpdateTeam updates information of team. @@ -99,128 +93,117 @@ func UpdateTeam(ctx context.Context, t *organization.Team, authChanged, includeA t.Description = t.Description[:255] } - ctx, committer, err := db.TxContext(ctx) - if err != nil { - return err - } - defer committer.Close() - - t.LowerName = strings.ToLower(t.Name) - has, err := db.Exist[organization.Team](ctx, builder.Eq{ - "org_id": t.OrgID, - "lower_name": t.LowerName, - }.And(builder.Neq{"id": t.ID}), - ) - if err != nil { - return err - } else if has { - return organization.ErrTeamAlreadyExist{OrgID: t.OrgID, Name: t.LowerName} - } - - sess := db.GetEngine(ctx) - if _, err = sess.ID(t.ID).Cols("name", "lower_name", "description", - "can_create_org_repo", "authorize", "includes_all_repositories").Update(t); err != nil { - return fmt.Errorf("update: %w", err) - } - - // update units for team - if len(t.Units) > 0 { - for _, unit := range t.Units { - unit.TeamID = t.ID - } - // Delete team-unit. - if _, err := sess. - Where("team_id=?", t.ID). 
- Delete(new(organization.TeamUnit)); err != nil { + return db.WithTx(ctx, func(ctx context.Context) error { + t.LowerName = strings.ToLower(t.Name) + has, err := db.Exist[organization.Team](ctx, builder.Eq{ + "org_id": t.OrgID, + "lower_name": t.LowerName, + }.And(builder.Neq{"id": t.ID}), + ) + if err != nil { return err + } else if has { + return organization.ErrTeamAlreadyExist{OrgID: t.OrgID, Name: t.LowerName} } - if _, err = sess.Cols("org_id", "team_id", "type", "access_mode").Insert(&t.Units); err != nil { - return err + + sess := db.GetEngine(ctx) + if _, err = sess.ID(t.ID).Cols("name", "lower_name", "description", + "can_create_org_repo", "authorize", "includes_all_repositories").Update(t); err != nil { + return fmt.Errorf("update: %w", err) } - } - // Update access for team members if needed. - if authChanged { - repos, err := repo_model.GetTeamRepositories(ctx, &repo_model.SearchTeamRepoOptions{ - TeamID: t.ID, - }) - if err != nil { - return fmt.Errorf("GetTeamRepositories: %w", err) + // update units for team + if len(t.Units) > 0 { + for _, unit := range t.Units { + unit.TeamID = t.ID + } + // Delete team-unit. + if _, err := sess. + Where("team_id=?", t.ID). + Delete(new(organization.TeamUnit)); err != nil { + return err + } + if _, err = sess.Cols("org_id", "team_id", "type", "access_mode").Insert(&t.Units); err != nil { + return err + } } - for _, repo := range repos { - if err = access_model.RecalculateTeamAccesses(ctx, repo, 0); err != nil { - return fmt.Errorf("recalculateTeamAccesses: %w", err) + // Update access for team members if needed. + if authChanged { + repos, err := repo_model.GetTeamRepositories(ctx, &repo_model.SearchTeamRepoOptions{ + TeamID: t.ID, + }) + if err != nil { + return fmt.Errorf("GetTeamRepositories: %w", err) + } + + for _, repo := range repos { + if err = access_model.RecalculateTeamAccesses(ctx, repo, 0); err != nil { + return fmt.Errorf("recalculateTeamAccesses: %w", err) + } } } - } - // Add all repositories to the team if it has access to all of them. - if includeAllChanged && t.IncludesAllRepositories { - err = repo_service.AddAllRepositoriesToTeam(ctx, t) - if err != nil { - return fmt.Errorf("addAllRepositories: %w", err) + // Add all repositories to the team if it has access to all of them. + if includeAllChanged && t.IncludesAllRepositories { + err = repo_service.AddAllRepositoriesToTeam(ctx, t) + if err != nil { + return fmt.Errorf("addAllRepositories: %w", err) + } } - } - return committer.Commit() + return nil + }) } // DeleteTeam deletes given team. // It's caller's responsibility to assign organization ID. func DeleteTeam(ctx context.Context, t *organization.Team) error { - ctx, committer, err := db.TxContext(ctx) - if err != nil { - return err - } - defer committer.Close() - - if err := t.LoadMembers(ctx); err != nil { - return err - } - - // update branch protections - { - protections := make([]*git_model.ProtectedBranch, 0, 10) - err := db.GetEngine(ctx).In("repo_id", - builder.Select("id").From("repository").Where(builder.Eq{"owner_id": t.OrgID})). 
- Find(&protections) - if err != nil { - return fmt.Errorf("findProtectedBranches: %w", err) + return db.WithTx(ctx, func(ctx context.Context) error { + if err := t.LoadMembers(ctx); err != nil { + return err } - for _, p := range protections { - if err := git_model.RemoveTeamIDFromProtectedBranch(ctx, p, t.ID); err != nil { - return err + + // update branch protections + { + protections := make([]*git_model.ProtectedBranch, 0, 10) + err := db.GetEngine(ctx).In("repo_id", + builder.Select("id").From("repository").Where(builder.Eq{"owner_id": t.OrgID})). + Find(&protections) + if err != nil { + return fmt.Errorf("findProtectedBranches: %w", err) + } + for _, p := range protections { + if err := git_model.RemoveTeamIDFromProtectedBranch(ctx, p, t.ID); err != nil { + return err + } } } - } - - if err := repo_service.RemoveAllRepositoriesFromTeam(ctx, t); err != nil { - return err - } - if err := db.DeleteBeans(ctx, - &organization.Team{ID: t.ID}, - &organization.TeamUser{OrgID: t.OrgID, TeamID: t.ID}, - &organization.TeamUnit{TeamID: t.ID}, - &organization.TeamInvite{TeamID: t.ID}, - &issues_model.Review{Type: issues_model.ReviewTypeRequest, ReviewerTeamID: t.ID}, // batch delete the binding relationship between team and PR (request review from team) - ); err != nil { - return err - } + if err := repo_service.RemoveAllRepositoriesFromTeam(ctx, t); err != nil { + return err + } - for _, tm := range t.Members { - if err := removeInvalidOrgUser(ctx, t.OrgID, tm); err != nil { + if err := db.DeleteBeans(ctx, + &organization.Team{ID: t.ID}, + &organization.TeamUser{OrgID: t.OrgID, TeamID: t.ID}, + &organization.TeamUnit{TeamID: t.ID}, + &organization.TeamInvite{TeamID: t.ID}, + &issues_model.Review{Type: issues_model.ReviewTypeRequest, ReviewerTeamID: t.ID}, // batch delete the binding relationship between team and PR (request review from team) + ); err != nil { return err } - } - // Update organization number of teams. - if _, err := db.Exec(ctx, "UPDATE `user` SET num_teams=num_teams-1 WHERE id=?", t.OrgID); err != nil { - return err - } + for _, tm := range t.Members { + if err := removeInvalidOrgUser(ctx, t.OrgID, tm); err != nil { + return err + } + } - return committer.Commit() + // Update organization number of teams. + _, err := db.Exec(ctx, "UPDATE `user` SET num_teams=num_teams-1 WHERE id=?", t.OrgID) + return err + }) } // AddTeamMember adds new membership of given team to given organization, @@ -259,37 +242,6 @@ func AddTeamMember(ctx context.Context, team *organization.Team, user *user_mode } team.NumMembers++ - - // Give access to team repositories. - // update exist access if mode become bigger - subQuery := builder.Select("repo_id").From("team_repo"). - Where(builder.Eq{"team_id": team.ID}) - - if _, err := sess.Where("user_id=?", user.ID). - In("repo_id", subQuery). - And("mode < ?", team.AccessMode). - SetExpr("mode", team.AccessMode). 
- Update(new(access_model.Access)); err != nil { - return fmt.Errorf("update user accesses: %w", err) - } - - // for not exist access - var repoIDs []int64 - accessSubQuery := builder.Select("repo_id").From("access").Where(builder.Eq{"user_id": user.ID}) - if err := sess.SQL(subQuery.And(builder.NotIn("repo_id", accessSubQuery))).Find(&repoIDs); err != nil { - return fmt.Errorf("select id accesses: %w", err) - } - - accesses := make([]*access_model.Access, 0, 100) - for i, repoID := range repoIDs { - accesses = append(accesses, &access_model.Access{RepoID: repoID, UserID: user.ID, Mode: team.AccessMode}) - if (i%100 == 0 || i == len(repoIDs)-1) && len(accesses) > 0 { - if err = db.Insert(ctx, accesses); err != nil { - return fmt.Errorf("insert new user accesses: %w", err) - } - accesses = accesses[:0] - } - } return nil }) if err != nil { @@ -394,13 +346,7 @@ func removeInvalidOrgUser(ctx context.Context, orgID int64, user *user_model.Use // RemoveTeamMember removes member from given team of given organization. func RemoveTeamMember(ctx context.Context, team *organization.Team, user *user_model.User) error { - ctx, committer, err := db.TxContext(ctx) - if err != nil { - return err - } - defer committer.Close() - if err := removeTeamMember(ctx, team, user); err != nil { - return err - } - return committer.Commit() + return db.WithTx(ctx, func(ctx context.Context) error { + return removeTeamMember(ctx, team, user) + }) } diff --git a/services/org/team_test.go b/services/org/team_test.go index 3791776e46..c1a69d8ee7 100644 --- a/services/org/team_test.go +++ b/services/org/team_test.go @@ -88,7 +88,7 @@ func TestUpdateTeam(t *testing.T) { assert.True(t, strings.HasPrefix(team.Description, "A long description!")) access := unittest.AssertExistsAndLoadBean(t, &access_model.Access{UserID: 4, RepoID: 3}) - assert.EqualValues(t, perm.AccessModeAdmin, access.Mode) + assert.Equal(t, perm.AccessModeAdmin, access.Mode) unittest.CheckConsistencyFor(t, &organization.Team{ID: team.ID}) } @@ -166,24 +166,6 @@ func TestRemoveTeamMember(t *testing.T) { assert.True(t, organization.IsErrLastOrgOwner(err)) } -func TestRepository_RecalculateAccesses3(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - team5 := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: 5}) - user29 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 29}) - - has, err := db.GetEngine(db.DefaultContext).Get(&access_model.Access{UserID: user29.ID, RepoID: 23}) - assert.NoError(t, err) - assert.False(t, has) - - // adding user29 to team5 should add an explicit access row for repo 23 - // even though repo 23 is public - assert.NoError(t, AddTeamMember(db.DefaultContext, team5, user29)) - - has, err = db.GetEngine(db.DefaultContext).Get(&access_model.Access{UserID: user29.ID, RepoID: 23}) - assert.NoError(t, err) - assert.True(t, has) -} - func TestIncludesAllRepositoriesTeams(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) @@ -222,8 +204,9 @@ func TestIncludesAllRepositoriesTeams(t *testing.T) { // Create repos. 
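The org/team.go changes above all follow one refactor: the manual db.TxContext / committer.Close / committer.Commit sequence is replaced with db.WithTx, which commits when the callback returns nil and rolls back otherwise. A condensed before/after sketch of the two patterns, with doSomething standing in for the per-function body:

    package example

    import (
        "context"

        "code.gitea.io/gitea/models/db"
    )

    // doSomething is a placeholder for the work done inside the transaction.
    func doSomething(ctx context.Context) error { return nil }

    // oldStyle is the pattern being removed in this diff.
    func oldStyle(ctx context.Context) error {
        ctx, committer, err := db.TxContext(ctx)
        if err != nil {
            return err
        }
        defer committer.Close()
        if err := doSomething(ctx); err != nil {
            return err
        }
        return committer.Commit()
    }

    // newStyle is the pattern the diff converges on: commit on nil, rollback on error.
    func newStyle(ctx context.Context) error {
        return db.WithTx(ctx, func(ctx context.Context) error {
            return doSomething(ctx)
        })
    }

The practical difference is that every early-return error path now rolls back automatically instead of relying on the deferred committer.Close().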
repoIDs := make([]int64, 0) - for i := 0; i < 3; i++ { - r, err := repo_service.CreateRepositoryDirectly(db.DefaultContext, user, org.AsUser(), repo_service.CreateRepoOptions{Name: fmt.Sprintf("repo-%d", i)}) + for i := range 3 { + r, err := repo_service.CreateRepositoryDirectly(db.DefaultContext, user, org.AsUser(), + repo_service.CreateRepoOptions{Name: fmt.Sprintf("repo-%d", i)}, true) assert.NoError(t, err, "CreateRepository %d", i) if r != nil { repoIDs = append(repoIDs, r.ID) @@ -285,7 +268,7 @@ func TestIncludesAllRepositoriesTeams(t *testing.T) { } // Create repo and check teams repositories. - r, err := repo_service.CreateRepositoryDirectly(db.DefaultContext, user, org.AsUser(), repo_service.CreateRepoOptions{Name: "repo-last"}) + r, err := repo_service.CreateRepositoryDirectly(db.DefaultContext, user, org.AsUser(), repo_service.CreateRepoOptions{Name: "repo-last"}, true) assert.NoError(t, err, "CreateRepository last") if r != nil { repoIDs = append(repoIDs, r.ID) @@ -298,7 +281,7 @@ func TestIncludesAllRepositoriesTeams(t *testing.T) { } // Remove repo and check teams repositories. - assert.NoError(t, repo_service.DeleteRepositoryDirectly(db.DefaultContext, user, repoIDs[0]), "DeleteRepository") + assert.NoError(t, repo_service.DeleteRepositoryDirectly(db.DefaultContext, repoIDs[0]), "DeleteRepository") teamRepos[0] = repoIDs[1:] teamRepos[1] = repoIDs[1:] teamRepos[3] = repoIDs[1:3] @@ -310,7 +293,7 @@ func TestIncludesAllRepositoriesTeams(t *testing.T) { // Wipe created items. for i, rid := range repoIDs { if i > 0 { // first repo already deleted. - assert.NoError(t, repo_service.DeleteRepositoryDirectly(db.DefaultContext, user, rid), "DeleteRepository %d", i) + assert.NoError(t, repo_service.DeleteRepositoryDirectly(db.DefaultContext, rid), "DeleteRepository %d", i) } } assert.NoError(t, DeleteOrganization(db.DefaultContext, org, false), "DeleteOrganization") diff --git a/services/org/user.go b/services/org/user.go index 3565ecc2fc..26927253d2 100644 --- a/services/org/user.go +++ b/services/org/user.go @@ -64,10 +64,11 @@ func RemoveOrgUser(ctx context.Context, org *organization.Organization, user *us if err != nil { return fmt.Errorf("AccessibleReposEnv: %w", err) } - repoIDs, err := env.RepoIDs(ctx, 1, org.NumRepos) + repoIDs, err := env.RepoIDs(ctx) if err != nil { return fmt.Errorf("GetUserRepositories [%d]: %w", user.ID, err) } + for _, repoID := range repoIDs { repo, err := repo_model.GetRepositoryByID(ctx, repoID) if err != nil { diff --git a/services/org/user_test.go b/services/org/user_test.go index 96d1a1c8ca..c61d600d90 100644 --- a/services/org/user_test.go +++ b/services/org/user_test.go @@ -53,7 +53,7 @@ func TestRemoveOrgUser(t *testing.T) { assert.NoError(t, RemoveOrgUser(db.DefaultContext, org, user)) unittest.AssertNotExistsBean(t, &organization.OrgUser{OrgID: org.ID, UID: user.ID}) org = unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: org.ID}) - assert.EqualValues(t, expectedNumMembers, org.NumMembers) + assert.Equal(t, expectedNumMembers, org.NumMembers) } org3 := unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 3}) diff --git a/services/packages/alpine/repository.go b/services/packages/alpine/repository.go index 27e6391980..277c188874 100644 --- a/services/packages/alpine/repository.go +++ b/services/packages/alpine/repository.go @@ -290,7 +290,7 @@ func buildPackagesIndex(ctx context.Context, ownerID int64, repoVersion *package privPem, _ := pem.Decode([]byte(priv)) if privPem == nil { - return fmt.Errorf("failed to 
decode private key pem") + return errors.New("failed to decode private key pem") } privKey, err := x509.ParsePKCS1PrivateKey(privPem.Bytes) diff --git a/services/packages/arch/repository.go b/services/packages/arch/repository.go index a12af82ba5..438bb10837 100644 --- a/services/packages/arch/repository.go +++ b/services/packages/arch/repository.go @@ -13,6 +13,7 @@ import ( "fmt" "io" "os" + "strconv" "strings" packages_model "code.gitea.io/gitea/models/packages" @@ -372,8 +373,8 @@ func writeDescription(tw *tar.Writer, opts *entryOptions) error { {"MD5SUM", opts.Blob.HashMD5}, {"SHA256SUM", opts.Blob.HashSHA256}, {"PGPSIG", opts.Signature}, - {"CSIZE", fmt.Sprintf("%d", opts.Blob.Size)}, - {"ISIZE", fmt.Sprintf("%d", opts.FileMetadata.InstalledSize)}, + {"CSIZE", strconv.FormatInt(opts.Blob.Size, 10)}, + {"ISIZE", strconv.FormatInt(opts.FileMetadata.InstalledSize, 10)}, {"NAME", opts.Package.Name}, {"BASE", opts.FileMetadata.Base}, {"ARCH", opts.FileMetadata.Architecture}, @@ -382,7 +383,7 @@ func writeDescription(tw *tar.Writer, opts *entryOptions) error { {"URL", opts.VersionMetadata.ProjectURL}, {"LICENSE", strings.Join(opts.VersionMetadata.Licenses, "\n")}, {"GROUPS", strings.Join(opts.FileMetadata.Groups, "\n")}, - {"BUILDDATE", fmt.Sprintf("%d", opts.FileMetadata.BuildDate)}, + {"BUILDDATE", strconv.FormatInt(opts.FileMetadata.BuildDate, 10)}, {"PACKAGER", opts.FileMetadata.Packager}, {"PROVIDES", strings.Join(opts.FileMetadata.Provides, "\n")}, {"REPLACES", strings.Join(opts.FileMetadata.Replaces, "\n")}, diff --git a/services/packages/arch/vercmp.go b/services/packages/arch/vercmp.go index 0d33dda0f1..d44aa530f0 100644 --- a/services/packages/arch/vercmp.go +++ b/services/packages/arch/vercmp.go @@ -34,13 +34,8 @@ func parseEVR(evr string) (epoch, version, release string) { func compareSegments(a, b []string) int { lenA, lenB := len(a), len(b) - var l int - if lenA > lenB { - l = lenB - } else { - l = lenA - } - for i := 0; i < l; i++ { + l := min(lenA, lenB) + for i := range l { if r := compare(a[i], b[i]); r != 0 { return r } diff --git a/services/packages/auth.go b/services/packages/auth.go index 4526a8e303..6e87643e29 100644 --- a/services/packages/auth.go +++ b/services/packages/auth.go @@ -4,6 +4,7 @@ package packages import ( + "errors" "fmt" "net/http" "strings" @@ -58,7 +59,7 @@ func ParseAuthorizationRequest(req *http.Request) (*PackageMeta, error) { parts := strings.SplitN(h, " ", 2) if len(parts) != 2 { log.Error("split token failed: %s", h) - return nil, fmt.Errorf("split token failed") + return nil, errors.New("split token failed") } return ParseAuthorizationToken(parts[1]) @@ -77,7 +78,7 @@ func ParseAuthorizationToken(tokenStr string) (*PackageMeta, error) { c, ok := token.Claims.(*packageClaims) if !token.Valid || !ok { - return nil, fmt.Errorf("invalid token claim") + return nil, errors.New("invalid token claim") } return &c.PackageMeta, nil diff --git a/services/packages/cargo/index.go b/services/packages/cargo/index.go index 0c8a98c40f..605335d0f1 100644 --- a/services/packages/cargo/index.go +++ b/services/packages/cargo/index.go @@ -213,7 +213,7 @@ func getOrCreateIndexRepository(ctx context.Context, doer, owner *user_model.Use if errors.Is(err, util.ErrNotExist) { repo, err = repo_service.CreateRepositoryDirectly(ctx, doer, owner, repo_service.CreateRepoOptions{ Name: IndexRepositoryName, - }) + }, true) if err != nil { return nil, fmt.Errorf("CreateRepository: %w", err) } @@ -247,7 +247,7 @@ func createOrUpdateConfigFile(ctx context.Context, repo 
*repo_model.Repository, "Initialize Cargo Config", func(t *files_service.TemporaryUploadRepository) error { var b bytes.Buffer - err := json.NewEncoder(&b).Encode(BuildConfig(owner, setting.Service.RequireSignInView || owner.Visibility != structs.VisibleTypePublic || repo.IsPrivate)) + err := json.NewEncoder(&b).Encode(BuildConfig(owner, setting.Service.RequireSignInViewStrict || owner.Visibility != structs.VisibleTypePublic || repo.IsPrivate)) if err != nil { return err } @@ -310,7 +310,7 @@ func alterRepositoryContent(ctx context.Context, doer *user_model.User, repo *re } func writeObjectToIndex(ctx context.Context, t *files_service.TemporaryUploadRepository, path string, r io.Reader) error { - hash, err := t.HashObject(ctx, r) + hash, err := t.HashObjectAndWrite(ctx, r) if err != nil { return err } diff --git a/services/packages/cleanup/cleanup.go b/services/packages/cleanup/cleanup.go index b7ba2b6ac4..ec860db1bb 100644 --- a/services/packages/cleanup/cleanup.go +++ b/services/packages/cleanup/cleanup.go @@ -32,165 +32,170 @@ func CleanupTask(ctx context.Context, olderThan time.Duration) error { return CleanupExpiredData(ctx, olderThan) } -func ExecuteCleanupRules(outerCtx context.Context) error { - ctx, committer, err := db.TxContext(outerCtx) +func executeCleanupOneRulePackage(ctx context.Context, pcr *packages_model.PackageCleanupRule, p *packages_model.Package) (versionDeleted bool, err error) { + olderThan := time.Now().AddDate(0, 0, -pcr.RemoveDays) + pvs, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{ + PackageID: p.ID, + IsInternal: optional.Some(false), + Sort: packages_model.SortCreatedDesc, + }) if err != nil { - return err + return false, fmt.Errorf("CleanupRule [%d]: SearchVersions failed: %w", pcr.ID, err) } - defer committer.Close() - - err = packages_model.IterateEnabledCleanupRules(ctx, func(ctx context.Context, pcr *packages_model.PackageCleanupRule) error { - select { - case <-outerCtx.Done(): - return db.ErrCancelledf("While processing package cleanup rules") - default: + if pcr.KeepCount > 0 { + if pcr.KeepCount < len(pvs) { + pvs = pvs[pcr.KeepCount:] + } else { + pvs = nil } - - if err := pcr.CompiledPattern(); err != nil { - return fmt.Errorf("CleanupRule [%d]: CompilePattern failed: %w", pcr.ID, err) + } + for _, pv := range pvs { + if pcr.Type == packages_model.TypeContainer { + if skip, err := container_service.ShouldBeSkipped(ctx, pcr, p, pv); err != nil { + return false, fmt.Errorf("CleanupRule [%d]: container.ShouldBeSkipped failed: %w", pcr.ID, err) + } else if skip { + log.Debug("Rule[%d]: keep '%s/%s' (container)", pcr.ID, p.Name, pv.Version) + continue + } } - - olderThan := time.Now().AddDate(0, 0, -pcr.RemoveDays) - - packages, err := packages_model.GetPackagesByType(ctx, pcr.OwnerID, pcr.Type) - if err != nil { - return fmt.Errorf("CleanupRule [%d]: GetPackagesByType failed: %w", pcr.ID, err) + toMatch := pv.LowerVersion + if pcr.MatchFullName { + toMatch = p.LowerName + "/" + pv.LowerVersion + } + if pcr.KeepPatternMatcher != nil && pcr.KeepPatternMatcher.MatchString(toMatch) { + log.Debug("Rule[%d]: keep '%s/%s' (keep pattern)", pcr.ID, p.Name, pv.Version) + continue + } + if pv.CreatedUnix.AsLocalTime().After(olderThan) { + log.Debug("Rule[%d]: keep '%s/%s' (remove days) %v", pcr.ID, p.Name, pv.Version, pv.CreatedUnix.FormatDate()) + continue } + if pcr.RemovePatternMatcher != nil && !pcr.RemovePatternMatcher.MatchString(toMatch) { + log.Debug("Rule[%d]: keep '%s/%s' (remove pattern)", pcr.ID, p.Name, 
pv.Version) + continue + } + log.Debug("Rule[%d]: remove '%s/%s'", pcr.ID, p.Name, pv.Version) + if err := packages_service.DeletePackageVersionAndReferences(ctx, pv); err != nil { + log.Error("CleanupRule [%d]: DeletePackageVersionAndReferences failed: %v", pcr.ID, err) + continue + } + versionDeleted = true + } + return versionDeleted, nil +} - anyVersionDeleted := false - for _, p := range packages { - pvs, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{ - PackageID: p.ID, - IsInternal: optional.Some(false), - Sort: packages_model.SortCreatedDesc, - Paginator: db.NewAbsoluteListOptions(pcr.KeepCount, 200), - }) - if err != nil { - return fmt.Errorf("CleanupRule [%d]: SearchVersions failed: %w", pcr.ID, err) - } - versionDeleted := false - for _, pv := range pvs { - if pcr.Type == packages_model.TypeContainer { - if skip, err := container_service.ShouldBeSkipped(ctx, pcr, p, pv); err != nil { - return fmt.Errorf("CleanupRule [%d]: container.ShouldBeSkipped failed: %w", pcr.ID, err) - } else if skip { - log.Debug("Rule[%d]: keep '%s/%s' (container)", pcr.ID, p.Name, pv.Version) - continue - } - } +func executeCleanupOneRule(ctx context.Context, pcr *packages_model.PackageCleanupRule) error { + if err := pcr.CompiledPattern(); err != nil { + return fmt.Errorf("CleanupRule [%d]: CompilePattern failed: %w", pcr.ID, err) + } - toMatch := pv.LowerVersion - if pcr.MatchFullName { - toMatch = p.LowerName + "/" + pv.LowerVersion - } + packages, err := packages_model.GetPackagesByType(ctx, pcr.OwnerID, pcr.Type) + if err != nil { + return fmt.Errorf("CleanupRule [%d]: GetPackagesByType failed: %w", pcr.ID, err) + } - if pcr.KeepPatternMatcher != nil && pcr.KeepPatternMatcher.MatchString(toMatch) { - log.Debug("Rule[%d]: keep '%s/%s' (keep pattern)", pcr.ID, p.Name, pv.Version) - continue - } - if pv.CreatedUnix.AsLocalTime().After(olderThan) { - log.Debug("Rule[%d]: keep '%s/%s' (remove days)", pcr.ID, p.Name, pv.Version) - continue - } - if pcr.RemovePatternMatcher != nil && !pcr.RemovePatternMatcher.MatchString(toMatch) { - log.Debug("Rule[%d]: keep '%s/%s' (remove pattern)", pcr.ID, p.Name, pv.Version) - continue + anyVersionDeleted := false + for _, p := range packages { + versionDeleted := false + err = db.WithTx(ctx, func(ctx context.Context) (err error) { + versionDeleted, err = executeCleanupOneRulePackage(ctx, pcr, p) + return err + }) + if err != nil { + log.Error("CleanupRule [%d]: executeCleanupOneRulePackage(%d) failed: %v", pcr.ID, p.ID, err) + continue + } + anyVersionDeleted = anyVersionDeleted || versionDeleted + if versionDeleted { + if pcr.Type == packages_model.TypeCargo { + owner, err := user_model.GetUserByID(ctx, pcr.OwnerID) + if err != nil { + return fmt.Errorf("GetUserByID failed: %w", err) } - - log.Debug("Rule[%d]: remove '%s/%s'", pcr.ID, p.Name, pv.Version) - - if err := packages_service.DeletePackageVersionAndReferences(ctx, pv); err != nil { - return fmt.Errorf("CleanupRule [%d]: DeletePackageVersionAndReferences failed: %w", pcr.ID, err) + if err := cargo_service.UpdatePackageIndexIfExists(ctx, owner, owner, p.ID); err != nil { + return fmt.Errorf("CleanupRule [%d]: cargo.UpdatePackageIndexIfExists failed: %w", pcr.ID, err) } + } + } + } - versionDeleted = true - anyVersionDeleted = true + if anyVersionDeleted { + switch pcr.Type { + case packages_model.TypeDebian: + if err := debian_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil { + return fmt.Errorf("CleanupRule [%d]: debian.BuildAllRepositoryFiles failed: %w", 
pcr.ID, err) + } + case packages_model.TypeAlpine: + if err := alpine_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil { + return fmt.Errorf("CleanupRule [%d]: alpine.BuildAllRepositoryFiles failed: %w", pcr.ID, err) + } + case packages_model.TypeRpm: + if err := rpm_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil { + return fmt.Errorf("CleanupRule [%d]: rpm.BuildAllRepositoryFiles failed: %w", pcr.ID, err) } + case packages_model.TypeArch: + release, err := arch_service.AquireRegistryLock(ctx, pcr.OwnerID) + if err != nil { + return err + } + defer release() - if versionDeleted { - if pcr.Type == packages_model.TypeCargo { - owner, err := user_model.GetUserByID(ctx, pcr.OwnerID) - if err != nil { - return fmt.Errorf("GetUserByID failed: %w", err) - } - if err := cargo_service.UpdatePackageIndexIfExists(ctx, owner, owner, p.ID); err != nil { - return fmt.Errorf("CleanupRule [%d]: cargo.UpdatePackageIndexIfExists failed: %w", pcr.ID, err) - } - } + if err := arch_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil { + return fmt.Errorf("CleanupRule [%d]: arch.BuildAllRepositoryFiles failed: %w", pcr.ID, err) } } + } + return nil +} - if anyVersionDeleted { - switch pcr.Type { - case packages_model.TypeDebian: - if err := debian_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil { - return fmt.Errorf("CleanupRule [%d]: debian.BuildAllRepositoryFiles failed: %w", pcr.ID, err) - } - case packages_model.TypeAlpine: - if err := alpine_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil { - return fmt.Errorf("CleanupRule [%d]: alpine.BuildAllRepositoryFiles failed: %w", pcr.ID, err) - } - case packages_model.TypeRpm: - if err := rpm_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil { - return fmt.Errorf("CleanupRule [%d]: rpm.BuildAllRepositoryFiles failed: %w", pcr.ID, err) - } - case packages_model.TypeArch: - release, err := arch_service.AquireRegistryLock(ctx, pcr.OwnerID) - if err != nil { - return err - } - defer release() +func ExecuteCleanupRules(ctx context.Context) error { + return packages_model.IterateEnabledCleanupRules(ctx, func(ctx context.Context, pcr *packages_model.PackageCleanupRule) error { + select { + case <-ctx.Done(): + return db.ErrCancelledf("While processing package cleanup rules") + default: + } - if err := arch_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil { - return fmt.Errorf("CleanupRule [%d]: arch.BuildAllRepositoryFiles failed: %w", pcr.ID, err) - } - } + err := executeCleanupOneRule(ctx, pcr) + if err != nil { + log.Error("CleanupRule [%d]: executeCleanupOneRule failed: %v", pcr.ID, err) } return nil }) - if err != nil { - return err - } - - return committer.Commit() } -func CleanupExpiredData(outerCtx context.Context, olderThan time.Duration) error { - ctx, committer, err := db.TxContext(outerCtx) - if err != nil { - return err - } - defer committer.Close() - - if err := container_service.Cleanup(ctx, olderThan); err != nil { - return err - } - - ps, err := packages_model.FindUnreferencedPackages(ctx) - if err != nil { - return err - } - for _, p := range ps { - if err := packages_model.DeleteAllProperties(ctx, packages_model.PropertyTypePackage, p.ID); err != nil { +func CleanupExpiredData(ctx context.Context, olderThan time.Duration) error { + pbs := make([]*packages_model.PackageBlob, 0, 100) + if err := db.WithTx(ctx, func(ctx context.Context) error { + if err := container_service.Cleanup(ctx, olderThan); err != nil { return err } - if err := 
packages_model.DeletePackageByID(ctx, p.ID); err != nil { + + ps, err := packages_model.FindUnreferencedPackages(ctx) + if err != nil { return err } - } - - pbs, err := packages_model.FindExpiredUnreferencedBlobs(ctx, olderThan) - if err != nil { - return err - } + for _, p := range ps { + if err := packages_model.DeleteAllProperties(ctx, packages_model.PropertyTypePackage, p.ID); err != nil { + return err + } + if err := packages_model.DeletePackageByID(ctx, p.ID); err != nil { + return err + } + } - for _, pb := range pbs { - if err := packages_model.DeleteBlobByID(ctx, pb.ID); err != nil { + pbs, err = packages_model.FindExpiredUnreferencedBlobs(ctx, olderThan) + if err != nil { return err } - } - if err := committer.Commit(); err != nil { + for _, pb := range pbs { + if err := packages_model.DeleteBlobByID(ctx, pb.ID); err != nil { + return err + } + } + return nil + }); err != nil { return err } diff --git a/services/packages/container/blob_uploader.go b/services/packages/container/blob_uploader.go index bae2e2d6af..27bc4a5421 100644 --- a/services/packages/container/blob_uploader.go +++ b/services/packages/container/blob_uploader.go @@ -12,7 +12,7 @@ import ( packages_model "code.gitea.io/gitea/models/packages" packages_module "code.gitea.io/gitea/modules/packages" "code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/modules/util" + "code.gitea.io/gitea/modules/tempdir" ) var ( @@ -30,8 +30,12 @@ type BlobUploader struct { reading bool } -func buildFilePath(id string) string { - return util.FilePathJoinAbs(setting.Packages.ChunkedUploadPath, id) +func uploadPathTempDir() *tempdir.TempDir { + return setting.AppDataTempDir("package-upload") +} + +func buildFilePath(uploadPath *tempdir.TempDir, id string) string { + return uploadPath.JoinPath(id) } // NewBlobUploader creates a new blob uploader for the given id @@ -48,7 +52,12 @@ func NewBlobUploader(ctx context.Context, id string) (*BlobUploader, error) { } } - f, err := os.OpenFile(buildFilePath(model.ID), os.O_RDWR|os.O_CREATE, 0o666) + uploadPath := uploadPathTempDir() + _, err = uploadPath.MkdirAllSub("") + if err != nil { + return nil, err + } + f, err := os.OpenFile(buildFilePath(uploadPath, model.ID), os.O_RDWR|os.O_CREATE, 0o666) if err != nil { return nil, err } @@ -118,13 +127,13 @@ func (u *BlobUploader) Read(p []byte) (int, error) { return u.file.Read(p) } -// Remove deletes the data and the model of a blob upload +// RemoveBlobUploadByID Remove deletes the data and the model of a blob upload func RemoveBlobUploadByID(ctx context.Context, id string) error { if err := packages_model.DeleteBlobUploadByID(ctx, id); err != nil { return err } - err := os.Remove(buildFilePath(id)) + err := os.Remove(buildFilePath(uploadPathTempDir(), id)) if err != nil && !os.IsNotExist(err) { return err } diff --git a/services/packages/container/cleanup.go b/services/packages/container/cleanup.go index 3f5f43bbc0..263562a396 100644 --- a/services/packages/container/cleanup.go +++ b/services/packages/container/cleanup.go @@ -13,7 +13,7 @@ import ( container_module "code.gitea.io/gitea/modules/packages/container" packages_service "code.gitea.io/gitea/services/packages" - digest "github.com/opencontainers/go-digest" + "github.com/opencontainers/go-digest" ) // Cleanup removes expired container data @@ -57,7 +57,7 @@ func cleanupExpiredUploadedBlobs(ctx context.Context, olderThan time.Duration) e Type: packages_model.TypeContainer, Version: packages_model.SearchValue{ ExactMatch: true, - Value: container_model.UploadVersion, + Value: 
container_module.UploadVersion, }, IsInternal: optional.Some(true), HasFiles: optional.Some(false), diff --git a/services/packages/container/common.go b/services/packages/container/common.go index 5a14ed5b7a..02cbff2286 100644 --- a/services/packages/container/common.go +++ b/services/packages/container/common.go @@ -5,11 +5,17 @@ package container import ( "context" + "io" "strings" packages_model "code.gitea.io/gitea/models/packages" + container_service "code.gitea.io/gitea/models/packages/container" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/packages" container_module "code.gitea.io/gitea/modules/packages/container" + + "github.com/opencontainers/image-spec/specs-go/v1" ) // UpdateRepositoryNames updates the repository name property for all packages of the specific owner @@ -22,7 +28,7 @@ func UpdateRepositoryNames(ctx context.Context, owner *user_model.User, newOwner newOwnerName = strings.ToLower(newOwnerName) for _, p := range ps { - if err := packages_model.DeletePropertyByName(ctx, packages_model.PropertyTypePackage, p.ID, container_module.PropertyRepository); err != nil { + if err := packages_model.DeletePropertiesByName(ctx, packages_model.PropertyTypePackage, p.ID, container_module.PropertyRepository); err != nil { return err } @@ -33,3 +39,26 @@ func UpdateRepositoryNames(ctx context.Context, owner *user_model.User, newOwner return nil } + +func ParseManifestMetadata(ctx context.Context, rd io.Reader, ownerID int64, imageName string) (*v1.Manifest, *packages_model.PackageFileDescriptor, *container_module.Metadata, error) { + var manifest v1.Manifest + if err := json.NewDecoder(rd).Decode(&manifest); err != nil { + return nil, nil, nil, err + } + configDescriptor, err := container_service.GetContainerBlob(ctx, &container_service.BlobSearchOptions{ + OwnerID: ownerID, + Image: imageName, + Digest: manifest.Config.Digest.String(), + }) + if err != nil { + return nil, nil, nil, err + } + + configReader, err := packages.NewContentStore().OpenBlob(packages.BlobHash256Key(configDescriptor.Blob.HashSHA256)) + if err != nil { + return nil, nil, nil, err + } + defer configReader.Close() + metadata, err := container_module.ParseImageConfig(manifest.Config.MediaType, configReader) + return &manifest, configDescriptor, metadata, err +} diff --git a/services/packages/packages.go b/services/packages/packages.go index bd1d460fd3..22b26b6563 100644 --- a/services/packages/packages.go +++ b/services/packages/packages.go @@ -9,6 +9,7 @@ import ( "errors" "fmt" "io" + "net/http" "net/url" "strings" @@ -468,24 +469,15 @@ func RemovePackageVersionByNameAndVersion(ctx context.Context, doer *user_model. 
// RemovePackageVersion deletes the package version and all associated files func RemovePackageVersion(ctx context.Context, doer *user_model.User, pv *packages_model.PackageVersion) error { - dbCtx, committer, err := db.TxContext(ctx) - if err != nil { - return err - } - defer committer.Close() - - pd, err := packages_model.GetPackageDescriptor(dbCtx, pv) + pd, err := packages_model.GetPackageDescriptor(ctx, pv) if err != nil { return err } - log.Trace("Deleting package: %v", pv.ID) - - if err := DeletePackageVersionAndReferences(dbCtx, pv); err != nil { - return err - } - - if err := committer.Commit(); err != nil { + if err := db.WithTx(ctx, func(ctx context.Context) error { + log.Trace("Deleting package: %v", pv.ID) + return DeletePackageVersionAndReferences(ctx, pv) + }); err != nil { return err } @@ -563,8 +555,8 @@ func DeletePackageFile(ctx context.Context, pf *packages_model.PackageFile) erro return packages_model.DeleteFileByID(ctx, pf.ID) } -// GetFileStreamByPackageNameAndVersion returns the content of the specific package file -func GetFileStreamByPackageNameAndVersion(ctx context.Context, pvi *PackageInfo, pfi *PackageFileInfo) (io.ReadSeekCloser, *url.URL, *packages_model.PackageFile, error) { +// OpenFileForDownloadByPackageNameAndVersion returns the content of the specific package file and increases the download counter. +func OpenFileForDownloadByPackageNameAndVersion(ctx context.Context, pvi *PackageInfo, pfi *PackageFileInfo, method string) (io.ReadSeekCloser, *url.URL, *packages_model.PackageFile, error) { log.Trace("Getting package file stream: %v, %v, %s, %s, %s, %s", pvi.Owner.ID, pvi.PackageType, pvi.Name, pvi.Version, pfi.Filename, pfi.CompositeKey) pv, err := packages_model.GetVersionByNameAndVersion(ctx, pvi.Owner.ID, pvi.PackageType, pvi.Name, pvi.Version) @@ -576,32 +568,38 @@ func GetFileStreamByPackageNameAndVersion(ctx context.Context, pvi *PackageInfo, return nil, nil, nil, err } - return GetFileStreamByPackageVersion(ctx, pv, pfi) + return OpenFileForDownloadByPackageVersion(ctx, pv, pfi, method) } -// GetFileStreamByPackageVersion returns the content of the specific package file -func GetFileStreamByPackageVersion(ctx context.Context, pv *packages_model.PackageVersion, pfi *PackageFileInfo) (io.ReadSeekCloser, *url.URL, *packages_model.PackageFile, error) { +// OpenFileForDownloadByPackageVersion returns the content of the specific package file and increases the download counter. +func OpenFileForDownloadByPackageVersion(ctx context.Context, pv *packages_model.PackageVersion, pfi *PackageFileInfo, method string) (io.ReadSeekCloser, *url.URL, *packages_model.PackageFile, error) { pf, err := packages_model.GetFileForVersionByName(ctx, pv.ID, pfi.Filename, pfi.CompositeKey) if err != nil { return nil, nil, nil, err } - return GetPackageFileStream(ctx, pf) + return OpenFileForDownload(ctx, pf, method) } -// GetPackageFileStream returns the content of the specific package file -func GetPackageFileStream(ctx context.Context, pf *packages_model.PackageFile) (io.ReadSeekCloser, *url.URL, *packages_model.PackageFile, error) { +// OpenFileForDownload returns the content of the specific package file and increases the download counter. 
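The Get*Stream helpers in packages.go are renamed to OpenFileForDownload* and gain an HTTP method parameter, so the download counter is only incremented for GET requests on lead files. A sketch of a caller threading r.Method through; the handler wiring and the TypeGeneric constant are illustrative assumptions, while the signatures are the ones introduced in this diff:

    package example

    import (
        "io"
        "net/http"

        packages_model "code.gitea.io/gitea/models/packages"
        user_model "code.gitea.io/gitea/models/user"
        packages_service "code.gitea.io/gitea/services/packages"
    )

    func serveFile(w http.ResponseWriter, r *http.Request, owner *user_model.User) {
        s, u, _, err := packages_service.OpenFileForDownloadByPackageNameAndVersion(r.Context(),
            &packages_service.PackageInfo{
                Owner:       owner,
                PackageType: packages_model.TypeGeneric,
                Name:        "example",
                Version:     "1.0.0",
            },
            &packages_service.PackageFileInfo{Filename: "example-1.0.0.tar.gz"},
            r.Method, // http.MethodGet counts as a download, http.MethodHead does not
        )
        if err != nil {
            http.Error(w, err.Error(), http.StatusNotFound)
            return
        }
        if u != nil { // storage supports direct serving, redirect instead of streaming
            http.Redirect(w, r, u.String(), http.StatusTemporaryRedirect)
            return
        }
        defer s.Close()
        _, _ = io.Copy(w, s)
    }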
+func OpenFileForDownload(ctx context.Context, pf *packages_model.PackageFile, method string) (io.ReadSeekCloser, *url.URL, *packages_model.PackageFile, error) { pb, err := packages_model.GetBlobByID(ctx, pf.BlobID) if err != nil { return nil, nil, nil, err } - return GetPackageBlobStream(ctx, pf, pb, nil) + return OpenBlobForDownload(ctx, pf, pb, method, nil) +} + +func OpenBlobStream(pb *packages_model.PackageBlob) (io.ReadSeekCloser, error) { + cs := packages_module.NewContentStore() + key := packages_module.BlobHash256Key(pb.HashSHA256) + return cs.OpenBlob(key) } -// GetPackageBlobStream returns the content of the specific package blob +// OpenBlobForDownload returns the content of the specific package blob and increases the download counter. // If the storage supports direct serving and it's enabled, only the direct serving url is returned. -func GetPackageBlobStream(ctx context.Context, pf *packages_model.PackageFile, pb *packages_model.PackageBlob, serveDirectReqParams url.Values) (io.ReadSeekCloser, *url.URL, *packages_model.PackageFile, error) { +func OpenBlobForDownload(ctx context.Context, pf *packages_model.PackageFile, pb *packages_model.PackageBlob, method string, serveDirectReqParams url.Values) (io.ReadSeekCloser, *url.URL, *packages_model.PackageFile, error) { key := packages_module.BlobHash256Key(pb.HashSHA256) cs := packages_module.NewContentStore() @@ -611,23 +609,24 @@ func GetPackageBlobStream(ctx context.Context, pf *packages_model.PackageFile, p var err error if cs.ShouldServeDirect() { - u, err = cs.GetServeDirectURL(key, pf.Name, serveDirectReqParams) + u, err = cs.GetServeDirectURL(key, pf.Name, method, serveDirectReqParams) if err != nil && !errors.Is(err, storage.ErrURLNotSupported) { - log.Error("Error getting serve direct url: %v", err) + log.Error("Error getting serve direct url (fallback to local reader): %v", err) } } if u == nil { - s, err = cs.Get(key) + s, err = cs.OpenBlob(key) + } + if err != nil { + return nil, nil, nil, err } - if err == nil { - if pf.IsLead { - if err := packages_model.IncrementDownloadCounter(ctx, pf.VersionID); err != nil { - log.Error("Error incrementing download counter: %v", err) - } + if pf.IsLead && method == http.MethodGet { + if err := packages_model.IncrementDownloadCounter(ctx, pf.VersionID); err != nil { + log.Error("Error incrementing download counter: %v", err) } } - return s, u, pf, err + return s, u, pf, nil } // RemoveAllPackages for User diff --git a/services/packages/rpm/repository.go b/services/packages/rpm/repository.go index a7d196c15c..fbbf8d7dad 100644 --- a/services/packages/rpm/repository.go +++ b/services/packages/rpm/repository.go @@ -408,7 +408,6 @@ func buildPrimary(ctx context.Context, pv *packages_model.PackageVersion, pfs [] files = append(files, f) } } - packageVersion := fmt.Sprintf("%s-%s", pd.FileMetadata.Version, pd.FileMetadata.Release) packages = append(packages, &Package{ Type: "rpm", Name: pd.Package.Name, @@ -437,7 +436,7 @@ func buildPrimary(ctx context.Context, pv *packages_model.PackageVersion, pfs [] Archive: pd.FileMetadata.ArchiveSize, }, Location: Location{ - Href: fmt.Sprintf("package/%s/%s/%s/%s-%s.%s.rpm", pd.Package.Name, packageVersion, pd.FileMetadata.Architecture, pd.Package.Name, packageVersion, pd.FileMetadata.Architecture), + Href: fmt.Sprintf("package/%s/%s/%s/%s-%s.%s.rpm", pd.Package.Name, pd.Version.Version, pd.FileMetadata.Architecture, pd.Package.Name, pd.Version.Version, pd.FileMetadata.Architecture), }, Format: Format{ License: pd.VersionMetadata.License, @@ 
-471,7 +470,7 @@ func buildPrimary(ctx context.Context, pv *packages_model.PackageVersion, pfs [] } // https://docs.pulpproject.org/en/2.19/plugins/pulp_rpm/tech-reference/rpm.html#filelists-xml -func buildFilelists(ctx context.Context, pv *packages_model.PackageVersion, pfs []*packages_model.PackageFile, c packageCache, group string) (*repoData, error) { //nolint:dupl +func buildFilelists(ctx context.Context, pv *packages_model.PackageVersion, pfs []*packages_model.PackageFile, c packageCache, group string) (*repoData, error) { //nolint:dupl // duplicates with buildOther type Version struct { Epoch string `xml:"epoch,attr"` Version string `xml:"ver,attr"` @@ -518,7 +517,7 @@ func buildFilelists(ctx context.Context, pv *packages_model.PackageVersion, pfs } // https://docs.pulpproject.org/en/2.19/plugins/pulp_rpm/tech-reference/rpm.html#other-xml -func buildOther(ctx context.Context, pv *packages_model.PackageVersion, pfs []*packages_model.PackageFile, c packageCache, group string) (*repoData, error) { //nolint:dupl +func buildOther(ctx context.Context, pv *packages_model.PackageVersion, pfs []*packages_model.PackageFile, c packageCache, group string) (*repoData, error) { //nolint:dupl // duplicates with buildFilelists type Version struct { Epoch string `xml:"epoch,attr"` Version string `xml:"ver,attr"` diff --git a/services/projects/issue.go b/services/projects/issue.go index 090d19d2f4..590fe960d5 100644 --- a/services/projects/issue.go +++ b/services/projects/issue.go @@ -5,7 +5,7 @@ package project import ( "context" - "fmt" + "errors" "code.gitea.io/gitea/models/db" issues_model "code.gitea.io/gitea/models/issues" @@ -29,7 +29,7 @@ func MoveIssuesOnProjectColumn(ctx context.Context, doer *user_model.User, colum return err } if int(count) != len(sortedIssueIDs) { - return fmt.Errorf("all issues have to be added to a project first") + return errors.New("all issues have to be added to a project first") } issues, err := issues_model.GetIssuesByIDs(ctx, issueIDs) diff --git a/services/projects/issue_test.go b/services/projects/issue_test.go index b6f0b1dae1..e76d31e757 100644 --- a/services/projects/issue_test.go +++ b/services/projects/issue_test.go @@ -130,7 +130,7 @@ func Test_Projects(t *testing.T) { }) assert.NoError(t, err) assert.Len(t, projects, 1) - assert.EqualValues(t, project1.ID, projects[0].ID) + assert.Equal(t, project1.ID, projects[0].ID) t.Run("Authenticated user", func(t *testing.T) { columnIssues, err := LoadIssuesFromProject(db.DefaultContext, projects[0], &issues_model.IssuesOptions{ diff --git a/services/pull/check.go b/services/pull/check.go index 9b159891d7..7fcec22f49 100644 --- a/services/pull/check.go +++ b/services/pull/check.go @@ -1,5 +1,4 @@ -// Copyright 2019 The Gitea Authors. -// All rights reserved. +// Copyright 2019 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT package pull @@ -10,11 +9,13 @@ import ( "fmt" "strconv" "strings" + "time" "code.gitea.io/gitea/models/db" git_model "code.gitea.io/gitea/models/git" issues_model "code.gitea.io/gitea/models/issues" access_model "code.gitea.io/gitea/models/perm/access" + "code.gitea.io/gitea/models/pull" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" @@ -25,8 +26,10 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/process" "code.gitea.io/gitea/modules/queue" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" asymkey_service "code.gitea.io/gitea/services/asymkey" + "code.gitea.io/gitea/services/automergequeue" notify_service "code.gitea.io/gitea/services/notify" ) @@ -34,27 +37,88 @@ import ( var prPatchCheckerQueue *queue.WorkerPoolQueue[string] var ( - ErrIsClosed = errors.New("pull is closed") - ErrUserNotAllowedToMerge = ErrDisallowedToMerge{} - ErrHasMerged = errors.New("has already been merged") - ErrIsWorkInProgress = errors.New("work in progress PRs cannot be merged") - ErrIsChecking = errors.New("cannot merge while conflict checking is in progress") - ErrNotMergeableState = errors.New("not in mergeable state") - ErrDependenciesLeft = errors.New("is blocked by an open dependency") + ErrIsClosed = errors.New("pull is closed") + ErrNoPermissionToMerge = errors.New("no permission to merge") + ErrNotReadyToMerge = errors.New("not ready to merge") + ErrHasMerged = errors.New("has already been merged") + ErrIsWorkInProgress = errors.New("work in progress PRs cannot be merged") + ErrIsChecking = errors.New("cannot merge while conflict checking is in progress") + ErrNotMergeableState = errors.New("not in mergeable state") + ErrDependenciesLeft = errors.New("is blocked by an open dependency") ) -// AddToTaskQueue adds itself to pull request test task queue. -func AddToTaskQueue(ctx context.Context, pr *issues_model.PullRequest) { +func markPullRequestStatusAsChecking(ctx context.Context, pr *issues_model.PullRequest) bool { pr.Status = issues_model.PullRequestStatusChecking err := pr.UpdateColsIfNotMerged(ctx, "status") if err != nil { - log.Error("AddToTaskQueue(%-v).UpdateCols.(add to queue): %v", pr, err) + log.Error("UpdateColsIfNotMerged failed, pr: %-v, err: %v", pr, err) + return false + } + pr, err = issues_model.GetPullRequestByID(ctx, pr.ID) + if err != nil { + log.Error("GetPullRequestByID failed, pr: %-v, err: %v", pr, err) + return false + } + return pr.Status == issues_model.PullRequestStatusChecking +} + +var AddPullRequestToCheckQueue = realAddPullRequestToCheckQueue + +func realAddPullRequestToCheckQueue(prID int64) { + err := prPatchCheckerQueue.Push(strconv.FormatInt(prID, 10)) + if err != nil && !errors.Is(err, queue.ErrAlreadyInQueue) { + log.Error("Error adding %v to the pull requests check queue: %v", prID, err) + } +} + +func StartPullRequestCheckImmediately(ctx context.Context, pr *issues_model.PullRequest) { + if !markPullRequestStatusAsChecking(ctx, pr) { return } - log.Trace("Adding %-v to the test pull requests queue", pr) - err = prPatchCheckerQueue.Push(strconv.FormatInt(pr.ID, 10)) - if err != nil && err != queue.ErrAlreadyInQueue { - log.Error("Error adding %-v to the test pull requests queue: %v", pr, err) + AddPullRequestToCheckQueue(pr.ID) +} + +// StartPullRequestCheckDelayable will delay the check if the pull request was not updated recently. 
+// When the "base" branch gets updated, all PRs targeting that "base" branch need to re-check whether +// they are mergeable. +// When there are too many stale PRs, each "base" branch update will consume a lot of system resources. +// So we can delay the checks for PRs that were not updated recently, only mark their status as +// "checking", and then next time when these PRs are updated or viewed, the real checks will run. +func StartPullRequestCheckDelayable(ctx context.Context, pr *issues_model.PullRequest) { + if !markPullRequestStatusAsChecking(ctx, pr) { + return + } + + if setting.Repository.PullRequest.DelayCheckForInactiveDays >= 0 { + if err := pr.LoadIssue(ctx); err != nil { + return + } + duration := 24 * time.Hour * time.Duration(setting.Repository.PullRequest.DelayCheckForInactiveDays) + if pr.Issue.UpdatedUnix.AddDuration(duration) <= timeutil.TimeStampNow() { + return + } + } + + AddPullRequestToCheckQueue(pr.ID) +} + +func StartPullRequestCheckOnView(ctx context.Context, pr *issues_model.PullRequest) { + // TODO: its correctness totally depends on the "unique queue" feature and the global lock. + // So duplicate "start" requests will be ignored if there is already a task in the queue or one is running. + // Ideally in the future we should decouple the "unique queue" feature from the "start" request. + if pr.Status == issues_model.PullRequestStatusChecking { + if setting.IsInTesting { + // In testing mode, there might be an "immediate" queue, which is not a real queue, everything is executed in the same goroutine + // So we can't use the global lock here, otherwise it will cause a deadlock. + AddPullRequestToCheckQueue(pr.ID) + } else { + // When a PR check starts, the task is popped from the queue and the task handler acquires the global lock + // So we need to acquire the global lock here to prevent from duplicate tasks + _, _ = globallock.TryLockAndDo(ctx, getPullWorkingLockKey(pr.ID), func(ctx context.Context) error { + AddPullRequestToCheckQueue(pr.ID) // the queue is a unique queue and won't add the same task again + return nil + }) + } } } @@ -84,7 +148,7 @@ func CheckPullMergeable(stdCtx context.Context, doer *user_model.User, perm *acc log.Error("Error whilst checking if %-v is allowed to merge %-v: %v", doer, pr, err) return err } else if !allowedMerge { - return ErrUserNotAllowedToMerge + return ErrNoPermissionToMerge } if mergeCheckType == MergeCheckTypeManually { @@ -105,7 +169,7 @@ func CheckPullMergeable(stdCtx context.Context, doer *user_model.User, perm *acc } if err := CheckPullBranchProtections(ctx, pr, false); err != nil { - if !IsErrDisallowedToMerge(err) { + if !errors.Is(err, ErrNotReadyToMerge) { log.Error("Error whilst checking pull branch protection for %-v: %v", pr, err) return err } @@ -167,15 +231,15 @@ func isSignedIfRequired(ctx context.Context, pr *issues_model.PullRequest, doer return true, nil } - sign, _, _, err := asymkey_service.SignMerge(ctx, pr, doer, pr.BaseRepo.RepoPath(), pr.BaseBranch, pr.GetGitRefName()) + sign, _, _, err := asymkey_service.SignMerge(ctx, pr, doer, pr.BaseRepo.RepoPath(), pr.BaseBranch, pr.GetGitHeadRefName()) return sign, err } -// checkAndUpdateStatus checks if pull request is possible to leaving checking status, +// markPullRequestAsMergeable checks if pull request is possible to leaving checking status, // and set to be either conflict or mergeable. 
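The StartPullRequestCheckDelayable comment above carries the reasoning behind the new DelayCheckForInactiveDays setting. Condensed into a standalone predicate, purely as a sketch that mirrors the logic shown in the diff rather than adding anything new:

    package example

    import (
        "time"

        "code.gitea.io/gitea/modules/timeutil"
    )

    // shouldDelayCheck reports whether a PR's mergeability re-check can be deferred:
    // the feature is enabled (delayDays >= 0) and the PR saw no activity inside the window.
    // The caller still marks the PR as "checking"; the real check runs on the next update or view.
    func shouldDelayCheck(issueUpdated timeutil.TimeStamp, delayDays int) bool {
        if delayDays < 0 {
            return false // delaying disabled, always queue the check immediately
        }
        window := 24 * time.Hour * time.Duration(delayDays)
        return issueUpdated.AddDuration(window) <= timeutil.TimeStampNow()
    }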
-func checkAndUpdateStatus(ctx context.Context, pr *issues_model.PullRequest) { - // If status has not been changed to conflict by testPatch then we are mergeable +func markPullRequestAsMergeable(ctx context.Context, pr *issues_model.PullRequest) { + // If the status has not been changed to conflict by testPullRequestTmpRepoBranchMergeable then we are mergeable if pr.Status == issues_model.PullRequestStatusChecking { pr.Status = issues_model.PullRequestStatusMergeable } @@ -194,6 +258,16 @@ func checkAndUpdateStatus(ctx context.Context, pr *issues_model.PullRequest) { if err := pr.UpdateColsIfNotMerged(ctx, "merge_base", "status", "conflicted_files", "changed_protected_files"); err != nil { log.Error("Update[%-v]: %v", pr, err) } + + // if there is a scheduled merge for this pull request, start the auto merge check (again) + exist, _, err := pull.GetScheduledMergeByPullID(ctx, pr.ID) + if err != nil { + log.Error("GetScheduledMergeByPullID[%-v]: %v", pr, err) + return + } else if !exist { + return + } + automergequeue.StartPRCheckAndAutoMerge(ctx, pr) } // getMergeCommit checks if a pull request has been merged @@ -203,7 +277,7 @@ func getMergeCommit(ctx context.Context, pr *issues_model.PullRequest) (*git.Com return nil, fmt.Errorf("unable to load base repo for %s: %w", pr, err) } - prHeadRef := pr.GetGitRefName() + prHeadRef := pr.GetGitHeadRefName() // Check if the pull request is merged into BaseBranch if _, _, err := git.NewCommand("merge-base", "--is-ancestor"). @@ -310,6 +384,10 @@ func manuallyMerged(ctx context.Context, pr *issues_model.PullRequest) bool { // InitializePullRequests checks and tests untested patches of pull requests. func InitializePullRequests(ctx context.Context) { + // If we prefer to delay the checks, then no need to do any check during startup, there should be not much difference + if setting.Repository.PullRequest.DelayCheckForInactiveDays >= 0 { + return + } prs, err := issues_model.GetPullRequestIDsByCheckStatus(ctx, issues_model.PullRequestStatusChecking) if err != nil { log.Error("Find Checking PRs: %v", err) @@ -320,24 +398,12 @@ func InitializePullRequests(ctx context.Context) { case <-ctx.Done(): return default: - log.Trace("Adding PR[%d] to the pull requests patch checking queue", prID) - if err := prPatchCheckerQueue.Push(strconv.FormatInt(prID, 10)); err != nil { - log.Error("Error adding PR[%d] to the pull requests patch checking queue %v", prID, err) - } + AddPullRequestToCheckQueue(prID) } } } -// handle passed PR IDs and test the PRs -func handler(items ...string) []string { - for _, s := range items { - id, _ := strconv.ParseInt(s, 10, 64) - testPR(id) - } - return nil -} - -func testPR(id int64) { +func checkPullRequestMergeable(id int64) { ctx := graceful.GetManager().HammerContext() releaser, err := globallock.Lock(ctx, getPullWorkingLockKey(id)) if err != nil { @@ -351,7 +417,7 @@ func testPR(id int64) { pr, err := issues_model.GetPullRequestByID(ctx, id) if err != nil { - log.Error("Unable to GetPullRequestByID[%d] for testPR: %v", id, err) + log.Error("Unable to GetPullRequestByID[%d] for checkPullRequestMergeable: %v", id, err) return } @@ -370,15 +436,15 @@ func testPR(id int64) { return } - if err := TestPatch(pr); err != nil { - log.Error("testPatch[%-v]: %v", pr, err) + if err := testPullRequestBranchMergeable(pr); err != nil { + log.Error("testPullRequestTmpRepoBranchMergeable[%-v]: %v", pr, err) pr.Status = issues_model.PullRequestStatusError if err := pr.UpdateCols(ctx, "status"); err != nil { log.Error("update pr [%-v] status to 
PullRequestStatusError failed: %v", pr, err) } return } - checkAndUpdateStatus(ctx, pr) + markPullRequestAsMergeable(ctx, pr) } // CheckPRsForBaseBranch check all pulls with baseBrannch @@ -387,20 +453,24 @@ func CheckPRsForBaseBranch(ctx context.Context, baseRepo *repo_model.Repository, if err != nil { return err } - for _, pr := range prs { - AddToTaskQueue(ctx, pr) + StartPullRequestCheckImmediately(ctx, pr) } - return nil } // Init runs the task queue to test all the checking status pull requests func Init() error { - prPatchCheckerQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "pr_patch_checker", handler) + prPatchCheckerQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "pr_patch_checker", func(items ...string) []string { + for _, s := range items { + id, _ := strconv.ParseInt(s, 10, 64) + checkPullRequestMergeable(id) + } + return nil + }) if prPatchCheckerQueue == nil { - return fmt.Errorf("unable to create pr_patch_checker queue") + return errors.New("unable to create pr_patch_checker queue") } go graceful.GetManager().RunWithCancel(prPatchCheckerQueue) diff --git a/services/pull/check_test.go b/services/pull/check_test.go index 5508a70f45..eb66615dcf 100644 --- a/services/pull/check_test.go +++ b/services/pull/check_test.go @@ -1,5 +1,4 @@ -// Copyright 2019 The Gitea Authors. -// All rights reserved. +// Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT package pull @@ -11,11 +10,18 @@ import ( "code.gitea.io/gitea/models/db" issues_model "code.gitea.io/gitea/models/issues" + "code.gitea.io/gitea/models/pull" + repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/queue" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/test" + "code.gitea.io/gitea/services/automergequeue" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestPullRequest_AddToTaskQueue(t *testing.T) { @@ -36,7 +42,7 @@ func TestPullRequest_AddToTaskQueue(t *testing.T) { assert.NoError(t, err) pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}) - AddToTaskQueue(db.DefaultContext, pr) + StartPullRequestCheckImmediately(db.DefaultContext, pr) assert.Eventually(t, func() bool { pr = unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}) @@ -51,7 +57,7 @@ func TestPullRequest_AddToTaskQueue(t *testing.T) { select { case id := <-idChan: - assert.EqualValues(t, pr.ID, id) + assert.Equal(t, pr.ID, id) case <-time.After(time.Second): assert.FailNow(t, "Timeout: nothing was added to pullRequestQueue") } @@ -63,6 +69,46 @@ func TestPullRequest_AddToTaskQueue(t *testing.T) { pr = unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}) assert.Equal(t, issues_model.PullRequestStatusChecking, pr.Status) - prPatchCheckerQueue.ShutdownWait(5 * time.Second) + prPatchCheckerQueue.ShutdownWait(time.Second) prPatchCheckerQueue = nil } + +func TestMarkPullRequestAsMergeable(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + prPatchCheckerQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "pr_patch_checker", func(items ...string) []string { return nil }) + go prPatchCheckerQueue.Run() + defer func() { + prPatchCheckerQueue.ShutdownWait(time.Second) + prPatchCheckerQueue = nil + }() + + addToQueueShaChan := make(chan string, 1) + defer 
test.MockVariableValue(&automergequeue.AddToQueue, func(pr *issues_model.PullRequest, sha string) { + addToQueueShaChan <- sha + })() + ctx := t.Context() + _, _ = db.GetEngine(ctx).ID(2).Update(&issues_model.PullRequest{Status: issues_model.PullRequestStatusChecking}) + pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}) + require.False(t, pr.HasMerged) + require.Equal(t, issues_model.PullRequestStatusChecking, pr.Status) + + err := pull.ScheduleAutoMerge(ctx, &user_model.User{ID: 99999}, pr.ID, repo_model.MergeStyleMerge, "test msg", true) + require.NoError(t, err) + + exist, scheduleMerge, err := pull.GetScheduledMergeByPullID(ctx, pr.ID) + require.NoError(t, err) + assert.True(t, exist) + assert.True(t, scheduleMerge.Doer.IsGhost()) + + markPullRequestAsMergeable(ctx, pr) + pr = unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}) + require.Equal(t, issues_model.PullRequestStatusMergeable, pr.Status) + + select { + case sha := <-addToQueueShaChan: + assert.Equal(t, "985f0301dba5e7b34be866819cd15ad3d8f508ee", sha) // ref: refs/pull/3/head + case <-time.After(1 * time.Second): + assert.FailNow(t, "Timeout: nothing was added to automergequeue") + } +} diff --git a/services/pull/commit_status.go b/services/pull/commit_status.go index 0bfff21746..d15d318149 100644 --- a/services/pull/commit_status.go +++ b/services/pull/commit_status.go @@ -10,93 +10,59 @@ import ( "code.gitea.io/gitea/models/db" git_model "code.gitea.io/gitea/models/git" issues_model "code.gitea.io/gitea/models/issues" + "code.gitea.io/gitea/modules/commitstatus" "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/log" - "code.gitea.io/gitea/modules/structs" "github.com/gobwas/glob" "github.com/pkg/errors" ) // MergeRequiredContextsCommitStatus returns a commit status state for given required contexts -func MergeRequiredContextsCommitStatus(commitStatuses []*git_model.CommitStatus, requiredContexts []string) structs.CommitStatusState { - // matchedCount is the number of `CommitStatus.Context` that match any context of `requiredContexts` - matchedCount := 0 - returnedStatus := structs.CommitStatusSuccess - - if len(requiredContexts) > 0 { - requiredContextsGlob := make(map[string]glob.Glob, len(requiredContexts)) - for _, ctx := range requiredContexts { - if gp, err := glob.Compile(ctx); err != nil { - log.Error("glob.Compile %s failed. 
Error: %v", ctx, err) - } else { - requiredContextsGlob[ctx] = gp - } - } - - for _, gp := range requiredContextsGlob { - var targetStatus structs.CommitStatusState - for _, commitStatus := range commitStatuses { - if gp.Match(commitStatus.Context) { - targetStatus = commitStatus.State - matchedCount++ - break - } - } - - // If required rule not match any action, then it is pending - if targetStatus == "" { - if structs.CommitStatusPending.NoBetterThan(returnedStatus) { - returnedStatus = structs.CommitStatusPending - } - break - } - - if targetStatus.NoBetterThan(returnedStatus) { - returnedStatus = targetStatus - } - } +func MergeRequiredContextsCommitStatus(commitStatuses []*git_model.CommitStatus, requiredContexts []string) commitstatus.CommitStatusState { + if len(commitStatuses) == 0 { + return commitstatus.CommitStatusPending } - if matchedCount == 0 && returnedStatus == structs.CommitStatusSuccess { - status := git_model.CalcCommitStatus(commitStatuses) - if status != nil { - return status.State - } - return structs.CommitStatusSuccess + if len(requiredContexts) == 0 { + return git_model.CalcCommitStatus(commitStatuses).State } - return returnedStatus -} - -// IsCommitStatusContextSuccess returns true if all required status check contexts succeed. -func IsCommitStatusContextSuccess(commitStatuses []*git_model.CommitStatus, requiredContexts []string) bool { - // If no specific context is required, require that last commit status is a success - if len(requiredContexts) == 0 { - status := git_model.CalcCommitStatus(commitStatuses) - if status == nil || status.State != structs.CommitStatusSuccess { - return false + requiredContextsGlob := make(map[string]glob.Glob, len(requiredContexts)) + for _, ctx := range requiredContexts { + if gp, err := glob.Compile(ctx); err != nil { + log.Error("glob.Compile %s failed. 
Error: %v", ctx, err) + } else { + requiredContextsGlob[ctx] = gp } - return true } - for _, ctx := range requiredContexts { - var found bool + requiredCommitStatuses := make([]*git_model.CommitStatus, 0, len(commitStatuses)) + allRequiredContextsMatched := true + for _, gp := range requiredContextsGlob { + requiredContextMatched := false for _, commitStatus := range commitStatuses { - if commitStatus.Context == ctx { - if commitStatus.State != structs.CommitStatusSuccess { - return false - } - - found = true - break + if gp.Match(commitStatus.Context) { + requiredCommitStatuses = append(requiredCommitStatuses, commitStatus) + requiredContextMatched = true } } - if !found { - return false - } + allRequiredContextsMatched = allRequiredContextsMatched && requiredContextMatched + } + if len(requiredCommitStatuses) == 0 { + return commitstatus.CommitStatusPending + } + + returnedStatus := git_model.CalcCommitStatus(requiredCommitStatuses).State + if allRequiredContextsMatched { + return returnedStatus + } + + if returnedStatus == commitstatus.CommitStatusFailure { + return commitstatus.CommitStatusFailure } - return true + // even if part of success, return pending + return commitstatus.CommitStatusPending } // IsPullCommitStatusPass returns if all required status checks PASS @@ -117,7 +83,7 @@ func IsPullCommitStatusPass(ctx context.Context, pr *issues_model.PullRequest) ( } // GetPullRequestCommitStatusState returns pull request merged commit status state -func GetPullRequestCommitStatusState(ctx context.Context, pr *issues_model.PullRequest) (structs.CommitStatusState, error) { +func GetPullRequestCommitStatusState(ctx context.Context, pr *issues_model.PullRequest) (commitstatus.CommitStatusState, error) { // Ensure HeadRepo is loaded if err := pr.LoadHeadRepo(ctx); err != nil { return "", errors.Wrap(err, "LoadHeadRepo") @@ -133,7 +99,7 @@ func GetPullRequestCommitStatusState(ctx context.Context, pr *issues_model.PullR if pr.Flow == issues_model.PullRequestFlowGithub && !gitrepo.IsBranchExist(ctx, pr.HeadRepo, pr.HeadBranch) { return "", errors.New("Head branch does not exist, can not merge") } - if pr.Flow == issues_model.PullRequestFlowAGit && !gitrepo.IsReferenceExist(ctx, pr.HeadRepo, pr.GetGitRefName()) { + if pr.Flow == issues_model.PullRequestFlowAGit && !gitrepo.IsReferenceExist(ctx, pr.HeadRepo, pr.GetGitHeadRefName()) { return "", errors.New("Head branch does not exist, can not merge") } @@ -141,7 +107,7 @@ func GetPullRequestCommitStatusState(ctx context.Context, pr *issues_model.PullR if pr.Flow == issues_model.PullRequestFlowGithub { sha, err = headGitRepo.GetBranchCommitID(pr.HeadBranch) } else { - sha, err = headGitRepo.GetRefCommitID(pr.GetGitRefName()) + sha, err = headGitRepo.GetRefCommitID(pr.GetGitHeadRefName()) } if err != nil { return "", err @@ -151,7 +117,7 @@ func GetPullRequestCommitStatusState(ctx context.Context, pr *issues_model.PullR return "", errors.Wrap(err, "LoadBaseRepo") } - commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, pr.BaseRepo.ID, sha, db.ListOptionsAll) + commitStatuses, err := git_model.GetLatestCommitStatus(ctx, pr.BaseRepo.ID, sha, db.ListOptionsAll) if err != nil { return "", errors.Wrap(err, "GetLatestCommitStatus") } diff --git a/services/pull/commit_status_test.go b/services/pull/commit_status_test.go index 592acdd55c..a58e788c04 100644 --- a/services/pull/commit_status_test.go +++ b/services/pull/commit_status_test.go @@ -8,58 +8,85 @@ import ( "testing" git_model "code.gitea.io/gitea/models/git" - 
"code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/commitstatus" "github.com/stretchr/testify/assert" ) func TestMergeRequiredContextsCommitStatus(t *testing.T) { - testCases := [][]*git_model.CommitStatus{ + cases := []struct { + commitStatuses []*git_model.CommitStatus + requiredContexts []string + expected commitstatus.CommitStatusState + }{ { - {Context: "Build 1", State: structs.CommitStatusSuccess}, - {Context: "Build 2", State: structs.CommitStatusSuccess}, - {Context: "Build 3", State: structs.CommitStatusSuccess}, + commitStatuses: []*git_model.CommitStatus{}, + requiredContexts: []string{}, + expected: commitstatus.CommitStatusPending, }, { - {Context: "Build 1", State: structs.CommitStatusSuccess}, - {Context: "Build 2", State: structs.CommitStatusSuccess}, - {Context: "Build 2t", State: structs.CommitStatusPending}, + commitStatuses: []*git_model.CommitStatus{ + {Context: "Build xxx", State: commitstatus.CommitStatusSkipped}, + }, + requiredContexts: []string{"Build*"}, + expected: commitstatus.CommitStatusSuccess, }, { - {Context: "Build 1", State: structs.CommitStatusSuccess}, - {Context: "Build 2", State: structs.CommitStatusSuccess}, - {Context: "Build 2t", State: structs.CommitStatusFailure}, + commitStatuses: []*git_model.CommitStatus{ + {Context: "Build 1", State: commitstatus.CommitStatusSkipped}, + {Context: "Build 2", State: commitstatus.CommitStatusSuccess}, + {Context: "Build 3", State: commitstatus.CommitStatusSuccess}, + }, + requiredContexts: []string{"Build*"}, + expected: commitstatus.CommitStatusSuccess, }, { - {Context: "Build 1", State: structs.CommitStatusSuccess}, - {Context: "Build 2", State: structs.CommitStatusSuccess}, - {Context: "Build 2t", State: structs.CommitStatusSuccess}, + commitStatuses: []*git_model.CommitStatus{ + {Context: "Build 1", State: commitstatus.CommitStatusSuccess}, + {Context: "Build 2", State: commitstatus.CommitStatusSuccess}, + {Context: "Build 2t", State: commitstatus.CommitStatusPending}, + }, + requiredContexts: []string{"Build*", "Build 2t*"}, + expected: commitstatus.CommitStatusPending, }, { - {Context: "Build 1", State: structs.CommitStatusSuccess}, - {Context: "Build 2", State: structs.CommitStatusSuccess}, - {Context: "Build 2t", State: structs.CommitStatusSuccess}, + commitStatuses: []*git_model.CommitStatus{ + {Context: "Build 1", State: commitstatus.CommitStatusSuccess}, + {Context: "Build 2", State: commitstatus.CommitStatusSuccess}, + {Context: "Build 2t", State: commitstatus.CommitStatusFailure}, + }, + requiredContexts: []string{"Build*", "Build 2t*"}, + expected: commitstatus.CommitStatusFailure, + }, + { + commitStatuses: []*git_model.CommitStatus{ + {Context: "Build 1", State: commitstatus.CommitStatusSuccess}, + {Context: "Build 2", State: commitstatus.CommitStatusSuccess}, + {Context: "Build 2t", State: commitstatus.CommitStatusFailure}, + }, + requiredContexts: []string{"Build*"}, + expected: commitstatus.CommitStatusFailure, + }, + { + commitStatuses: []*git_model.CommitStatus{ + {Context: "Build 1", State: commitstatus.CommitStatusSuccess}, + {Context: "Build 2", State: commitstatus.CommitStatusSuccess}, + {Context: "Build 2t", State: commitstatus.CommitStatusSuccess}, + }, + requiredContexts: []string{"Build*", "Build 2t*", "Build 3*"}, + expected: commitstatus.CommitStatusPending, + }, + { + commitStatuses: []*git_model.CommitStatus{ + {Context: "Build 1", State: commitstatus.CommitStatusSuccess}, + {Context: "Build 2", State: commitstatus.CommitStatusSuccess}, + {Context: "Build 2t", 
State: commitstatus.CommitStatusSuccess}, + }, + requiredContexts: []string{"Build*", "Build *", "Build 2t*", "Build 1*"}, + expected: commitstatus.CommitStatusSuccess, }, } - testCasesRequiredContexts := [][]string{ - {"Build*"}, - {"Build*", "Build 2t*"}, - {"Build*", "Build 2t*"}, - {"Build*", "Build 2t*", "Build 3*"}, - {"Build*", "Build *", "Build 2t*", "Build 1*"}, - } - - testCasesExpected := []structs.CommitStatusState{ - structs.CommitStatusSuccess, - structs.CommitStatusPending, - structs.CommitStatusFailure, - structs.CommitStatusPending, - structs.CommitStatusSuccess, - } - - for i, commitStatuses := range testCases { - if MergeRequiredContextsCommitStatus(commitStatuses, testCasesRequiredContexts[i]) != testCasesExpected[i] { - assert.Fail(t, "Test case failed", "Test case %d failed", i+1) - } + for i, c := range cases { + assert.Equal(t, c.expected, MergeRequiredContextsCommitStatus(c.commitStatuses, c.requiredContexts), "case %d", i) } } diff --git a/services/pull/merge.go b/services/pull/merge.go index 1e1ca55bc1..a941c20435 100644 --- a/services/pull/merge.go +++ b/services/pull/merge.go @@ -6,12 +6,15 @@ package pull import ( "context" + "errors" "fmt" + "maps" "os" "path/filepath" "regexp" "strconv" "strings" + "unicode" "code.gitea.io/gitea/models/db" git_model "code.gitea.io/gitea/models/git" @@ -59,7 +62,7 @@ func getMergeMessage(ctx context.Context, baseGitRepo *git.Repository, pr *issue issueReference = "!" } - reviewedOn := fmt.Sprintf("Reviewed-on: %s", httplib.MakeAbsoluteURL(ctx, pr.Issue.Link())) + reviewedOn := "Reviewed-on: " + httplib.MakeAbsoluteURL(ctx, pr.Issue.Link()) reviewedBy := pr.GetApprovers(ctx) if mergeStyle != "" { @@ -93,9 +96,7 @@ func getMergeMessage(ctx context.Context, baseGitRepo *git.Repository, pr *issue vars["HeadRepoOwnerName"] = pr.HeadRepo.OwnerName vars["HeadRepoName"] = pr.HeadRepo.Name } - for extraKey, extraValue := range extraVars { - vars[extraKey] = extraValue - } + maps.Copy(vars, extraVars) refs, err := pr.ResolveCrossReferences(ctx) if err == nil { closeIssueIndexes := make([]string, 0, len(refs)) @@ -160,6 +161,41 @@ func GetDefaultMergeMessage(ctx context.Context, baseGitRepo *git.Repository, pr return getMergeMessage(ctx, baseGitRepo, pr, mergeStyle, nil) } +func AddCommitMessageTailer(message, tailerKey, tailerValue string) string { + tailerLine := tailerKey + ": " + tailerValue + message = strings.ReplaceAll(message, "\r\n", "\n") + message = strings.ReplaceAll(message, "\r", "\n") + if strings.Contains(message, "\n"+tailerLine+"\n") || strings.HasSuffix(message, "\n"+tailerLine) { + return message + } + + if !strings.HasSuffix(message, "\n") { + message += "\n" + } + pos1 := strings.LastIndexByte(message[:len(message)-1], '\n') + pos2 := -1 + if pos1 != -1 { + pos2 = strings.IndexByte(message[pos1:], ':') + if pos2 != -1 { + pos2 += pos1 + } + } + var lastLineKey string + if pos1 != -1 && pos2 != -1 { + lastLineKey = message[pos1+1 : pos2] + } + + isLikelyTailerLine := lastLineKey != "" && unicode.IsUpper(rune(lastLineKey[0])) && strings.Contains(message, "-") + for i := 0; isLikelyTailerLine && i < len(lastLineKey); i++ { + r := rune(lastLineKey[i]) + isLikelyTailerLine = unicode.IsLetter(r) || unicode.IsDigit(r) || r == '-' + } + if !strings.HasSuffix(message, "\n\n") && !isLikelyTailerLine { + message += "\n" + } + return message + tailerLine +} + // ErrInvalidMergeStyle represents an error if merging with disabled merge strategy type ErrInvalidMergeStyle struct { ID int64 @@ -289,7 +325,7 @@ func 
handleCloseCrossReferences(ctx context.Context, pr *issues_model.PullReques } // doMergeAndPush performs the merge operation without changing any pull information in database and pushes it up to the base repository -func doMergeAndPush(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, mergeStyle repo_model.MergeStyle, expectedHeadCommitID, message string, pushTrigger repo_module.PushTrigger) (string, error) { //nolint:unparam +func doMergeAndPush(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, mergeStyle repo_model.MergeStyle, expectedHeadCommitID, message string, pushTrigger repo_module.PushTrigger) (string, error) { //nolint:unparam // non-error result is never used // Clone base repo. mergeCtx, cancel, err := createTemporaryRepoForMerge(ctx, pr, doer, expectedHeadCommitID) if err != nil { @@ -395,10 +431,13 @@ func doMergeAndPush(ctx context.Context, pr *issues_model.PullRequest, doer *use func commitAndSignNoAuthor(ctx *mergeContext, message string) error { cmdCommit := git.NewCommand("commit").AddOptionFormat("--message=%s", message) - if ctx.signKeyID == "" { + if ctx.signKey == nil { cmdCommit.AddArguments("--no-gpg-sign") } else { - cmdCommit.AddOptionFormat("-S%s", ctx.signKeyID) + if ctx.signKey.Format != "" { + cmdCommit.AddConfig("gpg.format", ctx.signKey.Format) + } + cmdCommit.AddOptionFormat("-S%s", ctx.signKey.KeyID) } if err := cmdCommit.Run(ctx, ctx.RunOpts()); err != nil { log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) @@ -517,25 +556,6 @@ func IsUserAllowedToMerge(ctx context.Context, pr *issues_model.PullRequest, p a return false, nil } -// ErrDisallowedToMerge represents an error that a branch is protected and the current user is not allowed to modify it. -type ErrDisallowedToMerge struct { - Reason string -} - -// IsErrDisallowedToMerge checks if an error is an ErrDisallowedToMerge. 
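For illustration only, a minimal usage sketch of the new AddCommitMessageTailer helper from services/pull/merge.go above; the key and value strings here are made up, and the authoritative expectations are the TestAddCommitMessageTailer cases in services/pull/merge_test.go further down.

	msg := AddCommitMessageTailer("Squashed title", "Co-authored-by", "Alice <alice@example.com>")
	// "Squashed title\n\nCo-authored-by: Alice <alice@example.com>"
	msg = AddCommitMessageTailer(msg, "Co-authored-by", "Alice <alice@example.com>")
	// unchanged: an identical "Key: value" tailer line is never appended twice
	msg = AddCommitMessageTailer(msg, "Co-committed-by", "Alice <alice@example.com>")
	// appended directly on the next line, because the last line already looks like a tailer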
-func IsErrDisallowedToMerge(err error) bool { - _, ok := err.(ErrDisallowedToMerge) - return ok -} - -func (err ErrDisallowedToMerge) Error() string { - return fmt.Sprintf("not allowed to merge [reason: %s]", err.Reason) -} - -func (err ErrDisallowedToMerge) Unwrap() error { - return util.ErrPermissionDenied -} - // CheckPullBranchProtections checks whether the PR is ready to be merged (reviews and status checks) func CheckPullBranchProtections(ctx context.Context, pr *issues_model.PullRequest, skipProtectedFilesCheck bool) (err error) { if err = pr.LoadBaseRepo(ctx); err != nil { @@ -555,31 +575,21 @@ func CheckPullBranchProtections(ctx context.Context, pr *issues_model.PullReques return err } if !isPass { - return ErrDisallowedToMerge{ - Reason: "Not all required status checks successful", - } + return util.ErrorWrap(ErrNotReadyToMerge, "Not all required status checks successful") } if !issues_model.HasEnoughApprovals(ctx, pb, pr) { - return ErrDisallowedToMerge{ - Reason: "Does not have enough approvals", - } + return util.ErrorWrap(ErrNotReadyToMerge, "Does not have enough approvals") } if issues_model.MergeBlockedByRejectedReview(ctx, pb, pr) { - return ErrDisallowedToMerge{ - Reason: "There are requested changes", - } + return util.ErrorWrap(ErrNotReadyToMerge, "There are requested changes") } if issues_model.MergeBlockedByOfficialReviewRequests(ctx, pb, pr) { - return ErrDisallowedToMerge{ - Reason: "There are official review requests", - } + return util.ErrorWrap(ErrNotReadyToMerge, "There are official review requests") } if issues_model.MergeBlockedByOutdatedBranch(pb, pr) { - return ErrDisallowedToMerge{ - Reason: "The head branch is behind the base branch", - } + return util.ErrorWrap(ErrNotReadyToMerge, "The head branch is behind the base branch") } if skipProtectedFilesCheck { @@ -587,9 +597,7 @@ func CheckPullBranchProtections(ctx context.Context, pr *issues_model.PullReques } if pb.MergeBlockedByProtectedFiles(pr.ChangedProtectedFiles) { - return ErrDisallowedToMerge{ - Reason: "Changed protected files", - } + return util.ErrorWrap(ErrNotReadyToMerge, "Changed protected files") } return nil @@ -621,13 +629,13 @@ func MergedManually(ctx context.Context, pr *issues_model.PullRequest, doer *use objectFormat := git.ObjectFormatFromName(pr.BaseRepo.ObjectFormatName) if len(commitID) != objectFormat.FullLength() { - return fmt.Errorf("Wrong commit ID") + return errors.New("Wrong commit ID") } commit, err := baseGitRepo.GetCommit(commitID) if err != nil { if git.IsErrNotExist(err) { - return fmt.Errorf("Wrong commit ID") + return errors.New("Wrong commit ID") } return err } @@ -638,14 +646,14 @@ func MergedManually(ctx context.Context, pr *issues_model.PullRequest, doer *use return err } if !ok { - return fmt.Errorf("Wrong commit ID") + return errors.New("Wrong commit ID") } var merged bool if merged, err = SetMerged(ctx, pr, commitID, timeutil.TimeStamp(commit.Author.When.Unix()), doer, issues_model.PullRequestStatusManuallyMerged); err != nil { return err } else if !merged { - return fmt.Errorf("SetMerged failed") + return errors.New("SetMerged failed") } return nil }) @@ -679,48 +687,40 @@ func SetMerged(ctx context.Context, pr *issues_model.PullRequest, mergedCommitID return false, fmt.Errorf("unable to merge PullRequest[%d], some required fields are empty", pr.Index) } - ctx, committer, err := db.TxContext(ctx) - if err != nil { - return false, err - } - defer committer.Close() - - pr.Issue = nil - if err := pr.LoadIssue(ctx); err != nil { - return false, err - } - - if err 
:= pr.Issue.LoadRepo(ctx); err != nil { - return false, err - } + return db.WithTx2(ctx, func(ctx context.Context) (bool, error) { + pr.Issue = nil + if err := pr.LoadIssue(ctx); err != nil { + return false, err + } - if err := pr.Issue.Repo.LoadOwner(ctx); err != nil { - return false, err - } + if err := pr.Issue.LoadRepo(ctx); err != nil { + return false, err + } - // Removing an auto merge pull and ignore if not exist - if err := pull_model.DeleteScheduledAutoMerge(ctx, pr.ID); err != nil && !db.IsErrNotExist(err) { - return false, fmt.Errorf("DeleteScheduledAutoMerge[%d]: %v", pr.ID, err) - } + if err := pr.Issue.Repo.LoadOwner(ctx); err != nil { + return false, err + } - // Set issue as closed - if _, err := issues_model.SetIssueAsClosed(ctx, pr.Issue, pr.Merger, true); err != nil { - return false, fmt.Errorf("ChangeIssueStatus: %w", err) - } + // Removing an auto merge pull and ignore if not exist + if err := pull_model.DeleteScheduledAutoMerge(ctx, pr.ID); err != nil && !db.IsErrNotExist(err) { + return false, fmt.Errorf("DeleteScheduledAutoMerge[%d]: %v", pr.ID, err) + } - // We need to save all of the data used to compute this merge as it may have already been changed by TestPatch. FIXME: need to set some state to prevent TestPatch from running whilst we are merging. - if cnt, err := db.GetEngine(ctx).Where("id = ?", pr.ID). - And("has_merged = ?", false). - Cols("has_merged, status, merge_base, merged_commit_id, merger_id, merged_unix, conflicted_files"). - Update(pr); err != nil { - return false, fmt.Errorf("failed to update pr[%d]: %w", pr.ID, err) - } else if cnt != 1 { - return false, issues_model.ErrIssueAlreadyChanged - } + // Set issue as closed + if _, err := issues_model.SetIssueAsClosed(ctx, pr.Issue, pr.Merger, true); err != nil { + return false, fmt.Errorf("ChangeIssueStatus: %w", err) + } - if err := committer.Commit(); err != nil { - return false, err - } + // We need to save all of the data used to compute this merge as it may have already been changed by testPullRequestBranchMergeable. FIXME: need to set some state to prevent testPullRequestBranchMergeable from running whilst we are merging. + if cnt, err := db.GetEngine(ctx).Where("id = ?", pr.ID). + And("has_merged = ?", false). + Cols("has_merged, status, merge_base, merged_commit_id, merger_id, merged_unix, conflicted_files"). 
+ Update(pr); err != nil { + return false, fmt.Errorf("failed to update pr[%d]: %w", pr.ID, err) + } else if cnt != 1 { + return false, issues_model.ErrIssueAlreadyChanged + } - return true, nil + return true, nil + }) } diff --git a/services/pull/merge_prepare.go b/services/pull/merge_prepare.go index 593cba550a..31a1e13734 100644 --- a/services/pull/merge_prepare.go +++ b/services/pull/merge_prepare.go @@ -23,11 +23,11 @@ import ( ) type mergeContext struct { - *prContext + *prTmpRepoContext doer *user_model.User sig *git.Signature committer *git.Signature - signKeyID string // empty for no-sign, non-empty to sign + signKey *git.SigningKey env []string } @@ -68,8 +68,8 @@ func createTemporaryRepoForMerge(ctx context.Context, pr *issues_model.PullReque } mergeCtx = &mergeContext{ - prContext: prCtx, - doer: doer, + prTmpRepoContext: prCtx, + doer: doer, } if expectedHeadCommitID != "" { @@ -99,9 +99,9 @@ func createTemporaryRepoForMerge(ctx context.Context, pr *issues_model.PullReque mergeCtx.committer = mergeCtx.sig // Determine if we should sign - sign, keyID, signer, _ := asymkey_service.SignMerge(ctx, mergeCtx.pr, mergeCtx.doer, mergeCtx.tmpBasePath, "HEAD", trackingBranch) + sign, key, signer, _ := asymkey_service.SignMerge(ctx, mergeCtx.pr, mergeCtx.doer, mergeCtx.tmpBasePath, "HEAD", trackingBranch) if sign { - mergeCtx.signKeyID = keyID + mergeCtx.signKey = key if pr.BaseRepo.GetTrustModel() == repo_model.CommitterTrustModel || pr.BaseRepo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel { mergeCtx.committer = signer } diff --git a/services/pull/merge_squash.go b/services/pull/merge_squash.go index 076189fd7a..0049c0b117 100644 --- a/services/pull/merge_squash.go +++ b/services/pull/merge_squash.go @@ -5,7 +5,6 @@ package pull import ( "fmt" - "strings" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" @@ -66,18 +65,19 @@ func doMergeStyleSquash(ctx *mergeContext, message string) error { if setting.Repository.PullRequest.AddCoCommitterTrailers && ctx.committer.String() != sig.String() { // add trailer - if !strings.Contains(message, fmt.Sprintf("Co-authored-by: %s", sig.String())) { - message += fmt.Sprintf("\nCo-authored-by: %s", sig.String()) - } - message += fmt.Sprintf("\nCo-committed-by: %s\n", sig.String()) + message = AddCommitMessageTailer(message, "Co-authored-by", sig.String()) + message = AddCommitMessageTailer(message, "Co-committed-by", sig.String()) // FIXME: this one should be removed, it is not really used or widely used } cmdCommit := git.NewCommand("commit"). AddOptionFormat("--author='%s <%s>'", sig.Name, sig.Email). 
AddOptionFormat("--message=%s", message) - if ctx.signKeyID == "" { + if ctx.signKey == nil { cmdCommit.AddArguments("--no-gpg-sign") } else { - cmdCommit.AddOptionFormat("-S%s", ctx.signKeyID) + if ctx.signKey.Format != "" { + cmdCommit.AddConfig("gpg.format", ctx.signKey.Format) + } + cmdCommit.AddOptionFormat("-S%s", ctx.signKey.KeyID) } if err := cmdCommit.Run(ctx, ctx.RunOpts()); err != nil { log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) diff --git a/services/pull/merge_test.go b/services/pull/merge_test.go index 6df6f55d46..91abeb9d9c 100644 --- a/services/pull/merge_test.go +++ b/services/pull/merge_test.go @@ -65,3 +65,28 @@ func Test_expandDefaultMergeMessage(t *testing.T) { }) } } + +func TestAddCommitMessageTailer(t *testing.T) { + // add tailer for empty message + assert.Equal(t, "\n\nTest-tailer: TestValue", AddCommitMessageTailer("", "Test-tailer", "TestValue")) + + // add tailer for message without newlines + assert.Equal(t, "title\n\nTest-tailer: TestValue", AddCommitMessageTailer("title", "Test-tailer", "TestValue")) + assert.Equal(t, "title\n\nNot tailer: xxx\n\nTest-tailer: TestValue", AddCommitMessageTailer("title\n\nNot tailer: xxx", "Test-tailer", "TestValue")) + assert.Equal(t, "title\n\nNotTailer: xxx\n\nTest-tailer: TestValue", AddCommitMessageTailer("title\n\nNotTailer: xxx", "Test-tailer", "TestValue")) + assert.Equal(t, "title\n\nnot-tailer: xxx\n\nTest-tailer: TestValue", AddCommitMessageTailer("title\n\nnot-tailer: xxx", "Test-tailer", "TestValue")) + + // add tailer for message with one EOL + assert.Equal(t, "title\n\nTest-tailer: TestValue", AddCommitMessageTailer("title\n", "Test-tailer", "TestValue")) + + // add tailer for message with two EOLs + assert.Equal(t, "title\n\nTest-tailer: TestValue", AddCommitMessageTailer("title\n\n", "Test-tailer", "TestValue")) + + // add tailer for message with existing tailer (won't duplicate) + assert.Equal(t, "title\n\nTest-tailer: TestValue", AddCommitMessageTailer("title\n\nTest-tailer: TestValue", "Test-tailer", "TestValue")) + assert.Equal(t, "title\n\nTest-tailer: TestValue\n", AddCommitMessageTailer("title\n\nTest-tailer: TestValue\n", "Test-tailer", "TestValue")) + + // add tailer for message with existing tailer and different value (will append) + assert.Equal(t, "title\n\nTest-tailer: v1\nTest-tailer: v2", AddCommitMessageTailer("title\n\nTest-tailer: v1", "Test-tailer", "v2")) + assert.Equal(t, "title\n\nTest-tailer: v1\nTest-tailer: v2", AddCommitMessageTailer("title\n\nTest-tailer: v1\n", "Test-tailer", "v2")) +} diff --git a/services/pull/patch.go b/services/pull/patch.go index 29f2f992ab..9d9b8d0d07 100644 --- a/services/pull/patch.go +++ b/services/pull/patch.go @@ -41,7 +41,7 @@ func DownloadDiffOrPatch(ctx context.Context, pr *issues_model.PullRequest, w io } defer closer.Close() - compareArg := pr.MergeBase + "..." + pr.GetGitRefName() + compareArg := pr.MergeBase + "..." 
+ pr.GetGitHeadRefName() switch { case patch: err = gitRepo.GetPatch(compareArg, w) @@ -67,9 +67,8 @@ var patchErrorSuffices = []string{ ": does not exist in index", } -// TestPatch will test whether a simple patch will apply -func TestPatch(pr *issues_model.PullRequest) error { - ctx, _, finished := process.GetManager().AddContext(graceful.GetManager().HammerContext(), fmt.Sprintf("TestPatch: %s", pr)) +func testPullRequestBranchMergeable(pr *issues_model.PullRequest) error { + ctx, _, finished := process.GetManager().AddContext(graceful.GetManager().HammerContext(), fmt.Sprintf("testPullRequestBranchMergeable: %s", pr)) defer finished() prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr) @@ -81,10 +80,10 @@ func TestPatch(pr *issues_model.PullRequest) error { } defer cancel() - return testPatch(ctx, prCtx, pr) + return testPullRequestTmpRepoBranchMergeable(ctx, prCtx, pr) } -func testPatch(ctx context.Context, prCtx *prContext, pr *issues_model.PullRequest) error { +func testPullRequestTmpRepoBranchMergeable(ctx context.Context, prCtx *prTmpRepoContext, pr *issues_model.PullRequest) error { gitRepo, err := git.OpenRepository(ctx, prCtx.tmpBasePath) if err != nil { return fmt.Errorf("OpenRepository: %w", err) @@ -134,7 +133,7 @@ type errMergeConflict struct { } func (e *errMergeConflict) Error() string { - return fmt.Sprintf("conflict detected at: %s", e.filename) + return "conflict detected at: " + e.filename } func attemptMerge(ctx context.Context, file *unmergedFile, tmpBasePath string, filesToRemove *[]string, filesToAdd *[]git.IndexObjectInfo) error { @@ -355,23 +354,19 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo * } // 3b. Create a plain patch from head to base - tmpPatchFile, err := os.CreateTemp("", "patch") + tmpPatchFile, cleanup, err := setting.AppDataTempDir("git-repo-content").CreateTempFileRandom("patch") if err != nil { log.Error("Unable to create temporary patch file! Error: %v", err) return false, fmt.Errorf("unable to create temporary patch file! Error: %w", err) } - defer func() { - _ = util.Remove(tmpPatchFile.Name()) - }() + defer cleanup() if err := gitRepo.GetDiffBinary(pr.MergeBase+"...tracking", tmpPatchFile); err != nil { - tmpPatchFile.Close() log.Error("Unable to get patch file from %s to %s in %s Error: %v", pr.MergeBase, pr.HeadBranch, pr.BaseRepo.FullName(), err) return false, fmt.Errorf("unable to get patch file from %s to %s in %s Error: %w", pr.MergeBase, pr.HeadBranch, pr.BaseRepo.FullName(), err) } stat, err := tmpPatchFile.Stat() if err != nil { - tmpPatchFile.Close() return false, fmt.Errorf("unable to stat patch file: %w", err) } patchPath := tmpPatchFile.Name() @@ -384,7 +379,7 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo * return false, nil } - log.Trace("PullRequest[%d].testPatch (patchPath): %s", pr.ID, patchPath) + log.Trace("PullRequest[%d].testPullRequestTmpRepoBranchMergeable (patchPath): %s", pr.ID, patchPath) // 4. 
Read the base branch in to the index of the temporary repository _, _, err = git.NewCommand("read-tree", "base").RunStdString(gitRepo.Ctx, &git.RunOpts{Dir: tmpBasePath}) @@ -454,7 +449,7 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo * scanner := bufio.NewScanner(stderrReader) for scanner.Scan() { line := scanner.Text() - log.Trace("PullRequest[%d].testPatch: stderr: %s", pr.ID, line) + log.Trace("PullRequest[%d].testPullRequestTmpRepoBranchMergeable: stderr: %s", pr.ID, line) if strings.HasPrefix(line, prefix) { conflict = true filepath := strings.TrimSpace(strings.Split(line[len(prefix):], ":")[0]) diff --git a/services/pull/pull.go b/services/pull/pull.go index 4641d4ac40..e55d4f5bb1 100644 --- a/services/pull/pull.go +++ b/services/pull/pull.go @@ -33,7 +33,6 @@ import ( repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" - gitea_context "code.gitea.io/gitea/services/context" issue_service "code.gitea.io/gitea/services/issue" notify_service "code.gitea.io/gitea/services/notify" ) @@ -96,7 +95,7 @@ func NewPullRequest(ctx context.Context, opts *NewPullRequestOptions) error { } defer cancel() - if err := testPatch(ctx, prCtx, pr); err != nil { + if err := testPullRequestTmpRepoBranchMergeable(ctx, prCtx, pr); err != nil { return err } @@ -143,7 +142,7 @@ func NewPullRequest(ctx context.Context, opts *NewPullRequestOptions) error { } compareInfo, err := baseGitRepo.GetCompareInfo(pr.BaseRepo.RepoPath(), - git.BranchPrefix+pr.BaseBranch, pr.GetGitRefName(), false, false) + git.BranchPrefix+pr.BaseBranch, pr.GetGitHeadRefName(), false, false) if err != nil { return err } @@ -184,7 +183,7 @@ func NewPullRequest(ctx context.Context, opts *NewPullRequestOptions) error { return nil }); err != nil { // cleanup: this will only remove the reference, the real commit will be clean up when next GC - if err1 := baseGitRepo.RemoveReference(pr.GetGitRefName()); err1 != nil { + if err1 := baseGitRepo.RemoveReference(pr.GetGitHeadRefName()); err1 != nil { log.Error("RemoveReference: %v", err1) } return err @@ -314,12 +313,12 @@ func ChangeTargetBranch(ctx context.Context, pr *issues_model.PullRequest, doer pr.BaseBranch = targetBranch // Refresh patch - if err := TestPatch(pr); err != nil { + if err := testPullRequestBranchMergeable(pr); err != nil { return err } // Update target branch, PR diff and status - // This is the same as checkAndUpdateStatus in check service, but also updates base_branch + // This is the same as markPullRequestAsMergeable in check service, but also updates base_branch if pr.Status == issues_model.PullRequestStatusChecking { pr.Status = issues_model.PullRequestStatusMergeable } @@ -409,7 +408,7 @@ func AddTestPullRequestTask(opts TestPullRequestOptions) { continue } - AddToTaskQueue(ctx, pr) + StartPullRequestCheckImmediately(ctx, pr) comment, err := CreatePushPullComment(ctx, opts.Doer, pr, opts.OldCommitID, opts.NewCommitID) if err == nil && comment != nil { notify_service.PullRequestPushCommits(ctx, opts.Doer, pr, comment) @@ -463,12 +462,7 @@ func AddTestPullRequestTask(opts TestPullRequestOptions) { } if !pr.IsWorkInProgress(ctx) { - var reviewNotifiers []*issue_service.ReviewRequestNotifier - if opts.IsForcePush { - reviewNotifiers, err = issue_service.PullRequestCodeOwnersReview(ctx, pr) - } else { - reviewNotifiers, err = issue_service.PullRequestCodeOwnersReviewSpecialCommits(ctx, pr, opts.OldCommitID, opts.NewCommitID) - } + reviewNotifiers, err := 
issue_service.PullRequestCodeOwnersReview(ctx, pr) if err != nil { log.Error("PullRequestCodeOwnersReview: %v", err) } @@ -502,7 +496,7 @@ func AddTestPullRequestTask(opts TestPullRequestOptions) { log.Error("UpdateCommitDivergence: %v", err) } } - AddToTaskQueue(ctx, pr) + StartPullRequestCheckDelayable(ctx, pr) } }) } @@ -572,7 +566,7 @@ func PushToBaseRepo(ctx context.Context, pr *issues_model.PullRequest) (err erro } func pushToBaseRepoHelper(ctx context.Context, pr *issues_model.PullRequest, prefixHeadBranch string) (err error) { - log.Trace("PushToBaseRepo[%d]: pushing commits to base repo '%s'", pr.BaseRepoID, pr.GetGitRefName()) + log.Trace("PushToBaseRepo[%d]: pushing commits to base repo '%s'", pr.BaseRepoID, pr.GetGitHeadRefName()) if err := pr.LoadHeadRepo(ctx); err != nil { log.Error("Unable to load head repository for PR[%d] Error: %v", pr.ID, err) @@ -593,7 +587,7 @@ func pushToBaseRepoHelper(ctx context.Context, pr *issues_model.PullRequest, pre return fmt.Errorf("unable to load poster %d for pr %d: %w", pr.Issue.PosterID, pr.ID, err) } - gitRefName := pr.GetGitRefName() + gitRefName := pr.GetGitHeadRefName() if err := git.Push(ctx, headRepoPath, git.PushOptions{ Remote: baseRepoPath, @@ -647,13 +641,13 @@ func UpdatePullsRefs(ctx context.Context, repo *repo_model.Repository, update *r // UpdateRef update refs/pull/id/head directly for agit flow pull request func UpdateRef(ctx context.Context, pr *issues_model.PullRequest) (err error) { - log.Trace("UpdateRef[%d]: upgate pull request ref in base repo '%s'", pr.ID, pr.GetGitRefName()) + log.Trace("UpdateRef[%d]: upgate pull request ref in base repo '%s'", pr.ID, pr.GetGitHeadRefName()) if err := pr.LoadBaseRepo(ctx); err != nil { log.Error("Unable to load base repository for PR[%d] Error: %v", pr.ID, err) return err } - _, _, err = git.NewCommand("update-ref").AddDynamicArguments(pr.GetGitRefName(), pr.HeadCommitID).RunStdString(ctx, &git.RunOpts{Dir: pr.BaseRepo.RepoPath()}) + _, _, err = git.NewCommand("update-ref").AddDynamicArguments(pr.GetGitHeadRefName(), pr.HeadCommitID).RunStdString(ctx, &git.RunOpts{Dir: pr.BaseRepo.RepoPath()}) if err != nil { log.Error("Unable to update ref in base repository for PR[%d] Error: %v", pr.ID, err) } @@ -763,7 +757,7 @@ func CloseRepoBranchesPulls(ctx context.Context, doer *user_model.User, repo *re var errs []error for _, branch := range branches { - prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(ctx, repo.ID, branch.Name) + prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(ctx, repo.ID, branch) if err != nil { return err } @@ -821,9 +815,9 @@ func GetSquashMergeCommitMessages(ctx context.Context, pr *issues_model.PullRequ if pr.Flow == issues_model.PullRequestFlowGithub { headCommit, err = gitRepo.GetBranchCommit(pr.HeadBranch) } else { - pr.HeadCommitID, err = gitRepo.GetRefCommitID(pr.GetGitRefName()) + pr.HeadCommitID, err = gitRepo.GetRefCommitID(pr.GetGitHeadRefName()) if err != nil { - log.Error("Unable to get head commit: %s Error: %v", pr.GetGitRefName(), err) + log.Error("Unable to get head commit: %s Error: %v", pr.GetGitHeadRefName(), err) return "" } headCommit, err = gitRepo.GetCommit(pr.HeadCommitID) @@ -950,12 +944,6 @@ func GetSquashMergeCommitMessages(ctx context.Context, pr *issues_model.PullRequ return stringBuilder.String() } -// GetIssuesLastCommitStatus returns a map of issue ID to the most recent commit's latest status -func GetIssuesLastCommitStatus(ctx context.Context, issues issues_model.IssueList) (map[int64]*git_model.CommitStatus, 
error) { - _, lastStatus, err := GetIssuesAllCommitStatus(ctx, issues) - return lastStatus, err -} - // GetIssuesAllCommitStatus returns a map of issue ID to a list of all statuses for the most recent commit as well as a map of issue ID to only the commit's latest status func GetIssuesAllCommitStatus(ctx context.Context, issues issues_model.IssueList) (map[int64][]*git_model.CommitStatus, map[int64]*git_model.CommitStatus, error) { if err := issues.LoadPullRequests(ctx); err != nil { @@ -1004,12 +992,12 @@ func GetIssuesAllCommitStatus(ctx context.Context, issues issues_model.IssueList // getAllCommitStatus get pr's commit statuses. func getAllCommitStatus(ctx context.Context, gitRepo *git.Repository, pr *issues_model.PullRequest) (statuses []*git_model.CommitStatus, lastStatus *git_model.CommitStatus, err error) { - sha, shaErr := gitRepo.GetRefCommitID(pr.GetGitRefName()) + sha, shaErr := gitRepo.GetRefCommitID(pr.GetGitHeadRefName()) if shaErr != nil { return nil, nil, shaErr } - statuses, _, err = git_model.GetLatestCommitStatus(ctx, pr.BaseRepo.ID, sha, db.ListOptionsAll) + statuses, err = git_model.GetLatestCommitStatus(ctx, pr.BaseRepo.ID, sha, db.ListOptionsAll) lastStatus = git_model.CalcCommitStatus(statuses) return statuses, lastStatus, err } @@ -1054,7 +1042,7 @@ func IsHeadEqualWithBranch(ctx context.Context, pr *issues_model.PullRequest, br return false, err } } else { - pr.HeadCommitID, err = baseGitRepo.GetRefCommitID(pr.GetGitRefName()) + pr.HeadCommitID, err = baseGitRepo.GetRefCommitID(pr.GetGitHeadRefName()) if err != nil { return false, err } @@ -1076,11 +1064,9 @@ type CommitInfo struct { // GetPullCommits returns all commits on given pull request and the last review commit sha // Attention: The last review commit sha must be from the latest review whose commit id is not empty. // So the type of the latest review cannot be "ReviewTypeRequest". 
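As a hedged illustration of the refactor just below: GetPullCommits no longer depends on services/context, so a caller now passes the base git repository and the (possibly nil) doer explicitly. A hypothetical web-handler call site could look like the following; only the new signature comes from the diff, the handler wiring and template keys are assumptions.

	// before: commits, lastReviewCommitSHA, err := pull_service.GetPullCommits(ctx, issue)
	commits, lastReviewCommitSHA, err := pull_service.GetPullCommits(ctx, ctx.Repo.GitRepo, ctx.Doer, issue)
	if err != nil {
		ctx.ServerError("GetPullCommits", err)
		return
	}
	ctx.Data["Commits"] = commits                         // hypothetical template keys
	ctx.Data["LastReviewCommitSha"] = lastReviewCommitSHA // for illustration only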
-func GetPullCommits(ctx *gitea_context.Context, issue *issues_model.Issue) ([]CommitInfo, string, error) { +func GetPullCommits(ctx context.Context, baseGitRepo *git.Repository, doer *user_model.User, issue *issues_model.Issue) ([]CommitInfo, string, error) { pull := issue.PullRequest - baseGitRepo := ctx.Repo.GitRepo - if err := pull.LoadBaseRepo(ctx); err != nil { return nil, "", err } @@ -1088,7 +1074,7 @@ func GetPullCommits(ctx *gitea_context.Context, issue *issues_model.Issue) ([]Co if pull.HasMerged { baseBranch = pull.MergeBase } - prInfo, err := baseGitRepo.GetCompareInfo(pull.BaseRepo.RepoPath(), baseBranch, pull.GetGitRefName(), true, false) + prInfo, err := baseGitRepo.GetCompareInfo(pull.BaseRepo.RepoPath(), baseBranch, pull.GetGitHeadRefName(), true, false) if err != nil { return nil, "", err } @@ -1116,11 +1102,11 @@ func GetPullCommits(ctx *gitea_context.Context, issue *issues_model.Issue) ([]Co } var lastReviewCommitID string - if ctx.IsSigned { + if doer != nil { // get last review of current user and store information in context (if available) lastreview, err := issues_model.FindLatestReviews(ctx, issues_model.FindReviewOptions{ IssueID: issue.ID, - ReviewerID: ctx.Doer.ID, + ReviewerID: doer.ID, Types: []issues_model.ReviewType{ issues_model.ReviewTypeApprove, issues_model.ReviewTypeComment, diff --git a/services/pull/review.go b/services/pull/review.go index 78723a58ae..ee18db3859 100644 --- a/services/pull/review.go +++ b/services/pull/review.go @@ -200,7 +200,7 @@ func createCodeComment(ctx context.Context, doer *user_model.User, repo *repo_mo defer closer.Close() invalidated := false - head := pr.GetGitRefName() + head := pr.GetGitHeadRefName() if line > 0 { if reviewID != 0 { first, err := issues_model.FindComments(ctx, &issues_model.FindCommentsOptions{ @@ -237,16 +237,16 @@ func createCodeComment(ctx context.Context, doer *user_model.User, repo *repo_mo if err == nil { commitID = commit.ID.String() } else if !(strings.Contains(err.Error(), "exit status 128 - fatal: no such path") || notEnoughLines.MatchString(err.Error())) { - return nil, fmt.Errorf("LineBlame[%s, %s, %s, %d]: %w", pr.GetGitRefName(), gitRepo.Path, treePath, line, err) + return nil, fmt.Errorf("LineBlame[%s, %s, %s, %d]: %w", pr.GetGitHeadRefName(), gitRepo.Path, treePath, line, err) } } } // Only fetch diff if comment is review comment if len(patch) == 0 && reviewID != 0 { - headCommitID, err := gitRepo.GetRefCommitID(pr.GetGitRefName()) + headCommitID, err := gitRepo.GetRefCommitID(pr.GetGitHeadRefName()) if err != nil { - return nil, fmt.Errorf("GetRefCommitID[%s]: %w", pr.GetGitRefName(), err) + return nil, fmt.Errorf("GetRefCommitID[%s]: %w", pr.GetGitHeadRefName(), err) } if len(commitID) == 0 { commitID = headCommitID @@ -301,7 +301,7 @@ func SubmitReview(ctx context.Context, doer *user_model.User, gitRepo *git.Repos return nil, nil, ErrSubmitReviewOnClosedPR } - headCommitID, err := gitRepo.GetRefCommitID(pr.GetGitRefName()) + headCommitID, err := gitRepo.GetRefCommitID(pr.GetGitHeadRefName()) if err != nil { return nil, nil, err } @@ -395,7 +395,7 @@ func DismissReview(ctx context.Context, reviewID, repoID int64, message string, } if review.Type != issues_model.ReviewTypeApprove && review.Type != issues_model.ReviewTypeReject { - return nil, fmt.Errorf("not need to dismiss this review because it's type is not Approve or change request") + return nil, errors.New("not need to dismiss this review because it's type is not Approve or change request") } // load data for notify @@ -405,7 
+405,7 @@ func DismissReview(ctx context.Context, reviewID, repoID int64, message string, // Check if the review's repoID is the one we're currently expecting. if review.Issue.RepoID != repoID { - return nil, fmt.Errorf("reviews's repository is not the same as the one we expect") + return nil, errors.New("reviews's repository is not the same as the one we expect") } issue := review.Issue diff --git a/services/pull/reviewer.go b/services/pull/reviewer.go index bf0d8cb298..52f2f3401c 100644 --- a/services/pull/reviewer.go +++ b/services/pull/reviewer.go @@ -85,5 +85,5 @@ func GetReviewerTeams(ctx context.Context, repo *repo_model.Repository) ([]*orga return nil, nil } - return organization.GetTeamsWithAccessToRepoUnit(ctx, repo.OwnerID, repo.ID, perm.AccessModeRead, unit.TypePullRequests) + return organization.GetTeamsWithAccessToAnyRepoUnit(ctx, repo.OwnerID, repo.ID, perm.AccessModeRead, unit.TypePullRequests) } diff --git a/services/pull/temp_repo.go b/services/pull/temp_repo.go index 3f33370798..89f150fd92 100644 --- a/services/pull/temp_repo.go +++ b/services/pull/temp_repo.go @@ -28,7 +28,7 @@ const ( stagingBranch = "staging" // this is used for a working branch ) -type prContext struct { +type prTmpRepoContext struct { context.Context tmpBasePath string pr *issues_model.PullRequest @@ -36,7 +36,7 @@ type prContext struct { errbuf *strings.Builder // any use should be preceded by a Reset and preferably after use } -func (ctx *prContext) RunOpts() *git.RunOpts { +func (ctx *prTmpRepoContext) RunOpts() *git.RunOpts { ctx.outbuf.Reset() ctx.errbuf.Reset() return &git.RunOpts{ @@ -48,7 +48,7 @@ func (ctx *prContext) RunOpts() *git.RunOpts { // createTemporaryRepoForPR creates a temporary repo with "base" for pr.BaseBranch and "tracking" for pr.HeadBranch // it also create a second base branch called "original_base" -func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest) (prCtx *prContext, cancel context.CancelFunc, err error) { +func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest) (prCtx *prTmpRepoContext, cancel context.CancelFunc, err error) { if err := pr.LoadHeadRepo(ctx); err != nil { log.Error("%-v LoadHeadRepo: %v", pr, err) return nil, nil, fmt.Errorf("%v LoadHeadRepo: %w", pr, err) @@ -74,23 +74,20 @@ func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest) } // Clone base repo. - tmpBasePath, err := repo_module.CreateTemporaryPath("pull") + tmpBasePath, cleanup, err := repo_module.CreateTemporaryPath("pull") if err != nil { log.Error("CreateTemporaryPath[%-v]: %v", pr, err) return nil, nil, err } - prCtx = &prContext{ + cancel = cleanup + + prCtx = &prTmpRepoContext{ Context: ctx, tmpBasePath: tmpBasePath, pr: pr, outbuf: &strings.Builder{}, errbuf: &strings.Builder{}, } - cancel = func() { - if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { - log.Error("Error whilst removing removing temporary repo for %-v: %v", pr, err) - } - } baseRepoPath := pr.BaseRepo.RepoPath() headRepoPath := pr.HeadRepo.RepoPath() @@ -177,7 +174,7 @@ func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest) } else if len(pr.HeadCommitID) == objectFormat.FullLength() { // for not created pull request headBranch = pr.HeadCommitID } else { - headBranch = pr.GetGitRefName() + headBranch = pr.GetGitHeadRefName() } if err := git.NewCommand("fetch").AddArguments(fetchArgs...).AddDynamicArguments(remoteRepoName, headBranch+":"+trackingBranch). 
Run(ctx, prCtx.RunOpts()); err != nil { diff --git a/services/pull/update.go b/services/pull/update.go index 3e00dd4e65..b8f84e3d65 100644 --- a/services/pull/update.go +++ b/services/pull/update.go @@ -5,6 +5,7 @@ package pull import ( "context" + "errors" "fmt" git_model "code.gitea.io/gitea/models/git" @@ -23,7 +24,7 @@ import ( func Update(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, message string, rebase bool) error { if pr.Flow == issues_model.PullRequestFlowAGit { // TODO: update of agit flow pull request's head branch is unsupported - return fmt.Errorf("update of agit flow pull request's head branch is unsupported") + return errors.New("update of agit flow pull request's head branch is unsupported") } releaser, err := globallock.Lock(ctx, getPullWorkingLockKey(pr.ID)) @@ -40,22 +41,6 @@ func Update(ctx context.Context, pr *issues_model.PullRequest, doer *user_model. return fmt.Errorf("HeadBranch of PR %d is up to date", pr.Index) } - if rebase { - defer func() { - go AddTestPullRequestTask(TestPullRequestOptions{ - RepoID: pr.BaseRepo.ID, - Doer: doer, - Branch: pr.BaseBranch, - IsSync: false, - IsForcePush: false, - OldCommitID: "", - NewCommitID: "", - }) - }() - - return updateHeadByRebaseOnToBase(ctx, pr, doer) - } - if err := pr.LoadBaseRepo(ctx); err != nil { log.Error("unable to load BaseRepo for %-v during update-by-merge: %v", pr, err) return fmt.Errorf("unable to load BaseRepo for PR[%d] during update-by-merge: %w", pr.ID, err) @@ -73,6 +58,22 @@ func Update(ctx context.Context, pr *issues_model.PullRequest, doer *user_model. return fmt.Errorf("unable to load HeadRepo for PR[%d] during update-by-merge: %w", pr.ID, err) } + defer func() { + go AddTestPullRequestTask(TestPullRequestOptions{ + RepoID: pr.BaseRepo.ID, + Doer: doer, + Branch: pr.BaseBranch, + IsSync: false, + IsForcePush: false, + OldCommitID: "", + NewCommitID: "", + }) + }() + + if rebase { + return updateHeadByRebaseOnToBase(ctx, pr, doer) + } + // TODO: FakePR: it is somewhat hacky, but it is the only way to "merge" at the moment // ideally in the future the "merge" functions should be refactored to decouple from the PullRequest // now use a fake reverse PR to switch head&base repos/branches @@ -89,19 +90,6 @@ func Update(ctx context.Context, pr *issues_model.PullRequest, doer *user_model. 
} _, err = doMergeAndPush(ctx, reversePR, doer, repo_model.MergeStyleMerge, "", message, repository.PushTriggerPRUpdateWithBase) - - defer func() { - go AddTestPullRequestTask(TestPullRequestOptions{ - RepoID: reversePR.HeadRepo.ID, - Doer: doer, - Branch: reversePR.HeadBranch, - IsSync: false, - IsForcePush: false, - OldCommitID: "", - NewCommitID: "", - }) - }() - return err } diff --git a/services/release/release_test.go b/services/release/release_test.go index 95a54832b9..36a9f667d6 100644 --- a/services/release/release_test.go +++ b/services/release/release_test.go @@ -250,9 +250,9 @@ func TestRelease_Update(t *testing.T) { assert.NoError(t, UpdateRelease(db.DefaultContext, user, gitRepo, release, []string{attach.UUID}, nil, nil)) assert.NoError(t, repo_model.GetReleaseAttachments(db.DefaultContext, release)) assert.Len(t, release.Attachments, 1) - assert.EqualValues(t, attach.UUID, release.Attachments[0].UUID) - assert.EqualValues(t, release.ID, release.Attachments[0].ReleaseID) - assert.EqualValues(t, attach.Name, release.Attachments[0].Name) + assert.Equal(t, attach.UUID, release.Attachments[0].UUID) + assert.Equal(t, release.ID, release.Attachments[0].ReleaseID) + assert.Equal(t, attach.Name, release.Attachments[0].Name) // update the attachment name assert.NoError(t, UpdateRelease(db.DefaultContext, user, gitRepo, release, nil, nil, map[string]string{ @@ -261,9 +261,9 @@ func TestRelease_Update(t *testing.T) { release.Attachments = nil assert.NoError(t, repo_model.GetReleaseAttachments(db.DefaultContext, release)) assert.Len(t, release.Attachments, 1) - assert.EqualValues(t, attach.UUID, release.Attachments[0].UUID) - assert.EqualValues(t, release.ID, release.Attachments[0].ReleaseID) - assert.EqualValues(t, "test2.txt", release.Attachments[0].Name) + assert.Equal(t, attach.UUID, release.Attachments[0].UUID) + assert.Equal(t, release.ID, release.Attachments[0].ReleaseID) + assert.Equal(t, "test2.txt", release.Attachments[0].Name) // delete the attachment assert.NoError(t, UpdateRelease(db.DefaultContext, user, gitRepo, release, nil, []string{attach.UUID}, nil)) diff --git a/services/repository/adopt.go b/services/repository/adopt.go index b7321156d9..2bd1c55de4 100644 --- a/services/repository/adopt.go +++ b/services/repository/adopt.go @@ -16,7 +16,6 @@ import ( repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/container" - "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/optional" @@ -28,18 +27,30 @@ import ( "github.com/gobwas/glob" ) +func deleteFailedAdoptRepository(repoID int64) error { + return db.WithTx(db.DefaultContext, func(ctx context.Context) error { + if err := deleteDBRepository(ctx, repoID); err != nil { + return fmt.Errorf("deleteDBRepository: %w", err) + } + if err := git_model.DeleteRepoBranches(ctx, repoID); err != nil { + return fmt.Errorf("deleteRepoBranches: %w", err) + } + return repo_model.DeleteRepoReleases(ctx, repoID) + }) +} + // AdoptRepository adopts pre-existing repository files for the user/organization. 
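The reworked adoption flow below registers its failure cleanup once, in a defer that inspects a single err variable, which is why every later step assigns with "err =" rather than ":=" (see the WARNING comment in the hunk). A minimal sketch of that pattern, with hypothetical step names standing in for the real operations:

	// createDatabaseRecords, adoptFilesFromDisk, markReady, cleanupDatabaseRecords are placeholders
	func adoptLikeFlow() (err error) {
		if err = createDatabaseRecords(); err != nil { // 1 - create the DB rows first
			return err
		}
		defer func() {
			if err != nil { // fires for any later failure, as long as err is never shadowed
				cleanupDatabaseRecords()
			}
		}()
		if err = adoptFilesFromDisk(); err != nil { // 2 - "err =", not ":=", keeps the deferred check informed
			return err
		}
		return markReady() // 3 - a failure here also reaches the defer via the named return value
	}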
-func AdoptRepository(ctx context.Context, doer, u *user_model.User, opts CreateRepoOptions) (*repo_model.Repository, error) { - if !doer.IsAdmin && !u.CanCreateRepo() { +func AdoptRepository(ctx context.Context, doer, owner *user_model.User, opts CreateRepoOptions) (*repo_model.Repository, error) { + if !doer.CanCreateRepoIn(owner) { return nil, repo_model.ErrReachLimitOfRepo{ - Limit: u.MaxRepoCreation, + Limit: owner.MaxRepoCreation, } } repo := &repo_model.Repository{ - OwnerID: u.ID, - Owner: u, - OwnerName: u.Name, + OwnerID: owner.ID, + Owner: owner, + OwnerName: owner.Name, Name: opts.Name, LowerName: strings.ToLower(opts.Name), Description: opts.Description, @@ -48,59 +59,52 @@ func AdoptRepository(ctx context.Context, doer, u *user_model.User, opts CreateR IsPrivate: opts.IsPrivate, IsFsckEnabled: !opts.IsMirror, CloseIssuesViaCommitInAnyBranch: setting.Repository.DefaultCloseIssuesViaCommitsInAnyBranch, - Status: opts.Status, + Status: repo_model.RepositoryBeingMigrated, IsEmpty: !opts.AutoInit, } - if err := db.WithTx(ctx, func(ctx context.Context) error { - isExist, err := gitrepo.IsRepositoryExist(ctx, repo) + // 1 - create the repository database operations first + err := db.WithTx(ctx, func(ctx context.Context) error { + return createRepositoryInDB(ctx, doer, owner, repo, false) + }) + if err != nil { + return nil, err + } + + // last - clean up if something goes wrong + // WARNING: Don't override all later err with local variables + defer func() { if err != nil { - log.Error("Unable to check if %s exists. Error: %v", repo.FullName(), err) - return err - } - if !isExist { - return repo_model.ErrRepoNotExist{ - OwnerName: u.Name, - Name: repo.Name, + // we can not use the ctx because it maybe canceled or timeout + if errDel := deleteFailedAdoptRepository(repo.ID); errDel != nil { + log.Error("Failed to delete repository %s that could not be adopted: %v", repo.FullName(), errDel) } } + }() - if err := CreateRepositoryByExample(ctx, doer, u, repo, true, false); err != nil { - return err - } - - // Re-fetch the repository from database before updating it (else it would - // override changes that were done earlier with sql) - if repo, err = repo_model.GetRepositoryByID(ctx, repo.ID); err != nil { - return fmt.Errorf("getRepositoryByID: %w", err) - } - return nil - }); err != nil { - return nil, err + // Re-fetch the repository from database before updating it (else it would + // override changes that were done earlier with sql) + if repo, err = repo_model.GetRepositoryByID(ctx, repo.ID); err != nil { + return nil, fmt.Errorf("getRepositoryByID: %w", err) } - if err := func() error { - if err := adoptRepository(ctx, repo, opts.DefaultBranch); err != nil { - return fmt.Errorf("adoptRepository: %w", err) - } + // 2 - adopt the repository from disk + if err = adoptRepository(ctx, repo, opts.DefaultBranch); err != nil { + return nil, fmt.Errorf("adoptRepository: %w", err) + } - if err := repo_module.CheckDaemonExportOK(ctx, repo); err != nil { - return fmt.Errorf("checkDaemonExportOK: %w", err) - } + // 3 - Update the git repository + if err = updateGitRepoAfterCreate(ctx, repo); err != nil { + return nil, fmt.Errorf("updateGitRepoAfterCreate: %w", err) + } - if stdout, _, err := git.NewCommand("update-server-info"). 
- RunStdString(ctx, &git.RunOpts{Dir: repo.RepoPath()}); err != nil { - log.Error("CreateRepository(git update-server-info) in %v: Stdout: %s\nError: %v", repo, stdout, err) - return fmt.Errorf("CreateRepository(git update-server-info): %w", err) - } - return nil - }(); err != nil { - if errDel := DeleteRepository(ctx, doer, repo, false /* no notify */); errDel != nil { - log.Error("Failed to delete repository %s that could not be adopted: %v", repo.FullName(), errDel) - } - return nil, err + // 4 - update repository status + repo.Status = repo_model.RepositoryReady + if err = repo_model.UpdateRepositoryColsWithAutoTime(ctx, repo, "status"); err != nil { + return nil, fmt.Errorf("UpdateRepositoryCols: %w", err) } - notify_service.AdoptRepository(ctx, doer, u, repo) + + notify_service.AdoptRepository(ctx, doer, owner, repo) return repo, nil } @@ -192,8 +196,13 @@ func adoptRepository(ctx context.Context, repo *repo_model.Repository, defaultBr return fmt.Errorf("setDefaultBranch: %w", err) } } - if err = repo_module.UpdateRepository(ctx, repo, false); err != nil { - return fmt.Errorf("updateRepository: %w", err) + + if err = repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "is_empty", "default_branch"); err != nil { + return fmt.Errorf("UpdateRepositoryCols: %w", err) + } + + if err = repo_module.UpdateRepoSize(ctx, repo); err != nil { + log.Error("Failed to update size for repository: %v", err) } return nil @@ -256,7 +265,7 @@ func checkUnadoptedRepositories(ctx context.Context, userName string, repoNamesT } return err } - repos, _, err := repo_model.GetUserRepositories(ctx, &repo_model.SearchRepoOptions{ + repos, _, err := repo_model.GetUserRepositories(ctx, repo_model.SearchRepoOptions{ Actor: ctxUser, Private: true, ListOptions: db.ListOptions{ diff --git a/services/repository/adopt_test.go b/services/repository/adopt_test.go index 294185ea1f..86f586c748 100644 --- a/services/repository/adopt_test.go +++ b/services/repository/adopt_test.go @@ -14,6 +14,7 @@ import ( "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" "github.com/stretchr/testify/assert" ) @@ -28,7 +29,7 @@ func TestCheckUnadoptedRepositories_Add(t *testing.T) { } total := 30 - for i := 0; i < total; i++ { + for range total { unadopted.add("something") } @@ -89,10 +90,36 @@ func TestListUnadoptedRepositories_ListOptions(t *testing.T) { func TestAdoptRepository(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) - assert.NoError(t, unittest.SyncDirs(filepath.Join(setting.RepoRootPath, "user2", "repo1.git"), filepath.Join(setting.RepoRootPath, "user2", "test-adopt.git"))) + user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}) - _, err := AdoptRepository(db.DefaultContext, user2, user2, CreateRepoOptions{Name: "test-adopt"}) + + // a successful adopt + destDir := filepath.Join(setting.RepoRootPath, user2.Name, "test-adopt.git") + assert.NoError(t, unittest.SyncDirs(filepath.Join(setting.RepoRootPath, user2.Name, "repo1.git"), destDir)) + + adoptedRepo, err := AdoptRepository(db.DefaultContext, user2, user2, CreateRepoOptions{Name: "test-adopt"}) assert.NoError(t, err) repoTestAdopt := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{Name: "test-adopt"}) assert.Equal(t, "sha1", repoTestAdopt.ObjectFormatName) + + // just delete the adopted repo's db records + err = deleteFailedAdoptRepository(adoptedRepo.ID) + assert.NoError(t, err) + + unittest.AssertNotExistsBean(t, 
&repo_model.Repository{OwnerName: user2.Name, Name: "test-adopt"}) + + // a failed adopt because some mock data + // remove the hooks directory and create a file so that we cannot create the hooks successfully + _ = os.RemoveAll(filepath.Join(destDir, "hooks", "update.d")) + assert.NoError(t, os.WriteFile(filepath.Join(destDir, "hooks", "update.d"), []byte("tests"), os.ModePerm)) + + adoptedRepo, err = AdoptRepository(db.DefaultContext, user2, user2, CreateRepoOptions{Name: "test-adopt"}) + assert.Error(t, err) + assert.Nil(t, adoptedRepo) + + unittest.AssertNotExistsBean(t, &repo_model.Repository{OwnerName: user2.Name, Name: "test-adopt"}) + + exist, err := util.IsExist(repo_model.RepoPath(user2.Name, "test-adopt")) + assert.NoError(t, err) + assert.True(t, exist) // the repository should be still in the disk } diff --git a/services/repository/archiver/archiver.go b/services/repository/archiver/archiver.go index d39acc080d..a657e3884c 100644 --- a/services/repository/archiver/archiver.go +++ b/services/repository/archiver/archiver.go @@ -44,7 +44,7 @@ type ErrUnknownArchiveFormat struct { // Error implements error func (err ErrUnknownArchiveFormat) Error() string { - return fmt.Sprintf("unknown format: %s", err.RequestNameType) + return "unknown format: " + err.RequestNameType } // Is implements error @@ -60,7 +60,7 @@ type RepoRefNotFoundError struct { // Error implements error. func (e RepoRefNotFoundError) Error() string { - return fmt.Sprintf("unrecognized repository reference: %s", e.RefShortName) + return "unrecognized repository reference: " + e.RefShortName } func (e RepoRefNotFoundError) Is(err error) bool { diff --git a/services/repository/archiver/archiver_test.go b/services/repository/archiver/archiver_test.go index 522f90558a..87324ad38c 100644 --- a/services/repository/archiver/archiver_test.go +++ b/services/repository/archiver/archiver_test.go @@ -33,7 +33,7 @@ func TestArchive_Basic(t *testing.T) { bogusReq, err := NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, firstCommit+".zip") assert.NoError(t, err) assert.NotNil(t, bogusReq) - assert.EqualValues(t, firstCommit+".zip", bogusReq.GetArchiveName()) + assert.Equal(t, firstCommit+".zip", bogusReq.GetArchiveName()) // Check a series of bogus requests. // Step 1, valid commit with a bad extension. @@ -54,12 +54,12 @@ func TestArchive_Basic(t *testing.T) { bogusReq, err = NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, "master.zip") assert.NoError(t, err) assert.NotNil(t, bogusReq) - assert.EqualValues(t, "master.zip", bogusReq.GetArchiveName()) + assert.Equal(t, "master.zip", bogusReq.GetArchiveName()) bogusReq, err = NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, "test/archive.zip") assert.NoError(t, err) assert.NotNil(t, bogusReq) - assert.EqualValues(t, "test-archive.zip", bogusReq.GetArchiveName()) + assert.Equal(t, "test-archive.zip", bogusReq.GetArchiveName()) // Now two valid requests, firstCommit with valid extensions. 
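Throughout this patch, assertions on values that are already plain strings or identical struct types switch from assert.EqualValues to assert.Equal, as in the archiver tests above. The difference, sketched with a hypothetical named string type: EqualValues converts compatible types before comparing, while Equal also requires the types to match, so it is the stricter assertion once both sides share a type.

package example

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

type archiveName string // illustrative named type, not from the patch

func TestEqualVsEqualValues(t *testing.T) {
	got := archiveName("master.zip")

	// EqualValues converts before comparing, so a plain string literal passes
	// even though the static types differ.
	assert.EqualValues(t, "master.zip", got)

	// Equal also checks the type: string vs archiveName are NOT equal here,
	// which is why the patch only switches where both sides are the same type.
	assert.NotEqual(t, "master.zip", got)
	assert.Equal(t, "master.zip", string(got))
}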
zipReq, err := NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, firstCommit+".zip") diff --git a/services/repository/avatar.go b/services/repository/avatar.go index 15e51d4a25..998ac42230 100644 --- a/services/repository/avatar.go +++ b/services/repository/avatar.go @@ -29,35 +29,30 @@ func UploadAvatar(ctx context.Context, repo *repo_model.Repository, data []byte) return nil } - ctx, committer, err := db.TxContext(ctx) - if err != nil { - return err - } - defer committer.Close() - - oldAvatarPath := repo.CustomAvatarRelativePath() - - // Users can upload the same image to other repo - prefix it with ID - // Then repo will be removed - only it avatar file will be removed - repo.Avatar = newAvatar - if err := repo_model.UpdateRepositoryCols(ctx, repo, "avatar"); err != nil { - return fmt.Errorf("UploadAvatar: Update repository avatar: %w", err) - } - - if err := storage.SaveFrom(storage.RepoAvatars, repo.CustomAvatarRelativePath(), func(w io.Writer) error { - _, err := w.Write(avatarData) - return err - }); err != nil { - return fmt.Errorf("UploadAvatar %s failed: Failed to remove old repo avatar %s: %w", repo.RepoPath(), newAvatar, err) - } + return db.WithTx(ctx, func(ctx context.Context) error { + oldAvatarPath := repo.CustomAvatarRelativePath() + + // Users can upload the same image to other repo - prefix it with ID + // Then repo will be removed - only it avatar file will be removed + repo.Avatar = newAvatar + if err := repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "avatar"); err != nil { + return fmt.Errorf("UploadAvatar: Update repository avatar: %w", err) + } - if len(oldAvatarPath) > 0 { - if err := storage.RepoAvatars.Delete(oldAvatarPath); err != nil { - return fmt.Errorf("UploadAvatar: Failed to remove old repo avatar %s: %w", oldAvatarPath, err) + if err := storage.SaveFrom(storage.RepoAvatars, repo.CustomAvatarRelativePath(), func(w io.Writer) error { + _, err := w.Write(avatarData) + return err + }); err != nil { + return fmt.Errorf("UploadAvatar %s failed: Failed to remove old repo avatar %s: %w", repo.RepoPath(), newAvatar, err) } - } - return committer.Commit() + if len(oldAvatarPath) > 0 { + if err := storage.RepoAvatars.Delete(oldAvatarPath); err != nil { + return fmt.Errorf("UploadAvatar: Failed to remove old repo avatar %s: %w", oldAvatarPath, err) + } + } + return nil + }) } // DeleteAvatar deletes the repos's custom avatar. 
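The UploadAvatar rewrite above and the DeleteAvatar rewrite that follows replace the manual db.TxContext, committer.Close and committer.Commit bookkeeping with a single db.WithTx closure that commits only when the callback returns nil. A rough stdlib-only sketch of that shape, assuming a plain database/sql handle and an illustrative repository table:

package example

import (
	"context"
	"database/sql"
	"fmt"
)

// withTx is a stripped-down stand-in for Gitea's db.WithTx: begin a
// transaction, run fn, commit only if fn returned nil, roll back otherwise.
// Callers can no longer forget the commit or leak a dangling committer.
func withTx(ctx context.Context, sqldb *sql.DB, fn func(ctx context.Context, tx *sql.Tx) error) error {
	tx, err := sqldb.BeginTx(ctx, nil)
	if err != nil {
		return err
	}
	if err := fn(ctx, tx); err != nil {
		_ = tx.Rollback()
		return err
	}
	return tx.Commit()
}

// updateAvatar mirrors the new shape of UploadAvatar: update a column, then do
// any follow-up work, all inside one closure. Table and column names are illustrative.
func updateAvatar(ctx context.Context, sqldb *sql.DB, repoID int64, avatar string) error {
	return withTx(ctx, sqldb, func(ctx context.Context, tx *sql.Tx) error {
		if _, err := tx.ExecContext(ctx, "UPDATE repository SET avatar = ? WHERE id = ?", avatar, repoID); err != nil {
			return fmt.Errorf("update avatar: %w", err)
		}
		return nil
	})
}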
@@ -70,22 +65,17 @@ func DeleteAvatar(ctx context.Context, repo *repo_model.Repository) error { avatarPath := repo.CustomAvatarRelativePath() log.Trace("DeleteAvatar[%d]: %s", repo.ID, avatarPath) - ctx, committer, err := db.TxContext(ctx) - if err != nil { - return err - } - defer committer.Close() - - repo.Avatar = "" - if err := repo_model.UpdateRepositoryCols(ctx, repo, "avatar"); err != nil { - return fmt.Errorf("DeleteAvatar: Update repository avatar: %w", err) - } - - if err := storage.RepoAvatars.Delete(avatarPath); err != nil { - return fmt.Errorf("DeleteAvatar: Failed to remove %s: %w", avatarPath, err) - } + return db.WithTx(ctx, func(ctx context.Context) error { + repo.Avatar = "" + if err := repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "avatar"); err != nil { + return fmt.Errorf("DeleteAvatar: Update repository avatar: %w", err) + } - return committer.Commit() + if err := storage.RepoAvatars.Delete(avatarPath); err != nil { + return fmt.Errorf("DeleteAvatar: Failed to remove %s: %w", avatarPath, err) + } + return nil + }) } // RemoveRandomAvatars removes the randomly generated avatars that were created for repositories @@ -112,5 +102,5 @@ func generateAvatar(ctx context.Context, templateRepo, generateRepo *repo_model. return err } - return repo_model.UpdateRepositoryCols(ctx, generateRepo, "avatar") + return repo_model.UpdateRepositoryColsNoAutoTime(ctx, generateRepo, "avatar") } diff --git a/services/repository/avatar_test.go b/services/repository/avatar_test.go index bea820e85f..2dc5173eec 100644 --- a/services/repository/avatar_test.go +++ b/services/repository/avatar_test.go @@ -59,7 +59,7 @@ func TestDeleteAvatar(t *testing.T) { err = DeleteAvatar(db.DefaultContext, repo) assert.NoError(t, err) - assert.Equal(t, "", repo.Avatar) + assert.Empty(t, repo.Avatar) } func TestGenerateAvatar(t *testing.T) { diff --git a/services/repository/branch.go b/services/repository/branch.go index 8804778bd5..6e0065b277 100644 --- a/services/repository/branch.go +++ b/services/repository/branch.go @@ -233,7 +233,7 @@ func loadOneBranch(ctx context.Context, repo *repo_model.Repository, dbBranch *g defer baseGitRepo.Close() repoIDToGitRepo[pr.BaseRepoID] = baseGitRepo } - pullCommit, err := baseGitRepo.GetRefCommitID(pr.GetGitRefName()) + pullCommit, err := baseGitRepo.GetRefCommitID(pr.GetGitHeadRefName()) if err != nil && !git.IsErrNotExist(err) { return nil, fmt.Errorf("GetBranchCommitID: %v", err) } @@ -303,7 +303,7 @@ func SyncBranchesToDB(ctx context.Context, repoID, pusherID int64, branchNames, // For other batches, it will hit optimization 4. 
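Two small mechanical swaps recur in the hunks around here: constant error messages move from fmt.Errorf to errors.New (as in SyncBranchesToDB just below), and byte-slice formatting moves from fmt.Sprintf plus a []byte conversion to fmt.Appendf (as in the commitstatus cache key further down). A compact sketch of both, with illustrative names:

package example

import (
	"crypto/sha256"
	"errors"
	"fmt"
)

// Constant message: errors.New, reserving fmt.Errorf for messages that
// interpolate values or wrap another error with %w.
var errLengthMismatch = errors.New("branchNames and commitIDs length not match")

func validate(branches, commits []string) error {
	if len(branches) != len(commits) {
		return errLengthMismatch
	}
	return nil
}

// fmt.Appendf formats straight into a byte slice, skipping the intermediate
// string allocation; both forms hash to the same key.
func cacheKey(repoID int64, branch string) string {
	oldKey := sha256.Sum256([]byte(fmt.Sprintf("%d:%s", repoID, branch)))
	newKey := sha256.Sum256(fmt.Appendf(nil, "%d:%s", repoID, branch))
	if oldKey != newKey { // never happens; arrays are comparable
		return ""
	}
	return fmt.Sprintf("commit_status:%x", newKey)
}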
if len(branchNames) != len(commitIDs) { - return fmt.Errorf("branchNames and commitIDs length not match") + return errors.New("branchNames and commitIDs length not match") } return db.WithTx(ctx, func(ctx context.Context) error { @@ -663,6 +663,11 @@ func SetRepoDefaultBranch(ctx context.Context, repo *repo_model.Repository, newB } } + // clear divergence cache + if err := DelRepoDivergenceFromCache(ctx, repo.ID); err != nil { + log.Error("DelRepoDivergenceFromCache: %v", err) + } + notify_service.ChangeDefaultBranch(ctx, repo) return nil diff --git a/services/repository/check.go b/services/repository/check.go index b475fbc487..ffcd5ac749 100644 --- a/services/repository/check.go +++ b/services/repository/check.go @@ -162,7 +162,7 @@ func DeleteMissingRepositories(ctx context.Context, doer *user_model.User) error default: } log.Trace("Deleting %d/%d...", repo.OwnerID, repo.ID) - if err := DeleteRepositoryDirectly(ctx, doer, repo.ID); err != nil { + if err := DeleteRepositoryDirectly(ctx, repo.ID); err != nil { log.Error("Failed to DeleteRepository %-v: Error: %v", repo, err) if err2 := system_model.CreateRepositoryNotice("Failed to DeleteRepository %s [%d]: Error: %v", repo.FullName(), repo.ID, err); err2 != nil { log.Error("CreateRepositoryNotice: %v", err) diff --git a/services/repository/collaboration.go b/services/repository/collaboration.go index b5fc523623..53b3c2e203 100644 --- a/services/repository/collaboration.go +++ b/services/repository/collaboration.go @@ -71,40 +71,32 @@ func DeleteCollaboration(ctx context.Context, repo *repo_model.Repository, colla UserID: collaborator.ID, } - ctx, committer, err := db.TxContext(ctx) - if err != nil { - return err - } - defer committer.Close() - - if has, err := db.GetEngine(ctx).Delete(collaboration); err != nil { - return err - } else if has == 0 { - return committer.Commit() - } - - if err := repo.LoadOwner(ctx); err != nil { - return err - } + return db.WithTx(ctx, func(ctx context.Context) error { + if has, err := db.GetEngine(ctx).Delete(collaboration); err != nil { + return err + } else if has == 0 { + return nil + } - if err = access_model.RecalculateAccesses(ctx, repo); err != nil { - return err - } + if err := repo.LoadOwner(ctx); err != nil { + return err + } - if err = repo_model.WatchRepo(ctx, collaborator, repo, false); err != nil { - return err - } + if err = access_model.RecalculateAccesses(ctx, repo); err != nil { + return err + } - if err = ReconsiderWatches(ctx, repo, collaborator); err != nil { - return err - } + if err = repo_model.WatchRepo(ctx, collaborator, repo, false); err != nil { + return err + } - // Unassign a user from any issue (s)he has been assigned to in the repository - if err := ReconsiderRepoIssuesAssignee(ctx, repo, collaborator); err != nil { - return err - } + if err = ReconsiderWatches(ctx, repo, collaborator); err != nil { + return err + } - return committer.Commit() + // Unassign a user from any issue (s)he has been assigned to in the repository + return ReconsiderRepoIssuesAssignee(ctx, repo, collaborator) + }) } func ReconsiderRepoIssuesAssignee(ctx context.Context, repo *repo_model.Repository, user *user_model.User) error { diff --git a/services/repository/commitstatus/commitstatus.go b/services/repository/commitstatus/commitstatus.go index f369a303e6..fa7a89882a 100644 --- a/services/repository/commitstatus/commitstatus.go +++ b/services/repository/commitstatus/commitstatus.go @@ -14,17 +14,17 @@ import ( repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" 
"code.gitea.io/gitea/modules/cache" + "code.gitea.io/gitea/modules/commitstatus" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/json" "code.gitea.io/gitea/modules/log" repo_module "code.gitea.io/gitea/modules/repository" - api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/services/notify" ) func getCacheKey(repoID int64, brancheName string) string { - hashBytes := sha256.Sum256([]byte(fmt.Sprintf("%d:%s", repoID, brancheName))) + hashBytes := sha256.Sum256(fmt.Appendf(nil, "%d:%s", repoID, brancheName)) return fmt.Sprintf("commit_status:%x", hashBytes) } @@ -47,7 +47,7 @@ func getCommitStatusCache(repoID int64, branchName string) *commitStatusCacheVal return nil } -func updateCommitStatusCache(repoID int64, branchName string, state api.CommitStatusState, targetURL string) error { +func updateCommitStatusCache(repoID int64, branchName string, state commitstatus.CommitStatusState, targetURL string) error { c := cache.GetCache() bs, err := json.Marshal(commitStatusCacheValue{ State: state.String(), @@ -127,7 +127,7 @@ func FindReposLastestCommitStatuses(ctx context.Context, repos []*repo_model.Rep for i, repo := range repos { if cv := getCommitStatusCache(repo.ID, repo.DefaultBranch); cv != nil { results[i] = &git_model.CommitStatus{ - State: api.CommitStatusState(cv.State), + State: commitstatus.CommitStatusState(cv.State), TargetURL: cv.TargetURL, } } else { diff --git a/services/repository/contributors_graph_test.go b/services/repository/contributors_graph_test.go index 6db93f6a64..7d32b1c931 100644 --- a/services/repository/contributors_graph_test.go +++ b/services/repository/contributors_graph_test.go @@ -38,14 +38,14 @@ func TestRepository_ContributorsGraph(t *testing.T) { keys = append(keys, k) } slices.Sort(keys) - assert.EqualValues(t, []string{ + assert.Equal(t, []string{ "ethantkoenig@gmail.com", "jimmy.praet@telenet.be", "jon@allspice.io", "total", // generated summary }, keys) - assert.EqualValues(t, &ContributorData{ + assert.Equal(t, &ContributorData{ Name: "Ethan Koenig", AvatarLink: "/assets/img/avatar_default.png", TotalCommits: 1, @@ -58,7 +58,7 @@ func TestRepository_ContributorsGraph(t *testing.T) { }, }, }, data["ethantkoenig@gmail.com"]) - assert.EqualValues(t, &ContributorData{ + assert.Equal(t, &ContributorData{ Name: "Total", AvatarLink: "", TotalCommits: 3, diff --git a/services/repository/create.go b/services/repository/create.go index 1a6a68b35a..bed02e5d7e 100644 --- a/services/repository/create.go +++ b/services/repository/create.go @@ -17,6 +17,7 @@ import ( "code.gitea.io/gitea/models/perm" access_model "code.gitea.io/gitea/models/perm/access" repo_model "code.gitea.io/gitea/models/repo" + system_model "code.gitea.io/gitea/models/system" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/models/webhook" @@ -28,7 +29,6 @@ import ( "code.gitea.io/gitea/modules/setting" api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/templates/vars" - "code.gitea.io/gitea/modules/util" ) // CreateRepoOptions contains the create repository options @@ -100,8 +100,8 @@ func prepareRepoCommit(ctx context.Context, repo *repo_model.Repository, tmpDir // .gitignore if len(opts.Gitignores) > 0 { var buf bytes.Buffer - names := strings.Split(opts.Gitignores, ",") - for _, name := range names { + names := strings.SplitSeq(opts.Gitignores, ",") + for name := range names { data, err = options.Gitignore(name) if err != nil { return 
fmt.Errorf("GetRepoInitFile[%s]: %w", name, err) @@ -140,21 +140,20 @@ func prepareRepoCommit(ctx context.Context, repo *repo_model.Repository, tmpDir // InitRepository initializes README and .gitignore if needed. func initRepository(ctx context.Context, u *user_model.User, repo *repo_model.Repository, opts CreateRepoOptions) (err error) { - if err = repo_module.CheckInitRepository(ctx, repo); err != nil { - return err + // Init git bare new repository. + if err = git.InitRepository(ctx, repo.RepoPath(), true, repo.ObjectFormatName); err != nil { + return fmt.Errorf("git.InitRepository: %w", err) + } else if err = gitrepo.CreateDelegateHooks(ctx, repo); err != nil { + return fmt.Errorf("createDelegateHooks: %w", err) } // Initialize repository according to user's choice. if opts.AutoInit { - tmpDir, err := os.MkdirTemp(os.TempDir(), "gitea-"+repo.Name) + tmpDir, cleanup, err := setting.AppDataTempDir("git-repo-content").MkdirTempRandom("repos-" + repo.Name) if err != nil { - return fmt.Errorf("Failed to create temp dir for repository %s: %w", repo.FullName(), err) + return fmt.Errorf("failed to create temp dir for repository %s: %w", repo.FullName(), err) } - defer func() { - if err := util.RemoveAll(tmpDir); err != nil { - log.Warn("Unable to remove temporary directory: %s: Error: %v", tmpDir, err) - } - }() + defer cleanup() if err = prepareRepoCommit(ctx, repo, tmpDir, opts); err != nil { return fmt.Errorf("prepareRepoCommit: %w", err) @@ -192,18 +191,25 @@ func initRepository(ctx context.Context, u *user_model.User, repo *repo_model.Re } } - if err = UpdateRepository(ctx, repo, false); err != nil { + if err = repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "is_empty", "default_branch", "default_wiki_branch"); err != nil { return fmt.Errorf("updateRepository: %w", err) } + if err = repo_module.UpdateRepoSize(ctx, repo); err != nil { + log.Error("Failed to update size for repository: %v", err) + } + return nil } // CreateRepositoryDirectly creates a repository for the user/organization. -func CreateRepositoryDirectly(ctx context.Context, doer, u *user_model.User, opts CreateRepoOptions) (*repo_model.Repository, error) { - if !doer.IsAdmin && !u.CanCreateRepo() { +// if needsUpdateToReady is true, it will update the repository status to ready when success +func CreateRepositoryDirectly(ctx context.Context, doer, owner *user_model.User, + opts CreateRepoOptions, needsUpdateToReady bool, +) (*repo_model.Repository, error) { + if !doer.CanCreateRepoIn(owner) { return nil, repo_model.ErrReachLimitOfRepo{ - Limit: u.MaxRepoCreation, + Limit: owner.MaxRepoCreation, } } @@ -223,9 +229,9 @@ func CreateRepositoryDirectly(ctx context.Context, doer, u *user_model.User, opt } repo := &repo_model.Repository{ - OwnerID: u.ID, - Owner: u, - OwnerName: u.Name, + OwnerID: owner.ID, + Owner: owner, + OwnerName: owner.Name, Name: opts.Name, LowerName: strings.ToLower(opts.Name), Description: opts.Description, @@ -244,100 +250,91 @@ func CreateRepositoryDirectly(ctx context.Context, doer, u *user_model.User, opt ObjectFormatName: opts.ObjectFormatName, } - var rollbackRepo *repo_model.Repository - - if err := db.WithTx(ctx, func(ctx context.Context) error { - if err := CreateRepositoryByExample(ctx, doer, u, repo, false, false); err != nil { - return err - } - - // No need for init mirror. 
- if opts.IsMirror { - return nil - } + // 1 - create the repository database operations first + err := db.WithTx(ctx, func(ctx context.Context) error { + return createRepositoryInDB(ctx, doer, owner, repo, false) + }) + if err != nil { + return nil, err + } - isExist, err := gitrepo.IsRepositoryExist(ctx, repo) + // last - clean up if something goes wrong + // WARNING: Don't override all later err with local variables + defer func() { if err != nil { - log.Error("Unable to check if %s exists. Error: %v", repo.FullName(), err) - return err - } - if isExist { - // repo already exists - We have two or three options. - // 1. We fail stating that the directory exists - // 2. We create the db repository to go with this data and adopt the git repo - // 3. We delete it and start afresh - // - // Previously Gitea would just delete and start afresh - this was naughty. - // So we will now fail and delegate to other functionality to adopt or delete - log.Error("Files already exist in %s and we are not going to adopt or delete.", repo.FullName()) - return repo_model.ErrRepoFilesAlreadyExist{ - Uname: u.Name, - Name: repo.Name, - } + // we can not use the ctx because it maybe canceled or timeout + cleanupRepository(repo.ID) } + }() - if err = initRepository(ctx, doer, repo, opts); err != nil { - if err2 := gitrepo.DeleteRepository(ctx, repo); err2 != nil { - log.Error("initRepository: %v", err) - return fmt.Errorf( - "delete repo directory %s/%s failed(2): %v", u.Name, repo.Name, err2) - } - return fmt.Errorf("initRepository: %w", err) - } + // No need for init mirror. + if opts.IsMirror { + return repo, nil + } - // Initialize Issue Labels if selected - if len(opts.IssueLabels) > 0 { - if err = repo_module.InitializeLabels(ctx, repo.ID, opts.IssueLabels, false); err != nil { - rollbackRepo = repo - rollbackRepo.OwnerID = u.ID - return fmt.Errorf("InitializeLabels: %w", err) - } + // 2 - check whether the repository with the same storage exists + var isExist bool + isExist, err = gitrepo.IsRepositoryExist(ctx, repo) + if err != nil { + log.Error("Unable to check if %s exists. Error: %v", repo.FullName(), err) + return nil, err + } + if isExist { + log.Error("Files already exist in %s and we are not going to adopt or delete.", repo.FullName()) + // Don't return directly, we need err in defer to cleanupRepository + err = repo_model.ErrRepoFilesAlreadyExist{ + Uname: repo.OwnerName, + Name: repo.Name, } + return nil, err + } - if err := repo_module.CheckDaemonExportOK(ctx, repo); err != nil { - return fmt.Errorf("checkDaemonExportOK: %w", err) - } + // 3 - init git repository in storage + if err = initRepository(ctx, doer, repo, opts); err != nil { + return nil, fmt.Errorf("initRepository: %w", err) + } - if stdout, _, err := git.NewCommand("update-server-info"). 
- RunStdString(ctx, &git.RunOpts{Dir: repo.RepoPath()}); err != nil { - log.Error("CreateRepository(git update-server-info) in %v: Stdout: %s\nError: %v", repo, stdout, err) - rollbackRepo = repo - rollbackRepo.OwnerID = u.ID - return fmt.Errorf("CreateRepository(git update-server-info): %w", err) + // 4 - Initialize Issue Labels if selected + if len(opts.IssueLabels) > 0 { + if err = repo_module.InitializeLabels(ctx, repo.ID, opts.IssueLabels, false); err != nil { + return nil, fmt.Errorf("InitializeLabels: %w", err) } + } - // update licenses - var licenses []string - if len(opts.License) > 0 { - licenses = append(licenses, opts.License) + // 5 - Update the git repository + if err = updateGitRepoAfterCreate(ctx, repo); err != nil { + return nil, fmt.Errorf("updateGitRepoAfterCreate: %w", err) + } - stdout, _, err := git.NewCommand("rev-parse", "HEAD").RunStdString(ctx, &git.RunOpts{Dir: repo.RepoPath()}) - if err != nil { - log.Error("CreateRepository(git rev-parse HEAD) in %v: Stdout: %s\nError: %v", repo, stdout, err) - rollbackRepo = repo - rollbackRepo.OwnerID = u.ID - return fmt.Errorf("CreateRepository(git rev-parse HEAD): %w", err) - } - if err := repo_model.UpdateRepoLicenses(ctx, repo, stdout, licenses); err != nil { - return err - } + // 6 - update licenses + var licenses []string + if len(opts.License) > 0 { + licenses = append(licenses, opts.License) + + var stdout string + stdout, _, err = git.NewCommand("rev-parse", "HEAD").RunStdString(ctx, &git.RunOpts{Dir: repo.RepoPath()}) + if err != nil { + log.Error("CreateRepository(git rev-parse HEAD) in %v: Stdout: %s\nError: %v", repo, stdout, err) + return nil, fmt.Errorf("CreateRepository(git rev-parse HEAD): %w", err) } - return nil - }); err != nil { - if rollbackRepo != nil { - if errDelete := DeleteRepositoryDirectly(ctx, doer, rollbackRepo.ID); errDelete != nil { - log.Error("Rollback deleteRepository: %v", errDelete) - } + if err = repo_model.UpdateRepoLicenses(ctx, repo, stdout, licenses); err != nil { + return nil, err } + } - return nil, err + // 7 - update repository status to be ready + if needsUpdateToReady { + repo.Status = repo_model.RepositoryReady + if err = repo_model.UpdateRepositoryColsWithAutoTime(ctx, repo, "status"); err != nil { + return nil, fmt.Errorf("UpdateRepositoryCols: %w", err) + } } return repo, nil } -// CreateRepositoryByExample creates a repository for the user/organization. -func CreateRepositoryByExample(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository, overwriteOrAdopt, isFork bool) (err error) { +// createRepositoryInDB creates a repository for the user/organization. +func createRepositoryInDB(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository, isFork bool) (err error) { if err = repo_model.IsUsableRepoName(repo.Name); err != nil { return err } @@ -352,19 +349,6 @@ func CreateRepositoryByExample(ctx context.Context, doer, u *user_model.User, re } } - isExist, err := gitrepo.IsRepositoryExist(ctx, repo) - if err != nil { - log.Error("Unable to check if %s exists. 
Error: %v", repo.FullName(), err) - return err - } - if !overwriteOrAdopt && isExist { - log.Error("Files already exist in %s and we are not going to adopt or delete.", repo.FullName()) - return repo_model.ErrRepoFilesAlreadyExist{ - Uname: u.Name, - Name: repo.Name, - } - } - if err = db.Insert(ctx, repo); err != nil { return err } @@ -384,7 +368,8 @@ func CreateRepositoryByExample(ctx context.Context, doer, u *user_model.User, re } units := make([]repo_model.RepoUnit, 0, len(defaultUnits)) for _, tp := range defaultUnits { - if tp == unit.TypeIssues { + switch tp { + case unit.TypeIssues: units = append(units, repo_model.RepoUnit{ RepoID: repo.ID, Type: tp, @@ -394,7 +379,7 @@ func CreateRepositoryByExample(ctx context.Context, doer, u *user_model.User, re EnableDependencies: setting.Service.DefaultEnableDependencies, }, }) - } else if tp == unit.TypePullRequests { + case unit.TypePullRequests: units = append(units, repo_model.RepoUnit{ RepoID: repo.ID, Type: tp, @@ -404,13 +389,13 @@ func CreateRepositoryByExample(ctx context.Context, doer, u *user_model.User, re AllowRebaseUpdate: true, }, }) - } else if tp == unit.TypeProjects { + case unit.TypeProjects: units = append(units, repo_model.RepoUnit{ RepoID: repo.ID, Type: tp, Config: &repo_model.ProjectsConfig{ProjectsMode: repo_model.ProjectsModeAll}, }) - } else { + default: units = append(units, repo_model.RepoUnit{ RepoID: repo.ID, Type: tp, @@ -472,3 +457,26 @@ func CreateRepositoryByExample(ctx context.Context, doer, u *user_model.User, re return nil } + +func cleanupRepository(repoID int64) { + if errDelete := DeleteRepositoryDirectly(db.DefaultContext, repoID); errDelete != nil { + log.Error("cleanupRepository failed: %v", errDelete) + // add system notice + if err := system_model.CreateRepositoryNotice("DeleteRepositoryDirectly failed when cleanup repository: %v", errDelete); err != nil { + log.Error("CreateRepositoryNotice: %v", err) + } + } +} + +func updateGitRepoAfterCreate(ctx context.Context, repo *repo_model.Repository) error { + if err := checkDaemonExportOK(ctx, repo); err != nil { + return fmt.Errorf("checkDaemonExportOK: %w", err) + } + + if stdout, _, err := git.NewCommand("update-server-info"). + RunStdString(ctx, &git.RunOpts{Dir: repo.RepoPath()}); err != nil { + log.Error("CreateRepository(git update-server-info) in %v: Stdout: %s\nError: %v", repo, stdout, err) + return fmt.Errorf("CreateRepository(git update-server-info): %w", err) + } + return nil +} diff --git a/services/repository/create_test.go b/services/repository/create_test.go new file mode 100644 index 0000000000..fe464c1441 --- /dev/null +++ b/services/repository/create_test.go @@ -0,0 +1,57 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package repository + +import ( + "os" + "testing" + + "code.gitea.io/gitea/models/db" + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/util" + + "github.com/stretchr/testify/assert" +) + +func TestCreateRepositoryDirectly(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + // a successful creating repository + user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}) + + createdRepo, err := CreateRepositoryDirectly(git.DefaultContext, user2, user2, CreateRepoOptions{ + Name: "created-repo", + }, true) + assert.NoError(t, err) + assert.NotNil(t, createdRepo) + + exist, err := util.IsExist(repo_model.RepoPath(user2.Name, createdRepo.Name)) + assert.NoError(t, err) + assert.True(t, exist) + + unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerName: user2.Name, Name: createdRepo.Name}) + + err = DeleteRepositoryDirectly(db.DefaultContext, createdRepo.ID) + assert.NoError(t, err) + + // a failed creating because some mock data + // create the repository directory so that the creation will fail after database record created. + assert.NoError(t, os.MkdirAll(repo_model.RepoPath(user2.Name, createdRepo.Name), os.ModePerm)) + + createdRepo2, err := CreateRepositoryDirectly(db.DefaultContext, user2, user2, CreateRepoOptions{ + Name: "created-repo", + }, true) + assert.Nil(t, createdRepo2) + assert.Error(t, err) + + // assert the cleanup is successful + unittest.AssertNotExistsBean(t, &repo_model.Repository{OwnerName: user2.Name, Name: createdRepo.Name}) + + exist, err = util.IsExist(repo_model.RepoPath(user2.Name, createdRepo.Name)) + assert.NoError(t, err) + assert.False(t, exist) +} diff --git a/services/repository/delete.go b/services/repository/delete.go index ff74a20817..c48d6e1d56 100644 --- a/services/repository/delete.go +++ b/services/repository/delete.go @@ -27,14 +27,29 @@ import ( "code.gitea.io/gitea/modules/lfs" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/storage" + actions_service "code.gitea.io/gitea/services/actions" asymkey_service "code.gitea.io/gitea/services/asymkey" + issue_service "code.gitea.io/gitea/services/issue" "xorm.io/builder" ) +func deleteDBRepository(ctx context.Context, repoID int64) error { + if cnt, err := db.GetEngine(ctx).ID(repoID).Delete(&repo_model.Repository{}); err != nil { + return err + } else if cnt != 1 { + return repo_model.ErrRepoNotExist{ + ID: repoID, + OwnerName: "", + Name: "", + } + } + return nil +} + // DeleteRepository deletes a repository for a user or organization. 
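The new deleteDBRepository helper above turns a delete that affected no rows into a typed not-found error instead of silently succeeding. A rough database/sql translation of that pattern follows; the real code uses xorm's ID(...).Delete and repo_model.ErrRepoNotExist, so the names here are only stand-ins.

package example

import (
	"context"
	"database/sql"
	"errors"
	"fmt"
)

var errRepoNotExist = errors.New("repository does not exist")

// deleteRepoRow issues the DELETE, then translates "zero rows affected" into a
// typed not-found error so callers can tell a missing row from a real failure.
func deleteRepoRow(ctx context.Context, sqldb *sql.DB, repoID int64) error {
	res, err := sqldb.ExecContext(ctx, "DELETE FROM repository WHERE id = ?", repoID)
	if err != nil {
		return fmt.Errorf("delete repository %d: %w", repoID, err)
	}
	affected, err := res.RowsAffected()
	if err != nil {
		return err
	}
	if affected != 1 {
		return errRepoNotExist
	}
	return nil
}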
// make sure if you call this func to close open sessions (sqlite will otherwise get a deadlock) -func DeleteRepositoryDirectly(ctx context.Context, doer *user_model.User, repoID int64, ignoreOrgTeams ...bool) error { +func DeleteRepositoryDirectly(ctx context.Context, repoID int64, ignoreOrgTeams ...bool) error { ctx, committer, err := db.TxContext(ctx) if err != nil { return err @@ -82,14 +97,8 @@ func DeleteRepositoryDirectly(ctx context.Context, doer *user_model.User, repoID } needRewriteKeysFile := deleted > 0 - if cnt, err := sess.ID(repoID).Delete(&repo_model.Repository{}); err != nil { + if err := deleteDBRepository(ctx, repoID); err != nil { return err - } else if cnt != 1 { - return repo_model.ErrRepoNotExist{ - ID: repoID, - OwnerName: "", - Name: "", - } } if org != nil && org.IsOrganization() { @@ -126,6 +135,14 @@ func DeleteRepositoryDirectly(ctx context.Context, doer *user_model.User, repoID return err } + // CleanupEphemeralRunnersByPickedTaskOfRepo deletes ephemeral global/org/user runners that have started any task of this repo + // The hardening that prevents an ephemeral runner from picking a second task expects that task objects remain available until runner deletion + // This method will delete affected ephemeral global/org/user runners + // &actions_model.ActionRunner{RepoID: repoID} only handles ephemeral repository runners + if err := actions_service.CleanupEphemeralRunnersByPickedTaskOfRepo(ctx, repoID); err != nil { + return fmt.Errorf("cleanupEphemeralRunners: %w", err) + } + if err := db.DeleteBeans(ctx, &access_model.Access{RepoID: repo.ID}, &activities_model.Action{RepoID: repo.ID}, @@ -177,7 +194,7 @@ func DeleteRepositoryDirectly(ctx context.Context, doer *user_model.User, repoID // Delete Issues and related objects var attachmentPaths []string - if attachmentPaths, err = issues_model.DeleteIssuesByRepoID(ctx, repoID); err != nil { + if attachmentPaths, err = issue_service.DeleteIssuesByRepoID(ctx, repoID); err != nil { return err } @@ -358,7 +375,7 @@ func DeleteRepositoryDirectly(ctx context.Context, doer *user_model.User, repoID // DeleteOwnerRepositoriesDirectly calls DeleteRepositoryDirectly for all repos of the given owner func DeleteOwnerRepositoriesDirectly(ctx context.Context, owner *user_model.User) error { for { - repos, _, err := repo_model.GetUserRepositories(ctx, &repo_model.SearchRepoOptions{ + repos, _, err := repo_model.GetUserRepositories(ctx, repo_model.SearchRepoOptions{ ListOptions: db.ListOptions{ PageSize: repo_model.RepositoryListDefaultPageSize, Page: 1, @@ -374,7 +391,7 @@ func DeleteOwnerRepositoriesDirectly(ctx context.Context, owner *user_model.User break } for _, repo := range repos { - if err := DeleteRepositoryDirectly(ctx, owner, repo.ID); err != nil { + if err := DeleteRepositoryDirectly(ctx, repo.ID); err != nil { return fmt.Errorf("unable to delete repository %s for %s[%d].
Error: %w", repo.Name, owner.Name, owner.ID, err) } } diff --git a/services/repository/files/cherry_pick.go b/services/repository/files/cherry_pick.go index 0e069fb2ce..6818bb343d 100644 --- a/services/repository/files/cherry_pick.go +++ b/services/repository/files/cherry_pick.go @@ -5,6 +5,7 @@ package files import ( "context" + "errors" "fmt" "strings" @@ -100,7 +101,7 @@ func CherryPick(ctx context.Context, repo *repo_model.Repository, doer *user_mod } if conflict { - return nil, fmt.Errorf("failed to merge due to conflicts") + return nil, errors.New("failed to merge due to conflicts") } treeHash, err := t.WriteTree(ctx) diff --git a/services/repository/files/content.go b/services/repository/files/content.go index 0ab7422ce2..2c1e88bb59 100644 --- a/services/repository/files/content.go +++ b/services/repository/files/content.go @@ -5,17 +5,18 @@ package files import ( "context" - "fmt" + "io" "net/url" "path" "strings" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/modules/git" - "code.gitea.io/gitea/modules/gitrepo" + "code.gitea.io/gitea/modules/lfs" "code.gitea.io/gitea/modules/setting" api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/util" + "code.gitea.io/gitea/routers/api/v1/utils" ) // ContentType repo content type @@ -23,14 +24,10 @@ type ContentType string // The string representations of different content types const ( - // ContentTypeRegular regular content type (file) - ContentTypeRegular ContentType = "file" - // ContentTypeDir dir content type (dir) - ContentTypeDir ContentType = "dir" - // ContentLink link content type (symlink) - ContentTypeLink ContentType = "symlink" - // ContentTag submodule content type (submodule) - ContentTypeSubmodule ContentType = "submodule" + ContentTypeRegular ContentType = "file" // regular content type (file) + ContentTypeDir ContentType = "dir" // dir content type (dir) + ContentTypeLink ContentType = "symlink" // link content type (symlink) + ContentTypeSubmodule ContentType = "submodule" // submodule content type (submodule) ) // String gets the string of ContentType @@ -38,67 +35,52 @@ func (ct *ContentType) String() string { return string(*ct) } -// GetContentsOrList gets the meta data of a file's contents (*ContentsResponse) if treePath not a tree -// directory, otherwise a listing of file contents ([]*ContentsResponse). Ref can be a branch, commit or tag -func GetContentsOrList(ctx context.Context, repo *repo_model.Repository, treePath, ref string) (any, error) { - if repo.IsEmpty { - return make([]any, 0), nil - } - if ref == "" { - ref = repo.DefaultBranch - } - origRef := ref - - // Check that the path given in opts.treePath is valid (not a git path) - cleanTreePath := CleanUploadFileName(treePath) - if cleanTreePath == "" && treePath != "" { - return nil, ErrFilenameInvalid{ - Path: treePath, - } - } - treePath = cleanTreePath - - gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, repo) - if err != nil { - return nil, err - } - defer closer.Close() +type GetContentsOrListOptions struct { + TreePath string + IncludeSingleFileContent bool // include the file's content when the tree path is a file + IncludeLfsMetadata bool + IncludeCommitMetadata bool + IncludeCommitMessage bool +} - // Get the commit object for the ref - commit, err := gitRepo.GetCommit(ref) - if err != nil { - return nil, err +// GetContentsOrList gets the metadata of a file's contents (*ContentsResponse) if treePath not a tree +// directory, otherwise a listing of file contents ([]*ContentsResponse). 
Ref can be a branch, commit or tag +func GetContentsOrList(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, refCommit *utils.RefCommit, opts GetContentsOrListOptions) (ret api.ContentsExtResponse, _ error) { + entry, err := prepareGetContentsEntry(refCommit, &opts.TreePath) + if repo.IsEmpty && opts.TreePath == "" { + return api.ContentsExtResponse{DirContents: make([]*api.ContentsResponse, 0)}, nil } - - entry, err := commit.GetTreeEntryByPath(treePath) if err != nil { - return nil, err + return ret, err } + // get file contents if entry.Type() != "tree" { - return GetContents(ctx, repo, treePath, origRef, false) + ret.FileContents, err = getFileContentsByEntryInternal(ctx, repo, gitRepo, refCommit, entry, opts) + return ret, err } - // We are in a directory, so we return a list of FileContentResponse objects - var fileList []*api.ContentsResponse - - gitTree, err := commit.SubTree(treePath) + // list directory contents + gitTree, err := refCommit.Commit.SubTree(opts.TreePath) if err != nil { - return nil, err + return ret, err } entries, err := gitTree.ListEntries() if err != nil { - return nil, err + return ret, err } + ret.DirContents = make([]*api.ContentsResponse, 0, len(entries)) for _, e := range entries { - subTreePath := path.Join(treePath, e.Name()) - fileContentResponse, err := GetContents(ctx, repo, subTreePath, origRef, true) + subOpts := opts + subOpts.TreePath = path.Join(opts.TreePath, e.Name()) + subOpts.IncludeSingleFileContent = false // never include file content when listing a directory + fileContentResponse, err := GetFileContents(ctx, repo, gitRepo, refCommit, subOpts) if err != nil { - return nil, err + return ret, err } - fileList = append(fileList, fileContentResponse) + ret.DirContents = append(ret.DirContents, fileContentResponse) } - return fileList, nil + return ret, nil } // GetObjectTypeFromTreeEntry check what content is behind it @@ -117,86 +99,96 @@ func GetObjectTypeFromTreeEntry(entry *git.TreeEntry) ContentType { } } -// GetContents gets the meta data on a file's contents. 
Ref can be a branch, commit or tag -func GetContents(ctx context.Context, repo *repo_model.Repository, treePath, ref string, forList bool) (*api.ContentsResponse, error) { - if ref == "" { - ref = repo.DefaultBranch - } - origRef := ref - +func prepareGetContentsEntry(refCommit *utils.RefCommit, treePath *string) (*git.TreeEntry, error) { // Check that the path given in opts.treePath is valid (not a git path) - cleanTreePath := CleanUploadFileName(treePath) - if cleanTreePath == "" && treePath != "" { - return nil, ErrFilenameInvalid{ - Path: treePath, - } + cleanTreePath := CleanGitTreePath(*treePath) + if cleanTreePath == "" && *treePath != "" { + return nil, ErrFilenameInvalid{Path: *treePath} } - treePath = cleanTreePath + *treePath = cleanTreePath - gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, repo) - if err != nil { - return nil, err + // Only allow safe ref types + refType := refCommit.RefName.RefType() + if refType != git.RefTypeBranch && refType != git.RefTypeTag && refType != git.RefTypeCommit { + return nil, util.NewNotExistErrorf("no commit found for the ref [ref: %s]", refCommit.RefName) } - defer closer.Close() - // Get the commit object for the ref - commit, err := gitRepo.GetCommit(ref) - if err != nil { - return nil, err - } - commitID := commit.ID.String() - if len(ref) >= 4 && strings.HasPrefix(commitID, ref) { - ref = commit.ID.String() - } + return refCommit.Commit.GetTreeEntryByPath(*treePath) +} - entry, err := commit.GetTreeEntryByPath(treePath) +// GetFileContents gets the metadata on a file's contents. Ref can be a branch, commit or tag +func GetFileContents(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, refCommit *utils.RefCommit, opts GetContentsOrListOptions) (*api.ContentsResponse, error) { + entry, err := prepareGetContentsEntry(refCommit, &opts.TreePath) if err != nil { return nil, err } + return getFileContentsByEntryInternal(ctx, repo, gitRepo, refCommit, entry, opts) +} - refType := gitRepo.GetRefType(ref) - if refType == "invalid" { - return nil, fmt.Errorf("no commit found for the ref [ref: %s]", ref) - } - - selfURL, err := url.Parse(repo.APIURL() + "/contents/" + util.PathEscapeSegments(treePath) + "?ref=" + url.QueryEscape(origRef)) +func getFileContentsByEntryInternal(_ context.Context, repo *repo_model.Repository, gitRepo *git.Repository, refCommit *utils.RefCommit, entry *git.TreeEntry, opts GetContentsOrListOptions) (*api.ContentsResponse, error) { + refType := refCommit.RefName.RefType() + commit := refCommit.Commit + selfURL, err := url.Parse(repo.APIURL() + "/contents/" + util.PathEscapeSegments(opts.TreePath) + "?ref=" + url.QueryEscape(refCommit.InputRef)) if err != nil { return nil, err } selfURLString := selfURL.String() - err = gitRepo.AddLastCommitCache(repo.GetCommitsCountCacheKey(ref, refType != git.ObjectCommit), repo.FullName(), commitID) - if err != nil { - return nil, err - } - - lastCommit, err := commit.GetCommitByPath(treePath) - if err != nil { - return nil, err - } - // All content types have these fields in populated contentsResponse := &api.ContentsResponse{ - Name: entry.Name(), - Path: treePath, - SHA: entry.ID.String(), - LastCommitSHA: lastCommit.ID.String(), - Size: entry.Size(), - URL: &selfURLString, + Name: entry.Name(), + Path: opts.TreePath, + SHA: entry.ID.String(), + Size: entry.Size(), + URL: &selfURLString, Links: &api.FileLinksResponse{ Self: &selfURLString, }, } - // Now populate the rest of the ContentsResponse based on entry type + if 
opts.IncludeCommitMetadata || opts.IncludeCommitMessage { + err = gitRepo.AddLastCommitCache(repo.GetCommitsCountCacheKey(refCommit.InputRef, refType != git.RefTypeCommit), repo.FullName(), refCommit.CommitID) + if err != nil { + return nil, err + } + + lastCommit, err := refCommit.Commit.GetCommitByPath(opts.TreePath) + if err != nil { + return nil, err + } + + if opts.IncludeCommitMetadata { + contentsResponse.LastCommitSHA = util.ToPointer(lastCommit.ID.String()) + // GitHub doesn't have these fields in the response, but we could follow other similar APIs to name them + // https://docs.github.com/en/rest/commits/commits?apiVersion=2022-11-28#list-commits + if lastCommit.Committer != nil { + contentsResponse.LastCommitterDate = util.ToPointer(lastCommit.Committer.When) + } + if lastCommit.Author != nil { + contentsResponse.LastAuthorDate = util.ToPointer(lastCommit.Author.When) + } + } + if opts.IncludeCommitMessage { + contentsResponse.LastCommitMessage = util.ToPointer(lastCommit.Message()) + } + } + + // Now populate the rest of the ContentsResponse based on the entry type if entry.IsRegular() || entry.IsExecutable() { contentsResponse.Type = string(ContentTypeRegular) - if blobResponse, err := GetBlobBySHA(ctx, repo, gitRepo, entry.ID.String()); err != nil { - return nil, err - } else if !forList { - // We don't show the content if we are getting a list of FileContentResponses - contentsResponse.Encoding = &blobResponse.Encoding - contentsResponse.Content = &blobResponse.Content + // if it is listing the repo root dir, don't waste system resources on reading content + if opts.IncludeSingleFileContent { + blobResponse, err := GetBlobBySHA(repo, gitRepo, entry.ID.String()) + if err != nil { + return nil, err + } + contentsResponse.Encoding, contentsResponse.Content = blobResponse.Encoding, blobResponse.Content + contentsResponse.LfsOid, contentsResponse.LfsSize = blobResponse.LfsOid, blobResponse.LfsSize + } else if opts.IncludeLfsMetadata { + contentsResponse.LfsOid, contentsResponse.LfsSize, err = parsePossibleLfsPointerBlob(gitRepo, entry.ID.String()) + if err != nil { + return nil, err + } } } else if entry.IsDir() { contentsResponse.Type = string(ContentTypeDir) @@ -210,7 +202,7 @@ func GetContents(ctx context.Context, repo *repo_model.Repository, treePath, ref contentsResponse.Target = &targetFromContent } else if entry.IsSubModule() { contentsResponse.Type = string(ContentTypeSubmodule) - submodule, err := commit.GetSubModule(treePath) + submodule, err := commit.GetSubModule(opts.TreePath) if err != nil { return nil, err } @@ -220,7 +212,7 @@ func GetContents(ctx context.Context, repo *repo_model.Repository, treePath, ref } // Handle links if entry.IsRegular() || entry.IsLink() || entry.IsExecutable() { - downloadURL, err := url.Parse(repo.HTMLURL() + "/raw/" + url.PathEscape(string(refType)) + "/" + util.PathEscapeSegments(ref) + "/" + util.PathEscapeSegments(treePath)) + downloadURL, err := url.Parse(repo.HTMLURL() + "/raw/" + refCommit.RefName.RefWebLinkPath() + "/" + util.PathEscapeSegments(opts.TreePath)) if err != nil { return nil, err } @@ -228,7 +220,7 @@ func GetContents(ctx context.Context, repo *repo_model.Repository, treePath, ref contentsResponse.DownloadURL = &downloadURLString } if !entry.IsSubModule() { - htmlURL, err := url.Parse(repo.HTMLURL() + "/src/" + url.PathEscape(string(refType)) + "/" + util.PathEscapeSegments(ref) + "/" + util.PathEscapeSegments(treePath)) + htmlURL, err := url.Parse(repo.HTMLURL() + "/src/" + refCommit.RefName.RefWebLinkPath() + "/" + 
util.PathEscapeSegments(opts.TreePath)) if err != nil { return nil, err } @@ -248,49 +240,59 @@ func GetContents(ctx context.Context, repo *repo_model.Repository, treePath, ref return contentsResponse, nil } -// GetBlobBySHA get the GitBlobResponse of a repository using a sha hash. -func GetBlobBySHA(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, sha string) (*api.GitBlobResponse, error) { +func GetBlobBySHA(repo *repo_model.Repository, gitRepo *git.Repository, sha string) (*api.GitBlobResponse, error) { gitBlob, err := gitRepo.GetBlob(sha) if err != nil { return nil, err } - content := "" - if gitBlob.Size() <= setting.API.DefaultMaxBlobSize { - content, err = gitBlob.GetBlobContentBase64() - if err != nil { - return nil, err - } + ret := &api.GitBlobResponse{ + SHA: gitBlob.ID.String(), + URL: repo.APIURL() + "/git/blobs/" + url.PathEscape(gitBlob.ID.String()), + Size: gitBlob.Size(), } - return &api.GitBlobResponse{ - SHA: gitBlob.ID.String(), - URL: repo.APIURL() + "/git/blobs/" + url.PathEscape(gitBlob.ID.String()), - Size: gitBlob.Size(), - Encoding: "base64", - Content: content, - }, nil -} -// TryGetContentLanguage tries to get the (linguist) language of the file content -func TryGetContentLanguage(gitRepo *git.Repository, commitID, treePath string) (string, error) { - indexFilename, worktree, deleteTemporaryFile, err := gitRepo.ReadTreeToTemporaryIndex(commitID) - if err != nil { - return "", err + blobSize := gitBlob.Size() + if blobSize > setting.API.DefaultMaxBlobSize { + return ret, nil } - defer deleteTemporaryFile() + var originContent *strings.Builder + if 0 < blobSize && blobSize < lfs.MetaFileMaxSize { + originContent = &strings.Builder{} + } - filename2attribute2info, err := gitRepo.CheckAttribute(git.CheckAttributeOpts{ - CachedOnly: true, - Attributes: []string{git.AttributeLinguistLanguage, git.AttributeGitlabLanguage}, - Filenames: []string{treePath}, - IndexFile: indexFilename, - WorkTree: worktree, - }) + content, err := gitBlob.GetBlobContentBase64(originContent) if err != nil { - return "", err + return nil, err + } + + ret.Encoding, ret.Content = util.ToPointer("base64"), &content + if originContent != nil { + ret.LfsOid, ret.LfsSize = parsePossibleLfsPointerBuffer(strings.NewReader(originContent.String())) } + return ret, nil +} - language := git.TryReadLanguageAttribute(filename2attribute2info[treePath]) +func parsePossibleLfsPointerBuffer(r io.Reader) (*string, *int64) { + p, _ := lfs.ReadPointer(r) + if p.IsValid() { + return &p.Oid, &p.Size + } + return nil, nil +} - return language.Value(), nil +func parsePossibleLfsPointerBlob(gitRepo *git.Repository, sha string) (*string, *int64, error) { + gitBlob, err := gitRepo.GetBlob(sha) + if err != nil { + return nil, nil, err + } + if gitBlob.Size() > lfs.MetaFileMaxSize { + return nil, nil, nil // not a LFS pointer + } + buf, err := gitBlob.GetBlobContent(lfs.MetaFileMaxSize) + if err != nil { + return nil, nil, err + } + oid, size := parsePossibleLfsPointerBuffer(strings.NewReader(buf)) + return oid, size, nil } diff --git a/services/repository/files/content_test.go b/services/repository/files/content_test.go index 7cb46c0bb6..d72f918074 100644 --- a/services/repository/files/content_test.go +++ b/services/repository/files/content_test.go @@ -7,8 +7,8 @@ import ( "testing" "code.gitea.io/gitea/models/unittest" - "code.gitea.io/gitea/modules/gitrepo" api "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/services/contexttest" _ 
"code.gitea.io/gitea/models/actions" @@ -20,36 +20,6 @@ func TestMain(m *testing.M) { unittest.MainTest(m) } -func getExpectedReadmeContentsResponse() *api.ContentsResponse { - treePath := "README.md" - sha := "4b4851ad51df6a7d9f25c979345979eaeb5b349f" - encoding := "base64" - content := "IyByZXBvMQoKRGVzY3JpcHRpb24gZm9yIHJlcG8x" - selfURL := "https://try.gitea.io/api/v1/repos/user2/repo1/contents/" + treePath + "?ref=master" - htmlURL := "https://try.gitea.io/user2/repo1/src/branch/master/" + treePath - gitURL := "https://try.gitea.io/api/v1/repos/user2/repo1/git/blobs/" + sha - downloadURL := "https://try.gitea.io/user2/repo1/raw/branch/master/" + treePath - return &api.ContentsResponse{ - Name: treePath, - Path: treePath, - SHA: "4b4851ad51df6a7d9f25c979345979eaeb5b349f", - LastCommitSHA: "65f1bf27bc3bf70f64657658635e66094edbcb4d", - Type: "file", - Size: 30, - Encoding: &encoding, - Content: &content, - URL: &selfURL, - HTMLURL: &htmlURL, - GitURL: &gitURL, - DownloadURL: &downloadURL, - Links: &api.FileLinksResponse{ - Self: &selfURL, - GitURL: &gitURL, - HTMLURL: &htmlURL, - }, - } -} - func TestGetContents(t *testing.T) { unittest.PrepareTestEnv(t) ctx, _ := contexttest.MockContext(t, "user2/repo1") @@ -58,195 +28,22 @@ func TestGetContents(t *testing.T) { contexttest.LoadRepoCommit(t, ctx) contexttest.LoadUser(t, ctx, 2) contexttest.LoadGitRepo(t, ctx) - defer ctx.Repo.GitRepo.Close() - - treePath := "README.md" - ref := ctx.Repo.Repository.DefaultBranch - - expectedContentsResponse := getExpectedReadmeContentsResponse() - - t.Run("Get README.md contents with GetContents(ctx, )", func(t *testing.T) { - fileContentResponse, err := GetContents(ctx, ctx.Repo.Repository, treePath, ref, false) - assert.EqualValues(t, expectedContentsResponse, fileContentResponse) - assert.NoError(t, err) - }) - - t.Run("Get README.md contents with ref as empty string (should then use the repo's default branch) with GetContents(ctx, )", func(t *testing.T) { - fileContentResponse, err := GetContents(ctx, ctx.Repo.Repository, treePath, "", false) - assert.EqualValues(t, expectedContentsResponse, fileContentResponse) - assert.NoError(t, err) - }) -} - -func TestGetContentsOrListForDir(t *testing.T) { - unittest.PrepareTestEnv(t) - ctx, _ := contexttest.MockContext(t, "user2/repo1") - ctx.SetPathParam("id", "1") - contexttest.LoadRepo(t, ctx, 1) - contexttest.LoadRepoCommit(t, ctx) - contexttest.LoadUser(t, ctx, 2) - contexttest.LoadGitRepo(t, ctx) - defer ctx.Repo.GitRepo.Close() - - treePath := "" // root dir - ref := ctx.Repo.Repository.DefaultBranch - - readmeContentsResponse := getExpectedReadmeContentsResponse() - // because will be in a list, doesn't have encoding and content - readmeContentsResponse.Encoding = nil - readmeContentsResponse.Content = nil - - expectedContentsListResponse := []*api.ContentsResponse{ - readmeContentsResponse, - } - - t.Run("Get root dir contents with GetContentsOrList(ctx, )", func(t *testing.T) { - fileContentResponse, err := GetContentsOrList(ctx, ctx.Repo.Repository, treePath, ref) - assert.EqualValues(t, expectedContentsListResponse, fileContentResponse) - assert.NoError(t, err) - }) - - t.Run("Get root dir contents with ref as empty string (should then use the repo's default branch) with GetContentsOrList(ctx, )", func(t *testing.T) { - fileContentResponse, err := GetContentsOrList(ctx, ctx.Repo.Repository, treePath, "") - assert.EqualValues(t, expectedContentsListResponse, fileContentResponse) - assert.NoError(t, err) - }) -} - -func TestGetContentsOrListForFile(t 
*testing.T) { - unittest.PrepareTestEnv(t) - ctx, _ := contexttest.MockContext(t, "user2/repo1") - ctx.SetPathParam("id", "1") - contexttest.LoadRepo(t, ctx, 1) - contexttest.LoadRepoCommit(t, ctx) - contexttest.LoadUser(t, ctx, 2) - contexttest.LoadGitRepo(t, ctx) - defer ctx.Repo.GitRepo.Close() - - treePath := "README.md" - ref := ctx.Repo.Repository.DefaultBranch - - expectedContentsResponse := getExpectedReadmeContentsResponse() - - t.Run("Get README.md contents with GetContentsOrList(ctx, )", func(t *testing.T) { - fileContentResponse, err := GetContentsOrList(ctx, ctx.Repo.Repository, treePath, ref) - assert.EqualValues(t, expectedContentsResponse, fileContentResponse) - assert.NoError(t, err) - }) - - t.Run("Get README.md contents with ref as empty string (should then use the repo's default branch) with GetContentsOrList(ctx, )", func(t *testing.T) { - fileContentResponse, err := GetContentsOrList(ctx, ctx.Repo.Repository, treePath, "") - assert.EqualValues(t, expectedContentsResponse, fileContentResponse) - assert.NoError(t, err) - }) -} - -func TestGetContentsErrors(t *testing.T) { - unittest.PrepareTestEnv(t) - ctx, _ := contexttest.MockContext(t, "user2/repo1") - ctx.SetPathParam("id", "1") - contexttest.LoadRepo(t, ctx, 1) - contexttest.LoadRepoCommit(t, ctx) - contexttest.LoadUser(t, ctx, 2) - contexttest.LoadGitRepo(t, ctx) - defer ctx.Repo.GitRepo.Close() - - repo := ctx.Repo.Repository - treePath := "README.md" - ref := repo.DefaultBranch - - t.Run("bad treePath", func(t *testing.T) { - badTreePath := "bad/tree.md" - fileContentResponse, err := GetContents(ctx, repo, badTreePath, ref, false) - assert.Error(t, err) - assert.EqualError(t, err, "object does not exist [id: , rel_path: bad]") - assert.Nil(t, fileContentResponse) - }) - - t.Run("bad ref", func(t *testing.T) { - badRef := "bad_ref" - fileContentResponse, err := GetContents(ctx, repo, treePath, badRef, false) - assert.Error(t, err) - assert.EqualError(t, err, "object does not exist [id: "+badRef+", rel_path: ]") - assert.Nil(t, fileContentResponse) - }) -} - -func TestGetContentsOrListErrors(t *testing.T) { - unittest.PrepareTestEnv(t) - ctx, _ := contexttest.MockContext(t, "user2/repo1") - ctx.SetPathParam("id", "1") - contexttest.LoadRepo(t, ctx, 1) - contexttest.LoadRepoCommit(t, ctx) - contexttest.LoadUser(t, ctx, 2) - contexttest.LoadGitRepo(t, ctx) - defer ctx.Repo.GitRepo.Close() - repo := ctx.Repo.Repository - treePath := "README.md" - ref := repo.DefaultBranch - - t.Run("bad treePath", func(t *testing.T) { - badTreePath := "bad/tree.md" - fileContentResponse, err := GetContentsOrList(ctx, repo, badTreePath, ref) - assert.Error(t, err) - assert.EqualError(t, err, "object does not exist [id: , rel_path: bad]") - assert.Nil(t, fileContentResponse) - }) - - t.Run("bad ref", func(t *testing.T) { - badRef := "bad_ref" - fileContentResponse, err := GetContentsOrList(ctx, repo, treePath, badRef) - assert.Error(t, err) - assert.EqualError(t, err, "object does not exist [id: "+badRef+", rel_path: ]") - assert.Nil(t, fileContentResponse) - }) -} - -func TestGetContentsOrListOfEmptyRepos(t *testing.T) { - unittest.PrepareTestEnv(t) - ctx, _ := contexttest.MockContext(t, "user30/empty") - ctx.SetPathParam("id", "52") - contexttest.LoadRepo(t, ctx, 52) - contexttest.LoadUser(t, ctx, 30) - contexttest.LoadGitRepo(t, ctx) - defer ctx.Repo.GitRepo.Close() - - repo := ctx.Repo.Repository - - t.Run("empty repo", func(t *testing.T) { - contents, err := GetContentsOrList(ctx, repo, "", "") + // GetContentsOrList's behavior is 
fully tested in integration tests, so we don't need to test it here. + + t.Run("GetBlobBySHA", func(t *testing.T) { + sha := "65f1bf27bc3bf70f64657658635e66094edbcb4d" + ctx.SetPathParam("id", "1") + ctx.SetPathParam("sha", sha) + gbr, err := GetBlobBySHA(ctx.Repo.Repository, ctx.Repo.GitRepo, ctx.PathParam("sha")) + expectedGBR := &api.GitBlobResponse{ + Content: util.ToPointer("dHJlZSAyYTJmMWQ0NjcwNzI4YTJlMTAwNDllMzQ1YmQ3YTI3NjQ2OGJlYWI2CmF1dGhvciB1c2VyMSA8YWRkcmVzczFAZXhhbXBsZS5jb20+IDE0ODk5NTY0NzkgLTA0MDAKY29tbWl0dGVyIEV0aGFuIEtvZW5pZyA8ZXRoYW50a29lbmlnQGdtYWlsLmNvbT4gMTQ4OTk1NjQ3OSAtMDQwMAoKSW5pdGlhbCBjb21taXQK"), + Encoding: util.ToPointer("base64"), + URL: "https://try.gitea.io/api/v1/repos/user2/repo1/git/blobs/65f1bf27bc3bf70f64657658635e66094edbcb4d", + SHA: "65f1bf27bc3bf70f64657658635e66094edbcb4d", + Size: 180, + } assert.NoError(t, err) - assert.Empty(t, contents) + assert.Equal(t, expectedGBR, gbr) }) } - -func TestGetBlobBySHA(t *testing.T) { - unittest.PrepareTestEnv(t) - ctx, _ := contexttest.MockContext(t, "user2/repo1") - contexttest.LoadRepo(t, ctx, 1) - contexttest.LoadRepoCommit(t, ctx) - contexttest.LoadUser(t, ctx, 2) - contexttest.LoadGitRepo(t, ctx) - defer ctx.Repo.GitRepo.Close() - - sha := "65f1bf27bc3bf70f64657658635e66094edbcb4d" - ctx.SetPathParam("id", "1") - ctx.SetPathParam("sha", sha) - - gitRepo, err := gitrepo.OpenRepository(ctx, ctx.Repo.Repository) - if err != nil { - t.Fail() - } - - gbr, err := GetBlobBySHA(ctx, ctx.Repo.Repository, gitRepo, ctx.PathParam("sha")) - expectedGBR := &api.GitBlobResponse{ - Content: "dHJlZSAyYTJmMWQ0NjcwNzI4YTJlMTAwNDllMzQ1YmQ3YTI3NjQ2OGJlYWI2CmF1dGhvciB1c2VyMSA8YWRkcmVzczFAZXhhbXBsZS5jb20+IDE0ODk5NTY0NzkgLTA0MDAKY29tbWl0dGVyIEV0aGFuIEtvZW5pZyA8ZXRoYW50a29lbmlnQGdtYWlsLmNvbT4gMTQ4OTk1NjQ3OSAtMDQwMAoKSW5pdGlhbCBjb21taXQK", - Encoding: "base64", - URL: "https://try.gitea.io/api/v1/repos/user2/repo1/git/blobs/65f1bf27bc3bf70f64657658635e66094edbcb4d", - SHA: "65f1bf27bc3bf70f64657658635e66094edbcb4d", - Size: 180, - } - assert.NoError(t, err) - assert.Equal(t, expectedGBR, gbr) -} diff --git a/services/repository/files/diff.go b/services/repository/files/diff.go index 0b3550452a..50d01f9d7c 100644 --- a/services/repository/files/diff.go +++ b/services/repository/files/diff.go @@ -29,7 +29,7 @@ func GetDiffPreview(ctx context.Context, repo *repo_model.Repository, branch, tr } // Add the object to the database - objectHash, err := t.HashObject(ctx, strings.NewReader(content)) + objectHash, err := t.HashObjectAndWrite(ctx, strings.NewReader(content)) if err != nil { return nil, err } diff --git a/services/repository/files/diff_test.go b/services/repository/files/diff_test.go index a8514791cc..ae702e4189 100644 --- a/services/repository/files/diff_test.go +++ b/services/repository/files/diff_test.go @@ -118,7 +118,7 @@ func TestGetDiffPreview(t *testing.T) { assert.NoError(t, err) bs, err := json.Marshal(diff) assert.NoError(t, err) - assert.EqualValues(t, string(expectedBs), string(bs)) + assert.Equal(t, string(expectedBs), string(bs)) }) t.Run("empty branch, same results", func(t *testing.T) { @@ -128,7 +128,7 @@ func TestGetDiffPreview(t *testing.T) { assert.NoError(t, err) bs, err := json.Marshal(diff) assert.NoError(t, err) - assert.EqualValues(t, expectedBs, bs) + assert.Equal(t, expectedBs, bs) }) } diff --git a/services/repository/files/file.go b/services/repository/files/file.go index 2caa1b4946..f48e32b427 100644 --- a/services/repository/files/file.go +++ b/services/repository/files/file.go @@ -5,6 +5,7 @@ package 
files import ( "context" + "errors" "fmt" "net/url" "strings" @@ -12,18 +13,40 @@ import ( repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/setting" api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/util" + "code.gitea.io/gitea/routers/api/v1/utils" ) -func GetFilesResponseFromCommit(ctx context.Context, repo *repo_model.Repository, commit *git.Commit, branch string, treeNames []string) (*api.FilesResponse, error) { - files := []*api.ContentsResponse{} - for _, file := range treeNames { - fileContents, _ := GetContents(ctx, repo, file, branch, false) // ok if fails, then will be nil +func GetContentsListFromTreePaths(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, refCommit *utils.RefCommit, treePaths []string) (files []*api.ContentsResponse) { + var size int64 + for _, treePath := range treePaths { + // ok if fails, then will be nil + fileContents, _ := GetFileContents(ctx, repo, gitRepo, refCommit, GetContentsOrListOptions{ + TreePath: treePath, + IncludeSingleFileContent: true, + IncludeCommitMetadata: true, + }) + if fileContents != nil && fileContents.Content != nil && *fileContents.Content != "" { + // if content isn't empty (e.g., due to the single blob being too large), add file size to response size + size += int64(len(*fileContents.Content)) + } + if size > setting.API.DefaultMaxResponseSize { + break // stop if max response size would be exceeded + } files = append(files, fileContents) + if len(files) == setting.API.DefaultPagingNum { + break // stop if paging num reached + } } - fileCommitResponse, _ := GetFileCommitResponse(repo, commit) // ok if fails, then will be nil - verification := GetPayloadCommitVerification(ctx, commit) + return files +} + +func GetFilesResponseFromCommit(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, refCommit *utils.RefCommit, treeNames []string) (*api.FilesResponse, error) { + files := GetContentsListFromTreePaths(ctx, repo, gitRepo, refCommit, treeNames) + fileCommitResponse, _ := GetFileCommitResponse(repo, refCommit.Commit) // ok if fails, then will be nil + verification := GetPayloadCommitVerification(ctx, refCommit.Commit) filesResponse := &api.FilesResponse{ Files: files, Commit: fileCommitResponse, @@ -32,19 +55,6 @@ func GetFilesResponseFromCommit(ctx context.Context, repo *repo_model.Repository return filesResponse, nil } -// GetFileResponseFromCommit Constructs a FileResponse from a Commit object -func GetFileResponseFromCommit(ctx context.Context, repo *repo_model.Repository, commit *git.Commit, branch, treeName string) (*api.FileResponse, error) { - fileContents, _ := GetContents(ctx, repo, treeName, branch, false) // ok if fails, then will be nil - fileCommitResponse, _ := GetFileCommitResponse(repo, commit) // ok if fails, then will be nil - verification := GetPayloadCommitVerification(ctx, commit) - fileResponse := &api.FileResponse{ - Content: fileContents, - Commit: fileCommitResponse, - Verification: verification, - } - return fileResponse, nil -} - // constructs a FileResponse with the file at the index from FilesResponse func GetFileResponseFromFilesResponse(filesResponse *api.FilesResponse, index int) *api.FileResponse { content := &api.ContentsResponse{} @@ -62,10 +72,10 @@ func GetFileResponseFromFilesResponse(filesResponse *api.FilesResponse, index in // GetFileCommitResponse Constructs a FileCommitResponse from a Commit object func GetFileCommitResponse(repo *repo_model.Repository, commit 
*git.Commit) (*api.FileCommitResponse, error) { if repo == nil { - return nil, fmt.Errorf("repo cannot be nil") + return nil, errors.New("repo cannot be nil") } if commit == nil { - return nil, fmt.Errorf("commit cannot be nil") + return nil, errors.New("commit cannot be nil") } commitURL, _ := url.Parse(repo.APIURL() + "/git/commits/" + url.PathEscape(commit.ID.String())) commitTreeURL, _ := url.Parse(repo.APIURL() + "/git/trees/" + url.PathEscape(commit.Tree.ID.String())) @@ -129,15 +139,17 @@ func (err ErrFilenameInvalid) Unwrap() error { return util.ErrInvalidArgument } -// CleanUploadFileName Trims a filename and returns empty string if it is a .git directory -func CleanUploadFileName(name string) string { - // Rebase the filename +// CleanGitTreePath cleans a tree path for git, it returns an empty string the path is invalid (e.g.: contains ".git" part) +func CleanGitTreePath(name string) string { name = util.PathJoinRel(name) // Git disallows any filenames to have a .git directory in them. - for _, part := range strings.Split(name, "/") { - if strings.ToLower(part) == ".git" { + for part := range strings.SplitSeq(name, "/") { + if strings.EqualFold(part, ".git") { return "" } } + if name == "." { + name = "" + } return name } diff --git a/services/repository/files/file_test.go b/services/repository/files/file_test.go index 52c0574883..cdb6a266ff 100644 --- a/services/repository/files/file_test.go +++ b/services/repository/files/file_test.go @@ -6,115 +6,22 @@ package files import ( "testing" - "code.gitea.io/gitea/models/unittest" - "code.gitea.io/gitea/modules/gitrepo" - "code.gitea.io/gitea/modules/setting" - api "code.gitea.io/gitea/modules/structs" - "code.gitea.io/gitea/services/contexttest" - "github.com/stretchr/testify/assert" ) func TestCleanUploadFileName(t *testing.T) { - t.Run("Clean regular file", func(t *testing.T) { - name := "this/is/test" - cleanName := CleanUploadFileName(name) - expectedCleanName := name - assert.EqualValues(t, expectedCleanName, cleanName) - }) - - t.Run("Clean a .git path", func(t *testing.T) { - name := "this/is/test/.git" - cleanName := CleanUploadFileName(name) - expectedCleanName := "" - assert.EqualValues(t, expectedCleanName, cleanName) - }) -} - -func getExpectedFileResponse() *api.FileResponse { - treePath := "README.md" - sha := "4b4851ad51df6a7d9f25c979345979eaeb5b349f" - encoding := "base64" - content := "IyByZXBvMQoKRGVzY3JpcHRpb24gZm9yIHJlcG8x" - selfURL := setting.AppURL + "api/v1/repos/user2/repo1/contents/" + treePath + "?ref=master" - htmlURL := setting.AppURL + "user2/repo1/src/branch/master/" + treePath - gitURL := setting.AppURL + "api/v1/repos/user2/repo1/git/blobs/" + sha - downloadURL := setting.AppURL + "user2/repo1/raw/branch/master/" + treePath - return &api.FileResponse{ - Content: &api.ContentsResponse{ - Name: treePath, - Path: treePath, - SHA: sha, - LastCommitSHA: "65f1bf27bc3bf70f64657658635e66094edbcb4d", - Type: "file", - Size: 30, - Encoding: &encoding, - Content: &content, - URL: &selfURL, - HTMLURL: &htmlURL, - GitURL: &gitURL, - DownloadURL: &downloadURL, - Links: &api.FileLinksResponse{ - Self: &selfURL, - GitURL: &gitURL, - HTMLURL: &htmlURL, - }, - }, - Commit: &api.FileCommitResponse{ - CommitMeta: api.CommitMeta{ - URL: "https://try.gitea.io/api/v1/repos/user2/repo1/git/commits/65f1bf27bc3bf70f64657658635e66094edbcb4d", - SHA: "65f1bf27bc3bf70f64657658635e66094edbcb4d", - }, - HTMLURL: "https://try.gitea.io/user2/repo1/commit/65f1bf27bc3bf70f64657658635e66094edbcb4d", - Author: &api.CommitUser{ - 
Identity: api.Identity{ - Name: "user1", - Email: "address1@example.com", - }, - Date: "2017-03-19T20:47:59Z", - }, - Committer: &api.CommitUser{ - Identity: api.Identity{ - Name: "Ethan Koenig", - Email: "ethantkoenig@gmail.com", - }, - Date: "2017-03-19T20:47:59Z", - }, - Parents: []*api.CommitMeta{}, - Message: "Initial commit\n", - Tree: &api.CommitMeta{ - URL: "https://try.gitea.io/api/v1/repos/user2/repo1/git/trees/2a2f1d4670728a2e10049e345bd7a276468beab6", - SHA: "2a2f1d4670728a2e10049e345bd7a276468beab6", - }, - }, - Verification: &api.PayloadCommitVerification{ - Verified: false, - Reason: "gpg.error.not_signed_commit", - Signature: "", - Payload: "", - }, + cases := []struct { + input, expected string + }{ + {"", ""}, + {".", ""}, + {"a/./b", "a/b"}, + {"a.git", "a.git"}, + {".git/b", ""}, + {"a/.git", ""}, + {"/a/../../b", "b"}, + } + for _, c := range cases { + assert.Equal(t, c.expected, CleanGitTreePath(c.input), "input: %q", c.input) } -} - -func TestGetFileResponseFromCommit(t *testing.T) { - unittest.PrepareTestEnv(t) - ctx, _ := contexttest.MockContext(t, "user2/repo1") - ctx.SetPathParam("id", "1") - contexttest.LoadRepo(t, ctx, 1) - contexttest.LoadRepoCommit(t, ctx) - contexttest.LoadUser(t, ctx, 2) - contexttest.LoadGitRepo(t, ctx) - defer ctx.Repo.GitRepo.Close() - - repo := ctx.Repo.Repository - branch := repo.DefaultBranch - treePath := "README.md" - gitRepo, _ := gitrepo.OpenRepository(ctx, repo) - defer gitRepo.Close() - commit, _ := gitRepo.GetBranchCommit(branch) - expectedFileResponse := getExpectedFileResponse() - - fileResponse, err := GetFileResponseFromCommit(ctx, repo, commit, branch, treePath) - assert.NoError(t, err) - assert.EqualValues(t, expectedFileResponse, fileResponse) } diff --git a/services/repository/files/patch.go b/services/repository/files/patch.go index 1941adb86a..11a8744b7f 100644 --- a/services/repository/files/patch.go +++ b/services/repository/files/patch.go @@ -12,7 +12,6 @@ import ( repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" - "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/util" @@ -45,7 +44,6 @@ type ApplyDiffPatchOptions struct { NewBranch string Message string Content string - SHA string Author *IdentityOptions Committer *IdentityOptions Dates *CommitDateOptions @@ -62,29 +60,26 @@ func (opts *ApplyDiffPatchOptions) Validate(ctx context.Context, repo *repo_mode opts.NewBranch = opts.OldBranch } - gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, repo) - if err != nil { - return err - } - defer closer.Close() - // oldBranch must exist for this operation - if _, err := gitRepo.GetBranch(opts.OldBranch); err != nil { + if exist, err := git_model.IsBranchExist(ctx, repo.ID, opts.OldBranch); err != nil { return err + } else if !exist { + return git_model.ErrBranchNotExist{ + BranchName: opts.OldBranch, + } } // A NewBranch can be specified for the patch to be applied to. // Check to make sure the branch does not already exist, otherwise we can't proceed. 
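The Validate change above replaces the old gitRepo.GetBranch probe with the database-backed git_model.IsBranchExist lookup, so branch names can be checked without opening the git repository at all. A minimal sketch of that pattern, using only signatures visible in this diff; the helper name validatePatchBranches and its placement in the same package are illustrative assumptions, not Gitea API:

package files

import (
	"context"

	git_model "code.gitea.io/gitea/models/git"
)

// validatePatchBranches mirrors the checks shown above: the source branch must
// exist in the branch table, and the target branch must not already exist
// (unless the patch is committed back onto the same branch).
func validatePatchBranches(ctx context.Context, repoID int64, oldBranch, newBranch string) error {
	exist, err := git_model.IsBranchExist(ctx, repoID, oldBranch)
	if err != nil {
		return err
	}
	if !exist {
		return git_model.ErrBranchNotExist{BranchName: oldBranch}
	}
	if newBranch == oldBranch {
		return nil
	}
	exist, err = git_model.IsBranchExist(ctx, repoID, newBranch)
	if err != nil {
		return err
	}
	if exist {
		return git_model.ErrBranchAlreadyExists{BranchName: newBranch}
	}
	return nil
}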
// If we aren't branching to a new branch, make sure user can commit to the given branch if opts.NewBranch != opts.OldBranch { - existingBranch, err := gitRepo.GetBranch(opts.NewBranch) - if existingBranch != nil { + exist, err := git_model.IsBranchExist(ctx, repo.ID, opts.NewBranch) + if err != nil { + return err + } else if exist { return git_model.ErrBranchAlreadyExists{ BranchName: opts.NewBranch, } } - if err != nil && !git.IsErrBranchNotExist(err) { - return err - } } else { protectedBranch, err := git_model.GetFirstMatchProtectedBranchRule(ctx, repo.ID, opts.OldBranch) if err != nil { diff --git a/services/repository/files/temp_repo.go b/services/repository/files/temp_repo.go index d2c70a7a34..c2f61c8223 100644 --- a/services/repository/files/temp_repo.go +++ b/services/repository/files/temp_repo.go @@ -6,6 +6,7 @@ package files import ( "bytes" "context" + "errors" "fmt" "io" "os" @@ -29,23 +30,24 @@ type TemporaryUploadRepository struct { repo *repo_model.Repository gitRepo *git.Repository basePath string + cleanup func() } // NewTemporaryUploadRepository creates a new temporary upload repository func NewTemporaryUploadRepository(repo *repo_model.Repository) (*TemporaryUploadRepository, error) { - basePath, err := repo_module.CreateTemporaryPath("upload") + basePath, cleanup, err := repo_module.CreateTemporaryPath("upload") if err != nil { return nil, err } - t := &TemporaryUploadRepository{repo: repo, basePath: basePath} + t := &TemporaryUploadRepository{repo: repo, basePath: basePath, cleanup: cleanup} return t, nil } // Close the repository cleaning up all files func (t *TemporaryUploadRepository) Close() { defer t.gitRepo.Close() - if err := repo_module.RemoveTemporaryPath(t.basePath); err != nil { - log.Error("Failed to remove temporary path %s: %v", t.basePath, err) + if t.cleanup != nil { + t.cleanup() } } @@ -126,7 +128,7 @@ func (t *TemporaryUploadRepository) LsFiles(ctx context.Context, filenames ...st } fileList := make([]string, 0, len(filenames)) - for _, line := range bytes.Split(stdOut.Bytes(), []byte{'\000'}) { + for line := range bytes.SplitSeq(stdOut.Bytes(), []byte{'\000'}) { fileList = append(fileList, string(line)) } @@ -162,8 +164,8 @@ func (t *TemporaryUploadRepository) RemoveFilesFromIndex(ctx context.Context, fi return nil } -// HashObject writes the provided content to the object db and returns its hash -func (t *TemporaryUploadRepository) HashObject(ctx context.Context, content io.Reader) (string, error) { +// HashObjectAndWrite writes the provided content to the object db and returns its hash +func (t *TemporaryUploadRepository) HashObjectAndWrite(ctx context.Context, content io.Reader) (string, error) { stdOut := new(bytes.Buffer) stdErr := new(bytes.Buffer) @@ -291,15 +293,18 @@ func (t *TemporaryUploadRepository) CommitTree(ctx context.Context, opts *Commit } var sign bool - var keyID string + var key *git.SigningKey var signer *git.Signature if opts.ParentCommitID != "" { - sign, keyID, signer, _ = asymkey_service.SignCRUDAction(ctx, t.repo.RepoPath(), opts.DoerUser, t.basePath, opts.ParentCommitID) + sign, key, signer, _ = asymkey_service.SignCRUDAction(ctx, t.repo.RepoPath(), opts.DoerUser, t.basePath, opts.ParentCommitID) } else { - sign, keyID, signer, _ = asymkey_service.SignInitialCommit(ctx, t.repo.RepoPath(), opts.DoerUser) + sign, key, signer, _ = asymkey_service.SignInitialCommit(ctx, t.repo.RepoPath(), opts.DoerUser) } if sign { - cmdCommitTree.AddOptionFormat("-S%s", keyID) + if key.Format != "" { + cmdCommitTree.AddConfig("gpg.format", 
key.Format) + } + cmdCommitTree.AddOptionFormat("-S%s", key.KeyID) if t.repo.GetTrustModel() == repo_model.CommitterTrustModel || t.repo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel { if committerSig.Name != authorSig.Name || committerSig.Email != authorSig.Email { // Add trailers @@ -414,7 +419,7 @@ func (t *TemporaryUploadRepository) DiffIndex(ctx context.Context) (*gitdiff.Dif // GetBranchCommit Gets the commit object of the given branch func (t *TemporaryUploadRepository) GetBranchCommit(branch string) (*git.Commit, error) { if t.gitRepo == nil { - return nil, fmt.Errorf("repository has not been cloned") + return nil, errors.New("repository has not been cloned") } return t.gitRepo.GetBranchCommit(branch) } @@ -422,7 +427,7 @@ func (t *TemporaryUploadRepository) GetBranchCommit(branch string) (*git.Commit, // GetCommit Gets the commit object of the given commit ID func (t *TemporaryUploadRepository) GetCommit(commitID string) (*git.Commit, error) { if t.gitRepo == nil { - return nil, fmt.Errorf("repository has not been cloned") + return nil, errors.New("repository has not been cloned") } return t.gitRepo.GetCommit(commitID) } diff --git a/services/repository/files/tree.go b/services/repository/files/tree.go index 9142416347..e481a3e7d2 100644 --- a/services/repository/files/tree.go +++ b/services/repository/files/tree.go @@ -6,12 +6,14 @@ package files import ( "context" "fmt" + "html/template" "net/url" "path" "sort" "strings" repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/modules/fileicon" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" @@ -88,15 +90,8 @@ func GetTreeBySHA(ctx context.Context, repo *repo_model.Repository, gitRepo *git if rangeStart >= len(entries) { return tree, nil } - var rangeEnd int - if len(entries) > perPage { - tree.Truncated = true - } - if rangeStart+perPage < len(entries) { - rangeEnd = rangeStart + perPage - } else { - rangeEnd = len(entries) - } + rangeEnd := min(rangeStart+perPage, len(entries)) + tree.Truncated = rangeEnd < len(entries) tree.Entries = make([]api.GitEntry, rangeEnd-rangeStart) for e := rangeStart; e < rangeEnd; e++ { i := e - rangeStart @@ -140,8 +135,13 @@ func entryModeString(entryMode git.EntryMode) string { } type TreeViewNode struct { - EntryName string `json:"entryName"` - EntryMode string `json:"entryMode"` + EntryName string `json:"entryName"` + EntryMode string `json:"entryMode"` + EntryIcon template.HTML `json:"entryIcon"` + EntryIconOpen template.HTML `json:"entryIconOpen,omitempty"` + + SymLinkedToMode string `json:"symLinkedToMode,omitempty"` // TODO: for the EntryMode="symlink" + FullPath string `json:"fullPath"` SubmoduleURL string `json:"submoduleUrl,omitempty"` Children []*TreeViewNode `json:"children,omitempty"` @@ -151,20 +151,29 @@ func (node *TreeViewNode) sortLevel() int { return util.Iif(node.EntryMode == "tree" || node.EntryMode == "commit", 0, 1) } -func newTreeViewNodeFromEntry(ctx context.Context, commit *git.Commit, parentDir string, entry *git.TreeEntry) *TreeViewNode { +func newTreeViewNodeFromEntry(ctx context.Context, repoLink string, renderedIconPool *fileicon.RenderedIconPool, commit *git.Commit, parentDir string, entry *git.TreeEntry) *TreeViewNode { node := &TreeViewNode{ EntryName: entry.Name(), EntryMode: entryModeString(entry.Mode()), FullPath: path.Join(parentDir, entry.Name()), } + entryInfo := fileicon.EntryInfoFromGitTreeEntry(commit, node.FullPath, entry) + node.EntryIcon = 
fileicon.RenderEntryIconHTML(renderedIconPool, entryInfo) + if entryInfo.EntryMode.IsDir() { + entryInfo.IsOpen = true + node.EntryIconOpen = fileicon.RenderEntryIconHTML(renderedIconPool, entryInfo) + } + if node.EntryMode == "commit" { if subModule, err := commit.GetSubModule(node.FullPath); err != nil { log.Error("GetSubModule: %v", err) } else if subModule != nil { - submoduleFile := git.NewCommitSubmoduleFile(subModule.URL, entry.ID.String()) - webLink := submoduleFile.SubmoduleWebLink(ctx) - node.SubmoduleURL = webLink.CommitWebLink + submoduleFile := git.NewCommitSubmoduleFile(repoLink, node.FullPath, subModule.URL, entry.ID.String()) + webLink := submoduleFile.SubmoduleWebLinkTree(ctx) + if webLink != nil { + node.SubmoduleURL = webLink.CommitWebLink + } } } @@ -182,7 +191,7 @@ func sortTreeViewNodes(nodes []*TreeViewNode) { }) } -func listTreeNodes(ctx context.Context, commit *git.Commit, tree *git.Tree, treePath, subPath string) ([]*TreeViewNode, error) { +func listTreeNodes(ctx context.Context, repoLink string, renderedIconPool *fileicon.RenderedIconPool, commit *git.Commit, tree *git.Tree, treePath, subPath string) ([]*TreeViewNode, error) { entries, err := tree.ListEntries() if err != nil { return nil, err @@ -191,14 +200,14 @@ func listTreeNodes(ctx context.Context, commit *git.Commit, tree *git.Tree, tree subPathDirName, subPathRemaining, _ := strings.Cut(subPath, "/") nodes := make([]*TreeViewNode, 0, len(entries)) for _, entry := range entries { - node := newTreeViewNodeFromEntry(ctx, commit, treePath, entry) + node := newTreeViewNodeFromEntry(ctx, repoLink, renderedIconPool, commit, treePath, entry) nodes = append(nodes, node) if entry.IsDir() && subPathDirName == entry.Name() { subTreePath := treePath + "/" + node.EntryName if subTreePath[0] == '/' { subTreePath = subTreePath[1:] } - subNodes, err := listTreeNodes(ctx, commit, entry.Tree(), subTreePath, subPathRemaining) + subNodes, err := listTreeNodes(ctx, repoLink, renderedIconPool, commit, entry.Tree(), subTreePath, subPathRemaining) if err != nil { log.Error("listTreeNodes: %v", err) } else { @@ -210,10 +219,10 @@ func listTreeNodes(ctx context.Context, commit *git.Commit, tree *git.Tree, tree return nodes, nil } -func GetTreeViewNodes(ctx context.Context, commit *git.Commit, treePath, subPath string) ([]*TreeViewNode, error) { +func GetTreeViewNodes(ctx context.Context, repoLink string, renderedIconPool *fileicon.RenderedIconPool, commit *git.Commit, treePath, subPath string) ([]*TreeViewNode, error) { entry, err := commit.GetTreeEntryByPath(treePath) if err != nil { return nil, err } - return listTreeNodes(ctx, commit, entry.Tree(), treePath, subPath) + return listTreeNodes(ctx, repoLink, renderedIconPool, commit, entry.Tree(), treePath, subPath) } diff --git a/services/repository/files/tree_test.go b/services/repository/files/tree_test.go index 8ea54969ce..38ac9f25fc 100644 --- a/services/repository/files/tree_test.go +++ b/services/repository/files/tree_test.go @@ -4,9 +4,11 @@ package files import ( + "html/template" "testing" "code.gitea.io/gitea/models/unittest" + "code.gitea.io/gitea/modules/fileicon" "code.gitea.io/gitea/modules/git" api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/services/contexttest" @@ -49,7 +51,7 @@ func TestGetTreeBySHA(t *testing.T) { TotalCount: 1, } - assert.EqualValues(t, expectedTree, tree) + assert.Equal(t, expectedTree, tree) } func TestGetTreeViewNodes(t *testing.T) { @@ -62,40 +64,57 @@ func TestGetTreeViewNodes(t *testing.T) { contexttest.LoadGitRepo(t, ctx) 
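GetTreeViewNodes now takes the repository link and a shared fileicon.RenderedIconPool so that identical entry icons are rendered only once per request. A hedged usage sketch, assuming it sits in the same package as the code above; printTreeView and the literal tree/sub paths are made-up illustrations:

package files

import (
	"context"
	"fmt"

	"code.gitea.io/gitea/modules/fileicon"
	"code.gitea.io/gitea/modules/git"
)

// printTreeView lists the tree view nodes for a commit, expanding the view
// down to docs/README.md, and shows which fields the new signature fills in.
func printTreeView(ctx context.Context, repoLink string, commit *git.Commit) error {
	pool := fileicon.NewRenderedIconPool()
	nodes, err := GetTreeViewNodes(ctx, repoLink, pool, commit, "", "docs/README.md")
	if err != nil {
		return err
	}
	for _, node := range nodes {
		// EntryIcon (and EntryIconOpen for directories) are pre-rendered HTML
		// fragments backed by the shared pool.
		fmt.Println(node.FullPath, node.EntryMode, node.EntryIcon != "")
	}
	return nil
}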
defer ctx.Repo.GitRepo.Close() - treeNodes, err := GetTreeViewNodes(ctx, ctx.Repo.Commit, "", "") + curRepoLink := "/any/repo-link" + renderedIconPool := fileicon.NewRenderedIconPool() + mockIconForFile := func(id string) template.HTML { + return template.HTML(`<svg class="svg git-entry-icon octicon-file" width="16" height="16" aria-hidden="true"><use xlink:href="#` + id + `"></use></svg>`) + } + mockIconForFolder := func(id string) template.HTML { + return template.HTML(`<svg class="svg git-entry-icon octicon-file-directory-fill" width="16" height="16" aria-hidden="true"><use xlink:href="#` + id + `"></use></svg>`) + } + mockOpenIconForFolder := func(id string) template.HTML { + return template.HTML(`<svg class="svg git-entry-icon octicon-file-directory-open-fill" width="16" height="16" aria-hidden="true"><use xlink:href="#` + id + `"></use></svg>`) + } + treeNodes, err := GetTreeViewNodes(ctx, curRepoLink, renderedIconPool, ctx.Repo.Commit, "", "") assert.NoError(t, err) assert.Equal(t, []*TreeViewNode{ { - EntryName: "docs", - EntryMode: "tree", - FullPath: "docs", + EntryName: "docs", + EntryMode: "tree", + FullPath: "docs", + EntryIcon: mockIconForFolder(`svg-mfi-folder-docs`), + EntryIconOpen: mockOpenIconForFolder(`svg-mfi-folder-docs`), }, }, treeNodes) - treeNodes, err = GetTreeViewNodes(ctx, ctx.Repo.Commit, "", "docs/README.md") + treeNodes, err = GetTreeViewNodes(ctx, curRepoLink, renderedIconPool, ctx.Repo.Commit, "", "docs/README.md") assert.NoError(t, err) assert.Equal(t, []*TreeViewNode{ { - EntryName: "docs", - EntryMode: "tree", - FullPath: "docs", + EntryName: "docs", + EntryMode: "tree", + FullPath: "docs", + EntryIcon: mockIconForFolder(`svg-mfi-folder-docs`), + EntryIconOpen: mockOpenIconForFolder(`svg-mfi-folder-docs`), Children: []*TreeViewNode{ { EntryName: "README.md", EntryMode: "blob", FullPath: "docs/README.md", + EntryIcon: mockIconForFile(`svg-mfi-readme`), }, }, }, }, treeNodes) - treeNodes, err = GetTreeViewNodes(ctx, ctx.Repo.Commit, "docs", "README.md") + treeNodes, err = GetTreeViewNodes(ctx, curRepoLink, renderedIconPool, ctx.Repo.Commit, "docs", "README.md") assert.NoError(t, err) assert.Equal(t, []*TreeViewNode{ { EntryName: "README.md", EntryMode: "blob", FullPath: "docs/README.md", + EntryIcon: mockIconForFile(`svg-mfi-readme`), }, }, treeNodes) } diff --git a/services/repository/files/update.go b/services/repository/files/update.go index cade7ba2bf..e871f777e5 100644 --- a/services/repository/files/update.go +++ b/services/repository/files/update.go @@ -8,6 +8,7 @@ import ( "fmt" "io" "path" + "slices" "strings" "time" @@ -15,12 +16,14 @@ import ( repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/git/attribute" "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/lfs" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/util" + "code.gitea.io/gitea/routers/api/v1/utils" asymkey_service "code.gitea.io/gitea/services/asymkey" pull_service "code.gitea.io/gitea/services/pull" ) @@ -85,14 +88,32 @@ func (err ErrRepoFileDoesNotExist) Unwrap() error { return util.ErrNotExist } +type LazyReadSeeker interface { + io.ReadSeeker + io.Closer + OpenLazyReader() error +} + // ChangeRepoFiles adds, updates or removes multiple files in the given repository -func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *user_model.User, opts 
*ChangeRepoFilesOptions) (*structs.FilesResponse, error) { +func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *user_model.User, opts *ChangeRepoFilesOptions) (_ *structs.FilesResponse, errRet error) { + var addedLfsPointers []lfs.Pointer + defer func() { + if errRet != nil { + for _, lfsPointer := range addedLfsPointers { + _, err := git_model.RemoveLFSMetaObjectByOid(ctx, repo.ID, lfsPointer.Oid) + if err != nil { + log.Error("ChangeRepoFiles: RemoveLFSMetaObjectByOid failed: %v", err) + } + } + } + }() + err := repo.MustNotBeArchived() if err != nil { return nil, err } - // If no branch name is set, assume default branch + // If no branch name is set, assume the default branch if opts.OldBranch == "" { opts.OldBranch = repo.DefaultBranch } @@ -107,8 +128,13 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use defer closer.Close() // oldBranch must exist for this operation - if _, err := gitRepo.GetBranch(opts.OldBranch); err != nil && !repo.IsEmpty { + if exist, err := git_model.IsBranchExist(ctx, repo.ID, opts.OldBranch); err != nil { return nil, err + } else if !exist && !repo.IsEmpty { + return nil, git_model.ErrBranchNotExist{ + RepoID: repo.ID, + BranchName: opts.OldBranch, + } } var treePaths []string @@ -119,14 +145,14 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use } // Check that the path given in opts.treePath is valid (not a git path) - treePath := CleanUploadFileName(file.TreePath) + treePath := CleanGitTreePath(file.TreePath) if treePath == "" { return nil, ErrFilenameInvalid{ Path: file.TreePath, } } // If there is a fromTreePath (we are copying it), also clean it up - fromTreePath := CleanUploadFileName(file.FromTreePath) + fromTreePath := CleanGitTreePath(file.FromTreePath) if fromTreePath == "" && file.FromTreePath != "" { return nil, ErrFilenameInvalid{ Path: file.FromTreePath, @@ -145,15 +171,15 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use // Check to make sure the branch does not already exist, otherwise we can't proceed. 
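ChangeRepoFiles above now records every LFS meta object it inserts and relies on a named return error so a single deferred function can undo those inserts when the overall change fails. The pattern reduced to a self-contained sketch; doChange and the fake object IDs are illustrative, only the control flow matches the real code:

package main

import (
	"errors"
	"fmt"
)

// doChange records side effects as they happen; the deferred function reads the
// named return error and rolls them back only if the operation failed overall.
func doChange(fail bool) (_ string, errRet error) {
	var inserted []string
	defer func() {
		if errRet != nil {
			for _, oid := range inserted {
				// the real code calls RemoveLFSMetaObjectByOid here
				fmt.Println("rolling back inserted object", oid)
			}
		}
	}()

	inserted = append(inserted, "sha256:abc123")
	if fail {
		return "", errors.New("push failed")
	}
	return "new-commit-id", nil
}

func main() {
	_, _ = doChange(true) // prints the rollback line
	id, _ := doChange(false)
	fmt.Println("created", id) // no rollback on success
}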
// If we aren't branching to a new branch, make sure user can commit to the given branch if opts.NewBranch != opts.OldBranch { - existingBranch, err := gitRepo.GetBranch(opts.NewBranch) - if existingBranch != nil { + exist, err := git_model.IsBranchExist(ctx, repo.ID, opts.NewBranch) + if err != nil { + return nil, err + } + if exist { return nil, git_model.ErrBranchAlreadyExists{ BranchName: opts.NewBranch, } } - if err != nil && !git.IsErrBranchNotExist(err) { - return nil, err - } } else if err := VerifyBranchProtection(ctx, repo, doer, opts.OldBranch, treePaths); err != nil { return nil, err } @@ -196,13 +222,7 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use } // Find the file we want to delete in the index - inFilelist := false - for _, indexFile := range filesInIndex { - if indexFile == file.TreePath { - inFilelist = true - break - } - } + inFilelist := slices.Contains(filesInIndex, file.TreePath) if !inFilelist { return nil, ErrRepoFileDoesNotExist{ Path: file.TreePath, @@ -218,7 +238,7 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use return nil, err // Couldn't get a commit for the branch } - // Assigned LastCommitID in opts if it hasn't been set + // Assigned LastCommitID in "opts" if it hasn't been set if opts.LastCommitID == "" { opts.LastCommitID = commit.ID.String() } else { @@ -230,22 +250,25 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use } for _, file := range opts.Files { - if err := handleCheckErrors(file, commit, opts); err != nil { + if err = handleCheckErrors(file, commit, opts); err != nil { return nil, err } } } - contentStore := lfs.NewContentStore() + lfsContentStore := lfs.NewContentStore() for _, file := range opts.Files { switch file.Operation { - case "create", "update": - if err := CreateOrUpdateFile(ctx, t, file, contentStore, repo.ID, hasOldBranch); err != nil { + case "create", "update", "rename", "upload": + addedLfsPointer, err := modifyFile(ctx, t, file, lfsContentStore, repo.ID) + if err != nil { return nil, err } + if addedLfsPointer != nil { + addedLfsPointers = append(addedLfsPointers, *addedLfsPointer) + } case "delete": - // Remove the file from the index - if err := t.RemoveFilesFromIndex(ctx, file.TreePath); err != nil { + if err = t.RemoveFilesFromIndex(ctx, file.TreePath); err != nil { return nil, err } default: @@ -290,14 +313,16 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use return nil, err } - filesResponse, err := GetFilesResponseFromCommit(ctx, repo, commit, opts.NewBranch, treePaths) + // FIXME: this call seems not right, why it needs to read the file content again + // FIXME: why it uses the NewBranch as "ref", it should use the commit ID because the response is only for this commit + filesResponse, err := GetFilesResponseFromCommit(ctx, repo, gitRepo, utils.NewRefCommit(git.RefNameFromBranch(opts.NewBranch), commit), treePaths) if err != nil { return nil, err } if repo.IsEmpty { if isEmpty, err := gitRepo.IsEmpty(); err == nil && !isEmpty { - _ = repo_model.UpdateRepositoryCols(ctx, &repo_model.Repository{ID: repo.ID, IsEmpty: false, DefaultBranch: opts.NewBranch}, "is_empty", "default_branch") + _ = repo_model.UpdateRepositoryColsWithAutoTime(ctx, &repo_model.Repository{ID: repo.ID, IsEmpty: false, DefaultBranch: opts.NewBranch}, "is_empty", "default_branch") } } @@ -363,22 +388,33 @@ func (err ErrSHAOrCommitIDNotProvided) Error() string { // handles the check for various issues for 
ChangeRepoFiles func handleCheckErrors(file *ChangeRepoFile, commit *git.Commit, opts *ChangeRepoFilesOptions) error { - if file.Operation == "update" || file.Operation == "delete" { - fromEntry, err := commit.GetTreeEntryByPath(file.Options.fromTreePath) - if err != nil { - return err + // check old entry (fromTreePath/fromEntry) + if file.Operation == "update" || file.Operation == "upload" || file.Operation == "delete" || file.Operation == "rename" { + var fromEntryIDString string + { + fromEntry, err := commit.GetTreeEntryByPath(file.Options.fromTreePath) + if file.Operation == "upload" && git.IsErrNotExist(err) { + fromEntry = nil + } else if err != nil { + return err + } + if fromEntry != nil { + fromEntryIDString = fromEntry.ID.String() + file.Options.executable = fromEntry.IsExecutable() // FIXME: legacy hacky approach, it shouldn't prepare the "Options" in the "check" function + } } + if file.SHA != "" { - // If a SHA was given and the SHA given doesn't match the SHA of the fromTreePath, throw error - if file.SHA != fromEntry.ID.String() { + // If the SHA given doesn't match the SHA of the fromTreePath, throw error + if file.SHA != fromEntryIDString { return pull_service.ErrSHADoesNotMatch{ Path: file.Options.treePath, GivenSHA: file.SHA, - CurrentSHA: fromEntry.ID.String(), + CurrentSHA: fromEntryIDString, } } } else if opts.LastCommitID != "" { - // If a lastCommitID was given and it doesn't match the commitID of the head of the branch throw + // If a lastCommitID given doesn't match the branch head's commitID throw // an error, but only if we aren't creating a new branch. if commit.ID.String() != opts.LastCommitID && opts.OldBranch == opts.NewBranch { if changed, err := commit.FileChangedSinceCommit(file.Options.treePath, opts.LastCommitID); err != nil { @@ -396,13 +432,13 @@ func handleCheckErrors(file *ChangeRepoFile, commit *git.Commit, opts *ChangeRep // haven't been made. We throw an error if one wasn't provided. return ErrSHAOrCommitIDNotProvided{} } - file.Options.executable = fromEntry.IsExecutable() } - if file.Operation == "create" || file.Operation == "update" { - // For the path where this file will be created/updated, we need to make - // sure no parts of the path are existing files or links except for the last - // item in the path which is the file name, and that shouldn't exist IF it is - // a new file OR is being moved to a new path. + + // check new entry (treePath/treeEntry) + if file.Operation == "create" || file.Operation == "update" || file.Operation == "upload" || file.Operation == "rename" { + // For operation's target path, we need to make sure no parts of the path are existing files or links + // except for the last item in the path (which is the file name). + // And that shouldn't exist IF it is a new file OR is being moved to a new path. 
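The block above is essentially an optimistic-concurrency guard: the caller must prove it saw the current state, either through the blob SHA it is editing or through the last commit ID its change was based on. A self-contained sketch of that guard; checkNotStale and its parameters are illustrative names, not the Gitea functions:

package main

import (
	"errors"
	"fmt"
)

// checkNotStale condenses the staleness rules above: a mismatched blob SHA is
// rejected immediately, and without a SHA the file must be unchanged since the
// commit the caller last saw.
func checkNotStale(givenSHA, currentSHA string, fileChangedSinceLastCommit bool) error {
	if givenSHA != "" {
		if givenSHA != currentSHA {
			return fmt.Errorf("sha does not match: given %s, current %s", givenSHA, currentSHA)
		}
		return nil
	}
	if fileChangedSinceLastCommit {
		return errors.New("file has been changed by another commit since the given last commit")
	}
	return nil
}

func main() {
	fmt.Println(checkNotStale("abc", "def", false)) // stale blob SHA -> error
	fmt.Println(checkNotStale("", "def", true))     // branch moved on -> error
	fmt.Println(checkNotStale("", "def", false))    // up to date -> <nil>
}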
treePathParts := strings.Split(file.Options.treePath, "/") subTreePath := "" for index, part := range treePathParts { @@ -439,7 +475,7 @@ func handleCheckErrors(file *ChangeRepoFile, commit *git.Commit, opts *ChangeRep Type: git.EntryModeTree, } } else if file.Options.fromTreePath != file.Options.treePath || file.Operation == "create" { - // The entry shouldn't exist if we are creating new file or moving to a new path + // The entry shouldn't exist if we are creating the new file or moving to a new path return ErrRepoFileAlreadyExists{ Path: file.Options.treePath, } @@ -450,21 +486,23 @@ func handleCheckErrors(file *ChangeRepoFile, commit *git.Commit, opts *ChangeRep return nil } -// CreateOrUpdateFile handles creating or updating a file for ChangeRepoFiles -func CreateOrUpdateFile(ctx context.Context, t *TemporaryUploadRepository, file *ChangeRepoFile, contentStore *lfs.ContentStore, repoID int64, hasOldBranch bool) error { +func modifyFile(ctx context.Context, t *TemporaryUploadRepository, file *ChangeRepoFile, contentStore *lfs.ContentStore, repoID int64) (addedLfsPointer *lfs.Pointer, _ error) { + if rd, ok := file.ContentReader.(LazyReadSeeker); ok { + if err := rd.OpenLazyReader(); err != nil { + return nil, fmt.Errorf("OpenLazyReader: %w", err) + } + defer rd.Close() + } + // Get the two paths (might be the same if not moving) from the index if they exist filesInIndex, err := t.LsFiles(ctx, file.TreePath, file.FromTreePath) if err != nil { - return fmt.Errorf("UpdateRepoFile: %w", err) + return nil, fmt.Errorf("LsFiles: %w", err) } // If is a new file (not updating) then the given path shouldn't exist if file.Operation == "create" { - for _, indexFile := range filesInIndex { - if indexFile == file.TreePath { - return ErrRepoFileAlreadyExists{ - Path: file.TreePath, - } - } + if slices.Contains(filesInIndex, file.TreePath) { + return nil, ErrRepoFileAlreadyExists{Path: file.TreePath} } } @@ -472,79 +510,178 @@ func CreateOrUpdateFile(ctx context.Context, t *TemporaryUploadRepository, file if file.Options.fromTreePath != file.Options.treePath && len(filesInIndex) > 0 { for _, indexFile := range filesInIndex { if indexFile == file.Options.fromTreePath { - if err := t.RemoveFilesFromIndex(ctx, file.FromTreePath); err != nil { - return err + if err = t.RemoveFilesFromIndex(ctx, file.FromTreePath); err != nil { + return nil, err } } } } - treeObjectContentReader := file.ContentReader - var lfsMetaObject *git_model.LFSMetaObject - if setting.LFS.StartServer && hasOldBranch { - // Check there is no way this can return multiple infos - filename2attribute2info, err := t.gitRepo.CheckAttribute(git.CheckAttributeOpts{ - Attributes: []string{"filter"}, - Filenames: []string{file.Options.treePath}, - CachedOnly: true, - }) + var writeObjectRet *writeRepoObjectRet + switch file.Operation { + case "create", "update", "upload": + writeObjectRet, err = writeRepoObjectForModify(ctx, t, file) + case "rename": + writeObjectRet, err = writeRepoObjectForRename(ctx, t, file) + default: + return nil, util.NewInvalidArgumentErrorf("unknown file modification operation: '%s'", file.Operation) + } + if err != nil { + return nil, err + } + + // Add the object to the index, the "file.Options.executable" is set in handleCheckErrors by the caller (legacy hacky approach) + if err = t.AddObjectToIndex(ctx, util.Iif(file.Options.executable, "100755", "100644"), writeObjectRet.ObjectHash, file.Options.treePath); err != nil { + return nil, err + } + + if writeObjectRet.LfsContent == nil { + return nil, nil // No LFS 
pointer, so nothing to do + } + defer writeObjectRet.LfsContent.Close() + + // Now we must store the content into an LFS object + lfsMetaObject, err := git_model.NewLFSMetaObject(ctx, repoID, writeObjectRet.LfsPointer) + if err != nil { + return nil, err + } + exist, err := contentStore.Exists(lfsMetaObject.Pointer) + if err != nil { + return nil, err + } + if !exist { + err = contentStore.Put(lfsMetaObject.Pointer, writeObjectRet.LfsContent) if err != nil { - return err + if _, errRemove := git_model.RemoveLFSMetaObjectByOid(ctx, repoID, lfsMetaObject.Oid); errRemove != nil { + return nil, fmt.Errorf("unable to remove failed inserted LFS object %s: %v (Prev Error: %w)", lfsMetaObject.Oid, errRemove, err) + } + return nil, err } + } + return &lfsMetaObject.Pointer, nil +} + +func checkIsLfsFileInGitAttributes(ctx context.Context, t *TemporaryUploadRepository, paths []string) (ret []bool, err error) { + attributesMap, err := attribute.CheckAttributes(ctx, t.gitRepo, "" /* use temp repo's working dir */, attribute.CheckAttributeOpts{ + Attributes: []string{attribute.Filter}, + Filenames: paths, + }) + if err != nil { + return nil, err + } + for _, p := range paths { + isLFSFile := attributesMap[p] != nil && attributesMap[p].Get(attribute.Filter).ToString().Value() == "lfs" + ret = append(ret, isLFSFile) + } + return ret, nil +} - if filename2attribute2info[file.Options.treePath] != nil && filename2attribute2info[file.Options.treePath]["filter"] == "lfs" { - // OK so we are supposed to LFS this data! - pointer, err := lfs.GeneratePointer(treeObjectContentReader) +type writeRepoObjectRet struct { + ObjectHash string + LfsContent io.ReadCloser // if not nil, then the caller should store its content in LfsPointer, then close it + LfsPointer lfs.Pointer +} + +// writeRepoObjectForModify hashes the git object for create or update operations +func writeRepoObjectForModify(ctx context.Context, t *TemporaryUploadRepository, file *ChangeRepoFile) (ret *writeRepoObjectRet, err error) { + ret = &writeRepoObjectRet{} + treeObjectContentReader := file.ContentReader + if setting.LFS.StartServer { + checkIsLfsFiles, err := checkIsLfsFileInGitAttributes(ctx, t, []string{file.Options.treePath}) + if err != nil { + return nil, err + } + if checkIsLfsFiles[0] { + // OK, so we are supposed to LFS this data! 
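When the filter attribute resolves to lfs, the bytes hashed into the git tree are not the file itself but a small pointer blob, while the raw content goes to the LFS content store. A self-contained illustration of what that pointer text looks like; it mirrors the standard git-lfs pointer format that lfs.GeneratePointer and StringContent produce, and is not the Gitea implementation:

package main

import (
	"crypto/sha256"
	"fmt"
	"strings"
)

// pointerText builds the text committed to git in place of an LFS-tracked
// file: the real content is addressed by its sha256 and stored elsewhere.
func pointerText(content []byte) string {
	oid := sha256.Sum256(content)
	return fmt.Sprintf("version https://git-lfs.github.com/spec/v1\noid sha256:%x\nsize %d\n", oid, len(content))
}

func main() {
	data := []byte(strings.Repeat("large binary payload ", 4))
	fmt.Print(pointerText(data))
}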
+ ret.LfsPointer, err = lfs.GeneratePointer(file.ContentReader) if err != nil { - return err + return nil, err } - lfsMetaObject = &git_model.LFSMetaObject{Pointer: pointer, RepositoryID: repoID} - treeObjectContentReader = strings.NewReader(pointer.StringContent()) + if _, err = file.ContentReader.Seek(0, io.SeekStart); err != nil { + return nil, err + } + ret.LfsContent = io.NopCloser(file.ContentReader) + treeObjectContentReader = strings.NewReader(ret.LfsPointer.StringContent()) } } - // Add the object to the database - objectHash, err := t.HashObject(ctx, treeObjectContentReader) + ret.ObjectHash, err = t.HashObjectAndWrite(ctx, treeObjectContentReader) if err != nil { - return err + return nil, err } + return ret, nil +} - // Add the object to the index - if file.Options.executable { - if err := t.AddObjectToIndex(ctx, "100755", objectHash, file.Options.treePath); err != nil { - return err - } - } else { - if err := t.AddObjectToIndex(ctx, "100644", objectHash, file.Options.treePath); err != nil { - return err +// writeRepoObjectForRename the same as writeRepoObjectForModify buf for "rename" +func writeRepoObjectForRename(ctx context.Context, t *TemporaryUploadRepository, file *ChangeRepoFile) (ret *writeRepoObjectRet, err error) { + lastCommitID, err := t.GetLastCommit(ctx) + if err != nil { + return nil, err + } + commit, err := t.GetCommit(lastCommitID) + if err != nil { + return nil, err + } + oldEntry, err := commit.GetTreeEntryByPath(file.Options.fromTreePath) + if err != nil { + return nil, err + } + + ret = &writeRepoObjectRet{ObjectHash: oldEntry.ID.String()} + if !setting.LFS.StartServer { + return ret, nil + } + + checkIsLfsFiles, err := checkIsLfsFileInGitAttributes(ctx, t, []string{file.Options.fromTreePath, file.Options.treePath}) + if err != nil { + return nil, err + } + oldIsLfs, newIsLfs := checkIsLfsFiles[0], checkIsLfsFiles[1] + + // If the old and new paths are both in lfs or both not in lfs, the object hash of the old file can be used directly + // as the object doesn't change + if oldIsLfs == newIsLfs { + return ret, nil + } + + oldEntryBlobPointerBy := func(f func(r io.Reader) (lfs.Pointer, error)) (lfsPointer lfs.Pointer, err error) { + r, err := oldEntry.Blob().DataAsync() + if err != nil { + return lfsPointer, err } + defer r.Close() + return f(r) } - if lfsMetaObject != nil { - // We have an LFS object - create it - lfsMetaObject, err = git_model.NewLFSMetaObject(ctx, lfsMetaObject.RepositoryID, lfsMetaObject.Pointer) + var treeObjectContentReader io.ReadCloser + if oldIsLfs { + // If the old is in lfs but the new isn't, read the content from lfs and add it as a normal git object + pointer, err := oldEntryBlobPointerBy(lfs.ReadPointer) if err != nil { - return err + return nil, err } - exist, err := contentStore.Exists(lfsMetaObject.Pointer) + treeObjectContentReader, err = lfs.ReadMetaObject(pointer) if err != nil { - return err + return nil, err } - if !exist { - _, err := file.ContentReader.Seek(0, io.SeekStart) - if err != nil { - return err - } - if err := contentStore.Put(lfsMetaObject.Pointer, file.ContentReader); err != nil { - if _, err2 := git_model.RemoveLFSMetaObjectByOid(ctx, repoID, lfsMetaObject.Oid); err2 != nil { - return fmt.Errorf("unable to remove failed inserted LFS object %s: %v (Prev Error: %w)", lfsMetaObject.Oid, err2, err) - } - return err - } + defer treeObjectContentReader.Close() + } else { + // If the new is in lfs but the old isn't, read the content from the git object and generate a lfs pointer of it + ret.LfsPointer, err = 
oldEntryBlobPointerBy(lfs.GeneratePointer) + if err != nil { + return nil, err + } + ret.LfsContent, err = oldEntry.Blob().DataAsync() + if err != nil { + return nil, err } + treeObjectContentReader = io.NopCloser(strings.NewReader(ret.LfsPointer.StringContent())) } - - return nil + ret.ObjectHash, err = t.HashObjectAndWrite(ctx, treeObjectContentReader) + if err != nil { + return nil, err + } + return ret, nil } // VerifyBranchProtection verify the branch protection for modifying the given treePath on the given branch diff --git a/services/repository/files/upload.go b/services/repository/files/upload.go index 2e4ed1744e..b783cbd01d 100644 --- a/services/repository/files/upload.go +++ b/services/repository/files/upload.go @@ -8,14 +8,11 @@ import ( "fmt" "os" "path" - "strings" + "sync" - git_model "code.gitea.io/gitea/models/git" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" - "code.gitea.io/gitea/modules/git" - "code.gitea.io/gitea/modules/lfs" - "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/log" ) // UploadRepoFileOptions contains the uploaded repository file options @@ -31,202 +28,84 @@ type UploadRepoFileOptions struct { Committer *IdentityOptions } -type uploadInfo struct { - upload *repo_model.Upload - lfsMetaObject *git_model.LFSMetaObject +type lazyLocalFileReader struct { + *os.File + localFilename string + counter int + mu sync.Mutex } -func cleanUpAfterFailure(ctx context.Context, infos *[]uploadInfo, t *TemporaryUploadRepository, original error) error { - for _, info := range *infos { - if info.lfsMetaObject == nil { - continue - } - if !info.lfsMetaObject.Existing { - if _, err := git_model.RemoveLFSMetaObjectByOid(ctx, t.repo.ID, info.lfsMetaObject.Oid); err != nil { - original = fmt.Errorf("%w, %v", original, err) // We wrap the original error - as this is the underlying error that required the fallback - } - } - } - return original -} - -// UploadRepoFiles uploads files to the given repository -func UploadRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *user_model.User, opts *UploadRepoFileOptions) error { - if len(opts.Files) == 0 { - return nil - } +var _ LazyReadSeeker = (*lazyLocalFileReader)(nil) - uploads, err := repo_model.GetUploadsByUUIDs(ctx, opts.Files) - if err != nil { - return fmt.Errorf("GetUploadsByUUIDs [uuids: %v]: %w", opts.Files, err) - } +func (l *lazyLocalFileReader) Close() error { + l.mu.Lock() + defer l.mu.Unlock() - names := make([]string, len(uploads)) - infos := make([]uploadInfo, len(uploads)) - for i, upload := range uploads { - // Check file is not lfs locked, will return nil if lock setting not enabled - filepath := path.Join(opts.TreePath, upload.Name) - lfsLock, err := git_model.GetTreePathLock(ctx, repo.ID, filepath) - if err != nil { - return err - } - if lfsLock != nil && lfsLock.OwnerID != doer.ID { - u, err := user_model.GetUserByID(ctx, lfsLock.OwnerID) - if err != nil { - return err + if l.counter > 0 { + l.counter-- + if l.counter == 0 { + if err := l.File.Close(); err != nil { + return fmt.Errorf("close file %s: %w", l.localFilename, err) } - return git_model.ErrLFSFileLocked{RepoID: repo.ID, Path: filepath, UserName: u.Name} - } - - names[i] = upload.Name - infos[i] = uploadInfo{upload: upload} - } - - t, err := NewTemporaryUploadRepository(repo) - if err != nil { - return err - } - defer t.Close() - - hasOldBranch := true - if err = t.Clone(ctx, opts.OldBranch, true); err != nil { - if !git.IsErrBranchNotExist(err) || !repo.IsEmpty { - return err 
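writeRepoObjectForRename only rewrites the blob when a rename crosses the LFS boundary; moving a path out of LFS means parsing the pointer blob and rehydrating the real content from the LFS store before hashing it as a normal object. A sketch of that direction using only calls that appear in this diff; the helper name demoteFromLFS and its placement in the same package are assumptions:

package files

import (
	"context"
	"io"

	"code.gitea.io/gitea/modules/lfs"
)

// demoteFromLFS turns an LFS pointer blob back into a regular git object:
// parse the pointer, stream the real data out of LFS storage, and hash it into
// the temporary repository's object database.
func demoteFromLFS(ctx context.Context, t *TemporaryUploadRepository, pointerBlob io.Reader) (string, error) {
	ptr, err := lfs.ReadPointer(pointerBlob)
	if err != nil {
		return "", err
	}
	content, err := lfs.ReadMetaObject(ptr)
	if err != nil {
		return "", err
	}
	defer content.Close()
	return t.HashObjectAndWrite(ctx, content)
}

The opposite direction, shown above, keeps the old blob as the LfsContent to upload and commits only the generated pointer text.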
- } - if err = t.Init(ctx, repo.ObjectFormatName); err != nil { - return err - } - hasOldBranch = false - opts.LastCommitID = "" - } - if hasOldBranch { - if err = t.SetDefaultIndex(ctx); err != nil { - return err - } - } - - var filename2attribute2info map[string]map[string]string - if setting.LFS.StartServer { - filename2attribute2info, err = t.gitRepo.CheckAttribute(git.CheckAttributeOpts{ - Attributes: []string{"filter"}, - Filenames: names, - CachedOnly: true, - }) - if err != nil { - return err + l.File = nil } + return nil } + return fmt.Errorf("file %s already closed", l.localFilename) +} - // Copy uploaded files into repository. - for i := range infos { - if err := copyUploadedLFSFileIntoRepository(ctx, &infos[i], filename2attribute2info, t, opts.TreePath); err != nil { - return err - } - } +func (l *lazyLocalFileReader) OpenLazyReader() error { + l.mu.Lock() + defer l.mu.Unlock() - // Now write the tree - treeHash, err := t.WriteTree(ctx) - if err != nil { - return err + if l.File != nil { + l.counter++ + return nil } - // Now commit the tree - commitOpts := &CommitTreeUserOptions{ - ParentCommitID: opts.LastCommitID, - TreeHash: treeHash, - CommitMessage: opts.Message, - SignOff: opts.Signoff, - DoerUser: doer, - AuthorIdentity: opts.Author, - CommitterIdentity: opts.Committer, - } - commitHash, err := t.CommitTree(ctx, commitOpts) + file, err := os.Open(l.localFilename) if err != nil { return err } + l.File = file + l.counter = 1 + return nil +} - // Now deal with LFS objects - for i := range infos { - if infos[i].lfsMetaObject == nil { - continue - } - infos[i].lfsMetaObject, err = git_model.NewLFSMetaObject(ctx, infos[i].lfsMetaObject.RepositoryID, infos[i].lfsMetaObject.Pointer) - if err != nil { - // OK Now we need to cleanup - return cleanUpAfterFailure(ctx, &infos, t, err) - } - // Don't move the files yet - we need to ensure that - // everything can be inserted first - } - - // OK now we can insert the data into the store - there's no way to clean up the store - // once it's in there, it's in there. - contentStore := lfs.NewContentStore() - for _, info := range infos { - if err := uploadToLFSContentStore(info, contentStore); err != nil { - return cleanUpAfterFailure(ctx, &infos, t, err) - } - } - - // Then push this tree to NewBranch - if err := t.Push(ctx, doer, commitHash, opts.NewBranch); err != nil { - return err +// UploadRepoFiles uploads files to the given repository +func UploadRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *user_model.User, opts *UploadRepoFileOptions) error { + if len(opts.Files) == 0 { + return nil } - return repo_model.DeleteUploads(ctx, uploads...) -} - -func copyUploadedLFSFileIntoRepository(ctx context.Context, info *uploadInfo, filename2attribute2info map[string]map[string]string, t *TemporaryUploadRepository, treePath string) error { - file, err := os.Open(info.upload.LocalPath()) + uploads, err := repo_model.GetUploadsByUUIDs(ctx, opts.Files) if err != nil { - return err + return fmt.Errorf("GetUploadsByUUIDs [uuids: %v]: %w", opts.Files, err) } - defer file.Close() - var objectHash string - if setting.LFS.StartServer && filename2attribute2info[info.upload.Name] != nil && filename2attribute2info[info.upload.Name]["filter"] == "lfs" { - // Handle LFS - // FIXME: Inefficient! 
this should probably happen in models.Upload - pointer, err := lfs.GeneratePointer(file) - if err != nil { - return err - } - - info.lfsMetaObject = &git_model.LFSMetaObject{Pointer: pointer, RepositoryID: t.repo.ID} - - if objectHash, err = t.HashObject(ctx, strings.NewReader(pointer.StringContent())); err != nil { - return err - } - } else if objectHash, err = t.HashObject(ctx, file); err != nil { - return err + changeOpts := &ChangeRepoFilesOptions{ + LastCommitID: opts.LastCommitID, + OldBranch: opts.OldBranch, + NewBranch: opts.NewBranch, + Message: opts.Message, + Signoff: opts.Signoff, + Author: opts.Author, + Committer: opts.Committer, + } + for _, upload := range uploads { + changeOpts.Files = append(changeOpts.Files, &ChangeRepoFile{ + Operation: "upload", + TreePath: path.Join(opts.TreePath, upload.Name), + ContentReader: &lazyLocalFileReader{localFilename: upload.LocalPath()}, + }) } - // Add the object to the index - return t.AddObjectToIndex(ctx, "100644", objectHash, path.Join(treePath, info.upload.Name)) -} - -func uploadToLFSContentStore(info uploadInfo, contentStore *lfs.ContentStore) error { - if info.lfsMetaObject == nil { - return nil - } - exist, err := contentStore.Exists(info.lfsMetaObject.Pointer) + _, err = ChangeRepoFiles(ctx, repo, doer, changeOpts) if err != nil { return err } - if !exist { - file, err := os.Open(info.upload.LocalPath()) - if err != nil { - return err - } - - defer file.Close() - // FIXME: Put regenerates the hash and copies the file over. - // I guess this strictly ensures the soundness of the store but this is inefficient. - if err := contentStore.Put(info.lfsMetaObject.Pointer, file); err != nil { - // OK Now we need to cleanup - // Can't clean up the store, once uploaded there they're there. - return err - } + if err := repo_model.DeleteUploads(ctx, uploads...); err != nil { + log.Error("DeleteUploads: %v", err) } return nil } diff --git a/services/repository/fork.go b/services/repository/fork.go index 5b1ba7a418..8bd3498b17 100644 --- a/services/repository/fork.go +++ b/services/repository/fork.go @@ -65,7 +65,7 @@ func ForkRepository(ctx context.Context, doer, owner *user_model.User, opts Fork } // Fork is prohibited, if user has reached maximum limit of repositories - if !owner.CanForkRepo() { + if !doer.CanForkRepoIn(owner) { return nil, repo_model.ErrReachLimitOfRepo{ Limit: owner.MaxRepoCreation, } @@ -100,114 +100,106 @@ func ForkRepository(ctx context.Context, doer, owner *user_model.User, opts Fork IsFork: true, ForkID: opts.BaseRepo.ID, ObjectFormatName: opts.BaseRepo.ObjectFormatName, + Status: repo_model.RepositoryBeingMigrated, } - oldRepoPath := opts.BaseRepo.RepoPath() - - needsRollback := false - rollbackFn := func() { - if !needsRollback { - return - } - - if exists, _ := gitrepo.IsRepositoryExist(ctx, repo); !exists { - return - } - - // As the transaction will be failed and hence database changes will be destroyed we only need - // to delete the related repository on the filesystem - if errDelete := gitrepo.DeleteRepository(ctx, repo); errDelete != nil { - log.Error("Failed to remove fork repo") - } - } - - needsRollbackInPanic := true - defer func() { - panicErr := recover() - if panicErr == nil { - return - } - - if needsRollbackInPanic { - rollbackFn() - } - panic(panicErr) - }() - - err = db.WithTx(ctx, func(txCtx context.Context) error { - if err = CreateRepositoryByExample(txCtx, doer, owner, repo, false, true); err != nil { + // 1 - Create the repository in the database + err = db.WithTx(ctx, func(ctx 
context.Context) error { + if err = createRepositoryInDB(ctx, doer, owner, repo, true); err != nil { return err } - - if err = repo_model.IncrementRepoForkNum(txCtx, opts.BaseRepo.ID); err != nil { + if err = repo_model.IncrementRepoForkNum(ctx, opts.BaseRepo.ID); err != nil { return err } // copy lfs files failure should not be ignored - if err = git_model.CopyLFS(txCtx, repo, opts.BaseRepo); err != nil { - return err - } - - needsRollback = true + return git_model.CopyLFS(ctx, repo, opts.BaseRepo) + }) + if err != nil { + return nil, err + } - cloneCmd := git.NewCommand("clone", "--bare") - if opts.SingleBranch != "" { - cloneCmd.AddArguments("--single-branch", "--branch").AddDynamicArguments(opts.SingleBranch) - } - if stdout, _, err := cloneCmd.AddDynamicArguments(oldRepoPath, repo.RepoPath()). - RunStdBytes(txCtx, &git.RunOpts{Timeout: 10 * time.Minute}); err != nil { - log.Error("Fork Repository (git clone) Failed for %v (from %v):\nStdout: %s\nError: %v", repo, opts.BaseRepo, stdout, err) - return fmt.Errorf("git clone: %w", err) + // last - clean up if something goes wrong + // WARNING: Don't override all later err with local variables + defer func() { + if err != nil { + // we can not use the ctx because it maybe canceled or timeout + cleanupRepository(repo.ID) } + }() - if err := repo_module.CheckDaemonExportOK(txCtx, repo); err != nil { - return fmt.Errorf("checkDaemonExportOK: %w", err) + // 2 - check whether the repository with the same storage exists + var isExist bool + isExist, err = gitrepo.IsRepositoryExist(ctx, repo) + if err != nil { + log.Error("Unable to check if %s exists. Error: %v", repo.FullName(), err) + return nil, err + } + if isExist { + log.Error("Files already exist in %s and we are not going to adopt or delete.", repo.FullName()) + // Don't return directly, we need err in defer to cleanupRepository + err = repo_model.ErrRepoFilesAlreadyExist{ + Uname: repo.OwnerName, + Name: repo.Name, } + return nil, err + } - if stdout, _, err := git.NewCommand("update-server-info"). - RunStdString(txCtx, &git.RunOpts{Dir: repo.RepoPath()}); err != nil { - log.Error("Fork Repository (git update-server-info) failed for %v:\nStdout: %s\nError: %v", repo, stdout, err) - return fmt.Errorf("git update-server-info: %w", err) - } + // 3 - Clone the repository + cloneCmd := git.NewCommand("clone", "--bare") + if opts.SingleBranch != "" { + cloneCmd.AddArguments("--single-branch", "--branch").AddDynamicArguments(opts.SingleBranch) + } + var stdout []byte + if stdout, _, err = cloneCmd.AddDynamicArguments(opts.BaseRepo.RepoPath(), repo.RepoPath()). 
+ RunStdBytes(ctx, &git.RunOpts{Timeout: 10 * time.Minute}); err != nil { + log.Error("Fork Repository (git clone) Failed for %v (from %v):\nStdout: %s\nError: %v", repo, opts.BaseRepo, stdout, err) + return nil, fmt.Errorf("git clone: %w", err) + } - if err = gitrepo.CreateDelegateHooks(ctx, repo); err != nil { - return fmt.Errorf("createDelegateHooks: %w", err) - } + // 4 - Update the git repository + if err = updateGitRepoAfterCreate(ctx, repo); err != nil { + return nil, fmt.Errorf("updateGitRepoAfterCreate: %w", err) + } - gitRepo, err := gitrepo.OpenRepository(txCtx, repo) - if err != nil { - return fmt.Errorf("OpenRepository: %w", err) - } - defer gitRepo.Close() + // 5 - Create hooks + if err = gitrepo.CreateDelegateHooks(ctx, repo); err != nil { + return nil, fmt.Errorf("createDelegateHooks: %w", err) + } - _, err = repo_module.SyncRepoBranchesWithRepo(txCtx, repo, gitRepo, doer.ID) - return err - }) - needsRollbackInPanic = false + // 6 - Sync the repository branches and tags + var gitRepo *git.Repository + gitRepo, err = gitrepo.OpenRepository(ctx, repo) if err != nil { - rollbackFn() - return nil, err + return nil, fmt.Errorf("OpenRepository: %w", err) } + defer gitRepo.Close() + if _, err = repo_module.SyncRepoBranchesWithRepo(ctx, repo, gitRepo, doer.ID); err != nil { + return nil, fmt.Errorf("SyncRepoBranchesWithRepo: %w", err) + } + if err = repo_module.SyncReleasesWithTags(ctx, repo, gitRepo); err != nil { + return nil, fmt.Errorf("Sync releases from git tags failed: %v", err) + } + + // 7 - Update the repository // even if below operations failed, it could be ignored. And they will be retried - if err := repo_module.UpdateRepoSize(ctx, repo); err != nil { + if err = repo_module.UpdateRepoSize(ctx, repo); err != nil { log.Error("Failed to update size for repository: %v", err) + err = nil } - if err := repo_model.CopyLanguageStat(ctx, opts.BaseRepo, repo); err != nil { + if err = repo_model.CopyLanguageStat(ctx, opts.BaseRepo, repo); err != nil { log.Error("Copy language stat from oldRepo failed: %v", err) + err = nil } - if err := repo_model.CopyLicense(ctx, opts.BaseRepo, repo); err != nil { + if err = repo_model.CopyLicense(ctx, opts.BaseRepo, repo); err != nil { return nil, err } - gitRepo, err := gitrepo.OpenRepository(ctx, repo) - if err != nil { - log.Error("Open created git repository failed: %v", err) - } else { - defer gitRepo.Close() - if err := repo_module.SyncReleasesWithTags(ctx, repo, gitRepo); err != nil { - log.Error("Sync releases from git tags failed: %v", err) - } + // 8 - update repository status to be ready + repo.Status = repo_model.RepositoryReady + if err = repo_model.UpdateRepositoryColsWithAutoTime(ctx, repo, "status"); err != nil { + return nil, fmt.Errorf("UpdateRepositoryCols: %w", err) } notify_service.ForkRepository(ctx, doer, opts.BaseRepo, repo) @@ -217,7 +209,7 @@ func ForkRepository(ctx context.Context, doer, owner *user_model.User, opts Fork // ConvertForkToNormalRepository convert the provided repo from a forked repo to normal repo func ConvertForkToNormalRepository(ctx context.Context, repo *repo_model.Repository) error { - err := db.WithTx(ctx, func(ctx context.Context) error { + return db.WithTx(ctx, func(ctx context.Context) error { repo, err := repo_model.GetRepositoryByID(ctx, repo.ID) if err != nil { return err @@ -234,16 +226,8 @@ func ConvertForkToNormalRepository(ctx context.Context, repo *repo_model.Reposit repo.IsFork = false repo.ForkID = 0 - - if err := repo_module.UpdateRepository(ctx, repo, false); err != nil { - 
log.Error("Unable to update repository %-v whilst converting from fork. Error: %v", repo, err) - return err - } - - return nil + return repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "is_fork", "fork_id") }) - - return err } type findForksOptions struct { diff --git a/services/repository/fork_test.go b/services/repository/fork_test.go index 452798b25b..5375f79028 100644 --- a/services/repository/fork_test.go +++ b/services/repository/fork_test.go @@ -4,13 +4,17 @@ package repository import ( + "os" "testing" + "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/test" + "code.gitea.io/gitea/modules/util" "github.com/stretchr/testify/assert" ) @@ -35,7 +39,7 @@ func TestForkRepository(t *testing.T) { assert.False(t, repo_model.IsErrReachLimitOfRepo(err)) // change AllowForkWithoutMaximumLimit to false for the test - setting.Repository.AllowForkWithoutMaximumLimit = false + defer test.MockVariableValue(&setting.Repository.AllowForkWithoutMaximumLimit, false)() // user has reached maximum limit of repositories user.MaxRepoCreation = 0 fork2, err := ForkRepository(git.DefaultContext, user, user, ForkRepoOptions{ @@ -46,3 +50,43 @@ func TestForkRepository(t *testing.T) { assert.Nil(t, fork2) assert.True(t, repo_model.IsErrReachLimitOfRepo(err)) } + +func TestForkRepositoryCleanup(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + // a successful fork + user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}) + repo10 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 10}) + + fork, err := ForkRepository(git.DefaultContext, user2, user2, ForkRepoOptions{ + BaseRepo: repo10, + Name: "test", + }) + assert.NoError(t, err) + assert.NotNil(t, fork) + + exist, err := util.IsExist(repo_model.RepoPath(user2.Name, "test")) + assert.NoError(t, err) + assert.True(t, exist) + + err = DeleteRepositoryDirectly(db.DefaultContext, fork.ID) + assert.NoError(t, err) + + // a failed creating because some mock data + // create the repository directory so that the creation will fail after database record created. 
+ assert.NoError(t, os.MkdirAll(repo_model.RepoPath(user2.Name, "test"), os.ModePerm)) + + fork2, err := ForkRepository(db.DefaultContext, user2, user2, ForkRepoOptions{ + BaseRepo: repo10, + Name: "test", + }) + assert.Nil(t, fork2) + assert.Error(t, err) + + // assert the cleanup is successful + unittest.AssertNotExistsBean(t, &repo_model.Repository{OwnerName: user2.Name, Name: "test"}) + + exist, err = util.IsExist(repo_model.RepoPath(user2.Name, "test")) + assert.NoError(t, err) + assert.False(t, exist) +} diff --git a/services/repository/generate.go b/services/repository/generate.go index 9d2bbb1f7f..867b5d7855 100644 --- a/services/repository/generate.go +++ b/services/repository/generate.go @@ -17,11 +17,11 @@ import ( git_model "code.gitea.io/gitea/models/git" repo_model "code.gitea.io/gitea/models/repo" - user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/log" repo_module "code.gitea.io/gitea/modules/repository" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" "github.com/gobwas/glob" @@ -42,10 +42,8 @@ type expansion struct { var defaultTransformers = []transformer{ {Name: "SNAKE", Transform: xstrings.ToSnakeCase}, {Name: "KEBAB", Transform: xstrings.ToKebabCase}, - {Name: "CAMEL", Transform: func(str string) string { - return xstrings.FirstRuneToLower(xstrings.ToCamelCase(str)) - }}, - {Name: "PASCAL", Transform: xstrings.ToCamelCase}, + {Name: "CAMEL", Transform: xstrings.ToCamelCase}, + {Name: "PASCAL", Transform: xstrings.ToPascalCase}, {Name: "LOWER", Transform: strings.ToLower}, {Name: "UPPER", Transform: strings.ToUpper}, {Name: "TITLE", Transform: util.ToTitleCase}, @@ -255,48 +253,35 @@ func generateRepoCommit(ctx context.Context, repo, templateRepo, generateRepo *r return initRepoCommit(ctx, tmpDir, repo, repo.Owner, defaultBranch) } -func generateGitContent(ctx context.Context, repo, templateRepo, generateRepo *repo_model.Repository) (err error) { - tmpDir, err := os.MkdirTemp(os.TempDir(), "gitea-"+repo.Name) +// GenerateGitContent generates git content from a template repository +func GenerateGitContent(ctx context.Context, templateRepo, generateRepo *repo_model.Repository) (err error) { + tmpDir, cleanup, err := setting.AppDataTempDir("git-repo-content").MkdirTempRandom("gitea-" + generateRepo.Name) if err != nil { - return fmt.Errorf("Failed to create temp dir for repository %s: %w", repo.FullName(), err) + return fmt.Errorf("failed to create temp dir for repository %s: %w", generateRepo.FullName(), err) } + defer cleanup() - defer func() { - if err := util.RemoveAll(tmpDir); err != nil { - log.Error("RemoveAll: %v", err) - } - }() - - if err = generateRepoCommit(ctx, repo, templateRepo, generateRepo, tmpDir); err != nil { + if err = generateRepoCommit(ctx, generateRepo, templateRepo, generateRepo, tmpDir); err != nil { return fmt.Errorf("generateRepoCommit: %w", err) } // re-fetch repo - if repo, err = repo_model.GetRepositoryByID(ctx, repo.ID); err != nil { + if generateRepo, err = repo_model.GetRepositoryByID(ctx, generateRepo.ID); err != nil { return fmt.Errorf("getRepositoryByID: %w", err) } // if there was no default branch supplied when generating the repo, use the default one from the template - if strings.TrimSpace(repo.DefaultBranch) == "" { - repo.DefaultBranch = templateRepo.DefaultBranch + if strings.TrimSpace(generateRepo.DefaultBranch) == "" { + generateRepo.DefaultBranch = templateRepo.DefaultBranch } - if err = 
gitrepo.SetDefaultBranch(ctx, repo, repo.DefaultBranch); err != nil { + if err = gitrepo.SetDefaultBranch(ctx, generateRepo, generateRepo.DefaultBranch); err != nil { return fmt.Errorf("setDefaultBranch: %w", err) } - if err = UpdateRepository(ctx, repo, false); err != nil { + if err = repo_model.UpdateRepositoryColsNoAutoTime(ctx, generateRepo, "default_branch"); err != nil { return fmt.Errorf("updateRepository: %w", err) } - return nil -} - -// GenerateGitContent generates git content from a template repository -func GenerateGitContent(ctx context.Context, templateRepo, generateRepo *repo_model.Repository) error { - if err := generateGitContent(ctx, generateRepo, templateRepo, generateRepo); err != nil { - return err - } - if err := repo_module.UpdateRepoSize(ctx, generateRepo); err != nil { return fmt.Errorf("failed to update size for repository: %w", err) } @@ -328,57 +313,6 @@ func (gro GenerateRepoOptions) IsValid() bool { gro.IssueLabels || gro.ProtectedBranch // or other items as they are added } -// generateRepository generates a repository from a template -func generateRepository(ctx context.Context, doer, owner *user_model.User, templateRepo *repo_model.Repository, opts GenerateRepoOptions) (_ *repo_model.Repository, err error) { - generateRepo := &repo_model.Repository{ - OwnerID: owner.ID, - Owner: owner, - OwnerName: owner.Name, - Name: opts.Name, - LowerName: strings.ToLower(opts.Name), - Description: opts.Description, - DefaultBranch: opts.DefaultBranch, - IsPrivate: opts.Private, - IsEmpty: !opts.GitContent || templateRepo.IsEmpty, - IsFsckEnabled: templateRepo.IsFsckEnabled, - TemplateID: templateRepo.ID, - TrustModel: templateRepo.TrustModel, - ObjectFormatName: templateRepo.ObjectFormatName, - } - - if err = CreateRepositoryByExample(ctx, doer, owner, generateRepo, false, false); err != nil { - return nil, err - } - - isExist, err := gitrepo.IsRepositoryExist(ctx, generateRepo) - if err != nil { - log.Error("Unable to check if %s exists. Error: %v", generateRepo.FullName(), err) - return nil, err - } - if isExist { - return nil, repo_model.ErrRepoFilesAlreadyExist{ - Uname: generateRepo.OwnerName, - Name: generateRepo.Name, - } - } - - if err = repo_module.CheckInitRepository(ctx, generateRepo); err != nil { - return generateRepo, err - } - - if err = repo_module.CheckDaemonExportOK(ctx, generateRepo); err != nil { - return generateRepo, fmt.Errorf("checkDaemonExportOK: %w", err) - } - - if stdout, _, err := git.NewCommand("update-server-info"). 
- RunStdString(ctx, &git.RunOpts{Dir: generateRepo.RepoPath()}); err != nil { - log.Error("GenerateRepository(git update-server-info) in %v: Stdout: %s\nError: %v", generateRepo, stdout, err) - return generateRepo, fmt.Errorf("error in GenerateRepository(git update-server-info): %w", err) - } - - return generateRepo, nil -} - var fileNameSanitizeRegexp = regexp.MustCompile(`(?i)\.\.|[<>:\"/\\|?*\x{0000}-\x{001F}]|^(con|prn|aux|nul|com\d|lpt\d)$`) // Sanitize user input to valid OS filenames diff --git a/services/repository/generate_test.go b/services/repository/generate_test.go index b0f97d0ffb..1163c392c9 100644 --- a/services/repository/generate_test.go +++ b/services/repository/generate_test.go @@ -7,6 +7,7 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) var giteaTemplate = []byte(` @@ -65,3 +66,26 @@ func TestFileNameSanitize(t *testing.T) { assert.Equal(t, "_", fileNameSanitize("\u0000")) assert.Equal(t, "目标", fileNameSanitize("目标")) } + +func TestTransformers(t *testing.T) { + cases := []struct { + name string + expected string + }{ + {"SNAKE", "abc_def_xyz"}, + {"KEBAB", "abc-def-xyz"}, + {"CAMEL", "abcDefXyz"}, + {"PASCAL", "AbcDefXyz"}, + {"LOWER", "abc_def-xyz"}, + {"UPPER", "ABC_DEF-XYZ"}, + {"TITLE", "Abc_def-Xyz"}, + } + + input := "Abc_Def-XYZ" + assert.Len(t, defaultTransformers, len(cases)) + for i, c := range cases { + tf := defaultTransformers[i] + require.Equal(t, c.name, tf.Name) + assert.Equal(t, c.expected, tf.Transform(input), "case %s", c.name) + } +} diff --git a/services/repository/gitgraph/graph_models.go b/services/repository/gitgraph/graph_models.go index c45662836b..02b0268cd9 100644 --- a/services/repository/gitgraph/graph_models.go +++ b/services/repository/gitgraph/graph_models.go @@ -121,7 +121,7 @@ func (graph *Graph) LoadAndProcessCommits(ctx context.Context, repository *repo_ return repo_model.IsOwnerMemberCollaborator(ctx, repository, user.ID) }, &keyMap) - statuses, _, err := git_model.GetLatestCommitStatus(ctx, repository.ID, c.Commit.ID.String(), db.ListOptions{}) + statuses, err := git_model.GetLatestCommitStatus(ctx, repository.ID, c.Commit.ID.String(), db.ListOptionsAll) if err != nil { log.Error("GetLatestCommitStatus: %v", err) } else { @@ -232,8 +232,8 @@ func newRefsFromRefNames(refNames []byte) []git.Reference { continue } refName := string(refNameBytes) - if strings.HasPrefix(refName, "tag: ") { - refName = strings.TrimPrefix(refName, "tag: ") + if after, ok := strings.CutPrefix(refName, "tag: "); ok { + refName = after } else { refName = strings.TrimPrefix(refName, "HEAD -> ") } diff --git a/services/repository/gitgraph/graph_test.go b/services/repository/gitgraph/graph_test.go index 4c48b94aa2..93fa1aec6a 100644 --- a/services/repository/gitgraph/graph_test.go +++ b/services/repository/gitgraph/graph_test.go @@ -6,6 +6,7 @@ package gitgraph import ( "bytes" "fmt" + "slices" "strings" "testing" @@ -117,13 +118,7 @@ func TestReleaseUnusedColors(t *testing.T) { if parser.firstAvailable == -1 { // All in use for _, color := range parser.availableColors { - found := false - for _, oldColor := range parser.oldColors { - if oldColor == color { - found = true - break - } - } + found := slices.Contains(parser.oldColors, color) if !found { t.Errorf("In testcase:\n%d\t%d\t%d %d =>\n%d\t%d\t%d %d: %d should be available but is not", testcase.availableColors, @@ -141,13 +136,7 @@ func TestReleaseUnusedColors(t *testing.T) { // Some in use for i := parser.firstInUse; i != 
parser.firstAvailable; i = (i + 1) % len(parser.availableColors) { color := parser.availableColors[i] - found := false - for _, oldColor := range parser.oldColors { - if oldColor == color { - found = true - break - } - } + found := slices.Contains(parser.oldColors, color) if !found { t.Errorf("In testcase:\n%d\t%d\t%d %d =>\n%d\t%d\t%d %d: %d should be available but is not", testcase.availableColors, @@ -163,13 +152,7 @@ func TestReleaseUnusedColors(t *testing.T) { } for i := parser.firstAvailable; i != parser.firstInUse; i = (i + 1) % len(parser.availableColors) { color := parser.availableColors[i] - found := false - for _, oldColor := range parser.oldColors { - if oldColor == color { - found = true - break - } - } + found := slices.Contains(parser.oldColors, color) if found { t.Errorf("In testcase:\n%d\t%d\t%d %d =>\n%d\t%d\t%d %d: %d should not be available but is", testcase.availableColors, diff --git a/services/repository/init.go b/services/repository/init.go index bd777b8a2f..1eeeb4aa4f 100644 --- a/services/repository/init.go +++ b/services/repository/init.go @@ -42,9 +42,12 @@ func initRepoCommit(ctx context.Context, tmpPath string, repo *repo_model.Reposi cmd := git.NewCommand("commit", "--message=Initial commit"). AddOptionFormat("--author='%s <%s>'", sig.Name, sig.Email) - sign, keyID, signer, _ := asymkey_service.SignInitialCommit(ctx, tmpPath, u) + sign, key, signer, _ := asymkey_service.SignInitialCommit(ctx, tmpPath, u) if sign { - cmd.AddOptionFormat("-S%s", keyID) + if key.Format != "" { + cmd.AddConfig("gpg.format", key.Format) + } + cmd.AddOptionFormat("-S%s", key.KeyID) if repo.GetTrustModel() == repo_model.CommitterTrustModel || repo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel { // need to set the committer to the KeyID owner diff --git a/services/repository/license_test.go b/services/repository/license_test.go index 9e74a268f5..eb897f3c03 100644 --- a/services/repository/license_test.go +++ b/services/repository/license_test.go @@ -4,7 +4,6 @@ package repository import ( - "fmt" "strings" "testing" @@ -45,7 +44,7 @@ func Test_detectLicense(t *testing.T) { assert.NoError(t, err) tests = append(tests, DetectLicenseTest{ - name: fmt.Sprintf("single license test: %s", licenseName), + name: "single license test: " + licenseName, arg: string(license), want: []string{licenseName}, }) diff --git a/services/repository/merge_upstream.go b/services/repository/merge_upstream.go index 34e01df723..8d6f11372c 100644 --- a/services/repository/merge_upstream.go +++ b/services/repository/merge_upstream.go @@ -18,7 +18,7 @@ import ( ) // MergeUpstream merges the base repository's default branch into the fork repository's current branch. 
-func MergeUpstream(ctx reqctx.RequestContext, doer *user_model.User, repo *repo_model.Repository, branch string) (mergeStyle string, err error) { +func MergeUpstream(ctx reqctx.RequestContext, doer *user_model.User, repo *repo_model.Repository, branch string, ffOnly bool) (mergeStyle string, err error) { if err = repo.MustNotBeArchived(); err != nil { return "", err } @@ -45,6 +45,11 @@ func MergeUpstream(ctx reqctx.RequestContext, doer *user_model.User, repo *repo_ return "", err } + // If ff_only is requested and fast-forward failed, return error + if ffOnly { + return "", util.NewInvalidArgumentErrorf("fast-forward merge not possible: branch has diverged") + } + // TODO: FakePR: it is somewhat hacky, but it is the only way to "merge" at the moment // ideally in the future the "merge" functions should be refactored to decouple from the PullRequest fakeIssue := &issue_model.Issue{ diff --git a/services/repository/migrate.go b/services/repository/migrate.go index 5b6feccb8d..0a3dc45339 100644 --- a/services/repository/migrate.go +++ b/services/repository/migrate.go @@ -13,6 +13,7 @@ import ( "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/organization" repo_model "code.gitea.io/gitea/models/repo" + unit_model "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/gitrepo" @@ -117,14 +118,8 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User, repo.Owner = u } - if err := repo_module.CheckDaemonExportOK(ctx, repo); err != nil { - return repo, fmt.Errorf("checkDaemonExportOK: %w", err) - } - - if stdout, _, err := git.NewCommand("update-server-info"). - RunStdString(ctx, &git.RunOpts{Dir: repoPath}); err != nil { - log.Error("MigrateRepositoryGitData(git update-server-info) in %v: Stdout: %s\nError: %v", repo, stdout, err) - return repo, fmt.Errorf("error in MigrateRepositoryGitData(git update-server-info): %w", err) + if err := updateGitRepoAfterCreate(ctx, repo); err != nil { + return nil, fmt.Errorf("updateGitRepoAfterCreate: %w", err) } gitRepo, err := git.OpenRepository(ctx, repoPath) @@ -141,12 +136,12 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User, if !repo.IsEmpty { if len(repo.DefaultBranch) == 0 { // Try to get HEAD branch and set it as default branch. 
- headBranch, err := gitRepo.GetHEADBranch() + headBranchName, err := git.GetDefaultBranch(ctx, repoPath) if err != nil { return repo, fmt.Errorf("GetHEADBranch: %w", err) } - if headBranch != nil { - repo.DefaultBranch = headBranch.Name + if headBranchName != "" { + repo.DefaultBranch = headBranchName } } @@ -154,9 +149,9 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User, return repo, fmt.Errorf("SyncRepoBranchesWithRepo: %v", err) } + // if releases migration are not requested, we will sync all tags here + // otherwise, the releases sync will be done out of this function if !opts.Releases { - // note: this will greatly improve release (tag) sync - // for pull-mirrors with many tags repo.IsMirror = opts.Mirror if err = repo_module.SyncReleasesWithTags(ctx, repo, gitRepo); err != nil { log.Error("Failed to synchronize tags to releases for repository: %v", err) @@ -225,10 +220,14 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User, } repo.IsMirror = true - if err = UpdateRepository(ctx, repo, false); err != nil { + if err = repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "num_watches", "is_empty", "default_branch", "default_wiki_branch", "is_mirror"); err != nil { return nil, err } + if err = repo_module.UpdateRepoSize(ctx, repo); err != nil { + log.Error("Failed to update size for repository: %v", err) + } + // this is necessary for sync local tags from remote configName := fmt.Sprintf("remote.%s.fetch", mirrorModel.GetRemoteName()) if stdout, _, err := git.NewCommand("config"). @@ -246,6 +245,19 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User, } } + var enableRepoUnits []repo_model.RepoUnit + if opts.Releases && !unit_model.TypeReleases.UnitGlobalDisabled() { + enableRepoUnits = append(enableRepoUnits, repo_model.RepoUnit{RepoID: repo.ID, Type: unit_model.TypeReleases}) + } + if opts.Wiki && !unit_model.TypeWiki.UnitGlobalDisabled() { + enableRepoUnits = append(enableRepoUnits, repo_model.RepoUnit{RepoID: repo.ID, Type: unit_model.TypeWiki}) + } + if len(enableRepoUnits) > 0 { + err = UpdateRepositoryUnits(ctx, repo, enableRepoUnits, nil) + if err != nil { + return nil, err + } + } return repo, committer.Commit() } diff --git a/services/repository/push.go b/services/repository/push.go index 6d3b9dd252..7c68a7f176 100644 --- a/services/repository/push.go +++ b/services/repository/push.go @@ -66,7 +66,7 @@ func PushUpdates(opts []*repo_module.PushUpdateOptions) error { for _, opt := range opts { if opt.IsNewRef() && opt.IsDelRef() { - return fmt.Errorf("Old and new revisions are both NULL") + return errors.New("Old and new revisions are both NULL") } } @@ -232,7 +232,7 @@ func pushUpdates(optsList []*repo_module.PushUpdateOptions) error { } if len(addTags)+len(delTags) > 0 { - if err := PushUpdateAddDeleteTags(ctx, repo, gitRepo, addTags, delTags); err != nil { + if err := PushUpdateAddDeleteTags(ctx, repo, gitRepo, pusher, addTags, delTags); err != nil { return fmt.Errorf("PushUpdateAddDeleteTags: %w", err) } } @@ -283,7 +283,7 @@ func pushNewBranch(ctx context.Context, repo *repo_model.Repository, pusher *use } } // Update the is empty and default_branch columns - if err := repo_model.UpdateRepositoryCols(ctx, repo, "default_branch", "is_empty"); err != nil { + if err := repo_model.UpdateRepositoryColsWithAutoTime(ctx, repo, "default_branch", "is_empty"); err != nil { return nil, fmt.Errorf("UpdateRepositoryCols: %w", err) } } @@ -342,17 +342,17 @@ func pushDeleteBranch(ctx context.Context, repo 
*repo_model.Repository, pusher * } // PushUpdateAddDeleteTags updates a number of added and delete tags -func PushUpdateAddDeleteTags(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, addTags, delTags []string) error { +func PushUpdateAddDeleteTags(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, pusher *user_model.User, addTags, delTags []string) error { return db.WithTx(ctx, func(ctx context.Context) error { - if err := repo_model.PushUpdateDeleteTagsContext(ctx, repo, delTags); err != nil { + if err := repo_model.PushUpdateDeleteTags(ctx, repo, delTags); err != nil { return err } - return pushUpdateAddTags(ctx, repo, gitRepo, addTags) + return pushUpdateAddTags(ctx, repo, gitRepo, pusher, addTags) }) } // pushUpdateAddTags updates a number of add tags -func pushUpdateAddTags(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, tags []string) error { +func pushUpdateAddTags(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, pusher *user_model.User, tags []string) error { if len(tags) == 0 { return nil } @@ -378,14 +378,12 @@ func pushUpdateAddTags(ctx context.Context, repo *repo_model.Repository, gitRepo newReleases := make([]*repo_model.Release, 0, len(lowerTags)-len(relMap)) - emailToUser := make(map[string]*user_model.User) - for i, lowerTag := range lowerTags { tag, err := gitRepo.GetTag(tags[i]) if err != nil { return fmt.Errorf("GetTag: %w", err) } - commit, err := tag.Commit(gitRepo) + commit, err := gitRepo.GetTagCommit(tag.Name) if err != nil { return fmt.Errorf("Commit: %w", err) } @@ -397,69 +395,42 @@ func pushUpdateAddTags(ctx context.Context, repo *repo_model.Repository, gitRepo if sig == nil { sig = commit.Committer } - var author *user_model.User - createdAt := time.Unix(1, 0) + createdAt := time.Unix(1, 0) if sig != nil { - var ok bool - author, ok = emailToUser[sig.Email] - if !ok { - author, err = user_model.GetUserByEmail(ctx, sig.Email) - if err != nil && !user_model.IsErrUserNotExist(err) { - return fmt.Errorf("GetUserByEmail: %w", err) - } - if author != nil { - emailToUser[sig.Email] = author - } - } createdAt = sig.When } - commitsCount, err := commit.CommitsCount() - if err != nil { - return fmt.Errorf("CommitsCount: %w", err) - } - rel, has := relMap[lowerTag] - - parts := strings.SplitN(tag.Message, "\n", 2) - note := "" - if len(parts) > 1 { - note = parts[1] - } + title, note := git.SplitCommitTitleBody(tag.Message, 255) if !has { rel = &repo_model.Release{ RepoID: repo.ID, - Title: parts[0], + Title: title, TagName: tags[i], LowerTagName: lowerTag, Target: "", Sha1: commit.ID.String(), - NumCommits: commitsCount, + NumCommits: -1, // the commits count will be updated when the UI needs it Note: note, IsDraft: false, IsPrerelease: false, IsTag: true, + PublisherID: pusher.ID, CreatedUnix: timeutil.TimeStamp(createdAt.Unix()), } - if author != nil { - rel.PublisherID = author.ID - } newReleases = append(newReleases, rel) } else { rel.Sha1 = commit.ID.String() rel.CreatedUnix = timeutil.TimeStamp(createdAt.Unix()) - rel.NumCommits = commitsCount if rel.IsTag { - rel.Title = parts[0] + rel.Title = title rel.Note = note - if author != nil { - rel.PublisherID = author.ID - } } else { rel.IsDraft = false } + rel.PublisherID = pusher.ID if err = repo_model.UpdateRelease(ctx, rel); err != nil { return fmt.Errorf("Update: %w", err) } diff --git a/services/repository/repo_team.go b/services/repository/repo_team.go index 672ee49fea..8ea186f8cc 100644 --- 
a/services/repository/repo_team.go +++ b/services/repository/repo_team.go @@ -86,17 +86,9 @@ func RemoveAllRepositoriesFromTeam(ctx context.Context, t *organization.Team) (e return nil } - ctx, committer, err := db.TxContext(ctx) - if err != nil { - return err - } - defer committer.Close() - - if err = removeAllRepositoriesFromTeam(ctx, t); err != nil { - return err - } - - return committer.Commit() + return db.WithTx(ctx, func(ctx context.Context) error { + return removeAllRepositoriesFromTeam(ctx, t) + }) } // removeAllRepositoriesFromTeam removes all repositories from team and recalculates access @@ -167,17 +159,9 @@ func RemoveRepositoryFromTeam(ctx context.Context, t *organization.Team, repoID return err } - ctx, committer, err := db.TxContext(ctx) - if err != nil { - return err - } - defer committer.Close() - - if err = removeRepositoryFromTeam(ctx, t, repo, true); err != nil { - return err - } - - return committer.Commit() + return db.WithTx(ctx, func(ctx context.Context) error { + return removeRepositoryFromTeam(ctx, t, repo, true) + }) } // removeRepositoryFromTeam removes a repository from a team and recalculates access diff --git a/services/repository/repository.go b/services/repository/repository.go index fcc617979e..e574dc6c01 100644 --- a/services/repository/repository.go +++ b/services/repository/repository.go @@ -5,22 +5,29 @@ package repository import ( "context" + "errors" "fmt" + "os" + "path/filepath" + "strings" + activities_model "code.gitea.io/gitea/models/activities" "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/git" issues_model "code.gitea.io/gitea/models/issues" "code.gitea.io/gitea/models/organization" + access_model "code.gitea.io/gitea/models/perm/access" repo_model "code.gitea.io/gitea/models/repo" - system_model "code.gitea.io/gitea/models/system" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/graceful" + issue_indexer "code.gitea.io/gitea/modules/indexer/issues" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/queue" repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/util" notify_service "code.gitea.io/gitea/services/notify" pull_service "code.gitea.io/gitea/services/pull" ) @@ -40,7 +47,7 @@ type WebSearchResults struct { // CreateRepository creates a repository for the user/organization. func CreateRepository(ctx context.Context, doer, owner *user_model.User, opts CreateRepoOptions) (*repo_model.Repository, error) { - repo, err := CreateRepositoryDirectly(ctx, doer, owner, opts) + repo, err := CreateRepositoryDirectly(ctx, doer, owner, opts, true) if err != nil { // No need to rollback here we should do this in CreateRepository... 
return nil, err @@ -62,7 +69,7 @@ func DeleteRepository(ctx context.Context, doer *user_model.User, repo *repo_mod notify_service.DeleteRepository(ctx, doer, repo) } - return DeleteRepositoryDirectly(ctx, doer, repo.ID) + return DeleteRepositoryDirectly(ctx, repo.ID) } // PushCreateRepo creates a repository when a new repository is pushed to an appropriate namespace @@ -72,10 +79,10 @@ func PushCreateRepo(ctx context.Context, authUser, owner *user_model.User, repoN if ok, err := organization.CanCreateOrgRepo(ctx, owner.ID, authUser.ID); err != nil { return nil, err } else if !ok { - return nil, fmt.Errorf("cannot push-create repository for org") + return nil, errors.New("cannot push-create repository for org") } } else if authUser.ID != owner.ID { - return nil, fmt.Errorf("cannot push-create repository for another user") + return nil, errors.New("cannot push-create repository for another user") } } @@ -94,15 +101,13 @@ func PushCreateRepo(ctx context.Context, authUser, owner *user_model.User, repoN func Init(ctx context.Context) error { licenseUpdaterQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "repo_license_updater", repoLicenseUpdater) if licenseUpdaterQueue == nil { - return fmt.Errorf("unable to create repo_license_updater queue") + return errors.New("unable to create repo_license_updater queue") } go graceful.GetManager().RunWithCancel(licenseUpdaterQueue) if err := repo_module.LoadRepoConfig(); err != nil { return err } - system_model.RemoveAllWithNotice(ctx, "Clean up temporary repository uploads", setting.Repository.Upload.TempPath) - system_model.RemoveAllWithNotice(ctx, "Clean up temporary repositories", repo_module.LocalCopyPath()) if err := initPushQueue(); err != nil { return err } @@ -111,42 +116,107 @@ func Init(ctx context.Context) error { // UpdateRepository updates a repository func UpdateRepository(ctx context.Context, repo *repo_model.Repository, visibilityChanged bool) (err error) { - ctx, committer, err := db.TxContext(ctx) - if err != nil { - return err - } - defer committer.Close() - - if err = repo_module.UpdateRepository(ctx, repo, visibilityChanged); err != nil { - return fmt.Errorf("updateRepository: %w", err) - } - - return committer.Commit() + return db.WithTx(ctx, func(ctx context.Context) error { + if err = updateRepository(ctx, repo, visibilityChanged); err != nil { + return fmt.Errorf("updateRepository: %w", err) + } + return nil + }) } -func UpdateRepositoryVisibility(ctx context.Context, repo *repo_model.Repository, isPrivate bool) (err error) { - ctx, committer, err := db.TxContext(ctx) - if err != nil { - return err - } +func MakeRepoPublic(ctx context.Context, repo *repo_model.Repository) (err error) { + return db.WithTx(ctx, func(ctx context.Context) error { + repo.IsPrivate = false + if err := repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "is_private"); err != nil { + return err + } - defer committer.Close() + if err = repo.LoadOwner(ctx); err != nil { + return fmt.Errorf("LoadOwner: %w", err) + } + if repo.Owner.IsOrganization() { + // Organization repository need to recalculate access table when visibility is changed. + if err = access_model.RecalculateTeamAccesses(ctx, repo, 0); err != nil { + return fmt.Errorf("recalculateTeamAccesses: %w", err) + } + } - repo.IsPrivate = isPrivate + // Create/Remove git-daemon-export-ok for git-daemon... 
+ if err := checkDaemonExportOK(ctx, repo); err != nil { + return err + } - if err = repo_module.UpdateRepository(ctx, repo, true); err != nil { - return fmt.Errorf("UpdateRepositoryVisibility: %w", err) - } + forkRepos, err := repo_model.GetRepositoriesByForkID(ctx, repo.ID) + if err != nil { + return fmt.Errorf("getRepositoriesByForkID: %w", err) + } - return committer.Commit() -} + if repo.Owner.Visibility != structs.VisibleTypePrivate { + for i := range forkRepos { + if err = MakeRepoPublic(ctx, forkRepos[i]); err != nil { + return fmt.Errorf("MakeRepoPublic[%d]: %w", forkRepos[i].ID, err) + } + } + } -func MakeRepoPublic(ctx context.Context, repo *repo_model.Repository) (err error) { - return UpdateRepositoryVisibility(ctx, repo, false) + // If visibility is changed, we need to update the issue indexer. + // Since the data in the issue indexer have field to indicate if the repo is public or not. + issue_indexer.UpdateRepoIndexer(ctx, repo.ID) + + return nil + }) } func MakeRepoPrivate(ctx context.Context, repo *repo_model.Repository) (err error) { - return UpdateRepositoryVisibility(ctx, repo, true) + return db.WithTx(ctx, func(ctx context.Context) error { + repo.IsPrivate = true + if err := repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "is_private"); err != nil { + return err + } + + if err = repo.LoadOwner(ctx); err != nil { + return fmt.Errorf("LoadOwner: %w", err) + } + if repo.Owner.IsOrganization() { + // Organization repository need to recalculate access table when visibility is changed. + if err = access_model.RecalculateTeamAccesses(ctx, repo, 0); err != nil { + return fmt.Errorf("recalculateTeamAccesses: %w", err) + } + } + + // If repo has become private, we need to set its actions to private. + _, err = db.GetEngine(ctx).Where("repo_id = ?", repo.ID).Cols("is_private").Update(&activities_model.Action{ + IsPrivate: true, + }) + if err != nil { + return err + } + + if err = repo_model.ClearRepoStars(ctx, repo.ID); err != nil { + return err + } + + // Create/Remove git-daemon-export-ok for git-daemon... + if err := checkDaemonExportOK(ctx, repo); err != nil { + return err + } + + forkRepos, err := repo_model.GetRepositoriesByForkID(ctx, repo.ID) + if err != nil { + return fmt.Errorf("getRepositoriesByForkID: %w", err) + } + for i := range forkRepos { + if err = MakeRepoPrivate(ctx, forkRepos[i]); err != nil { + return fmt.Errorf("MakeRepoPrivate[%d]: %w", forkRepos[i].ID, err) + } + } + + // If visibility is changed, we need to update the issue indexer. + // Since the data in the issue indexer have field to indicate if the repo is public or not. + issue_indexer.UpdateRepoIndexer(ctx, repo.ID) + + return nil + }) } // LinkedRepository returns the linked repo if any @@ -172,3 +242,97 @@ func LinkedRepository(ctx context.Context, a *repo_model.Attachment) (*repo_mode } return nil, -1, nil } + +// checkDaemonExportOK creates/removes git-daemon-export-ok for git-daemon... +func checkDaemonExportOK(ctx context.Context, repo *repo_model.Repository) error { + if err := repo.LoadOwner(ctx); err != nil { + return err + } + + // Create/Remove git-daemon-export-ok for git-daemon... + daemonExportFile := filepath.Join(repo.RepoPath(), `git-daemon-export-ok`) + + isExist, err := util.IsExist(daemonExportFile) + if err != nil { + log.Error("Unable to check if %s exists. 
Error: %v", daemonExportFile, err) + return err + } + + isPublic := !repo.IsPrivate && repo.Owner.Visibility == structs.VisibleTypePublic + if !isPublic && isExist { + if err = util.Remove(daemonExportFile); err != nil { + log.Error("Failed to remove %s: %v", daemonExportFile, err) + } + } else if isPublic && !isExist { + if f, err := os.Create(daemonExportFile); err != nil { + log.Error("Failed to create %s: %v", daemonExportFile, err) + } else { + f.Close() + } + } + + return nil +} + +// updateRepository updates a repository with db context +func updateRepository(ctx context.Context, repo *repo_model.Repository, visibilityChanged bool) (err error) { + repo.LowerName = strings.ToLower(repo.Name) + + e := db.GetEngine(ctx) + + if _, err = e.ID(repo.ID).NoAutoTime().AllCols().Update(repo); err != nil { + return fmt.Errorf("update: %w", err) + } + + if err = repo_module.UpdateRepoSize(ctx, repo); err != nil { + log.Error("Failed to update size for repository: %v", err) + } + + if visibilityChanged { + if err = repo.LoadOwner(ctx); err != nil { + return fmt.Errorf("LoadOwner: %w", err) + } + if repo.Owner.IsOrganization() { + // Organization repository need to recalculate access table when visibility is changed. + if err = access_model.RecalculateTeamAccesses(ctx, repo, 0); err != nil { + return fmt.Errorf("recalculateTeamAccesses: %w", err) + } + } + + // If repo has become private, we need to set its actions to private. + if repo.IsPrivate { + _, err = e.Where("repo_id = ?", repo.ID).Cols("is_private").Update(&activities_model.Action{ + IsPrivate: true, + }) + if err != nil { + return err + } + + if err = repo_model.ClearRepoStars(ctx, repo.ID); err != nil { + return err + } + } + + // Create/Remove git-daemon-export-ok for git-daemon... + if err := checkDaemonExportOK(ctx, repo); err != nil { + return err + } + + forkRepos, err := repo_model.GetRepositoriesByForkID(ctx, repo.ID) + if err != nil { + return fmt.Errorf("getRepositoriesByForkID: %w", err) + } + for i := range forkRepos { + forkRepos[i].IsPrivate = repo.IsPrivate || repo.Owner.Visibility == structs.VisibleTypePrivate + if err = updateRepository(ctx, forkRepos[i], true); err != nil { + return fmt.Errorf("updateRepository[%d]: %w", forkRepos[i].ID, err) + } + } + + // If visibility is changed, we need to update the issue indexer. + // Since the data in the issue indexer have field to indicate if the repo is public or not. 
+ issue_indexer.UpdateRepoIndexer(ctx, repo.ID) + } + + return nil +} diff --git a/services/repository/repository_test.go b/services/repository/repository_test.go index 892a11a23e..8f9fdf8fa1 100644 --- a/services/repository/repository_test.go +++ b/services/repository/repository_test.go @@ -6,6 +6,7 @@ package repository import ( "testing" + activities_model "code.gitea.io/gitea/models/activities" "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" @@ -40,3 +41,23 @@ func TestLinkedRepository(t *testing.T) { }) } } + +func TestUpdateRepositoryVisibilityChanged(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + // Get sample repo and change visibility + repo, err := repo_model.GetRepositoryByID(db.DefaultContext, 9) + assert.NoError(t, err) + repo.IsPrivate = true + + // Update it + err = updateRepository(db.DefaultContext, repo, true) + assert.NoError(t, err) + + // Check visibility of action has become private + act := activities_model.Action{} + _, err = db.GetEngine(db.DefaultContext).ID(3).Get(&act) + + assert.NoError(t, err) + assert.True(t, act.IsPrivate) +} diff --git a/services/repository/setting.go b/services/repository/setting.go index e0c787dd2d..b6873691eb 100644 --- a/services/repository/setting.go +++ b/services/repository/setting.go @@ -16,41 +16,37 @@ import ( // UpdateRepositoryUnits updates a repository's units func UpdateRepositoryUnits(ctx context.Context, repo *repo_model.Repository, units []repo_model.RepoUnit, deleteUnitTypes []unit.Type) (err error) { - ctx, committer, err := db.TxContext(ctx) - if err != nil { - return err - } - defer committer.Close() - - // Delete existing settings of units before adding again - for _, u := range units { - deleteUnitTypes = append(deleteUnitTypes, u.Type) - } - - if slices.Contains(deleteUnitTypes, unit.TypeActions) { - if err := actions_service.CleanRepoScheduleTasks(ctx, repo); err != nil { - log.Error("CleanRepoScheduleTasks: %v", err) + return db.WithTx(ctx, func(ctx context.Context) error { + // Delete existing settings of units before adding again + for _, u := range units { + deleteUnitTypes = append(deleteUnitTypes, u.Type) } - } - for _, u := range units { - if u.Type == unit.TypeActions { - if err := actions_service.DetectAndHandleSchedules(ctx, repo); err != nil { - log.Error("DetectAndHandleSchedules: %v", err) + if slices.Contains(deleteUnitTypes, unit.TypeActions) { + if err := actions_service.CleanRepoScheduleTasks(ctx, repo); err != nil { + log.Error("CleanRepoScheduleTasks: %v", err) } - break } - } - if _, err = db.GetEngine(ctx).Where("repo_id = ?", repo.ID).In("type", deleteUnitTypes).Delete(new(repo_model.RepoUnit)); err != nil { - return err - } + for _, u := range units { + if u.Type == unit.TypeActions { + if err := actions_service.DetectAndHandleSchedules(ctx, repo); err != nil { + log.Error("DetectAndHandleSchedules: %v", err) + } + break + } + } - if len(units) > 0 { - if err = db.Insert(ctx, units); err != nil { + if _, err = db.GetEngine(ctx).Where("repo_id = ?", repo.ID).In("type", deleteUnitTypes).Delete(new(repo_model.RepoUnit)); err != nil { return err } - } - return committer.Commit() + if len(units) > 0 { + if err = db.Insert(ctx, units); err != nil { + return err + } + } + + return nil + }) } diff --git a/services/repository/template.go b/services/repository/template.go index 36a680c8e2..6906a60083 100644 --- a/services/repository/template.go +++ b/services/repository/template.go @@ -5,12 +5,17 @@ package 
repository import ( "context" + "fmt" + "strings" "code.gitea.io/gitea/models/db" git_model "code.gitea.io/gitea/models/git" issues_model "code.gitea.io/gitea/models/issues" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/gitrepo" + "code.gitea.io/gitea/modules/log" notify_service "code.gitea.io/gitea/services/notify" ) @@ -63,70 +68,124 @@ func GenerateProtectedBranch(ctx context.Context, templateRepo, generateRepo *re // GenerateRepository generates a repository from a template func GenerateRepository(ctx context.Context, doer, owner *user_model.User, templateRepo *repo_model.Repository, opts GenerateRepoOptions) (_ *repo_model.Repository, err error) { - if !doer.IsAdmin && !owner.CanCreateRepo() { + if !doer.CanCreateRepoIn(owner) { return nil, repo_model.ErrReachLimitOfRepo{ Limit: owner.MaxRepoCreation, } } - var generateRepo *repo_model.Repository - if err = db.WithTx(ctx, func(ctx context.Context) error { - generateRepo, err = generateRepository(ctx, doer, owner, templateRepo, opts) + generateRepo := &repo_model.Repository{ + OwnerID: owner.ID, + Owner: owner, + OwnerName: owner.Name, + Name: opts.Name, + LowerName: strings.ToLower(opts.Name), + Description: opts.Description, + DefaultBranch: opts.DefaultBranch, + IsPrivate: opts.Private, + IsEmpty: !opts.GitContent || templateRepo.IsEmpty, + IsFsckEnabled: templateRepo.IsFsckEnabled, + TemplateID: templateRepo.ID, + TrustModel: templateRepo.TrustModel, + ObjectFormatName: templateRepo.ObjectFormatName, + Status: repo_model.RepositoryBeingMigrated, + } + + // 1 - Create the repository in the database + if err := db.WithTx(ctx, func(ctx context.Context) error { + return createRepositoryInDB(ctx, doer, owner, generateRepo, false) + }); err != nil { + return nil, err + } + + // last - clean up the repository if something goes wrong + defer func() { if err != nil { - return err + // we can not use the ctx because it maybe canceled or timeout + cleanupRepository(generateRepo.ID) } + }() - // Git Content - if opts.GitContent && !templateRepo.IsEmpty { - if err = GenerateGitContent(ctx, templateRepo, generateRepo); err != nil { - return err - } + // 2 - check whether the repository with the same storage exists + isExist, err := gitrepo.IsRepositoryExist(ctx, generateRepo) + if err != nil { + log.Error("Unable to check if %s exists. Error: %v", generateRepo.FullName(), err) + return nil, err + } + if isExist { + // Don't return directly, we need err in defer to cleanupRepository + err = repo_model.ErrRepoFilesAlreadyExist{ + Uname: generateRepo.OwnerName, + Name: generateRepo.Name, } + return nil, err + } - // Topics - if opts.Topics { - if err = repo_model.GenerateTopics(ctx, templateRepo, generateRepo); err != nil { - return err - } + // 3 -Init git bare new repository. 
+ if err = git.InitRepository(ctx, generateRepo.RepoPath(), true, generateRepo.ObjectFormatName); err != nil { + return nil, fmt.Errorf("git.InitRepository: %w", err) + } else if err = gitrepo.CreateDelegateHooks(ctx, generateRepo); err != nil { + return nil, fmt.Errorf("createDelegateHooks: %w", err) + } + + // 4 - Update the git repository + if err = updateGitRepoAfterCreate(ctx, generateRepo); err != nil { + return nil, fmt.Errorf("updateGitRepoAfterCreate: %w", err) + } + + // 5 - generate the repository contents according to the template + // Git Content + if opts.GitContent && !templateRepo.IsEmpty { + if err = GenerateGitContent(ctx, templateRepo, generateRepo); err != nil { + return nil, err } + } - // Git Hooks - if opts.GitHooks { - if err = GenerateGitHooks(ctx, templateRepo, generateRepo); err != nil { - return err - } + // Topics + if opts.Topics { + if err = repo_model.GenerateTopics(ctx, templateRepo, generateRepo); err != nil { + return nil, err } + } - // Webhooks - if opts.Webhooks { - if err = GenerateWebhooks(ctx, templateRepo, generateRepo); err != nil { - return err - } + // Git Hooks + if opts.GitHooks { + if err = GenerateGitHooks(ctx, templateRepo, generateRepo); err != nil { + return nil, err } + } - // Avatar - if opts.Avatar && len(templateRepo.Avatar) > 0 { - if err = generateAvatar(ctx, templateRepo, generateRepo); err != nil { - return err - } + // Webhooks + if opts.Webhooks { + if err = GenerateWebhooks(ctx, templateRepo, generateRepo); err != nil { + return nil, err } + } - // Issue Labels - if opts.IssueLabels { - if err = GenerateIssueLabels(ctx, templateRepo, generateRepo); err != nil { - return err - } + // Avatar + if opts.Avatar && len(templateRepo.Avatar) > 0 { + if err = generateAvatar(ctx, templateRepo, generateRepo); err != nil { + return nil, err } + } - if opts.ProtectedBranch { - if err = GenerateProtectedBranch(ctx, templateRepo, generateRepo); err != nil { - return err - } + // Issue Labels + if opts.IssueLabels { + if err = GenerateIssueLabels(ctx, templateRepo, generateRepo); err != nil { + return nil, err } + } - return nil - }); err != nil { - return nil, err + if opts.ProtectedBranch { + if err = GenerateProtectedBranch(ctx, templateRepo, generateRepo); err != nil { + return nil, err + } + } + + // 6 - update repository status to be ready + generateRepo.Status = repo_model.RepositoryReady + if err = repo_model.UpdateRepositoryColsWithAutoTime(ctx, generateRepo, "status"); err != nil { + return nil, fmt.Errorf("UpdateRepositoryCols: %w", err) } notify_service.CreateRepository(ctx, doer, owner, generateRepo) diff --git a/services/repository/transfer.go b/services/repository/transfer.go index a589bc469d..5ad63cca67 100644 --- a/services/repository/transfer.go +++ b/services/repository/transfer.go @@ -20,10 +20,22 @@ import ( "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/globallock" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" notify_service "code.gitea.io/gitea/services/notify" ) +type LimitReachedError struct{ Limit int } + +func (LimitReachedError) Error() string { + return "Repository limit has been reached" +} + +func IsRepositoryLimitReached(err error) bool { + _, ok := err.(LimitReachedError) + return ok +} + func getRepoWorkingLockKey(repoID int64) string { return fmt.Sprintf("repo_working_%d", repoID) } @@ -49,6 +61,11 @@ func AcceptTransferOwnership(ctx context.Context, repo *repo_model.Repository, d return err } + if 
!doer.CanCreateRepoIn(repoTransfer.Recipient) { + limit := util.Iif(repoTransfer.Recipient.MaxRepoCreation >= 0, repoTransfer.Recipient.MaxRepoCreation, setting.Repository.MaxCreationLimit) + return LimitReachedError{Limit: limit} + } + if !repoTransfer.CanUserAcceptOrRejectTransfer(ctx, doer) { return util.ErrPermissionDenied } @@ -143,7 +160,7 @@ func transferOwnership(ctx context.Context, doer *user_model.User, newOwnerName repo.OwnerName = newOwner.Name // Update repository. - if err := repo_model.UpdateRepositoryCols(ctx, repo, "owner_id", "owner_name"); err != nil { + if err := repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "owner_id", "owner_name"); err != nil { return fmt.Errorf("update owner: %w", err) } @@ -287,7 +304,7 @@ func transferOwnership(ctx context.Context, doer *user_model.User, newOwnerName return fmt.Errorf("deleteRepositoryTransfer: %w", err) } repo.Status = repo_model.RepositoryReady - if err := repo_model.UpdateRepositoryCols(ctx, repo, "status"); err != nil { + if err := repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "status"); err != nil { return err } @@ -399,6 +416,11 @@ func StartRepositoryTransfer(ctx context.Context, doer, newOwner *user_model.Use return err } + if !doer.CanForkRepoIn(newOwner) { + limit := util.Iif(newOwner.MaxRepoCreation >= 0, newOwner.MaxRepoCreation, setting.Repository.MaxCreationLimit) + return LimitReachedError{Limit: limit} + } + var isDirectTransfer bool oldOwnerName := repo.OwnerName @@ -473,7 +495,7 @@ func RejectRepositoryTransfer(ctx context.Context, repo *repo_model.Repository, } repo.Status = repo_model.RepositoryReady - if err := repo_model.UpdateRepositoryCols(ctx, repo, "status"); err != nil { + if err := repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "status"); err != nil { return err } @@ -521,7 +543,7 @@ func CancelRepositoryTransfer(ctx context.Context, repoTransfer *repo_model.Repo } repoTransfer.Repo.Status = repo_model.RepositoryReady - if err := repo_model.UpdateRepositoryCols(ctx, repoTransfer.Repo, "status"); err != nil { + if err := repo_model.UpdateRepositoryColsNoAutoTime(ctx, repoTransfer.Repo, "status"); err != nil { return err } diff --git a/services/repository/transfer_test.go b/services/repository/transfer_test.go index 16a8fb6e1e..80a073e9f9 100644 --- a/services/repository/transfer_test.go +++ b/services/repository/transfer_test.go @@ -1,4 +1,4 @@ -// Copyright 2019 The Gitea Authors. All rights reserved. +// Copyright 2025 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT package repository @@ -14,11 +14,14 @@ import ( repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/test" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/services/feed" notify_service "code.gitea.io/gitea/services/notify" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) var notifySync sync.Once @@ -125,3 +128,40 @@ func TestRepositoryTransfer(t *testing.T) { err = RejectRepositoryTransfer(db.DefaultContext, repo2, doer) assert.True(t, repo_model.IsErrNoPendingTransfer(err)) } + +// Test transfer rejections +func TestRepositoryTransferRejection(t *testing.T) { + require.NoError(t, unittest.PrepareTestDatabase()) + // Set limit to 0 repositories so no repositories can be transferred + defer test.MockVariableValue(&setting.Repository.MaxCreationLimit, 0)() + + // Admin case + doerAdmin := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1}) + repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 5}) + + transfer, err := repo_model.GetPendingRepositoryTransfer(db.DefaultContext, repo) + require.NoError(t, err) + require.NotNil(t, transfer) + require.NoError(t, transfer.LoadRecipient(db.DefaultContext)) + + require.True(t, doerAdmin.CanCreateRepoIn(transfer.Recipient)) // admin is not subject to limits + + // Administrator should not be affected by the limits so transfer should be successful + assert.NoError(t, AcceptTransferOwnership(db.DefaultContext, repo, doerAdmin)) + + // Non admin user case + doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 10}) + repo = unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 21}) + + transfer, err = repo_model.GetPendingRepositoryTransfer(db.DefaultContext, repo) + require.NoError(t, err) + require.NotNil(t, transfer) + require.NoError(t, transfer.LoadRecipient(db.DefaultContext)) + + require.False(t, doer.CanCreateRepoIn(transfer.Recipient)) // regular user is subject to limits + + // Cannot accept because of the limit + err = AcceptTransferOwnership(db.DefaultContext, repo, doer) + assert.Error(t, err) + assert.True(t, IsRepositoryLimitReached(err)) +} diff --git a/services/task/task.go b/services/task/task.go index c90ee91270..ee5fa1f348 100644 --- a/services/task/task.go +++ b/services/task/task.go @@ -5,6 +5,7 @@ package task import ( "context" + "errors" "fmt" admin_model "code.gitea.io/gitea/models/admin" @@ -41,7 +42,7 @@ func Run(ctx context.Context, t *admin_model.Task) error { func Init() error { taskQueue = queue.CreateSimpleQueue(graceful.GetManager().ShutdownContext(), "task", handler) if taskQueue == nil { - return fmt.Errorf("unable to create task queue") + return errors.New("unable to create task queue") } go graceful.GetManager().RunWithCancel(taskQueue) return nil @@ -110,7 +111,7 @@ func CreateMigrateTask(ctx context.Context, doer, u *user_model.User, opts base. 
IsPrivate: opts.Private || setting.Repository.ForcePrivate, IsMirror: opts.Mirror, Status: repo_model.RepositoryBeingMigrated, - }) + }, false) if err != nil { task.EndTime = timeutil.TimeStampNow() task.Status = structs.TaskStatusFailed diff --git a/services/user/avatar.go b/services/user/avatar.go index 3f87466eaa..df188e5adc 100644 --- a/services/user/avatar.go +++ b/services/user/avatar.go @@ -24,26 +24,22 @@ func UploadAvatar(ctx context.Context, u *user_model.User, data []byte) error { return err } - ctx, committer, err := db.TxContext(ctx) - if err != nil { - return err - } - defer committer.Close() - - u.UseCustomAvatar = true - u.Avatar = avatar.HashAvatar(u.ID, data) - if err = user_model.UpdateUserCols(ctx, u, "use_custom_avatar", "avatar"); err != nil { - return fmt.Errorf("updateUser: %w", err) - } + return db.WithTx(ctx, func(ctx context.Context) error { + u.UseCustomAvatar = true + u.Avatar = avatar.HashAvatar(u.ID, data) + if err = user_model.UpdateUserCols(ctx, u, "use_custom_avatar", "avatar"); err != nil { + return fmt.Errorf("updateUser: %w", err) + } - if err := storage.SaveFrom(storage.Avatars, u.CustomAvatarRelativePath(), func(w io.Writer) error { - _, err := w.Write(avatarData) - return err - }); err != nil { - return fmt.Errorf("Failed to create dir %s: %w", u.CustomAvatarRelativePath(), err) - } + if err := storage.SaveFrom(storage.Avatars, u.CustomAvatarRelativePath(), func(w io.Writer) error { + _, err := w.Write(avatarData) + return err + }); err != nil { + return fmt.Errorf("Failed to create dir %s: %w", u.CustomAvatarRelativePath(), err) + } - return committer.Commit() + return nil + }) } // DeleteAvatar deletes the user's custom avatar. diff --git a/services/user/update.go b/services/user/update.go index 4a39f4f783..d7354542bf 100644 --- a/services/user/update.go +++ b/services/user/update.go @@ -15,6 +15,26 @@ import ( "code.gitea.io/gitea/modules/structs" ) +type UpdateOptionField[T any] struct { + FieldValue T + FromSync bool +} + +func UpdateOptionFieldFromValue[T any](value T) optional.Option[UpdateOptionField[T]] { + return optional.Some(UpdateOptionField[T]{FieldValue: value}) +} + +func UpdateOptionFieldFromSync[T any](value T) optional.Option[UpdateOptionField[T]] { + return optional.Some(UpdateOptionField[T]{FieldValue: value, FromSync: true}) +} + +func UpdateOptionFieldFromPtr[T any](value *T) optional.Option[UpdateOptionField[T]] { + if value == nil { + return optional.None[UpdateOptionField[T]]() + } + return UpdateOptionFieldFromValue(*value) +} + type UpdateOptions struct { KeepEmailPrivate optional.Option[bool] FullName optional.Option[string] @@ -32,7 +52,7 @@ type UpdateOptions struct { DiffViewStyle optional.Option[string] AllowCreateOrganization optional.Option[bool] IsActive optional.Option[bool] - IsAdmin optional.Option[bool] + IsAdmin optional.Option[UpdateOptionField[bool]] EmailNotificationsPreference optional.Option[string] SetLastLogin bool RepoAdminChangeTeamAccess optional.Option[bool] @@ -111,13 +131,18 @@ func UpdateUser(ctx context.Context, u *user_model.User, opts *UpdateOptions) er cols = append(cols, "is_restricted") } if opts.IsAdmin.Has() { - if !opts.IsAdmin.Value() && user_model.IsLastAdminUser(ctx, u) { - return user_model.ErrDeleteLastAdminUser{UID: u.ID} + if opts.IsAdmin.Value().FieldValue /* true */ { + u.IsAdmin = opts.IsAdmin.Value().FieldValue // set IsAdmin=true + cols = append(cols, "is_admin") + } else if !user_model.IsLastAdminUser(ctx, u) /* not the last admin */ { + u.IsAdmin = 
opts.IsAdmin.Value().FieldValue // it's safe to change it from false to true (not the last admin) + cols = append(cols, "is_admin") + } else /* IsAdmin=false but this is the last admin user */ { //nolint:gocritic // make it easier to read + if !opts.IsAdmin.Value().FromSync { + return user_model.ErrDeleteLastAdminUser{UID: u.ID} + } + // else: syncing from external-source, this user is the last admin, so skip the "IsAdmin=false" change } - - u.IsAdmin = opts.IsAdmin.Value() - - cols = append(cols, "is_admin") } if opts.Visibility.Has() { diff --git a/services/user/update_test.go b/services/user/update_test.go index fc24a6c212..27513e8040 100644 --- a/services/user/update_test.go +++ b/services/user/update_test.go @@ -22,7 +22,11 @@ func TestUpdateUser(t *testing.T) { admin := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1}) assert.Error(t, UpdateUser(db.DefaultContext, admin, &UpdateOptions{ - IsAdmin: optional.Some(false), + IsAdmin: UpdateOptionFieldFromValue(false), + })) + + assert.NoError(t, UpdateUser(db.DefaultContext, admin, &UpdateOptions{ + IsAdmin: UpdateOptionFieldFromSync(false), })) user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 28}) @@ -38,7 +42,7 @@ func TestUpdateUser(t *testing.T) { MaxRepoCreation: optional.Some(10), IsRestricted: optional.Some(true), IsActive: optional.Some(false), - IsAdmin: optional.Some(true), + IsAdmin: UpdateOptionFieldFromValue(true), Visibility: optional.Some(structs.VisibleTypePrivate), KeepActivityPrivate: optional.Some(true), Language: optional.Some("lang"), @@ -60,7 +64,7 @@ func TestUpdateUser(t *testing.T) { assert.Equal(t, opts.MaxRepoCreation.Value(), user.MaxRepoCreation) assert.Equal(t, opts.IsRestricted.Value(), user.IsRestricted) assert.Equal(t, opts.IsActive.Value(), user.IsActive) - assert.Equal(t, opts.IsAdmin.Value(), user.IsAdmin) + assert.Equal(t, opts.IsAdmin.Value().FieldValue, user.IsAdmin) assert.Equal(t, opts.Visibility.Value(), user.Visibility) assert.Equal(t, opts.KeepActivityPrivate.Value(), user.KeepActivityPrivate) assert.Equal(t, opts.Language.Value(), user.Language) @@ -80,7 +84,7 @@ func TestUpdateUser(t *testing.T) { assert.Equal(t, opts.MaxRepoCreation.Value(), user.MaxRepoCreation) assert.Equal(t, opts.IsRestricted.Value(), user.IsRestricted) assert.Equal(t, opts.IsActive.Value(), user.IsActive) - assert.Equal(t, opts.IsAdmin.Value(), user.IsAdmin) + assert.Equal(t, opts.IsAdmin.Value().FieldValue, user.IsAdmin) assert.Equal(t, opts.Visibility.Value(), user.Visibility) assert.Equal(t, opts.KeepActivityPrivate.Value(), user.KeepActivityPrivate) assert.Equal(t, opts.Language.Value(), user.Language) diff --git a/services/user/user.go b/services/user/user.go index 1aeebff142..c7252430de 100644 --- a/services/user/user.go +++ b/services/user/user.go @@ -20,6 +20,7 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/storage" + "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/services/agit" asymkey_service "code.gitea.io/gitea/services/asymkey" @@ -177,8 +178,8 @@ func DeleteUser(ctx context.Context, u *user_model.User, purge bool) error { PageSize: repo_model.RepositoryListDefaultPageSize, Page: 1, }, - UserID: u.ID, - IncludePrivate: true, + UserID: u.ID, + IncludeVisibility: structs.VisibleTypePrivate, }) if err != nil { return fmt.Errorf("unable to find org list for %s[%d]. 
Error: %w", u.Name, u.ID, err) diff --git a/services/user/user_test.go b/services/user/user_test.go index 162a735cd4..28a0df8628 100644 --- a/services/user/user_test.go +++ b/services/user/user_test.go @@ -150,7 +150,7 @@ func TestRenameUser(t *testing.T) { redirectUID, err := user_model.LookupUserRedirect(db.DefaultContext, oldUsername) assert.NoError(t, err) - assert.EqualValues(t, user.ID, redirectUID) + assert.Equal(t, user.ID, redirectUID) unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerID: user.ID, OwnerName: user.Name}) }) diff --git a/services/versioned_migration/migration.go b/services/versioned_migration/migration.go index daec89d7c1..b66d853531 100644 --- a/services/versioned_migration/migration.go +++ b/services/versioned_migration/migration.go @@ -1,7 +1,7 @@ // Copyright 2025 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package versioned_migration //nolint +package versioned_migration import ( "context" diff --git a/services/webhook/deliver.go b/services/webhook/deliver.go index df32d5741e..e8e6ed19c1 100644 --- a/services/webhook/deliver.go +++ b/services/webhook/deliver.go @@ -10,6 +10,7 @@ import ( "crypto/sha256" "crypto/tls" "encoding/hex" + "errors" "fmt" "io" "net/http" @@ -41,7 +42,7 @@ func newDefaultRequest(ctx context.Context, w *webhook_model.Webhook, t *webhook case http.MethodPost: switch w.ContentType { case webhook_model.ContentTypeJSON: - req, err = http.NewRequest("POST", w.URL, strings.NewReader(t.PayloadContent)) + req, err = http.NewRequest(http.MethodPost, w.URL, strings.NewReader(t.PayloadContent)) if err != nil { return nil, nil, err } @@ -52,7 +53,7 @@ func newDefaultRequest(ctx context.Context, w *webhook_model.Webhook, t *webhook "payload": []string{t.PayloadContent}, } - req, err = http.NewRequest("POST", w.URL, strings.NewReader(forms.Encode())) + req, err = http.NewRequest(http.MethodPost, w.URL, strings.NewReader(forms.Encode())) if err != nil { return nil, nil, err } @@ -69,7 +70,7 @@ func newDefaultRequest(ctx context.Context, w *webhook_model.Webhook, t *webhook vals := u.Query() vals["payload"] = []string{t.PayloadContent} u.RawQuery = vals.Encode() - req, err = http.NewRequest("GET", u.String(), nil) + req, err = http.NewRequest(http.MethodGet, u.String(), nil) if err != nil { return nil, nil, err } @@ -81,7 +82,7 @@ func newDefaultRequest(ctx context.Context, w *webhook_model.Webhook, t *webhook return nil, nil, err } url := fmt.Sprintf("%s/%s", w.URL, url.PathEscape(txnID)) - req, err = http.NewRequest("PUT", url, strings.NewReader(t.PayloadContent)) + req, err = http.NewRequest(http.MethodPut, url, strings.NewReader(t.PayloadContent)) if err != nil { return nil, nil, err } @@ -328,7 +329,7 @@ func Init() error { hookQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "webhook_sender", handler) if hookQueue == nil { - return fmt.Errorf("unable to create webhook_sender queue") + return errors.New("unable to create webhook_sender queue") } go graceful.GetManager().RunWithCancel(hookQueue) diff --git a/services/webhook/deliver_test.go b/services/webhook/deliver_test.go index 6a74b1455c..1d32d7b772 100644 --- a/services/webhook/deliver_test.go +++ b/services/webhook/deliver_test.go @@ -64,7 +64,7 @@ func TestWebhookProxy(t *testing.T) { } for _, tt := range tests { t.Run(tt.req, func(t *testing.T) { - req, err := http.NewRequest("POST", tt.req, nil) + req, err := http.NewRequest(http.MethodPost, tt.req, nil) require.NoError(t, err) u, err := webhookProxy(allowedHostMatcher)(req) 
@@ -91,7 +91,7 @@ func TestWebhookDeliverAuthorizationHeader(t *testing.T) { s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { assert.Equal(t, "/webhook", r.URL.Path) assert.Equal(t, "Bearer s3cr3t-t0ken", r.Header.Get("Authorization")) - w.WriteHeader(200) + w.WriteHeader(http.StatusOK) done <- struct{}{} })) t.Cleanup(s.Close) @@ -138,7 +138,7 @@ func TestWebhookDeliverHookTask(t *testing.T) { case "/webhook/66d222a5d6349e1311f551e50722d837e30fce98": // Version 1 assert.Equal(t, "push", r.Header.Get("X-GitHub-Event")) - assert.Equal(t, "", r.Header.Get("Content-Type")) + assert.Empty(t, r.Header.Get("Content-Type")) body, err := io.ReadAll(r.Body) assert.NoError(t, err) assert.Equal(t, `{"data": 42}`, string(body)) @@ -152,11 +152,11 @@ func TestWebhookDeliverHookTask(t *testing.T) { assert.Len(t, body, 2147) default: - w.WriteHeader(404) + w.WriteHeader(http.StatusNotFound) t.Fatalf("unexpected url path %s", r.URL.Path) return } - w.WriteHeader(200) + w.WriteHeader(http.StatusOK) done <- struct{}{} })) t.Cleanup(s.Close) diff --git a/services/webhook/dingtalk.go b/services/webhook/dingtalk.go index 5afca8d65a..5bbc610fe5 100644 --- a/services/webhook/dingtalk.go +++ b/services/webhook/dingtalk.go @@ -30,7 +30,7 @@ func (dc dingtalkConvertor) Create(p *api.CreatePayload) (DingtalkPayload, error refName := git.RefName(p.Ref).ShortName() title := fmt.Sprintf("[%s] %s %s created", p.Repo.FullName, p.RefType, refName) - return createDingtalkPayload(title, title, fmt.Sprintf("view ref %s", refName), p.Repo.HTMLURL+"/src/"+util.PathEscapeSegments(refName)), nil + return createDingtalkPayload(title, title, "view ref "+refName, p.Repo.HTMLURL+"/src/"+util.PathEscapeSegments(refName)), nil } // Delete implements PayloadConvertor Delete method @@ -39,14 +39,14 @@ func (dc dingtalkConvertor) Delete(p *api.DeletePayload) (DingtalkPayload, error refName := git.RefName(p.Ref).ShortName() title := fmt.Sprintf("[%s] %s %s deleted", p.Repo.FullName, p.RefType, refName) - return createDingtalkPayload(title, title, fmt.Sprintf("view ref %s", refName), p.Repo.HTMLURL+"/src/"+util.PathEscapeSegments(refName)), nil + return createDingtalkPayload(title, title, "view ref "+refName, p.Repo.HTMLURL+"/src/"+util.PathEscapeSegments(refName)), nil } // Fork implements PayloadConvertor Fork method func (dc dingtalkConvertor) Fork(p *api.ForkPayload) (DingtalkPayload, error) { title := fmt.Sprintf("%s is forked to %s", p.Forkee.FullName, p.Repo.FullName) - return createDingtalkPayload(title, title, fmt.Sprintf("view forked repo %s", p.Repo.FullName), p.Repo.HTMLURL), nil + return createDingtalkPayload(title, title, "view forked repo "+p.Repo.FullName, p.Repo.HTMLURL), nil } // Push implements PayloadConvertor Push method @@ -176,6 +176,12 @@ func (dc dingtalkConvertor) Status(p *api.CommitStatusPayload) (DingtalkPayload, return createDingtalkPayload(text, text, "Status Changed", p.TargetURL), nil } +func (dingtalkConvertor) WorkflowRun(p *api.WorkflowRunPayload) (DingtalkPayload, error) { + text, _ := getWorkflowRunPayloadInfo(p, noneLinkFormatter, true) + + return createDingtalkPayload(text, text, "Workflow Run", p.WorkflowRun.HTMLURL), nil +} + func (dingtalkConvertor) WorkflowJob(p *api.WorkflowJobPayload) (DingtalkPayload, error) { text, _ := getWorkflowJobPayloadInfo(p, noneLinkFormatter, true) diff --git a/services/webhook/discord.go b/services/webhook/discord.go index 0a7eb0b166..0426964181 100644 --- a/services/webhook/discord.go +++ b/services/webhook/discord.go @@ -101,6 
+101,13 @@ var ( redColor = color("ff3232") ) +// https://discord.com/developers/docs/resources/message#embed-object-embed-limits +// Discord has some limits in place for the embeds. +// According to some tests, there is no consistent limit for different character sets. +// For example: 4096 ASCII letters are allowed, but only 2490 emoji characters are allowed. +// To keep it simple, we currently truncate at 2000. +const discordDescriptionCharactersLimit = 2000 + type discordConvertor struct { Username string AvatarURL string @@ -271,6 +278,12 @@ func (d discordConvertor) Status(p *api.CommitStatusPayload) (DiscordPayload, er return d.createPayload(p.Sender, text, "", p.TargetURL, color), nil } +func (d discordConvertor) WorkflowRun(p *api.WorkflowRunPayload) (DiscordPayload, error) { + text, color := getWorkflowRunPayloadInfo(p, noneLinkFormatter, false) + + return d.createPayload(p.Sender, text, "", p.WorkflowRun.HTMLURL, color), nil +} + func (d discordConvertor) WorkflowJob(p *api.WorkflowJobPayload) (DiscordPayload, error) { text, color := getWorkflowJobPayloadInfo(p, noneLinkFormatter, false) @@ -298,7 +311,7 @@ func parseHookPullRequestEventType(event webhook_module.HookEventType) (string, case webhook_module.HookEventPullRequestReviewApproved: return "approved", nil case webhook_module.HookEventPullRequestReviewRejected: - return "rejected", nil + return "requested changes", nil case webhook_module.HookEventPullRequestReviewComment: return "comment", nil default: @@ -313,7 +326,7 @@ func (d discordConvertor) createPayload(s *api.User, title, text, url string, co Embeds: []DiscordEmbed{ { Title: title, - Description: text, + Description: util.TruncateRunes(text, discordDescriptionCharactersLimit), URL: url, Color: color, Author: DiscordEmbedAuthor{ diff --git a/services/webhook/feishu.go b/services/webhook/feishu.go index 274aaf90b3..b6ee80c44c 100644 --- a/services/webhook/feishu.go +++ b/services/webhook/feishu.go @@ -5,9 +5,13 @@ package webhook import ( "context" + "crypto/hmac" + "crypto/sha256" + "encoding/base64" "fmt" "net/http" "strings" + "time" webhook_model "code.gitea.io/gitea/models/webhook" "code.gitea.io/gitea/modules/git" @@ -16,10 +20,12 @@ import ( ) type ( - // FeishuPayload represents + // FeishuPayload represents the payload for Feishu webhook FeishuPayload struct { - MsgType string `json:"msg_type"` // text / post / image / share_chat / interactive / file /audio / media - Content struct { + Timestamp int64 `json:"timestamp,omitempty"` // Unix timestamp for signature verification + Sign string `json:"sign,omitempty"` // Signature for verification + MsgType string `json:"msg_type"` // text / post / image / share_chat / interactive / file /audio / media + Content struct { Text string `json:"text"` } `json:"content"` } @@ -172,15 +178,41 @@ func (fc feishuConvertor) Status(p *api.CommitStatusPayload) (FeishuPayload, err return newFeishuTextPayload(text), nil } +func (feishuConvertor) WorkflowRun(p *api.WorkflowRunPayload) (FeishuPayload, error) { + text, _ := getWorkflowRunPayloadInfo(p, noneLinkFormatter, true) + + return newFeishuTextPayload(text), nil +} + func (feishuConvertor) WorkflowJob(p *api.WorkflowJobPayload) (FeishuPayload, error) { text, _ := getWorkflowJobPayloadInfo(p, noneLinkFormatter, true) return newFeishuTextPayload(text), nil } +// feishuGenSign generates a signature for Feishu webhook +// https://open.feishu.cn/document/client-docs/bot-v3/add-custom-bot +func feishuGenSign(secret string, timestamp int64) string { + // 
key="{timestamp}\n{secret}", then hmac-sha256, then base64 encode + stringToSign := fmt.Sprintf("%d\n%s", timestamp, secret) + h := hmac.New(sha256.New, []byte(stringToSign)) + return base64.StdEncoding.EncodeToString(h.Sum(nil)) +} + func newFeishuRequest(_ context.Context, w *webhook_model.Webhook, t *webhook_model.HookTask) (*http.Request, []byte, error) { - var pc payloadConvertor[FeishuPayload] = feishuConvertor{} - return newJSONRequest(pc, w, t, true) + payload, err := newPayload(feishuConvertor{}, []byte(t.PayloadContent), t.EventType) + if err != nil { + return nil, nil, err + } + + // Add timestamp and signature if secret is provided + if w.Secret != "" { + timestamp := time.Now().Unix() + payload.Timestamp = timestamp + payload.Sign = feishuGenSign(w.Secret, timestamp) + } + + return prepareJSONRequest(payload, w, t, false /* no default headers */) } func init() { diff --git a/services/webhook/feishu_test.go b/services/webhook/feishu_test.go index c4249bdb30..7e200ea132 100644 --- a/services/webhook/feishu_test.go +++ b/services/webhook/feishu_test.go @@ -168,6 +168,7 @@ func TestFeishuJSONPayload(t *testing.T) { URL: "https://feishu.example.com/", Meta: `{}`, HTTPMethod: "POST", + Secret: "secret", } task := &webhook_model.HookTask{ HookID: hook.ID, @@ -183,10 +184,13 @@ func TestFeishuJSONPayload(t *testing.T) { assert.Equal(t, "POST", req.Method) assert.Equal(t, "https://feishu.example.com/", req.URL.String()) - assert.Equal(t, "sha256=", req.Header.Get("X-Hub-Signature-256")) assert.Equal(t, "application/json", req.Header.Get("Content-Type")) var body FeishuPayload err = json.NewDecoder(req.Body).Decode(&body) assert.NoError(t, err) assert.Equal(t, "[test/repo:test] \r\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1\r\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1", body.Content.Text) + assert.Equal(t, feishuGenSign(hook.Secret, body.Timestamp), body.Sign) + + // a separate sign test, the result is generated by official python code, so the algo must be correct + assert.Equal(t, "rWZ84lcag1x9aBFhn1gtV4ZN+4gme3pilfQNMk86vKg=", feishuGenSign("a", 1)) } diff --git a/services/webhook/general.go b/services/webhook/general.go index ea75038faf..be457e46f5 100644 --- a/services/webhook/general.go +++ b/services/webhook/general.go @@ -39,19 +39,20 @@ func getPullRequestInfo(p *api.PullRequestPayload) (title, link, by, operator, o for i, user := range assignList { assignStringList[i] = user.UserName } - if p.Action == api.HookIssueAssigned { + switch p.Action { + case api.HookIssueAssigned: operateResult = fmt.Sprintf("%s assign this to %s", p.Sender.UserName, assignList[len(assignList)-1].UserName) - } else if p.Action == api.HookIssueUnassigned { - operateResult = fmt.Sprintf("%s unassigned this for someone", p.Sender.UserName) - } else if p.Action == api.HookIssueMilestoned { + case api.HookIssueUnassigned: + operateResult = p.Sender.UserName + " unassigned this for someone" + case api.HookIssueMilestoned: operateResult = fmt.Sprintf("%s/milestone/%d", p.Repository.HTMLURL, p.PullRequest.Milestone.ID) } link = p.PullRequest.HTMLURL - by = fmt.Sprintf("PullRequest by %s", p.PullRequest.Poster.UserName) + by = "PullRequest by " + p.PullRequest.Poster.UserName if len(assignStringList) > 0 { - assignees = fmt.Sprintf("Assignees: %s", strings.Join(assignStringList, ", ")) + assignees = "Assignees: " + strings.Join(assignStringList, ", ") } - operator = 
fmt.Sprintf("Operator: %s", p.Sender.UserName) + operator = "Operator: " + p.Sender.UserName return title, link, by, operator, operateResult, assignees } @@ -64,19 +65,20 @@ func getIssuesInfo(p *api.IssuePayload) (issueTitle, link, by, operator, operate for i, user := range assignList { assignStringList[i] = user.UserName } - if p.Action == api.HookIssueAssigned { + switch p.Action { + case api.HookIssueAssigned: operateResult = fmt.Sprintf("%s assign this to %s", p.Sender.UserName, assignList[len(assignList)-1].UserName) - } else if p.Action == api.HookIssueUnassigned { - operateResult = fmt.Sprintf("%s unassigned this for someone", p.Sender.UserName) - } else if p.Action == api.HookIssueMilestoned { + case api.HookIssueUnassigned: + operateResult = p.Sender.UserName + " unassigned this for someone" + case api.HookIssueMilestoned: operateResult = fmt.Sprintf("%s/milestone/%d", p.Repository.HTMLURL, p.Issue.Milestone.ID) } link = p.Issue.HTMLURL - by = fmt.Sprintf("Issue by %s", p.Issue.Poster.UserName) + by = "Issue by " + p.Issue.Poster.UserName if len(assignStringList) > 0 { - assignees = fmt.Sprintf("Assignees: %s", strings.Join(assignStringList, ", ")) + assignees = "Assignees: " + strings.Join(assignStringList, ", ") } - operator = fmt.Sprintf("Operator: %s", p.Sender.UserName) + operator = "Operator: " + p.Sender.UserName return issueTitle, link, by, operator, operateResult, assignees } @@ -85,11 +87,11 @@ func getIssuesCommentInfo(p *api.IssueCommentPayload) (title, link, by, operator title = fmt.Sprintf("[Comment-%s #%d]: %s\n%s", p.Repository.FullName, p.Issue.Index, p.Action, p.Issue.Title) link = p.Issue.HTMLURL if p.IsPull { - by = fmt.Sprintf("PullRequest by %s", p.Issue.Poster.UserName) + by = "PullRequest by " + p.Issue.Poster.UserName } else { - by = fmt.Sprintf("Issue by %s", p.Issue.Poster.UserName) + by = "Issue by " + p.Issue.Poster.UserName } - operator = fmt.Sprintf("Operator: %s", p.Sender.UserName) + operator = "Operator: " + p.Sender.UserName return title, link, by, operator } @@ -133,7 +135,7 @@ func getIssuesPayloadInfo(p *api.IssuePayload, linkFormatter linkFormatter, with text = fmt.Sprintf("[%s] Issue milestone cleared: %s", repoLink, titleLink) } if withSender { - text += fmt.Sprintf(" by %s", linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName)) + text += " by " + linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName) } if p.Action == api.HookIssueOpened || p.Action == api.HookIssueEdited { @@ -198,7 +200,7 @@ func getPullRequestPayloadInfo(p *api.PullRequestPayload, linkFormatter linkForm text = fmt.Sprintf("[%s] Pull request review request removed: %s", repoLink, titleLink) } if withSender { - text += fmt.Sprintf(" by %s", linkFormatter(setting.AppURL+p.Sender.UserName, p.Sender.UserName)) + text += " by " + linkFormatter(setting.AppURL+p.Sender.UserName, p.Sender.UserName) } return text, issueTitle, extraMarkdown, color @@ -220,7 +222,7 @@ func getReleasePayloadInfo(p *api.ReleasePayload, linkFormatter linkFormatter, w color = redColor } if withSender { - text += fmt.Sprintf(" by %s", linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName)) + text += " by " + linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName) } return text, color @@ -249,7 +251,7 @@ func getWikiPayloadInfo(p *api.WikiPayload, linkFormatter linkFormatter, withSen } if withSender { - text += fmt.Sprintf(" by %s", 
linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName)) + text += " by " + linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName) } return text, color, pageLink @@ -285,7 +287,7 @@ func getIssueCommentPayloadInfo(p *api.IssueCommentPayload, linkFormatter linkFo color = redColor } if withSender { - text += fmt.Sprintf(" by %s", linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName)) + text += " by " + linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName) } return text, issueTitle, color @@ -296,14 +298,14 @@ func getPackagePayloadInfo(p *api.PackagePayload, linkFormatter linkFormatter, w switch p.Action { case api.HookPackageCreated: - text = fmt.Sprintf("Package created: %s", refLink) + text = "Package created: " + refLink color = greenColor case api.HookPackageDeleted: - text = fmt.Sprintf("Package deleted: %s", refLink) + text = "Package deleted: " + refLink color = redColor } if withSender { - text += fmt.Sprintf(" by %s", linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName)) + text += " by " + linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName) } return text, color @@ -316,15 +318,46 @@ func getStatusPayloadInfo(p *api.CommitStatusPayload, linkFormatter linkFormatte color = greenColor if withSender { if user_model.IsGiteaActionsUserName(p.Sender.UserName) { - text += fmt.Sprintf(" by %s", p.Sender.FullName) + text += " by " + p.Sender.FullName } else { - text += fmt.Sprintf(" by %s", linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName)) + text += " by " + linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName) } } return text, color } +func getWorkflowRunPayloadInfo(p *api.WorkflowRunPayload, linkFormatter linkFormatter, withSender bool) (text string, color int) { + description := p.WorkflowRun.Conclusion + if description == "" { + description = p.WorkflowRun.Status + } + refLink := linkFormatter(p.WorkflowRun.HTMLURL, fmt.Sprintf("%s(#%d)", p.WorkflowRun.DisplayTitle, p.WorkflowRun.ID)+"["+base.ShortSha(p.WorkflowRun.HeadSha)+"]:"+description) + + text = fmt.Sprintf("Workflow Run %s: %s", p.Action, refLink) + switch description { + case "waiting": + color = orangeColor + case "queued": + color = orangeColorLight + case "success": + color = greenColor + case "failure": + color = redColor + case "cancelled": + color = yellowColor + case "skipped": + color = purpleColor + default: + color = greyColor + } + if withSender { + text += " by " + linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName) + } + + return text, color +} + func getWorkflowJobPayloadInfo(p *api.WorkflowJobPayload, linkFormatter linkFormatter, withSender bool) (text string, color int) { description := p.WorkflowJob.Conclusion if description == "" { @@ -350,7 +383,7 @@ func getWorkflowJobPayloadInfo(p *api.WorkflowJobPayload, linkFormatter linkForm color = greyColor } if withSender { - text += fmt.Sprintf(" by %s", linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName)) + text += " by " + linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName) } return text, color diff --git a/services/webhook/matrix.go b/services/webhook/matrix.go index 5bc7ba097e..3e9163f78c 100644 --- a/services/webhook/matrix.go +++ b/services/webhook/matrix.go @@ -252,6 +252,12 @@ func (m matrixConvertor) Status(p *api.CommitStatusPayload) 
(MatrixPayload, erro return m.newPayload(text) } +func (m matrixConvertor) WorkflowRun(p *api.WorkflowRunPayload) (MatrixPayload, error) { + text, _ := getWorkflowRunPayloadInfo(p, htmlLinkFormatter, true) + + return m.newPayload(text) +} + func (m matrixConvertor) WorkflowJob(p *api.WorkflowJobPayload) (MatrixPayload, error) { text, _ := getWorkflowJobPayloadInfo(p, htmlLinkFormatter, true) diff --git a/services/webhook/msteams.go b/services/webhook/msteams.go index f70e235f20..450a544b42 100644 --- a/services/webhook/msteams.go +++ b/services/webhook/msteams.go @@ -8,6 +8,7 @@ import ( "fmt" "net/http" "net/url" + "strconv" "strings" webhook_model "code.gitea.io/gitea/models/webhook" @@ -73,7 +74,7 @@ func (m msteamsConvertor) Create(p *api.CreatePayload) (MSTeamsPayload, error) { "", p.Repo.HTMLURL+"/src/"+util.PathEscapeSegments(refName), greenColor, - &MSTeamsFact{fmt.Sprintf("%s:", p.RefType), refName}, + &MSTeamsFact{p.RefType + ":", refName}, ), nil } @@ -90,7 +91,7 @@ func (m msteamsConvertor) Delete(p *api.DeletePayload) (MSTeamsPayload, error) { "", p.Repo.HTMLURL+"/src/"+util.PathEscapeSegments(refName), yellowColor, - &MSTeamsFact{fmt.Sprintf("%s:", p.RefType), refName}, + &MSTeamsFact{p.RefType + ":", refName}, ), nil } @@ -148,7 +149,7 @@ func (m msteamsConvertor) Push(p *api.PushPayload) (MSTeamsPayload, error) { text, titleLink, greenColor, - &MSTeamsFact{"Commit count:", fmt.Sprintf("%d", p.TotalCommits)}, + &MSTeamsFact{"Commit count:", strconv.Itoa(p.TotalCommits)}, ), nil } @@ -163,7 +164,7 @@ func (m msteamsConvertor) Issue(p *api.IssuePayload) (MSTeamsPayload, error) { extraMarkdown, p.Issue.HTMLURL, color, - &MSTeamsFact{"Issue #:", fmt.Sprintf("%d", p.Issue.ID)}, + &MSTeamsFact{"Issue #:", strconv.FormatInt(p.Issue.ID, 10)}, ), nil } @@ -178,7 +179,7 @@ func (m msteamsConvertor) IssueComment(p *api.IssueCommentPayload) (MSTeamsPaylo p.Comment.Body, p.Comment.HTMLURL, color, - &MSTeamsFact{"Issue #:", fmt.Sprintf("%d", p.Issue.ID)}, + &MSTeamsFact{"Issue #:", strconv.FormatInt(p.Issue.ID, 10)}, ), nil } @@ -193,7 +194,7 @@ func (m msteamsConvertor) PullRequest(p *api.PullRequestPayload) (MSTeamsPayload extraMarkdown, p.PullRequest.HTMLURL, color, - &MSTeamsFact{"Pull request #:", fmt.Sprintf("%d", p.PullRequest.ID)}, + &MSTeamsFact{"Pull request #:", strconv.FormatInt(p.PullRequest.ID, 10)}, ), nil } @@ -230,7 +231,7 @@ func (m msteamsConvertor) Review(p *api.PullRequestPayload, event webhook_module text, p.PullRequest.HTMLURL, color, - &MSTeamsFact{"Pull request #:", fmt.Sprintf("%d", p.PullRequest.ID)}, + &MSTeamsFact{"Pull request #:", strconv.FormatInt(p.PullRequest.ID, 10)}, ), nil } @@ -317,6 +318,20 @@ func (m msteamsConvertor) Status(p *api.CommitStatusPayload) (MSTeamsPayload, er ), nil } +func (msteamsConvertor) WorkflowRun(p *api.WorkflowRunPayload) (MSTeamsPayload, error) { + title, color := getWorkflowRunPayloadInfo(p, noneLinkFormatter, false) + + return createMSTeamsPayload( + p.Repo, + p.Sender, + title, + "", + p.WorkflowRun.HTMLURL, + color, + &MSTeamsFact{"WorkflowRun:", p.WorkflowRun.DisplayTitle}, + ), nil +} + func (msteamsConvertor) WorkflowJob(p *api.WorkflowJobPayload) (MSTeamsPayload, error) { title, color := getWorkflowJobPayloadInfo(p, noneLinkFormatter, false) diff --git a/services/webhook/msteams_test.go b/services/webhook/msteams_test.go index d5020d3ff5..0d98b94bad 100644 --- a/services/webhook/msteams_test.go +++ b/services/webhook/msteams_test.go @@ -335,7 +335,7 @@ func TestMSTeamsPayload(t *testing.T) { assert.Equal(t, 
"[test/repo] New wiki page 'index' (Wiki change comment)", pl.Summary) assert.Len(t, pl.Sections, 1) assert.Equal(t, "user1", pl.Sections[0].ActivitySubtitle) - assert.Equal(t, "", pl.Sections[0].Text) + assert.Empty(t, pl.Sections[0].Text) assert.Len(t, pl.Sections[0].Facts, 2) for _, fact := range pl.Sections[0].Facts { if fact.Name == "Repository:" { @@ -356,7 +356,7 @@ func TestMSTeamsPayload(t *testing.T) { assert.Equal(t, "[test/repo] Wiki page 'index' edited (Wiki change comment)", pl.Summary) assert.Len(t, pl.Sections, 1) assert.Equal(t, "user1", pl.Sections[0].ActivitySubtitle) - assert.Equal(t, "", pl.Sections[0].Text) + assert.Empty(t, pl.Sections[0].Text) assert.Len(t, pl.Sections[0].Facts, 2) for _, fact := range pl.Sections[0].Facts { if fact.Name == "Repository:" { diff --git a/services/webhook/notifier.go b/services/webhook/notifier.go index 9e3f21de29..80de6b00fd 100644 --- a/services/webhook/notifier.go +++ b/services/webhook/notifier.go @@ -5,10 +5,8 @@ package webhook import ( "context" - "fmt" actions_model "code.gitea.io/gitea/models/actions" - "code.gitea.io/gitea/models/db" git_model "code.gitea.io/gitea/models/git" issues_model "code.gitea.io/gitea/models/issues" "code.gitea.io/gitea/models/organization" @@ -18,6 +16,7 @@ import ( repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/httplib" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/repository" @@ -295,6 +294,43 @@ func (m *webhookNotifier) NewIssue(ctx context.Context, issue *issues_model.Issu } } +func (m *webhookNotifier) DeleteIssue(ctx context.Context, doer *user_model.User, issue *issues_model.Issue) { + permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, doer) + if issue.IsPull { + if err := issue.LoadPullRequest(ctx); err != nil { + log.Error("LoadPullRequest: %v", err) + return + } + if err := PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, webhook_module.HookEventPullRequest, &api.PullRequestPayload{ + Action: api.HookIssueDeleted, + Index: issue.Index, + PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, doer), + Repository: convert.ToRepo(ctx, issue.Repo, permission), + Sender: convert.ToUser(ctx, doer, nil), + }); err != nil { + log.Error("PrepareWebhooks: %v", err) + } + } else { + if err := issue.LoadRepo(ctx); err != nil { + log.Error("issue.LoadRepo: %v", err) + return + } + if err := issue.LoadPoster(ctx); err != nil { + log.Error("issue.LoadPoster: %v", err) + return + } + if err := PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, webhook_module.HookEventIssues, &api.IssuePayload{ + Action: api.HookIssueDeleted, + Index: issue.Index, + Issue: convert.ToAPIIssue(ctx, issue.Poster, issue), + Repository: convert.ToRepo(ctx, issue.Repo, permission), + Sender: convert.ToUser(ctx, doer, nil), + }); err != nil { + log.Error("PrepareWebhooks: %v", err) + } + } +} + func (m *webhookNotifier) NewPullRequest(ctx context.Context, pull *issues_model.PullRequest, mentions []*user_model.User) { if err := pull.LoadIssue(ctx); err != nil { log.Error("pull.LoadIssue: %v", err) @@ -445,6 +481,7 @@ func (m *webhookNotifier) DeleteComment(ctx context.Context, doer *user_model.Us log.Error("LoadPoster: %v", err) return } + comment.Issue = nil // reload issue to ensure it has the latest data, especially the number of comments if err = comment.LoadIssue(ctx); err != nil { log.Error("LoadIssue: %v", err) return @@ 
-956,72 +993,61 @@ func (*webhookNotifier) WorkflowJobStatusUpdate(ctx context.Context, repo *repo_ org = convert.ToOrganization(ctx, organization.OrgFromUser(repo.Owner)) } - err := job.LoadAttributes(ctx) + status, _ := convert.ToActionsStatus(job.Status) + + convertedJob, err := convert.ToActionWorkflowJob(ctx, repo, task, job) if err != nil { - log.Error("Error loading job attributes: %v", err) + log.Error("ToActionWorkflowJob: %v", err) return } - jobIndex := 0 - jobs, err := actions_model.GetRunJobsByRunID(ctx, job.RunID) + if err := PrepareWebhooks(ctx, source, webhook_module.HookEventWorkflowJob, &api.WorkflowJobPayload{ + Action: status, + WorkflowJob: convertedJob, + Organization: org, + Repo: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeOwner}), + Sender: convert.ToUser(ctx, sender, nil), + }); err != nil { + log.Error("PrepareWebhooks: %v", err) + } +} + +func (*webhookNotifier) WorkflowRunStatusUpdate(ctx context.Context, repo *repo_model.Repository, sender *user_model.User, run *actions_model.ActionRun) { + source := EventSource{ + Repository: repo, + Owner: repo.Owner, + } + + var org *api.Organization + if repo.Owner.IsOrganization() { + org = convert.ToOrganization(ctx, organization.OrgFromUser(repo.Owner)) + } + + status := convert.ToWorkflowRunAction(run.Status) + + gitRepo, err := gitrepo.OpenRepository(ctx, repo) if err != nil { - log.Error("Error loading getting run jobs: %v", err) + log.Error("OpenRepository: %v", err) return } - for i, j := range jobs { - if j.ID == job.ID { - jobIndex = i - break - } - } + defer gitRepo.Close() - status, conclusion := toActionStatus(job.Status) - var runnerID int64 - var runnerName string - var steps []*api.ActionWorkflowStep + convertedWorkflow, err := convert.GetActionWorkflow(ctx, gitRepo, repo, run.WorkflowID) + if err != nil { + log.Error("GetActionWorkflow: %v", err) + return + } - if task != nil { - runnerID = task.RunnerID - if runner, ok, _ := db.GetByID[actions_model.ActionRunner](ctx, runnerID); ok { - runnerName = runner.Name - } - for i, step := range task.Steps { - stepStatus, stepConclusion := toActionStatus(job.Status) - steps = append(steps, &api.ActionWorkflowStep{ - Name: step.Name, - Number: int64(i), - Status: stepStatus, - Conclusion: stepConclusion, - StartedAt: step.Started.AsTime().UTC(), - CompletedAt: step.Stopped.AsTime().UTC(), - }) - } + convertedRun, err := convert.ToActionWorkflowRun(ctx, repo, run) + if err != nil { + log.Error("ToActionWorkflowRun: %v", err) + return } - if err := PrepareWebhooks(ctx, source, webhook_module.HookEventWorkflowJob, &api.WorkflowJobPayload{ - Action: status, - WorkflowJob: &api.ActionWorkflowJob{ - ID: job.ID, - // missing api endpoint for this location - URL: fmt.Sprintf("%s/actions/runs/%d/jobs/%d", repo.APIURL(), job.RunID, job.ID), - HTMLURL: fmt.Sprintf("%s/jobs/%d", job.Run.HTMLURL(), jobIndex), - RunID: job.RunID, - // Missing api endpoint for this location, artifacts are available under a nested url - RunURL: fmt.Sprintf("%s/actions/runs/%d", repo.APIURL(), job.RunID), - Name: job.Name, - Labels: job.RunsOn, - RunAttempt: job.Attempt, - HeadSha: job.Run.CommitSHA, - HeadBranch: git.RefName(job.Run.Ref).BranchName(), - Status: status, - Conclusion: conclusion, - RunnerID: runnerID, - RunnerName: runnerName, - Steps: steps, - CreatedAt: job.Created.AsTime().UTC(), - StartedAt: job.Started.AsTime().UTC(), - CompletedAt: job.Stopped.AsTime().UTC(), - }, + if err := PrepareWebhooks(ctx, source, webhook_module.HookEventWorkflowRun, 
&api.WorkflowRunPayload{ + Action: status, + Workflow: convertedWorkflow, + WorkflowRun: convertedRun, Organization: org, Repo: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeOwner}), Sender: convert.ToUser(ctx, sender, nil), @@ -1029,29 +1055,3 @@ func (*webhookNotifier) WorkflowJobStatusUpdate(ctx context.Context, repo *repo_ log.Error("PrepareWebhooks: %v", err) } } - -func toActionStatus(status actions_model.Status) (string, string) { - var action string - var conclusion string - switch status { - // This is a naming conflict of the webhook between Gitea and GitHub Actions - case actions_model.StatusWaiting: - action = "queued" - case actions_model.StatusBlocked: - action = "waiting" - case actions_model.StatusRunning: - action = "in_progress" - } - if status.IsDone() { - action = "completed" - switch status { - case actions_model.StatusSuccess: - conclusion = "success" - case actions_model.StatusCancelled: - conclusion = "cancelled" - case actions_model.StatusFailure: - conclusion = "failure" - } - } - return action, conclusion -} diff --git a/services/webhook/packagist.go b/services/webhook/packagist.go index 8829d95da6..e6a00b0293 100644 --- a/services/webhook/packagist.go +++ b/services/webhook/packagist.go @@ -114,6 +114,10 @@ func (pc packagistConvertor) Status(_ *api.CommitStatusPayload) (PackagistPayloa return PackagistPayload{}, nil } +func (pc packagistConvertor) WorkflowRun(_ *api.WorkflowRunPayload) (PackagistPayload, error) { + return PackagistPayload{}, nil +} + func (pc packagistConvertor) WorkflowJob(_ *api.WorkflowJobPayload) (PackagistPayload, error) { return PackagistPayload{}, nil } diff --git a/services/webhook/packagist_test.go b/services/webhook/packagist_test.go index 638dcfd302..4e77f29edc 100644 --- a/services/webhook/packagist_test.go +++ b/services/webhook/packagist_test.go @@ -210,5 +210,5 @@ func TestPackagistEmptyPayload(t *testing.T) { var body PackagistPayload err = json.NewDecoder(req.Body).Decode(&body) assert.NoError(t, err) - assert.Equal(t, "", body.PackagistRepository.URL) + assert.Empty(t, body.PackagistRepository.URL) } diff --git a/services/webhook/payloader.go b/services/webhook/payloader.go index adb7243fb1..b607bf3250 100644 --- a/services/webhook/payloader.go +++ b/services/webhook/payloader.go @@ -29,6 +29,7 @@ type payloadConvertor[T any] interface { Wiki(*api.WikiPayload) (T, error) Package(*api.PackagePayload) (T, error) Status(*api.CommitStatusPayload) (T, error) + WorkflowRun(*api.WorkflowRunPayload) (T, error) WorkflowJob(*api.WorkflowJobPayload) (T, error) } @@ -81,6 +82,8 @@ func newPayload[T any](rc payloadConvertor[T], data []byte, event webhook_module return convertUnmarshalledJSON(rc.Package, data) case webhook_module.HookEventStatus: return convertUnmarshalledJSON(rc.Status, data) + case webhook_module.HookEventWorkflowRun: + return convertUnmarshalledJSON(rc.WorkflowRun, data) case webhook_module.HookEventWorkflowJob: return convertUnmarshalledJSON(rc.WorkflowJob, data) } @@ -92,7 +95,10 @@ func newJSONRequest[T any](pc payloadConvertor[T], w *webhook_model.Webhook, t * if err != nil { return nil, nil, err } + return prepareJSONRequest(payload, w, t, withDefaultHeaders) +} +func prepareJSONRequest[T any](payload T, w *webhook_model.Webhook, t *webhook_model.HookTask, withDefaultHeaders bool) (*http.Request, []byte, error) { body, err := json.MarshalIndent(payload, "", " ") if err != nil { return nil, nil, err diff --git a/services/webhook/slack.go b/services/webhook/slack.go index 
589ef3fe9b..3d645a55d0 100644 --- a/services/webhook/slack.go +++ b/services/webhook/slack.go @@ -173,6 +173,12 @@ func (s slackConvertor) Status(p *api.CommitStatusPayload) (SlackPayload, error) return s.createPayload(text, nil), nil } +func (s slackConvertor) WorkflowRun(p *api.WorkflowRunPayload) (SlackPayload, error) { + text, _ := getWorkflowRunPayloadInfo(p, SlackLinkFormatter, true) + + return s.createPayload(text, nil), nil +} + func (s slackConvertor) WorkflowJob(p *api.WorkflowJobPayload) (SlackPayload, error) { text, _ := getWorkflowJobPayloadInfo(p, SlackLinkFormatter, true) diff --git a/services/webhook/telegram.go b/services/webhook/telegram.go index ca74eabe1c..fdd428b45c 100644 --- a/services/webhook/telegram.go +++ b/services/webhook/telegram.go @@ -180,6 +180,12 @@ func (t telegramConvertor) Status(p *api.CommitStatusPayload) (TelegramPayload, return createTelegramPayloadHTML(text), nil } +func (telegramConvertor) WorkflowRun(p *api.WorkflowRunPayload) (TelegramPayload, error) { + text, _ := getWorkflowRunPayloadInfo(p, htmlLinkFormatter, true) + + return createTelegramPayloadHTML(text), nil +} + func (telegramConvertor) WorkflowJob(p *api.WorkflowJobPayload) (TelegramPayload, error) { text, _ := getWorkflowJobPayloadInfo(p, htmlLinkFormatter, true) @@ -189,7 +195,7 @@ func (telegramConvertor) WorkflowJob(p *api.WorkflowJobPayload) (TelegramPayload func createTelegramPayloadHTML(msgHTML string) TelegramPayload { // https://core.telegram.org/bots/api#formatting-options return TelegramPayload{ - Message: strings.TrimSpace(markup.Sanitize(msgHTML)), + Message: strings.TrimSpace(string(markup.Sanitize(msgHTML))), ParseMode: "HTML", DisableWebPreview: true, } diff --git a/services/webhook/webhook_test.go b/services/webhook/webhook_test.go index 6bac02712b..5a805347e3 100644 --- a/services/webhook/webhook_test.go +++ b/services/webhook/webhook_test.go @@ -13,6 +13,7 @@ import ( webhook_model "code.gitea.io/gitea/models/webhook" "code.gitea.io/gitea/modules/setting" api "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/test" webhook_module "code.gitea.io/gitea/modules/webhook" "code.gitea.io/gitea/services/convert" @@ -84,7 +85,8 @@ func TestPrepareWebhooksBranchFilterNoMatch(t *testing.T) { func TestWebhookUserMail(t *testing.T) { require.NoError(t, unittest.PrepareTestDatabase()) - setting.Service.NoReplyAddress = "no-reply.com" + defer test.MockVariableValue(&setting.Service.NoReplyAddress, "no-reply.com")() + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1}) assert.Equal(t, user.GetPlaceholderEmail(), convert.ToUser(db.DefaultContext, user, nil).Email) assert.Equal(t, user.Email, convert.ToUser(db.DefaultContext, user, user).Email) diff --git a/services/webhook/wechatwork.go b/services/webhook/wechatwork.go index 2b19822caf..1875317406 100644 --- a/services/webhook/wechatwork.go +++ b/services/webhook/wechatwork.go @@ -181,6 +181,12 @@ func (wc wechatworkConvertor) Status(p *api.CommitStatusPayload) (WechatworkPayl return newWechatworkMarkdownPayload(text), nil } +func (wc wechatworkConvertor) WorkflowRun(p *api.WorkflowRunPayload) (WechatworkPayload, error) { + text, _ := getWorkflowRunPayloadInfo(p, noneLinkFormatter, true) + + return newWechatworkMarkdownPayload(text), nil +} + func (wc wechatworkConvertor) WorkflowJob(p *api.WorkflowJobPayload) (WechatworkPayload, error) { text, _ := getWorkflowJobPayloadInfo(p, noneLinkFormatter, true) diff --git a/services/webtheme/webtheme.go b/services/webtheme/webtheme.go index 
58aea3bc74..4e89d6dbac 100644 --- a/services/webtheme/webtheme.go +++ b/services/webtheme/webtheme.go @@ -70,11 +70,11 @@ func parseThemeMetaInfoToMap(cssContent string) map[string]string { m := map[string]string{} for _, item := range matchedItems { v := item[3] - if strings.HasPrefix(v, `"`) { - v = strings.TrimSuffix(strings.TrimPrefix(v, `"`), `"`) + if after, ok := strings.CutPrefix(v, `"`); ok { + v = strings.TrimSuffix(after, `"`) v = strings.ReplaceAll(v, `\"`, `"`) - } else if strings.HasPrefix(v, `'`) { - v = strings.TrimSuffix(strings.TrimPrefix(v, `'`), `'`) + } else if after, ok := strings.CutPrefix(v, `'`); ok { + v = strings.TrimSuffix(after, `'`) v = strings.ReplaceAll(v, `\'`, `'`) } m[item[2]] = v diff --git a/services/wiki/wiki.go b/services/wiki/wiki.go index b21f46639d..0a955406e2 100644 --- a/services/wiki/wiki.go +++ b/services/wiki/wiki.go @@ -102,15 +102,11 @@ func updateWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model hasDefaultBranch := gitrepo.IsBranchExist(ctx, repo.WikiStorageRepo(), repo.DefaultWikiBranch) - basePath, err := repo_module.CreateTemporaryPath("update-wiki") + basePath, cleanup, err := repo_module.CreateTemporaryPath("update-wiki") if err != nil { return err } - defer func() { - if err := repo_module.RemoveTemporaryPath(basePath); err != nil { - log.Error("Merge: RemoveTemporaryPath: %s", err) - } - }() + defer cleanup() cloneOpts := git.CloneRepoOptions{ Bare: true, @@ -198,7 +194,7 @@ func updateWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model sign, signingKey, signer, _ := asymkey_service.SignWikiCommit(ctx, repo, doer) if sign { - commitTreeOpts.KeyID = signingKey + commitTreeOpts.Key = signingKey if repo.GetTrustModel() == repo_model.CommitterTrustModel || repo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel { committer = signer } @@ -264,15 +260,11 @@ func DeleteWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model return fmt.Errorf("InitWiki: %w", err) } - basePath, err := repo_module.CreateTemporaryPath("update-wiki") + basePath, cleanup, err := repo_module.CreateTemporaryPath("update-wiki") if err != nil { return err } - defer func() { - if err := repo_module.RemoveTemporaryPath(basePath); err != nil { - log.Error("Merge: RemoveTemporaryPath: %s", err) - } - }() + defer cleanup() if err := git.Clone(ctx, repo.WikiPath(), basePath, git.CloneRepoOptions{ Bare: true, @@ -324,7 +316,7 @@ func DeleteWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model sign, signingKey, signer, _ := asymkey_service.SignWikiCommit(ctx, repo, doer) if sign { - commitTreeOpts.KeyID = signingKey + commitTreeOpts.Key = signingKey if repo.GetTrustModel() == repo_model.CommitterTrustModel || repo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel { committer = signer } @@ -373,7 +365,7 @@ func ChangeDefaultWikiBranch(ctx context.Context, repo *repo_model.Repository, n } return db.WithTx(ctx, func(ctx context.Context) error { repo.DefaultWikiBranch = newBranch - if err := repo_model.UpdateRepositoryCols(ctx, repo, "default_wiki_branch"); err != nil { + if err := repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "default_wiki_branch"); err != nil { return fmt.Errorf("unable to update database: %w", err) } diff --git a/services/wiki/wiki_test.go b/services/wiki/wiki_test.go index 288d258279..6ea3ca9c1b 100644 --- a/services/wiki/wiki_test.go +++ b/services/wiki/wiki_test.go @@ -26,7 +26,7 @@ func TestMain(m *testing.M) { func TestWebPathSegments(t *testing.T) { a := 
WebPathSegments("a%2Fa/b+c/d-e/f-g.-") - assert.EqualValues(t, []string{"a/a", "b c", "d e", "f-g"}, a) + assert.Equal(t, []string{"a/a", "b c", "d e", "f-g"}, a) } func TestUserTitleToWebPath(t *testing.T) { @@ -63,7 +63,7 @@ func TestWebPathToDisplayName(t *testing.T) { {"a b", "a%20b.md"}, } { _, displayName := WebPathToUserTitle(test.WebPath) - assert.EqualValues(t, test.Expected, displayName) + assert.Equal(t, test.Expected, displayName) } } @@ -80,7 +80,7 @@ func TestWebPathToGitPath(t *testing.T) { {"2000-01-02-meeting.md", "2000-01-02+meeting"}, {"2000-01-02 meeting.-.md", "2000-01-02%20meeting.-"}, } { - assert.EqualValues(t, test.Expected, WebPathToGitPath(test.WikiName)) + assert.Equal(t, test.Expected, WebPathToGitPath(test.WikiName)) } } @@ -116,9 +116,9 @@ func TestGitPathToWebPath(t *testing.T) { func TestUserWebGitPathConsistency(t *testing.T) { maxLen := 20 b := make([]byte, maxLen) - for i := 0; i < 1000; i++ { + for range 1000 { l := rand.Intn(maxLen) - for j := 0; j < l; j++ { + for j := range l { r := rand.Intn(0x80-0x20) + 0x20 b[j] = byte(r) } @@ -134,9 +134,9 @@ func TestUserWebGitPathConsistency(t *testing.T) { _, userTitle1 := WebPathToUserTitle(webPath1) gitPath1 := WebPathToGitPath(webPath1) - assert.EqualValues(t, userTitle, userTitle1, "UserTitle for userTitle: %q", userTitle) - assert.EqualValues(t, webPath, webPath1, "WebPath for userTitle: %q", userTitle) - assert.EqualValues(t, gitPath, gitPath1, "GitPath for userTitle: %q", userTitle) + assert.Equal(t, userTitle, userTitle1, "UserTitle for userTitle: %q", userTitle) + assert.Equal(t, webPath, webPath1, "WebPath for userTitle: %q", userTitle) + assert.Equal(t, gitPath, gitPath1, "GitPath for userTitle: %q", userTitle) } } @@ -175,7 +175,7 @@ func TestRepository_AddWikiPage(t *testing.T) { gitPath := WebPathToGitPath(webPath) entry, err := masterTree.GetTreeEntryByPath(gitPath) assert.NoError(t, err) - assert.EqualValues(t, gitPath, entry.Name(), "%s not added correctly", userTitle) + assert.Equal(t, gitPath, entry.Name(), "%s not added correctly", userTitle) }) } @@ -220,7 +220,7 @@ func TestRepository_EditWikiPage(t *testing.T) { gitPath := WebPathToGitPath(webPath) entry, err := masterTree.GetTreeEntryByPath(gitPath) assert.NoError(t, err) - assert.EqualValues(t, gitPath, entry.Name(), "%s not edited correctly", newWikiName) + assert.Equal(t, gitPath, entry.Name(), "%s not edited correctly", newWikiName) if newWikiName != "Home" { _, err := masterTree.GetTreeEntryByPath("Home.md") @@ -290,7 +290,7 @@ func TestPrepareWikiFileName(t *testing.T) { t.Errorf("expect to find an escaped file but we could not detect one") } } - assert.EqualValues(t, tt.wikiPath, newWikiPath) + assert.Equal(t, tt.wikiPath, newWikiPath) }) } } @@ -312,13 +312,13 @@ func TestPrepareWikiFileName_FirstPage(t *testing.T) { existence, newWikiPath, err := prepareGitPath(gitRepo, "master", "Home") assert.False(t, existence) assert.NoError(t, err) - assert.EqualValues(t, "Home.md", newWikiPath) + assert.Equal(t, "Home.md", newWikiPath) } func TestWebPathConversion(t *testing.T) { assert.Equal(t, "path/wiki", WebPathToURLPath(WebPath("path/wiki"))) assert.Equal(t, "wiki", WebPathToURLPath(WebPath("wiki"))) - assert.Equal(t, "", WebPathToURLPath(WebPath(""))) + assert.Empty(t, WebPathToURLPath(WebPath(""))) } func TestWebPathFromRequest(t *testing.T) { |