Diffstat (limited to 'services')
247 files changed, 4553 insertions, 3385 deletions
diff --git a/services/actions/auth.go b/services/actions/auth.go index 1ef21f6e0e..12a8fba53f 100644 --- a/services/actions/auth.go +++ b/services/actions/auth.go @@ -4,6 +4,7 @@ package actions import ( + "errors" "fmt" "net/http" "strings" @@ -80,7 +81,7 @@ func ParseAuthorizationToken(req *http.Request) (int64, error) { parts := strings.SplitN(h, " ", 2) if len(parts) != 2 { log.Error("split token failed: %s", h) - return 0, fmt.Errorf("split token failed") + return 0, errors.New("split token failed") } return TokenToTaskID(parts[1]) @@ -100,7 +101,7 @@ func TokenToTaskID(token string) (int64, error) { c, ok := parsedToken.Claims.(*actionsClaims) if !parsedToken.Valid || !ok { - return 0, fmt.Errorf("invalid token claim") + return 0, errors.New("invalid token claim") } return c.TaskID, nil diff --git a/services/actions/auth_test.go b/services/actions/auth_test.go index 85e7409105..38d0ba7f82 100644 --- a/services/actions/auth_test.go +++ b/services/actions/auth_test.go @@ -18,7 +18,7 @@ func TestCreateAuthorizationToken(t *testing.T) { var taskID int64 = 23 token, err := CreateAuthorizationToken(taskID, 1, 2) assert.NoError(t, err) - assert.NotEqual(t, "", token) + assert.NotEmpty(t, token) claims := jwt.MapClaims{} _, err = jwt.ParseWithClaims(token, claims, func(t *jwt.Token) (any, error) { return setting.GetGeneralTokenSigningSecret(), nil @@ -44,7 +44,7 @@ func TestParseAuthorizationToken(t *testing.T) { var taskID int64 = 23 token, err := CreateAuthorizationToken(taskID, 1, 2) assert.NoError(t, err) - assert.NotEqual(t, "", token) + assert.NotEmpty(t, token) headers := http.Header{} headers.Set("Authorization", "Bearer "+token) rTaskID, err := ParseAuthorizationToken(&http.Request{ diff --git a/services/actions/cleanup.go b/services/actions/cleanup.go index 23d6e3a49d..d0cc63e538 100644 --- a/services/actions/cleanup.go +++ b/services/actions/cleanup.go @@ -5,12 +5,14 @@ package actions import ( "context" + "errors" "fmt" "time" actions_model "code.gitea.io/gitea/models/actions" "code.gitea.io/gitea/models/db" actions_module "code.gitea.io/gitea/modules/actions" + "code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/storage" @@ -27,7 +29,7 @@ func Cleanup(ctx context.Context) error { } // clean up old logs - if err := CleanupLogs(ctx); err != nil { + if err := CleanupExpiredLogs(ctx); err != nil { return fmt.Errorf("cleanup logs: %w", err) } @@ -98,8 +100,15 @@ func cleanNeedDeleteArtifacts(taskCtx context.Context) error { const deleteLogBatchSize = 100 -// CleanupLogs removes logs which are older than the configured retention time -func CleanupLogs(ctx context.Context) error { +func removeTaskLog(ctx context.Context, task *actions_model.ActionTask) { + if err := actions_module.RemoveLogs(ctx, task.LogInStorage, task.LogFilename); err != nil { + log.Error("Failed to remove log %s (in storage %v) of task %v: %v", task.LogFilename, task.LogInStorage, task.ID, err) + // do not return error here, go on + } +} + +// CleanupExpiredLogs removes logs which are older than the configured retention time +func CleanupExpiredLogs(ctx context.Context) error { olderThan := timeutil.TimeStampNow().AddDuration(-time.Duration(setting.Actions.LogRetentionDays) * 24 * time.Hour) count := 0 @@ -109,10 +118,7 @@ func CleanupLogs(ctx context.Context) error { return fmt.Errorf("find old tasks: %w", err) } for _, task := range tasks { - if err := actions_module.RemoveLogs(ctx, task.LogInStorage, task.LogFilename); err 
!= nil { - log.Error("Failed to remove log %s (in storage %v) of task %v: %v", task.LogFilename, task.LogInStorage, task.ID, err) - // do not return error here, go on - } + removeTaskLog(ctx, task) task.LogIndexes = nil // clear log indexes since it's a heavy field task.LogExpired = true if err := actions_model.UpdateTask(ctx, task, "log_indexes", "log_expired"); err != nil { @@ -148,3 +154,107 @@ func CleanupEphemeralRunners(ctx context.Context) error { log.Info("Removed %d runners", affected) return nil } + +// CleanupEphemeralRunnersByPickedTaskOfRepo removes all ephemeral runners that have active/finished tasks on the given repository +func CleanupEphemeralRunnersByPickedTaskOfRepo(ctx context.Context, repoID int64) error { + subQuery := builder.Select("`action_runner`.id"). + From(builder.Select("*").From("`action_runner`"), "`action_runner`"). // mysql needs this redundant subquery + Join("INNER", "`action_task`", "`action_task`.`runner_id` = `action_runner`.`id`"). + Where(builder.And(builder.Eq{"`action_runner`.`ephemeral`": true}, builder.Eq{"`action_task`.`repo_id`": repoID})) + b := builder.Delete(builder.In("id", subQuery)).From("`action_runner`") + res, err := db.GetEngine(ctx).Exec(b) + if err != nil { + return fmt.Errorf("find runners: %w", err) + } + affected, _ := res.RowsAffected() + log.Info("Removed %d runners", affected) + return nil +} + +// DeleteRun deletes workflow run, including all logs and artifacts. +func DeleteRun(ctx context.Context, run *actions_model.ActionRun) error { + if !run.Status.IsDone() { + return errors.New("run is not done") + } + + repoID := run.RepoID + + jobs, err := actions_model.GetRunJobsByRunID(ctx, run.ID) + if err != nil { + return err + } + jobIDs := container.FilterSlice(jobs, func(j *actions_model.ActionRunJob) (int64, bool) { + return j.ID, true + }) + tasks := make(actions_model.TaskList, 0) + if len(jobIDs) > 0 { + if err := db.GetEngine(ctx).Where("repo_id = ?", repoID).In("job_id", jobIDs).Find(&tasks); err != nil { + return err + } + } + + artifacts, err := db.Find[actions_model.ActionArtifact](ctx, actions_model.FindArtifactsOptions{ + RepoID: repoID, + RunID: run.ID, + }) + if err != nil { + return err + } + + var recordsToDelete []any + + recordsToDelete = append(recordsToDelete, &actions_model.ActionRun{ + RepoID: repoID, + ID: run.ID, + }) + recordsToDelete = append(recordsToDelete, &actions_model.ActionRunJob{ + RepoID: repoID, + RunID: run.ID, + }) + for _, tas := range tasks { + recordsToDelete = append(recordsToDelete, &actions_model.ActionTask{ + RepoID: repoID, + ID: tas.ID, + }) + recordsToDelete = append(recordsToDelete, &actions_model.ActionTaskStep{ + RepoID: repoID, + TaskID: tas.ID, + }) + recordsToDelete = append(recordsToDelete, &actions_model.ActionTaskOutput{ + TaskID: tas.ID, + }) + } + recordsToDelete = append(recordsToDelete, &actions_model.ActionArtifact{ + RepoID: repoID, + RunID: run.ID, + }) + + if err := db.WithTx(ctx, func(ctx context.Context) error { + // TODO: Deleting task records could break current ephemeral runner implementation. This is a temporary workaround suggested by ChristopherHX. + // Since you delete potentially the only task an ephemeral act_runner has ever run, please delete the affected runners first. 
+ // one of + // call cleanup ephemeral runners first + // delete affected ephemeral act_runners + // I would make ephemeral runners fully delete directly before formally finishing the task + // + // See also: https://github.com/go-gitea/gitea/pull/34337#issuecomment-2862222788 + if err := CleanupEphemeralRunners(ctx); err != nil { + return err + } + return db.DeleteBeans(ctx, recordsToDelete...) + }); err != nil { + return err + } + + // Delete files on storage + for _, tas := range tasks { + removeTaskLog(ctx, tas) + } + for _, art := range artifacts { + if err := storage.ActionsArtifacts.Delete(art.StoragePath); err != nil { + log.Error("remove artifact file %q: %v", art.StoragePath, err) + } + } + + return nil +} diff --git a/services/actions/clear_tasks.go b/services/actions/clear_tasks.go index 2aeb0e8c96..274c04aa57 100644 --- a/services/actions/clear_tasks.go +++ b/services/actions/clear_tasks.go @@ -42,6 +42,10 @@ func notifyWorkflowJobStatusUpdate(ctx context.Context, jobs []*actions_model.Ac _ = job.LoadAttributes(ctx) notify_service.WorkflowJobStatusUpdate(ctx, job.Run.Repo, job.Run.TriggerUser, job, nil) } + if len(jobs) > 0 { + job := jobs[0] + notify_service.WorkflowRunStatusUpdate(ctx, job.Run.Repo, job.Run.TriggerUser, job.Run) + } } } @@ -123,7 +127,7 @@ func CancelAbandonedJobs(ctx context.Context) error { } CreateCommitStatus(ctx, job) if updated { - _ = job.LoadAttributes(ctx) + NotifyWorkflowRunStatusUpdateWithReload(ctx, job) notify_service.WorkflowJobStatusUpdate(ctx, job.Run.Repo, job.Run.TriggerUser, job, nil) } } diff --git a/services/actions/commit_status.go b/services/actions/commit_status.go index 94ab89a3b7..ef241e5091 100644 --- a/services/actions/commit_status.go +++ b/services/actions/commit_status.go @@ -5,6 +5,7 @@ package actions import ( "context" + "errors" "fmt" "path" @@ -13,9 +14,9 @@ import ( git_model "code.gitea.io/gitea/models/git" user_model "code.gitea.io/gitea/models/user" actions_module "code.gitea.io/gitea/modules/actions" + "code.gitea.io/gitea/modules/commitstatus" git "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" - api "code.gitea.io/gitea/modules/structs" webhook_module "code.gitea.io/gitea/modules/webhook" commitstatus_service "code.gitea.io/gitea/services/repository/commitstatus" @@ -51,7 +52,7 @@ func createCommitStatus(ctx context.Context, job *actions_model.ActionRunJob) er return fmt.Errorf("GetPushEventPayload: %w", err) } if payload.HeadCommit == nil { - return fmt.Errorf("head commit is missing in event payload") + return errors.New("head commit is missing in event payload") } sha = payload.HeadCommit.ID case // pull_request @@ -71,9 +72,9 @@ func createCommitStatus(ctx context.Context, job *actions_model.ActionRunJob) er return fmt.Errorf("GetPullRequestEventPayload: %w", err) } if payload.PullRequest == nil { - return fmt.Errorf("pull request is missing in event payload") + return errors.New("pull request is missing in event payload") } else if payload.PullRequest.Head == nil { - return fmt.Errorf("head of pull request is missing in event payload") + return errors.New("head of pull request is missing in event payload") } sha = payload.PullRequest.Head.Sha case webhook_module.HookEventRelease: @@ -91,7 +92,7 @@ func createCommitStatus(ctx context.Context, job *actions_model.ActionRunJob) er } ctxname := fmt.Sprintf("%s / %s (%s)", runName, job.Name, event) state := toCommitStatus(job.Status) - if statuses, _, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptionsAll); err == nil { + if 
statuses, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptionsAll); err == nil { for _, v := range statuses { if v.Context == ctxname { if v.State == state { @@ -146,16 +147,18 @@ func createCommitStatus(ctx context.Context, job *actions_model.ActionRunJob) er return commitstatus_service.CreateCommitStatus(ctx, repo, creator, commitID.String(), &status) } -func toCommitStatus(status actions_model.Status) api.CommitStatusState { +func toCommitStatus(status actions_model.Status) commitstatus.CommitStatusState { switch status { - case actions_model.StatusSuccess, actions_model.StatusSkipped: - return api.CommitStatusSuccess + case actions_model.StatusSuccess: + return commitstatus.CommitStatusSuccess case actions_model.StatusFailure, actions_model.StatusCancelled: - return api.CommitStatusFailure + return commitstatus.CommitStatusFailure case actions_model.StatusWaiting, actions_model.StatusBlocked, actions_model.StatusRunning: - return api.CommitStatusPending + return commitstatus.CommitStatusPending + case actions_model.StatusSkipped: + return commitstatus.CommitStatusSkipped default: - return api.CommitStatusError + return commitstatus.CommitStatusError } } diff --git a/services/actions/context.go b/services/actions/context.go index d14728fae4..b6de429ccf 100644 --- a/services/actions/context.go +++ b/services/actions/context.go @@ -6,6 +6,7 @@ package actions import ( "context" "fmt" + "strconv" actions_model "code.gitea.io/gitea/models/actions" "code.gitea.io/gitea/models/db" @@ -14,11 +15,16 @@ import ( "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/json" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" + + "github.com/nektos/act/pkg/model" ) +type GiteaContext map[string]any + // GenerateGiteaContext generate the gitea context without token and gitea_runtime_token // job can be nil when generating a context for parsing workflow-level expressions -func GenerateGiteaContext(run *actions_model.ActionRun, job *actions_model.ActionRunJob) map[string]any { +func GenerateGiteaContext(run *actions_model.ActionRun, job *actions_model.ActionRunJob) GiteaContext { event := map[string]any{} _ = json.Unmarshal([]byte(run.EventPayload), &event) @@ -41,7 +47,7 @@ func GenerateGiteaContext(run *actions_model.ActionRun, job *actions_model.Actio refName := git.RefName(ref) - gitContext := map[string]any{ + gitContext := GiteaContext{ // standard contexts, see https://docs.github.com/en/actions/learn-github-actions/contexts#github-context "action": "", // string, The name of the action currently running, or the id of a step. GitHub removes special characters, and uses the name __run when the current step runs a script without an id. If you use the same action more than once in the same job, the name will include a suffix with the sequence number with underscore before it. For example, the first script you run will have the name __run, and the second script will be named __run_2. Similarly, the second invocation of actions/checkout will be actionscheckout2. "action_path": "", // string, The path where an action is located. This property is only supported in composite actions. You can use this path to access files located in the same repository as the action. @@ -68,7 +74,7 @@ func GenerateGiteaContext(run *actions_model.ActionRun, job *actions_model.Actio "repositoryUrl": run.Repo.HTMLURL(), // string, The Git URL to the repository. For example, git://github.com/codertocat/hello-world.git. 
"retention_days": "", // string, The number of days that workflow run logs and artifacts are kept. "run_id": "", // string, A unique number for each workflow run within a repository. This number does not change if you re-run the workflow run. - "run_number": fmt.Sprint(run.Index), // string, A unique number for each run of a particular workflow in a repository. This number begins at 1 for the workflow's first run, and increments with each new run. This number does not change if you re-run the workflow run. + "run_number": strconv.FormatInt(run.Index, 10), // string, A unique number for each run of a particular workflow in a repository. This number begins at 1 for the workflow's first run, and increments with each new run. This number does not change if you re-run the workflow run. "run_attempt": "", // string, A unique number for each attempt of a particular workflow run in a repository. This number begins at 1 for the workflow run's first attempt, and increments with each re-run. "secret_source": "Actions", // string, The source of a secret used in a workflow. Possible values are None, Actions, Dependabot, or Codespaces. "server_url": setting.AppURL, // string, The URL of the GitHub server. For example: https://github.com. @@ -83,8 +89,8 @@ func GenerateGiteaContext(run *actions_model.ActionRun, job *actions_model.Actio if job != nil { gitContext["job"] = job.JobID - gitContext["run_id"] = fmt.Sprint(job.RunID) - gitContext["run_attempt"] = fmt.Sprint(job.Attempt) + gitContext["run_id"] = strconv.FormatInt(job.RunID, 10) + gitContext["run_attempt"] = strconv.FormatInt(job.Attempt, 10) } return gitContext @@ -159,3 +165,37 @@ func mergeTwoOutputs(o1, o2 map[string]string) map[string]string { } return ret } + +func (g *GiteaContext) ToGitHubContext() *model.GithubContext { + return &model.GithubContext{ + Event: util.GetMapValueOrDefault(*g, "event", map[string]any(nil)), + EventPath: util.GetMapValueOrDefault(*g, "event_path", ""), + Workflow: util.GetMapValueOrDefault(*g, "workflow", ""), + RunID: util.GetMapValueOrDefault(*g, "run_id", ""), + RunNumber: util.GetMapValueOrDefault(*g, "run_number", ""), + Actor: util.GetMapValueOrDefault(*g, "actor", ""), + Repository: util.GetMapValueOrDefault(*g, "repository", ""), + EventName: util.GetMapValueOrDefault(*g, "event_name", ""), + Sha: util.GetMapValueOrDefault(*g, "sha", ""), + Ref: util.GetMapValueOrDefault(*g, "ref", ""), + RefName: util.GetMapValueOrDefault(*g, "ref_name", ""), + RefType: util.GetMapValueOrDefault(*g, "ref_type", ""), + HeadRef: util.GetMapValueOrDefault(*g, "head_ref", ""), + BaseRef: util.GetMapValueOrDefault(*g, "base_ref", ""), + Token: "", // deliberately omitted for security + Workspace: util.GetMapValueOrDefault(*g, "workspace", ""), + Action: util.GetMapValueOrDefault(*g, "action", ""), + ActionPath: util.GetMapValueOrDefault(*g, "action_path", ""), + ActionRef: util.GetMapValueOrDefault(*g, "action_ref", ""), + ActionRepository: util.GetMapValueOrDefault(*g, "action_repository", ""), + Job: util.GetMapValueOrDefault(*g, "job", ""), + JobName: "", // not present in GiteaContext + RepositoryOwner: util.GetMapValueOrDefault(*g, "repository_owner", ""), + RetentionDays: util.GetMapValueOrDefault(*g, "retention_days", ""), + RunnerPerflog: "", // not present in GiteaContext + RunnerTrackingID: "", // not present in GiteaContext + ServerURL: util.GetMapValueOrDefault(*g, "server_url", ""), + APIURL: util.GetMapValueOrDefault(*g, "api_url", ""), + GraphQLURL: util.GetMapValueOrDefault(*g, "graphql_url", ""), + } +} 
diff --git a/services/actions/interface.go b/services/actions/interface.go index d4fa782fec..a054c38e4f 100644 --- a/services/actions/interface.go +++ b/services/actions/interface.go @@ -25,4 +25,16 @@ type API interface { UpdateVariable(*context.APIContext) // GetRegistrationToken get registration token GetRegistrationToken(*context.APIContext) + // CreateRegistrationToken get registration token + CreateRegistrationToken(*context.APIContext) + // ListRunners list runners + ListRunners(*context.APIContext) + // GetRunner get a runner + GetRunner(*context.APIContext) + // DeleteRunner delete runner + DeleteRunner(*context.APIContext) + // ListWorkflowJobs list jobs + ListWorkflowJobs(*context.APIContext) + // ListWorkflowRuns list runs + ListWorkflowRuns(*context.APIContext) } diff --git a/services/actions/job_emitter.go b/services/actions/job_emitter.go index c11bb5875f..47c9f59094 100644 --- a/services/actions/job_emitter.go +++ b/services/actions/job_emitter.go @@ -11,6 +11,7 @@ import ( actions_model "code.gitea.io/gitea/models/actions" "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/modules/graceful" + "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/queue" notify_service "code.gitea.io/gitea/services/notify" @@ -78,9 +79,30 @@ func checkJobsOfRun(ctx context.Context, runID int64) error { _ = job.LoadAttributes(ctx) notify_service.WorkflowJobStatusUpdate(ctx, job.Run.Repo, job.Run.TriggerUser, job, nil) } + if len(jobs) > 0 { + runUpdated := true + for _, job := range jobs { + if !job.Status.IsDone() { + runUpdated = false + break + } + } + if runUpdated { + NotifyWorkflowRunStatusUpdateWithReload(ctx, jobs[0]) + } + } return nil } +func NotifyWorkflowRunStatusUpdateWithReload(ctx context.Context, job *actions_model.ActionRunJob) { + job.Run = nil + if err := job.LoadAttributes(ctx); err != nil { + log.Error("LoadAttributes: %v", err) + return + } + notify_service.WorkflowRunStatusUpdate(ctx, job.Run.Repo, job.Run.TriggerUser, job.Run) +} + type jobStatusResolver struct { statuses map[int64]actions_model.Status needs map[int64][]int64 diff --git a/services/actions/notifier.go b/services/actions/notifier.go index 831cde3523..d10cc0ab34 100644 --- a/services/actions/notifier.go +++ b/services/actions/notifier.go @@ -6,13 +6,16 @@ package actions import ( "context" + actions_model "code.gitea.io/gitea/models/actions" issues_model "code.gitea.io/gitea/models/issues" + "code.gitea.io/gitea/models/organization" packages_model "code.gitea.io/gitea/models/packages" perm_model "code.gitea.io/gitea/models/perm" access_model "code.gitea.io/gitea/models/perm/access" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" @@ -762,3 +765,41 @@ func (n *actionsNotifier) MigrateRepository(ctx context.Context, doer, u *user_m Sender: convert.ToUser(ctx, doer, nil), }).Notify(ctx) } + +func (n *actionsNotifier) WorkflowRunStatusUpdate(ctx context.Context, repo *repo_model.Repository, sender *user_model.User, run *actions_model.ActionRun) { + ctx = withMethod(ctx, "WorkflowRunStatusUpdate") + + var org *api.Organization + if repo.Owner.IsOrganization() { + org = convert.ToOrganization(ctx, organization.OrgFromUser(repo.Owner)) + } + + status := convert.ToWorkflowRunAction(run.Status) + + gitRepo, err := gitrepo.OpenRepository(ctx, repo) + if err != nil { + 
log.Error("OpenRepository: %v", err) + return + } + defer gitRepo.Close() + + convertedWorkflow, err := convert.GetActionWorkflow(ctx, gitRepo, repo, run.WorkflowID) + if err != nil { + log.Error("GetActionWorkflow: %v", err) + return + } + convertedRun, err := convert.ToActionWorkflowRun(ctx, repo, run) + if err != nil { + log.Error("ToActionWorkflowRun: %v", err) + return + } + + newNotifyInput(repo, sender, webhook_module.HookEventWorkflowRun).WithPayload(&api.WorkflowRunPayload{ + Action: status, + Workflow: convertedWorkflow, + WorkflowRun: convertedRun, + Organization: org, + Repo: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}), + Sender: convert.ToUser(ctx, sender, nil), + }).Notify(ctx) +} diff --git a/services/actions/notifier_helper.go b/services/actions/notifier_helper.go index d179134798..8010f51a86 100644 --- a/services/actions/notifier_helper.go +++ b/services/actions/notifier_helper.go @@ -33,7 +33,9 @@ import ( "github.com/nektos/act/pkg/model" ) -var methodCtxKey struct{} +type methodCtxKeyType struct{} + +var methodCtxKey methodCtxKeyType // withMethod sets the notification method that this context currently executes. // Used for debugging/ troubleshooting purposes. @@ -44,8 +46,7 @@ func withMethod(ctx context.Context, method string) context.Context { return ctx } } - // FIXME: review the use of this nolint directive - return context.WithValue(ctx, methodCtxKey, method) //nolint:staticcheck + return context.WithValue(ctx, methodCtxKey, method) } // getMethod gets the notification method that this context currently executes. @@ -178,7 +179,7 @@ func notify(ctx context.Context, input *notifyInput) error { return fmt.Errorf("gitRepo.GetCommit: %w", err) } - if skipWorkflows(input, commit) { + if skipWorkflows(ctx, input, commit) { return nil } @@ -243,7 +244,7 @@ func notify(ctx context.Context, input *notifyInput) error { return handleWorkflows(ctx, detectedWorkflows, commit, input, ref.String()) } -func skipWorkflows(input *notifyInput, commit *git.Commit) bool { +func skipWorkflows(ctx context.Context, input *notifyInput, commit *git.Commit) bool { // skip workflow runs with a configured skip-ci string in commit message or pr title if the event is push or pull_request(_sync) // https://docs.github.com/en/actions/managing-workflow-runs/skipping-workflow-runs skipWorkflowEvents := []webhook_module.HookEventType{ @@ -263,6 +264,27 @@ func skipWorkflows(input *notifyInput, commit *git.Commit) bool { } } } + if input.Event == webhook_module.HookEventWorkflowRun { + wrun, ok := input.Payload.(*api.WorkflowRunPayload) + for i := 0; i < 5 && ok && wrun.WorkflowRun != nil; i++ { + if wrun.WorkflowRun.Event != "workflow_run" { + return false + } + r, err := actions_model.GetRunByRepoAndID(ctx, input.Repo.ID, wrun.WorkflowRun.ID) + if err != nil { + log.Error("GetRunByRepoAndID: %v", err) + return true + } + wrun, err = r.GetWorkflowRunEventPayload() + if err != nil { + log.Error("GetWorkflowRunEventPayload: %v", err) + return true + } + } + // skip workflow runs events exceeding the maxiumum of 5 recursive events + log.Debug("repo %s: skipped workflow_run because of recursive event of 5", input.Repo.RepoPath()) + return true + } return false } @@ -302,9 +324,11 @@ func handleWorkflows( run := &actions_model.ActionRun{ Title: strings.SplitN(commit.CommitMessage, "\n", 2)[0], RepoID: input.Repo.ID, + Repo: input.Repo, OwnerID: input.Repo.OwnerID, WorkflowID: dwf.EntryName, TriggerUserID: input.Doer.ID, + TriggerUser: input.Doer, Ref: ref, 
CommitSHA: commit.ID.String(), IsForkPullRequest: isForkPullRequest, @@ -333,12 +357,18 @@ func handleWorkflows( continue } - jobs, err := jobparser.Parse(dwf.Content, jobparser.WithVars(vars)) + giteaCtx := GenerateGiteaContext(run, nil) + + jobs, err := jobparser.Parse(dwf.Content, jobparser.WithVars(vars), jobparser.WithGitContext(giteaCtx.ToGitHubContext())) if err != nil { log.Error("jobparser.Parse: %v", err) continue } + if len(jobs) > 0 && jobs[0].RunName != "" { + run.Title = jobs[0].RunName + } + // cancel running jobs if the event is push or pull_request_sync if run.Event == webhook_module.HookEventPush || run.Event == webhook_module.HookEventPullRequestSync { @@ -364,6 +394,15 @@ func handleWorkflows( continue } CreateCommitStatus(ctx, alljobs...) + if len(alljobs) > 0 { + job := alljobs[0] + err := job.LoadRun(ctx) + if err != nil { + log.Error("LoadRun: %v", err) + continue + } + notify_service.WorkflowRunStatusUpdate(ctx, job.Run.Repo, job.Run.TriggerUser, job.Run) + } for _, job := range alljobs { notify_service.WorkflowJobStatusUpdate(ctx, input.Repo, input.Doer, job, nil) } @@ -508,9 +547,11 @@ func handleSchedules( run := &actions_model.ActionSchedule{ Title: strings.SplitN(commit.CommitMessage, "\n", 2)[0], RepoID: input.Repo.ID, + Repo: input.Repo, OwnerID: input.Repo.OwnerID, WorkflowID: dwf.EntryName, TriggerUserID: user_model.ActionsUserID, + TriggerUser: user_model.NewActionsUser(), Ref: ref, CommitSHA: commit.ID.String(), Event: input.Event, @@ -518,6 +559,25 @@ func handleSchedules( Specs: schedules, Content: dwf.Content, } + + vars, err := actions_model.GetVariablesOfRun(ctx, run.ToActionRun()) + if err != nil { + log.Error("GetVariablesOfRun: %v", err) + continue + } + + giteaCtx := GenerateGiteaContext(run.ToActionRun(), nil) + + jobs, err := jobparser.Parse(dwf.Content, jobparser.WithVars(vars), jobparser.WithGitContext(giteaCtx.ToGitHubContext())) + if err != nil { + log.Error("jobparser.Parse: %v", err) + continue + } + + if len(jobs) > 0 && jobs[0].RunName != "" { + run.Title = jobs[0].RunName + } + crons = append(crons, run) } diff --git a/services/actions/schedule_tasks.go b/services/actions/schedule_tasks.go index a30b166063..c029c5a1a2 100644 --- a/services/actions/schedule_tasks.go +++ b/services/actions/schedule_tasks.go @@ -157,6 +157,7 @@ func CreateScheduleTask(ctx context.Context, cron *actions_model.ActionSchedule) if err != nil { log.Error("LoadAttributes: %v", err) } + notify_service.WorkflowRunStatusUpdate(ctx, run.Repo, run.TriggerUser, run) for _, job := range allJobs { notify_service.WorkflowJobStatusUpdate(ctx, run.Repo, run.TriggerUser, job, nil) } diff --git a/services/actions/task.go b/services/actions/task.go index 9c8198206a..6a547c1c12 100644 --- a/services/actions/task.go +++ b/services/actions/task.go @@ -5,6 +5,7 @@ package actions import ( "context" + "errors" "fmt" actions_model "code.gitea.io/gitea/models/actions" @@ -39,7 +40,7 @@ func PickTask(ctx context.Context, runner *actions_model.ActionRunner) (*runnerv if err != nil { return nil, false, err } - return nil, false, fmt.Errorf("runner has been removed") + return nil, false, errors.New("runner has been removed") } } diff --git a/services/actions/workflow.go b/services/actions/workflow.go index dc8a1dd349..233e22b5dd 100644 --- a/services/actions/workflow.go +++ b/services/actions/workflow.go @@ -5,9 +5,6 @@ package actions import ( "fmt" - "net/http" - "net/url" - "path" "strings" actions_model "code.gitea.io/gitea/models/actions" @@ -31,61 +28,8 @@ import ( 
"github.com/nektos/act/pkg/model" ) -func getActionWorkflowPath(commit *git.Commit) string { - paths := []string{".gitea/workflows", ".github/workflows"} - for _, treePath := range paths { - if _, err := commit.SubTree(treePath); err == nil { - return treePath - } - } - return "" -} - -func getActionWorkflowEntry(ctx *context.APIContext, commit *git.Commit, folder string, entry *git.TreeEntry) *api.ActionWorkflow { - cfgUnit := ctx.Repo.Repository.MustGetUnit(ctx, unit.TypeActions) - cfg := cfgUnit.ActionsConfig() - - defaultBranch, _ := commit.GetBranchName() - - workflowURL := fmt.Sprintf("%s/actions/workflows/%s", ctx.Repo.Repository.APIURL(), url.PathEscape(entry.Name())) - workflowRepoURL := fmt.Sprintf("%s/src/branch/%s/%s/%s", ctx.Repo.Repository.HTMLURL(ctx), util.PathEscapeSegments(defaultBranch), util.PathEscapeSegments(folder), url.PathEscape(entry.Name())) - badgeURL := fmt.Sprintf("%s/actions/workflows/%s/badge.svg?branch=%s", ctx.Repo.Repository.HTMLURL(ctx), url.PathEscape(entry.Name()), url.QueryEscape(ctx.Repo.Repository.DefaultBranch)) - - // See https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#get-a-workflow - // State types: - // - active - // - deleted - // - disabled_fork - // - disabled_inactivity - // - disabled_manually - state := "active" - if cfg.IsWorkflowDisabled(entry.Name()) { - state = "disabled_manually" - } - - // The CreatedAt and UpdatedAt fields currently reflect the timestamp of the latest commit, which can later be refined - // by retrieving the first and last commits for the file history. The first commit would indicate the creation date, - // while the last commit would represent the modification date. The DeletedAt could be determined by identifying - // the last commit where the file existed. However, this implementation has not been done here yet, as it would likely - // cause a significant performance degradation. 
- createdAt := commit.Author.When - updatedAt := commit.Author.When - - return &api.ActionWorkflow{ - ID: entry.Name(), - Name: entry.Name(), - Path: path.Join(folder, entry.Name()), - State: state, - CreatedAt: createdAt, - UpdatedAt: updatedAt, - URL: workflowURL, - HTMLURL: workflowRepoURL, - BadgeURL: badgeURL, - } -} - func EnableOrDisableWorkflow(ctx *context.APIContext, workflowID string, isEnable bool) error { - workflow, err := GetActionWorkflow(ctx, workflowID) + workflow, err := convert.GetActionWorkflow(ctx, ctx.Repo.GitRepo, ctx.Repo.Repository, workflowID) if err != nil { return err } @@ -102,44 +46,6 @@ func EnableOrDisableWorkflow(ctx *context.APIContext, workflowID string, isEnabl return repo_model.UpdateRepoUnit(ctx, cfgUnit) } -func ListActionWorkflows(ctx *context.APIContext) ([]*api.ActionWorkflow, error) { - defaultBranchCommit, err := ctx.Repo.GitRepo.GetBranchCommit(ctx.Repo.Repository.DefaultBranch) - if err != nil { - ctx.APIErrorInternal(err) - return nil, err - } - - entries, err := actions.ListWorkflows(defaultBranchCommit) - if err != nil { - ctx.APIError(http.StatusNotFound, err.Error()) - return nil, err - } - - folder := getActionWorkflowPath(defaultBranchCommit) - - workflows := make([]*api.ActionWorkflow, len(entries)) - for i, entry := range entries { - workflows[i] = getActionWorkflowEntry(ctx, defaultBranchCommit, folder, entry) - } - - return workflows, nil -} - -func GetActionWorkflow(ctx *context.APIContext, workflowID string) (*api.ActionWorkflow, error) { - entries, err := ListActionWorkflows(ctx) - if err != nil { - return nil, err - } - - for _, entry := range entries { - if entry.Name == workflowID { - return entry, nil - } - } - - return nil, util.NewNotExistErrorf("workflow %q not found", workflowID) -} - func DispatchActionWorkflow(ctx reqctx.RequestContext, doer *user_model.User, repo *repo_model.Repository, gitRepo *git.Repository, workflowID, ref string, processInputs func(model *model.WorkflowDispatch, inputs map[string]any) error) error { if workflowID == "" { return util.ErrorWrapLocale( @@ -185,29 +91,62 @@ func DispatchActionWorkflow(ctx reqctx.RequestContext, doer *user_model.User, re } // get workflow entry from runTargetCommit - entries, err := actions.ListWorkflows(runTargetCommit) + _, entries, err := actions.ListWorkflows(runTargetCommit) if err != nil { return err } // find workflow from commit var workflows []*jobparser.SingleWorkflow - for _, entry := range entries { - if entry.Name() != workflowID { - continue - } + var entry *git.TreeEntry - content, err := actions.GetContentFromEntry(entry) - if err != nil { - return err - } - workflows, err = jobparser.Parse(content) - if err != nil { - return err + run := &actions_model.ActionRun{ + Title: strings.SplitN(runTargetCommit.CommitMessage, "\n", 2)[0], + RepoID: repo.ID, + Repo: repo, + OwnerID: repo.OwnerID, + WorkflowID: workflowID, + TriggerUserID: doer.ID, + TriggerUser: doer, + Ref: string(refName), + CommitSHA: runTargetCommit.ID.String(), + IsForkPullRequest: false, + Event: "workflow_dispatch", + TriggerEvent: "workflow_dispatch", + Status: actions_model.StatusWaiting, + } + + for _, e := range entries { + if e.Name() != workflowID { + continue } + entry = e break } + if entry == nil { + return util.ErrorWrapLocale( + util.NewNotExistErrorf("workflow %q doesn't exist", workflowID), + "actions.workflow.not_found", workflowID, + ) + } + + content, err := actions.GetContentFromEntry(entry) + if err != nil { + return err + } + + giteaCtx := GenerateGiteaContext(run, nil) 
+ + workflows, err = jobparser.Parse(content, jobparser.WithGitContext(giteaCtx.ToGitHubContext())) + if err != nil { + return err + } + + if len(workflows) > 0 && workflows[0].RunName != "" { + run.Title = workflows[0].RunName + } + if len(workflows) == 0 { return util.ErrorWrapLocale( util.NewNotExistErrorf("workflow %q doesn't exist", workflowID), @@ -236,25 +175,12 @@ func DispatchActionWorkflow(ctx reqctx.RequestContext, doer *user_model.User, re Inputs: inputsWithDefaults, Sender: convert.ToUserWithAccessMode(ctx, doer, perm.AccessModeNone), } + var eventPayload []byte if eventPayload, err = workflowDispatchPayload.JSONPayload(); err != nil { return fmt.Errorf("JSONPayload: %w", err) } - - run := &actions_model.ActionRun{ - Title: strings.SplitN(runTargetCommit.CommitMessage, "\n", 2)[0], - RepoID: repo.ID, - OwnerID: repo.OwnerID, - WorkflowID: workflowID, - TriggerUserID: doer.ID, - Ref: string(refName), - CommitSHA: runTargetCommit.ID.String(), - IsForkPullRequest: false, - Event: "workflow_dispatch", - TriggerEvent: "workflow_dispatch", - EventPayload: string(eventPayload), - Status: actions_model.StatusWaiting, - } + run.EventPayload = string(eventPayload) // cancel running jobs of the same workflow if err := CancelPreviousJobs( @@ -277,9 +203,17 @@ func DispatchActionWorkflow(ctx reqctx.RequestContext, doer *user_model.User, re log.Error("FindRunJobs: %v", err) } CreateCommitStatus(ctx, allJobs...) + if len(allJobs) > 0 { + job := allJobs[0] + err := job.LoadRun(ctx) + if err != nil { + log.Error("LoadRun: %v", err) + } else { + notify_service.WorkflowRunStatusUpdate(ctx, job.Run.Repo, job.Run.TriggerUser, job.Run) + } + } for _, job := range allJobs { notify_service.WorkflowJobStatusUpdate(ctx, repo, doer, job, nil) } - return nil } diff --git a/services/agit/agit.go b/services/agit/agit.go index 1e6ce93312..0fe28c5d66 100644 --- a/services/agit/agit.go +++ b/services/agit/agit.go @@ -204,7 +204,7 @@ func ProcReceive(ctx context.Context, repo *repo_model.Repository, gitRepo *git. return nil, fmt.Errorf("failed to update pull ref. Error: %w", err) } - pull_service.AddToTaskQueue(ctx, pr) + pull_service.StartPullRequestCheckImmediately(ctx, pr) err = pr.LoadIssue(ctx) if err != nil { return nil, fmt.Errorf("failed to load pull issue. 
Error: %w", err) diff --git a/services/asymkey/commit.go b/services/asymkey/commit.go index df29133972..148f51fd10 100644 --- a/services/asymkey/commit.go +++ b/services/asymkey/commit.go @@ -5,15 +5,20 @@ package asymkey import ( "context" + "fmt" + "slices" "strings" asymkey_model "code.gitea.io/gitea/models/asymkey" "code.gitea.io/gitea/models/db" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/cache" + "code.gitea.io/gitea/modules/cachegroup" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" + "github.com/42wim/sshsig" "github.com/ProtonMail/go-crypto/openpgp/packet" ) @@ -57,7 +62,7 @@ func ParseCommitWithSignatureCommitter(ctx context.Context, c *git.Commit, commi // If this a SSH signature handle it differently if strings.HasPrefix(c.Signature.Signature, "-----BEGIN SSH SIGNATURE-----") { - return asymkey_model.ParseCommitWithSSHSignature(ctx, c, committer) + return ParseCommitWithSSHSignature(ctx, c, committer) } // Parsing signature @@ -113,7 +118,7 @@ func ParseCommitWithSignatureCommitter(ctx context.Context, c *git.Commit, commi } } - committerEmailAddresses, _ := user_model.GetEmailAddresses(ctx, committer.ID) + committerEmailAddresses, _ := cache.GetWithContextCache(ctx, cachegroup.UserEmailAddresses, committer.ID, user_model.GetEmailAddresses) activated := false for _, e := range committerEmailAddresses { if e.IsActivated && strings.EqualFold(e.Email, c.Committer.Email) { @@ -207,10 +212,9 @@ func checkKeyEmails(ctx context.Context, email string, keys ...*asymkey_model.GP } if key.Verified && key.OwnerID != 0 { if uid != key.OwnerID { - userEmails, _ = user_model.GetEmailAddresses(ctx, key.OwnerID) + userEmails, _ = cache.GetWithContextCache(ctx, cachegroup.UserEmailAddresses, key.OwnerID, user_model.GetEmailAddresses) uid = key.OwnerID - user = &user_model.User{ID: uid} - _, _ = user_model.GetUser(ctx, user) + user, _ = cache.GetWithContextCache(ctx, cachegroup.User, uid, user_model.GetUserByID) } for _, e := range userEmails { if e.IsActivated && (email == "" || strings.EqualFold(e.Email, email)) { @@ -229,10 +233,7 @@ func HashAndVerifyForKeyID(ctx context.Context, sig *packet.Signature, payload s if keyID == "" { return nil } - keys, err := db.Find[asymkey_model.GPGKey](ctx, asymkey_model.FindGPGKeyOptions{ - KeyID: keyID, - IncludeSubKeys: true, - }) + keys, err := cache.GetWithContextCache(ctx, cachegroup.GPGKeyWithSubKeys, keyID, asymkey_model.FindGPGKeyWithSubKeys) if err != nil { log.Error("GetGPGKeysByKeyID: %v", err) return &asymkey_model.CommitVerification{ @@ -247,10 +248,7 @@ func HashAndVerifyForKeyID(ctx context.Context, sig *packet.Signature, payload s for _, key := range keys { var primaryKeys []*asymkey_model.GPGKey if key.PrimaryKeyID != "" { - primaryKeys, err = db.Find[asymkey_model.GPGKey](ctx, asymkey_model.FindGPGKeyOptions{ - KeyID: key.PrimaryKeyID, - IncludeSubKeys: true, - }) + primaryKeys, err = cache.GetWithContextCache(ctx, cachegroup.GPGKeyWithSubKeys, key.PrimaryKeyID, asymkey_model.FindGPGKeyWithSubKeys) if err != nil { log.Error("GetGPGKeysByKeyID: %v", err) return &asymkey_model.CommitVerification{ @@ -270,8 +268,8 @@ func HashAndVerifyForKeyID(ctx context.Context, sig *packet.Signature, payload s Name: name, Email: email, } - if key.OwnerID != 0 { - owner, err := user_model.GetUserByID(ctx, key.OwnerID) + if key.OwnerID > 0 { + owner, err := cache.GetWithContextCache(ctx, cachegroup.User, key.OwnerID, user_model.GetUserByID) if err == nil { signer = owner 
} else if !user_model.IsErrUserNotExist(err) { @@ -361,3 +359,117 @@ func VerifyWithGPGSettings(ctx context.Context, gpgSettings *git.GPGSettings, si } return nil } + +func verifySSHCommitVerificationByInstanceKey(c *git.Commit, committerUser, signerUser *user_model.User, committerGitEmail, publicKeyContent string) *asymkey_model.CommitVerification { + fingerprint, err := asymkey_model.CalcFingerprint(publicKeyContent) + if err != nil { + log.Error("Error calculating the fingerprint public key %q, err: %v", publicKeyContent, err) + return nil + } + sshPubKey := &asymkey_model.PublicKey{ + Verified: true, + Content: publicKeyContent, + Fingerprint: fingerprint, + HasUsed: true, + } + return verifySSHCommitVerification(c.Signature.Signature, c.Signature.Payload, sshPubKey, committerUser, signerUser, committerGitEmail) +} + +// ParseCommitWithSSHSignature check if signature is good against keystore. +func ParseCommitWithSSHSignature(ctx context.Context, c *git.Commit, committerUser *user_model.User) *asymkey_model.CommitVerification { + // Now try to associate the signature with the committer, if present + if committerUser.ID != 0 { + keys, err := db.Find[asymkey_model.PublicKey](ctx, asymkey_model.FindPublicKeyOptions{ + OwnerID: committerUser.ID, + NotKeytype: asymkey_model.KeyTypePrincipal, + }) + if err != nil { // Skipping failed to get ssh keys of user + log.Error("ListPublicKeys: %v", err) + return &asymkey_model.CommitVerification{ + CommittingUser: committerUser, + Verified: false, + Reason: "gpg.error.failed_retrieval_gpg_keys", + } + } + + committerEmailAddresses, err := cache.GetWithContextCache(ctx, cachegroup.UserEmailAddresses, committerUser.ID, user_model.GetEmailAddresses) + if err != nil { + log.Error("GetEmailAddresses: %v", err) + } + + activated := false + for _, e := range committerEmailAddresses { + if e.IsActivated && strings.EqualFold(e.Email, c.Committer.Email) { + activated = true + break + } + } + + for _, k := range keys { + if k.Verified && activated { + commitVerification := verifySSHCommitVerification(c.Signature.Signature, c.Signature.Payload, k, committerUser, committerUser, c.Committer.Email) + if commitVerification != nil { + return commitVerification + } + } + } + } + + // Try the pre-set trusted keys (for key-rotation purpose) + // At the moment, we still use the SigningName&SigningEmail for the rotated keys. + // Maybe in the future we can extend the key format to "ssh-xxx .... old-user@example.com" to support different signer emails. 
+ for _, k := range setting.Repository.Signing.TrustedSSHKeys { + signerUser := &user_model.User{ + Name: setting.Repository.Signing.SigningName, + Email: setting.Repository.Signing.SigningEmail, + } + commitVerification := verifySSHCommitVerificationByInstanceKey(c, committerUser, signerUser, c.Committer.Email, k) + if commitVerification != nil && commitVerification.Verified { + return commitVerification + } + } + + // Try the configured instance-wide SSH public key + if setting.Repository.Signing.SigningFormat == git.SigningKeyFormatSSH && !slices.Contains([]string{"", "default", "none"}, setting.Repository.Signing.SigningKey) { + gpgSettings := git.GPGSettings{ + Sign: true, + KeyID: setting.Repository.Signing.SigningKey, + Name: setting.Repository.Signing.SigningName, + Email: setting.Repository.Signing.SigningEmail, + Format: setting.Repository.Signing.SigningFormat, + } + signerUser := &user_model.User{ + Name: gpgSettings.Name, + Email: gpgSettings.Email, + } + if err := gpgSettings.LoadPublicKeyContent(); err != nil { + log.Error("Error getting instance-wide SSH signing key %q, err: %v", gpgSettings.KeyID, err) + } else { + commitVerification := verifySSHCommitVerificationByInstanceKey(c, committerUser, signerUser, gpgSettings.Email, gpgSettings.PublicKeyContent) + if commitVerification != nil && commitVerification.Verified { + return commitVerification + } + } + } + + return &asymkey_model.CommitVerification{ + CommittingUser: committerUser, + Verified: false, + Reason: asymkey_model.NoKeyFound, + } +} + +func verifySSHCommitVerification(sig, payload string, k *asymkey_model.PublicKey, committer, signer *user_model.User, email string) *asymkey_model.CommitVerification { + if err := sshsig.Verify(strings.NewReader(payload), []byte(sig), []byte(k.Content), "git"); err != nil { + return nil + } + + return &asymkey_model.CommitVerification{ // Everything is ok + CommittingUser: committer, + Verified: true, + Reason: fmt.Sprintf("%s / %s", signer.Name, k.Fingerprint), + SigningUser: signer, + SigningSSHKey: k, + SigningEmail: email, + } +} diff --git a/services/asymkey/commit_test.go b/services/asymkey/commit_test.go new file mode 100644 index 0000000000..0438209a61 --- /dev/null +++ b/services/asymkey/commit_test.go @@ -0,0 +1,54 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package asymkey + +import ( + "strings" + "testing" + + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/test" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestParseCommitWithSSHSignature(t *testing.T) { + // Here we only test the TrustedSSHKeys. 
The complete signing test is in tests/integration/gpg_ssh_git_test.go + t.Run("TrustedSSHKey", func(t *testing.T) { + defer test.MockVariableValue(&setting.Repository.Signing.SigningName, "gitea")() + defer test.MockVariableValue(&setting.Repository.Signing.SigningEmail, "gitea@fake.local")() + defer test.MockVariableValue(&setting.Repository.Signing.TrustedSSHKeys, []string{"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIH6Y4idVaW3E+bLw1uqoAfJD7o5Siu+HqS51E9oQLPE9"})() + + commit, err := git.CommitFromReader(nil, git.Sha1ObjectFormat.EmptyObjectID(), strings.NewReader(`tree 9a93ffa76e8b72bdb6431910b3a506fa2b39f42e +author User Two <user2@example.com> 1749230009 +0200 +committer User Two <user2@example.com> 1749230009 +0200 +gpgsig -----BEGIN SSH SIGNATURE----- + U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAgfpjiJ1VpbcT5svDW6qgB8kPujl + KK74epLnUT2hAs8T0AAAADZ2l0AAAAAAAAAAZzaGE1MTIAAABTAAAAC3NzaC1lZDI1NTE5 + AAAAQDX2t2iHuuLxEWHLJetYXKsgayv3c43r0pJNfAzdLN55Q65pC5M7rG6++gT2bxcpOu + Y6EXbpLqia9sunEF3+LQY= + -----END SSH SIGNATURE----- + +Initial commit with signed file +`)) + require.NoError(t, err) + committingUser := &user_model.User{ + ID: 2, + Name: "User Two", + Email: "user2@example.com", + } + ret := ParseCommitWithSSHSignature(t.Context(), commit, committingUser) + require.NotNil(t, ret) + assert.True(t, ret.Verified) + assert.False(t, ret.Warning) + assert.Equal(t, committingUser, ret.CommittingUser) + if assert.NotNil(t, ret.SigningUser) { + assert.Equal(t, "gitea", ret.SigningUser.Name) + assert.Equal(t, "gitea@fake.local", ret.SigningUser.Email) + } + }) +} diff --git a/services/asymkey/sign.go b/services/asymkey/sign.go index da265dec27..f94462ea46 100644 --- a/services/asymkey/sign.go +++ b/services/asymkey/sign.go @@ -6,6 +6,7 @@ package asymkey import ( "context" "fmt" + "os" "strings" asymkey_model "code.gitea.io/gitea/models/asymkey" @@ -85,9 +86,9 @@ func IsErrWontSign(err error) bool { } // SigningKey returns the KeyID and git Signature for the repo -func SigningKey(ctx context.Context, repoPath string) (string, *git.Signature) { +func SigningKey(ctx context.Context, repoPath string) (*git.SigningKey, *git.Signature) { if setting.Repository.Signing.SigningKey == "none" { - return "", nil + return nil, nil } if setting.Repository.Signing.SigningKey == "default" || setting.Repository.Signing.SigningKey == "" { @@ -95,53 +96,77 @@ func SigningKey(ctx context.Context, repoPath string) (string, *git.Signature) { value, _, _ := git.NewCommand("config", "--get", "commit.gpgsign").RunStdString(ctx, &git.RunOpts{Dir: repoPath}) sign, valid := git.ParseBool(strings.TrimSpace(value)) if !sign || !valid { - return "", nil + return nil, nil } + format, _, _ := git.NewCommand("config", "--default", git.SigningKeyFormatOpenPGP, "--get", "gpg.format").RunStdString(ctx, &git.RunOpts{Dir: repoPath}) signingKey, _, _ := git.NewCommand("config", "--get", "user.signingkey").RunStdString(ctx, &git.RunOpts{Dir: repoPath}) signingName, _, _ := git.NewCommand("config", "--get", "user.name").RunStdString(ctx, &git.RunOpts{Dir: repoPath}) signingEmail, _, _ := git.NewCommand("config", "--get", "user.email").RunStdString(ctx, &git.RunOpts{Dir: repoPath}) - return strings.TrimSpace(signingKey), &git.Signature{ - Name: strings.TrimSpace(signingName), - Email: strings.TrimSpace(signingEmail), + + if strings.TrimSpace(signingKey) == "" { + return nil, nil } + + return &git.SigningKey{ + KeyID: strings.TrimSpace(signingKey), + Format: strings.TrimSpace(format), + }, &git.Signature{ + Name: 
strings.TrimSpace(signingName), + Email: strings.TrimSpace(signingEmail), + } } - return setting.Repository.Signing.SigningKey, &git.Signature{ - Name: setting.Repository.Signing.SigningName, - Email: setting.Repository.Signing.SigningEmail, + if setting.Repository.Signing.SigningKey == "" { + return nil, nil } + + return &git.SigningKey{ + KeyID: setting.Repository.Signing.SigningKey, + Format: setting.Repository.Signing.SigningFormat, + }, &git.Signature{ + Name: setting.Repository.Signing.SigningName, + Email: setting.Repository.Signing.SigningEmail, + } } // PublicSigningKey gets the public signing key within a provided repository directory -func PublicSigningKey(ctx context.Context, repoPath string) (string, error) { +func PublicSigningKey(ctx context.Context, repoPath string) (content, format string, err error) { signingKey, _ := SigningKey(ctx, repoPath) - if signingKey == "" { - return "", nil + if signingKey == nil { + return "", "", nil + } + if signingKey.Format == git.SigningKeyFormatSSH { + content, err := os.ReadFile(signingKey.KeyID) + if err != nil { + log.Error("Unable to read SSH public key file in %s: %s, %v", repoPath, signingKey, err) + return "", signingKey.Format, err + } + return string(content), signingKey.Format, nil } content, stderr, err := process.GetManager().ExecDir(ctx, -1, repoPath, - "gpg --export -a", "gpg", "--export", "-a", signingKey) + "gpg --export -a", "gpg", "--export", "-a", signingKey.KeyID) if err != nil { log.Error("Unable to get default signing key in %s: %s, %s, %v", repoPath, signingKey, stderr, err) - return "", err + return "", signingKey.Format, err } - return content, nil + return content, signingKey.Format, nil } // SignInitialCommit determines if we should sign the initial commit to this repository -func SignInitialCommit(ctx context.Context, repoPath string, u *user_model.User) (bool, string, *git.Signature, error) { +func SignInitialCommit(ctx context.Context, repoPath string, u *user_model.User) (bool, *git.SigningKey, *git.Signature, error) { rules := signingModeFromStrings(setting.Repository.Signing.InitialCommit) signingKey, sig := SigningKey(ctx, repoPath) - if signingKey == "" { - return false, "", nil, &ErrWontSign{noKey} + if signingKey == nil { + return false, nil, nil, &ErrWontSign{noKey} } Loop: for _, rule := range rules { switch rule { case never: - return false, "", nil, &ErrWontSign{never} + return false, nil, nil, &ErrWontSign{never} case always: break Loop case pubkey: @@ -150,18 +175,18 @@ Loop: IncludeSubKeys: true, }) if err != nil { - return false, "", nil, err + return false, nil, nil, err } if len(keys) == 0 { - return false, "", nil, &ErrWontSign{pubkey} + return false, nil, nil, &ErrWontSign{pubkey} } case twofa: twofaModel, err := auth.GetTwoFactorByUID(ctx, u.ID) if err != nil && !auth.IsErrTwoFactorNotEnrolled(err) { - return false, "", nil, err + return false, nil, nil, err } if twofaModel == nil { - return false, "", nil, &ErrWontSign{twofa} + return false, nil, nil, &ErrWontSign{twofa} } } } @@ -169,19 +194,19 @@ Loop: } // SignWikiCommit determines if we should sign the commits to this repository wiki -func SignWikiCommit(ctx context.Context, repo *repo_model.Repository, u *user_model.User) (bool, string, *git.Signature, error) { +func SignWikiCommit(ctx context.Context, repo *repo_model.Repository, u *user_model.User) (bool, *git.SigningKey, *git.Signature, error) { repoWikiPath := repo.WikiPath() rules := signingModeFromStrings(setting.Repository.Signing.Wiki) signingKey, sig := SigningKey(ctx, 
repoWikiPath) - if signingKey == "" { - return false, "", nil, &ErrWontSign{noKey} + if signingKey == nil { + return false, nil, nil, &ErrWontSign{noKey} } Loop: for _, rule := range rules { switch rule { case never: - return false, "", nil, &ErrWontSign{never} + return false, nil, nil, &ErrWontSign{never} case always: break Loop case pubkey: @@ -190,35 +215,35 @@ Loop: IncludeSubKeys: true, }) if err != nil { - return false, "", nil, err + return false, nil, nil, err } if len(keys) == 0 { - return false, "", nil, &ErrWontSign{pubkey} + return false, nil, nil, &ErrWontSign{pubkey} } case twofa: twofaModel, err := auth.GetTwoFactorByUID(ctx, u.ID) if err != nil && !auth.IsErrTwoFactorNotEnrolled(err) { - return false, "", nil, err + return false, nil, nil, err } if twofaModel == nil { - return false, "", nil, &ErrWontSign{twofa} + return false, nil, nil, &ErrWontSign{twofa} } case parentSigned: - gitRepo, err := gitrepo.OpenWikiRepository(ctx, repo) + gitRepo, err := gitrepo.OpenRepository(ctx, repo.WikiStorageRepo()) if err != nil { - return false, "", nil, err + return false, nil, nil, err } defer gitRepo.Close() commit, err := gitRepo.GetCommit("HEAD") if err != nil { - return false, "", nil, err + return false, nil, nil, err } if commit.Signature == nil { - return false, "", nil, &ErrWontSign{parentSigned} + return false, nil, nil, &ErrWontSign{parentSigned} } verification := ParseCommitWithSignature(ctx, commit) if !verification.Verified { - return false, "", nil, &ErrWontSign{parentSigned} + return false, nil, nil, &ErrWontSign{parentSigned} } } } @@ -226,18 +251,18 @@ Loop: } // SignCRUDAction determines if we should sign a CRUD commit to this repository -func SignCRUDAction(ctx context.Context, repoPath string, u *user_model.User, tmpBasePath, parentCommit string) (bool, string, *git.Signature, error) { +func SignCRUDAction(ctx context.Context, repoPath string, u *user_model.User, tmpBasePath, parentCommit string) (bool, *git.SigningKey, *git.Signature, error) { rules := signingModeFromStrings(setting.Repository.Signing.CRUDActions) signingKey, sig := SigningKey(ctx, repoPath) - if signingKey == "" { - return false, "", nil, &ErrWontSign{noKey} + if signingKey == nil { + return false, nil, nil, &ErrWontSign{noKey} } Loop: for _, rule := range rules { switch rule { case never: - return false, "", nil, &ErrWontSign{never} + return false, nil, nil, &ErrWontSign{never} case always: break Loop case pubkey: @@ -246,35 +271,35 @@ Loop: IncludeSubKeys: true, }) if err != nil { - return false, "", nil, err + return false, nil, nil, err } if len(keys) == 0 { - return false, "", nil, &ErrWontSign{pubkey} + return false, nil, nil, &ErrWontSign{pubkey} } case twofa: twofaModel, err := auth.GetTwoFactorByUID(ctx, u.ID) if err != nil && !auth.IsErrTwoFactorNotEnrolled(err) { - return false, "", nil, err + return false, nil, nil, err } if twofaModel == nil { - return false, "", nil, &ErrWontSign{twofa} + return false, nil, nil, &ErrWontSign{twofa} } case parentSigned: gitRepo, err := git.OpenRepository(ctx, tmpBasePath) if err != nil { - return false, "", nil, err + return false, nil, nil, err } defer gitRepo.Close() commit, err := gitRepo.GetCommit(parentCommit) if err != nil { - return false, "", nil, err + return false, nil, nil, err } if commit.Signature == nil { - return false, "", nil, &ErrWontSign{parentSigned} + return false, nil, nil, &ErrWontSign{parentSigned} } verification := ParseCommitWithSignature(ctx, commit) if !verification.Verified { - return false, "", nil, 
&ErrWontSign{parentSigned} + return false, nil, nil, &ErrWontSign{parentSigned} } } } @@ -282,16 +307,16 @@ Loop: } // SignMerge determines if we should sign a PR merge commit to the base repository -func SignMerge(ctx context.Context, pr *issues_model.PullRequest, u *user_model.User, tmpBasePath, baseCommit, headCommit string) (bool, string, *git.Signature, error) { +func SignMerge(ctx context.Context, pr *issues_model.PullRequest, u *user_model.User, tmpBasePath, baseCommit, headCommit string) (bool, *git.SigningKey, *git.Signature, error) { if err := pr.LoadBaseRepo(ctx); err != nil { log.Error("Unable to get Base Repo for pull request") - return false, "", nil, err + return false, nil, nil, err } repo := pr.BaseRepo signingKey, signer := SigningKey(ctx, repo.RepoPath()) - if signingKey == "" { - return false, "", nil, &ErrWontSign{noKey} + if signingKey == nil { + return false, nil, nil, &ErrWontSign{noKey} } rules := signingModeFromStrings(setting.Repository.Signing.Merges) @@ -302,7 +327,7 @@ Loop: for _, rule := range rules { switch rule { case never: - return false, "", nil, &ErrWontSign{never} + return false, nil, nil, &ErrWontSign{never} case always: break Loop case pubkey: @@ -311,91 +336,91 @@ Loop: IncludeSubKeys: true, }) if err != nil { - return false, "", nil, err + return false, nil, nil, err } if len(keys) == 0 { - return false, "", nil, &ErrWontSign{pubkey} + return false, nil, nil, &ErrWontSign{pubkey} } case twofa: twofaModel, err := auth.GetTwoFactorByUID(ctx, u.ID) if err != nil && !auth.IsErrTwoFactorNotEnrolled(err) { - return false, "", nil, err + return false, nil, nil, err } if twofaModel == nil { - return false, "", nil, &ErrWontSign{twofa} + return false, nil, nil, &ErrWontSign{twofa} } case approved: protectedBranch, err := git_model.GetFirstMatchProtectedBranchRule(ctx, repo.ID, pr.BaseBranch) if err != nil { - return false, "", nil, err + return false, nil, nil, err } if protectedBranch == nil { - return false, "", nil, &ErrWontSign{approved} + return false, nil, nil, &ErrWontSign{approved} } if issues_model.GetGrantedApprovalsCount(ctx, protectedBranch, pr) < 1 { - return false, "", nil, &ErrWontSign{approved} + return false, nil, nil, &ErrWontSign{approved} } case baseSigned: if gitRepo == nil { gitRepo, err = git.OpenRepository(ctx, tmpBasePath) if err != nil { - return false, "", nil, err + return false, nil, nil, err } defer gitRepo.Close() } commit, err := gitRepo.GetCommit(baseCommit) if err != nil { - return false, "", nil, err + return false, nil, nil, err } verification := ParseCommitWithSignature(ctx, commit) if !verification.Verified { - return false, "", nil, &ErrWontSign{baseSigned} + return false, nil, nil, &ErrWontSign{baseSigned} } case headSigned: if gitRepo == nil { gitRepo, err = git.OpenRepository(ctx, tmpBasePath) if err != nil { - return false, "", nil, err + return false, nil, nil, err } defer gitRepo.Close() } commit, err := gitRepo.GetCommit(headCommit) if err != nil { - return false, "", nil, err + return false, nil, nil, err } verification := ParseCommitWithSignature(ctx, commit) if !verification.Verified { - return false, "", nil, &ErrWontSign{headSigned} + return false, nil, nil, &ErrWontSign{headSigned} } case commitsSigned: if gitRepo == nil { gitRepo, err = git.OpenRepository(ctx, tmpBasePath) if err != nil { - return false, "", nil, err + return false, nil, nil, err } defer gitRepo.Close() } commit, err := gitRepo.GetCommit(headCommit) if err != nil { - return false, "", nil, err + return false, nil, nil, err } verification 
:= ParseCommitWithSignature(ctx, commit) if !verification.Verified { - return false, "", nil, &ErrWontSign{commitsSigned} + return false, nil, nil, &ErrWontSign{commitsSigned} } // need to work out merge-base mergeBaseCommit, _, err := gitRepo.GetMergeBase("", baseCommit, headCommit) if err != nil { - return false, "", nil, err + return false, nil, nil, err } commitList, err := commit.CommitsBeforeUntil(mergeBaseCommit) if err != nil { - return false, "", nil, err + return false, nil, nil, err } for _, commit := range commitList { verification := ParseCommitWithSignature(ctx, commit) if !verification.Verified { - return false, "", nil, &ErrWontSign{commitsSigned} + return false, nil, nil, &ErrWontSign{commitsSigned} } } } diff --git a/services/attachment/attachment_test.go b/services/attachment/attachment_test.go index 142bcfe629..65475836be 100644 --- a/services/attachment/attachment_test.go +++ b/services/attachment/attachment_test.go @@ -41,6 +41,6 @@ func TestUploadAttachment(t *testing.T) { attachment, err := repo_model.GetAttachmentByUUID(db.DefaultContext, attach.UUID) assert.NoError(t, err) - assert.EqualValues(t, user.ID, attachment.UploaderID) + assert.Equal(t, user.ID, attachment.UploaderID) assert.Equal(t, int64(0), attachment.DownloadCount) } diff --git a/services/auth/auth.go b/services/auth/auth.go index f7deeb4c50..fb6612290b 100644 --- a/services/auth/auth.go +++ b/services/auth/auth.go @@ -62,14 +62,14 @@ func (a *authPathDetector) isAPIPath() bool { // isAttachmentDownload check if request is a file download (GET) with URL to an attachment func (a *authPathDetector) isAttachmentDownload() bool { - return strings.HasPrefix(a.req.URL.Path, "/attachments/") && a.req.Method == "GET" + return strings.HasPrefix(a.req.URL.Path, "/attachments/") && a.req.Method == http.MethodGet } func (a *authPathDetector) isFeedRequest(req *http.Request) bool { if !setting.Other.EnableFeed { return false } - if req.Method != "GET" { + if req.Method != http.MethodGet { return false } return a.vars.feedPathRe.MatchString(req.URL.Path) || a.vars.feedRefPathRe.MatchString(req.URL.Path) diff --git a/services/auth/auth_test.go b/services/auth/auth_test.go index b8d3396163..c45f312c90 100644 --- a/services/auth/auth_test.go +++ b/services/auth/auth_test.go @@ -97,7 +97,7 @@ func Test_isGitRawOrLFSPath(t *testing.T) { defer test.MockVariableValue(&setting.LFS.StartServer)() for _, tt := range tests { t.Run(tt.path, func(t *testing.T) { - req, _ := http.NewRequest("POST", "http://localhost"+tt.path, nil) + req, _ := http.NewRequest(http.MethodPost, "http://localhost"+tt.path, nil) setting.LFS.StartServer = false assert.Equal(t, tt.want, newAuthPathDetector(req).isGitRawOrAttachOrLFSPath()) @@ -119,7 +119,7 @@ func Test_isGitRawOrLFSPath(t *testing.T) { } for _, tt := range lfsTests { t.Run(tt, func(t *testing.T) { - req, _ := http.NewRequest("POST", tt, nil) + req, _ := http.NewRequest(http.MethodPost, tt, nil) setting.LFS.StartServer = false got := newAuthPathDetector(req).isGitRawOrAttachOrLFSPath() assert.Equalf(t, setting.LFS.StartServer, got, "isGitOrLFSPath(%q) = %v, want %v, %v", tt, got, setting.LFS.StartServer, globalVars().gitRawOrAttachPathRe.MatchString(tt)) @@ -148,7 +148,7 @@ func Test_isFeedRequest(t *testing.T) { } for _, tt := range tests { t.Run(tt.path, func(t *testing.T) { - req, _ := http.NewRequest("GET", "http://localhost"+tt.path, nil) + req, _ := http.NewRequest(http.MethodGet, "http://localhost"+tt.path, nil) assert.Equal(t, tt.want, 
newAuthPathDetector(req).isFeedRequest(req)) }) } diff --git a/services/auth/basic.go b/services/auth/basic.go index e22b9e1eb7..b2bd14ef5d 100644 --- a/services/auth/basic.go +++ b/services/auth/basic.go @@ -7,12 +7,11 @@ package auth import ( "errors" "net/http" - "strings" actions_model "code.gitea.io/gitea/models/actions" auth_model "code.gitea.io/gitea/models/auth" user_model "code.gitea.io/gitea/models/user" - "code.gitea.io/gitea/modules/base" + "code.gitea.io/gitea/modules/auth/httpauth" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" @@ -54,17 +53,15 @@ func (b *Basic) Verify(req *http.Request, w http.ResponseWriter, store DataStore return nil, nil } - baHead := req.Header.Get("Authorization") - if len(baHead) == 0 { + authHeader := req.Header.Get("Authorization") + if authHeader == "" { return nil, nil } - - auths := strings.SplitN(baHead, " ", 2) - if len(auths) != 2 || (strings.ToLower(auths[0]) != "basic") { + parsed, ok := httpauth.ParseAuthorizationHeader(authHeader) + if !ok || parsed.BasicAuth == nil { return nil, nil } - - uname, passwd, _ := base.BasicAuthDecode(auths[1]) + uname, passwd := parsed.BasicAuth.Username, parsed.BasicAuth.Password // Check if username or password is a token isUsernameToken := len(passwd) == 0 || passwd == "x-oauth-basic" @@ -142,14 +139,14 @@ func (b *Basic) Verify(req *http.Request, w http.ResponseWriter, store DataStore return nil, err } - if skipper, ok := source.Cfg.(LocalTwoFASkipper); !ok || !skipper.IsSkipLocalTwoFA() { - // Check if the user has webAuthn registration + if !source.TwoFactorShouldSkip() { + // Check if the user has WebAuthn registration hasWebAuthn, err := auth_model.HasWebAuthnRegistrationsByUID(req.Context(), u.ID) if err != nil { return nil, err } if hasWebAuthn { - return nil, errors.New("Basic authorization is not allowed while webAuthn enrolled") + return nil, errors.New("basic authorization is not allowed while WebAuthn enrolled") } if err := validateTOTP(req, u); err != nil { diff --git a/services/auth/httpsign.go b/services/auth/httpsign.go index 83a36bef23..25e96ff32d 100644 --- a/services/auth/httpsign.go +++ b/services/auth/httpsign.go @@ -134,7 +134,7 @@ func VerifyCert(r *http.Request) (*asymkey_model.PublicKey, error) { // Check if it's really a ssh certificate cert, ok := pk.(*ssh.Certificate) if !ok { - return nil, fmt.Errorf("no certificate found") + return nil, errors.New("no certificate found") } c := &ssh.CertChecker{ @@ -153,7 +153,7 @@ func VerifyCert(r *http.Request) (*asymkey_model.PublicKey, error) { // check the CA of the cert if !c.IsUserAuthority(cert.SignatureKey) { - return nil, fmt.Errorf("CA check failed") + return nil, errors.New("CA check failed") } // Create a verifier @@ -191,7 +191,7 @@ func VerifyCert(r *http.Request) (*asymkey_model.PublicKey, error) { } // No public key matching a principal in the certificate is registered in gitea - return nil, fmt.Errorf("no valid principal found") + return nil, errors.New("no valid principal found") } // doVerify iterates across the provided public keys attempting the verify the current request against each key in turn diff --git a/services/auth/interface.go b/services/auth/interface.go index 275b4dd56c..e7eccecea0 100644 --- a/services/auth/interface.go +++ b/services/auth/interface.go @@ -35,11 +35,6 @@ type PasswordAuthenticator interface { Authenticate(ctx context.Context, user *user_model.User, login, password string) (*user_model.User, error) } -// LocalTwoFASkipper 
represents a source of authentication that can skip local 2fa -type LocalTwoFASkipper interface { - IsSkipLocalTwoFA() bool -} - // SynchronizableSource represents a source that can synchronize users type SynchronizableSource interface { Sync(ctx context.Context, updateExisting bool) error diff --git a/services/auth/oauth2.go b/services/auth/oauth2.go index 66cc686809..7df6f4638e 100644 --- a/services/auth/oauth2.go +++ b/services/auth/oauth2.go @@ -13,6 +13,7 @@ import ( actions_model "code.gitea.io/gitea/models/actions" auth_model "code.gitea.io/gitea/models/auth" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/auth/httpauth" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" @@ -97,9 +98,9 @@ func parseToken(req *http.Request) (string, bool) { // check header token if auHead := req.Header.Get("Authorization"); auHead != "" { - auths := strings.Fields(auHead) - if len(auths) == 2 && (auths[0] == "token" || strings.ToLower(auths[0]) == "bearer") { - return auths[1], true + parsed, ok := httpauth.ParseAuthorizationHeader(auHead) + if ok && parsed.BearerToken != nil { + return parsed.BearerToken.Token, true } } return "", false diff --git a/services/auth/oauth2_test.go b/services/auth/oauth2_test.go index 9edf18d58e..f003742a94 100644 --- a/services/auth/oauth2_test.go +++ b/services/auth/oauth2_test.go @@ -26,7 +26,7 @@ func TestUserIDFromToken(t *testing.T) { o := OAuth2{} uid := o.userIDFromToken(t.Context(), token, ds) - assert.Equal(t, int64(user_model.ActionsUserID), uid) + assert.Equal(t, user_model.ActionsUserID, uid) assert.Equal(t, true, ds["IsActionsToken"]) assert.Equal(t, ds["ActionsTaskID"], int64(RunningTaskID)) }) diff --git a/services/auth/source/db/source.go b/services/auth/source/db/source.go index bb2270cbd6..90baa61f5b 100644 --- a/services/auth/source/db/source.go +++ b/services/auth/source/db/source.go @@ -11,7 +11,9 @@ import ( ) // Source is a password authentication service -type Source struct{} +type Source struct { + auth.ConfigBase `json:"-"` +} // FromDB fills up an OAuth2Config from serialized format. func (source *Source) FromDB(bs []byte) error { diff --git a/services/auth/source/ldap/assert_interface_test.go b/services/auth/source/ldap/assert_interface_test.go index 33347687dc..8e8accd668 100644 --- a/services/auth/source/ldap/assert_interface_test.go +++ b/services/auth/source/ldap/assert_interface_test.go @@ -15,13 +15,11 @@ import ( type sourceInterface interface { auth.PasswordAuthenticator auth.SynchronizableSource - auth.LocalTwoFASkipper auth_model.SSHKeyProvider auth_model.Config auth_model.SkipVerifiable auth_model.HasTLSer auth_model.UseTLSer - auth_model.SourceSettable } var _ (sourceInterface) = &ldap.Source{} diff --git a/services/auth/source/ldap/source.go b/services/auth/source/ldap/source.go index 963cdba7c2..d49dbc45ce 100644 --- a/services/auth/source/ldap/source.go +++ b/services/auth/source/ldap/source.go @@ -24,6 +24,8 @@ import ( // Source Basic LDAP authentication service type Source struct { + auth.ConfigBase `json:"-"` + Name string // canonical name (ie. 
corporate.ad) Host string // LDAP host Port int // port number @@ -54,9 +56,6 @@ type Source struct { GroupTeamMap string // Map LDAP groups to teams GroupTeamMapRemoval bool // Remove user from teams which are synchronized and user is not a member of the corresponding LDAP group UserUID string // User Attribute listed in Group - SkipLocalTwoFA bool `json:",omitempty"` // Skip Local 2fa for users authenticated with this source - - authSource *auth.Source // reference to the authSource } // FromDB fills up a LDAPConfig from serialized format. @@ -109,11 +108,6 @@ func (source *Source) ProvidesSSHKeys() bool { return strings.TrimSpace(source.AttributeSSHPublicKey) != "" } -// SetAuthSource sets the related AuthSource -func (source *Source) SetAuthSource(authSource *auth.Source) { - source.authSource = authSource -} - func init() { auth.RegisterTypeConfig(auth.LDAP, &Source{}) auth.RegisterTypeConfig(auth.DLDAP, &Source{}) diff --git a/services/auth/source/ldap/source_authenticate.go b/services/auth/source/ldap/source_authenticate.go index 6a6c60cd40..6005a4744a 100644 --- a/services/auth/source/ldap/source_authenticate.go +++ b/services/auth/source/ldap/source_authenticate.go @@ -5,7 +5,6 @@ package ldap import ( "context" - "fmt" "strings" asymkey_model "code.gitea.io/gitea/models/asymkey" @@ -26,7 +25,7 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u if user != nil { loginName = user.LoginName } - sr := source.SearchEntry(loginName, password, source.authSource.Type == auth.DLDAP) + sr := source.SearchEntry(loginName, password, source.AuthSource.Type == auth.DLDAP) if sr == nil { // User not in LDAP, do nothing return nil, user_model.ErrUserNotExist{Name: loginName} @@ -41,7 +40,7 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u sr.Username = userName } if sr.Mail == "" { - sr.Mail = fmt.Sprintf("%s@localhost.local", sr.Username) + sr.Mail = sr.Username + "@localhost.local" } isAttributeSSHPublicKeySet := strings.TrimSpace(source.AttributeSSHPublicKey) != "" @@ -59,7 +58,7 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u opts := &user_service.UpdateOptions{} if source.AdminFilter != "" && user.IsAdmin != sr.IsAdmin { // Change existing admin flag only if AdminFilter option is set - opts.IsAdmin = optional.Some(sr.IsAdmin) + opts.IsAdmin = user_service.UpdateOptionFieldFromSync(sr.IsAdmin) } if !sr.IsAdmin && source.RestrictedFilter != "" && user.IsRestricted != sr.IsRestricted { // Change existing restricted flag only if RestrictedFilter option is set @@ -74,7 +73,7 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u } if user != nil { - if isAttributeSSHPublicKeySet && asymkey_model.SynchronizePublicKeys(ctx, user, source.authSource, sr.SSHPublicKey) { + if isAttributeSSHPublicKeySet && asymkey_model.SynchronizePublicKeys(ctx, user, source.AuthSource, sr.SSHPublicKey) { if err := asymkey_service.RewriteAllPublicKeys(ctx); err != nil { return user, err } @@ -85,8 +84,8 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u Name: sr.Username, FullName: composeFullName(sr.Name, sr.Surname, sr.Username), Email: sr.Mail, - LoginType: source.authSource.Type, - LoginSource: source.authSource.ID, + LoginType: source.AuthSource.Type, + LoginSource: source.AuthSource.ID, LoginName: userName, IsAdmin: sr.IsAdmin, } @@ -100,7 +99,7 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u return user, err } - if 
isAttributeSSHPublicKeySet && asymkey_model.AddPublicKeysBySource(ctx, user, source.authSource, sr.SSHPublicKey) { + if isAttributeSSHPublicKeySet && asymkey_model.AddPublicKeysBySource(ctx, user, source.AuthSource, sr.SSHPublicKey) { if err := asymkey_service.RewriteAllPublicKeys(ctx); err != nil { return user, err } @@ -124,8 +123,3 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u return user, nil } - -// IsSkipLocalTwoFA returns if this source should skip local 2fa for password authentication -func (source *Source) IsSkipLocalTwoFA() bool { - return source.SkipLocalTwoFA -} diff --git a/services/auth/source/ldap/source_search.go b/services/auth/source/ldap/source_search.go index fa2c45ce4a..e6bce04a83 100644 --- a/services/auth/source/ldap/source_search.go +++ b/services/auth/source/ldap/source_search.go @@ -117,10 +117,10 @@ func dial(source *Source) (*ldap.Conn, error) { } if source.SecurityProtocol == SecurityProtocolLDAPS { - return ldap.DialTLS("tcp", net.JoinHostPort(source.Host, strconv.Itoa(source.Port)), tlsConfig) //nolint:staticcheck + return ldap.DialTLS("tcp", net.JoinHostPort(source.Host, strconv.Itoa(source.Port)), tlsConfig) //nolint:staticcheck // DialTLS is deprecated } - conn, err := ldap.Dial("tcp", net.JoinHostPort(source.Host, strconv.Itoa(source.Port))) //nolint:staticcheck + conn, err := ldap.Dial("tcp", net.JoinHostPort(source.Host, strconv.Itoa(source.Port))) //nolint:staticcheck // Dial is deprecated if err != nil { return nil, fmt.Errorf("error during Dial: %w", err) } diff --git a/services/auth/source/ldap/source_sync.go b/services/auth/source/ldap/source_sync.go index e817bf1fa9..7b401c5c96 100644 --- a/services/auth/source/ldap/source_sync.go +++ b/services/auth/source/ldap/source_sync.go @@ -5,7 +5,6 @@ package ldap import ( "context" - "fmt" "strings" asymkey_model "code.gitea.io/gitea/models/asymkey" @@ -23,21 +22,21 @@ import ( // Sync causes this ldap source to synchronize its users with the db func (source *Source) Sync(ctx context.Context, updateExisting bool) error { - log.Trace("Doing: SyncExternalUsers[%s]", source.authSource.Name) + log.Trace("Doing: SyncExternalUsers[%s]", source.AuthSource.Name) isAttributeSSHPublicKeySet := strings.TrimSpace(source.AttributeSSHPublicKey) != "" var sshKeysNeedUpdate bool // Find all users with this login type - FIXME: Should this be an iterator? 
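The source.authSource -> source.AuthSource renames in this file follow from the embedded auth.ConfigBase introduced in the struct above. A minimal self-contained sketch of the field-promotion pattern, with simplified stand-in types rather than Gitea's real definitions:

package main

import "fmt"

// Simplified stand-ins for illustration only; the real auth.Source and
// auth.ConfigBase in models/auth carry more fields than shown here.
type AuthSource struct {
	ID   int64
	Name string
}

// ConfigBase is embedded by each source config, so the owning source is
// reachable as cfg.AuthSource without per-type SetAuthSource boilerplate.
type ConfigBase struct {
	AuthSource *AuthSource
}

type LDAPConfig struct {
	ConfigBase
	Host string
}

func main() {
	cfg := &LDAPConfig{Host: "ldap.example.com"}
	cfg.AuthSource = &AuthSource{ID: 12, Name: "corporate-ldap"} // promoted field
	fmt.Printf("SyncExternalUsers[%s]\n", cfg.AuthSource.Name)
}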
- users, err := user_model.GetUsersBySource(ctx, source.authSource) + users, err := user_model.GetUsersBySource(ctx, source.AuthSource) if err != nil { log.Error("SyncExternalUsers: %v", err) return err } select { case <-ctx.Done(): - log.Warn("SyncExternalUsers: Cancelled before update of %s", source.authSource.Name) - return db.ErrCancelledf("Before update of %s", source.authSource.Name) + log.Warn("SyncExternalUsers: Cancelled before update of %s", source.AuthSource.Name) + return db.ErrCancelledf("Before update of %s", source.AuthSource.Name) default: } @@ -52,7 +51,7 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { sr, err := source.SearchEntries() if err != nil { - log.Error("SyncExternalUsers LDAP source failure [%s], skipped", source.authSource.Name) + log.Error("SyncExternalUsers LDAP source failure [%s], skipped", source.AuthSource.Name) return nil } @@ -75,7 +74,7 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { for _, su := range sr { select { case <-ctx.Done(): - log.Warn("SyncExternalUsers: Cancelled at update of %s before completed update of users", source.authSource.Name) + log.Warn("SyncExternalUsers: Cancelled at update of %s before completed update of users", source.AuthSource.Name) // Rewrite authorized_keys file if LDAP Public SSH Key attribute is set and any key was added or removed if sshKeysNeedUpdate { err = asymkey_service.RewriteAllPublicKeys(ctx) @@ -83,7 +82,7 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { log.Error("RewriteAllPublicKeys: %v", err) } } - return db.ErrCancelledf("During update of %s before completed update of users", source.authSource.Name) + return db.ErrCancelledf("During update of %s before completed update of users", source.AuthSource.Name) default: } if su.Username == "" && su.Mail == "" { @@ -106,20 +105,20 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { } if su.Mail == "" { - su.Mail = fmt.Sprintf("%s@localhost.local", su.Username) + su.Mail = su.Username + "@localhost.local" } fullName := composeFullName(su.Name, su.Surname, su.Username) // If no existing user found, create one if usr == nil { - log.Trace("SyncExternalUsers[%s]: Creating user %s", source.authSource.Name, su.Username) + log.Trace("SyncExternalUsers[%s]: Creating user %s", source.AuthSource.Name, su.Username) usr = &user_model.User{ LowerName: su.LowerName, Name: su.Username, FullName: fullName, - LoginType: source.authSource.Type, - LoginSource: source.authSource.ID, + LoginType: source.AuthSource.Type, + LoginSource: source.AuthSource.ID, LoginName: su.Username, Email: su.Mail, IsAdmin: su.IsAdmin, @@ -131,12 +130,12 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { err = user_model.CreateUser(ctx, usr, &user_model.Meta{}, overwriteDefault) if err != nil { - log.Error("SyncExternalUsers[%s]: Error creating user %s: %v", source.authSource.Name, su.Username, err) + log.Error("SyncExternalUsers[%s]: Error creating user %s: %v", source.AuthSource.Name, su.Username, err) } if err == nil && isAttributeSSHPublicKeySet { - log.Trace("SyncExternalUsers[%s]: Adding LDAP Public SSH Keys for user %s", source.authSource.Name, usr.Name) - if asymkey_model.AddPublicKeysBySource(ctx, usr, source.authSource, su.SSHPublicKey) { + log.Trace("SyncExternalUsers[%s]: Adding LDAP Public SSH Keys for user %s", source.AuthSource.Name, usr.Name) + if asymkey_model.AddPublicKeysBySource(ctx, usr, source.AuthSource, su.SSHPublicKey) { 
sshKeysNeedUpdate = true } } @@ -146,7 +145,7 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { } } else if updateExisting { // Synchronize SSH Public Key if that attribute is set - if isAttributeSSHPublicKeySet && asymkey_model.SynchronizePublicKeys(ctx, usr, source.authSource, su.SSHPublicKey) { + if isAttributeSSHPublicKeySet && asymkey_model.SynchronizePublicKeys(ctx, usr, source.AuthSource, su.SSHPublicKey) { sshKeysNeedUpdate = true } @@ -156,14 +155,14 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { !strings.EqualFold(usr.Email, su.Mail) || usr.FullName != fullName || !usr.IsActive { - log.Trace("SyncExternalUsers[%s]: Updating user %s", source.authSource.Name, usr.Name) + log.Trace("SyncExternalUsers[%s]: Updating user %s", source.AuthSource.Name, usr.Name) opts := &user_service.UpdateOptions{ FullName: optional.Some(fullName), IsActive: optional.Some(true), } if source.AdminFilter != "" { - opts.IsAdmin = optional.Some(su.IsAdmin) + opts.IsAdmin = user_service.UpdateOptionFieldFromSync(su.IsAdmin) } // Change existing restricted flag only if RestrictedFilter option is set if !su.IsAdmin && source.RestrictedFilter != "" { @@ -171,16 +170,17 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { } if err := user_service.UpdateUser(ctx, usr, opts); err != nil { - log.Error("SyncExternalUsers[%s]: Error updating user %s: %v", source.authSource.Name, usr.Name, err) + log.Error("SyncExternalUsers[%s]: Error updating user %s: %v", source.AuthSource.Name, usr.Name, err) } if err := user_service.ReplacePrimaryEmailAddress(ctx, usr, su.Mail); err != nil { - log.Error("SyncExternalUsers[%s]: Error updating user %s primary email %s: %v", source.authSource.Name, usr.Name, su.Mail, err) + log.Error("SyncExternalUsers[%s]: Error updating user %s primary email %s: %v", source.AuthSource.Name, usr.Name, su.Mail, err) } } - if usr.IsUploadAvatarChanged(su.Avatar) { - if err == nil && source.AttributeAvatar != "" { + if source.AttributeAvatar != "" { + if len(su.Avatar) > 0 && usr.IsUploadAvatarChanged(su.Avatar) { + log.Trace("SyncExternalUsers[%s]: Uploading new avatar for %s", source.AuthSource.Name, usr.Name) _ = user_service.UploadAvatar(ctx, usr, su.Avatar) } } @@ -203,8 +203,8 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { select { case <-ctx.Done(): - log.Warn("SyncExternalUsers: Cancelled during update of %s before delete users", source.authSource.Name) - return db.ErrCancelledf("During update of %s before delete users", source.authSource.Name) + log.Warn("SyncExternalUsers: Cancelled during update of %s before delete users", source.AuthSource.Name) + return db.ErrCancelledf("During update of %s before delete users", source.AuthSource.Name) default: } @@ -215,13 +215,13 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { continue } - log.Trace("SyncExternalUsers[%s]: Deactivating user %s", source.authSource.Name, usr.Name) + log.Trace("SyncExternalUsers[%s]: Deactivating user %s", source.AuthSource.Name, usr.Name) opts := &user_service.UpdateOptions{ IsActive: optional.Some(false), } if err := user_service.UpdateUser(ctx, usr, opts); err != nil { - log.Error("SyncExternalUsers[%s]: Error deactivating user %s: %v", source.authSource.Name, usr.Name, err) + log.Error("SyncExternalUsers[%s]: Error deactivating user %s: %v", source.AuthSource.Name, usr.Name, err) } } } diff --git a/services/auth/source/oauth2/assert_interface_test.go 
b/services/auth/source/oauth2/assert_interface_test.go index 56fe0e4aa8..d870ac1dcd 100644 --- a/services/auth/source/oauth2/assert_interface_test.go +++ b/services/auth/source/oauth2/assert_interface_test.go @@ -14,7 +14,6 @@ import ( type sourceInterface interface { auth_model.Config - auth_model.SourceSettable auth_model.RegisterableSource auth.PasswordAuthenticator } diff --git a/services/auth/source/oauth2/source.go b/services/auth/source/oauth2/source.go index 3454c9ad55..08837de377 100644 --- a/services/auth/source/oauth2/source.go +++ b/services/auth/source/oauth2/source.go @@ -10,6 +10,8 @@ import ( // Source holds configuration for the OAuth2 login source. type Source struct { + auth.ConfigBase `json:"-"` + Provider string ClientID string ClientSecret string @@ -25,10 +27,6 @@ type Source struct { GroupTeamMap string GroupTeamMapRemoval bool RestrictedGroup string - SkipLocalTwoFA bool `json:",omitempty"` - - // reference to the authSource - authSource *auth.Source } // FromDB fills up an OAuth2Config from serialized format. @@ -41,11 +39,6 @@ func (source *Source) ToDB() ([]byte, error) { return json.Marshal(source) } -// SetAuthSource sets the related AuthSource -func (source *Source) SetAuthSource(authSource *auth.Source) { - source.authSource = authSource -} - func init() { auth.RegisterTypeConfig(auth.OAuth2, &Source{}) } diff --git a/services/auth/source/oauth2/source_callout.go b/services/auth/source/oauth2/source_callout.go index 8d70bee248..f09d25c772 100644 --- a/services/auth/source/oauth2/source_callout.go +++ b/services/auth/source/oauth2/source_callout.go @@ -13,7 +13,7 @@ import ( // Callout redirects request/response pair to authenticate against the provider func (source *Source) Callout(request *http.Request, response http.ResponseWriter) error { // not sure if goth is thread safe (?) when using multiple providers - request.Header.Set(ProviderHeaderKey, source.authSource.Name) + request.Header.Set(ProviderHeaderKey, source.AuthSource.Name) // don't use the default gothic begin handler to prevent issues when some error occurs // normally the gothic library will write some custom stuff to the response instead of our own nice error page @@ -33,7 +33,7 @@ func (source *Source) Callout(request *http.Request, response http.ResponseWrite // this will trigger a new authentication request, but because we save it in the session we can use that func (source *Source) Callback(request *http.Request, response http.ResponseWriter) (goth.User, error) { // not sure if goth is thread safe (?) 
when using multiple providers - request.Header.Set(ProviderHeaderKey, source.authSource.Name) + request.Header.Set(ProviderHeaderKey, source.AuthSource.Name) gothRWMutex.RLock() defer gothRWMutex.RUnlock() diff --git a/services/auth/source/oauth2/source_register.go b/services/auth/source/oauth2/source_register.go index 82a36acaa6..12da56c11b 100644 --- a/services/auth/source/oauth2/source_register.go +++ b/services/auth/source/oauth2/source_register.go @@ -9,13 +9,13 @@ import ( // RegisterSource causes an OAuth2 configuration to be registered func (source *Source) RegisterSource() error { - err := RegisterProviderWithGothic(source.authSource.Name, source) - return wrapOpenIDConnectInitializeError(err, source.authSource.Name, source) + err := RegisterProviderWithGothic(source.AuthSource.Name, source) + return wrapOpenIDConnectInitializeError(err, source.AuthSource.Name, source) } // UnregisterSource causes an OAuth2 configuration to be unregistered func (source *Source) UnregisterSource() error { - RemoveProviderFromGothic(source.authSource.Name) + RemoveProviderFromGothic(source.AuthSource.Name) return nil } diff --git a/services/auth/source/oauth2/source_sync.go b/services/auth/source/oauth2/source_sync.go index 5e30313c8f..c2e3dfb1a8 100644 --- a/services/auth/source/oauth2/source_sync.go +++ b/services/auth/source/oauth2/source_sync.go @@ -18,27 +18,27 @@ import ( // Sync causes this OAuth2 source to synchronize its users with the db. func (source *Source) Sync(ctx context.Context, updateExisting bool) error { - log.Trace("Doing: SyncExternalUsers[%s] %d", source.authSource.Name, source.authSource.ID) + log.Trace("Doing: SyncExternalUsers[%s] %d", source.AuthSource.Name, source.AuthSource.ID) if !updateExisting { - log.Info("SyncExternalUsers[%s] not running since updateExisting is false", source.authSource.Name) + log.Info("SyncExternalUsers[%s] not running since updateExisting is false", source.AuthSource.Name) return nil } - provider, err := createProvider(source.authSource.Name, source) + provider, err := createProvider(source.AuthSource.Name, source) if err != nil { return err } if !provider.RefreshTokenAvailable() { - log.Trace("SyncExternalUsers[%s] provider doesn't support refresh tokens, can't synchronize", source.authSource.Name) + log.Trace("SyncExternalUsers[%s] provider doesn't support refresh tokens, can't synchronize", source.AuthSource.Name) return nil } opts := user_model.FindExternalUserOptions{ HasRefreshToken: true, Expired: true, - LoginSourceID: source.authSource.ID, + LoginSourceID: source.AuthSource.ID, } return user_model.IterateExternalLogin(ctx, opts, func(ctx context.Context, u *user_model.ExternalLoginUser) error { @@ -77,7 +77,7 @@ func (source *Source) refresh(ctx context.Context, provider goth.Provider, u *us // recognizes them as a valid user, they will be able to login // via their provider and reactivate their account. 
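Each migrated source config above embeds auth.ConfigBase with a json:"-" tag. A small stand-alone demo, again with simplified stand-in types, of why that tag matters for what ToDB serializes:

package main

import (
	"encoding/json"
	"fmt"
)

// Illustration of the json:"-" tag on the embedded base: the runtime
// back-reference should not leak into the serialized config that ToDB stores.
// Types are simplified stand-ins, not the real Gitea structs.
type ConfigBase struct {
	AuthSourceName string
}

type OAuth2Config struct {
	ConfigBase `json:"-"`
	Provider   string
	ClientID   string
}

func main() {
	cfg := OAuth2Config{Provider: "fake", ClientID: "abc"}
	cfg.AuthSourceName = "fake-source"
	out, _ := json.Marshal(cfg)
	fmt.Println(string(out)) // {"Provider":"fake","ClientID":"abc"} — no ConfigBase fields
}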
if shouldDisable { - log.Info("SyncExternalUsers[%s] disabling user %d", source.authSource.Name, user.ID) + log.Info("SyncExternalUsers[%s] disabling user %d", source.AuthSource.Name, user.ID) return db.WithTx(ctx, func(ctx context.Context) error { if hasUser { diff --git a/services/auth/source/oauth2/source_sync_test.go b/services/auth/source/oauth2/source_sync_test.go index aacb4286a1..2927f3634b 100644 --- a/services/auth/source/oauth2/source_sync_test.go +++ b/services/auth/source/oauth2/source_sync_test.go @@ -18,19 +18,21 @@ func TestSource(t *testing.T) { source := &Source{ Provider: "fake", - authSource: &auth.Source{ - ID: 12, - Type: auth.OAuth2, - Name: "fake", - IsActive: true, - IsSyncEnabled: true, + ConfigBase: auth.ConfigBase{ + AuthSource: &auth.Source{ + ID: 12, + Type: auth.OAuth2, + Name: "fake", + IsActive: true, + IsSyncEnabled: true, + }, }, } user := &user_model.User{ LoginName: "external", LoginType: auth.OAuth2, - LoginSource: source.authSource.ID, + LoginSource: source.AuthSource.ID, Name: "test", Email: "external@example.com", } @@ -47,7 +49,7 @@ func TestSource(t *testing.T) { err = user_model.LinkExternalToUser(t.Context(), user, e) assert.NoError(t, err) - provider, err := createProvider(source.authSource.Name, source) + provider, err := createProvider(source.AuthSource.Name, source) assert.NoError(t, err) t.Run("refresh", func(t *testing.T) { @@ -88,8 +90,8 @@ func TestSource(t *testing.T) { ok, err := user_model.GetExternalLogin(t.Context(), e) assert.NoError(t, err) assert.True(t, ok) - assert.Equal(t, "", e.RefreshToken) - assert.Equal(t, "", e.AccessToken) + assert.Empty(t, e.RefreshToken) + assert.Empty(t, e.AccessToken) u, err := user_model.GetUserByID(t.Context(), user.ID) assert.NoError(t, err) diff --git a/services/auth/source/oauth2/urlmapping.go b/services/auth/source/oauth2/urlmapping.go index d0442d58a8..b9f445caa7 100644 --- a/services/auth/source/oauth2/urlmapping.go +++ b/services/auth/source/oauth2/urlmapping.go @@ -14,11 +14,11 @@ type CustomURLMapping struct { // CustomURLSettings describes the urls values and availability to use when customizing OAuth2 provider URLs type CustomURLSettings struct { - AuthURL Attribute `json:",omitempty"` - TokenURL Attribute `json:",omitempty"` - ProfileURL Attribute `json:",omitempty"` - EmailURL Attribute `json:",omitempty"` - Tenant Attribute `json:",omitempty"` + AuthURL Attribute + TokenURL Attribute + ProfileURL Attribute + EmailURL Attribute + Tenant Attribute } // Attribute describes the availability, and required status for a custom url configuration diff --git a/services/auth/source/pam/assert_interface_test.go b/services/auth/source/pam/assert_interface_test.go index 8e7648b8d3..908d097d96 100644 --- a/services/auth/source/pam/assert_interface_test.go +++ b/services/auth/source/pam/assert_interface_test.go @@ -15,7 +15,6 @@ import ( type sourceInterface interface { auth.PasswordAuthenticator auth_model.Config - auth_model.SourceSettable } var _ (sourceInterface) = &pam.Source{} diff --git a/services/auth/source/pam/source.go b/services/auth/source/pam/source.go index 96b182e185..d1db6db9b7 100644 --- a/services/auth/source/pam/source.go +++ b/services/auth/source/pam/source.go @@ -17,12 +17,10 @@ import ( // Source holds configuration for the PAM login source. type Source struct { - ServiceName string // pam service (e.g. 
system-auth) - EmailDomain string - SkipLocalTwoFA bool `json:",omitempty"` // Skip Local 2fa for users authenticated with this source + auth.ConfigBase `json:"-"` - // reference to the authSource - authSource *auth.Source + ServiceName string // pam service (e.g. system-auth) + EmailDomain string } // FromDB fills up a PAMConfig from serialized format. @@ -35,11 +33,6 @@ func (source *Source) ToDB() ([]byte, error) { return json.Marshal(source) } -// SetAuthSource sets the related AuthSource -func (source *Source) SetAuthSource(authSource *auth.Source) { - source.authSource = authSource -} - func init() { auth.RegisterTypeConfig(auth.PAM, &Source{}) } diff --git a/services/auth/source/pam/source_authenticate.go b/services/auth/source/pam/source_authenticate.go index 6fd02dc29f..db7c6aab96 100644 --- a/services/auth/source/pam/source_authenticate.go +++ b/services/auth/source/pam/source_authenticate.go @@ -56,7 +56,7 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u Email: email, Passwd: password, LoginType: auth.PAM, - LoginSource: source.authSource.ID, + LoginSource: source.AuthSource.ID, LoginName: userName, // This is what the user typed in } overwriteDefault := &user_model.CreateUserOverwriteOptions{ @@ -69,8 +69,3 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u return user, nil } - -// IsSkipLocalTwoFA returns if this source should skip local 2fa for password authentication -func (source *Source) IsSkipLocalTwoFA() bool { - return source.SkipLocalTwoFA -} diff --git a/services/auth/source/smtp/assert_interface_test.go b/services/auth/source/smtp/assert_interface_test.go index 6c9cde66e1..56edad0c71 100644 --- a/services/auth/source/smtp/assert_interface_test.go +++ b/services/auth/source/smtp/assert_interface_test.go @@ -18,7 +18,6 @@ type sourceInterface interface { auth_model.SkipVerifiable auth_model.HasTLSer auth_model.UseTLSer - auth_model.SourceSettable } var _ (sourceInterface) = &smtp.Source{} diff --git a/services/auth/source/smtp/source.go b/services/auth/source/smtp/source.go index 2a648e421e..2ae81ad4f2 100644 --- a/services/auth/source/smtp/source.go +++ b/services/auth/source/smtp/source.go @@ -17,6 +17,8 @@ import ( // Source holds configuration for the SMTP login source. type Source struct { + auth.ConfigBase `json:"-"` + Auth string Host string Port int @@ -25,10 +27,6 @@ type Source struct { SkipVerify bool HeloHostname string DisableHelo bool - SkipLocalTwoFA bool `json:",omitempty"` - - // reference to the authSource - authSource *auth.Source } // FromDB fills up an SMTPConfig from serialized format. 
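The UseTLS hunk that follows keeps the existing rule: implicit SMTPS is used when ForceSMTPS is set or when the standard SMTPS port 465 is configured. A stand-alone restatement of that rule:

package main

import "fmt"

// Restatement of the UseTLS rule visible in the next hunk: implicit TLS is
// used when forced explicitly or when the SMTPS port is configured.
func useTLS(forceSMTPS bool, port int) bool {
	return forceSMTPS || port == 465
}

func main() {
	fmt.Println(useTLS(false, 587)) // false: submission port, STARTTLS instead
	fmt.Println(useTLS(false, 465)) // true: implicit SMTPS port
	fmt.Println(useTLS(true, 25))   // true: forced
}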
@@ -56,11 +54,6 @@ func (source *Source) UseTLS() bool { return source.ForceSMTPS || source.Port == 465 } -// SetAuthSource sets the related AuthSource -func (source *Source) SetAuthSource(authSource *auth.Source) { - source.authSource = authSource -} - func init() { auth.RegisterTypeConfig(auth.SMTP, &Source{}) } diff --git a/services/auth/source/smtp/source_authenticate.go b/services/auth/source/smtp/source_authenticate.go index b2e94933a6..b8e668f5f9 100644 --- a/services/auth/source/smtp/source_authenticate.go +++ b/services/auth/source/smtp/source_authenticate.go @@ -72,7 +72,7 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u Email: userName, Passwd: password, LoginType: auth_model.SMTP, - LoginSource: source.authSource.ID, + LoginSource: source.AuthSource.ID, LoginName: userName, } overwriteDefault := &user_model.CreateUserOverwriteOptions{ @@ -85,8 +85,3 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u return user, nil } - -// IsSkipLocalTwoFA returns if this source should skip local 2fa for password authentication -func (source *Source) IsSkipLocalTwoFA() bool { - return source.SkipLocalTwoFA -} diff --git a/services/auth/source/sspi/source.go b/services/auth/source/sspi/source.go index bdd6ef451c..3b7b5cb033 100644 --- a/services/auth/source/sspi/source.go +++ b/services/auth/source/sspi/source.go @@ -17,6 +17,8 @@ import ( // Source holds configuration for SSPI single sign-on. type Source struct { + auth.ConfigBase `json:"-"` + AutoCreateUsers bool AutoActivateUsers bool StripDomainNames bool diff --git a/services/automerge/automerge.go b/services/automerge/automerge.go index 62d560ff94..0520a097d3 100644 --- a/services/automerge/automerge.go +++ b/services/automerge/automerge.go @@ -36,7 +36,7 @@ func Init() error { prAutoMergeQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "pr_auto_merge", handler) if prAutoMergeQueue == nil { - return fmt.Errorf("unable to create pr_auto_merge queue") + return errors.New("unable to create pr_auto_merge queue") } go graceful.GetManager().RunWithCancel(prAutoMergeQueue) return nil @@ -289,7 +289,7 @@ func handlePullRequestAutoMerge(pullID int64, sha string) { } if err := pull_service.CheckPullMergeable(ctx, doer, &perm, pr, pull_service.MergeCheckTypeGeneral, false); err != nil { - if errors.Is(err, pull_service.ErrUserNotAllowedToMerge) { + if errors.Is(err, pull_service.ErrNotReadyToMerge) { log.Info("%-v was scheduled to automerge by an unauthorized user", pr) return } diff --git a/services/context/access_log.go b/services/context/access_log.go index 925e4a3056..caade113a7 100644 --- a/services/context/access_log.go +++ b/services/context/access_log.go @@ -5,7 +5,6 @@ package context import ( "bytes" - "fmt" "net" "net/http" "strings" @@ -47,7 +46,7 @@ func parseRequestIDFromRequestHeader(req *http.Request) string { } } if len(requestID) > maxRequestIDByteLength { - requestID = fmt.Sprintf("%s...", requestID[:maxRequestIDByteLength]) + requestID = requestID[:maxRequestIDByteLength] + "..." 
} return requestID } diff --git a/services/context/access_log_test.go b/services/context/access_log_test.go index c40ef9acd1..139a6eb217 100644 --- a/services/context/access_log_test.go +++ b/services/context/access_log_test.go @@ -59,7 +59,7 @@ func TestAccessLogger(t *testing.T) { recorder.logger = mockLogger req := &http.Request{ RemoteAddr: "remote-addr", - Method: "GET", + Method: http.MethodGet, Proto: "https", URL: &url.URL{Path: "/path"}, } diff --git a/services/context/api.go b/services/context/api.go index 10fad419ba..ab50a360f4 100644 --- a/services/context/api.go +++ b/services/context/api.go @@ -9,11 +9,14 @@ import ( "fmt" "net/http" "net/url" + "slices" + "strconv" "strings" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/cache" + "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/httpcache" "code.gitea.io/gitea/modules/log" @@ -168,7 +171,7 @@ func genAPILinks(curURL *url.URL, total, pageSize, curPage int) []string { if paginater.HasNext() { u := *curURL queries := u.Query() - queries.Set("page", fmt.Sprintf("%d", paginater.Next())) + queries.Set("page", strconv.Itoa(paginater.Next())) u.RawQuery = queries.Encode() links = append(links, fmt.Sprintf("<%s%s>; rel=\"next\"", setting.AppURL, u.RequestURI()[1:])) @@ -176,7 +179,7 @@ func genAPILinks(curURL *url.URL, total, pageSize, curPage int) []string { if !paginater.IsLast() { u := *curURL queries := u.Query() - queries.Set("page", fmt.Sprintf("%d", paginater.TotalPages())) + queries.Set("page", strconv.Itoa(paginater.TotalPages())) u.RawQuery = queries.Encode() links = append(links, fmt.Sprintf("<%s%s>; rel=\"last\"", setting.AppURL, u.RequestURI()[1:])) @@ -192,7 +195,7 @@ func genAPILinks(curURL *url.URL, total, pageSize, curPage int) []string { if paginater.HasPrevious() { u := *curURL queries := u.Query() - queries.Set("page", fmt.Sprintf("%d", paginater.Previous())) + queries.Set("page", strconv.Itoa(paginater.Previous())) u.RawQuery = queries.Encode() links = append(links, fmt.Sprintf("<%s%s>; rel=\"prev\"", setting.AppURL, u.RequestURI()[1:])) @@ -225,7 +228,7 @@ func APIContexter() func(http.Handler) http.Handler { ctx.SetContextValue(apiContextKey, ctx) // If request sends files, parse them here otherwise the Query() can't be parsed and the CsrfToken will be invalid. 
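genAPILinks above now builds page numbers with strconv.Itoa instead of fmt.Sprintf before re-encoding the query string. A minimal sketch of that Link-header pattern; nextLink and appURL are illustrative names, not Gitea's:

package main

import (
	"fmt"
	"net/url"
	"strconv"
)

// Copy the request URL, replace the "page" query value with strconv.Itoa,
// and emit an RFC 5988 style Link entry relative to the application URL.
func nextLink(cur *url.URL, nextPage int, appURL string) string {
	u := *cur
	q := u.Query()
	q.Set("page", strconv.Itoa(nextPage))
	u.RawQuery = q.Encode()
	return fmt.Sprintf("<%s%s>; rel=\"next\"", appURL, u.RequestURI()[1:])
}

func main() {
	cur, _ := url.Parse("/api/v1/repos/search?page=2&limit=20")
	fmt.Println(nextLink(cur, 3, "https://gitea.example.com/"))
}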
- if ctx.Req.Method == "POST" && strings.Contains(ctx.Req.Header.Get("Content-Type"), "multipart/form-data") { + if ctx.Req.Method == http.MethodPost && strings.Contains(ctx.Req.Header.Get("Content-Type"), "multipart/form-data") { if err := ctx.Req.ParseMultipartForm(setting.Attachment.MaxSize << 20); err != nil && !strings.Contains(err.Error(), "EOF") { // 32MB max size ctx.APIErrorInternal(err) return @@ -243,8 +246,8 @@ func APIContexter() func(http.Handler) http.Handler { // APIErrorNotFound handles 404s for APIContext // String will replace message, errors will be added to a slice func (ctx *APIContext) APIErrorNotFound(objs ...any) { - message := ctx.Locale.TrString("error.not_found") - var errors []string + var message string + var errs []string for _, obj := range objs { // Ignore nil if obj == nil { @@ -252,16 +255,15 @@ func (ctx *APIContext) APIErrorNotFound(objs ...any) { } if err, ok := obj.(error); ok { - errors = append(errors, err.Error()) + errs = append(errs, err.Error()) } else { message = obj.(string) } } - ctx.JSON(http.StatusNotFound, map[string]any{ - "message": message, + "message": util.IfZero(message, "not found"), // do not use locale in API "url": setting.API.SwaggerURL, - "errors": errors, + "errors": errs, }) } @@ -297,39 +299,27 @@ func RepoRefForAPI(next http.Handler) http.Handler { } if ctx.Repo.GitRepo == nil { - ctx.APIErrorInternal(fmt.Errorf("no open git repo")) - return + panic("no GitRepo, forgot to call the middleware?") // it is a programming error } - refName, _, _ := getRefNameLegacy(ctx.Base, ctx.Repo, ctx.PathParam("*"), ctx.FormTrim("ref")) + refName, refType, _ := getRefNameLegacy(ctx.Base, ctx.Repo, ctx.PathParam("*"), ctx.FormTrim("ref")) var err error - - if gitrepo.IsBranchExist(ctx, ctx.Repo.Repository, refName) { + switch refType { + case git.RefTypeBranch: ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetBranchCommit(refName) - if err != nil { - ctx.APIErrorInternal(err) - return - } - ctx.Repo.CommitID = ctx.Repo.Commit.ID.String() - } else if gitrepo.IsTagExist(ctx, ctx.Repo.Repository, refName) { + case git.RefTypeTag: ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetTagCommit(refName) - if err != nil { - ctx.APIErrorInternal(err) - return - } - ctx.Repo.CommitID = ctx.Repo.Commit.ID.String() - } else if len(refName) == ctx.Repo.GetObjectFormat().FullLength() { - ctx.Repo.CommitID = refName + case git.RefTypeCommit: ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetCommit(refName) - if err != nil { - ctx.APIErrorNotFound("GetCommit", err) - return - } - } else { - ctx.APIErrorNotFound(fmt.Errorf("not exist: '%s'", ctx.PathParam("*"))) + } + if ctx.Repo.Commit == nil || errors.Is(err, util.ErrNotExist) { + ctx.APIErrorNotFound("unable to find a git ref") + return + } else if err != nil { + ctx.APIErrorInternal(err) return } - + ctx.Repo.CommitID = ctx.Repo.Commit.ID.String() next.ServeHTTP(w, req) }) } @@ -375,11 +365,5 @@ func (ctx *APIContext) IsUserRepoAdmin() bool { // IsUserRepoWriter returns true if current user has "write" privilege in current repo func (ctx *APIContext) IsUserRepoWriter(unitTypes []unit.Type) bool { - for _, unitType := range unitTypes { - if ctx.Repo.CanWrite(unitType) { - return true - } - } - - return false + return slices.ContainsFunc(unitTypes, ctx.Repo.CanWrite) } diff --git a/services/context/api_test.go b/services/context/api_test.go index 911a49949e..87d74004db 100644 --- a/services/context/api_test.go +++ b/services/context/api_test.go @@ -45,6 +45,6 @@ func TestGenAPILinks(t *testing.T) { links := genAPILinks(u, 100, 
20, curPage) - assert.EqualValues(t, links, response) + assert.Equal(t, links, response) } } diff --git a/services/context/base.go b/services/context/base.go index 3701668bf6..f3f92b7eeb 100644 --- a/services/context/base.go +++ b/services/context/base.go @@ -8,6 +8,7 @@ import ( "html/template" "io" "net/http" + "strconv" "strings" "code.gitea.io/gitea/modules/httplib" @@ -53,7 +54,7 @@ func (b *Base) AppendAccessControlExposeHeaders(names ...string) { // SetTotalCountHeader set "X-Total-Count" header func (b *Base) SetTotalCountHeader(total int64) { - b.RespHeader().Set("X-Total-Count", fmt.Sprint(total)) + b.RespHeader().Set("X-Total-Count", strconv.FormatInt(total, 10)) b.AppendAccessControlExposeHeaders("X-Total-Count") } diff --git a/services/context/base_form.go b/services/context/base_form.go index 5b8cae9e99..81fd7cd328 100644 --- a/services/context/base_form.go +++ b/services/context/base_form.go @@ -12,6 +12,8 @@ import ( ) // FormString returns the first value matching the provided key in the form as a string +// It works the same as http.Request.FormValue: +// try urlencoded request body first, then query string, then multipart form body func (b *Base) FormString(key string, def ...string) string { s := b.Req.FormValue(key) if s == "" { @@ -20,7 +22,7 @@ func (b *Base) FormString(key string, def ...string) string { return s } -// FormStrings returns a string slice for the provided key from the form +// FormStrings returns a values for the key in the form (including query parameters), similar to FormString func (b *Base) FormStrings(key string) []string { if b.Req.Form == nil { if err := b.Req.ParseMultipartForm(32 << 20); err != nil { diff --git a/services/context/base_test.go b/services/context/base_test.go index b936b76f58..2a4f86dddf 100644 --- a/services/context/base_test.go +++ b/services/context/base_test.go @@ -15,7 +15,7 @@ import ( func TestRedirect(t *testing.T) { setting.IsInTesting = true - req, _ := http.NewRequest("GET", "/", nil) + req, _ := http.NewRequest(http.MethodGet, "/", nil) cases := []struct { url string @@ -36,7 +36,7 @@ func TestRedirect(t *testing.T) { assert.Equal(t, c.keep, has, "url = %q", c.url) } - req, _ = http.NewRequest("GET", "/", nil) + req, _ = http.NewRequest(http.MethodGet, "/", nil) resp := httptest.NewRecorder() req.Header.Add("HX-Request", "true") b := NewBaseContextForTest(resp, req) diff --git a/services/context/context.go b/services/context/context.go index 79bc5da920..32ec260aab 100644 --- a/services/context/context.go +++ b/services/context/context.go @@ -23,6 +23,7 @@ import ( "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/modules/translation" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/modules/web/middleware" web_types "code.gitea.io/gitea/modules/web/types" @@ -184,7 +185,7 @@ func Contexter() func(next http.Handler) http.Handler { }) // If request sends files, parse them here otherwise the Query() can't be parsed and the CsrfToken will be invalid. 
- if ctx.Req.Method == "POST" && strings.Contains(ctx.Req.Header.Get("Content-Type"), "multipart/form-data") { + if ctx.Req.Method == http.MethodPost && strings.Contains(ctx.Req.Header.Get("Content-Type"), "multipart/form-data") { if err := ctx.Req.ParseMultipartForm(setting.Attachment.MaxSize << 20); err != nil && !strings.Contains(err.Error(), "EOF") { // 32MB max size ctx.ServerError("ParseMultipartForm", err) return @@ -196,6 +197,8 @@ func Contexter() func(next http.Handler) http.Handler { ctx.Data["SystemConfig"] = setting.Config() + ctx.Data["ShowTwoFactorRequiredMessage"] = ctx.DoerNeedTwoFactorAuth() + // FIXME: do we really always need these setting? There should be someway to have to avoid having to always set these ctx.Data["DisableMigrations"] = setting.Repository.DisableMigrations ctx.Data["DisableStars"] = setting.Repository.DisableStars @@ -209,6 +212,13 @@ func Contexter() func(next http.Handler) http.Handler { } } +func (ctx *Context) DoerNeedTwoFactorAuth() bool { + if !setting.TwoFactorAuthEnforced { + return false + } + return ctx.Session.Get(session.KeyUserHasTwoFactorAuth) == false +} + // HasError returns true if error occurs in form validation. // Attention: this function changes ctx.Data and ctx.Flash // If HasError is called, then before Redirect, the error message should be stored by ctx.Flash.Error(ctx.GetErrMsg()) again. @@ -252,3 +262,11 @@ func (ctx *Context) JSONError(msg any) { panic(fmt.Sprintf("unsupported type: %T", msg)) } } + +func (ctx *Context) JSONErrorNotFound(optMsg ...string) { + msg := util.OptionalArg(optMsg) + if msg == "" { + msg = ctx.Locale.TrString("error.not_found") + } + ctx.JSON(http.StatusNotFound, map[string]any{"errorMessage": msg, "renderFormat": "text"}) +} diff --git a/services/context/context_response.go b/services/context/context_response.go index 61b432395a..4e11e29b69 100644 --- a/services/context/context_response.go +++ b/services/context/context_response.go @@ -150,7 +150,7 @@ func (ctx *Context) notFoundInternal(logMsg string, logErr error) { ctx.Data["IsRepo"] = ctx.Repo.Repository != nil ctx.Data["Title"] = "Page Not Found" - ctx.HTML(http.StatusNotFound, templates.TplName("status/404")) + ctx.HTML(http.StatusNotFound, "status/404") } // ServerError displays a 500 (Internal Server Error) page and prints the given error, if any. 
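The new JSONErrorNotFound helper added to context.go above responds with a small JSON body instead of an HTML page. A rough sketch of that response shape, with field names taken from the hunk; the handler wiring here is illustrative, not Gitea's actual routing:

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/http/httptest"
)

// Writes the 404 payload shape used by JSONErrorNotFound:
// {"errorMessage": ..., "renderFormat": "text"}.
func jsonErrorNotFound(w http.ResponseWriter, msg string) {
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(http.StatusNotFound)
	_ = json.NewEncoder(w).Encode(map[string]any{
		"errorMessage": msg,
		"renderFormat": "text",
	})
}

func main() {
	rec := httptest.NewRecorder()
	jsonErrorNotFound(rec, "branch does not exist")
	fmt.Println(rec.Code, rec.Body.String())
}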
diff --git a/services/context/org.go b/services/context/org.go index 992a48afa0..c8b6ed09b7 100644 --- a/services/context/org.go +++ b/services/context/org.go @@ -182,7 +182,7 @@ func OrgAssignment(opts OrgAssignmentOptions) func(ctx *Context) { return } for _, team := range teams { - if team.IncludesAllRepositories && team.AccessMode >= perm.AccessModeAdmin { + if team.IncludesAllRepositories && team.HasAdminAccess() { shouldSeeAllTeams = true break } @@ -228,7 +228,7 @@ func OrgAssignment(opts OrgAssignmentOptions) func(ctx *Context) { return } - ctx.Org.IsTeamAdmin = ctx.Org.Team.IsOwnerTeam() || ctx.Org.Team.AccessMode >= perm.AccessModeAdmin + ctx.Org.IsTeamAdmin = ctx.Org.Team.IsOwnerTeam() || ctx.Org.Team.HasAdminAccess() ctx.Data["IsTeamAdmin"] = ctx.Org.IsTeamAdmin if opts.RequireTeamAdmin && !ctx.Org.IsTeamAdmin { ctx.NotFound(err) diff --git a/services/context/package.go b/services/context/package.go index 64bb4f3ecd..8b722932b1 100644 --- a/services/context/package.go +++ b/services/context/package.go @@ -93,7 +93,7 @@ func packageAssignment(ctx *packageAssignmentCtx, errCb func(int, any)) *Package } func determineAccessMode(ctx *Base, pkg *Package, doer *user_model.User) (perm.AccessMode, error) { - if setting.Service.RequireSignInView && (doer == nil || doer.IsGhost()) { + if setting.Service.RequireSignInViewStrict && (doer == nil || doer.IsGhost()) { return perm.AccessModeNone, nil } diff --git a/services/context/pagination.go b/services/context/pagination.go index d33dd217d0..25a9298e01 100644 --- a/services/context/pagination.go +++ b/services/context/pagination.go @@ -21,12 +21,18 @@ type Pagination struct { // NewPagination creates a new instance of the Pagination struct. // "pagingNum" is "page size" or "limit", "current" is "page" +// total=-1 means only showing prev/next func NewPagination(total, pagingNum, current, numPages int) *Pagination { p := &Pagination{} p.Paginater = paginator.New(total, pagingNum, current, numPages) return p } +func (p *Pagination) WithCurRows(n int) *Pagination { + p.Paginater.SetCurRows(n) + return p +} + func (p *Pagination) AddParamFromRequest(req *http.Request) { for key, values := range req.URL.Query() { if key == "page" || len(values) == 0 || (len(values) == 1 && values[0] == "") { diff --git a/services/context/permission.go b/services/context/permission.go index 7055f798da..c0a5a98724 100644 --- a/services/context/permission.go +++ b/services/context/permission.go @@ -5,6 +5,7 @@ package context import ( "net/http" + "slices" auth_model "code.gitea.io/gitea/models/auth" repo_model "code.gitea.io/gitea/models/repo" @@ -34,10 +35,8 @@ func CanWriteToBranch() func(ctx *Context) { // RequireUnitWriter returns a middleware for requiring repository write to one of the unit permission func RequireUnitWriter(unitTypes ...unit.Type) func(ctx *Context) { return func(ctx *Context) { - for _, unitType := range unitTypes { - if ctx.Repo.CanWrite(unitType) { - return - } + if slices.ContainsFunc(unitTypes, ctx.Repo.CanWrite) { + return } ctx.NotFound(nil) } diff --git a/services/context/private.go b/services/context/private.go index 51857da8fe..d20e49f588 100644 --- a/services/context/private.go +++ b/services/context/private.go @@ -5,7 +5,6 @@ package context import ( "context" - "fmt" "net/http" "time" @@ -29,7 +28,6 @@ func init() { }) } -// Deadline is part of the interface for context.Context and we pass this to the request context func (ctx *PrivateContext) Deadline() (deadline time.Time, ok bool) { if ctx.Override != nil { return 
ctx.Override.Deadline() @@ -37,7 +35,6 @@ func (ctx *PrivateContext) Deadline() (deadline time.Time, ok bool) { return ctx.Base.Deadline() } -// Done is part of the interface for context.Context and we pass this to the request context func (ctx *PrivateContext) Done() <-chan struct{} { if ctx.Override != nil { return ctx.Override.Done() @@ -45,7 +42,6 @@ func (ctx *PrivateContext) Done() <-chan struct{} { return ctx.Base.Done() } -// Err is part of the interface for context.Context and we pass this to the request context func (ctx *PrivateContext) Err() error { if ctx.Override != nil { return ctx.Override.Err() @@ -53,14 +49,14 @@ func (ctx *PrivateContext) Err() error { return ctx.Base.Err() } -var privateContextKey any = "default_private_context" +type privateContextKeyType struct{} + +var privateContextKey privateContextKeyType -// GetPrivateContext returns a context for Private routes func GetPrivateContext(req *http.Request) *PrivateContext { return req.Context().Value(privateContextKey).(*PrivateContext) } -// PrivateContexter returns apicontext as middleware func PrivateContexter() func(http.Handler) http.Handler { return func(next http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { @@ -82,7 +78,7 @@ func OverrideContext() func(http.Handler) http.Handler { // We now need to override the request context as the base for our work because even if the request is cancelled we have to continue this work ctx := GetPrivateContext(req) var finished func() - ctx.Override, _, finished = process.GetManager().AddTypedContext(graceful.GetManager().HammerContext(), fmt.Sprintf("PrivateContext: %s", ctx.Req.RequestURI), process.RequestProcessType, true) + ctx.Override, _, finished = process.GetManager().AddTypedContext(graceful.GetManager().HammerContext(), "PrivateContext: "+ctx.Req.RequestURI, process.RequestProcessType, true) defer finished() next.ServeHTTP(ctx.Resp, ctx.Req) }) diff --git a/services/context/repo.go b/services/context/repo.go index 6eccd1312a..572211712b 100644 --- a/services/context/repo.go +++ b/services/context/repo.go @@ -71,11 +71,6 @@ func (r *Repository) CanWriteToBranch(ctx context.Context, user *user_model.User return issues_model.CanMaintainerWriteToBranch(ctx, r.Permission, branch, user) } -// CanEnableEditor returns true if repository is editable and user has proper access level. -func (r *Repository) CanEnableEditor(ctx context.Context, user *user_model.User) bool { - return r.RefFullName.IsBranch() && r.CanWriteToBranch(ctx, user, r.BranchName) && r.Repository.CanEnableEditor() && !r.Repository.IsArchived -} - // CanCreateBranch returns true if repository is editable and user has proper access level. 
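Earlier in this diff, IsUserRepoWriter in api.go and RequireUnitWriter in permission.go both collapse their manual loops into slices.ContainsFunc. A stand-alone illustration of the same pattern, using made-up unit types rather than Gitea's unit.Type values:

package main

import (
	"fmt"
	"slices"
)

type unitType int

const (
	typeCode unitType = iota
	typeIssues
	typeWiki
)

func main() {
	writable := map[unitType]bool{typeWiki: true}
	canWrite := func(t unitType) bool { return writable[t] }

	// True if the user can write to at least one of the requested unit types.
	requested := []unitType{typeCode, typeWiki}
	fmt.Println(slices.ContainsFunc(requested, canWrite)) // true
}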
func (r *Repository) CanCreateBranch() bool { return r.Permission.CanWrite(unit_model.TypeCode) && r.Repository.CanCreateBranch() @@ -94,58 +89,100 @@ func RepoMustNotBeArchived() func(ctx *Context) { } } -// CanCommitToBranchResults represents the results of CanCommitToBranch -type CanCommitToBranchResults struct { - CanCommitToBranch bool - EditorEnabled bool - UserCanPush bool - RequireSigned bool - WillSign bool - SigningKey string - WontSignReason string +type CommitFormOptions struct { + NeedFork bool + + TargetRepo *repo_model.Repository + TargetFormAction string + WillSubmitToFork bool + CanCommitToBranch bool + UserCanPush bool + RequireSigned bool + WillSign bool + SigningKey *git.SigningKey + WontSignReason string + CanCreatePullRequest bool + CanCreateBasePullRequest bool } -// CanCommitToBranch returns true if repository is editable and user has proper access level -// -// and branch is not protected for push -func (r *Repository) CanCommitToBranch(ctx context.Context, doer *user_model.User) (CanCommitToBranchResults, error) { - protectedBranch, err := git_model.GetFirstMatchProtectedBranchRule(ctx, r.Repository.ID, r.BranchName) +func PrepareCommitFormOptions(ctx *Context, doer *user_model.User, targetRepo *repo_model.Repository, doerRepoPerm access_model.Permission, refName git.RefName) (*CommitFormOptions, error) { + if !refName.IsBranch() { + // it shouldn't happen because middleware already checks + return nil, util.NewInvalidArgumentErrorf("ref %q is not a branch", refName) + } + + originRepo := targetRepo + branchName := refName.ShortName() + // TODO: CanMaintainerWriteToBranch is a bad name, but it really does what "CanWriteToBranch" does + if !issues_model.CanMaintainerWriteToBranch(ctx, doerRepoPerm, branchName, doer) { + targetRepo = repo_model.GetForkedRepo(ctx, doer.ID, targetRepo.ID) + if targetRepo == nil { + return &CommitFormOptions{NeedFork: true}, nil + } + // now, we get our own forked repo; it must be writable by us. 
+ } + submitToForkedRepo := targetRepo.ID != originRepo.ID + err := targetRepo.GetBaseRepo(ctx) + if err != nil { + return nil, err + } + + protectedBranch, err := git_model.GetFirstMatchProtectedBranchRule(ctx, targetRepo.ID, branchName) if err != nil { - return CanCommitToBranchResults{}, err + return nil, err } - userCanPush := true - requireSigned := false + canPushWithProtection := true + protectionRequireSigned := false if protectedBranch != nil { - protectedBranch.Repo = r.Repository - userCanPush = protectedBranch.CanUserPush(ctx, doer) - requireSigned = protectedBranch.RequireSignedCommits + protectedBranch.Repo = targetRepo + canPushWithProtection = protectedBranch.CanUserPush(ctx, doer) + protectionRequireSigned = protectedBranch.RequireSignedCommits } - sign, keyID, _, err := asymkey_service.SignCRUDAction(ctx, r.Repository.RepoPath(), doer, r.Repository.RepoPath(), git.BranchPrefix+r.BranchName) - - canCommit := r.CanEnableEditor(ctx, doer) && userCanPush - if requireSigned { - canCommit = canCommit && sign - } + willSign, signKeyID, _, err := asymkey_service.SignCRUDAction(ctx, targetRepo.RepoPath(), doer, targetRepo.RepoPath(), refName.String()) wontSignReason := "" - if err != nil { - if asymkey_service.IsErrWontSign(err) { - wontSignReason = string(err.(*asymkey_service.ErrWontSign).Reason) - err = nil - } else { - wontSignReason = "error" - } + if asymkey_service.IsErrWontSign(err) { + wontSignReason = string(err.(*asymkey_service.ErrWontSign).Reason) + } else if err != nil { + return nil, err } - return CanCommitToBranchResults{ - CanCommitToBranch: canCommit, - EditorEnabled: r.CanEnableEditor(ctx, doer), - UserCanPush: userCanPush, - RequireSigned: requireSigned, - WillSign: sign, - SigningKey: keyID, + canCommitToBranch := !submitToForkedRepo /* same repo */ && targetRepo.CanEnableEditor() && canPushWithProtection + if protectionRequireSigned { + canCommitToBranch = canCommitToBranch && willSign + } + + canCreateBasePullRequest := targetRepo.BaseRepo != nil && targetRepo.BaseRepo.UnitEnabled(ctx, unit_model.TypePullRequests) + canCreatePullRequest := targetRepo.UnitEnabled(ctx, unit_model.TypePullRequests) || canCreateBasePullRequest + + opts := &CommitFormOptions{ + TargetRepo: targetRepo, + WillSubmitToFork: submitToForkedRepo, + CanCommitToBranch: canCommitToBranch, + UserCanPush: canPushWithProtection, + RequireSigned: protectionRequireSigned, + WillSign: willSign, + SigningKey: signKeyID, WontSignReason: wontSignReason, - }, err + + CanCreatePullRequest: canCreatePullRequest, + CanCreateBasePullRequest: canCreateBasePullRequest, + } + editorAction := ctx.PathParam("editor_action") + editorPathParamRemaining := util.PathEscapeSegments(branchName) + "/" + util.PathEscapeSegments(ctx.Repo.TreePath) + if submitToForkedRepo { + // there is only "default branch" in forked repo, we will use "from_base_branch" to get a new branch from base repo + editorPathParamRemaining = util.PathEscapeSegments(targetRepo.DefaultBranch) + "/" + util.PathEscapeSegments(ctx.Repo.TreePath) + "?from_base_branch=" + url.QueryEscape(branchName) + } + if editorAction == "_cherrypick" { + opts.TargetFormAction = targetRepo.Link() + "/" + editorAction + "/" + ctx.PathParam("sha") + "/" + editorPathParamRemaining + } else { + opts.TargetFormAction = targetRepo.Link() + "/" + editorAction + "/" + editorPathParamRemaining + } + if ctx.Req.URL.RawQuery != "" { + opts.TargetFormAction += util.Iif(strings.Contains(opts.TargetFormAction, "?"), "&", "?") + ctx.Req.URL.RawQuery + } + return opts, nil } 
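A hypothetical caller sketch of the new PrepareCommitFormOptions helper above (the package name and handler function are illustrative only, not part of this patch): a web editor handler can use the returned CommitFormOptions either to prompt the user to fork, or to point the commit form at the writable target repository.

package repohandler // assumed package, for illustration

import (
	"code.gitea.io/gitea/services/context"
)

// prepareEditorCommitForm is an illustrative handler step, not Gitea's actual route code.
func prepareEditorCommitForm(ctx *context.Context) *context.CommitFormOptions {
	opts, err := context.PrepareCommitFormOptions(ctx, ctx.Doer, ctx.Repo.Repository, ctx.Repo.Permission, ctx.Repo.RefFullName)
	if err != nil {
		ctx.ServerError("PrepareCommitFormOptions", err)
		return nil
	}
	if opts.NeedFork {
		// the doer cannot push to this branch and has no writable fork yet,
		// so the template can offer "fork this repository to propose changes"
		ctx.Data["NeedFork"] = true
		return nil
	}
	// otherwise the form posts to opts.TargetFormAction, which points at the
	// doer's fork when opts.WillSubmitToFork is set
	ctx.Data["CommitFormOptions"] = opts
	return opts
}

The NeedFork flag lets callers offer a fork flow instead of failing outright, and TargetFormAction transparently routes the submission to the fork's default branch via the from_base_branch query parameter built above.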
// CanUseTimetracker returns whether a user can use the timetracker. @@ -328,7 +365,9 @@ func RedirectToRepo(ctx *Base, redirectRepoID int64) { if ctx.Req.URL.RawQuery != "" { redirectPath += "?" + ctx.Req.URL.RawQuery } - ctx.Redirect(path.Join(setting.AppSubURL, redirectPath), http.StatusTemporaryRedirect) + // Git client needs a 301 redirect by default to follow the new location + // It's not documented in the git documentation, but it's the behavior of the git client + ctx.Redirect(path.Join(setting.AppSubURL, redirectPath), http.StatusMovedPermanently) } func repoAssignment(ctx *Context, repo *repo_model.Repository) { @@ -338,13 +377,17 @@ func repoAssignment(ctx *Context, repo *repo_model.Repository) { return } - ctx.Repo.Permission, err = access_model.GetUserRepoPermission(ctx, repo, ctx.Doer) - if err != nil { - ctx.ServerError("GetUserRepoPermission", err) - return + if ctx.DoerNeedTwoFactorAuth() { + ctx.Repo.Permission = access_model.PermissionNoAccess() + } else { + ctx.Repo.Permission, err = access_model.GetUserRepoPermission(ctx, repo, ctx.Doer) + if err != nil { + ctx.ServerError("GetUserRepoPermission", err) + return + } } - if !ctx.Repo.Permission.HasAnyUnitAccessOrEveryoneAccess() && !canWriteAsMaintainer(ctx) { + if !ctx.Repo.Permission.HasAnyUnitAccessOrPublicAccess() && !canWriteAsMaintainer(ctx) { if ctx.FormString("go-get") == "1" { EarlyResponseForGoGetMeta(ctx) return @@ -667,12 +710,6 @@ func RepoAssignment(ctx *Context) { const headRefName = "HEAD" -func RepoRef() func(*Context) { - // old code does: return RepoRefByType(git.RefTypeBranch) - // in most cases, it is an abuse, so we just disable it completely and fix the abuses one by one (if there is anything wrong) - return nil -} - func getRefNameFromPath(repo *Repository, path string, isExist func(string) bool) string { refName := "" parts := strings.Split(path, "/") @@ -795,8 +832,8 @@ func RepoRefByType(detectRefType git.RefType) func(*Context) { return func(ctx *Context) { var err error refType := detectRefType - if ctx.Repo.Repository.IsBeingCreated() { - return // no git repo, so do nothing, users will see a "migrating" UI provided by "migrate/migrating.tmpl" + if ctx.Repo.Repository.IsBeingCreated() || ctx.Repo.Repository.IsBroken() { + return // no git repo, so do nothing, users will see a "migrating" UI provided by "migrate/migrating.tmpl", or the empty repo guide } // Empty repository does not have reference information.
if ctx.Repo.Repository.IsEmpty { @@ -815,9 +852,9 @@ func RepoRefByType(detectRefType git.RefType) func(*Context) { if reqPath == "" { refShortName = ctx.Repo.Repository.DefaultBranch if !gitrepo.IsBranchExist(ctx, ctx.Repo.Repository, refShortName) { - brs, _, err := ctx.Repo.GitRepo.GetBranches(0, 1) + brs, _, err := ctx.Repo.GitRepo.GetBranchNames(0, 1) if err == nil && len(brs) != 0 { - refShortName = brs[0].Name + refShortName = brs[0] } else if len(brs) == 0 { log.Error("No branches in non-empty repository %s", ctx.Repo.GitRepo.Path) } else { @@ -936,6 +973,15 @@ func RepoRefByType(detectRefType git.RefType) func(*Context) { ctx.ServerError("GetCommitsCount", err) return } + if ctx.Repo.RefFullName.IsTag() { + rel, err := repo_model.GetRelease(ctx, ctx.Repo.Repository.ID, ctx.Repo.RefFullName.TagName()) + if err == nil && rel.NumCommits <= 0 { + rel.NumCommits = ctx.Repo.CommitsCount + if err := repo_model.UpdateReleaseNumCommits(ctx, rel); err != nil { + log.Error("UpdateReleaseNumCommits", err) + } + } + } ctx.Data["CommitsCount"] = ctx.Repo.CommitsCount ctx.Repo.GitRepo.LastCommitCache = git.NewLastCommitCache(ctx.Repo.CommitsCount, ctx.Repo.Repository.FullName(), ctx.Repo.GitRepo, cache.GetCache()) } diff --git a/services/context/upload/upload.go b/services/context/upload/upload.go index da4370a433..23707950d4 100644 --- a/services/context/upload/upload.go +++ b/services/context/upload/upload.go @@ -11,7 +11,9 @@ import ( "regexp" "strings" + repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/reqctx" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/services/context" ) @@ -39,7 +41,7 @@ func Verify(buf []byte, fileName, allowedTypesStr string) error { allowedTypesStr = strings.ReplaceAll(allowedTypesStr, "|", ",") // compat for old config format allowedTypes := []string{} - for _, entry := range strings.Split(allowedTypesStr, ",") { + for entry := range strings.SplitSeq(allowedTypesStr, ",") { entry = strings.ToLower(strings.TrimSpace(entry)) if entry != "" { allowedTypes = append(allowedTypes, entry) @@ -87,14 +89,15 @@ func Verify(buf []byte, fileName, allowedTypesStr string) error { // AddUploadContext renders template values for dropzone func AddUploadContext(ctx *context.Context, uploadType string) { - if uploadType == "release" { + switch uploadType { + case "release": ctx.Data["UploadUrl"] = ctx.Repo.RepoLink + "/releases/attachments" ctx.Data["UploadRemoveUrl"] = ctx.Repo.RepoLink + "/releases/attachments/remove" ctx.Data["UploadLinkUrl"] = ctx.Repo.RepoLink + "/releases/attachments" ctx.Data["UploadAccepts"] = strings.ReplaceAll(setting.Repository.Release.AllowedTypes, "|", ",") ctx.Data["UploadMaxFiles"] = setting.Attachment.MaxFiles ctx.Data["UploadMaxSize"] = setting.Attachment.MaxSize - } else if uploadType == "comment" { + case "comment": ctx.Data["UploadUrl"] = ctx.Repo.RepoLink + "/issues/attachments" ctx.Data["UploadRemoveUrl"] = ctx.Repo.RepoLink + "/issues/attachments/remove" if len(ctx.PathParam("index")) > 0 { @@ -105,12 +108,17 @@ func AddUploadContext(ctx *context.Context, uploadType string) { ctx.Data["UploadAccepts"] = strings.ReplaceAll(setting.Attachment.AllowedTypes, "|", ",") ctx.Data["UploadMaxFiles"] = setting.Attachment.MaxFiles ctx.Data["UploadMaxSize"] = setting.Attachment.MaxSize - } else if uploadType == "repo" { - ctx.Data["UploadUrl"] = ctx.Repo.RepoLink + "/upload-file" - ctx.Data["UploadRemoveUrl"] = ctx.Repo.RepoLink + "/upload-remove" - ctx.Data["UploadLinkUrl"] = 
ctx.Repo.RepoLink + "/upload-file" - ctx.Data["UploadAccepts"] = strings.ReplaceAll(setting.Repository.Upload.AllowedTypes, "|", ",") - ctx.Data["UploadMaxFiles"] = setting.Repository.Upload.MaxFiles - ctx.Data["UploadMaxSize"] = setting.Repository.Upload.FileMaxSize + default: + setting.PanicInDevOrTesting("Invalid upload type: %s", uploadType) } } + +func AddUploadContextForRepo(ctx reqctx.RequestContext, repo *repo_model.Repository) { + ctxData, repoLink := ctx.GetData(), repo.Link() + ctxData["UploadUrl"] = repoLink + "/upload-file" + ctxData["UploadRemoveUrl"] = repoLink + "/upload-remove" + ctxData["UploadLinkUrl"] = repoLink + "/upload-file" + ctxData["UploadAccepts"] = strings.ReplaceAll(setting.Repository.Upload.AllowedTypes, "|", ",") + ctxData["UploadMaxFiles"] = setting.Repository.Upload.MaxFiles + ctxData["UploadMaxSize"] = setting.Repository.Upload.FileMaxSize +} diff --git a/services/contexttest/context_tests.go b/services/contexttest/context_tests.go index c895de3569..b54023897b 100644 --- a/services/contexttest/context_tests.go +++ b/services/contexttest/context_tests.go @@ -170,10 +170,19 @@ func LoadUser(t *testing.T, ctx gocontext.Context, userID int64) { // LoadGitRepo load a git repo into a test context. Requires that ctx.Repo has // already been populated. -func LoadGitRepo(t *testing.T, ctx *context.Context) { - assert.NoError(t, ctx.Repo.Repository.LoadOwner(ctx)) +func LoadGitRepo(t *testing.T, ctx gocontext.Context) { + var repo *context.Repository + switch ctx := any(ctx).(type) { + case *context.Context: + repo = ctx.Repo + case *context.APIContext: + repo = ctx.Repo + default: + assert.FailNow(t, "context is not *context.Context or *context.APIContext") + } + assert.NoError(t, repo.Repository.LoadOwner(ctx)) var err error - ctx.Repo.GitRepo, err = gitrepo.OpenRepository(ctx, ctx.Repo.Repository) + repo.GitRepo, err = gitrepo.OpenRepository(ctx, repo.Repository) assert.NoError(t, err) } diff --git a/services/convert/convert.go b/services/convert/convert.go index ac2680766c..0de3822140 100644 --- a/services/convert/convert.go +++ b/services/convert/convert.go @@ -5,8 +5,11 @@ package convert import ( + "bytes" "context" "fmt" + "net/url" + "path" "strconv" "strings" "time" @@ -14,6 +17,7 @@ import ( actions_model "code.gitea.io/gitea/models/actions" asymkey_model "code.gitea.io/gitea/models/asymkey" "code.gitea.io/gitea/models/auth" + "code.gitea.io/gitea/models/db" git_model "code.gitea.io/gitea/models/git" issues_model "code.gitea.io/gitea/models/issues" "code.gitea.io/gitea/models/organization" @@ -22,6 +26,7 @@ import ( repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/actions" "code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" @@ -30,6 +35,9 @@ import ( "code.gitea.io/gitea/modules/util" asymkey_service "code.gitea.io/gitea/services/asymkey" "code.gitea.io/gitea/services/gitdiff" + + runnerv1 "code.gitea.io/actions-proto-go/runner/v1" + "github.com/nektos/act/pkg/model" ) // ToEmail convert models.EmailAddress to api.Email @@ -141,7 +149,7 @@ func ToBranchProtection(ctx context.Context, bp *git_model.ProtectedBranch, repo mergeWhitelistUsernames := getWhitelistEntities(readers, bp.MergeWhitelistUserIDs) approvalsWhitelistUsernames := getWhitelistEntities(readers, bp.ApprovalsWhitelistUserIDs) - teamReaders, err := organization.OrgFromUser(repo.Owner).TeamsWithAccessToRepo(ctx, repo.ID, 
perm.AccessModeRead) + teamReaders, err := organization.GetTeamsWithAccessToAnyRepoUnit(ctx, repo.Owner.ID, repo.ID, perm.AccessModeRead, unit.TypeCode, unit.TypePullRequests) if err != nil { log.Error("Repo.Owner.TeamsWithAccessToRepo: %v", err) } @@ -195,13 +203,22 @@ func ToBranchProtection(ctx context.Context, bp *git_model.ProtectedBranch, repo // ToTag convert a git.Tag to an api.Tag func ToTag(repo *repo_model.Repository, t *git.Tag) *api.Tag { + tarballURL := util.URLJoin(repo.HTMLURL(), "archive", t.Name+".tar.gz") + zipballURL := util.URLJoin(repo.HTMLURL(), "archive", t.Name+".zip") + + // Archive URLs are "" if the download feature is disabled + if setting.Repository.DisableDownloadSourceArchives { + tarballURL = "" + zipballURL = "" + } + return &api.Tag{ Name: t.Name, Message: strings.TrimSpace(t.Message), ID: t.ID.String(), Commit: ToCommitMeta(repo, t), - ZipballURL: util.URLJoin(repo.HTMLURL(), "archive", t.Name+".zip"), - TarballURL: util.URLJoin(repo.HTMLURL(), "archive", t.Name+".tar.gz"), + ZipballURL: zipballURL, + TarballURL: tarballURL, } } @@ -230,6 +247,242 @@ func ToActionTask(ctx context.Context, t *actions_model.ActionTask) (*api.Action }, nil } +func ToActionWorkflowRun(ctx context.Context, repo *repo_model.Repository, run *actions_model.ActionRun) (*api.ActionWorkflowRun, error) { + err := run.LoadAttributes(ctx) + if err != nil { + return nil, err + } + status, conclusion := ToActionsStatus(run.Status) + return &api.ActionWorkflowRun{ + ID: run.ID, + URL: fmt.Sprintf("%s/actions/runs/%d", repo.APIURL(), run.ID), + HTMLURL: run.HTMLURL(), + RunNumber: run.Index, + StartedAt: run.Started.AsLocalTime(), + CompletedAt: run.Stopped.AsLocalTime(), + Event: string(run.Event), + DisplayTitle: run.Title, + HeadBranch: git.RefName(run.Ref).BranchName(), + HeadSha: run.CommitSHA, + Status: status, + Conclusion: conclusion, + Path: fmt.Sprintf("%s@%s", run.WorkflowID, run.Ref), + Repository: ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeNone}), + TriggerActor: ToUser(ctx, run.TriggerUser, nil), + // We do not have a way to get a different User for the actor than the trigger user + Actor: ToUser(ctx, run.TriggerUser, nil), + }, nil +} + +func ToWorkflowRunAction(status actions_model.Status) string { + var action string + switch status { + case actions_model.StatusWaiting, actions_model.StatusBlocked: + action = "requested" + case actions_model.StatusRunning: + action = "in_progress" + } + if status.IsDone() { + action = "completed" + } + return action +} + +func ToActionsStatus(status actions_model.Status) (string, string) { + var action string + var conclusion string + switch status { + // This is a naming conflict of the webhook between Gitea and GitHub Actions + case actions_model.StatusWaiting: + action = "queued" + case actions_model.StatusBlocked: + action = "waiting" + case actions_model.StatusRunning: + action = "in_progress" + } + if status.IsDone() { + action = "completed" + switch status { + case actions_model.StatusSuccess: + conclusion = "success" + case actions_model.StatusCancelled: + conclusion = "cancelled" + case actions_model.StatusFailure: + conclusion = "failure" + case actions_model.StatusSkipped: + conclusion = "skipped" + } + } + return action, conclusion +} + +// ToActionWorkflowJob convert a actions_model.ActionRunJob to an api.ActionWorkflowJob +// task is optional and can be nil +func ToActionWorkflowJob(ctx context.Context, repo *repo_model.Repository, task *actions_model.ActionTask, job *actions_model.ActionRunJob) 
(*api.ActionWorkflowJob, error) { + err := job.LoadAttributes(ctx) + if err != nil { + return nil, err + } + + jobIndex := 0 + jobs, err := actions_model.GetRunJobsByRunID(ctx, job.RunID) + if err != nil { + return nil, err + } + for i, j := range jobs { + if j.ID == job.ID { + jobIndex = i + break + } + } + + status, conclusion := ToActionsStatus(job.Status) + var runnerID int64 + var runnerName string + var steps []*api.ActionWorkflowStep + + if job.TaskID != 0 { + if task == nil { + task, _, err = db.GetByID[actions_model.ActionTask](ctx, job.TaskID) + if err != nil { + return nil, err + } + } + + runnerID = task.RunnerID + if runner, ok, _ := db.GetByID[actions_model.ActionRunner](ctx, runnerID); ok { + runnerName = runner.Name + } + for i, step := range task.Steps { + stepStatus, stepConclusion := ToActionsStatus(job.Status) + steps = append(steps, &api.ActionWorkflowStep{ + Name: step.Name, + Number: int64(i), + Status: stepStatus, + Conclusion: stepConclusion, + StartedAt: step.Started.AsTime().UTC(), + CompletedAt: step.Stopped.AsTime().UTC(), + }) + } + } + + return &api.ActionWorkflowJob{ + ID: job.ID, + // missing api endpoint for this location + URL: fmt.Sprintf("%s/actions/jobs/%d", repo.APIURL(), job.ID), + HTMLURL: fmt.Sprintf("%s/jobs/%d", job.Run.HTMLURL(), jobIndex), + RunID: job.RunID, + // Missing api endpoint for this location, artifacts are available under a nested url + RunURL: fmt.Sprintf("%s/actions/runs/%d", repo.APIURL(), job.RunID), + Name: job.Name, + Labels: job.RunsOn, + RunAttempt: job.Attempt, + HeadSha: job.Run.CommitSHA, + HeadBranch: git.RefName(job.Run.Ref).BranchName(), + Status: status, + Conclusion: conclusion, + RunnerID: runnerID, + RunnerName: runnerName, + Steps: steps, + CreatedAt: job.Created.AsTime().UTC(), + StartedAt: job.Started.AsTime().UTC(), + CompletedAt: job.Stopped.AsTime().UTC(), + }, nil +} + +func getActionWorkflowEntry(ctx context.Context, repo *repo_model.Repository, commit *git.Commit, folder string, entry *git.TreeEntry) *api.ActionWorkflow { + cfgUnit := repo.MustGetUnit(ctx, unit.TypeActions) + cfg := cfgUnit.ActionsConfig() + + defaultBranch, _ := commit.GetBranchName() + + workflowURL := fmt.Sprintf("%s/actions/workflows/%s", repo.APIURL(), util.PathEscapeSegments(entry.Name())) + workflowRepoURL := fmt.Sprintf("%s/src/branch/%s/%s/%s", repo.HTMLURL(ctx), util.PathEscapeSegments(defaultBranch), util.PathEscapeSegments(folder), util.PathEscapeSegments(entry.Name())) + badgeURL := fmt.Sprintf("%s/actions/workflows/%s/badge.svg?branch=%s", repo.HTMLURL(ctx), util.PathEscapeSegments(entry.Name()), url.QueryEscape(repo.DefaultBranch)) + + // See https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#get-a-workflow + // State types: + // - active + // - deleted + // - disabled_fork + // - disabled_inactivity + // - disabled_manually + state := "active" + if cfg.IsWorkflowDisabled(entry.Name()) { + state = "disabled_manually" + } + + // The CreatedAt and UpdatedAt fields currently reflect the timestamp of the latest commit, which can later be refined + // by retrieving the first and last commits for the file history. The first commit would indicate the creation date, + // while the last commit would represent the modification date. The DeletedAt could be determined by identifying + // the last commit where the file existed. However, this implementation has not been done here yet, as it would likely + // cause a significant performance degradation. 
+ createdAt := commit.Author.When + updatedAt := commit.Author.When + + content, err := actions.GetContentFromEntry(entry) + name := entry.Name() + if err == nil { + workflow, err := model.ReadWorkflow(bytes.NewReader(content)) + if err == nil { + // Only use the name when specified in the workflow file + if workflow.Name != "" { + name = workflow.Name + } + } else { + log.Error("getActionWorkflowEntry: Failed to parse workflow: %v", err) + } + } else { + log.Error("getActionWorkflowEntry: Failed to get content from entry: %v", err) + } + + return &api.ActionWorkflow{ + ID: entry.Name(), + Name: name, + Path: path.Join(folder, entry.Name()), + State: state, + CreatedAt: createdAt, + UpdatedAt: updatedAt, + URL: workflowURL, + HTMLURL: workflowRepoURL, + BadgeURL: badgeURL, + } +} + +func ListActionWorkflows(ctx context.Context, gitrepo *git.Repository, repo *repo_model.Repository) ([]*api.ActionWorkflow, error) { + defaultBranchCommit, err := gitrepo.GetBranchCommit(repo.DefaultBranch) + if err != nil { + return nil, err + } + + folder, entries, err := actions.ListWorkflows(defaultBranchCommit) + if err != nil { + return nil, err + } + + workflows := make([]*api.ActionWorkflow, len(entries)) + for i, entry := range entries { + workflows[i] = getActionWorkflowEntry(ctx, repo, defaultBranchCommit, folder, entry) + } + + return workflows, nil +} + +func GetActionWorkflow(ctx context.Context, gitrepo *git.Repository, repo *repo_model.Repository, workflowID string) (*api.ActionWorkflow, error) { + entries, err := ListActionWorkflows(ctx, gitrepo, repo) + if err != nil { + return nil, err + } + + for _, entry := range entries { + if entry.ID == workflowID { + return entry, nil + } + } + + return nil, util.NewNotExistErrorf("workflow %q not found", workflowID) +} + // ToActionArtifact convert a actions_model.ActionArtifact to an api.ActionArtifact func ToActionArtifact(repo *repo_model.Repository, art *actions_model.ActionArtifact) (*api.ActionArtifact, error) { url := fmt.Sprintf("%s/actions/artifacts/%d", repo.APIURL(), art.ID) @@ -252,6 +505,30 @@ func ToActionArtifact(repo *repo_model.Repository, art *actions_model.ActionArti }, nil } +func ToActionRunner(ctx context.Context, runner *actions_model.ActionRunner) *api.ActionRunner { + status := runner.Status() + apiStatus := "offline" + if runner.IsOnline() { + apiStatus = "online" + } + labels := make([]*api.ActionRunnerLabel, len(runner.AgentLabels)) + for i, label := range runner.AgentLabels { + labels[i] = &api.ActionRunnerLabel{ + ID: int64(i), + Name: label, + Type: "custom", + } + } + return &api.ActionRunner{ + ID: runner.ID, + Name: runner.Name, + Status: apiStatus, + Busy: status == runnerv1.RunnerStatus_RUNNER_STATUS_ACTIVE, + Ephemeral: runner.Ephemeral, + Labels: labels, + } +} + // ToVerification convert a git.Commit.Signature to an api.PayloadCommitVerification func ToVerification(ctx context.Context, c *git.Commit) *api.PayloadCommitVerification { verif := asymkey_service.ParseCommitWithSignature(ctx, c) @@ -281,6 +558,7 @@ func ToPublicKey(apiLink string, key *asymkey_model.PublicKey) *api.PublicKey { Title: key.Name, Fingerprint: key.Fingerprint, Created: key.CreatedUnix.AsTime(), + Updated: key.UpdatedUnix.AsTime(), } } @@ -449,7 +727,7 @@ func ToTagProtection(ctx context.Context, pt *git_model.ProtectedTag, repo *repo whitelistUsernames := getWhitelistEntities(readers, pt.AllowlistUserIDs) - teamReaders, err := organization.OrgFromUser(repo.Owner).TeamsWithAccessToRepo(ctx, repo.ID, perm.AccessModeRead) + teamReaders, err := 
organization.GetTeamsWithAccessToAnyRepoUnit(ctx, repo.Owner.ID, repo.ID, perm.AccessModeRead, unit.TypeCode, unit.TypePullRequests) if err != nil { log.Error("Repo.Owner.TeamsWithAccessToRepo: %v", err) } diff --git a/services/convert/git_commit_test.go b/services/convert/git_commit_test.go index 73cb5e8c71..ad1cc0eca3 100644 --- a/services/convert/git_commit_test.go +++ b/services/convert/git_commit_test.go @@ -33,7 +33,7 @@ func TestToCommitMeta(t *testing.T) { commitMeta := ToCommitMeta(headRepo, tag) assert.NotNil(t, commitMeta) - assert.EqualValues(t, &api.CommitMeta{ + assert.Equal(t, &api.CommitMeta{ SHA: sha1.EmptyObjectID().String(), URL: util.URLJoin(headRepo.APIURL(), "git/commits", sha1.EmptyObjectID().String()), Created: time.Unix(0, 0), diff --git a/services/convert/pull.go b/services/convert/pull.go index 928534ce5e..8f9679f649 100644 --- a/services/convert/pull.go +++ b/services/convert/pull.go @@ -14,6 +14,7 @@ import ( repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/cache" + "code.gitea.io/gitea/modules/cachegroup" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/log" @@ -28,8 +29,8 @@ import ( // Optional - Merger func ToAPIPullRequest(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User) *api.PullRequest { var ( - baseBranch *git.Branch - headBranch *git.Branch + baseBranch string + headBranch string baseCommit *git.Commit err error ) @@ -60,14 +61,14 @@ func ToAPIPullRequest(ctx context.Context, pr *issues_model.PullRequest, doer *u doerID = doer.ID } - const repoDoerPermCacheKey = "repo_doer_perm_cache" - p, err := cache.GetWithContextCache(ctx, repoDoerPermCacheKey, fmt.Sprintf("%d_%d", pr.BaseRepoID, doerID), - func() (access_model.Permission, error) { + repoUserPerm, err := cache.GetWithContextCache(ctx, cachegroup.RepoUserPermission, fmt.Sprintf("%d-%d", pr.BaseRepoID, doerID), + func(ctx context.Context, _ string) (access_model.Permission, error) { return access_model.GetUserRepoPermission(ctx, pr.BaseRepo, doer) - }) + }, + ) if err != nil { log.Error("GetUserRepoPermission[%d]: %v", pr.BaseRepoID, err) - p.AccessMode = perm.AccessModeNone + repoUserPerm.AccessMode = perm.AccessModeNone } apiPullRequest := &api.PullRequest{ @@ -107,7 +108,7 @@ func ToAPIPullRequest(ctx context.Context, pr *issues_model.PullRequest, doer *u Name: pr.BaseBranch, Ref: pr.BaseBranch, RepoID: pr.BaseRepoID, - Repository: ToRepo(ctx, pr.BaseRepo, p), + Repository: ToRepo(ctx, pr.BaseRepo, repoUserPerm), }, Head: &api.PRBranchInfo{ Name: pr.HeadBranch, @@ -150,16 +151,16 @@ func ToAPIPullRequest(ctx context.Context, pr *issues_model.PullRequest, doer *u } defer gitRepo.Close() - baseBranch, err = gitRepo.GetBranch(pr.BaseBranch) - if err != nil && !git.IsErrBranchNotExist(err) { + exist, err := git_model.IsBranchExist(ctx, pr.BaseRepoID, pr.BaseBranch) + if err != nil { log.Error("GetBranch[%s]: %v", pr.BaseBranch, err) return nil } - if err == nil { - baseCommit, err = baseBranch.GetCommit() + if exist { + baseCommit, err = gitRepo.GetBranchCommit(pr.BaseBranch) if err != nil && !git.IsErrNotExist(err) { - log.Error("GetCommit[%s]: %v", baseBranch.Name, err) + log.Error("GetCommit[%s]: %v", baseBranch, err) return nil } @@ -169,13 +170,6 @@ func ToAPIPullRequest(ctx context.Context, pr *issues_model.PullRequest, doer *u } if pr.Flow == issues_model.PullRequestFlowAGit { - gitRepo, err := gitrepo.OpenRepository(ctx, pr.BaseRepo) - if err != nil 
{ - log.Error("OpenRepository[%s]: %v", pr.GetGitRefName(), err) - return nil - } - defer gitRepo.Close() - apiPullRequest.Head.Sha, err = gitRepo.GetRefCommitID(pr.GetGitRefName()) if err != nil { log.Error("GetRefCommitID[%s]: %v", pr.GetGitRefName(), err) @@ -203,8 +197,8 @@ func ToAPIPullRequest(ctx context.Context, pr *issues_model.PullRequest, doer *u } defer headGitRepo.Close() - headBranch, err = headGitRepo.GetBranch(pr.HeadBranch) - if err != nil && !git.IsErrBranchNotExist(err) { + exist, err = git_model.IsBranchExist(ctx, pr.HeadRepoID, pr.HeadBranch) + if err != nil { log.Error("GetBranch[%s]: %v", pr.HeadBranch, err) return nil } @@ -215,7 +209,7 @@ func ToAPIPullRequest(ctx context.Context, pr *issues_model.PullRequest, doer *u endCommitID string ) - if git.IsErrBranchNotExist(err) { + if !exist { headCommitID, err := headGitRepo.GetRefCommitID(apiPullRequest.Head.Ref) if err != nil && !git.IsErrNotExist(err) { log.Error("GetCommit[%s]: %v", pr.HeadBranch, err) @@ -226,9 +220,9 @@ func ToAPIPullRequest(ctx context.Context, pr *issues_model.PullRequest, doer *u endCommitID = headCommitID } } else { - commit, err := headBranch.GetCommit() + commit, err := headGitRepo.GetBranchCommit(pr.HeadBranch) if err != nil && !git.IsErrNotExist(err) { - log.Error("GetCommit[%s]: %v", headBranch.Name, err) + log.Error("GetCommit[%s]: %v", headBranch, err) return nil } if err == nil { @@ -389,7 +383,7 @@ func ToAPIPullRequests(ctx context.Context, baseRepo *repo_model.Repository, prs }, Head: &api.PRBranchInfo{ Name: pr.HeadBranch, - Ref: fmt.Sprintf("%s%d/head", git.PullPrefix, pr.Index), + Ref: pr.GetGitRefName(), RepoID: -1, }, } @@ -422,72 +416,43 @@ func ToAPIPullRequests(ctx context.Context, baseRepo *repo_model.Repository, prs return nil, err } } - if baseBranch != nil { apiPullRequest.Base.Sha = baseBranch.CommitID } + if pr.HeadRepoID == pr.BaseRepoID { + apiPullRequest.Head.Repository = apiPullRequest.Base.Repository + } - if pr.Flow == issues_model.PullRequestFlowAGit { - apiPullRequest.Head.Sha, err = gitRepo.GetRefCommitID(pr.GetGitRefName()) + // pull request head branch, both repository and branch could not exist + if pr.HeadRepo != nil { + apiPullRequest.Head.RepoID = pr.HeadRepo.ID + exist, err := git_model.IsBranchExist(ctx, pr.HeadRepo.ID, pr.HeadBranch) if err != nil { - log.Error("GetRefCommitID[%s]: %v", pr.GetGitRefName(), err) + log.Error("IsBranchExist[%d]: %v", pr.HeadRepo.ID, err) return nil, err } - apiPullRequest.Head.RepoID = pr.BaseRepoID - apiPullRequest.Head.Repository = apiPullRequest.Base.Repository - apiPullRequest.Head.Name = "" - } - - var headGitRepo *git.Repository - if pr.HeadRepo != nil && pr.Flow == issues_model.PullRequestFlowGithub { - if pr.HeadRepoID == pr.BaseRepoID { - apiPullRequest.Head.RepoID = pr.HeadRepo.ID - apiPullRequest.Head.Repository = apiRepo - headGitRepo = gitRepo - } else { + if exist { + apiPullRequest.Head.Ref = pr.HeadBranch + } + if pr.HeadRepoID != pr.BaseRepoID { p, err := access_model.GetUserRepoPermission(ctx, pr.HeadRepo, doer) if err != nil { log.Error("GetUserRepoPermission[%d]: %v", pr.HeadRepoID, err) p.AccessMode = perm.AccessModeNone } - - apiPullRequest.Head.RepoID = pr.HeadRepo.ID apiPullRequest.Head.Repository = ToRepo(ctx, pr.HeadRepo, p) - - headGitRepo, err = gitrepo.OpenRepository(ctx, pr.HeadRepo) - if err != nil { - log.Error("OpenRepository[%s]: %v", pr.HeadRepo.RepoPath(), err) - return nil, err - } - defer headGitRepo.Close() - } - - headBranch, err := headGitRepo.GetBranch(pr.HeadBranch) - if err != 
nil && !git.IsErrBranchNotExist(err) { - log.Error("GetBranch[%s]: %v", pr.HeadBranch, err) - return nil, err } + } + if apiPullRequest.Head.Ref == "" { + apiPullRequest.Head.Ref = pr.GetGitRefName() + } - if git.IsErrBranchNotExist(err) { - headCommitID, err := headGitRepo.GetRefCommitID(apiPullRequest.Head.Ref) - if err != nil && !git.IsErrNotExist(err) { - log.Error("GetCommit[%s]: %v", pr.HeadBranch, err) - return nil, err - } - if err == nil { - apiPullRequest.Head.Sha = headCommitID - } - } else { - commit, err := headBranch.GetCommit() - if err != nil && !git.IsErrNotExist(err) { - log.Error("GetCommit[%s]: %v", headBranch.Name, err) - return nil, err - } - if err == nil { - apiPullRequest.Head.Ref = pr.HeadBranch - apiPullRequest.Head.Sha = commit.ID.String() - } - } + if pr.Flow == issues_model.PullRequestFlowAGit { + apiPullRequest.Head.Name = "" + } + apiPullRequest.Head.Sha, err = gitRepo.GetRefCommitID(pr.GetGitRefName()) + if err != nil { + log.Error("GetRefCommitID[%s]: %v", pr.GetGitRefName(), err) } if len(apiPullRequest.Head.Sha) == 0 && len(apiPullRequest.Head.Ref) != 0 { diff --git a/services/convert/pull_review_test.go b/services/convert/pull_review_test.go index a1296fafd4..d0a077ab24 100644 --- a/services/convert/pull_review_test.go +++ b/services/convert/pull_review_test.go @@ -19,8 +19,8 @@ func Test_ToPullReview(t *testing.T) { reviewer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}) review := unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: 6}) - assert.EqualValues(t, reviewer.ID, review.ReviewerID) - assert.EqualValues(t, issues_model.ReviewTypePending, review.Type) + assert.Equal(t, reviewer.ID, review.ReviewerID) + assert.Equal(t, issues_model.ReviewTypePending, review.Type) reviewList := []*issues_model.Review{review} diff --git a/services/convert/pull_test.go b/services/convert/pull_test.go index e069fa4a68..dfbe24d184 100644 --- a/services/convert/pull_test.go +++ b/services/convert/pull_test.go @@ -27,7 +27,7 @@ func TestPullRequest_APIFormat(t *testing.T) { assert.NoError(t, pr.LoadIssue(db.DefaultContext)) apiPullRequest := ToAPIPullRequest(git.DefaultContext, pr, nil) assert.NotNil(t, apiPullRequest) - assert.EqualValues(t, &structs.PRBranchInfo{ + assert.Equal(t, &structs.PRBranchInfo{ Name: "branch1", Ref: "refs/pull/2/head", Sha: "4a357436d925b5c974181ff12a994538ddc5a269", @@ -46,4 +46,11 @@ func TestPullRequest_APIFormat(t *testing.T) { assert.NotNil(t, apiPullRequest) assert.Nil(t, apiPullRequest.Head.Repository) assert.EqualValues(t, -1, apiPullRequest.Head.RepoID) + + apiPullRequests, err := ToAPIPullRequests(git.DefaultContext, pr.BaseRepo, []*issues_model.PullRequest{pr}, nil) + assert.NoError(t, err) + assert.Len(t, apiPullRequests, 1) + assert.NotNil(t, apiPullRequests[0]) + assert.Nil(t, apiPullRequests[0].Head.Repository) + assert.EqualValues(t, -1, apiPullRequests[0].Head.RepoID) } diff --git a/services/convert/release_test.go b/services/convert/release_test.go index 201b27e16d..bb618c9ca3 100644 --- a/services/convert/release_test.go +++ b/services/convert/release_test.go @@ -23,6 +23,6 @@ func TestRelease_ToRelease(t *testing.T) { apiRelease := ToAPIRelease(db.DefaultContext, repo1, release1) assert.NotNil(t, apiRelease) assert.EqualValues(t, 1, apiRelease.ID) - assert.EqualValues(t, "https://try.gitea.io/api/v1/repos/user2/repo1/releases/1", apiRelease.URL) - assert.EqualValues(t, "https://try.gitea.io/api/v1/repos/user2/repo1/releases/1/assets", apiRelease.UploadURL) + assert.Equal(t, 
"https://try.gitea.io/api/v1/repos/user2/repo1/releases/1", apiRelease.URL) + assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/releases/1/assets", apiRelease.UploadURL) } diff --git a/services/convert/repository.go b/services/convert/repository.go index 7dfdfd2179..614eb58a88 100644 --- a/services/convert/repository.go +++ b/services/convert/repository.go @@ -98,6 +98,8 @@ func innerToRepo(ctx context.Context, repo *repo_model.Repository, permissionInR allowSquash := false allowFastForwardOnly := false allowRebaseUpdate := false + allowManualMerge := true + autodetectManualMerge := false defaultDeleteBranchAfterMerge := false defaultMergeStyle := repo_model.MergeStyleMerge defaultAllowMaintainerEdit := false @@ -111,6 +113,8 @@ func innerToRepo(ctx context.Context, repo *repo_model.Repository, permissionInR allowSquash = config.AllowSquash allowFastForwardOnly = config.AllowFastForwardOnly allowRebaseUpdate = config.AllowRebaseUpdate + allowManualMerge = config.AllowManualMerge + autodetectManualMerge = config.AutodetectManualMerge defaultDeleteBranchAfterMerge = config.DefaultDeleteBranchAfterMerge defaultMergeStyle = config.GetDefaultMergeStyle() defaultAllowMaintainerEdit = config.DefaultAllowMaintainerEdit @@ -235,6 +239,8 @@ func innerToRepo(ctx context.Context, repo *repo_model.Repository, permissionInR AllowSquash: allowSquash, AllowFastForwardOnly: allowFastForwardOnly, AllowRebaseUpdate: allowRebaseUpdate, + AllowManualMerge: allowManualMerge, + AutodetectManualMerge: autodetectManualMerge, DefaultDeleteBranchAfterMerge: defaultDeleteBranchAfterMerge, DefaultMergeStyle: string(defaultMergeStyle), DefaultAllowMaintainerEdit: defaultAllowMaintainerEdit, diff --git a/services/convert/status.go b/services/convert/status.go index 6cef63c1cd..b4864a0307 100644 --- a/services/convert/status.go +++ b/services/convert/status.go @@ -5,6 +5,7 @@ package convert import ( "context" + "net/url" git_model "code.gitea.io/gitea/models/git" user_model "code.gitea.io/gitea/models/user" @@ -32,34 +33,29 @@ func ToCommitStatus(ctx context.Context, status *git_model.CommitStatus) *api.Co return apiStatus } +func ToCommitStatuses(ctx context.Context, statuses []*git_model.CommitStatus) []*api.CommitStatus { + apiStatuses := make([]*api.CommitStatus, len(statuses)) + for i, status := range statuses { + apiStatuses[i] = ToCommitStatus(ctx, status) + } + return apiStatuses +} + // ToCombinedStatus converts List of CommitStatus to a CombinedStatus func ToCombinedStatus(ctx context.Context, statuses []*git_model.CommitStatus, repo *api.Repository) *api.CombinedStatus { if len(statuses) == 0 { return nil } - retStatus := &api.CombinedStatus{ - SHA: statuses[0].SHA, + combinedStatus := git_model.CalcCommitStatus(statuses) + + return &api.CombinedStatus{ + State: combinedStatus.State, + Statuses: ToCommitStatuses(ctx, statuses), + SHA: combinedStatus.SHA, TotalCount: len(statuses), Repository: repo, - URL: "", + CommitURL: repo.URL + "/commits/" + url.PathEscape(combinedStatus.SHA), + URL: repo.URL + "/commits/" + url.PathEscape(combinedStatus.SHA) + "/status", } - - retStatus.Statuses = make([]*api.CommitStatus, 0, len(statuses)) - for _, status := range statuses { - retStatus.Statuses = append(retStatus.Statuses, ToCommitStatus(ctx, status)) - if retStatus.State == "" || status.State.NoBetterThan(retStatus.State) { - retStatus.State = status.State - } - } - // According to https://docs.github.com/en/rest/commits/statuses?apiVersion=2022-11-28#get-the-combined-status-for-a-specific-reference - // 
> Additionally, a combined state is returned. The state is one of: - // > failure if any of the contexts report as error or failure - // > pending if there are no statuses or a context is pending - // > success if the latest status for all contexts is success - if retStatus.State.IsError() { - retStatus.State = api.CommitStatusFailure - } - - return retStatus } diff --git a/services/convert/user_test.go b/services/convert/user_test.go index 4b1effc7aa..199d500732 100644 --- a/services/convert/user_test.go +++ b/services/convert/user_test.go @@ -30,11 +30,11 @@ func TestUser_ToUser(t *testing.T) { apiUser = toUser(db.DefaultContext, user1, false, false) assert.False(t, apiUser.IsAdmin) - assert.EqualValues(t, api.VisibleTypePublic.String(), apiUser.Visibility) + assert.Equal(t, api.VisibleTypePublic.String(), apiUser.Visibility) user31 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 31, IsAdmin: false, Visibility: api.VisibleTypePrivate}) apiUser = toUser(db.DefaultContext, user31, true, true) assert.False(t, apiUser.IsAdmin) - assert.EqualValues(t, api.VisibleTypePrivate.String(), apiUser.Visibility) + assert.Equal(t, api.VisibleTypePrivate.String(), apiUser.Visibility) } diff --git a/services/convert/utils_test.go b/services/convert/utils_test.go index a8363ec6bd..7965624e2b 100644 --- a/services/convert/utils_test.go +++ b/services/convert/utils_test.go @@ -10,10 +10,10 @@ import ( ) func TestToCorrectPageSize(t *testing.T) { - assert.EqualValues(t, 30, ToCorrectPageSize(0)) - assert.EqualValues(t, 30, ToCorrectPageSize(-10)) - assert.EqualValues(t, 20, ToCorrectPageSize(20)) - assert.EqualValues(t, 50, ToCorrectPageSize(100)) + assert.Equal(t, 30, ToCorrectPageSize(0)) + assert.Equal(t, 30, ToCorrectPageSize(-10)) + assert.Equal(t, 20, ToCorrectPageSize(20)) + assert.Equal(t, 50, ToCorrectPageSize(100)) } func TestToGitServiceType(t *testing.T) { diff --git a/services/doctor/actions.go b/services/doctor/actions.go index 7c44fb8392..28e26c88eb 100644 --- a/services/doctor/actions.go +++ b/services/doctor/actions.go @@ -19,7 +19,7 @@ func disableMirrorActionsUnit(ctx context.Context, logger log.Logger, autofix bo var reposToFix []*repo_model.Repository for page := 1; ; page++ { - repos, _, err := repo_model.SearchRepository(ctx, &repo_model.SearchRepoOptions{ + repos, _, err := repo_model.SearchRepository(ctx, repo_model.SearchRepoOptions{ ListOptions: db.ListOptions{ PageSize: repo_model.RepositoryListDefaultPageSize, Page: page, diff --git a/services/doctor/authorizedkeys.go b/services/doctor/authorizedkeys.go index 8d6fc9cb5e..46e7099dce 100644 --- a/services/doctor/authorizedkeys.go +++ b/services/doctor/authorizedkeys.go @@ -7,6 +7,7 @@ import ( "bufio" "bytes" "context" + "errors" "fmt" "os" "path/filepath" @@ -78,7 +79,7 @@ func checkAuthorizedKeys(ctx context.Context, logger log.Logger, autofix bool) e fPath, "gitea admin regenerate keys", "gitea doctor --run authorized-keys --fix") - return fmt.Errorf(`authorized_keys is out of date and should be regenerated with "gitea admin regenerate keys" or "gitea doctor --run authorized-keys --fix"`) + return errors.New(`authorized_keys is out of date and should be regenerated with "gitea admin regenerate keys" or "gitea doctor --run authorized-keys --fix"`) } logger.Warn("authorized_keys is out of date. 
Attempting rewrite...") err = asymkey_service.RewriteAllPublicKeys(ctx) diff --git a/services/doctor/dbconsistency.go b/services/doctor/dbconsistency.go index 62326ed07c..d5a133d8b2 100644 --- a/services/doctor/dbconsistency.go +++ b/services/doctor/dbconsistency.go @@ -15,6 +15,7 @@ import ( secret_model "code.gitea.io/gitea/models/secret" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" + issue_service "code.gitea.io/gitea/services/issue" ) type consistencyCheck struct { @@ -93,7 +94,7 @@ func prepareDBConsistencyChecks() []consistencyCheck { // find issues without existing repository Name: "Orphaned Issues without existing repository", Counter: issues_model.CountOrphanedIssues, - Fixer: asFixer(issues_model.DeleteOrphanedIssues), + Fixer: asFixer(issue_service.DeleteOrphanedIssues), }, // find releases without existing repository genericOrphanCheck("Orphaned Releases without existing repository", diff --git a/services/doctor/fix16961_test.go b/services/doctor/fix16961_test.go index 498ed9c8d5..11a128620c 100644 --- a/services/doctor/fix16961_test.go +++ b/services/doctor/fix16961_test.go @@ -19,12 +19,6 @@ func Test_fixUnitConfig_16961(t *testing.T) { wantErr bool }{ { - name: "empty", - bs: "", - wantFixed: true, - wantErr: false, - }, - { name: "normal: {}", bs: "{}", wantFixed: false, @@ -221,7 +215,7 @@ func Test_fixPullRequestsConfig_16961(t *testing.T) { if gotFixed != tt.wantFixed { t.Errorf("fixPullRequestsConfig_16961() = %v, want %v", gotFixed, tt.wantFixed) } - assert.EqualValues(t, &tt.expected, cfg) + assert.Equal(t, &tt.expected, cfg) }) } } @@ -265,7 +259,7 @@ func Test_fixIssuesConfig_16961(t *testing.T) { if gotFixed != tt.wantFixed { t.Errorf("fixIssuesConfig_16961() = %v, want %v", gotFixed, tt.wantFixed) } - assert.EqualValues(t, &tt.expected, cfg) + assert.Equal(t, &tt.expected, cfg) }) } } diff --git a/services/doctor/lfs.go b/services/doctor/lfs.go index 5f110b8f97..a90f394450 100644 --- a/services/doctor/lfs.go +++ b/services/doctor/lfs.go @@ -5,7 +5,7 @@ package doctor import ( "context" - "fmt" + "errors" "time" "code.gitea.io/gitea/modules/log" @@ -27,7 +27,7 @@ func init() { func garbageCollectLFSCheck(ctx context.Context, logger log.Logger, autofix bool) error { if !setting.LFS.StartServer { - return fmt.Errorf("LFS support is disabled") + return errors.New("LFS support is disabled") } if err := repository.GarbageCollectLFSMetaObjects(ctx, repository.GarbageCollectLFSMetaObjectsOptions{ diff --git a/services/doctor/misc.go b/services/doctor/misc.go index 260a28ec4c..1269d088c3 100644 --- a/services/doctor/misc.go +++ b/services/doctor/misc.go @@ -8,7 +8,7 @@ import ( "fmt" "os" "os/exec" - "path" + "path/filepath" "strings" "code.gitea.io/gitea/models" @@ -49,14 +49,14 @@ func checkScriptType(ctx context.Context, logger log.Logger, autofix bool) error func checkHooks(ctx context.Context, logger log.Logger, autofix bool) error { if err := iterateRepositories(ctx, func(repo *repo_model.Repository) error { - results, err := gitrepo.CheckDelegateHooksForRepo(ctx, repo) + results, err := gitrepo.CheckDelegateHooks(ctx, repo) if err != nil { logger.Critical("Unable to check delegate hooks for repo %-v. ERROR: %v", repo, err) return fmt.Errorf("Unable to check delegate hooks for repo %-v. 
ERROR: %w", repo, err) } if len(results) > 0 && autofix { logger.Warn("Regenerated hooks for %s", repo.FullName()) - if err := gitrepo.CreateDelegateHooksForRepo(ctx, repo); err != nil { + if err := gitrepo.CreateDelegateHooks(ctx, repo); err != nil { logger.Critical("Unable to recreate delegate hooks for %-v. ERROR: %v", repo, err) return fmt.Errorf("Unable to recreate delegate hooks for %-v. ERROR: %w", repo, err) } @@ -148,7 +148,7 @@ func checkDaemonExport(ctx context.Context, logger log.Logger, autofix bool) err } // Create/Remove git-daemon-export-ok for git-daemon... - daemonExportFile := path.Join(repo.RepoPath(), `git-daemon-export-ok`) + daemonExportFile := filepath.Join(repo.RepoPath(), `git-daemon-export-ok`) isExist, err := util.IsExist(daemonExportFile) if err != nil { log.Error("Unable to check if %s exists. Error: %v", daemonExportFile, err) @@ -196,7 +196,7 @@ func checkCommitGraph(ctx context.Context, logger log.Logger, autofix bool) erro commitGraphExists := func() (bool, error) { // Check commit-graph exists - commitGraphFile := path.Join(repo.RepoPath(), `objects/info/commit-graph`) + commitGraphFile := filepath.Join(repo.RepoPath(), `objects/info/commit-graph`) isExist, err := util.IsExist(commitGraphFile) if err != nil { logger.Error("Unable to check if %s exists. Error: %v", commitGraphFile, err) @@ -204,7 +204,7 @@ func checkCommitGraph(ctx context.Context, logger log.Logger, autofix bool) erro } if !isExist { - commitGraphsDir := path.Join(repo.RepoPath(), `objects/info/commit-graphs`) + commitGraphsDir := filepath.Join(repo.RepoPath(), `objects/info/commit-graphs`) isExist, err = util.IsExist(commitGraphsDir) if err != nil { logger.Error("Unable to check if %s exists. Error: %v", commitGraphsDir, err) diff --git a/services/doctor/paths.go b/services/doctor/paths.go index 3f62d587ab..4214c36b1a 100644 --- a/services/doctor/paths.go +++ b/services/doctor/paths.go @@ -99,15 +99,14 @@ func checkConfigurationFiles(ctx context.Context, logger log.Logger, autofix boo func isWritableDir(path string) error { // There's no platform-independent way of checking if a directory is writable // https://stackoverflow.com/questions/20026320/how-to-tell-if-folder-exists-and-is-writable - tmpFile, err := os.CreateTemp(path, "doctors-order") if err != nil { return err } if err := os.Remove(tmpFile.Name()); err != nil { - fmt.Printf("Warning: can't remove temporary file: '%s'\n", tmpFile.Name()) //nolint:forbidigo + log.Warn("can't remove temporary file: %q", tmpFile.Name()) } - tmpFile.Close() + _ = tmpFile.Close() return nil } diff --git a/services/doctor/repository.go b/services/doctor/repository.go index 6c33426636..359c4a17e0 100644 --- a/services/doctor/repository.go +++ b/services/doctor/repository.go @@ -7,7 +7,6 @@ import ( "context" "code.gitea.io/gitea/models/db" - user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/storage" repo_service "code.gitea.io/gitea/services/repository" @@ -39,7 +38,6 @@ func deleteOrphanedRepos(ctx context.Context) (int64, error) { batchSize := db.MaxBatchInsertSize("repository") e := db.GetEngine(ctx) var deleted int64 - adminUser := &user_model.User{IsAdmin: true} for { select { @@ -60,7 +58,7 @@ func deleteOrphanedRepos(ctx context.Context) (int64, error) { } for _, id := range ids { - if err := repo_service.DeleteRepositoryDirectly(ctx, adminUser, id, true); err != nil { + if err := repo_service.DeleteRepositoryDirectly(ctx, id, true); err != nil { return deleted, err } deleted++ diff 
--git a/services/doctor/storage.go b/services/doctor/storage.go index 3f3b562c37..77fc6d65df 100644 --- a/services/doctor/storage.go +++ b/services/doctor/storage.go @@ -121,7 +121,7 @@ func checkStorage(opts *checkStorageOptions) func(ctx context.Context, logger lo storer: storage.LFS, isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) { // The oid of an LFS stored object is the name but with all the path.Separators removed - oid := strings.ReplaceAll(path, "/", "") + oid := strings.ReplaceAll(strings.ReplaceAll(path, "\\", ""), "/", "") exists, err := git.ExistsLFSObject(ctx, oid) return !exists, err }, diff --git a/services/externalaccount/link.go b/services/externalaccount/link.go index d6e2ea7e94..ab853140cb 100644 --- a/services/externalaccount/link.go +++ b/services/externalaccount/link.go @@ -5,7 +5,7 @@ package externalaccount import ( "context" - "fmt" + "errors" user_model "code.gitea.io/gitea/models/user" @@ -23,7 +23,7 @@ type Store interface { func LinkAccountFromStore(ctx context.Context, store Store, user *user_model.User) error { gothUser := store.Get("linkAccountGothUser") if gothUser == nil { - return fmt.Errorf("not in LinkAccount session") + return errors.New("not in LinkAccount session") } return LinkAccountToUser(ctx, user, gothUser.(goth.User)) diff --git a/services/feed/feed.go b/services/feed/feed.go index a1c327fb51..1dbd2e0e26 100644 --- a/services/feed/feed.go +++ b/services/feed/feed.go @@ -6,6 +6,7 @@ package feed import ( "context" "fmt" + "strings" activities_model "code.gitea.io/gitea/models/activities" "code.gitea.io/gitea/models/db" @@ -14,8 +15,15 @@ import ( "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" ) +func GetFeedsForDashboard(ctx context.Context, opts activities_model.GetFeedsOptions) (activities_model.ActionList, int, error) { + opts.DontCount = opts.RequestedTeam == nil && opts.Date == "" + results, cnt, err := activities_model.GetFeeds(ctx, opts) + return results, util.Iif(opts.DontCount, -1, int(cnt)), err +} + // GetFeeds returns actions according to the provided options func GetFeeds(ctx context.Context, opts activities_model.GetFeedsOptions) (activities_model.ActionList, int64, error) { return activities_model.GetFeeds(ctx, opts) @@ -27,7 +35,18 @@ func GetFeeds(ctx context.Context, opts activities_model.GetFeedsOptions) (activ // * Organization action: UserID=100 (the repo's org), ActUserID=1 // * Watcher action: UserID=20 (a user who is watching a repo), ActUserID=1 func notifyWatchers(ctx context.Context, act *activities_model.Action, watchers []*repo_model.Watch, permCode, permIssue, permPR []bool) error { - // Add feed for actioner. + // MySQL has TEXT length limit 65535. + // Sometimes the content is "field1|field2|field3", sometimes the content is JSON (ActionMirrorSyncPush, ActionCommitRepo, ActionPushTag, etc...) + if left, right := util.EllipsisDisplayStringX(act.Content, 65535); right != "" { + if strings.HasPrefix(act.Content, `{"`) && strings.HasSuffix(act.Content, `}`) { + // FIXME: at the moment we can do nothing if the content is JSON and it is too long + act.Content = "{}" + } else { + act.Content = left + } + } + + // Add feed for actor. 
act.UserID = act.ActUserID if err := db.Insert(ctx, act); err != nil { return fmt.Errorf("insert new actioner: %w", err) @@ -63,6 +82,7 @@ func notifyWatchers(ctx context.Context, act *activities_model.Action, watchers if !permPR[i] { continue } + default: } if err := db.Insert(ctx, act); err != nil { @@ -73,7 +93,7 @@ func notifyWatchers(ctx context.Context, act *activities_model.Action, watchers return nil } -// NotifyWatchersActions creates batch of actions for every watcher. +// NotifyWatchers creates batch of actions for every watcher. func NotifyWatchers(ctx context.Context, acts ...*activities_model.Action) error { return db.WithTx(ctx, func(ctx context.Context) error { if len(acts) == 0 { diff --git a/services/feed/feed_test.go b/services/feed/feed_test.go index 243bc046b0..705d42a2eb 100644 --- a/services/feed/feed_test.go +++ b/services/feed/feed_test.go @@ -30,7 +30,7 @@ func TestGetFeeds(t *testing.T) { assert.NoError(t, err) if assert.Len(t, actions, 1) { assert.EqualValues(t, 1, actions[0].ID) - assert.EqualValues(t, user.ID, actions[0].UserID) + assert.Equal(t, user.ID, actions[0].UserID) } assert.Equal(t, int64(1), count) @@ -107,7 +107,7 @@ func TestGetFeeds2(t *testing.T) { assert.Len(t, actions, 1) if assert.Len(t, actions, 1) { assert.EqualValues(t, 2, actions[0].ID) - assert.EqualValues(t, org.ID, actions[0].UserID) + assert.Equal(t, org.ID, actions[0].UserID) } assert.Equal(t, int64(1), count) @@ -147,7 +147,7 @@ func TestRepoActions(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}) _ = db.TruncateBeans(db.DefaultContext, &activities_model.Action{}) - for i := 0; i < 3; i++ { + for i := range 3 { _ = db.Insert(db.DefaultContext, &activities_model.Action{ UserID: 2 + int64(i), ActUserID: 2, diff --git a/services/forms/auth_form.go b/services/forms/auth_form.go index c9f3182b3a..a8f97572b1 100644 --- a/services/forms/auth_form.go +++ b/services/forms/auth_form.go @@ -14,9 +14,11 @@ import ( // AuthenticationForm form for authentication type AuthenticationForm struct { - ID int64 - Type int `binding:"Range(2,7)"` - Name string `binding:"Required;MaxSize(30)"` + ID int64 + Type int `binding:"Range(2,7)"` + Name string `binding:"Required;MaxSize(30)"` + TwoFactorPolicy string + Host string Port int BindDN string @@ -74,7 +76,6 @@ type AuthenticationForm struct { Oauth2RestrictedGroup string Oauth2GroupTeamMap string `binding:"ValidGroupTeamMap"` Oauth2GroupTeamMapRemoval bool - SkipLocalTwoFA bool SSPIAutoCreateUsers bool SSPIAutoActivateUsers bool SSPIStripDomainNames bool diff --git a/services/forms/org.go b/services/forms/org.go index db182f7e96..2ac18ef25c 100644 --- a/services/forms/org.go +++ b/services/forms/org.go @@ -36,7 +36,6 @@ func (f *CreateOrgForm) Validate(req *http.Request, errs binding.Errors) binding // UpdateOrgSettingForm form for updating organization settings type UpdateOrgSettingForm struct { - Name string `binding:"Required;Username;MaxSize(40)" locale:"org.org_name_holder"` FullName string `binding:"MaxSize(100)"` Email string `binding:"MaxSize(255)"` Description string `binding:"MaxSize(255)"` @@ -53,6 +52,11 @@ func (f *UpdateOrgSettingForm) Validate(req *http.Request, errs binding.Errors) return middleware.Validate(errs, ctx.Data, f, ctx.Locale) } +type RenameOrgForm struct { + OrgName string `binding:"Required"` + NewOrgName string `binding:"Required;Username;MaxSize(40)" locale:"org.org_name_holder"` +} + // ___________ // \__ ___/___ _____ _____ // | 
|_/ __ \\__ \ / \ diff --git a/services/forms/repo_form.go b/services/forms/repo_form.go index 1366d30b1f..d116bb9f11 100644 --- a/services/forms/repo_form.go +++ b/services/forms/repo_form.go @@ -10,7 +10,6 @@ import ( issues_model "code.gitea.io/gitea/models/issues" project_model "code.gitea.io/gitea/models/project" - "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/web/middleware" "code.gitea.io/gitea/services/context" @@ -110,17 +109,14 @@ type RepoSettingForm struct { EnablePrune bool // Advanced settings - EnableCode bool - DefaultCodeEveryoneAccess string + EnableCode bool - EnableWiki bool - EnableExternalWiki bool - DefaultWikiBranch string - DefaultWikiEveryoneAccess string - ExternalWikiURL string + EnableWiki bool + EnableExternalWiki bool + DefaultWikiBranch string + ExternalWikiURL string EnableIssues bool - DefaultIssuesEveryoneAccess string EnableExternalTracker bool ExternalTrackerURL string TrackerURLFormat string @@ -237,6 +233,7 @@ type WebhookForm struct { Release bool Package bool Status bool + WorkflowRun bool WorkflowJob bool Active bool BranchFilter string `binding:"GlobPattern"` @@ -476,22 +473,6 @@ func (i *IssueLockForm) Validate(req *http.Request, errs binding.Errors) binding return middleware.Validate(errs, ctx.Data, i, ctx.Locale) } -// HasValidReason checks to make sure that the reason submitted in -// the form matches any of the values in the config -func (i IssueLockForm) HasValidReason() bool { - if strings.TrimSpace(i.Reason) == "" { - return true - } - - for _, v := range setting.Repository.Issue.LockReasons { - if v == i.Reason { - return true - } - } - - return false -} - // CreateProjectForm form for creating a project type CreateProjectForm struct { Title string `binding:"Required;MaxSize(100)"` @@ -522,12 +503,13 @@ func (f *CreateMilestoneForm) Validate(req *http.Request, errs binding.Errors) b // CreateLabelForm form for creating label type CreateLabelForm struct { - ID int64 - Title string `binding:"Required;MaxSize(50)" locale:"repo.issues.label_title"` - Exclusive bool `form:"exclusive"` - IsArchived bool `form:"is_archived"` - Description string `binding:"MaxSize(200)" locale:"repo.issues.label_description"` - Color string `binding:"Required;MaxSize(7)" locale:"repo.issues.label_color"` + ID int64 + Title string `binding:"Required;MaxSize(50)" locale:"repo.issues.label_title"` + Exclusive bool `form:"exclusive"` + ExclusiveOrder int `form:"exclusive_order"` + IsArchived bool `form:"is_archived"` + Description string `binding:"MaxSize(200)" locale:"repo.issues.label_description"` + Color string `binding:"Required;MaxSize(7)" locale:"repo.issues.label_color"` } // Validate validates the fields @@ -698,129 +680,6 @@ func (f *NewWikiForm) Validate(req *http.Request, errs binding.Errors) binding.E return middleware.Validate(errs, ctx.Data, f, ctx.Locale) } -// ___________ .___.__ __ -// \_ _____/ __| _/|__|/ |_ -// | __)_ / __ | | \ __\ -// | \/ /_/ | | || | -// /_______ /\____ | |__||__| -// \/ \/ - -// EditRepoFileForm form for changing repository file -type EditRepoFileForm struct { - TreePath string `binding:"Required;MaxSize(500)"` - Content string - CommitSummary string `binding:"MaxSize(100)"` - CommitMessage string - CommitChoice string `binding:"Required;MaxSize(50)"` - NewBranchName string `binding:"GitRefName;MaxSize(100)"` - LastCommit string - Signoff bool - CommitEmail string -} - -// Validate validates the fields -func (f *EditRepoFileForm) Validate(req *http.Request, errs 
binding.Errors) binding.Errors { - ctx := context.GetValidateContext(req) - return middleware.Validate(errs, ctx.Data, f, ctx.Locale) -} - -// EditPreviewDiffForm form for changing preview diff -type EditPreviewDiffForm struct { - Content string -} - -// Validate validates the fields -func (f *EditPreviewDiffForm) Validate(req *http.Request, errs binding.Errors) binding.Errors { - ctx := context.GetValidateContext(req) - return middleware.Validate(errs, ctx.Data, f, ctx.Locale) -} - -// _________ .__ __________.__ __ -// \_ ___ \| |__ __________________ ___.__. \______ \__| ____ | | __ -// / \ \/| | \_/ __ \_ __ \_ __ < | | | ___/ |/ ___\| |/ / -// \ \___| Y \ ___/| | \/| | \/\___ | | | | \ \___| < -// \______ /___| /\___ >__| |__| / ____| |____| |__|\___ >__|_ \ -// \/ \/ \/ \/ \/ \/ - -// CherryPickForm form for changing repository file -type CherryPickForm struct { - CommitSummary string `binding:"MaxSize(100)"` - CommitMessage string - CommitChoice string `binding:"Required;MaxSize(50)"` - NewBranchName string `binding:"GitRefName;MaxSize(100)"` - LastCommit string - Revert bool - Signoff bool - CommitEmail string -} - -// Validate validates the fields -func (f *CherryPickForm) Validate(req *http.Request, errs binding.Errors) binding.Errors { - ctx := context.GetValidateContext(req) - return middleware.Validate(errs, ctx.Data, f, ctx.Locale) -} - -// ____ ___ .__ .___ -// | | \______ | | _________ __| _/ -// | | /\____ \| | / _ \__ \ / __ | -// | | / | |_> > |_( <_> ) __ \_/ /_/ | -// |______/ | __/|____/\____(____ /\____ | -// |__| \/ \/ -// - -// UploadRepoFileForm form for uploading repository file -type UploadRepoFileForm struct { - TreePath string `binding:"MaxSize(500)"` - CommitSummary string `binding:"MaxSize(100)"` - CommitMessage string - CommitChoice string `binding:"Required;MaxSize(50)"` - NewBranchName string `binding:"GitRefName;MaxSize(100)"` - Files []string - Signoff bool - CommitEmail string -} - -// Validate validates the fields -func (f *UploadRepoFileForm) Validate(req *http.Request, errs binding.Errors) binding.Errors { - ctx := context.GetValidateContext(req) - return middleware.Validate(errs, ctx.Data, f, ctx.Locale) -} - -// RemoveUploadFileForm form for removing uploaded file -type RemoveUploadFileForm struct { - File string `binding:"Required;MaxSize(50)"` -} - -// Validate validates the fields -func (f *RemoveUploadFileForm) Validate(req *http.Request, errs binding.Errors) binding.Errors { - ctx := context.GetValidateContext(req) - return middleware.Validate(errs, ctx.Data, f, ctx.Locale) -} - -// ________ .__ __ -// \______ \ ____ | | _____/ |_ ____ -// | | \_/ __ \| | _/ __ \ __\/ __ \ -// | ` \ ___/| |_\ ___/| | \ ___/ -// /_______ /\___ >____/\___ >__| \___ > -// \/ \/ \/ \/ - -// DeleteRepoFileForm form for deleting repository file -type DeleteRepoFileForm struct { - CommitSummary string `binding:"MaxSize(100)"` - CommitMessage string - CommitChoice string `binding:"Required;MaxSize(50)"` - NewBranchName string `binding:"GitRefName;MaxSize(100)"` - LastCommit string - Signoff bool - CommitEmail string -} - -// Validate validates the fields -func (f *DeleteRepoFileForm) Validate(req *http.Request, errs binding.Errors) binding.Errors { - ctx := context.GetValidateContext(req) - return middleware.Validate(errs, ctx.Data, f, ctx.Locale) -} - // ___________.__ ___________ __ // \__ ___/|__| _____ ____ \__ ___/___________ ____ | | __ ___________ // | | | |/ \_/ __ \ | | \_ __ \__ \ _/ ___\| |/ // __ \_ __ \ diff --git a/services/forms/repo_form_editor.go 
b/services/forms/repo_form_editor.go new file mode 100644 index 0000000000..3ad2eae75d --- /dev/null +++ b/services/forms/repo_form_editor.go @@ -0,0 +1,57 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package forms + +import ( + "net/http" + + "code.gitea.io/gitea/modules/optional" + "code.gitea.io/gitea/modules/web/middleware" + "code.gitea.io/gitea/services/context" + + "gitea.com/go-chi/binding" +) + +type CommitCommonForm struct { + TreePath string `binding:"MaxSize(500)"` + CommitSummary string `binding:"MaxSize(100)"` + CommitMessage string + CommitChoice string `binding:"Required;MaxSize(50)"` + NewBranchName string `binding:"GitRefName;MaxSize(100)"` + LastCommit string + Signoff bool + CommitEmail string +} + +func (f *CommitCommonForm) Validate(req *http.Request, errs binding.Errors) binding.Errors { + ctx := context.GetValidateContext(req) + return middleware.Validate(errs, ctx.Data, f, ctx.Locale) +} + +type CommitCommonFormInterface interface { + GetCommitCommonForm() *CommitCommonForm +} + +func (f *CommitCommonForm) GetCommitCommonForm() *CommitCommonForm { + return f +} + +type EditRepoFileForm struct { + CommitCommonForm + Content optional.Option[string] +} + +type DeleteRepoFileForm struct { + CommitCommonForm +} + +type UploadRepoFileForm struct { + CommitCommonForm + Files []string +} + +type CherryPickForm struct { + CommitCommonForm + Revert bool +} diff --git a/services/forms/repo_form_test.go b/services/forms/repo_form_test.go index 2c5a8e2c0f..a0c67fe0f8 100644 --- a/services/forms/repo_form_test.go +++ b/services/forms/repo_form_test.go @@ -6,8 +6,6 @@ package forms import ( "testing" - "code.gitea.io/gitea/modules/setting" - "github.com/stretchr/testify/assert" ) @@ -39,26 +37,3 @@ func TestSubmitReviewForm_IsEmpty(t *testing.T) { assert.Equal(t, v.expected, v.form.HasEmptyContent()) } } - -func TestIssueLock_HasValidReason(t *testing.T) { - // Init settings - _ = setting.Repository - - cases := []struct { - form IssueLockForm - expected bool - }{ - {IssueLockForm{""}, true}, // an empty reason is accepted - {IssueLockForm{"Off-topic"}, true}, - {IssueLockForm{"Too heated"}, true}, - {IssueLockForm{"Spam"}, true}, - {IssueLockForm{"Resolved"}, true}, - - {IssueLockForm{"ZZZZ"}, false}, - {IssueLockForm{"I want to lock this issue"}, false}, - } - - for _, v := range cases { - assert.Equal(t, v.expected, v.form.HasValidReason()) - } -} diff --git a/services/forms/user_form_test.go b/services/forms/user_form_test.go index b4120f20ed..09e9ec0f65 100644 --- a/services/forms/user_form_test.go +++ b/services/forms/user_form_test.go @@ -7,6 +7,7 @@ import ( "testing" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/test" "github.com/gobwas/glob" "github.com/stretchr/testify/assert" @@ -26,12 +27,7 @@ func TestRegisterForm_IsDomainAllowed_Empty(t *testing.T) { } func TestRegisterForm_IsDomainAllowed_InvalidEmail(t *testing.T) { - oldService := setting.Service - defer func() { - setting.Service = oldService - }() - - setting.Service.EmailDomainAllowList = []glob.Glob{glob.MustCompile("gitea.io")} + defer test.MockVariableValue(&setting.Service.EmailDomainAllowList, []glob.Glob{glob.MustCompile("gitea.io")})() tt := []struct { email string @@ -48,12 +44,7 @@ func TestRegisterForm_IsDomainAllowed_InvalidEmail(t *testing.T) { } func TestRegisterForm_IsDomainAllowed_AllowedEmail(t *testing.T) { - oldService := setting.Service - defer func() { - setting.Service = oldService - }() - - 
setting.Service.EmailDomainAllowList = []glob.Glob{glob.MustCompile("gitea.io"), glob.MustCompile("*.allow")} + defer test.MockVariableValue(&setting.Service.EmailDomainAllowList, []glob.Glob{glob.MustCompile("gitea.io"), glob.MustCompile("*.allow")})() tt := []struct { email string @@ -76,13 +67,7 @@ func TestRegisterForm_IsDomainAllowed_AllowedEmail(t *testing.T) { } func TestRegisterForm_IsDomainAllowed_BlockedEmail(t *testing.T) { - oldService := setting.Service - defer func() { - setting.Service = oldService - }() - - setting.Service.EmailDomainAllowList = nil - setting.Service.EmailDomainBlockList = []glob.Glob{glob.MustCompile("gitea.io"), glob.MustCompile("*.block")} + defer test.MockVariableValue(&setting.Service.EmailDomainBlockList, []glob.Glob{glob.MustCompile("gitea.io"), glob.MustCompile("*.block")})() tt := []struct { email string diff --git a/services/git/commit.go b/services/git/commit.go index 8ab8f3d369..2e0e8a5096 100644 --- a/services/git/commit.go +++ b/services/git/commit.go @@ -17,7 +17,7 @@ import ( ) // ParseCommitsWithSignature checks if the signatures of commits correspond to users' GPG keys. -func ParseCommitsWithSignature(ctx context.Context, oldCommits []*user_model.UserCommit, repoTrustModel repo_model.TrustModelType, isOwnerMemberCollaborator func(*user_model.User) (bool, error)) ([]*asymkey_model.SignCommit, error) { +func ParseCommitsWithSignature(ctx context.Context, repo *repo_model.Repository, oldCommits []*user_model.UserCommit, repoTrustModel repo_model.TrustModelType) ([]*asymkey_model.SignCommit, error) { newCommits := make([]*asymkey_model.SignCommit, 0, len(oldCommits)) keyMap := map[string]bool{} @@ -34,9 +34,9 @@ func ParseCommitsWithSignature(ctx context.Context, oldCommits []*user_model.Use } for _, c := range oldCommits { - committer, ok := emailUsers[c.Committer.Email] - if !ok && c.Committer != nil { - committer = &user_model.User{ + committerUser := emailUsers.GetByEmail(c.Committer.Email) // FIXME: why ValidateCommitsWithEmails uses "Author", but ParseCommitsWithSignature uses "Committer"?
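This commit.go hunk replaces the map-plus-ok committer lookup with an emailUsers.GetByEmail helper that returns nil for unknown addresses, falling back to a synthesized placeholder user (as the next added lines show). A minimal, self-contained sketch of that lookup-with-fallback shape, using hypothetical stand-in types rather than Gitea's real models:

    package main

    import "fmt"

    type User struct {
        Name  string
        Email string
    }

    // EmailUserMap is a hypothetical stand-in for a map of known users keyed by email.
    type EmailUserMap map[string]*User

    // GetByEmail returns the matching user, or nil when the email is unknown.
    func (m EmailUserMap) GetByEmail(email string) *User {
        return m[email] // a missing key yields the zero value for *User, i.e. nil
    }

    func main() {
        known := EmailUserMap{"alice@example.com": {Name: "alice", Email: "alice@example.com"}}

        for _, email := range []string{"alice@example.com", "ghost@example.com"} {
            u := known.GetByEmail(email)
            if u == nil {
                // Fall back to a placeholder built from the commit metadata, as the hunk does.
                u = &User{Name: "unknown", Email: email}
            }
            fmt.Println(u.Name, u.Email)
        }
    }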
+ if committerUser == nil { + committerUser = &user_model.User{ Name: c.Committer.Name, Email: c.Committer.Email, } @@ -44,7 +44,11 @@ func ParseCommitsWithSignature(ctx context.Context, oldCommits []*user_model.Use signCommit := &asymkey_model.SignCommit{ UserCommit: c, - Verification: asymkey_service.ParseCommitWithSignatureCommitter(ctx, c.Commit, committer), + Verification: asymkey_service.ParseCommitWithSignatureCommitter(ctx, c.Commit, committerUser), + } + + isOwnerMemberCollaborator := func(user *user_model.User) (bool, error) { + return repo_model.IsOwnerMemberCollaborator(ctx, repo, user.ID) } _ = asymkey_model.CalculateTrustStatus(signCommit.Verification, repoTrustModel, isOwnerMemberCollaborator, &keyMap) @@ -62,11 +66,9 @@ func ConvertFromGitCommit(ctx context.Context, commits []*git.Commit, repo *repo } signedCommits, err := ParseCommitsWithSignature( ctx, + repo, validatedCommits, repo.GetTrustModel(), - func(user *user_model.User) (bool, error) { - return repo_model.IsOwnerMemberCollaborator(ctx, repo, user.ID) - }, ) if err != nil { return nil, err @@ -82,7 +84,7 @@ func ParseCommitsWithStatus(ctx context.Context, oldCommits []*asymkey_model.Sig commit := &git_model.SignCommitWithStatuses{ SignCommit: c, } - statuses, _, err := git_model.GetLatestCommitStatus(ctx, repo.ID, commit.ID.String(), db.ListOptions{}) + statuses, err := git_model.GetLatestCommitStatus(ctx, repo.ID, commit.ID.String(), db.ListOptionsAll) if err != nil { return nil, err } diff --git a/services/gitdiff/csv.go b/services/gitdiff/csv.go index 8db73c56a3..c10ee14490 100644 --- a/services/gitdiff/csv.go +++ b/services/gitdiff/csv.go @@ -134,7 +134,7 @@ func createCsvDiffSingle(reader *csv.Reader, celltype TableDiffCellType) ([]*Tab return nil, err } cells := make([]*TableDiffCell, len(row)) - for j := 0; j < len(row); j++ { + for j := range row { if celltype == TableDiffCellDel { cells[j] = &TableDiffCell{LeftCell: row[j], Type: celltype} } else { @@ -365,11 +365,11 @@ func getColumnMapping(baseCSVReader, headCSVReader *csvReader) ([]int, []int) { } // Loops through the baseRow and see if there is a match in the head row - for i := 0; i < len(baseRow); i++ { + for i := range baseRow { base2HeadColMap[i] = unmappedColumn baseCell, err := getCell(baseRow, i) if err == nil { - for j := 0; j < len(headRow); j++ { + for j := range headRow { if head2BaseColMap[j] == -1 { headCell, err := getCell(headRow, j) if err == nil && baseCell == headCell { @@ -390,7 +390,7 @@ func getColumnMapping(baseCSVReader, headCSVReader *csvReader) ([]int, []int) { // tryMapColumnsByContent tries to map missing columns by the content of the first lines. func tryMapColumnsByContent(baseCSVReader *csvReader, base2HeadColMap []int, headCSVReader *csvReader, head2BaseColMap []int) { - for i := 0; i < len(base2HeadColMap); i++ { + for i := range base2HeadColMap { headStart := 0 for base2HeadColMap[i] == unmappedColumn && headStart < len(head2BaseColMap) { if head2BaseColMap[headStart] == unmappedColumn { @@ -424,7 +424,7 @@ func getCell(row []string, column int) (string, error) { // countUnmappedColumns returns the count of unmapped columns. 
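The csv.go hunks above, together with similar changes in feed_test.go and gitdiff_test.go, replace classic C-style index loops with the Go 1.22+ range forms. A short standalone illustration of the three variants used in this diff:

    package main

    import "fmt"

    func main() {
        row := []string{"a", "b", "c"}

        // Range over a slice by index, replacing: for j := 0; j < len(row); j++
        for j := range row {
            fmt.Println(j, row[j])
        }

        // Range over an integer (Go 1.22+), replacing: for i := 0; i < 3; i++
        for i := range 3 {
            fmt.Println("iteration", i)
        }

        // Range over an integer when the index itself is unused, replacing: for i := 0; i < 512; i++
        count := 0
        for range 3 {
            count++
        }
        fmt.Println("count:", count)
    }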
func countUnmappedColumns(mapping []int) int { count := 0 - for i := 0; i < len(mapping); i++ { + for i := range mapping { if mapping[i] == unmappedColumn { count++ } diff --git a/services/gitdiff/git_diff_tree.go b/services/gitdiff/git_diff_tree.go index 035210a31d..ed94bfbfe4 100644 --- a/services/gitdiff/git_diff_tree.go +++ b/services/gitdiff/git_diff_tree.go @@ -6,6 +6,7 @@ package gitdiff import ( "bufio" "context" + "errors" "fmt" "io" "strconv" @@ -71,7 +72,7 @@ func runGitDiffTree(ctx context.Context, gitRepo *git.Repository, useMergeBase b func validateGitDiffTreeArguments(gitRepo *git.Repository, useMergeBase bool, baseSha, headSha string) (shouldUseMergeBase bool, resolvedBaseSha, resolvedHeadSha string, err error) { // if the head is empty its an error if headSha == "" { - return false, "", "", fmt.Errorf("headSha is empty") + return false, "", "", errors.New("headSha is empty") } // if the head commit doesn't exist its and error @@ -207,7 +208,7 @@ func parseGitDiffTreeLine(line string) (*DiffTreeRecord, error) { func statusFromLetter(rawStatus string) (status string, score uint8, err error) { if len(rawStatus) < 1 { - return "", 0, fmt.Errorf("empty status letter") + return "", 0, errors.New("empty status letter") } switch rawStatus[0] { case 'A': @@ -235,7 +236,7 @@ func statusFromLetter(rawStatus string) (status string, score uint8, err error) func tryParseStatusScore(rawStatus string) (uint8, error) { if len(rawStatus) < 2 { - return 0, fmt.Errorf("status score missing") + return 0, errors.New("status score missing") } score, err := strconv.ParseUint(rawStatus[1:], 10, 8) diff --git a/services/gitdiff/gitdiff.go b/services/gitdiff/gitdiff.go index b9781cf8d0..9964329876 100644 --- a/services/gitdiff/gitdiff.go +++ b/services/gitdiff/gitdiff.go @@ -25,6 +25,7 @@ import ( "code.gitea.io/gitea/modules/analyze" "code.gitea.io/gitea/modules/charset" "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/git/attribute" "code.gitea.io/gitea/modules/highlight" "code.gitea.io/gitea/modules/lfs" "code.gitea.io/gitea/modules/log" @@ -213,51 +214,6 @@ func (diffSection *DiffSection) GetLine(idx int) *DiffLine { return diffSection.Lines[idx] } -// GetLine gets a specific line by type (add or del) and file line number -// This algorithm is not quite right. -// Actually now we have "Match" field, it is always right, so use it instead in new GetLine -func (diffSection *DiffSection) getLineLegacy(lineType DiffLineType, idx int) *DiffLine { //nolint:unused - var ( - difference = 0 - addCount = 0 - delCount = 0 - matchDiffLine *DiffLine - ) - -LOOP: - for _, diffLine := range diffSection.Lines { - switch diffLine.Type { - case DiffLineAdd: - addCount++ - case DiffLineDel: - delCount++ - default: - if matchDiffLine != nil { - break LOOP - } - difference = diffLine.RightIdx - diffLine.LeftIdx - addCount = 0 - delCount = 0 - } - - switch lineType { - case DiffLineDel: - if diffLine.RightIdx == 0 && diffLine.LeftIdx == idx-difference { - matchDiffLine = diffLine - } - case DiffLineAdd: - if diffLine.LeftIdx == 0 && diffLine.RightIdx == idx+difference { - matchDiffLine = diffLine - } - } - } - - if addCount == delCount { - return matchDiffLine - } - return nil -} - func defaultDiffMatchPatch() *diffmatchpatch.DiffMatchPatch { dmp := diffmatchpatch.New() dmp.DiffEditCost = 100 @@ -539,10 +495,7 @@ func ParsePatch(ctx context.Context, maxLines, maxLineCharacters, maxFiles int, // OK let's set a reasonable buffer size. 
// This should be at least the size of maxLineCharacters or 4096 whichever is larger. - readerSize := maxLineCharacters - if readerSize < 4096 { - readerSize = 4096 - } + readerSize := max(maxLineCharacters, 4096) input := bufio.NewReaderSize(reader, readerSize) line, err := input.ReadString('\n') @@ -1237,24 +1190,21 @@ func GetDiffForRender(ctx context.Context, gitRepo *git.Repository, opts *DiffOp return nil, err } - checker, deferrable := gitRepo.CheckAttributeReader(opts.AfterCommitID) - defer deferrable() + checker, err := attribute.NewBatchChecker(gitRepo, opts.AfterCommitID, []string{attribute.LinguistVendored, attribute.LinguistGenerated, attribute.LinguistLanguage, attribute.GitlabLanguage}) + if err != nil { + return nil, err + } + defer checker.Close() for _, diffFile := range diff.Files { isVendored := optional.None[bool]() isGenerated := optional.None[bool]() - if checker != nil { - attrs, err := checker.CheckPath(diffFile.Name) - if err == nil { - isVendored = git.AttributeToBool(attrs, git.AttributeLinguistVendored) - isGenerated = git.AttributeToBool(attrs, git.AttributeLinguistGenerated) - - language := git.TryReadLanguageAttribute(attrs) - if language.Has() { - diffFile.Language = language.Value() - } - } else { - checker = nil // CheckPath fails, it's not impossible to "check" anymore + attrs, err := checker.CheckPath(diffFile.Name) + if err == nil { + isVendored, isGenerated = attrs.GetVendored(), attrs.GetGenerated() + language := attrs.GetLanguage() + if language.Has() { + diffFile.Language = language.Value() } } @@ -1339,10 +1289,13 @@ func GetDiffShortStat(gitRepo *git.Repository, beforeCommitID, afterCommitID str // SyncUserSpecificDiff inserts user-specific data such as which files the user has already viewed on the given diff // Additionally, the database is updated asynchronously if files have changed since the last review -func SyncUserSpecificDiff(ctx context.Context, userID int64, pull *issues_model.PullRequest, gitRepo *git.Repository, diff *Diff, opts *DiffOptions, files ...string) error { +func SyncUserSpecificDiff(ctx context.Context, userID int64, pull *issues_model.PullRequest, gitRepo *git.Repository, diff *Diff, opts *DiffOptions) (*pull_model.ReviewState, error) { review, err := pull_model.GetNewestReviewState(ctx, userID, pull.ID) - if err != nil || review == nil || review.UpdatedFiles == nil { - return err + if err != nil { + return nil, err + } + if review == nil || len(review.UpdatedFiles) == 0 { + return review, nil } latestCommit := opts.AfterCommitID @@ -1350,13 +1303,13 @@ func SyncUserSpecificDiff(ctx context.Context, userID int64, pull *issues_model. latestCommit = pull.HeadBranch // opts.AfterCommitID is preferred because it handles PRs from forks correctly and the branch name doesn't } - changedFiles, err := gitRepo.GetFilesChangedBetween(review.CommitSHA, latestCommit) + changedFiles, errIgnored := gitRepo.GetFilesChangedBetween(review.CommitSHA, latestCommit) // There are way too many possible errors. 
// Examples are various git errors such as the commit the review was based on was gc'ed and hence doesn't exist anymore as well as unrecoverable errors where we should serve a 500 response // Due to the current architecture and physical limitation of needing to compare explicit error messages, we can only choose one approach without the code getting ugly // For SOME of the errors such as the gc'ed commit, it would be best to mark all files as changed // But as that does not work for all potential errors, we simply mark all files as unchanged and drop the error which always works, even if not as good as possible - if err != nil { + if errIgnored != nil { log.Error("Could not get changed files between %s and %s for pull request %d in repo with path %s. Assuming no changes. Error: %w", review.CommitSHA, latestCommit, pull.Index, gitRepo.Path, err) } @@ -1395,11 +1348,11 @@ outer: err := pull_model.UpdateReviewState(ctx, review.UserID, review.PullID, review.CommitSHA, filesChangedSinceLastDiff) if err != nil { log.Warn("Could not update review for user %d, pull %d, commit %s and the changed files %v: %v", review.UserID, review.PullID, review.CommitSHA, filesChangedSinceLastDiff, err) - return err + return nil, err } } - return nil + return review, nil } // CommentAsDiff returns c.Patch as *Diff diff --git a/services/gitdiff/gitdiff_test.go b/services/gitdiff/gitdiff_test.go index 71394b1915..b84530043a 100644 --- a/services/gitdiff/gitdiff_test.go +++ b/services/gitdiff/gitdiff_test.go @@ -416,7 +416,7 @@ index 0000000..6bb8f39 ` diffBuilder.WriteString(diff) - for i := 0; i < 35; i++ { + for i := range 35 { diffBuilder.WriteString("+line" + strconv.Itoa(i) + "\n") } diff = diffBuilder.String() @@ -453,11 +453,11 @@ index 0000000..6bb8f39 diffBuilder.Reset() diffBuilder.WriteString(diff) - for i := 0; i < 33; i++ { + for i := range 33 { diffBuilder.WriteString("+line" + strconv.Itoa(i) + "\n") } diffBuilder.WriteString("+line33") - for i := 0; i < 512; i++ { + for range 512 { diffBuilder.WriteString("0123456789ABCDEF") } diffBuilder.WriteByte('\n') diff --git a/services/gitdiff/highlightdiff.go b/services/gitdiff/highlightdiff.go index 6e18651d83..e8be063e69 100644 --- a/services/gitdiff/highlightdiff.go +++ b/services/gitdiff/highlightdiff.go @@ -14,13 +14,14 @@ import ( // token is a html tag or entity, eg: "<span ...>", "</span>", "<" func extractHTMLToken(s string) (before, token, after string, valid bool) { for pos1 := 0; pos1 < len(s); pos1++ { - if s[pos1] == '<' { + switch s[pos1] { + case '<': pos2 := strings.IndexByte(s[pos1:], '>') if pos2 == -1 { return "", "", s, false } return s[:pos1], s[pos1 : pos1+pos2+1], s[pos1+pos2+1:], true - } else if s[pos1] == '&' { + case '&': pos2 := strings.IndexByte(s[pos1:], ';') if pos2 == -1 { return "", "", s, false diff --git a/services/gitdiff/highlightdiff_test.go b/services/gitdiff/highlightdiff_test.go index c2584dc622..aebe38ae7c 100644 --- a/services/gitdiff/highlightdiff_test.go +++ b/services/gitdiff/highlightdiff_test.go @@ -38,15 +38,15 @@ func TestDiffWithHighlight(t *testing.T) { hcd.placeholderTokenMap['O'], hcd.placeholderTokenMap['C'] = "<span>", "</span>" assert.Equal(t, "<span></span>", string(hcd.recoverOneDiff("OC"))) assert.Equal(t, "<span></span>", string(hcd.recoverOneDiff("O"))) - assert.Equal(t, "", string(hcd.recoverOneDiff("C"))) + assert.Empty(t, string(hcd.recoverOneDiff("C"))) }) } func TestDiffWithHighlightPlaceholder(t *testing.T) { hcd := newHighlightCodeDiff() output := hcd.diffLineWithHighlight(DiffLineDel, 
"a='\U00100000'", "a='\U0010FFFD''") - assert.Equal(t, "", hcd.placeholderTokenMap[0x00100000]) - assert.Equal(t, "", hcd.placeholderTokenMap[0x0010FFFD]) + assert.Empty(t, hcd.placeholderTokenMap[0x00100000]) + assert.Empty(t, hcd.placeholderTokenMap[0x0010FFFD]) expected := fmt.Sprintf(`a='<span class="removed-code">%s</span>'`, "\U00100000") assert.Equal(t, expected, string(output)) diff --git a/services/gitdiff/submodule_test.go b/services/gitdiff/submodule_test.go index 3047b23103..152c5b7066 100644 --- a/services/gitdiff/submodule_test.go +++ b/services/gitdiff/submodule_test.go @@ -203,7 +203,6 @@ index 0000000..68972a9 } for _, testcase := range tests { - testcase := testcase t.Run(testcase.name, func(t *testing.T) { diff, err := ParsePatch(db.DefaultContext, setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, strings.NewReader(testcase.gitdiff), "") assert.NoError(t, err) diff --git a/services/issue/assignee.go b/services/issue/assignee.go index c7e2495568..ba9c91e0ed 100644 --- a/services/issue/assignee.go +++ b/services/issue/assignee.go @@ -54,6 +54,8 @@ func ToggleAssigneeWithNotify(ctx context.Context, issue *issues_model.Issue, do if err != nil { return false, nil, err } + issue.AssigneeID = assigneeID + issue.Assignee = assignee notify_service.IssueChangeAssignee(ctx, doer, issue, assignee, removed, comment) @@ -302,7 +304,7 @@ func CanDoerChangeReviewRequests(ctx context.Context, doer *user_model.User, rep // If the repo's owner is an organization, members of teams with read permission on pull requests can change reviewers if repo.Owner.IsOrganization() { - teams, err := organization.GetTeamsWithAccessToRepo(ctx, repo.OwnerID, repo.ID, perm.AccessModeRead) + teams, err := organization.GetTeamsWithAccessToAnyRepoUnit(ctx, repo.OwnerID, repo.ID, perm.AccessModeRead, unit.TypePullRequests) if err != nil { log.Error("GetTeamsWithAccessToRepo: %v", err) return false diff --git a/services/issue/comments.go b/services/issue/comments.go index 46f92f7cd2..10c81198d5 100644 --- a/services/issue/comments.go +++ b/services/issue/comments.go @@ -5,6 +5,7 @@ package issue import ( "context" + "errors" "fmt" "code.gitea.io/gitea/models/db" @@ -22,7 +23,7 @@ import ( // CreateRefComment creates a commit reference comment to issue. 
func CreateRefComment(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, issue *issues_model.Issue, content, commitSHA string) error { if len(commitSHA) == 0 { - return fmt.Errorf("cannot create reference with empty commit SHA") + return errors.New("cannot create reference with empty commit SHA") } if user_model.IsUserBlockedBy(ctx, doer, issue.PosterID, repo.OwnerID) { diff --git a/services/issue/issue.go b/services/issue/issue.go index 455a1ec297..2cb5f2801d 100644 --- a/services/issue/issue.go +++ b/services/issue/issue.go @@ -190,9 +190,13 @@ func DeleteIssue(ctx context.Context, doer *user_model.User, gitRepo *git.Reposi } // delete entries in database - if err := deleteIssue(ctx, issue); err != nil { + attachmentPaths, err := deleteIssue(ctx, issue) + if err != nil { return err } + for _, attachmentPath := range attachmentPaths { + system_model.RemoveStorageWithNotice(ctx, storage.Attachments, "Delete issue attachment", attachmentPath) + } // delete pull request related git data if issue.IsPull && gitRepo != nil { @@ -256,45 +260,45 @@ func GetRefEndNamesAndURLs(issues []*issues_model.Issue, repoLink string) (map[i } // deleteIssue deletes the issue -func deleteIssue(ctx context.Context, issue *issues_model.Issue) error { +func deleteIssue(ctx context.Context, issue *issues_model.Issue) ([]string, error) { ctx, committer, err := db.TxContext(ctx) if err != nil { - return err + return nil, err } defer committer.Close() - e := db.GetEngine(ctx) - if _, err := e.ID(issue.ID).NoAutoCondition().Delete(issue); err != nil { - return err + if _, err := db.GetEngine(ctx).ID(issue.ID).NoAutoCondition().Delete(issue); err != nil { + return nil, err } // update the total issue numbers if err := repo_model.UpdateRepoIssueNumbers(ctx, issue.RepoID, issue.IsPull, false); err != nil { - return err + return nil, err } // if the issue is closed, update the closed issue numbers if issue.IsClosed { if err := repo_model.UpdateRepoIssueNumbers(ctx, issue.RepoID, issue.IsPull, true); err != nil { - return err + return nil, err } } if err := issues_model.UpdateMilestoneCounters(ctx, issue.MilestoneID); err != nil { - return fmt.Errorf("error updating counters for milestone id %d: %w", + return nil, fmt.Errorf("error updating counters for milestone id %d: %w", issue.MilestoneID, err) } if err := activities_model.DeleteIssueActions(ctx, issue.RepoID, issue.ID, issue.Index); err != nil { - return err + return nil, err } // find attachments related to this issue and remove them - if err := issue.LoadAttributes(ctx); err != nil { - return err + if err := issue.LoadAttachments(ctx); err != nil { + return nil, err } + var attachmentPaths []string for i := range issue.Attachments { - system_model.RemoveStorageWithNotice(ctx, storage.Attachments, "Delete issue attachment", issue.Attachments[i].RelativePath()) + attachmentPaths = append(attachmentPaths, issue.Attachments[i].RelativePath()) } // delete all database data still assigned to this issue @@ -318,8 +322,68 @@ func deleteIssue(ctx context.Context, issue *issues_model.Issue) error { &issues_model.Comment{DependentIssueID: issue.ID}, &issues_model.IssuePin{IssueID: issue.ID}, ); err != nil { + return nil, err + } + + if err := committer.Commit(); err != nil { + return nil, err + } + return attachmentPaths, nil +} + +// DeleteOrphanedIssues delete issues without a repo +func DeleteOrphanedIssues(ctx context.Context) error { + var attachmentPaths []string + err := db.WithTx(ctx, func(ctx context.Context) error { + repoIDs, err := 
issues_model.GetOrphanedIssueRepoIDs(ctx) + if err != nil { + return err + } + for i := range repoIDs { + paths, err := DeleteIssuesByRepoID(ctx, repoIDs[i]) + if err != nil { + return err + } + attachmentPaths = append(attachmentPaths, paths...) + } + return nil + }) + if err != nil { return err } - return committer.Commit() + // Remove issue attachment files. + for i := range attachmentPaths { + system_model.RemoveStorageWithNotice(ctx, storage.Attachments, "Delete issue attachment", attachmentPaths[i]) + } + return nil +} + +// DeleteIssuesByRepoID deletes issues by repositories id +func DeleteIssuesByRepoID(ctx context.Context, repoID int64) (attachmentPaths []string, err error) { + for { + issues := make([]*issues_model.Issue, 0, db.DefaultMaxInSize) + if err := db.GetEngine(ctx). + Where("repo_id = ?", repoID). + OrderBy("id"). + Limit(db.DefaultMaxInSize). + Find(&issues); err != nil { + return nil, err + } + + if len(issues) == 0 { + break + } + + for _, issue := range issues { + issueAttachPaths, err := deleteIssue(ctx, issue) + if err != nil { + return nil, fmt.Errorf("deleteIssue [issue_id: %d]: %w", issue.ID, err) + } + + attachmentPaths = append(attachmentPaths, issueAttachPaths...) + } + } + + return attachmentPaths, err } diff --git a/services/issue/issue_test.go b/services/issue/issue_test.go index 8806cec0e7..bad0d65d1e 100644 --- a/services/issue/issue_test.go +++ b/services/issue/issue_test.go @@ -24,8 +24,8 @@ func TestGetRefEndNamesAndURLs(t *testing.T) { repoLink := "/foo/bar" endNames, urls := GetRefEndNamesAndURLs(issues, repoLink) - assert.EqualValues(t, map[int64]string{1: "branch1", 2: "tag1", 3: "c0ffee"}, endNames) - assert.EqualValues(t, map[int64]string{ + assert.Equal(t, map[int64]string{1: "branch1", 2: "tag1", 3: "c0ffee"}, endNames) + assert.Equal(t, map[int64]string{ 1: repoLink + "/src/branch/branch1", 2: repoLink + "/src/tag/tag1", 3: repoLink + "/src/commit/c0ffee", @@ -44,7 +44,7 @@ func TestIssue_DeleteIssue(t *testing.T) { ID: issueIDs[2], } - err = deleteIssue(db.DefaultContext, issue) + _, err = deleteIssue(db.DefaultContext, issue) assert.NoError(t, err) issueIDs, err = issues_model.GetIssueIDsByRepoID(db.DefaultContext, 1) assert.NoError(t, err) @@ -55,7 +55,7 @@ func TestIssue_DeleteIssue(t *testing.T) { assert.NoError(t, err) issue, err = issues_model.GetIssueByID(db.DefaultContext, 4) assert.NoError(t, err) - err = deleteIssue(db.DefaultContext, issue) + _, err = deleteIssue(db.DefaultContext, issue) assert.NoError(t, err) assert.Len(t, attachments, 2) for i := range attachments { @@ -78,7 +78,7 @@ func TestIssue_DeleteIssue(t *testing.T) { assert.NoError(t, err) assert.False(t, left) - err = deleteIssue(db.DefaultContext, issue2) + _, err = deleteIssue(db.DefaultContext, issue2) assert.NoError(t, err) left, err = issues_model.IssueNoDependenciesLeft(db.DefaultContext, issue1) assert.NoError(t, err) diff --git a/services/issue/milestone.go b/services/issue/milestone.go index beb6f131a9..afca70794d 100644 --- a/services/issue/milestone.go +++ b/services/issue/milestone.go @@ -5,6 +5,7 @@ package issue import ( "context" + "errors" "fmt" "code.gitea.io/gitea/models/db" @@ -21,7 +22,7 @@ func changeMilestoneAssign(ctx context.Context, doer *user_model.User, issue *is return fmt.Errorf("HasMilestoneByRepoID: %w", err) } if !has { - return fmt.Errorf("HasMilestoneByRepoID: issue doesn't exist") + return errors.New("HasMilestoneByRepoID: issue doesn't exist") } } diff --git a/services/issue/pull.go b/services/issue/pull.go index 
bd19c25436..3543b05b18 100644 --- a/services/issue/pull.go +++ b/services/issue/pull.go @@ -48,10 +48,6 @@ func IsCodeOwnerFile(f string) bool { } func PullRequestCodeOwnersReview(ctx context.Context, pr *issues_model.PullRequest) ([]*ReviewRequestNotifier, error) { - return PullRequestCodeOwnersReviewSpecialCommits(ctx, pr, "", "") // no commit is provided, then it uses PR's base&head branch -} - -func PullRequestCodeOwnersReviewSpecialCommits(ctx context.Context, pr *issues_model.PullRequest, startCommitID, endCommitID string) ([]*ReviewRequestNotifier, error) { if err := pr.LoadIssue(ctx); err != nil { return nil, err } @@ -100,19 +96,15 @@ func PullRequestCodeOwnersReviewSpecialCommits(ctx context.Context, pr *issues_m return nil, nil } - if startCommitID == "" && endCommitID == "" { - // get the mergebase - mergeBase, err := getMergeBase(repo, pr, git.BranchPrefix+pr.BaseBranch, pr.GetGitRefName()) - if err != nil { - return nil, err - } - startCommitID = mergeBase - endCommitID = pr.GetGitRefName() + // get the mergebase + mergeBase, err := getMergeBase(repo, pr, git.BranchPrefix+pr.BaseBranch, pr.GetGitRefName()) + if err != nil { + return nil, err } // https://github.com/go-gitea/gitea/issues/29763, we need to get the files changed // between the merge base and the head commit but not the base branch and the head commit - changedFiles, err := repo.GetFilesChangedBetween(startCommitID, endCommitID) + changedFiles, err := repo.GetFilesChangedBetween(mergeBase, pr.GetGitRefName()) if err != nil { return nil, err } @@ -138,13 +130,31 @@ func PullRequestCodeOwnersReviewSpecialCommits(ctx context.Context, pr *issues_m return nil, err } + // load all reviews from database + latestReivews, _, err := issues_model.GetReviewsByIssueID(ctx, pr.IssueID) + if err != nil { + return nil, err + } + + contain := func(list issues_model.ReviewList, u *user_model.User) bool { + for _, review := range list { + if review.ReviewerTeamID == 0 && review.ReviewerID == u.ID { + return true + } + } + return false + } + for _, u := range uniqUsers { - if u.ID != issue.Poster.ID { + if u.ID != issue.Poster.ID && !contain(latestReivews, u) { comment, err := issues_model.AddReviewRequest(ctx, issue, u, issue.Poster) if err != nil { log.Warn("Failed add assignee user: %s to PR review: %s#%d, error: %s", u.Name, pr.BaseRepo.Name, pr.ID, err) return nil, err } + if comment == nil { // comment maybe nil if review type is ReviewTypeRequest + continue + } notifiers = append(notifiers, &ReviewRequestNotifier{ Comment: comment, IsAdd: true, @@ -152,12 +162,16 @@ func PullRequestCodeOwnersReviewSpecialCommits(ctx context.Context, pr *issues_m }) } } + for _, t := range uniqTeams { comment, err := issues_model.AddTeamReviewRequest(ctx, issue, t, issue.Poster) if err != nil { log.Warn("Failed add assignee team: %s to PR review: %s#%d, error: %s", t.Name, pr.BaseRepo.Name, pr.ID, err) return nil, err } + if comment == nil { // comment maybe nil if review type is ReviewTypeRequest + continue + } notifiers = append(notifiers, &ReviewRequestNotifier{ Comment: comment, IsAdd: true, diff --git a/services/issue/status.go b/services/issue/status.go index e18b891175..f9d7dca841 100644 --- a/services/issue/status.go +++ b/services/issue/status.go @@ -24,14 +24,14 @@ func CloseIssue(ctx context.Context, issue *issues_model.Issue, doer *user_model comment, err := issues_model.CloseIssue(dbCtx, issue, doer) if err != nil { if issues_model.IsErrDependenciesLeft(err) { - if err := issues_model.FinishIssueStopwatchIfPossible(dbCtx, doer, 
issue); err != nil { + if _, err := issues_model.FinishIssueStopwatch(dbCtx, doer, issue); err != nil { log.Error("Unable to stop stopwatch for issue[%d]#%d: %v", issue.ID, issue.Index, err) } } return err } - if err := issues_model.FinishIssueStopwatchIfPossible(dbCtx, doer, issue); err != nil { + if _, err := issues_model.FinishIssueStopwatch(dbCtx, doer, issue); err != nil { return err } diff --git a/services/issue/suggestion_test.go b/services/issue/suggestion_test.go index 84cfd520ac..a5b39d27bb 100644 --- a/services/issue/suggestion_test.go +++ b/services/issue/suggestion_test.go @@ -51,7 +51,7 @@ func Test_Suggestion(t *testing.T) { for _, issue := range issues { issueIndexes = append(issueIndexes, issue.Index) } - assert.EqualValues(t, testCase.expectedIndexes, issueIndexes) + assert.Equal(t, testCase.expectedIndexes, issueIndexes) }) } } diff --git a/services/lfs/locks.go b/services/lfs/locks.go index 1d464f4a66..264001f0f9 100644 --- a/services/lfs/locks.go +++ b/services/lfs/locks.go @@ -74,10 +74,7 @@ func GetListLockHandler(ctx *context.Context) { } ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType) - cursor := ctx.FormInt("cursor") - if cursor < 0 { - cursor = 0 - } + cursor := max(ctx.FormInt("cursor"), 0) limit := ctx.FormInt("limit") if limit > setting.LFS.LocksPagingNum && setting.LFS.LocksPagingNum > 0 { limit = setting.LFS.LocksPagingNum @@ -239,10 +236,7 @@ func VerifyLockHandler(ctx *context.Context) { ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType) - cursor := ctx.FormInt("cursor") - if cursor < 0 { - cursor = 0 - } + cursor := max(ctx.FormInt("cursor"), 0) limit := ctx.FormInt("limit") if limit > setting.LFS.LocksPagingNum && setting.LFS.LocksPagingNum > 0 { limit = setting.LFS.LocksPagingNum diff --git a/services/lfs/server.go b/services/lfs/server.go index c4866edaab..c44cc35e53 100644 --- a/services/lfs/server.go +++ b/services/lfs/server.go @@ -11,6 +11,7 @@ import ( "errors" "fmt" "io" + "maps" "net/http" "net/url" "path" @@ -26,6 +27,7 @@ import ( repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/auth/httpauth" "code.gitea.io/gitea/modules/json" lfs_module "code.gitea.io/gitea/modules/lfs" "code.gitea.io/gitea/modules/log" @@ -164,11 +166,12 @@ func BatchHandler(ctx *context.Context) { } var isUpload bool - if br.Operation == "upload" { + switch br.Operation { + case "upload": isUpload = true - } else if br.Operation == "download" { + case "download": isUpload = false - } else { + default: log.Trace("Attempt to BATCH with invalid operation: %s", br.Operation) writeStatus(ctx, http.StatusBadRequest) return @@ -201,7 +204,7 @@ func BatchHandler(ctx *context.Context) { exists, err := contentStore.Exists(p) if err != nil { - log.Error("Unable to check if LFS OID[%s] exist. Error: %v", p.Oid, rc.User, rc.Repo, err) + log.Error("Unable to check if LFS object with ID '%s' exists for %s/%s. 
Error: %v", p.Oid, rc.User, rc.Repo, err) writeStatus(ctx, http.StatusInternalServerError) return } @@ -479,9 +482,7 @@ func buildObjectResponse(rc *requestContext, pointer lfs_module.Pointer, downloa rep.Actions["upload"] = &lfs_module.Link{Href: rc.UploadLink(pointer), Header: header} verifyHeader := make(map[string]string) - for key, value := range header { - verifyHeader[key] = value - } + maps.Copy(verifyHeader, header) // This is only needed to workaround https://github.com/git-lfs/git-lfs/issues/3662 verifyHeader["Accept"] = lfs_module.AcceptHeader @@ -571,15 +572,15 @@ func handleLFSToken(ctx stdCtx.Context, tokenSHA string, target *repo_model.Repo claims, claimsOk := token.Claims.(*Claims) if !token.Valid || !claimsOk { - return nil, fmt.Errorf("invalid token claim") + return nil, errors.New("invalid token claim") } if claims.RepoID != target.ID { - return nil, fmt.Errorf("invalid token claim") + return nil, errors.New("invalid token claim") } if mode == perm_model.AccessModeWrite && claims.Op != "upload" { - return nil, fmt.Errorf("invalid token claim") + return nil, errors.New("invalid token claim") } u, err := user_model.GetUserByID(ctx, claims.UserID) @@ -592,21 +593,13 @@ func handleLFSToken(ctx stdCtx.Context, tokenSHA string, target *repo_model.Repo func parseToken(ctx stdCtx.Context, authorization string, target *repo_model.Repository, mode perm_model.AccessMode) (*user_model.User, error) { if authorization == "" { - return nil, fmt.Errorf("no token") - } - - parts := strings.SplitN(authorization, " ", 2) - if len(parts) != 2 { - return nil, fmt.Errorf("no token") + return nil, errors.New("no token") } - tokenSHA := parts[1] - switch strings.ToLower(parts[0]) { - case "bearer": - fallthrough - case "token": - return handleLFSToken(ctx, tokenSHA, target, mode) + parsed, ok := httpauth.ParseAuthorizationHeader(authorization) + if !ok || parsed.BearerToken == nil { + return nil, errors.New("token not found") } - return nil, fmt.Errorf("token not found") + return handleLFSToken(ctx, parsed.BearerToken.Token, target, mode) } func requireAuth(ctx *context.Context) { diff --git a/services/mailer/mail.go b/services/mailer/mail.go index f7e5b0c9f0..aa51cbdbcf 100644 --- a/services/mailer/mail.go +++ b/services/mailer/mail.go @@ -8,6 +8,7 @@ import ( "bytes" "context" "encoding/base64" + "errors" "fmt" "html/template" "io" @@ -117,7 +118,7 @@ func (b64embedder *mailAttachmentBase64Embedder) AttachmentSrcToBase64DataURI(ct attachmentUUID, ok = strings.CutPrefix(parsedSrc.RepoSubPath, "/attachments/") } if !ok { - return "", fmt.Errorf("not an attachment") + return "", errors.New("not an attachment") } } attachment, err := repo_model.GetAttachmentByUUID(ctx, attachmentUUID) @@ -126,10 +127,10 @@ func (b64embedder *mailAttachmentBase64Embedder) AttachmentSrcToBase64DataURI(ct } if attachment.RepoID != b64embedder.repo.ID { - return "", fmt.Errorf("attachment does not belong to the repository") + return "", errors.New("attachment does not belong to the repository") } if attachment.Size+b64embedder.estimateSize > b64embedder.maxSize { - return "", fmt.Errorf("total embedded images exceed max limit") + return "", errors.New("total embedded images exceed max limit") } fr, err := storage.Attachments.Open(attachment.RelativePath()) @@ -146,7 +147,7 @@ func (b64embedder *mailAttachmentBase64Embedder) AttachmentSrcToBase64DataURI(ct mimeType := typesniffer.DetectContentType(content) if !mimeType.IsImage() { - return "", fmt.Errorf("not an image") + return "", errors.New("not an image") } 
encoded := base64.StdEncoding.EncodeToString(content) diff --git a/services/mailer/mail_team_invite.go b/services/mailer/mail_team_invite.go index 1fbade7e23..f4aa788dec 100644 --- a/services/mailer/mail_team_invite.go +++ b/services/mailer/mail_team_invite.go @@ -6,6 +6,7 @@ package mailer import ( "bytes" "context" + "errors" "fmt" "net/url" @@ -38,10 +39,10 @@ func MailTeamInvite(ctx context.Context, inviter *user_model.User, team *org_mod if err != nil && !user_model.IsErrUserNotExist(err) { return err } else if user != nil && user.ProhibitLogin { - return fmt.Errorf("login is prohibited for the invited user") + return errors.New("login is prohibited for the invited user") } - inviteRedirect := url.QueryEscape(fmt.Sprintf("/org/invite/%s", invite.Token)) + inviteRedirect := url.QueryEscape("/org/invite/" + invite.Token) inviteURL := fmt.Sprintf("%suser/sign_up?redirect_to=%s", setting.AppURL, inviteRedirect) if (err == nil && user != nil) || setting.Service.DisableRegistration || setting.Service.AllowOnlyExternalRegistration { diff --git a/services/mailer/mail_test.go b/services/mailer/mail_test.go index 6aced705f3..b15949f352 100644 --- a/services/mailer/mail_test.go +++ b/services/mailer/mail_test.go @@ -467,7 +467,7 @@ func TestFromDisplayName(t *testing.T) { t.Run(tc.userDisplayName, func(t *testing.T) { user := &user_model.User{FullName: tc.userDisplayName, Name: "tmp"} got := fromDisplayName(user) - assert.EqualValues(t, tc.fromDisplayName, got) + assert.Equal(t, tc.fromDisplayName, got) }) } @@ -484,7 +484,7 @@ func TestFromDisplayName(t *testing.T) { setting.Domain = oldDomain }() - assert.EqualValues(t, "Mister X (by Code IT on [code.it])", fromDisplayName(&user_model.User{FullName: "Mister X", Name: "tmp"})) + assert.Equal(t, "Mister X (by Code IT on [code.it])", fromDisplayName(&user_model.User{FullName: "Mister X", Name: "tmp"})) }) } @@ -528,7 +528,7 @@ func TestEmbedBase64Images(t *testing.T) { require.NoError(t, err) mailBody := msgs[0].Body - assert.Regexp(t, `MSG-BEFORE <a[^>]+><img src="data:image/png;base64,iVBORw0KGgo="/></a> MSG-AFTER`, mailBody) + assert.Regexp(t, `MSG-BEFORE <a[^>]+><img src="data:image/png;base64,iVBORw0KGgo=".*/></a> MSG-AFTER`, mailBody) }) t.Run("EmbedInstanceImageSkipExternalImage", func(t *testing.T) { diff --git a/services/mailer/notify.go b/services/mailer/notify.go index a27177e8f5..77c366fe31 100644 --- a/services/mailer/notify.go +++ b/services/mailer/notify.go @@ -31,15 +31,16 @@ func (m *mailNotifier) CreateIssueComment(ctx context.Context, doer *user_model. 
issue *issues_model.Issue, comment *issues_model.Comment, mentions []*user_model.User, ) { var act activities_model.ActionType - if comment.Type == issues_model.CommentTypeClose { + switch comment.Type { + case issues_model.CommentTypeClose: act = activities_model.ActionCloseIssue - } else if comment.Type == issues_model.CommentTypeReopen { + case issues_model.CommentTypeReopen: act = activities_model.ActionReopenIssue - } else if comment.Type == issues_model.CommentTypeComment { + case issues_model.CommentTypeComment: act = activities_model.ActionCommentIssue - } else if comment.Type == issues_model.CommentTypeCode { + case issues_model.CommentTypeCode: act = activities_model.ActionCommentIssue - } else if comment.Type == issues_model.CommentTypePullRequestPush { + case issues_model.CommentTypePullRequestPush: act = 0 } @@ -95,11 +96,12 @@ func (m *mailNotifier) NewPullRequest(ctx context.Context, pr *issues_model.Pull func (m *mailNotifier) PullRequestReview(ctx context.Context, pr *issues_model.PullRequest, r *issues_model.Review, comment *issues_model.Comment, mentions []*user_model.User) { var act activities_model.ActionType - if comment.Type == issues_model.CommentTypeClose { + switch comment.Type { + case issues_model.CommentTypeClose: act = activities_model.ActionCloseIssue - } else if comment.Type == issues_model.CommentTypeReopen { + case issues_model.CommentTypeReopen: act = activities_model.ActionReopenIssue - } else if comment.Type == issues_model.CommentTypeComment { + case issues_model.CommentTypeComment: act = activities_model.ActionCommentPull } if err := MailParticipantsComment(ctx, comment, act, pr.Issue, mentions); err != nil { diff --git a/services/mailer/sender/message_test.go b/services/mailer/sender/message_test.go index 63d0bc349a..ae153ebf05 100644 --- a/services/mailer/sender/message_test.go +++ b/services/mailer/sender/message_test.go @@ -108,9 +108,9 @@ func extractMailHeaderAndContent(t *testing.T, mail string) (map[string]string, } content := strings.TrimSpace("boundary=" + parts[1]) - hParts := strings.Split(parts[0], "\n") + hParts := strings.SplitSeq(parts[0], "\n") - for _, hPart := range hParts { + for hPart := range hParts { parts := strings.SplitN(hPart, ":", 2) hk := strings.TrimSpace(parts[0]) if hk != "" { diff --git a/services/mailer/sender/smtp.go b/services/mailer/sender/smtp.go index c53c3da997..8dc1b40b74 100644 --- a/services/mailer/sender/smtp.go +++ b/services/mailer/sender/smtp.go @@ -5,6 +5,7 @@ package sender import ( "crypto/tls" + "errors" "fmt" "io" "net" @@ -99,7 +100,7 @@ func (s *SMTPSender) Send(from string, to []string, msg io.WriterTo) error { canAuth, options := client.Extension("AUTH") if len(opts.User) > 0 { if !canAuth { - return fmt.Errorf("SMTP server does not support AUTH, but credentials provided") + return errors.New("SMTP server does not support AUTH, but credentials provided") } var auth smtp.Auth diff --git a/services/mailer/sender/smtp_auth.go b/services/mailer/sender/smtp_auth.go index 260b12437b..c60e0dbfbb 100644 --- a/services/mailer/sender/smtp_auth.go +++ b/services/mailer/sender/smtp_auth.go @@ -4,6 +4,7 @@ package sender import ( + "errors" "fmt" "github.com/Azure/go-ntlmssp" @@ -60,7 +61,7 @@ func (a *ntlmAuth) Start(server *smtp.ServerInfo) (string, []byte, error) { func (a *ntlmAuth) Next(fromServer []byte, more bool) ([]byte, error) { if more { if len(fromServer) == 0 { - return nil, fmt.Errorf("ntlm ChallengeMessage is empty") + return nil, errors.New("ntlm ChallengeMessage is empty") } 
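The mailer notify.go hunks here (and the lfs BatchHandler change earlier) convert if/else-if chains over a single value into switch statements, which reads flatter and satisfies the usual ifElseChain lint. A tiny equivalent sketch, with hypothetical stand-in constants rather than Gitea's real comment types:

    package main

    import "fmt"

    type CommentType int

    const (
        CommentTypeClose CommentType = iota
        CommentTypeReopen
        CommentTypeComment
    )

    func describe(t CommentType) string {
        // Before: if t == CommentTypeClose { ... } else if t == CommentTypeReopen { ... } else { ... }
        // After: a switch over the same value.
        switch t {
        case CommentTypeClose:
            return "close"
        case CommentTypeReopen:
            return "reopen"
        case CommentTypeComment:
            return "comment"
        default:
            return "unknown"
        }
    }

    func main() {
        fmt.Println(describe(CommentTypeReopen))
    }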
authenticateMessage, err := ntlmssp.ProcessChallenge(fromServer, a.username, a.password, a.domainNeeded) return authenticateMessage, err diff --git a/services/markup/renderhelper_codepreview.go b/services/markup/renderhelper_codepreview.go index d638af7ff0..fa1eb824a2 100644 --- a/services/markup/renderhelper_codepreview.go +++ b/services/markup/renderhelper_codepreview.go @@ -6,7 +6,7 @@ package markup import ( "bufio" "context" - "fmt" + "errors" "html/template" "strings" @@ -14,13 +14,13 @@ import ( "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" "code.gitea.io/gitea/modules/charset" + "code.gitea.io/gitea/modules/git/languagestats" "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/indexer/code" "code.gitea.io/gitea/modules/markup" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" gitea_context "code.gitea.io/gitea/services/context" - "code.gitea.io/gitea/services/repository/files" ) func renderRepoFileCodePreview(ctx context.Context, opts markup.RenderCodePreviewOptions) (template.HTML, error) { @@ -38,7 +38,7 @@ func renderRepoFileCodePreview(ctx context.Context, opts markup.RenderCodePrevie webCtx := gitea_context.GetWebContext(ctx) if webCtx == nil { - return "", fmt.Errorf("context is not a web context") + return "", errors.New("context is not a web context") } doer := webCtx.Doer @@ -61,14 +61,14 @@ func renderRepoFileCodePreview(ctx context.Context, opts markup.RenderCodePrevie return "", err } - language, _ := files.TryGetContentLanguage(gitRepo, opts.CommitID, opts.FilePath) + language, _ := languagestats.GetFileLanguage(ctx, gitRepo, opts.CommitID, opts.FilePath) blob, err := commit.GetBlobByPath(opts.FilePath) if err != nil { return "", err } if blob.Size() > setting.UI.MaxDisplayFileSize { - return "", fmt.Errorf("file is too large") + return "", errors.New("file is too large") } dataRc, err := blob.DataAsync() diff --git a/services/markup/renderhelper_issueicontitle.go b/services/markup/renderhelper_issueicontitle.go index fd8f9d43fa..27b5595fa9 100644 --- a/services/markup/renderhelper_issueicontitle.go +++ b/services/markup/renderhelper_issueicontitle.go @@ -5,6 +5,7 @@ package markup import ( "context" + "errors" "fmt" "html/template" @@ -20,7 +21,7 @@ import ( func renderRepoIssueIconTitle(ctx context.Context, opts markup.RenderIssueIconTitleOptions) (_ template.HTML, err error) { webCtx := gitea_context.GetWebContext(ctx) if webCtx == nil { - return "", fmt.Errorf("context is not a web context") + return "", errors.New("context is not a web context") } textIssueIndex := fmt.Sprintf("(#%d)", opts.IssueIndex) diff --git a/services/markup/renderhelper_mention_test.go b/services/markup/renderhelper_mention_test.go index d05fbb6fba..d54ab13a48 100644 --- a/services/markup/renderhelper_mention_test.go +++ b/services/markup/renderhelper_mention_test.go @@ -37,7 +37,7 @@ func TestRenderHelperMention(t *testing.T) { assert.False(t, FormalRenderHelperFuncs().IsUsernameMentionable(t.Context(), userNoSuch)) // when using web context, use user.IsUserVisibleToViewer to check - req, err := http.NewRequest("GET", "/", nil) + req, err := http.NewRequest(http.MethodGet, "/", nil) assert.NoError(t, err) base := gitea_context.NewBaseContextForTest(httptest.NewRecorder(), req) giteaCtx := gitea_context.NewWebContext(base, &contexttest.MockRender{}, nil) diff --git a/services/migrations/codebase.go b/services/migrations/codebase.go index 880dd21497..240c7bcdc9 100644 --- a/services/migrations/codebase.go +++ 
b/services/migrations/codebase.go @@ -134,7 +134,7 @@ func (d *CodebaseDownloader) callAPI(ctx context.Context, endpoint string, param u.RawQuery = query.Encode() } - req, err := http.NewRequestWithContext(ctx, "GET", u.String(), nil) + req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil) if err != nil { return err } diff --git a/services/migrations/codecommit.go b/services/migrations/codecommit.go index c45f9e5943..d08b2e6d4a 100644 --- a/services/migrations/codecommit.go +++ b/services/migrations/codecommit.go @@ -5,7 +5,7 @@ package migrations import ( "context" - "fmt" + "errors" "net/url" "strconv" "strings" @@ -42,13 +42,13 @@ func (c *CodeCommitDownloaderFactory) New(ctx context.Context, opts base.Migrate hostElems := strings.Split(u.Host, ".") if len(hostElems) != 4 { - return nil, fmt.Errorf("cannot get the region from clone URL") + return nil, errors.New("cannot get the region from clone URL") } region := hostElems[1] pathElems := strings.Split(u.Path, "/") if len(pathElems) == 0 { - return nil, fmt.Errorf("cannot get the repo name from clone URL") + return nil, errors.New("cannot get the repo name from clone URL") } repoName := pathElems[len(pathElems)-1] @@ -155,10 +155,7 @@ func (c *CodeCommitDownloader) GetPullRequests(ctx context.Context, page, perPag } startIndex := (page - 1) * perPage - endIndex := page * perPage - if endIndex > len(allPullRequestIDs) { - endIndex = len(allPullRequestIDs) - } + endIndex := min(page*perPage, len(allPullRequestIDs)) batch := allPullRequestIDs[startIndex:endIndex] prs := make([]*base.PullRequest, 0, len(batch)) @@ -180,11 +177,15 @@ func (c *CodeCommitDownloader) GetPullRequests(ctx context.Context, page, perPag continue } target := orig.PullRequestTargets[0] + description := "" + if orig.Description != nil { + description = *orig.Description + } pr := &base.PullRequest{ Number: number, Title: *orig.Title, PosterName: c.getUsernameFromARN(*orig.AuthorArn), - Content: *orig.Description, + Content: description, State: "open", Created: *orig.CreationDate, Updated: *orig.LastActivityDate, @@ -206,6 +207,10 @@ func (c *CodeCommitDownloader) GetPullRequests(ctx context.Context, page, perPag pr.State = "closed" pr.Closed = orig.LastActivityDate } + if pr.Merged { + pr.MergeCommitSHA = *target.MergeMetadata.MergeCommitId + pr.MergedTime = orig.LastActivityDate + } _ = CheckAndEnsureSafePR(pr, c.baseURL, c) prs = append(prs, pr) diff --git a/services/migrations/error.go b/services/migrations/error.go index c7d912f50b..9b470149bf 100644 --- a/services/migrations/error.go +++ b/services/migrations/error.go @@ -7,7 +7,7 @@ package migrations import ( "errors" - "github.com/google/go-github/v61/github" + "github.com/google/go-github/v71/github" ) // ErrRepoNotCreated returns the error that repository not created diff --git a/services/migrations/gitea_downloader.go b/services/migrations/gitea_downloader.go index f92f318293..5d48d2f003 100644 --- a/services/migrations/gitea_downloader.go +++ b/services/migrations/gitea_downloader.go @@ -298,7 +298,7 @@ func (g *GiteaDownloader) convertGiteaRelease(rel *gitea_sdk.Release) *base.Rele } // FIXME: for a private download? 
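The codecommit.go hunk above, like the earlier gitdiff.go buffer-size and lfs/locks.go cursor changes, swaps small if-based clamps for the built-in min and max functions available since Go 1.21. A short sketch of both uses:

    package main

    import "fmt"

    func main() {
        // Clamp a user-supplied cursor at zero, replacing: if cursor < 0 { cursor = 0 }
        cursor := -5
        cursor = max(cursor, 0)

        // Cap a pagination end index at the slice length, replacing the equivalent if statement.
        ids := []int{1, 2, 3, 4, 5}
        page, perPage := 2, 3
        end := min(page*perPage, len(ids))
        start := min((page-1)*perPage, end)

        fmt.Println(cursor, ids[start:end]) // 0 [4 5]
    }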
- req, err := http.NewRequest("GET", assetDownloadURL, nil) + req, err := http.NewRequest(http.MethodGet, assetDownloadURL, nil) if err != nil { return nil, err } diff --git a/services/migrations/gitea_downloader_test.go b/services/migrations/gitea_downloader_test.go index da56120065..bb1760e889 100644 --- a/services/migrations/gitea_downloader_test.go +++ b/services/migrations/gitea_downloader_test.go @@ -47,7 +47,7 @@ func TestGiteaDownloadRepo(t *testing.T) { topics, err := downloader.GetTopics(ctx) assert.NoError(t, err) sort.Strings(topics) - assert.EqualValues(t, []string{"ci", "gitea", "migration", "test"}, topics) + assert.Equal(t, []string{"ci", "gitea", "migration", "test"}, topics) labels, err := downloader.GetLabels(ctx) assert.NoError(t, err) @@ -134,7 +134,7 @@ func TestGiteaDownloadRepo(t *testing.T) { assert.NoError(t, err) assert.True(t, isEnd) assert.Len(t, issues, 7) - assert.EqualValues(t, "open", issues[0].State) + assert.Equal(t, "open", issues[0].State) issues, isEnd, err = downloader.GetIssues(ctx, 3, 2) assert.NoError(t, err) diff --git a/services/migrations/gitea_uploader.go b/services/migrations/gitea_uploader.go index b17cc3ce41..737bff24d0 100644 --- a/services/migrations/gitea_uploader.go +++ b/services/migrations/gitea_uploader.go @@ -107,7 +107,7 @@ func (g *GiteaLocalUploader) CreateRepo(ctx context.Context, repo *base.Reposito IsPrivate: opts.Private || setting.Repository.ForcePrivate, IsMirror: opts.Mirror, Status: repo_model.RepositoryBeingMigrated, - }) + }, false) } else { r, err = repo_model.GetRepositoryByID(ctx, opts.MigrateToRepoID) } @@ -148,7 +148,7 @@ func (g *GiteaLocalUploader) CreateRepo(ctx context.Context, repo *base.Reposito return err } g.repo.ObjectFormatName = objectFormat.Name() - return repo_model.UpdateRepositoryCols(ctx, g.repo, "object_format_name") + return repo_model.UpdateRepositoryColsNoAutoTime(ctx, g.repo, "object_format_name") } // Close closes this uploader @@ -556,7 +556,7 @@ func (g *GiteaLocalUploader) CreatePullRequests(ctx context.Context, prs ...*bas } for _, pr := range gprs { g.issues[pr.Issue.Index] = pr.Issue - pull.AddToTaskQueue(ctx, pr) + pull.StartPullRequestCheckImmediately(ctx, pr) } return nil } @@ -765,7 +765,7 @@ func (g *GiteaLocalUploader) newPullRequest(ctx context.Context, pr *base.PullRe issue := issues_model.Issue{ RepoID: g.repo.ID, Repo: g.repo, - Title: prTitle, + Title: util.TruncateRunes(prTitle, 255), Index: pr.Number, Content: pr.Content, MilestoneID: milestoneID, @@ -975,7 +975,7 @@ func (g *GiteaLocalUploader) Finish(ctx context.Context) error { } g.repo.Status = repo_model.RepositoryReady - return repo_model.UpdateRepositoryCols(ctx, g.repo, "status") + return repo_model.UpdateRepositoryColsWithAutoTime(ctx, g.repo, "status") } func (g *GiteaLocalUploader) remapUser(ctx context.Context, source user_model.ExternalUserMigrated, target user_model.ExternalUserRemappable) error { @@ -1017,7 +1017,7 @@ func (g *GiteaLocalUploader) remapLocalUser(ctx context.Context, source user_mod func (g *GiteaLocalUploader) remapExternalUser(ctx context.Context, source user_model.ExternalUserMigrated) (userid int64, err error) { userid, ok := g.userMap[source.GetExternalID()] if !ok { - userid, err = user_model.GetUserIDByExternalUserID(ctx, g.gitServiceType.Name(), fmt.Sprintf("%d", source.GetExternalID())) + userid, err = user_model.GetUserIDByExternalUserID(ctx, g.gitServiceType.Name(), strconv.FormatInt(source.GetExternalID(), 10)) if err != nil { log.Error("GetUserIDByExternalUserID: %v", err) return 0, 
err diff --git a/services/migrations/gitea_uploader_test.go b/services/migrations/gitea_uploader_test.go index f52d4157c8..1970c0550c 100644 --- a/services/migrations/gitea_uploader_test.go +++ b/services/migrations/gitea_uploader_test.go @@ -64,7 +64,7 @@ func TestGiteaUploadRepo(t *testing.T) { repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerID: user.ID, Name: repoName}) assert.True(t, repo.HasWiki()) - assert.EqualValues(t, repo_model.RepositoryReady, repo.Status) + assert.Equal(t, repo_model.RepositoryReady, repo.Status) milestones, err := db.Find[issues_model.Milestone](db.DefaultContext, issues_model.FindMilestoneOptions{ RepoID: repo.ID, @@ -152,7 +152,7 @@ func TestGiteaUploadRemapLocalUser(t *testing.T) { uploader.userMap = make(map[int64]int64) err := uploader.remapUser(ctx, &source, &target) assert.NoError(t, err) - assert.EqualValues(t, doer.ID, target.GetUserID()) + assert.Equal(t, doer.ID, target.GetUserID()) // // The externalID matches a known user but the name does not match, @@ -163,7 +163,7 @@ func TestGiteaUploadRemapLocalUser(t *testing.T) { uploader.userMap = make(map[int64]int64) err = uploader.remapUser(ctx, &source, &target) assert.NoError(t, err) - assert.EqualValues(t, doer.ID, target.GetUserID()) + assert.Equal(t, doer.ID, target.GetUserID()) // // The externalID and externalName match an existing user, everything @@ -174,7 +174,7 @@ func TestGiteaUploadRemapLocalUser(t *testing.T) { uploader.userMap = make(map[int64]int64) err = uploader.remapUser(ctx, &source, &target) assert.NoError(t, err) - assert.EqualValues(t, user.ID, target.GetUserID()) + assert.Equal(t, user.ID, target.GetUserID()) } func TestGiteaUploadRemapExternalUser(t *testing.T) { @@ -202,7 +202,7 @@ func TestGiteaUploadRemapExternalUser(t *testing.T) { target := repo_model.Release{} err := uploader.remapUser(ctx, &source, &target) assert.NoError(t, err) - assert.EqualValues(t, doer.ID, target.GetUserID()) + assert.Equal(t, doer.ID, target.GetUserID()) // // Link the external ID to an existing user @@ -225,7 +225,7 @@ func TestGiteaUploadRemapExternalUser(t *testing.T) { target = repo_model.Release{} err = uploader.remapUser(ctx, &source, &target) assert.NoError(t, err) - assert.EqualValues(t, linkedUser.ID, target.GetUserID()) + assert.Equal(t, linkedUser.ID, target.GetUserID()) } func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) { @@ -239,7 +239,7 @@ func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) { assert.NoError(t, git.InitRepository(git.DefaultContext, fromRepo.RepoPath(), false, fromRepo.ObjectFormatName)) err := git.NewCommand("symbolic-ref").AddDynamicArguments("HEAD", git.BranchPrefix+baseRef).Run(git.DefaultContext, &git.RunOpts{Dir: fromRepo.RepoPath()}) assert.NoError(t, err) - assert.NoError(t, os.WriteFile(filepath.Join(fromRepo.RepoPath(), "README.md"), []byte(fmt.Sprintf("# Testing Repository\n\nOriginally created in: %s", fromRepo.RepoPath())), 0o644)) + assert.NoError(t, os.WriteFile(filepath.Join(fromRepo.RepoPath(), "README.md"), []byte("# Testing Repository\n\nOriginally created in: "+fromRepo.RepoPath()), 0o644)) assert.NoError(t, git.AddChanges(fromRepo.RepoPath(), true)) signature := git.Signature{ Email: "test@example.com", @@ -287,7 +287,7 @@ func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) { })) _, _, err = git.NewCommand("checkout", "-b").AddDynamicArguments(forkHeadRef).RunStdString(git.DefaultContext, &git.RunOpts{Dir: forkRepo.RepoPath()}) assert.NoError(t, err) - assert.NoError(t, 
os.WriteFile(filepath.Join(forkRepo.RepoPath(), "README.md"), []byte(fmt.Sprintf("# branch2 %s", forkRepo.RepoPath())), 0o644)) + assert.NoError(t, os.WriteFile(filepath.Join(forkRepo.RepoPath(), "README.md"), []byte("# branch2 "+forkRepo.RepoPath()), 0o644)) assert.NoError(t, git.AddChanges(forkRepo.RepoPath(), true)) assert.NoError(t, git.CommitChanges(forkRepo.RepoPath(), git.CommitChangesOptions{ Committer: &signature, @@ -508,14 +508,14 @@ func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) { head, err := uploader.updateGitForPullRequest(ctx, &testCase.pr) assert.NoError(t, err) - assert.EqualValues(t, testCase.head, head) + assert.Equal(t, testCase.head, head) log.Info(stopMark) logFiltered, logStopped := logChecker.Check(5 * time.Second) assert.True(t, logStopped) if len(testCase.logFilter) > 0 { - assert.EqualValues(t, testCase.logFiltered, logFiltered, "for log message filters: %v", testCase.logFilter) + assert.Equal(t, testCase.logFiltered, logFiltered, "for log message filters: %v", testCase.logFilter) } }) } diff --git a/services/migrations/github.go b/services/migrations/github.go index b00d6ed27f..2ce11615c6 100644 --- a/services/migrations/github.go +++ b/services/migrations/github.go @@ -20,7 +20,7 @@ import ( "code.gitea.io/gitea/modules/proxy" "code.gitea.io/gitea/modules/structs" - "github.com/google/go-github/v61/github" + "github.com/google/go-github/v71/github" "golang.org/x/oauth2" ) @@ -89,8 +89,8 @@ func NewGithubDownloaderV3(_ context.Context, baseURL, userName, password, token } if token != "" { - tokens := strings.Split(token, ",") - for _, token := range tokens { + tokens := strings.SplitSeq(token, ",") + for token := range tokens { token = strings.TrimSpace(token) ts := oauth2.StaticTokenSource( &oauth2.Token{AccessToken: token}, @@ -133,7 +133,7 @@ func (g *GithubDownloaderV3) LogString() string { func (g *GithubDownloaderV3) addClient(client *http.Client, baseURL string) { githubClient := github.NewClient(client) if baseURL != "https://github.com" { - githubClient, _ = github.NewClient(client).WithEnterpriseURLs(baseURL, baseURL) + githubClient, _ = githubClient.WithEnterpriseURLs(baseURL, baseURL) } g.clients = append(g.clients, githubClient) g.rates = append(g.rates, nil) @@ -358,7 +358,7 @@ func (g *GithubDownloaderV3) convertGithubRelease(ctx context.Context, rel *gith } g.waitAndPickClient(ctx) - req, err := http.NewRequestWithContext(ctx, "GET", redirectURL, nil) + req, err := http.NewRequestWithContext(ctx, http.MethodGet, redirectURL, nil) if err != nil { return nil, err } @@ -441,9 +441,11 @@ func (g *GithubDownloaderV3) GetIssues(ctx context.Context, page, perPage int) ( if !g.SkipReactions { for i := 1; ; i++ { g.waitAndPickClient(ctx) - res, resp, err := g.getClient().Reactions.ListIssueReactions(ctx, g.repoOwner, g.repoName, issue.GetNumber(), &github.ListOptions{ - Page: i, - PerPage: perPage, + res, resp, err := g.getClient().Reactions.ListIssueReactions(ctx, g.repoOwner, g.repoName, issue.GetNumber(), &github.ListReactionOptions{ + ListOptions: github.ListOptions{ + Page: i, + PerPage: perPage, + }, }) if err != nil { return nil, false, err @@ -527,9 +529,11 @@ func (g *GithubDownloaderV3) getComments(ctx context.Context, commentable base.C if !g.SkipReactions { for i := 1; ; i++ { g.waitAndPickClient(ctx) - res, resp, err := g.getClient().Reactions.ListIssueCommentReactions(ctx, g.repoOwner, g.repoName, comment.GetID(), &github.ListOptions{ - Page: i, - PerPage: g.maxPerPage, + res, resp, err := 
g.getClient().Reactions.ListIssueCommentReactions(ctx, g.repoOwner, g.repoName, comment.GetID(), &github.ListReactionOptions{ + ListOptions: github.ListOptions{ + Page: i, + PerPage: g.maxPerPage, + }, }) if err != nil { return nil, err @@ -602,9 +606,11 @@ func (g *GithubDownloaderV3) GetAllComments(ctx context.Context, page, perPage i if !g.SkipReactions { for i := 1; ; i++ { g.waitAndPickClient(ctx) - res, resp, err := g.getClient().Reactions.ListIssueCommentReactions(ctx, g.repoOwner, g.repoName, comment.GetID(), &github.ListOptions{ - Page: i, - PerPage: g.maxPerPage, + res, resp, err := g.getClient().Reactions.ListIssueCommentReactions(ctx, g.repoOwner, g.repoName, comment.GetID(), &github.ListReactionOptions{ + ListOptions: github.ListOptions{ + Page: i, + PerPage: g.maxPerPage, + }, }) if err != nil { return nil, false, err @@ -673,9 +679,11 @@ func (g *GithubDownloaderV3) GetPullRequests(ctx context.Context, page, perPage if !g.SkipReactions { for i := 1; ; i++ { g.waitAndPickClient(ctx) - res, resp, err := g.getClient().Reactions.ListIssueReactions(ctx, g.repoOwner, g.repoName, pr.GetNumber(), &github.ListOptions{ - Page: i, - PerPage: perPage, + res, resp, err := g.getClient().Reactions.ListIssueReactions(ctx, g.repoOwner, g.repoName, pr.GetNumber(), &github.ListReactionOptions{ + ListOptions: github.ListOptions{ + Page: i, + PerPage: perPage, + }, }) if err != nil { return nil, false, err @@ -760,9 +768,11 @@ func (g *GithubDownloaderV3) convertGithubReviewComments(ctx context.Context, cs if !g.SkipReactions { for i := 1; ; i++ { g.waitAndPickClient(ctx) - res, resp, err := g.getClient().Reactions.ListPullRequestCommentReactions(ctx, g.repoOwner, g.repoName, c.GetID(), &github.ListOptions{ - Page: i, - PerPage: g.maxPerPage, + res, resp, err := g.getClient().Reactions.ListPullRequestCommentReactions(ctx, g.repoOwner, g.repoName, c.GetID(), &github.ListReactionOptions{ + ListOptions: github.ListOptions{ + Page: i, + PerPage: g.maxPerPage, + }, }) if err != nil { return nil, err @@ -872,3 +882,18 @@ func (g *GithubDownloaderV3) GetReviews(ctx context.Context, reviewable base.Rev } return allReviews, nil } + +// FormatCloneURL add authentication into remote URLs +func (g *GithubDownloaderV3) FormatCloneURL(opts MigrateOptions, remoteAddr string) (string, error) { + u, err := url.Parse(remoteAddr) + if err != nil { + return "", err + } + if len(opts.AuthToken) > 0 { + // "multiple tokens" are used to benefit more "API rate limit quota" + // git clone doesn't count for rate limits, so only use the first token. 
+ // source: https://github.com/orgs/community/discussions/44515 + u.User = url.UserPassword("oauth2", strings.Split(opts.AuthToken, ",")[0]) + } + return u.String(), nil +} diff --git a/services/migrations/github_test.go b/services/migrations/github_test.go index 2625fb62ec..6d1a5378b9 100644 --- a/services/migrations/github_test.go +++ b/services/migrations/github_test.go @@ -12,6 +12,7 @@ import ( base "code.gitea.io/gitea/modules/migration" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestGitHubDownloadRepo(t *testing.T) { @@ -429,3 +430,36 @@ func TestGitHubDownloadRepo(t *testing.T) { }, }, reviews) } + +func TestGithubMultiToken(t *testing.T) { + testCases := []struct { + desc string + token string + expectedCloneURL string + }{ + { + desc: "Single Token", + token: "single_token", + expectedCloneURL: "https://oauth2:single_token@github.com", + }, + { + desc: "Multi Token", + token: "token1,token2", + expectedCloneURL: "https://oauth2:token1@github.com", + }, + } + factory := GithubDownloaderV3Factory{} + + for _, tC := range testCases { + t.Run(tC.desc, func(t *testing.T) { + opts := base.MigrateOptions{CloneAddr: "https://github.com/go-gitea/gitea", AuthToken: tC.token} + client, err := factory.New(t.Context(), opts) + require.NoError(t, err) + + cloneURL, err := client.FormatCloneURL(opts, "https://github.com") + require.NoError(t, err) + + assert.Equal(t, tC.expectedCloneURL, cloneURL) + }) + } +} diff --git a/services/migrations/gitlab.go b/services/migrations/gitlab.go index efc5b960cf..a19a04bc44 100644 --- a/services/migrations/gitlab.go +++ b/services/migrations/gitlab.go @@ -16,6 +16,7 @@ import ( "time" issues_model "code.gitea.io/gitea/models/issues" + "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/log" base "code.gitea.io/gitea/modules/migration" @@ -340,7 +341,7 @@ func (g *GitlabDownloader) convertGitlabRelease(ctx context.Context, rel *gitlab return io.NopCloser(strings.NewReader(link.URL)), nil } - req, err := http.NewRequest("GET", link.URL, nil) + req, err := http.NewRequest(http.MethodGet, link.URL, nil) if err != nil { return nil, err } @@ -535,11 +536,15 @@ func (g *GitlabDownloader) GetComments(ctx context.Context, commentable base.Com } for _, stateEvent := range stateEvents { + posterUserID, posterUsername := user.GhostUserID, user.GhostUserName + if stateEvent.User != nil { + posterUserID, posterUsername = int64(stateEvent.User.ID), stateEvent.User.Username + } comment := &base.Comment{ IssueIndex: commentable.GetLocalIndex(), Index: int64(stateEvent.ID), - PosterID: int64(stateEvent.User.ID), - PosterName: stateEvent.User.Username, + PosterID: posterUserID, + PosterName: posterUsername, Content: "", Created: *stateEvent.CreatedAt, } diff --git a/services/migrations/gitlab_test.go b/services/migrations/gitlab_test.go index 52f5827dfe..73a1b6a276 100644 --- a/services/migrations/gitlab_test.go +++ b/services/migrations/gitlab_test.go @@ -50,7 +50,7 @@ func TestGitlabDownloadRepo(t *testing.T) { topics, err := downloader.GetTopics(ctx) assert.NoError(t, err) assert.Len(t, topics, 2) - assert.EqualValues(t, []string{"migration", "test"}, topics) + assert.Equal(t, []string{"migration", "test"}, topics) milestones, err := downloader.GetMilestones(ctx) assert.NoError(t, err) @@ -501,7 +501,7 @@ func TestAwardsToReactions(t *testing.T) { assert.NoError(t, json.Unmarshal([]byte(testResponse), &awards)) reactions := downloader.awardsToReactions(awards) - 
assert.EqualValues(t, []*base.Reaction{ + assert.Equal(t, []*base.Reaction{ { UserName: "lafriks", UserID: 1241334, @@ -593,7 +593,7 @@ func TestNoteToComment(t *testing.T) { for i, note := range notes { actualComment := *downloader.convertNoteToComment(17, ¬e) - assert.EqualValues(t, actualComment, comments[i]) + assert.Equal(t, actualComment, comments[i]) } } diff --git a/services/migrations/migrate.go b/services/migrations/migrate.go index 5dda12286f..eba9c79df5 100644 --- a/services/migrations/migrate.go +++ b/services/migrations/migrate.go @@ -6,6 +6,7 @@ package migrations import ( "context" + "errors" "fmt" "net" "net/url" @@ -74,11 +75,9 @@ func IsMigrateURLAllowed(remoteURL string, doer *user_model.User) error { return &git.ErrInvalidCloneAddr{Host: u.Host, IsProtocolInvalid: true, IsPermissionDenied: true, IsURLError: true} } - hostName, _, err := net.SplitHostPort(u.Host) - if err != nil { - // u.Host can be "host" or "host:port" - err = nil //nolint - hostName = u.Host + hostName, _, errIgnored := net.SplitHostPort(u.Host) + if errIgnored != nil { + hostName = u.Host // u.Host can be "host" or "host:port" } // some users only use proxy, there is no DNS resolver. it's safe to ignore the LookupIP error @@ -211,7 +210,7 @@ func migrateRepository(ctx context.Context, doer *user_model.User, downloader ba if cloneURL.Scheme == "file" || cloneURL.Scheme == "" { if cloneAddrURL.Scheme != "file" && cloneAddrURL.Scheme != "" { - return fmt.Errorf("repo info has changed from external to local filesystem") + return errors.New("repo info has changed from external to local filesystem") } } diff --git a/services/migrations/onedev.go b/services/migrations/onedev.go index 4ce35dd12e..e052cba0cc 100644 --- a/services/migrations/onedev.go +++ b/services/migrations/onedev.go @@ -128,7 +128,7 @@ func (d *OneDevDownloader) callAPI(ctx context.Context, endpoint string, paramet u.RawQuery = query.Encode() } - req, err := http.NewRequestWithContext(ctx, "GET", u.String(), nil) + req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil) if err != nil { return err } diff --git a/services/mirror/mirror.go b/services/mirror/mirror.go index e029bbb1d6..7fb7fabb75 100644 --- a/services/mirror/mirror.go +++ b/services/mirror/mirror.go @@ -5,7 +5,7 @@ package mirror import ( "context" - "fmt" + "errors" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/modules/log" @@ -29,7 +29,7 @@ func doMirrorSync(ctx context.Context, req *SyncRequest) { } } -var errLimit = fmt.Errorf("reached limit") +var errLimit = errors.New("reached limit") // Update checks and updates mirror repositories. func Update(ctx context.Context, pullLimit, pushLimit int) error { @@ -68,7 +68,7 @@ func Update(ctx context.Context, pullLimit, pushLimit int) error { // Check we've not been cancelled select { case <-ctx.Done(): - return fmt.Errorf("aborted") + return errors.New("aborted") default: } diff --git a/services/mirror/mirror_pull.go b/services/mirror/mirror_pull.go index 658747e7c8..cb90af5894 100644 --- a/services/mirror/mirror_pull.go +++ b/services/mirror/mirror_pull.go @@ -71,7 +71,7 @@ func UpdateAddress(ctx context.Context, m *repo_model.Mirror, addr string) error // erase authentication before storing in database u.User = nil m.Repo.OriginalURL = u.String() - return repo_model.UpdateRepositoryCols(ctx, m.Repo, "original_url") + return repo_model.UpdateRepositoryColsNoAutoTime(ctx, m.Repo, "original_url") } // mirrorSyncResult contains information of a updated reference. 
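A minimal standalone sketch (not part of this changeset) of the host-parsing fallback used in IsMigrateURLAllowed above: net.SplitHostPort fails when u.Host carries no port, so the error is deliberately ignored and the raw host is used instead. The helper name and the sample URLs are illustrative only.

package main

import (
	"fmt"
	"net"
	"net/url"
)

// hostWithoutPort mirrors the fallback above: u.Host can be "host" or "host:port",
// and net.SplitHostPort returns an error for the bare-host form.
func hostWithoutPort(u *url.URL) string {
	hostName, _, err := net.SplitHostPort(u.Host)
	if err != nil {
		return u.Host // no port present, use the host as-is
	}
	return hostName
}

func main() {
	for _, raw := range []string{"https://example.com/org/repo.git", "https://example.com:3000/org/repo.git"} {
		u, err := url.Parse(raw)
		if err != nil {
			panic(err)
		}
		fmt.Println(hostWithoutPort(u)) // prints "example.com" in both cases
	}
}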
@@ -235,6 +235,24 @@ func pruneBrokenReferences(ctx context.Context, return pruneErr } +// checkRecoverableSyncError takes an error message from a git fetch command and returns false if it should be a fatal/blocking error +func checkRecoverableSyncError(stderrMessage string) bool { + switch { + case strings.Contains(stderrMessage, "unable to resolve reference") && strings.Contains(stderrMessage, "reference broken"): + return true + case strings.Contains(stderrMessage, "remote error") && strings.Contains(stderrMessage, "not our ref"): + return true + case strings.Contains(stderrMessage, "cannot lock ref") && strings.Contains(stderrMessage, "but expected"): + return true + case strings.Contains(stderrMessage, "cannot lock ref") && strings.Contains(stderrMessage, "unable to resolve reference"): + return true + case strings.Contains(stderrMessage, "Unable to create") && strings.Contains(stderrMessage, ".lock"): + return true + default: + return false + } +} + // runSync returns true if sync finished without error. func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bool) { repoPath := m.Repo.RepoPath() @@ -275,7 +293,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo stdoutMessage := util.SanitizeCredentialURLs(stdout) // Now check if the error is a resolve reference due to broken reference - if strings.Contains(stderr, "unable to resolve reference") && strings.Contains(stderr, "reference broken") { + if checkRecoverableSyncError(stderr) { log.Warn("SyncMirrors [repo: %-v]: failed to update mirror repository due to broken references:\nStdout: %s\nStderr: %s\nErr: %v\nAttempting Prune", m.Repo, stdoutMessage, stderrMessage, err) err = nil @@ -324,6 +342,15 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo return nil, false } + if m.LFS && setting.LFS.StartServer { + log.Trace("SyncMirrors [repo: %-v]: syncing LFS objects...", m.Repo) + endpoint := lfs.DetermineEndpoint(remoteURL.String(), m.LFSEndpoint) + lfsClient := lfs.NewClient(endpoint, nil) + if err = repo_module.StoreMissingLfsObjectsInRepository(ctx, m.Repo, gitRepo, lfsClient); err != nil { + log.Error("SyncMirrors [repo: %-v]: failed to synchronize LFS objects for repository: %v", m.Repo, err) + } + } + log.Trace("SyncMirrors [repo: %-v]: syncing branches...", m.Repo) if _, err = repo_module.SyncRepoBranchesWithRepo(ctx, m.Repo, gitRepo, 0); err != nil { log.Error("SyncMirrors [repo: %-v]: failed to synchronize branches: %v", m.Repo, err) @@ -333,15 +360,6 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo if err = repo_module.SyncReleasesWithTags(ctx, m.Repo, gitRepo); err != nil { log.Error("SyncMirrors [repo: %-v]: failed to synchronize tags to releases: %v", m.Repo, err) } - - if m.LFS && setting.LFS.StartServer { - log.Trace("SyncMirrors [repo: %-v]: syncing LFS objects...", m.Repo) - endpoint := lfs.DetermineEndpoint(remoteURL.String(), m.LFSEndpoint) - lfsClient := lfs.NewClient(endpoint, nil) - if err = repo_module.StoreMissingLfsObjectsInRepository(ctx, m.Repo, gitRepo, lfsClient); err != nil { - log.Error("SyncMirrors [repo: %-v]: failed to synchronize LFS objects for repository: %v", m.Repo, err) - } - } gitRepo.Close() log.Trace("SyncMirrors [repo: %-v]: updating size of repository", m.Repo) @@ -368,7 +386,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo stdoutMessage := util.SanitizeCredentialURLs(stdout) // Now check if the error is a resolve reference due to 
broken reference - if strings.Contains(stderrMessage, "unable to resolve reference") && strings.Contains(stderrMessage, "reference broken") { + if checkRecoverableSyncError(stderrMessage) { log.Warn("SyncMirrors [repo: %-v Wiki]: failed to update mirror wiki repository due to broken references:\nStdout: %s\nStderr: %s\nErr: %v\nAttempting Prune", m.Repo, stdoutMessage, stderrMessage, err) err = nil @@ -419,7 +437,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo } for _, branch := range branches { - cache.Remove(m.Repo.GetCommitsCountCacheKey(branch.Name, true)) + cache.Remove(m.Repo.GetCommitsCountCacheKey(branch, true)) } m.UpdatedUnix = timeutil.TimeStampNow() @@ -635,7 +653,7 @@ func checkAndUpdateEmptyRepository(ctx context.Context, m *repo_model.Mirror, re } m.Repo.IsEmpty = false // Update the is empty and default_branch columns - if err := repo_model.UpdateRepositoryCols(ctx, m.Repo, "default_branch", "is_empty"); err != nil { + if err := repo_model.UpdateRepositoryColsWithAutoTime(ctx, m.Repo, "default_branch", "is_empty"); err != nil { log.Error("Failed to update default branch of repository %-v. Error: %v", m.Repo, err) desc := fmt.Sprintf("Failed to update default branch of repository '%s': %v", m.Repo.RepoPath(), err) if err = system_model.CreateRepositoryNotice(desc); err != nil { diff --git a/services/mirror/mirror_pull_test.go b/services/mirror/mirror_pull_test.go new file mode 100644 index 0000000000..97859be5b0 --- /dev/null +++ b/services/mirror/mirror_pull_test.go @@ -0,0 +1,94 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package mirror + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_parseRemoteUpdateOutput(t *testing.T) { + output := ` + * [new tag] v0.1.8 -> v0.1.8 + * [new branch] master -> origin/master + - [deleted] (none) -> origin/test1 + - [deleted] (none) -> tag1 + + f895a1e...957a993 test2 -> origin/test2 (forced update) + 957a993..a87ba5f test3 -> origin/test3 + * [new ref] refs/pull/26595/head -> refs/pull/26595/head + * [new ref] refs/pull/26595/merge -> refs/pull/26595/merge + e0639e38fb..6db2410489 refs/pull/25873/head -> refs/pull/25873/head + + 1c97ebc746...976d27d52f refs/pull/25873/merge -> refs/pull/25873/merge (forced update) +` + results := parseRemoteUpdateOutput(output, "origin") + assert.Len(t, results, 10) + assert.Equal(t, "refs/tags/v0.1.8", results[0].refName.String()) + assert.Equal(t, gitShortEmptySha, results[0].oldCommitID) + assert.Empty(t, results[0].newCommitID) + + assert.Equal(t, "refs/heads/master", results[1].refName.String()) + assert.Equal(t, gitShortEmptySha, results[1].oldCommitID) + assert.Empty(t, results[1].newCommitID) + + assert.Equal(t, "refs/heads/test1", results[2].refName.String()) + assert.Empty(t, results[2].oldCommitID) + assert.Equal(t, gitShortEmptySha, results[2].newCommitID) + + assert.Equal(t, "refs/tags/tag1", results[3].refName.String()) + assert.Empty(t, results[3].oldCommitID) + assert.Equal(t, gitShortEmptySha, results[3].newCommitID) + + assert.Equal(t, "refs/heads/test2", results[4].refName.String()) + assert.Equal(t, "f895a1e", results[4].oldCommitID) + assert.Equal(t, "957a993", results[4].newCommitID) + + assert.Equal(t, "refs/heads/test3", results[5].refName.String()) + assert.Equal(t, "957a993", results[5].oldCommitID) + assert.Equal(t, "a87ba5f", results[5].newCommitID) + + assert.Equal(t, "refs/pull/26595/head", results[6].refName.String()) + assert.Equal(t, gitShortEmptySha, 
results[6].oldCommitID) + assert.Empty(t, results[6].newCommitID) + + assert.Equal(t, "refs/pull/26595/merge", results[7].refName.String()) + assert.Equal(t, gitShortEmptySha, results[7].oldCommitID) + assert.Empty(t, results[7].newCommitID) + + assert.Equal(t, "refs/pull/25873/head", results[8].refName.String()) + assert.Equal(t, "e0639e38fb", results[8].oldCommitID) + assert.Equal(t, "6db2410489", results[8].newCommitID) + + assert.Equal(t, "refs/pull/25873/merge", results[9].refName.String()) + assert.Equal(t, "1c97ebc746", results[9].oldCommitID) + assert.Equal(t, "976d27d52f", results[9].newCommitID) +} + +func Test_checkRecoverableSyncError(t *testing.T) { + cases := []struct { + recoverable bool + message string + }{ + // A race condition in http git-fetch where certain refs were listed on the remote and are no longer there, would exit status 128 + {true, "fatal: remote error: upload-pack: not our ref 988881adc9fc3655077dc2d4d757d480b5ea0e11"}, + // A race condition where a local gc/prune removes a named ref during a git-fetch would exit status 1 + {true, "cannot lock ref 'refs/pull/123456/merge': unable to resolve reference 'refs/pull/134153/merge'"}, + // A race condition in http git-fetch where named refs were listed on the remote and are no longer there + {true, "error: cannot lock ref 'refs/remotes/origin/foo': unable to resolve reference 'refs/remotes/origin/foo': reference broken"}, + // A race condition in http git-fetch where named refs were force-pushed during the update, would exit status 128 + {true, "error: cannot lock ref 'refs/pull/123456/merge': is at 988881adc9fc3655077dc2d4d757d480b5ea0e11 but expected 7f894307ffc9553edbd0b671cab829786866f7b2"}, + // A race condition with other local git operations, such as git-maintenance, would exit status 128 (well, "Unable" the "U" is uppercase) + {true, "fatal: Unable to create '/data/gitea-repositories/foo-org/bar-repo.git/./objects/info/commit-graphs/commit-graph-chain.lock': File exists."}, + // Missing or unauthorized credentials, would exit status 128 + {false, "fatal: Authentication failed for 'https://example.com/foo-does-not-exist/bar.git/'"}, + // A non-existent remote repository, would exit status 128 + {false, "fatal: Could not read from remote repository."}, + // A non-functioning proxy, would exit status 128 + {false, "fatal: unable to access 'https://example.com/foo-does-not-exist/bar.git/': Failed to connect to configured-https-proxy port 1080 after 0 ms: Couldn't connect to server"}, + } + + for _, c := range cases { + assert.Equal(t, c.recoverable, checkRecoverableSyncError(c.message), "test case: %s", c.message) + } +} diff --git a/services/mirror/mirror_push.go b/services/mirror/mirror_push.go index 6e72876893..9b57427d98 100644 --- a/services/mirror/mirror_push.go +++ b/services/mirror/mirror_push.go @@ -143,7 +143,7 @@ func runPushSync(ctx context.Context, m *repo_model.PushMirror) error { var gitRepo *git.Repository if isWiki { - gitRepo, err = gitrepo.OpenWikiRepository(ctx, repo) + gitRepo, err = gitrepo.OpenRepository(ctx, repo.WikiStorageRepo()) } else { gitRepo, err = gitrepo.OpenRepository(ctx, repo) } diff --git a/services/mirror/mirror_test.go b/services/mirror/mirror_test.go deleted file mode 100644 index 76632b6872..0000000000 --- a/services/mirror/mirror_test.go +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2023 The Gitea Authors. All rights reserved. 
-// SPDX-License-Identifier: MIT - -package mirror - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func Test_parseRemoteUpdateOutput(t *testing.T) { - output := ` - * [new tag] v0.1.8 -> v0.1.8 - * [new branch] master -> origin/master - - [deleted] (none) -> origin/test1 - - [deleted] (none) -> tag1 - + f895a1e...957a993 test2 -> origin/test2 (forced update) - 957a993..a87ba5f test3 -> origin/test3 - * [new ref] refs/pull/26595/head -> refs/pull/26595/head - * [new ref] refs/pull/26595/merge -> refs/pull/26595/merge - e0639e38fb..6db2410489 refs/pull/25873/head -> refs/pull/25873/head - + 1c97ebc746...976d27d52f refs/pull/25873/merge -> refs/pull/25873/merge (forced update) -` - results := parseRemoteUpdateOutput(output, "origin") - assert.Len(t, results, 10) - assert.EqualValues(t, "refs/tags/v0.1.8", results[0].refName.String()) - assert.EqualValues(t, gitShortEmptySha, results[0].oldCommitID) - assert.EqualValues(t, "", results[0].newCommitID) - - assert.EqualValues(t, "refs/heads/master", results[1].refName.String()) - assert.EqualValues(t, gitShortEmptySha, results[1].oldCommitID) - assert.EqualValues(t, "", results[1].newCommitID) - - assert.EqualValues(t, "refs/heads/test1", results[2].refName.String()) - assert.EqualValues(t, "", results[2].oldCommitID) - assert.EqualValues(t, gitShortEmptySha, results[2].newCommitID) - - assert.EqualValues(t, "refs/tags/tag1", results[3].refName.String()) - assert.EqualValues(t, "", results[3].oldCommitID) - assert.EqualValues(t, gitShortEmptySha, results[3].newCommitID) - - assert.EqualValues(t, "refs/heads/test2", results[4].refName.String()) - assert.EqualValues(t, "f895a1e", results[4].oldCommitID) - assert.EqualValues(t, "957a993", results[4].newCommitID) - - assert.EqualValues(t, "refs/heads/test3", results[5].refName.String()) - assert.EqualValues(t, "957a993", results[5].oldCommitID) - assert.EqualValues(t, "a87ba5f", results[5].newCommitID) - - assert.EqualValues(t, "refs/pull/26595/head", results[6].refName.String()) - assert.EqualValues(t, gitShortEmptySha, results[6].oldCommitID) - assert.EqualValues(t, "", results[6].newCommitID) - - assert.EqualValues(t, "refs/pull/26595/merge", results[7].refName.String()) - assert.EqualValues(t, gitShortEmptySha, results[7].oldCommitID) - assert.EqualValues(t, "", results[7].newCommitID) - - assert.EqualValues(t, "refs/pull/25873/head", results[8].refName.String()) - assert.EqualValues(t, "e0639e38fb", results[8].oldCommitID) - assert.EqualValues(t, "6db2410489", results[8].newCommitID) - - assert.EqualValues(t, "refs/pull/25873/merge", results[9].refName.String()) - assert.EqualValues(t, "1c97ebc746", results[9].oldCommitID) - assert.EqualValues(t, "976d27d52f", results[9].newCommitID) -} diff --git a/services/notify/notifier.go b/services/notify/notifier.go index 40428454be..875a70e564 100644 --- a/services/notify/notifier.go +++ b/services/notify/notifier.go @@ -79,5 +79,7 @@ type Notifier interface { CreateCommitStatus(ctx context.Context, repo *repo_model.Repository, commit *repository.PushCommit, sender *user_model.User, status *git_model.CommitStatus) + WorkflowRunStatusUpdate(ctx context.Context, repo *repo_model.Repository, sender *user_model.User, run *actions_model.ActionRun) + WorkflowJobStatusUpdate(ctx context.Context, repo *repo_model.Repository, sender *user_model.User, job *actions_model.ActionRunJob, task *actions_model.ActionTask) } diff --git a/services/notify/notify.go b/services/notify/notify.go index 9f8be4b577..2416cbd2e0 100644 --- 
a/services/notify/notify.go +++ b/services/notify/notify.go @@ -46,10 +46,25 @@ func DeleteWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model } } +func shouldSendCommentChangeNotification(ctx context.Context, comment *issues_model.Comment) bool { + if err := comment.LoadReview(ctx); err != nil { + log.Error("LoadReview: %v", err) + return false + } else if comment.Review != nil && comment.Review.Type == issues_model.ReviewTypePending { + // Pending review comments updating should not triggered + return false + } + return true +} + // CreateIssueComment notifies issue comment related message to notifiers func CreateIssueComment(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, issue *issues_model.Issue, comment *issues_model.Comment, mentions []*user_model.User, ) { + if !shouldSendCommentChangeNotification(ctx, comment) { + return + } + for _, notifier := range notifiers { notifier.CreateIssueComment(ctx, doer, repo, issue, comment, mentions) } @@ -156,6 +171,10 @@ func PullReviewDismiss(ctx context.Context, doer *user_model.User, review *issue // UpdateComment notifies update comment to notifiers func UpdateComment(ctx context.Context, doer *user_model.User, c *issues_model.Comment, oldContent string) { + if !shouldSendCommentChangeNotification(ctx, c) { + return + } + for _, notifier := range notifiers { notifier.UpdateComment(ctx, doer, c, oldContent) } @@ -163,6 +182,10 @@ func UpdateComment(ctx context.Context, doer *user_model.User, c *issues_model.C // DeleteComment notifies delete comment to notifiers func DeleteComment(ctx context.Context, doer *user_model.User, c *issues_model.Comment) { + if !shouldSendCommentChangeNotification(ctx, c) { + return + } + for _, notifier := range notifiers { notifier.DeleteComment(ctx, doer, c) } @@ -376,6 +399,12 @@ func CreateCommitStatus(ctx context.Context, repo *repo_model.Repository, commit } } +func WorkflowRunStatusUpdate(ctx context.Context, repo *repo_model.Repository, sender *user_model.User, run *actions_model.ActionRun) { + for _, notifier := range notifiers { + notifier.WorkflowRunStatusUpdate(ctx, repo, sender, run) + } +} + func WorkflowJobStatusUpdate(ctx context.Context, repo *repo_model.Repository, sender *user_model.User, job *actions_model.ActionRunJob, task *actions_model.ActionTask) { for _, notifier := range notifiers { notifier.WorkflowJobStatusUpdate(ctx, repo, sender, job, task) diff --git a/services/notify/null.go b/services/notify/null.go index 9c794a2342..c3085d7c9e 100644 --- a/services/notify/null.go +++ b/services/notify/null.go @@ -214,5 +214,8 @@ func (*NullNotifier) ChangeDefaultBranch(ctx context.Context, repo *repo_model.R func (*NullNotifier) CreateCommitStatus(ctx context.Context, repo *repo_model.Repository, commit *repository.PushCommit, sender *user_model.User, status *git_model.CommitStatus) { } +func (*NullNotifier) WorkflowRunStatusUpdate(ctx context.Context, repo *repo_model.Repository, sender *user_model.User, run *actions_model.ActionRun) { +} + func (*NullNotifier) WorkflowJobStatusUpdate(ctx context.Context, repo *repo_model.Repository, sender *user_model.User, job *actions_model.ActionRunJob, task *actions_model.ActionTask) { } diff --git a/services/oauth2_provider/access_token.go b/services/oauth2_provider/access_token.go index 5cb6fb64c5..e01777031b 100644 --- a/services/oauth2_provider/access_token.go +++ b/services/oauth2_provider/access_token.go @@ -1,12 +1,13 @@ // Copyright 2024 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package oauth2_provider //nolint +package oauth2_provider import ( "context" "fmt" "slices" + "strconv" "strings" auth "code.gitea.io/gitea/models/auth" @@ -15,7 +16,9 @@ import ( user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" + api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/timeutil" + "code.gitea.io/gitea/modules/util" "github.com/golang-jwt/jwt/v5" ) @@ -82,7 +85,7 @@ func GrantAdditionalScopes(grantScopes string) auth.AccessTokenScope { } var accessScopes []string // the scopes for access control, but not for general information - for _, scope := range strings.Split(grantScopes, " ") { + for scope := range strings.SplitSeq(grantScopes, " ") { if scope != "" && !slices.Contains(generalScopesSupported, scope) { accessScopes = append(accessScopes, scope) } @@ -177,7 +180,7 @@ func NewAccessTokenResponse(ctx context.Context, grant *auth.OAuth2Grant, server ExpiresAt: jwt.NewNumericDate(expirationDate.AsTime()), Issuer: setting.AppURL, Audience: []string{app.ClientID}, - Subject: fmt.Sprint(grant.UserID), + Subject: strconv.FormatInt(grant.UserID, 10), }, Nonce: grant.Nonce, } @@ -230,12 +233,11 @@ func NewAccessTokenResponse(ctx context.Context, grant *auth.OAuth2Grant, server }, nil } -// returns a list of "org" and "org:team" strings, -// that the given user is a part of. +// GetOAuthGroupsForUser returns a list of "org" and "org:team" strings, that the given user is a part of. func GetOAuthGroupsForUser(ctx context.Context, user *user_model.User, onlyPublicGroups bool) ([]string, error) { orgs, err := db.Find[org_model.Organization](ctx, org_model.FindOrgOptions{ - UserID: user.ID, - IncludePrivate: !onlyPublicGroups, + UserID: user.ID, + IncludeVisibility: util.Iif(onlyPublicGroups, api.VisibleTypePublic, api.VisibleTypePrivate), }) if err != nil { return nil, fmt.Errorf("GetUserOrgList: %w", err) diff --git a/services/oauth2_provider/additional_scopes_test.go b/services/oauth2_provider/additional_scopes_test.go index 2d4df7aea2..5f375346dc 100644 --- a/services/oauth2_provider/additional_scopes_test.go +++ b/services/oauth2_provider/additional_scopes_test.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package oauth2_provider //nolint +package oauth2_provider import ( "testing" diff --git a/services/oauth2_provider/init.go b/services/oauth2_provider/init.go index e5958099a6..c412bd6433 100644 --- a/services/oauth2_provider/init.go +++ b/services/oauth2_provider/init.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package oauth2_provider //nolint +package oauth2_provider import ( "context" diff --git a/services/oauth2_provider/jwtsigningkey.go b/services/oauth2_provider/jwtsigningkey.go index 6c668db463..03c7403f75 100644 --- a/services/oauth2_provider/jwtsigningkey.go +++ b/services/oauth2_provider/jwtsigningkey.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package oauth2_provider //nolint +package oauth2_provider import ( "crypto/ecdsa" @@ -31,7 +31,7 @@ type ErrInvalidAlgorithmType struct { } func (err ErrInvalidAlgorithmType) Error() string { - return fmt.Sprintf("JWT signing algorithm is not supported: %s", err.Algorithm) + return "JWT signing algorithm is not supported: " + err.Algorithm } // JWTSigningKey represents a algorithm/key pair to sign JWTs diff --git a/services/oauth2_provider/token.go b/services/oauth2_provider/token.go index b71b11906e..935c4cc01f 100644 --- a/services/oauth2_provider/token.go +++ b/services/oauth2_provider/token.go @@ -1,9 +1,10 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package oauth2_provider //nolint +package oauth2_provider import ( + "errors" "fmt" "time" @@ -44,12 +45,12 @@ func ParseToken(jwtToken string, signingKey JWTSigningKey) (*Token, error) { return nil, err } if !parsedToken.Valid { - return nil, fmt.Errorf("invalid token") + return nil, errors.New("invalid token") } var token *Token var ok bool if token, ok = parsedToken.Claims.(*Token); !ok || !parsedToken.Valid { - return nil, fmt.Errorf("invalid token") + return nil, errors.New("invalid token") } return token, nil } diff --git a/services/org/team.go b/services/org/team.go index ee3bd898ea..6890dafd90 100644 --- a/services/org/team.go +++ b/services/org/team.go @@ -259,37 +259,6 @@ func AddTeamMember(ctx context.Context, team *organization.Team, user *user_mode } team.NumMembers++ - - // Give access to team repositories. - // update exist access if mode become bigger - subQuery := builder.Select("repo_id").From("team_repo"). - Where(builder.Eq{"team_id": team.ID}) - - if _, err := sess.Where("user_id=?", user.ID). - In("repo_id", subQuery). - And("mode < ?", team.AccessMode). - SetExpr("mode", team.AccessMode). 
- Update(new(access_model.Access)); err != nil { - return fmt.Errorf("update user accesses: %w", err) - } - - // for not exist access - var repoIDs []int64 - accessSubQuery := builder.Select("repo_id").From("access").Where(builder.Eq{"user_id": user.ID}) - if err := sess.SQL(subQuery.And(builder.NotIn("repo_id", accessSubQuery))).Find(&repoIDs); err != nil { - return fmt.Errorf("select id accesses: %w", err) - } - - accesses := make([]*access_model.Access, 0, 100) - for i, repoID := range repoIDs { - accesses = append(accesses, &access_model.Access{RepoID: repoID, UserID: user.ID, Mode: team.AccessMode}) - if (i%100 == 0 || i == len(repoIDs)-1) && len(accesses) > 0 { - if err = db.Insert(ctx, accesses); err != nil { - return fmt.Errorf("insert new user accesses: %w", err) - } - accesses = accesses[:0] - } - } return nil }) if err != nil { diff --git a/services/org/team_test.go b/services/org/team_test.go index 3791776e46..c1a69d8ee7 100644 --- a/services/org/team_test.go +++ b/services/org/team_test.go @@ -88,7 +88,7 @@ func TestUpdateTeam(t *testing.T) { assert.True(t, strings.HasPrefix(team.Description, "A long description!")) access := unittest.AssertExistsAndLoadBean(t, &access_model.Access{UserID: 4, RepoID: 3}) - assert.EqualValues(t, perm.AccessModeAdmin, access.Mode) + assert.Equal(t, perm.AccessModeAdmin, access.Mode) unittest.CheckConsistencyFor(t, &organization.Team{ID: team.ID}) } @@ -166,24 +166,6 @@ func TestRemoveTeamMember(t *testing.T) { assert.True(t, organization.IsErrLastOrgOwner(err)) } -func TestRepository_RecalculateAccesses3(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - team5 := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: 5}) - user29 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 29}) - - has, err := db.GetEngine(db.DefaultContext).Get(&access_model.Access{UserID: user29.ID, RepoID: 23}) - assert.NoError(t, err) - assert.False(t, has) - - // adding user29 to team5 should add an explicit access row for repo 23 - // even though repo 23 is public - assert.NoError(t, AddTeamMember(db.DefaultContext, team5, user29)) - - has, err = db.GetEngine(db.DefaultContext).Get(&access_model.Access{UserID: user29.ID, RepoID: 23}) - assert.NoError(t, err) - assert.True(t, has) -} - func TestIncludesAllRepositoriesTeams(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) @@ -222,8 +204,9 @@ func TestIncludesAllRepositoriesTeams(t *testing.T) { // Create repos. repoIDs := make([]int64, 0) - for i := 0; i < 3; i++ { - r, err := repo_service.CreateRepositoryDirectly(db.DefaultContext, user, org.AsUser(), repo_service.CreateRepoOptions{Name: fmt.Sprintf("repo-%d", i)}) + for i := range 3 { + r, err := repo_service.CreateRepositoryDirectly(db.DefaultContext, user, org.AsUser(), + repo_service.CreateRepoOptions{Name: fmt.Sprintf("repo-%d", i)}, true) assert.NoError(t, err, "CreateRepository %d", i) if r != nil { repoIDs = append(repoIDs, r.ID) @@ -285,7 +268,7 @@ func TestIncludesAllRepositoriesTeams(t *testing.T) { } // Create repo and check teams repositories. 
- r, err := repo_service.CreateRepositoryDirectly(db.DefaultContext, user, org.AsUser(), repo_service.CreateRepoOptions{Name: "repo-last"}) + r, err := repo_service.CreateRepositoryDirectly(db.DefaultContext, user, org.AsUser(), repo_service.CreateRepoOptions{Name: "repo-last"}, true) assert.NoError(t, err, "CreateRepository last") if r != nil { repoIDs = append(repoIDs, r.ID) @@ -298,7 +281,7 @@ func TestIncludesAllRepositoriesTeams(t *testing.T) { } // Remove repo and check teams repositories. - assert.NoError(t, repo_service.DeleteRepositoryDirectly(db.DefaultContext, user, repoIDs[0]), "DeleteRepository") + assert.NoError(t, repo_service.DeleteRepositoryDirectly(db.DefaultContext, repoIDs[0]), "DeleteRepository") teamRepos[0] = repoIDs[1:] teamRepos[1] = repoIDs[1:] teamRepos[3] = repoIDs[1:3] @@ -310,7 +293,7 @@ func TestIncludesAllRepositoriesTeams(t *testing.T) { // Wipe created items. for i, rid := range repoIDs { if i > 0 { // first repo already deleted. - assert.NoError(t, repo_service.DeleteRepositoryDirectly(db.DefaultContext, user, rid), "DeleteRepository %d", i) + assert.NoError(t, repo_service.DeleteRepositoryDirectly(db.DefaultContext, rid), "DeleteRepository %d", i) } } assert.NoError(t, DeleteOrganization(db.DefaultContext, org, false), "DeleteOrganization") diff --git a/services/org/user.go b/services/org/user.go index 3565ecc2fc..26927253d2 100644 --- a/services/org/user.go +++ b/services/org/user.go @@ -64,10 +64,11 @@ func RemoveOrgUser(ctx context.Context, org *organization.Organization, user *us if err != nil { return fmt.Errorf("AccessibleReposEnv: %w", err) } - repoIDs, err := env.RepoIDs(ctx, 1, org.NumRepos) + repoIDs, err := env.RepoIDs(ctx) if err != nil { return fmt.Errorf("GetUserRepositories [%d]: %w", user.ID, err) } + for _, repoID := range repoIDs { repo, err := repo_model.GetRepositoryByID(ctx, repoID) if err != nil { diff --git a/services/org/user_test.go b/services/org/user_test.go index 96d1a1c8ca..c61d600d90 100644 --- a/services/org/user_test.go +++ b/services/org/user_test.go @@ -53,7 +53,7 @@ func TestRemoveOrgUser(t *testing.T) { assert.NoError(t, RemoveOrgUser(db.DefaultContext, org, user)) unittest.AssertNotExistsBean(t, &organization.OrgUser{OrgID: org.ID, UID: user.ID}) org = unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: org.ID}) - assert.EqualValues(t, expectedNumMembers, org.NumMembers) + assert.Equal(t, expectedNumMembers, org.NumMembers) } org3 := unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 3}) diff --git a/services/packages/alpine/repository.go b/services/packages/alpine/repository.go index 27e6391980..277c188874 100644 --- a/services/packages/alpine/repository.go +++ b/services/packages/alpine/repository.go @@ -290,7 +290,7 @@ func buildPackagesIndex(ctx context.Context, ownerID int64, repoVersion *package privPem, _ := pem.Decode([]byte(priv)) if privPem == nil { - return fmt.Errorf("failed to decode private key pem") + return errors.New("failed to decode private key pem") } privKey, err := x509.ParsePKCS1PrivateKey(privPem.Bytes) diff --git a/services/packages/arch/repository.go b/services/packages/arch/repository.go index a12af82ba5..438bb10837 100644 --- a/services/packages/arch/repository.go +++ b/services/packages/arch/repository.go @@ -13,6 +13,7 @@ import ( "fmt" "io" "os" + "strconv" "strings" packages_model "code.gitea.io/gitea/models/packages" @@ -372,8 +373,8 @@ func writeDescription(tw *tar.Writer, opts *entryOptions) error { {"MD5SUM", opts.Blob.HashMD5}, {"SHA256SUM", 
opts.Blob.HashSHA256}, {"PGPSIG", opts.Signature}, - {"CSIZE", fmt.Sprintf("%d", opts.Blob.Size)}, - {"ISIZE", fmt.Sprintf("%d", opts.FileMetadata.InstalledSize)}, + {"CSIZE", strconv.FormatInt(opts.Blob.Size, 10)}, + {"ISIZE", strconv.FormatInt(opts.FileMetadata.InstalledSize, 10)}, {"NAME", opts.Package.Name}, {"BASE", opts.FileMetadata.Base}, {"ARCH", opts.FileMetadata.Architecture}, @@ -382,7 +383,7 @@ func writeDescription(tw *tar.Writer, opts *entryOptions) error { {"URL", opts.VersionMetadata.ProjectURL}, {"LICENSE", strings.Join(opts.VersionMetadata.Licenses, "\n")}, {"GROUPS", strings.Join(opts.FileMetadata.Groups, "\n")}, - {"BUILDDATE", fmt.Sprintf("%d", opts.FileMetadata.BuildDate)}, + {"BUILDDATE", strconv.FormatInt(opts.FileMetadata.BuildDate, 10)}, {"PACKAGER", opts.FileMetadata.Packager}, {"PROVIDES", strings.Join(opts.FileMetadata.Provides, "\n")}, {"REPLACES", strings.Join(opts.FileMetadata.Replaces, "\n")}, diff --git a/services/packages/arch/vercmp.go b/services/packages/arch/vercmp.go index 0d33dda0f1..d44aa530f0 100644 --- a/services/packages/arch/vercmp.go +++ b/services/packages/arch/vercmp.go @@ -34,13 +34,8 @@ func parseEVR(evr string) (epoch, version, release string) { func compareSegments(a, b []string) int { lenA, lenB := len(a), len(b) - var l int - if lenA > lenB { - l = lenB - } else { - l = lenA - } - for i := 0; i < l; i++ { + l := min(lenA, lenB) + for i := range l { if r := compare(a[i], b[i]); r != 0 { return r } diff --git a/services/packages/auth.go b/services/packages/auth.go index 4526a8e303..6e87643e29 100644 --- a/services/packages/auth.go +++ b/services/packages/auth.go @@ -4,6 +4,7 @@ package packages import ( + "errors" "fmt" "net/http" "strings" @@ -58,7 +59,7 @@ func ParseAuthorizationRequest(req *http.Request) (*PackageMeta, error) { parts := strings.SplitN(h, " ", 2) if len(parts) != 2 { log.Error("split token failed: %s", h) - return nil, fmt.Errorf("split token failed") + return nil, errors.New("split token failed") } return ParseAuthorizationToken(parts[1]) @@ -77,7 +78,7 @@ func ParseAuthorizationToken(tokenStr string) (*PackageMeta, error) { c, ok := token.Claims.(*packageClaims) if !token.Valid || !ok { - return nil, fmt.Errorf("invalid token claim") + return nil, errors.New("invalid token claim") } return &c.PackageMeta, nil diff --git a/services/packages/cargo/index.go b/services/packages/cargo/index.go index 0c8a98c40f..605335d0f1 100644 --- a/services/packages/cargo/index.go +++ b/services/packages/cargo/index.go @@ -213,7 +213,7 @@ func getOrCreateIndexRepository(ctx context.Context, doer, owner *user_model.Use if errors.Is(err, util.ErrNotExist) { repo, err = repo_service.CreateRepositoryDirectly(ctx, doer, owner, repo_service.CreateRepoOptions{ Name: IndexRepositoryName, - }) + }, true) if err != nil { return nil, fmt.Errorf("CreateRepository: %w", err) } @@ -247,7 +247,7 @@ func createOrUpdateConfigFile(ctx context.Context, repo *repo_model.Repository, "Initialize Cargo Config", func(t *files_service.TemporaryUploadRepository) error { var b bytes.Buffer - err := json.NewEncoder(&b).Encode(BuildConfig(owner, setting.Service.RequireSignInView || owner.Visibility != structs.VisibleTypePublic || repo.IsPrivate)) + err := json.NewEncoder(&b).Encode(BuildConfig(owner, setting.Service.RequireSignInViewStrict || owner.Visibility != structs.VisibleTypePublic || repo.IsPrivate)) if err != nil { return err } @@ -310,7 +310,7 @@ func alterRepositoryContent(ctx context.Context, doer *user_model.User, repo *re } func writeObjectToIndex(ctx 
context.Context, t *files_service.TemporaryUploadRepository, path string, r io.Reader) error { - hash, err := t.HashObject(ctx, r) + hash, err := t.HashObjectAndWrite(ctx, r) if err != nil { return err } diff --git a/services/packages/cleanup/cleanup.go b/services/packages/cleanup/cleanup.go index b7ba2b6ac4..959babe7cd 100644 --- a/services/packages/cleanup/cleanup.go +++ b/services/packages/cleanup/cleanup.go @@ -32,127 +32,136 @@ func CleanupTask(ctx context.Context, olderThan time.Duration) error { return CleanupExpiredData(ctx, olderThan) } -func ExecuteCleanupRules(outerCtx context.Context) error { - ctx, committer, err := db.TxContext(outerCtx) +func executeCleanupOneRulePackage(ctx context.Context, pcr *packages_model.PackageCleanupRule, p *packages_model.Package) (versionDeleted bool, err error) { + olderThan := time.Now().AddDate(0, 0, -pcr.RemoveDays) + pvs, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{ + PackageID: p.ID, + IsInternal: optional.Some(false), + Sort: packages_model.SortCreatedDesc, + }) if err != nil { - return err + return false, fmt.Errorf("CleanupRule [%d]: SearchVersions failed: %w", pcr.ID, err) } - defer committer.Close() - - err = packages_model.IterateEnabledCleanupRules(ctx, func(ctx context.Context, pcr *packages_model.PackageCleanupRule) error { - select { - case <-outerCtx.Done(): - return db.ErrCancelledf("While processing package cleanup rules") - default: + if pcr.KeepCount > 0 { + if pcr.KeepCount < len(pvs) { + pvs = pvs[pcr.KeepCount:] + } else { + pvs = nil } - - if err := pcr.CompiledPattern(); err != nil { - return fmt.Errorf("CleanupRule [%d]: CompilePattern failed: %w", pcr.ID, err) + } + for _, pv := range pvs { + if pcr.Type == packages_model.TypeContainer { + if skip, err := container_service.ShouldBeSkipped(ctx, pcr, p, pv); err != nil { + return false, fmt.Errorf("CleanupRule [%d]: container.ShouldBeSkipped failed: %w", pcr.ID, err) + } else if skip { + log.Debug("Rule[%d]: keep '%s/%s' (container)", pcr.ID, p.Name, pv.Version) + continue + } } - - olderThan := time.Now().AddDate(0, 0, -pcr.RemoveDays) - - packages, err := packages_model.GetPackagesByType(ctx, pcr.OwnerID, pcr.Type) - if err != nil { - return fmt.Errorf("CleanupRule [%d]: GetPackagesByType failed: %w", pcr.ID, err) + toMatch := pv.LowerVersion + if pcr.MatchFullName { + toMatch = p.LowerName + "/" + pv.LowerVersion + } + if pcr.KeepPatternMatcher != nil && pcr.KeepPatternMatcher.MatchString(toMatch) { + log.Debug("Rule[%d]: keep '%s/%s' (keep pattern)", pcr.ID, p.Name, pv.Version) + continue + } + if pv.CreatedUnix.AsLocalTime().After(olderThan) { + log.Debug("Rule[%d]: keep '%s/%s' (remove days) %v", pcr.ID, p.Name, pv.Version, pv.CreatedUnix.FormatDate()) + continue } + if pcr.RemovePatternMatcher != nil && !pcr.RemovePatternMatcher.MatchString(toMatch) { + log.Debug("Rule[%d]: keep '%s/%s' (remove pattern)", pcr.ID, p.Name, pv.Version) + continue + } + log.Debug("Rule[%d]: remove '%s/%s'", pcr.ID, p.Name, pv.Version) + if err := packages_service.DeletePackageVersionAndReferences(ctx, pv); err != nil { + log.Error("CleanupRule [%d]: DeletePackageVersionAndReferences failed: %v", pcr.ID, err) + continue + } + versionDeleted = true + } + return versionDeleted, nil +} - anyVersionDeleted := false - for _, p := range packages { - pvs, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{ - PackageID: p.ID, - IsInternal: optional.Some(false), - Sort: packages_model.SortCreatedDesc, - Paginator: 
db.NewAbsoluteListOptions(pcr.KeepCount, 200), - }) - if err != nil { - return fmt.Errorf("CleanupRule [%d]: SearchVersions failed: %w", pcr.ID, err) - } - versionDeleted := false - for _, pv := range pvs { - if pcr.Type == packages_model.TypeContainer { - if skip, err := container_service.ShouldBeSkipped(ctx, pcr, p, pv); err != nil { - return fmt.Errorf("CleanupRule [%d]: container.ShouldBeSkipped failed: %w", pcr.ID, err) - } else if skip { - log.Debug("Rule[%d]: keep '%s/%s' (container)", pcr.ID, p.Name, pv.Version) - continue - } - } +func executeCleanupOneRule(ctx context.Context, pcr *packages_model.PackageCleanupRule) error { + if err := pcr.CompiledPattern(); err != nil { + return fmt.Errorf("CleanupRule [%d]: CompilePattern failed: %w", pcr.ID, err) + } - toMatch := pv.LowerVersion - if pcr.MatchFullName { - toMatch = p.LowerName + "/" + pv.LowerVersion - } + packages, err := packages_model.GetPackagesByType(ctx, pcr.OwnerID, pcr.Type) + if err != nil { + return fmt.Errorf("CleanupRule [%d]: GetPackagesByType failed: %w", pcr.ID, err) + } - if pcr.KeepPatternMatcher != nil && pcr.KeepPatternMatcher.MatchString(toMatch) { - log.Debug("Rule[%d]: keep '%s/%s' (keep pattern)", pcr.ID, p.Name, pv.Version) - continue - } - if pv.CreatedUnix.AsLocalTime().After(olderThan) { - log.Debug("Rule[%d]: keep '%s/%s' (remove days)", pcr.ID, p.Name, pv.Version) - continue - } - if pcr.RemovePatternMatcher != nil && !pcr.RemovePatternMatcher.MatchString(toMatch) { - log.Debug("Rule[%d]: keep '%s/%s' (remove pattern)", pcr.ID, p.Name, pv.Version) - continue + anyVersionDeleted := false + for _, p := range packages { + versionDeleted := false + err = db.WithTx(ctx, func(ctx context.Context) (err error) { + versionDeleted, err = executeCleanupOneRulePackage(ctx, pcr, p) + return err + }) + if err != nil { + log.Error("CleanupRule [%d]: executeCleanupOneRulePackage(%d) failed: %v", pcr.ID, p.ID, err) + continue + } + anyVersionDeleted = anyVersionDeleted || versionDeleted + if versionDeleted { + if pcr.Type == packages_model.TypeCargo { + owner, err := user_model.GetUserByID(ctx, pcr.OwnerID) + if err != nil { + return fmt.Errorf("GetUserByID failed: %w", err) } - - log.Debug("Rule[%d]: remove '%s/%s'", pcr.ID, p.Name, pv.Version) - - if err := packages_service.DeletePackageVersionAndReferences(ctx, pv); err != nil { - return fmt.Errorf("CleanupRule [%d]: DeletePackageVersionAndReferences failed: %w", pcr.ID, err) + if err := cargo_service.UpdatePackageIndexIfExists(ctx, owner, owner, p.ID); err != nil { + return fmt.Errorf("CleanupRule [%d]: cargo.UpdatePackageIndexIfExists failed: %w", pcr.ID, err) } + } + } + } - versionDeleted = true - anyVersionDeleted = true + if anyVersionDeleted { + switch pcr.Type { + case packages_model.TypeDebian: + if err := debian_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil { + return fmt.Errorf("CleanupRule [%d]: debian.BuildAllRepositoryFiles failed: %w", pcr.ID, err) + } + case packages_model.TypeAlpine: + if err := alpine_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil { + return fmt.Errorf("CleanupRule [%d]: alpine.BuildAllRepositoryFiles failed: %w", pcr.ID, err) } + case packages_model.TypeRpm: + if err := rpm_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil { + return fmt.Errorf("CleanupRule [%d]: rpm.BuildAllRepositoryFiles failed: %w", pcr.ID, err) + } + case packages_model.TypeArch: + release, err := arch_service.AquireRegistryLock(ctx, pcr.OwnerID) + if err != nil { + return err + } + defer release() - if 
versionDeleted { - if pcr.Type == packages_model.TypeCargo { - owner, err := user_model.GetUserByID(ctx, pcr.OwnerID) - if err != nil { - return fmt.Errorf("GetUserByID failed: %w", err) - } - if err := cargo_service.UpdatePackageIndexIfExists(ctx, owner, owner, p.ID); err != nil { - return fmt.Errorf("CleanupRule [%d]: cargo.UpdatePackageIndexIfExists failed: %w", pcr.ID, err) - } - } + if err := arch_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil { + return fmt.Errorf("CleanupRule [%d]: arch.BuildAllRepositoryFiles failed: %w", pcr.ID, err) } } + } + return nil +} - if anyVersionDeleted { - switch pcr.Type { - case packages_model.TypeDebian: - if err := debian_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil { - return fmt.Errorf("CleanupRule [%d]: debian.BuildAllRepositoryFiles failed: %w", pcr.ID, err) - } - case packages_model.TypeAlpine: - if err := alpine_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil { - return fmt.Errorf("CleanupRule [%d]: alpine.BuildAllRepositoryFiles failed: %w", pcr.ID, err) - } - case packages_model.TypeRpm: - if err := rpm_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil { - return fmt.Errorf("CleanupRule [%d]: rpm.BuildAllRepositoryFiles failed: %w", pcr.ID, err) - } - case packages_model.TypeArch: - release, err := arch_service.AquireRegistryLock(ctx, pcr.OwnerID) - if err != nil { - return err - } - defer release() +func ExecuteCleanupRules(ctx context.Context) error { + return packages_model.IterateEnabledCleanupRules(ctx, func(ctx context.Context, pcr *packages_model.PackageCleanupRule) error { + select { + case <-ctx.Done(): + return db.ErrCancelledf("While processing package cleanup rules") + default: + } - if err := arch_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil { - return fmt.Errorf("CleanupRule [%d]: arch.BuildAllRepositoryFiles failed: %w", pcr.ID, err) - } - } + err := executeCleanupOneRule(ctx, pcr) + if err != nil { + log.Error("CleanupRule [%d]: executeCleanupOneRule failed: %v", pcr.ID, err) } return nil }) - if err != nil { - return err - } - - return committer.Commit() } func CleanupExpiredData(outerCtx context.Context, olderThan time.Duration) error { diff --git a/services/packages/container/blob_uploader.go b/services/packages/container/blob_uploader.go index bae2e2d6af..27bc4a5421 100644 --- a/services/packages/container/blob_uploader.go +++ b/services/packages/container/blob_uploader.go @@ -12,7 +12,7 @@ import ( packages_model "code.gitea.io/gitea/models/packages" packages_module "code.gitea.io/gitea/modules/packages" "code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/modules/util" + "code.gitea.io/gitea/modules/tempdir" ) var ( @@ -30,8 +30,12 @@ type BlobUploader struct { reading bool } -func buildFilePath(id string) string { - return util.FilePathJoinAbs(setting.Packages.ChunkedUploadPath, id) +func uploadPathTempDir() *tempdir.TempDir { + return setting.AppDataTempDir("package-upload") +} + +func buildFilePath(uploadPath *tempdir.TempDir, id string) string { + return uploadPath.JoinPath(id) } // NewBlobUploader creates a new blob uploader for the given id @@ -48,7 +52,12 @@ func NewBlobUploader(ctx context.Context, id string) (*BlobUploader, error) { } } - f, err := os.OpenFile(buildFilePath(model.ID), os.O_RDWR|os.O_CREATE, 0o666) + uploadPath := uploadPathTempDir() + _, err = uploadPath.MkdirAllSub("") + if err != nil { + return nil, err + } + f, err := os.OpenFile(buildFilePath(uploadPath, model.ID), os.O_RDWR|os.O_CREATE, 0o666) if err != nil 
{ return nil, err } @@ -118,13 +127,13 @@ func (u *BlobUploader) Read(p []byte) (int, error) { return u.file.Read(p) } -// Remove deletes the data and the model of a blob upload +// RemoveBlobUploadByID Remove deletes the data and the model of a blob upload func RemoveBlobUploadByID(ctx context.Context, id string) error { if err := packages_model.DeleteBlobUploadByID(ctx, id); err != nil { return err } - err := os.Remove(buildFilePath(id)) + err := os.Remove(buildFilePath(uploadPathTempDir(), id)) if err != nil && !os.IsNotExist(err) { return err } diff --git a/services/packages/container/cleanup.go b/services/packages/container/cleanup.go index 3f5f43bbc0..263562a396 100644 --- a/services/packages/container/cleanup.go +++ b/services/packages/container/cleanup.go @@ -13,7 +13,7 @@ import ( container_module "code.gitea.io/gitea/modules/packages/container" packages_service "code.gitea.io/gitea/services/packages" - digest "github.com/opencontainers/go-digest" + "github.com/opencontainers/go-digest" ) // Cleanup removes expired container data @@ -57,7 +57,7 @@ func cleanupExpiredUploadedBlobs(ctx context.Context, olderThan time.Duration) e Type: packages_model.TypeContainer, Version: packages_model.SearchValue{ ExactMatch: true, - Value: container_model.UploadVersion, + Value: container_module.UploadVersion, }, IsInternal: optional.Some(true), HasFiles: optional.Some(false), diff --git a/services/packages/container/common.go b/services/packages/container/common.go index 5a14ed5b7a..02cbff2286 100644 --- a/services/packages/container/common.go +++ b/services/packages/container/common.go @@ -5,11 +5,17 @@ package container import ( "context" + "io" "strings" packages_model "code.gitea.io/gitea/models/packages" + container_service "code.gitea.io/gitea/models/packages/container" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/packages" container_module "code.gitea.io/gitea/modules/packages/container" + + "github.com/opencontainers/image-spec/specs-go/v1" ) // UpdateRepositoryNames updates the repository name property for all packages of the specific owner @@ -22,7 +28,7 @@ func UpdateRepositoryNames(ctx context.Context, owner *user_model.User, newOwner newOwnerName = strings.ToLower(newOwnerName) for _, p := range ps { - if err := packages_model.DeletePropertyByName(ctx, packages_model.PropertyTypePackage, p.ID, container_module.PropertyRepository); err != nil { + if err := packages_model.DeletePropertiesByName(ctx, packages_model.PropertyTypePackage, p.ID, container_module.PropertyRepository); err != nil { return err } @@ -33,3 +39,26 @@ func UpdateRepositoryNames(ctx context.Context, owner *user_model.User, newOwner return nil } + +func ParseManifestMetadata(ctx context.Context, rd io.Reader, ownerID int64, imageName string) (*v1.Manifest, *packages_model.PackageFileDescriptor, *container_module.Metadata, error) { + var manifest v1.Manifest + if err := json.NewDecoder(rd).Decode(&manifest); err != nil { + return nil, nil, nil, err + } + configDescriptor, err := container_service.GetContainerBlob(ctx, &container_service.BlobSearchOptions{ + OwnerID: ownerID, + Image: imageName, + Digest: manifest.Config.Digest.String(), + }) + if err != nil { + return nil, nil, nil, err + } + + configReader, err := packages.NewContentStore().OpenBlob(packages.BlobHash256Key(configDescriptor.Blob.HashSHA256)) + if err != nil { + return nil, nil, nil, err + } + defer configReader.Close() + metadata, err := 
container_module.ParseImageConfig(manifest.Config.MediaType, configReader) + return &manifest, configDescriptor, metadata, err +} diff --git a/services/packages/packages.go b/services/packages/packages.go index bd1d460fd3..517334cbc7 100644 --- a/services/packages/packages.go +++ b/services/packages/packages.go @@ -563,8 +563,8 @@ func DeletePackageFile(ctx context.Context, pf *packages_model.PackageFile) erro return packages_model.DeleteFileByID(ctx, pf.ID) } -// GetFileStreamByPackageNameAndVersion returns the content of the specific package file -func GetFileStreamByPackageNameAndVersion(ctx context.Context, pvi *PackageInfo, pfi *PackageFileInfo) (io.ReadSeekCloser, *url.URL, *packages_model.PackageFile, error) { +// OpenFileForDownloadByPackageNameAndVersion returns the content of the specific package file and increases the download counter. +func OpenFileForDownloadByPackageNameAndVersion(ctx context.Context, pvi *PackageInfo, pfi *PackageFileInfo) (io.ReadSeekCloser, *url.URL, *packages_model.PackageFile, error) { log.Trace("Getting package file stream: %v, %v, %s, %s, %s, %s", pvi.Owner.ID, pvi.PackageType, pvi.Name, pvi.Version, pfi.Filename, pfi.CompositeKey) pv, err := packages_model.GetVersionByNameAndVersion(ctx, pvi.Owner.ID, pvi.PackageType, pvi.Name, pvi.Version) @@ -576,32 +576,38 @@ func GetFileStreamByPackageNameAndVersion(ctx context.Context, pvi *PackageInfo, return nil, nil, nil, err } - return GetFileStreamByPackageVersion(ctx, pv, pfi) + return OpenFileForDownloadByPackageVersion(ctx, pv, pfi) } -// GetFileStreamByPackageVersion returns the content of the specific package file -func GetFileStreamByPackageVersion(ctx context.Context, pv *packages_model.PackageVersion, pfi *PackageFileInfo) (io.ReadSeekCloser, *url.URL, *packages_model.PackageFile, error) { +// OpenFileForDownloadByPackageVersion returns the content of the specific package file and increases the download counter. +func OpenFileForDownloadByPackageVersion(ctx context.Context, pv *packages_model.PackageVersion, pfi *PackageFileInfo) (io.ReadSeekCloser, *url.URL, *packages_model.PackageFile, error) { pf, err := packages_model.GetFileForVersionByName(ctx, pv.ID, pfi.Filename, pfi.CompositeKey) if err != nil { return nil, nil, nil, err } - return GetPackageFileStream(ctx, pf) + return OpenFileForDownload(ctx, pf) } -// GetPackageFileStream returns the content of the specific package file -func GetPackageFileStream(ctx context.Context, pf *packages_model.PackageFile) (io.ReadSeekCloser, *url.URL, *packages_model.PackageFile, error) { +// OpenFileForDownload returns the content of the specific package file and increases the download counter. +func OpenFileForDownload(ctx context.Context, pf *packages_model.PackageFile) (io.ReadSeekCloser, *url.URL, *packages_model.PackageFile, error) { pb, err := packages_model.GetBlobByID(ctx, pf.BlobID) if err != nil { return nil, nil, nil, err } - return GetPackageBlobStream(ctx, pf, pb, nil) + return OpenBlobForDownload(ctx, pf, pb, nil) } -// GetPackageBlobStream returns the content of the specific package blob +func OpenBlobStream(pb *packages_model.PackageBlob) (io.ReadSeekCloser, error) { + cs := packages_module.NewContentStore() + key := packages_module.BlobHash256Key(pb.HashSHA256) + return cs.OpenBlob(key) +} + +// OpenBlobForDownload returns the content of the specific package blob and increases the download counter. // If the storage supports direct serving and it's enabled, only the direct serving url is returned. 
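The renamed download helpers keep the same contract: the caller gets back either an open blob stream or, when the storage backend supports direct serving, only a pre-signed URL. A minimal caller-side sketch of that contract using just the standard library (serveBlob and its wiring are illustrative, not Gitea code):

package example

import (
	"io"
	"net/http"
	"net/url"
)

// serveBlob sketches how a route handler could consume the (stream, directURL)
// pair returned by OpenBlobForDownload: exactly one of the two is expected to be set.
func serveBlob(w http.ResponseWriter, r *http.Request, stream io.ReadSeekCloser, directURL *url.URL) {
	if directURL != nil {
		// direct serving enabled: redirect the client to the storage backend
		http.Redirect(w, r, directURL.String(), http.StatusTemporaryRedirect)
		return
	}
	defer stream.Close()
	w.Header().Set("Content-Type", "application/octet-stream")
	_, _ = io.Copy(w, stream) // copy errors can only be logged; headers are already sent
}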
-func GetPackageBlobStream(ctx context.Context, pf *packages_model.PackageFile, pb *packages_model.PackageBlob, serveDirectReqParams url.Values) (io.ReadSeekCloser, *url.URL, *packages_model.PackageFile, error) { +func OpenBlobForDownload(ctx context.Context, pf *packages_model.PackageFile, pb *packages_model.PackageBlob, serveDirectReqParams url.Values) (io.ReadSeekCloser, *url.URL, *packages_model.PackageFile, error) { key := packages_module.BlobHash256Key(pb.HashSHA256) cs := packages_module.NewContentStore() @@ -617,7 +623,7 @@ func GetPackageBlobStream(ctx context.Context, pf *packages_model.PackageFile, p } } if u == nil { - s, err = cs.Get(key) + s, err = cs.OpenBlob(key) } if err == nil { diff --git a/services/packages/rpm/repository.go b/services/packages/rpm/repository.go index a7d196c15c..fbbf8d7dad 100644 --- a/services/packages/rpm/repository.go +++ b/services/packages/rpm/repository.go @@ -408,7 +408,6 @@ func buildPrimary(ctx context.Context, pv *packages_model.PackageVersion, pfs [] files = append(files, f) } } - packageVersion := fmt.Sprintf("%s-%s", pd.FileMetadata.Version, pd.FileMetadata.Release) packages = append(packages, &Package{ Type: "rpm", Name: pd.Package.Name, @@ -437,7 +436,7 @@ func buildPrimary(ctx context.Context, pv *packages_model.PackageVersion, pfs [] Archive: pd.FileMetadata.ArchiveSize, }, Location: Location{ - Href: fmt.Sprintf("package/%s/%s/%s/%s-%s.%s.rpm", pd.Package.Name, packageVersion, pd.FileMetadata.Architecture, pd.Package.Name, packageVersion, pd.FileMetadata.Architecture), + Href: fmt.Sprintf("package/%s/%s/%s/%s-%s.%s.rpm", pd.Package.Name, pd.Version.Version, pd.FileMetadata.Architecture, pd.Package.Name, pd.Version.Version, pd.FileMetadata.Architecture), }, Format: Format{ License: pd.VersionMetadata.License, @@ -471,7 +470,7 @@ func buildPrimary(ctx context.Context, pv *packages_model.PackageVersion, pfs [] } // https://docs.pulpproject.org/en/2.19/plugins/pulp_rpm/tech-reference/rpm.html#filelists-xml -func buildFilelists(ctx context.Context, pv *packages_model.PackageVersion, pfs []*packages_model.PackageFile, c packageCache, group string) (*repoData, error) { //nolint:dupl +func buildFilelists(ctx context.Context, pv *packages_model.PackageVersion, pfs []*packages_model.PackageFile, c packageCache, group string) (*repoData, error) { //nolint:dupl // duplicates with buildOther type Version struct { Epoch string `xml:"epoch,attr"` Version string `xml:"ver,attr"` @@ -518,7 +517,7 @@ func buildFilelists(ctx context.Context, pv *packages_model.PackageVersion, pfs } // https://docs.pulpproject.org/en/2.19/plugins/pulp_rpm/tech-reference/rpm.html#other-xml -func buildOther(ctx context.Context, pv *packages_model.PackageVersion, pfs []*packages_model.PackageFile, c packageCache, group string) (*repoData, error) { //nolint:dupl +func buildOther(ctx context.Context, pv *packages_model.PackageVersion, pfs []*packages_model.PackageFile, c packageCache, group string) (*repoData, error) { //nolint:dupl // duplicates with buildFilelists type Version struct { Epoch string `xml:"epoch,attr"` Version string `xml:"ver,attr"` diff --git a/services/projects/issue.go b/services/projects/issue.go index 090d19d2f4..590fe960d5 100644 --- a/services/projects/issue.go +++ b/services/projects/issue.go @@ -5,7 +5,7 @@ package project import ( "context" - "fmt" + "errors" "code.gitea.io/gitea/models/db" issues_model "code.gitea.io/gitea/models/issues" @@ -29,7 +29,7 @@ func MoveIssuesOnProjectColumn(ctx context.Context, doer *user_model.User, colum return err } 
if int(count) != len(sortedIssueIDs) { - return fmt.Errorf("all issues have to be added to a project first") + return errors.New("all issues have to be added to a project first") } issues, err := issues_model.GetIssuesByIDs(ctx, issueIDs) diff --git a/services/projects/issue_test.go b/services/projects/issue_test.go index b6f0b1dae1..e76d31e757 100644 --- a/services/projects/issue_test.go +++ b/services/projects/issue_test.go @@ -130,7 +130,7 @@ func Test_Projects(t *testing.T) { }) assert.NoError(t, err) assert.Len(t, projects, 1) - assert.EqualValues(t, project1.ID, projects[0].ID) + assert.Equal(t, project1.ID, projects[0].ID) t.Run("Authenticated user", func(t *testing.T) { columnIssues, err := LoadIssuesFromProject(db.DefaultContext, projects[0], &issues_model.IssuesOptions{ diff --git a/services/pull/check.go b/services/pull/check.go index 9b159891d7..5d8990aa00 100644 --- a/services/pull/check.go +++ b/services/pull/check.go @@ -10,6 +10,7 @@ import ( "fmt" "strconv" "strings" + "time" "code.gitea.io/gitea/models/db" git_model "code.gitea.io/gitea/models/git" @@ -25,6 +26,7 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/process" "code.gitea.io/gitea/modules/queue" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" asymkey_service "code.gitea.io/gitea/services/asymkey" notify_service "code.gitea.io/gitea/services/notify" @@ -34,27 +36,88 @@ import ( var prPatchCheckerQueue *queue.WorkerPoolQueue[string] var ( - ErrIsClosed = errors.New("pull is closed") - ErrUserNotAllowedToMerge = ErrDisallowedToMerge{} - ErrHasMerged = errors.New("has already been merged") - ErrIsWorkInProgress = errors.New("work in progress PRs cannot be merged") - ErrIsChecking = errors.New("cannot merge while conflict checking is in progress") - ErrNotMergeableState = errors.New("not in mergeable state") - ErrDependenciesLeft = errors.New("is blocked by an open dependency") + ErrIsClosed = errors.New("pull is closed") + ErrNoPermissionToMerge = errors.New("no permission to merge") + ErrNotReadyToMerge = errors.New("not ready to merge") + ErrHasMerged = errors.New("has already been merged") + ErrIsWorkInProgress = errors.New("work in progress PRs cannot be merged") + ErrIsChecking = errors.New("cannot merge while conflict checking is in progress") + ErrNotMergeableState = errors.New("not in mergeable state") + ErrDependenciesLeft = errors.New("is blocked by an open dependency") ) -// AddToTaskQueue adds itself to pull request test task queue. 
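Replacing the ErrDisallowedToMerge type with plain sentinel errors lets callers detect merge blockers with errors.Is instead of a custom IsErrDisallowedToMerge helper. A stdlib-only sketch of the pattern (util.ErrorWrap is Gitea's wrapping helper; fmt.Errorf with %w shown here plays the same role, and checkProtections is a made-up stand-in for CheckPullBranchProtections):

package example

import (
	"errors"
	"fmt"
)

var ErrNotReadyToMerge = errors.New("not ready to merge")

// checkProtections wraps the same sentinel for every failed branch protection,
// so callers need only a single errors.Is check to tell "blocked" from "broken".
func checkProtections(approvals, required int) error {
	if approvals < required {
		return fmt.Errorf("%w: does not have enough approvals", ErrNotReadyToMerge)
	}
	return nil
}

func handle() {
	if err := checkProtections(1, 2); errors.Is(err, ErrNotReadyToMerge) {
		fmt.Println("merge blocked:", err) // a real handler would return this to the user
	}
}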
-func AddToTaskQueue(ctx context.Context, pr *issues_model.PullRequest) { +func markPullRequestStatusAsChecking(ctx context.Context, pr *issues_model.PullRequest) bool { pr.Status = issues_model.PullRequestStatusChecking err := pr.UpdateColsIfNotMerged(ctx, "status") if err != nil { - log.Error("AddToTaskQueue(%-v).UpdateCols.(add to queue): %v", pr, err) + log.Error("UpdateColsIfNotMerged failed, pr: %-v, err: %v", pr, err) + return false + } + pr, err = issues_model.GetPullRequestByID(ctx, pr.ID) + if err != nil { + log.Error("GetPullRequestByID failed, pr: %-v, err: %v", pr, err) + return false + } + return pr.Status == issues_model.PullRequestStatusChecking +} + +var AddPullRequestToCheckQueue = realAddPullRequestToCheckQueue + +func realAddPullRequestToCheckQueue(prID int64) { + err := prPatchCheckerQueue.Push(strconv.FormatInt(prID, 10)) + if err != nil && !errors.Is(err, queue.ErrAlreadyInQueue) { + log.Error("Error adding %v to the pull requests check queue: %v", prID, err) + } +} + +func StartPullRequestCheckImmediately(ctx context.Context, pr *issues_model.PullRequest) { + if !markPullRequestStatusAsChecking(ctx, pr) { return } - log.Trace("Adding %-v to the test pull requests queue", pr) - err = prPatchCheckerQueue.Push(strconv.FormatInt(pr.ID, 10)) - if err != nil && err != queue.ErrAlreadyInQueue { - log.Error("Error adding %-v to the test pull requests queue: %v", pr, err) + AddPullRequestToCheckQueue(pr.ID) +} + +// StartPullRequestCheckDelayable will delay the check if the pull request was not updated recently. +// When the "base" branch gets updated, all PRs targeting that "base" branch need to re-check whether +// they are mergeable. +// When there are too many stale PRs, each "base" branch update will consume a lot of system resources. +// So we can delay the checks for PRs that were not updated recently, only mark their status as +// "checking", and then next time when these PRs are updated or viewed, the real checks will run. +func StartPullRequestCheckDelayable(ctx context.Context, pr *issues_model.PullRequest) { + if !markPullRequestStatusAsChecking(ctx, pr) { + return + } + + if setting.Repository.PullRequest.DelayCheckForInactiveDays >= 0 { + if err := pr.LoadIssue(ctx); err != nil { + return + } + duration := 24 * time.Hour * time.Duration(setting.Repository.PullRequest.DelayCheckForInactiveDays) + if pr.Issue.UpdatedUnix.AddDuration(duration) <= timeutil.TimeStampNow() { + return + } + } + + AddPullRequestToCheckQueue(pr.ID) +} + +func StartPullRequestCheckOnView(ctx context.Context, pr *issues_model.PullRequest) { + // TODO: its correctness totally depends on the "unique queue" feature and the global lock. + // So duplicate "start" requests will be ignored if there is already a task in the queue or one is running. + // Ideally in the future we should decouple the "unique queue" feature from the "start" request. + if pr.Status == issues_model.PullRequestStatusChecking { + if setting.IsInTesting { + // In testing mode, there might be an "immediate" queue, which is not a real queue, everything is executed in the same goroutine + // So we can't use the global lock here, otherwise it will cause a deadlock. 
+ AddPullRequestToCheckQueue(pr.ID) + } else { + // When a PR check starts, the task is popped from the queue and the task handler acquires the global lock + // So we need to acquire the global lock here to prevent from duplicate tasks + _, _ = globallock.TryLockAndDo(ctx, getPullWorkingLockKey(pr.ID), func(ctx context.Context) error { + AddPullRequestToCheckQueue(pr.ID) // the queue is a unique queue and won't add the same task again + return nil + }) + } } } @@ -84,7 +147,7 @@ func CheckPullMergeable(stdCtx context.Context, doer *user_model.User, perm *acc log.Error("Error whilst checking if %-v is allowed to merge %-v: %v", doer, pr, err) return err } else if !allowedMerge { - return ErrUserNotAllowedToMerge + return ErrNoPermissionToMerge } if mergeCheckType == MergeCheckTypeManually { @@ -105,7 +168,7 @@ func CheckPullMergeable(stdCtx context.Context, doer *user_model.User, perm *acc } if err := CheckPullBranchProtections(ctx, pr, false); err != nil { - if !IsErrDisallowedToMerge(err) { + if !errors.Is(err, ErrNotReadyToMerge) { log.Error("Error whilst checking pull branch protection for %-v: %v", pr, err) return err } @@ -172,10 +235,10 @@ func isSignedIfRequired(ctx context.Context, pr *issues_model.PullRequest, doer return sign, err } -// checkAndUpdateStatus checks if pull request is possible to leaving checking status, +// markPullRequestAsMergeable checks if pull request is possible to leaving checking status, // and set to be either conflict or mergeable. -func checkAndUpdateStatus(ctx context.Context, pr *issues_model.PullRequest) { - // If status has not been changed to conflict by testPatch then we are mergeable +func markPullRequestAsMergeable(ctx context.Context, pr *issues_model.PullRequest) { + // If status has not been changed to conflict by testPullRequestTmpRepoBranchMergeable then we are mergeable if pr.Status == issues_model.PullRequestStatusChecking { pr.Status = issues_model.PullRequestStatusMergeable } @@ -310,6 +373,10 @@ func manuallyMerged(ctx context.Context, pr *issues_model.PullRequest) bool { // InitializePullRequests checks and tests untested patches of pull requests. 
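StartPullRequestCheckDelayable only enqueues the real mergeability check when the pull request saw recent activity; otherwise it just flips the status to "checking" and relies on the next update or view to trigger the check. The gate can be restated as a pure function (a simplified sketch, not the Gitea implementation; delayDays stands in for DelayCheckForInactiveDays):

package example

import "time"

// shouldEnqueueNow reports whether a PR's check should be queued immediately.
// delayDays < 0 disables the delay feature entirely; otherwise PRs whose last
// update is at least delayDays*24h in the past are only marked as "checking".
func shouldEnqueueNow(lastUpdated, now time.Time, delayDays int) bool {
	if delayDays < 0 {
		return true // feature disabled: behave like StartPullRequestCheckImmediately
	}
	return now.Sub(lastUpdated) < time.Duration(delayDays)*24*time.Hour
}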
func InitializePullRequests(ctx context.Context) { + // If we prefer to delay the checks, then no need to do any check during startup, there should be not much difference + if setting.Repository.PullRequest.DelayCheckForInactiveDays >= 0 { + return + } prs, err := issues_model.GetPullRequestIDsByCheckStatus(ctx, issues_model.PullRequestStatusChecking) if err != nil { log.Error("Find Checking PRs: %v", err) @@ -320,24 +387,12 @@ func InitializePullRequests(ctx context.Context) { case <-ctx.Done(): return default: - log.Trace("Adding PR[%d] to the pull requests patch checking queue", prID) - if err := prPatchCheckerQueue.Push(strconv.FormatInt(prID, 10)); err != nil { - log.Error("Error adding PR[%d] to the pull requests patch checking queue %v", prID, err) - } + AddPullRequestToCheckQueue(prID) } } } -// handle passed PR IDs and test the PRs -func handler(items ...string) []string { - for _, s := range items { - id, _ := strconv.ParseInt(s, 10, 64) - testPR(id) - } - return nil -} - -func testPR(id int64) { +func checkPullRequestMergeable(id int64) { ctx := graceful.GetManager().HammerContext() releaser, err := globallock.Lock(ctx, getPullWorkingLockKey(id)) if err != nil { @@ -351,7 +406,7 @@ func testPR(id int64) { pr, err := issues_model.GetPullRequestByID(ctx, id) if err != nil { - log.Error("Unable to GetPullRequestByID[%d] for testPR: %v", id, err) + log.Error("Unable to GetPullRequestByID[%d] for checkPullRequestMergeable: %v", id, err) return } @@ -370,15 +425,15 @@ func testPR(id int64) { return } - if err := TestPatch(pr); err != nil { - log.Error("testPatch[%-v]: %v", pr, err) + if err := testPullRequestBranchMergeable(pr); err != nil { + log.Error("testPullRequestTmpRepoBranchMergeable[%-v]: %v", pr, err) pr.Status = issues_model.PullRequestStatusError if err := pr.UpdateCols(ctx, "status"); err != nil { log.Error("update pr [%-v] status to PullRequestStatusError failed: %v", pr, err) } return } - checkAndUpdateStatus(ctx, pr) + markPullRequestAsMergeable(ctx, pr) } // CheckPRsForBaseBranch check all pulls with baseBrannch @@ -387,20 +442,24 @@ func CheckPRsForBaseBranch(ctx context.Context, baseRepo *repo_model.Repository, if err != nil { return err } - for _, pr := range prs { - AddToTaskQueue(ctx, pr) + StartPullRequestCheckImmediately(ctx, pr) } - return nil } // Init runs the task queue to test all the checking status pull requests func Init() error { - prPatchCheckerQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "pr_patch_checker", handler) + prPatchCheckerQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "pr_patch_checker", func(items ...string) []string { + for _, s := range items { + id, _ := strconv.ParseInt(s, 10, 64) + checkPullRequestMergeable(id) + } + return nil + }) if prPatchCheckerQueue == nil { - return fmt.Errorf("unable to create pr_patch_checker queue") + return errors.New("unable to create pr_patch_checker queue") } go graceful.GetManager().RunWithCancel(prPatchCheckerQueue) diff --git a/services/pull/check_test.go b/services/pull/check_test.go index 5508a70f45..fa3a676ef1 100644 --- a/services/pull/check_test.go +++ b/services/pull/check_test.go @@ -36,7 +36,7 @@ func TestPullRequest_AddToTaskQueue(t *testing.T) { assert.NoError(t, err) pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}) - AddToTaskQueue(db.DefaultContext, pr) + StartPullRequestCheckImmediately(db.DefaultContext, pr) assert.Eventually(t, func() bool { pr = unittest.AssertExistsAndLoadBean(t, 
&issues_model.PullRequest{ID: 2}) @@ -51,7 +51,7 @@ func TestPullRequest_AddToTaskQueue(t *testing.T) { select { case id := <-idChan: - assert.EqualValues(t, pr.ID, id) + assert.Equal(t, pr.ID, id) case <-time.After(time.Second): assert.FailNow(t, "Timeout: nothing was added to pullRequestQueue") } diff --git a/services/pull/commit_status.go b/services/pull/commit_status.go index 0bfff21746..7952ca6fe3 100644 --- a/services/pull/commit_status.go +++ b/services/pull/commit_status.go @@ -10,93 +10,59 @@ import ( "code.gitea.io/gitea/models/db" git_model "code.gitea.io/gitea/models/git" issues_model "code.gitea.io/gitea/models/issues" + "code.gitea.io/gitea/modules/commitstatus" "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/log" - "code.gitea.io/gitea/modules/structs" "github.com/gobwas/glob" "github.com/pkg/errors" ) // MergeRequiredContextsCommitStatus returns a commit status state for given required contexts -func MergeRequiredContextsCommitStatus(commitStatuses []*git_model.CommitStatus, requiredContexts []string) structs.CommitStatusState { - // matchedCount is the number of `CommitStatus.Context` that match any context of `requiredContexts` - matchedCount := 0 - returnedStatus := structs.CommitStatusSuccess - - if len(requiredContexts) > 0 { - requiredContextsGlob := make(map[string]glob.Glob, len(requiredContexts)) - for _, ctx := range requiredContexts { - if gp, err := glob.Compile(ctx); err != nil { - log.Error("glob.Compile %s failed. Error: %v", ctx, err) - } else { - requiredContextsGlob[ctx] = gp - } - } - - for _, gp := range requiredContextsGlob { - var targetStatus structs.CommitStatusState - for _, commitStatus := range commitStatuses { - if gp.Match(commitStatus.Context) { - targetStatus = commitStatus.State - matchedCount++ - break - } - } - - // If required rule not match any action, then it is pending - if targetStatus == "" { - if structs.CommitStatusPending.NoBetterThan(returnedStatus) { - returnedStatus = structs.CommitStatusPending - } - break - } - - if targetStatus.NoBetterThan(returnedStatus) { - returnedStatus = targetStatus - } - } +func MergeRequiredContextsCommitStatus(commitStatuses []*git_model.CommitStatus, requiredContexts []string) commitstatus.CommitStatusState { + if len(commitStatuses) == 0 { + return commitstatus.CommitStatusPending } - if matchedCount == 0 && returnedStatus == structs.CommitStatusSuccess { - status := git_model.CalcCommitStatus(commitStatuses) - if status != nil { - return status.State - } - return structs.CommitStatusSuccess + if len(requiredContexts) == 0 { + return git_model.CalcCommitStatus(commitStatuses).State } - return returnedStatus -} - -// IsCommitStatusContextSuccess returns true if all required status check contexts succeed. -func IsCommitStatusContextSuccess(commitStatuses []*git_model.CommitStatus, requiredContexts []string) bool { - // If no specific context is required, require that last commit status is a success - if len(requiredContexts) == 0 { - status := git_model.CalcCommitStatus(commitStatuses) - if status == nil || status.State != structs.CommitStatusSuccess { - return false + requiredContextsGlob := make(map[string]glob.Glob, len(requiredContexts)) + for _, ctx := range requiredContexts { + if gp, err := glob.Compile(ctx); err != nil { + log.Error("glob.Compile %s failed. 
Error: %v", ctx, err) + } else { + requiredContextsGlob[ctx] = gp } - return true } - for _, ctx := range requiredContexts { - var found bool + requiredCommitStatuses := make([]*git_model.CommitStatus, 0, len(commitStatuses)) + allRequiredContextsMatched := true + for _, gp := range requiredContextsGlob { + requiredContextMatched := false for _, commitStatus := range commitStatuses { - if commitStatus.Context == ctx { - if commitStatus.State != structs.CommitStatusSuccess { - return false - } - - found = true - break + if gp.Match(commitStatus.Context) { + requiredCommitStatuses = append(requiredCommitStatuses, commitStatus) + requiredContextMatched = true } } - if !found { - return false - } + allRequiredContextsMatched = allRequiredContextsMatched && requiredContextMatched + } + if len(requiredCommitStatuses) == 0 { + return commitstatus.CommitStatusPending + } + + returnedStatus := git_model.CalcCommitStatus(requiredCommitStatuses).State + if allRequiredContextsMatched { + return returnedStatus + } + + if returnedStatus == commitstatus.CommitStatusFailure { + return commitstatus.CommitStatusFailure } - return true + // even if part of success, return pending + return commitstatus.CommitStatusPending } // IsPullCommitStatusPass returns if all required status checks PASS @@ -117,7 +83,7 @@ func IsPullCommitStatusPass(ctx context.Context, pr *issues_model.PullRequest) ( } // GetPullRequestCommitStatusState returns pull request merged commit status state -func GetPullRequestCommitStatusState(ctx context.Context, pr *issues_model.PullRequest) (structs.CommitStatusState, error) { +func GetPullRequestCommitStatusState(ctx context.Context, pr *issues_model.PullRequest) (commitstatus.CommitStatusState, error) { // Ensure HeadRepo is loaded if err := pr.LoadHeadRepo(ctx); err != nil { return "", errors.Wrap(err, "LoadHeadRepo") @@ -151,7 +117,7 @@ func GetPullRequestCommitStatusState(ctx context.Context, pr *issues_model.PullR return "", errors.Wrap(err, "LoadBaseRepo") } - commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, pr.BaseRepo.ID, sha, db.ListOptionsAll) + commitStatuses, err := git_model.GetLatestCommitStatus(ctx, pr.BaseRepo.ID, sha, db.ListOptionsAll) if err != nil { return "", errors.Wrap(err, "GetLatestCommitStatus") } diff --git a/services/pull/commit_status_test.go b/services/pull/commit_status_test.go index 592acdd55c..a58e788c04 100644 --- a/services/pull/commit_status_test.go +++ b/services/pull/commit_status_test.go @@ -8,58 +8,85 @@ import ( "testing" git_model "code.gitea.io/gitea/models/git" - "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/commitstatus" "github.com/stretchr/testify/assert" ) func TestMergeRequiredContextsCommitStatus(t *testing.T) { - testCases := [][]*git_model.CommitStatus{ + cases := []struct { + commitStatuses []*git_model.CommitStatus + requiredContexts []string + expected commitstatus.CommitStatusState + }{ { - {Context: "Build 1", State: structs.CommitStatusSuccess}, - {Context: "Build 2", State: structs.CommitStatusSuccess}, - {Context: "Build 3", State: structs.CommitStatusSuccess}, + commitStatuses: []*git_model.CommitStatus{}, + requiredContexts: []string{}, + expected: commitstatus.CommitStatusPending, }, { - {Context: "Build 1", State: structs.CommitStatusSuccess}, - {Context: "Build 2", State: structs.CommitStatusSuccess}, - {Context: "Build 2t", State: structs.CommitStatusPending}, + commitStatuses: []*git_model.CommitStatus{ + {Context: "Build xxx", State: commitstatus.CommitStatusSkipped}, + }, + 
requiredContexts: []string{"Build*"}, + expected: commitstatus.CommitStatusSuccess, }, { - {Context: "Build 1", State: structs.CommitStatusSuccess}, - {Context: "Build 2", State: structs.CommitStatusSuccess}, - {Context: "Build 2t", State: structs.CommitStatusFailure}, + commitStatuses: []*git_model.CommitStatus{ + {Context: "Build 1", State: commitstatus.CommitStatusSkipped}, + {Context: "Build 2", State: commitstatus.CommitStatusSuccess}, + {Context: "Build 3", State: commitstatus.CommitStatusSuccess}, + }, + requiredContexts: []string{"Build*"}, + expected: commitstatus.CommitStatusSuccess, }, { - {Context: "Build 1", State: structs.CommitStatusSuccess}, - {Context: "Build 2", State: structs.CommitStatusSuccess}, - {Context: "Build 2t", State: structs.CommitStatusSuccess}, + commitStatuses: []*git_model.CommitStatus{ + {Context: "Build 1", State: commitstatus.CommitStatusSuccess}, + {Context: "Build 2", State: commitstatus.CommitStatusSuccess}, + {Context: "Build 2t", State: commitstatus.CommitStatusPending}, + }, + requiredContexts: []string{"Build*", "Build 2t*"}, + expected: commitstatus.CommitStatusPending, }, { - {Context: "Build 1", State: structs.CommitStatusSuccess}, - {Context: "Build 2", State: structs.CommitStatusSuccess}, - {Context: "Build 2t", State: structs.CommitStatusSuccess}, + commitStatuses: []*git_model.CommitStatus{ + {Context: "Build 1", State: commitstatus.CommitStatusSuccess}, + {Context: "Build 2", State: commitstatus.CommitStatusSuccess}, + {Context: "Build 2t", State: commitstatus.CommitStatusFailure}, + }, + requiredContexts: []string{"Build*", "Build 2t*"}, + expected: commitstatus.CommitStatusFailure, + }, + { + commitStatuses: []*git_model.CommitStatus{ + {Context: "Build 1", State: commitstatus.CommitStatusSuccess}, + {Context: "Build 2", State: commitstatus.CommitStatusSuccess}, + {Context: "Build 2t", State: commitstatus.CommitStatusFailure}, + }, + requiredContexts: []string{"Build*"}, + expected: commitstatus.CommitStatusFailure, + }, + { + commitStatuses: []*git_model.CommitStatus{ + {Context: "Build 1", State: commitstatus.CommitStatusSuccess}, + {Context: "Build 2", State: commitstatus.CommitStatusSuccess}, + {Context: "Build 2t", State: commitstatus.CommitStatusSuccess}, + }, + requiredContexts: []string{"Build*", "Build 2t*", "Build 3*"}, + expected: commitstatus.CommitStatusPending, + }, + { + commitStatuses: []*git_model.CommitStatus{ + {Context: "Build 1", State: commitstatus.CommitStatusSuccess}, + {Context: "Build 2", State: commitstatus.CommitStatusSuccess}, + {Context: "Build 2t", State: commitstatus.CommitStatusSuccess}, + }, + requiredContexts: []string{"Build*", "Build *", "Build 2t*", "Build 1*"}, + expected: commitstatus.CommitStatusSuccess, }, } - testCasesRequiredContexts := [][]string{ - {"Build*"}, - {"Build*", "Build 2t*"}, - {"Build*", "Build 2t*"}, - {"Build*", "Build 2t*", "Build 3*"}, - {"Build*", "Build *", "Build 2t*", "Build 1*"}, - } - - testCasesExpected := []structs.CommitStatusState{ - structs.CommitStatusSuccess, - structs.CommitStatusPending, - structs.CommitStatusFailure, - structs.CommitStatusPending, - structs.CommitStatusSuccess, - } - - for i, commitStatuses := range testCases { - if MergeRequiredContextsCommitStatus(commitStatuses, testCasesRequiredContexts[i]) != testCasesExpected[i] { - assert.Fail(t, "Test case failed", "Test case %d failed", i+1) - } + for i, c := range cases { + assert.Equal(t, c.expected, MergeRequiredContextsCommitStatus(c.commitStatuses, c.requiredContexts), "case %d", i) } } diff 
--git a/services/pull/merge.go b/services/pull/merge.go index 1e1ca55bc1..cd9aeb2ad1 100644 --- a/services/pull/merge.go +++ b/services/pull/merge.go @@ -6,12 +6,15 @@ package pull import ( "context" + "errors" "fmt" + "maps" "os" "path/filepath" "regexp" "strconv" "strings" + "unicode" "code.gitea.io/gitea/models/db" git_model "code.gitea.io/gitea/models/git" @@ -59,7 +62,7 @@ func getMergeMessage(ctx context.Context, baseGitRepo *git.Repository, pr *issue issueReference = "!" } - reviewedOn := fmt.Sprintf("Reviewed-on: %s", httplib.MakeAbsoluteURL(ctx, pr.Issue.Link())) + reviewedOn := "Reviewed-on: " + httplib.MakeAbsoluteURL(ctx, pr.Issue.Link()) reviewedBy := pr.GetApprovers(ctx) if mergeStyle != "" { @@ -93,9 +96,7 @@ func getMergeMessage(ctx context.Context, baseGitRepo *git.Repository, pr *issue vars["HeadRepoOwnerName"] = pr.HeadRepo.OwnerName vars["HeadRepoName"] = pr.HeadRepo.Name } - for extraKey, extraValue := range extraVars { - vars[extraKey] = extraValue - } + maps.Copy(vars, extraVars) refs, err := pr.ResolveCrossReferences(ctx) if err == nil { closeIssueIndexes := make([]string, 0, len(refs)) @@ -160,6 +161,41 @@ func GetDefaultMergeMessage(ctx context.Context, baseGitRepo *git.Repository, pr return getMergeMessage(ctx, baseGitRepo, pr, mergeStyle, nil) } +func AddCommitMessageTailer(message, tailerKey, tailerValue string) string { + tailerLine := tailerKey + ": " + tailerValue + message = strings.ReplaceAll(message, "\r\n", "\n") + message = strings.ReplaceAll(message, "\r", "\n") + if strings.Contains(message, "\n"+tailerLine+"\n") || strings.HasSuffix(message, "\n"+tailerLine) { + return message + } + + if !strings.HasSuffix(message, "\n") { + message += "\n" + } + pos1 := strings.LastIndexByte(message[:len(message)-1], '\n') + pos2 := -1 + if pos1 != -1 { + pos2 = strings.IndexByte(message[pos1:], ':') + if pos2 != -1 { + pos2 += pos1 + } + } + var lastLineKey string + if pos1 != -1 && pos2 != -1 { + lastLineKey = message[pos1+1 : pos2] + } + + isLikelyTailerLine := lastLineKey != "" && unicode.IsUpper(rune(lastLineKey[0])) && strings.Contains(message, "-") + for i := 0; isLikelyTailerLine && i < len(lastLineKey); i++ { + r := rune(lastLineKey[i]) + isLikelyTailerLine = unicode.IsLetter(r) || unicode.IsDigit(r) || r == '-' + } + if !strings.HasSuffix(message, "\n\n") && !isLikelyTailerLine { + message += "\n" + } + return message + tailerLine +} + // ErrInvalidMergeStyle represents an error if merging with disabled merge strategy type ErrInvalidMergeStyle struct { ID int64 @@ -289,7 +325,7 @@ func handleCloseCrossReferences(ctx context.Context, pr *issues_model.PullReques } // doMergeAndPush performs the merge operation without changing any pull information in database and pushes it up to the base repository -func doMergeAndPush(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, mergeStyle repo_model.MergeStyle, expectedHeadCommitID, message string, pushTrigger repo_module.PushTrigger) (string, error) { //nolint:unparam +func doMergeAndPush(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, mergeStyle repo_model.MergeStyle, expectedHeadCommitID, message string, pushTrigger repo_module.PushTrigger) (string, error) { //nolint:unparam // non-error result is never used // Clone base repo. 
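The new AddCommitMessageTailer normalizes CR/LF line endings, refuses to duplicate an identical trailer line, and only inserts a separating blank line when the message does not already end in a trailer-looking "Key-Name: value" line. A usage sketch (exampleCommitTrailers is illustrative, assumes the helper above is in scope, and its expected strings mirror the merge_test.go cases added later in this diff):

// exampleCommitTrailers shows the intended behaviour of AddCommitMessageTailer.
func exampleCommitTrailers() {
	m := AddCommitMessageTailer("title", "Co-authored-by", "Alice <alice@example.com>")
	// m == "title\n\nCo-authored-by: Alice <alice@example.com>"

	m = AddCommitMessageTailer(m, "Co-authored-by", "Alice <alice@example.com>")
	// unchanged: an identical trailer line is never added twice

	m = AddCommitMessageTailer(m, "Co-committed-by", "Alice <alice@example.com>")
	// appended directly below the existing trailer with no extra blank line,
	// because the last line already looks like a "Key-Name: value" trailer
	_ = m
}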
mergeCtx, cancel, err := createTemporaryRepoForMerge(ctx, pr, doer, expectedHeadCommitID) if err != nil { @@ -395,10 +431,13 @@ func doMergeAndPush(ctx context.Context, pr *issues_model.PullRequest, doer *use func commitAndSignNoAuthor(ctx *mergeContext, message string) error { cmdCommit := git.NewCommand("commit").AddOptionFormat("--message=%s", message) - if ctx.signKeyID == "" { + if ctx.signKey == nil { cmdCommit.AddArguments("--no-gpg-sign") } else { - cmdCommit.AddOptionFormat("-S%s", ctx.signKeyID) + if ctx.signKey.Format != "" { + cmdCommit.AddConfig("gpg.format", ctx.signKey.Format) + } + cmdCommit.AddOptionFormat("-S%s", ctx.signKey.KeyID) } if err := cmdCommit.Run(ctx, ctx.RunOpts()); err != nil { log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) @@ -517,25 +556,6 @@ func IsUserAllowedToMerge(ctx context.Context, pr *issues_model.PullRequest, p a return false, nil } -// ErrDisallowedToMerge represents an error that a branch is protected and the current user is not allowed to modify it. -type ErrDisallowedToMerge struct { - Reason string -} - -// IsErrDisallowedToMerge checks if an error is an ErrDisallowedToMerge. -func IsErrDisallowedToMerge(err error) bool { - _, ok := err.(ErrDisallowedToMerge) - return ok -} - -func (err ErrDisallowedToMerge) Error() string { - return fmt.Sprintf("not allowed to merge [reason: %s]", err.Reason) -} - -func (err ErrDisallowedToMerge) Unwrap() error { - return util.ErrPermissionDenied -} - // CheckPullBranchProtections checks whether the PR is ready to be merged (reviews and status checks) func CheckPullBranchProtections(ctx context.Context, pr *issues_model.PullRequest, skipProtectedFilesCheck bool) (err error) { if err = pr.LoadBaseRepo(ctx); err != nil { @@ -555,31 +575,21 @@ func CheckPullBranchProtections(ctx context.Context, pr *issues_model.PullReques return err } if !isPass { - return ErrDisallowedToMerge{ - Reason: "Not all required status checks successful", - } + return util.ErrorWrap(ErrNotReadyToMerge, "Not all required status checks successful") } if !issues_model.HasEnoughApprovals(ctx, pb, pr) { - return ErrDisallowedToMerge{ - Reason: "Does not have enough approvals", - } + return util.ErrorWrap(ErrNotReadyToMerge, "Does not have enough approvals") } if issues_model.MergeBlockedByRejectedReview(ctx, pb, pr) { - return ErrDisallowedToMerge{ - Reason: "There are requested changes", - } + return util.ErrorWrap(ErrNotReadyToMerge, "There are requested changes") } if issues_model.MergeBlockedByOfficialReviewRequests(ctx, pb, pr) { - return ErrDisallowedToMerge{ - Reason: "There are official review requests", - } + return util.ErrorWrap(ErrNotReadyToMerge, "There are official review requests") } if issues_model.MergeBlockedByOutdatedBranch(pb, pr) { - return ErrDisallowedToMerge{ - Reason: "The head branch is behind the base branch", - } + return util.ErrorWrap(ErrNotReadyToMerge, "The head branch is behind the base branch") } if skipProtectedFilesCheck { @@ -587,9 +597,7 @@ func CheckPullBranchProtections(ctx context.Context, pr *issues_model.PullReques } if pb.MergeBlockedByProtectedFiles(pr.ChangedProtectedFiles) { - return ErrDisallowedToMerge{ - Reason: "Changed protected files", - } + return util.ErrorWrap(ErrNotReadyToMerge, "Changed protected files") } return nil @@ -621,13 +629,13 @@ func MergedManually(ctx context.Context, pr *issues_model.PullRequest, doer *use objectFormat := git.ObjectFormatFromName(pr.BaseRepo.ObjectFormatName) if len(commitID) != objectFormat.FullLength() { 
- return fmt.Errorf("Wrong commit ID") + return errors.New("Wrong commit ID") } commit, err := baseGitRepo.GetCommit(commitID) if err != nil { if git.IsErrNotExist(err) { - return fmt.Errorf("Wrong commit ID") + return errors.New("Wrong commit ID") } return err } @@ -638,14 +646,14 @@ func MergedManually(ctx context.Context, pr *issues_model.PullRequest, doer *use return err } if !ok { - return fmt.Errorf("Wrong commit ID") + return errors.New("Wrong commit ID") } var merged bool if merged, err = SetMerged(ctx, pr, commitID, timeutil.TimeStamp(commit.Author.When.Unix()), doer, issues_model.PullRequestStatusManuallyMerged); err != nil { return err } else if !merged { - return fmt.Errorf("SetMerged failed") + return errors.New("SetMerged failed") } return nil }) @@ -708,7 +716,7 @@ func SetMerged(ctx context.Context, pr *issues_model.PullRequest, mergedCommitID return false, fmt.Errorf("ChangeIssueStatus: %w", err) } - // We need to save all of the data used to compute this merge as it may have already been changed by TestPatch. FIXME: need to set some state to prevent TestPatch from running whilst we are merging. + // We need to save all of the data used to compute this merge as it may have already been changed by testPullRequestBranchMergeable. FIXME: need to set some state to prevent testPullRequestBranchMergeable from running whilst we are merging. if cnt, err := db.GetEngine(ctx).Where("id = ?", pr.ID). And("has_merged = ?", false). Cols("has_merged, status, merge_base, merged_commit_id, merger_id, merged_unix, conflicted_files"). diff --git a/services/pull/merge_prepare.go b/services/pull/merge_prepare.go index 593cba550a..31a1e13734 100644 --- a/services/pull/merge_prepare.go +++ b/services/pull/merge_prepare.go @@ -23,11 +23,11 @@ import ( ) type mergeContext struct { - *prContext + *prTmpRepoContext doer *user_model.User sig *git.Signature committer *git.Signature - signKeyID string // empty for no-sign, non-empty to sign + signKey *git.SigningKey env []string } @@ -68,8 +68,8 @@ func createTemporaryRepoForMerge(ctx context.Context, pr *issues_model.PullReque } mergeCtx = &mergeContext{ - prContext: prCtx, - doer: doer, + prTmpRepoContext: prCtx, + doer: doer, } if expectedHeadCommitID != "" { @@ -99,9 +99,9 @@ func createTemporaryRepoForMerge(ctx context.Context, pr *issues_model.PullReque mergeCtx.committer = mergeCtx.sig // Determine if we should sign - sign, keyID, signer, _ := asymkey_service.SignMerge(ctx, mergeCtx.pr, mergeCtx.doer, mergeCtx.tmpBasePath, "HEAD", trackingBranch) + sign, key, signer, _ := asymkey_service.SignMerge(ctx, mergeCtx.pr, mergeCtx.doer, mergeCtx.tmpBasePath, "HEAD", trackingBranch) if sign { - mergeCtx.signKeyID = keyID + mergeCtx.signKey = key if pr.BaseRepo.GetTrustModel() == repo_model.CommitterTrustModel || pr.BaseRepo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel { mergeCtx.committer = signer } diff --git a/services/pull/merge_squash.go b/services/pull/merge_squash.go index 076189fd7a..0049c0b117 100644 --- a/services/pull/merge_squash.go +++ b/services/pull/merge_squash.go @@ -5,7 +5,6 @@ package pull import ( "fmt" - "strings" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" @@ -66,18 +65,19 @@ func doMergeStyleSquash(ctx *mergeContext, message string) error { if setting.Repository.PullRequest.AddCoCommitterTrailers && ctx.committer.String() != sig.String() { // add trailer - if !strings.Contains(message, fmt.Sprintf("Co-authored-by: %s", sig.String())) { - message += 
fmt.Sprintf("\nCo-authored-by: %s", sig.String()) - } - message += fmt.Sprintf("\nCo-committed-by: %s\n", sig.String()) + message = AddCommitMessageTailer(message, "Co-authored-by", sig.String()) + message = AddCommitMessageTailer(message, "Co-committed-by", sig.String()) // FIXME: this one should be removed, it is not really used or widely used } cmdCommit := git.NewCommand("commit"). AddOptionFormat("--author='%s <%s>'", sig.Name, sig.Email). AddOptionFormat("--message=%s", message) - if ctx.signKeyID == "" { + if ctx.signKey == nil { cmdCommit.AddArguments("--no-gpg-sign") } else { - cmdCommit.AddOptionFormat("-S%s", ctx.signKeyID) + if ctx.signKey.Format != "" { + cmdCommit.AddConfig("gpg.format", ctx.signKey.Format) + } + cmdCommit.AddOptionFormat("-S%s", ctx.signKey.KeyID) } if err := cmdCommit.Run(ctx, ctx.RunOpts()); err != nil { log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) diff --git a/services/pull/merge_test.go b/services/pull/merge_test.go index 6df6f55d46..91abeb9d9c 100644 --- a/services/pull/merge_test.go +++ b/services/pull/merge_test.go @@ -65,3 +65,28 @@ func Test_expandDefaultMergeMessage(t *testing.T) { }) } } + +func TestAddCommitMessageTailer(t *testing.T) { + // add tailer for empty message + assert.Equal(t, "\n\nTest-tailer: TestValue", AddCommitMessageTailer("", "Test-tailer", "TestValue")) + + // add tailer for message without newlines + assert.Equal(t, "title\n\nTest-tailer: TestValue", AddCommitMessageTailer("title", "Test-tailer", "TestValue")) + assert.Equal(t, "title\n\nNot tailer: xxx\n\nTest-tailer: TestValue", AddCommitMessageTailer("title\n\nNot tailer: xxx", "Test-tailer", "TestValue")) + assert.Equal(t, "title\n\nNotTailer: xxx\n\nTest-tailer: TestValue", AddCommitMessageTailer("title\n\nNotTailer: xxx", "Test-tailer", "TestValue")) + assert.Equal(t, "title\n\nnot-tailer: xxx\n\nTest-tailer: TestValue", AddCommitMessageTailer("title\n\nnot-tailer: xxx", "Test-tailer", "TestValue")) + + // add tailer for message with one EOL + assert.Equal(t, "title\n\nTest-tailer: TestValue", AddCommitMessageTailer("title\n", "Test-tailer", "TestValue")) + + // add tailer for message with two EOLs + assert.Equal(t, "title\n\nTest-tailer: TestValue", AddCommitMessageTailer("title\n\n", "Test-tailer", "TestValue")) + + // add tailer for message with existing tailer (won't duplicate) + assert.Equal(t, "title\n\nTest-tailer: TestValue", AddCommitMessageTailer("title\n\nTest-tailer: TestValue", "Test-tailer", "TestValue")) + assert.Equal(t, "title\n\nTest-tailer: TestValue\n", AddCommitMessageTailer("title\n\nTest-tailer: TestValue\n", "Test-tailer", "TestValue")) + + // add tailer for message with existing tailer and different value (will append) + assert.Equal(t, "title\n\nTest-tailer: v1\nTest-tailer: v2", AddCommitMessageTailer("title\n\nTest-tailer: v1", "Test-tailer", "v2")) + assert.Equal(t, "title\n\nTest-tailer: v1\nTest-tailer: v2", AddCommitMessageTailer("title\n\nTest-tailer: v1\n", "Test-tailer", "v2")) +} diff --git a/services/pull/patch.go b/services/pull/patch.go index 29f2f992ab..153e0baf87 100644 --- a/services/pull/patch.go +++ b/services/pull/patch.go @@ -67,9 +67,8 @@ var patchErrorSuffices = []string{ ": does not exist in index", } -// TestPatch will test whether a simple patch will apply -func TestPatch(pr *issues_model.PullRequest) error { - ctx, _, finished := process.GetManager().AddContext(graceful.GetManager().HammerContext(), fmt.Sprintf("TestPatch: %s", pr)) +func 
testPullRequestBranchMergeable(pr *issues_model.PullRequest) error { + ctx, _, finished := process.GetManager().AddContext(graceful.GetManager().HammerContext(), fmt.Sprintf("testPullRequestBranchMergeable: %s", pr)) defer finished() prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr) @@ -81,10 +80,10 @@ func TestPatch(pr *issues_model.PullRequest) error { } defer cancel() - return testPatch(ctx, prCtx, pr) + return testPullRequestTmpRepoBranchMergeable(ctx, prCtx, pr) } -func testPatch(ctx context.Context, prCtx *prContext, pr *issues_model.PullRequest) error { +func testPullRequestTmpRepoBranchMergeable(ctx context.Context, prCtx *prTmpRepoContext, pr *issues_model.PullRequest) error { gitRepo, err := git.OpenRepository(ctx, prCtx.tmpBasePath) if err != nil { return fmt.Errorf("OpenRepository: %w", err) @@ -134,7 +133,7 @@ type errMergeConflict struct { } func (e *errMergeConflict) Error() string { - return fmt.Sprintf("conflict detected at: %s", e.filename) + return "conflict detected at: " + e.filename } func attemptMerge(ctx context.Context, file *unmergedFile, tmpBasePath string, filesToRemove *[]string, filesToAdd *[]git.IndexObjectInfo) error { @@ -355,23 +354,19 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo * } // 3b. Create a plain patch from head to base - tmpPatchFile, err := os.CreateTemp("", "patch") + tmpPatchFile, cleanup, err := setting.AppDataTempDir("git-repo-content").CreateTempFileRandom("patch") if err != nil { log.Error("Unable to create temporary patch file! Error: %v", err) return false, fmt.Errorf("unable to create temporary patch file! Error: %w", err) } - defer func() { - _ = util.Remove(tmpPatchFile.Name()) - }() + defer cleanup() if err := gitRepo.GetDiffBinary(pr.MergeBase+"...tracking", tmpPatchFile); err != nil { - tmpPatchFile.Close() log.Error("Unable to get patch file from %s to %s in %s Error: %v", pr.MergeBase, pr.HeadBranch, pr.BaseRepo.FullName(), err) return false, fmt.Errorf("unable to get patch file from %s to %s in %s Error: %w", pr.MergeBase, pr.HeadBranch, pr.BaseRepo.FullName(), err) } stat, err := tmpPatchFile.Stat() if err != nil { - tmpPatchFile.Close() return false, fmt.Errorf("unable to stat patch file: %w", err) } patchPath := tmpPatchFile.Name() @@ -384,7 +379,7 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo * return false, nil } - log.Trace("PullRequest[%d].testPatch (patchPath): %s", pr.ID, patchPath) + log.Trace("PullRequest[%d].testPullRequestTmpRepoBranchMergeable (patchPath): %s", pr.ID, patchPath) // 4. 
Read the base branch in to the index of the temporary repository _, _, err = git.NewCommand("read-tree", "base").RunStdString(gitRepo.Ctx, &git.RunOpts{Dir: tmpBasePath}) @@ -454,7 +449,7 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo * scanner := bufio.NewScanner(stderrReader) for scanner.Scan() { line := scanner.Text() - log.Trace("PullRequest[%d].testPatch: stderr: %s", pr.ID, line) + log.Trace("PullRequest[%d].testPullRequestTmpRepoBranchMergeable: stderr: %s", pr.ID, line) if strings.HasPrefix(line, prefix) { conflict = true filepath := strings.TrimSpace(strings.Split(line[len(prefix):], ":")[0]) diff --git a/services/pull/pull.go b/services/pull/pull.go index 4641d4ac40..701c4f4d32 100644 --- a/services/pull/pull.go +++ b/services/pull/pull.go @@ -96,7 +96,7 @@ func NewPullRequest(ctx context.Context, opts *NewPullRequestOptions) error { } defer cancel() - if err := testPatch(ctx, prCtx, pr); err != nil { + if err := testPullRequestTmpRepoBranchMergeable(ctx, prCtx, pr); err != nil { return err } @@ -314,12 +314,12 @@ func ChangeTargetBranch(ctx context.Context, pr *issues_model.PullRequest, doer pr.BaseBranch = targetBranch // Refresh patch - if err := TestPatch(pr); err != nil { + if err := testPullRequestBranchMergeable(pr); err != nil { return err } // Update target branch, PR diff and status - // This is the same as checkAndUpdateStatus in check service, but also updates base_branch + // This is the same as markPullRequestAsMergeable in check service, but also updates base_branch if pr.Status == issues_model.PullRequestStatusChecking { pr.Status = issues_model.PullRequestStatusMergeable } @@ -409,7 +409,7 @@ func AddTestPullRequestTask(opts TestPullRequestOptions) { continue } - AddToTaskQueue(ctx, pr) + StartPullRequestCheckImmediately(ctx, pr) comment, err := CreatePushPullComment(ctx, opts.Doer, pr, opts.OldCommitID, opts.NewCommitID) if err == nil && comment != nil { notify_service.PullRequestPushCommits(ctx, opts.Doer, pr, comment) @@ -463,12 +463,7 @@ func AddTestPullRequestTask(opts TestPullRequestOptions) { } if !pr.IsWorkInProgress(ctx) { - var reviewNotifiers []*issue_service.ReviewRequestNotifier - if opts.IsForcePush { - reviewNotifiers, err = issue_service.PullRequestCodeOwnersReview(ctx, pr) - } else { - reviewNotifiers, err = issue_service.PullRequestCodeOwnersReviewSpecialCommits(ctx, pr, opts.OldCommitID, opts.NewCommitID) - } + reviewNotifiers, err := issue_service.PullRequestCodeOwnersReview(ctx, pr) if err != nil { log.Error("PullRequestCodeOwnersReview: %v", err) } @@ -502,7 +497,7 @@ func AddTestPullRequestTask(opts TestPullRequestOptions) { log.Error("UpdateCommitDivergence: %v", err) } } - AddToTaskQueue(ctx, pr) + StartPullRequestCheckDelayable(ctx, pr) } }) } @@ -763,7 +758,7 @@ func CloseRepoBranchesPulls(ctx context.Context, doer *user_model.User, repo *re var errs []error for _, branch := range branches { - prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(ctx, repo.ID, branch.Name) + prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(ctx, repo.ID, branch) if err != nil { return err } @@ -950,12 +945,6 @@ func GetSquashMergeCommitMessages(ctx context.Context, pr *issues_model.PullRequ return stringBuilder.String() } -// GetIssuesLastCommitStatus returns a map of issue ID to the most recent commit's latest status -func GetIssuesLastCommitStatus(ctx context.Context, issues issues_model.IssueList) (map[int64]*git_model.CommitStatus, error) { - _, lastStatus, err := GetIssuesAllCommitStatus(ctx, issues) 
- return lastStatus, err -} - // GetIssuesAllCommitStatus returns a map of issue ID to a list of all statuses for the most recent commit as well as a map of issue ID to only the commit's latest status func GetIssuesAllCommitStatus(ctx context.Context, issues issues_model.IssueList) (map[int64][]*git_model.CommitStatus, map[int64]*git_model.CommitStatus, error) { if err := issues.LoadPullRequests(ctx); err != nil { @@ -1009,7 +998,7 @@ func getAllCommitStatus(ctx context.Context, gitRepo *git.Repository, pr *issues return nil, nil, shaErr } - statuses, _, err = git_model.GetLatestCommitStatus(ctx, pr.BaseRepo.ID, sha, db.ListOptionsAll) + statuses, err = git_model.GetLatestCommitStatus(ctx, pr.BaseRepo.ID, sha, db.ListOptionsAll) lastStatus = git_model.CalcCommitStatus(statuses) return statuses, lastStatus, err } diff --git a/services/pull/review.go b/services/pull/review.go index 78723a58ae..5c80e7b338 100644 --- a/services/pull/review.go +++ b/services/pull/review.go @@ -395,7 +395,7 @@ func DismissReview(ctx context.Context, reviewID, repoID int64, message string, } if review.Type != issues_model.ReviewTypeApprove && review.Type != issues_model.ReviewTypeReject { - return nil, fmt.Errorf("not need to dismiss this review because it's type is not Approve or change request") + return nil, errors.New("not need to dismiss this review because it's type is not Approve or change request") } // load data for notify @@ -405,7 +405,7 @@ func DismissReview(ctx context.Context, reviewID, repoID int64, message string, // Check if the review's repoID is the one we're currently expecting. if review.Issue.RepoID != repoID { - return nil, fmt.Errorf("reviews's repository is not the same as the one we expect") + return nil, errors.New("reviews's repository is not the same as the one we expect") } issue := review.Issue diff --git a/services/pull/reviewer.go b/services/pull/reviewer.go index bf0d8cb298..52f2f3401c 100644 --- a/services/pull/reviewer.go +++ b/services/pull/reviewer.go @@ -85,5 +85,5 @@ func GetReviewerTeams(ctx context.Context, repo *repo_model.Repository) ([]*orga return nil, nil } - return organization.GetTeamsWithAccessToRepoUnit(ctx, repo.OwnerID, repo.ID, perm.AccessModeRead, unit.TypePullRequests) + return organization.GetTeamsWithAccessToAnyRepoUnit(ctx, repo.OwnerID, repo.ID, perm.AccessModeRead, unit.TypePullRequests) } diff --git a/services/pull/temp_repo.go b/services/pull/temp_repo.go index 3f33370798..72406482e0 100644 --- a/services/pull/temp_repo.go +++ b/services/pull/temp_repo.go @@ -28,7 +28,7 @@ const ( stagingBranch = "staging" // this is used for a working branch ) -type prContext struct { +type prTmpRepoContext struct { context.Context tmpBasePath string pr *issues_model.PullRequest @@ -36,7 +36,7 @@ type prContext struct { errbuf *strings.Builder // any use should be preceded by a Reset and preferably after use } -func (ctx *prContext) RunOpts() *git.RunOpts { +func (ctx *prTmpRepoContext) RunOpts() *git.RunOpts { ctx.outbuf.Reset() ctx.errbuf.Reset() return &git.RunOpts{ @@ -48,7 +48,7 @@ func (ctx *prContext) RunOpts() *git.RunOpts { // createTemporaryRepoForPR creates a temporary repo with "base" for pr.BaseBranch and "tracking" for pr.HeadBranch // it also create a second base branch called "original_base" -func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest) (prCtx *prContext, cancel context.CancelFunc, err error) { +func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest) (prCtx *prTmpRepoContext, cancel 
context.CancelFunc, err error) { if err := pr.LoadHeadRepo(ctx); err != nil { log.Error("%-v LoadHeadRepo: %v", pr, err) return nil, nil, fmt.Errorf("%v LoadHeadRepo: %w", pr, err) @@ -74,23 +74,20 @@ func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest) } // Clone base repo. - tmpBasePath, err := repo_module.CreateTemporaryPath("pull") + tmpBasePath, cleanup, err := repo_module.CreateTemporaryPath("pull") if err != nil { log.Error("CreateTemporaryPath[%-v]: %v", pr, err) return nil, nil, err } - prCtx = &prContext{ + cancel = cleanup + + prCtx = &prTmpRepoContext{ Context: ctx, tmpBasePath: tmpBasePath, pr: pr, outbuf: &strings.Builder{}, errbuf: &strings.Builder{}, } - cancel = func() { - if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { - log.Error("Error whilst removing removing temporary repo for %-v: %v", pr, err) - } - } baseRepoPath := pr.BaseRepo.RepoPath() headRepoPath := pr.HeadRepo.RepoPath() diff --git a/services/pull/update.go b/services/pull/update.go index 3e00dd4e65..b8f84e3d65 100644 --- a/services/pull/update.go +++ b/services/pull/update.go @@ -5,6 +5,7 @@ package pull import ( "context" + "errors" "fmt" git_model "code.gitea.io/gitea/models/git" @@ -23,7 +24,7 @@ import ( func Update(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, message string, rebase bool) error { if pr.Flow == issues_model.PullRequestFlowAGit { // TODO: update of agit flow pull request's head branch is unsupported - return fmt.Errorf("update of agit flow pull request's head branch is unsupported") + return errors.New("update of agit flow pull request's head branch is unsupported") } releaser, err := globallock.Lock(ctx, getPullWorkingLockKey(pr.ID)) @@ -40,22 +41,6 @@ func Update(ctx context.Context, pr *issues_model.PullRequest, doer *user_model. return fmt.Errorf("HeadBranch of PR %d is up to date", pr.Index) } - if rebase { - defer func() { - go AddTestPullRequestTask(TestPullRequestOptions{ - RepoID: pr.BaseRepo.ID, - Doer: doer, - Branch: pr.BaseBranch, - IsSync: false, - IsForcePush: false, - OldCommitID: "", - NewCommitID: "", - }) - }() - - return updateHeadByRebaseOnToBase(ctx, pr, doer) - } - if err := pr.LoadBaseRepo(ctx); err != nil { log.Error("unable to load BaseRepo for %-v during update-by-merge: %v", pr, err) return fmt.Errorf("unable to load BaseRepo for PR[%d] during update-by-merge: %w", pr.ID, err) @@ -73,6 +58,22 @@ func Update(ctx context.Context, pr *issues_model.PullRequest, doer *user_model. return fmt.Errorf("unable to load HeadRepo for PR[%d] during update-by-merge: %w", pr.ID, err) } + defer func() { + go AddTestPullRequestTask(TestPullRequestOptions{ + RepoID: pr.BaseRepo.ID, + Doer: doer, + Branch: pr.BaseBranch, + IsSync: false, + IsForcePush: false, + OldCommitID: "", + NewCommitID: "", + }) + }() + + if rebase { + return updateHeadByRebaseOnToBase(ctx, pr, doer) + } + // TODO: FakePR: it is somewhat hacky, but it is the only way to "merge" at the moment // ideally in the future the "merge" functions should be refactored to decouple from the PullRequest // now use a fake reverse PR to switch head&base repos/branches @@ -89,19 +90,6 @@ func Update(ctx context.Context, pr *issues_model.PullRequest, doer *user_model. 
} _, err = doMergeAndPush(ctx, reversePR, doer, repo_model.MergeStyleMerge, "", message, repository.PushTriggerPRUpdateWithBase) - - defer func() { - go AddTestPullRequestTask(TestPullRequestOptions{ - RepoID: reversePR.HeadRepo.ID, - Doer: doer, - Branch: reversePR.HeadBranch, - IsSync: false, - IsForcePush: false, - OldCommitID: "", - NewCommitID: "", - }) - }() - return err } diff --git a/services/release/release_test.go b/services/release/release_test.go index 95a54832b9..36a9f667d6 100644 --- a/services/release/release_test.go +++ b/services/release/release_test.go @@ -250,9 +250,9 @@ func TestRelease_Update(t *testing.T) { assert.NoError(t, UpdateRelease(db.DefaultContext, user, gitRepo, release, []string{attach.UUID}, nil, nil)) assert.NoError(t, repo_model.GetReleaseAttachments(db.DefaultContext, release)) assert.Len(t, release.Attachments, 1) - assert.EqualValues(t, attach.UUID, release.Attachments[0].UUID) - assert.EqualValues(t, release.ID, release.Attachments[0].ReleaseID) - assert.EqualValues(t, attach.Name, release.Attachments[0].Name) + assert.Equal(t, attach.UUID, release.Attachments[0].UUID) + assert.Equal(t, release.ID, release.Attachments[0].ReleaseID) + assert.Equal(t, attach.Name, release.Attachments[0].Name) // update the attachment name assert.NoError(t, UpdateRelease(db.DefaultContext, user, gitRepo, release, nil, nil, map[string]string{ @@ -261,9 +261,9 @@ func TestRelease_Update(t *testing.T) { release.Attachments = nil assert.NoError(t, repo_model.GetReleaseAttachments(db.DefaultContext, release)) assert.Len(t, release.Attachments, 1) - assert.EqualValues(t, attach.UUID, release.Attachments[0].UUID) - assert.EqualValues(t, release.ID, release.Attachments[0].ReleaseID) - assert.EqualValues(t, "test2.txt", release.Attachments[0].Name) + assert.Equal(t, attach.UUID, release.Attachments[0].UUID) + assert.Equal(t, release.ID, release.Attachments[0].ReleaseID) + assert.Equal(t, "test2.txt", release.Attachments[0].Name) // delete the attachment assert.NoError(t, UpdateRelease(db.DefaultContext, user, gitRepo, release, nil, []string{attach.UUID}, nil)) diff --git a/services/repository/adopt.go b/services/repository/adopt.go index ea4f9a1920..2bd1c55de4 100644 --- a/services/repository/adopt.go +++ b/services/repository/adopt.go @@ -16,7 +16,6 @@ import ( repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/container" - "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/optional" @@ -28,18 +27,30 @@ import ( "github.com/gobwas/glob" ) +func deleteFailedAdoptRepository(repoID int64) error { + return db.WithTx(db.DefaultContext, func(ctx context.Context) error { + if err := deleteDBRepository(ctx, repoID); err != nil { + return fmt.Errorf("deleteDBRepository: %w", err) + } + if err := git_model.DeleteRepoBranches(ctx, repoID); err != nil { + return fmt.Errorf("deleteRepoBranches: %w", err) + } + return repo_model.DeleteRepoReleases(ctx, repoID) + }) +} + // AdoptRepository adopts pre-existing repository files for the user/organization. 
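A minimal caller sketch of the reworked AdoptRepository that follows: owner replaces u, the single doer.CanCreateRepoIn(owner) check stands in for the old IsAdmin/CanCreateRepo pair, and a failed adoption now rolls back the database rows while leaving the files on disk. The repo_service alias and the wrapper function are illustrative assumptions, not part of this patch.

package example

import (
	"context"

	user_model "code.gitea.io/gitea/models/user"
	"code.gitea.io/gitea/modules/log"
	repo_service "code.gitea.io/gitea/services/repository" // assumed alias for this package
)

// adoptLegacyRepo adopts an orphaned "<owner>/legacy.git" directory that already exists on disk.
func adoptLegacyRepo(ctx context.Context, doer, owner *user_model.User) error {
	repo, err := repo_service.AdoptRepository(ctx, doer, owner, repo_service.CreateRepoOptions{
		Name:      "legacy", // must match the existing directory name, without the ".git" suffix
		IsPrivate: true,
	})
	if err != nil {
		// at this point deleteFailedAdoptRepository has removed the DB records again;
		// the repository files are intentionally left untouched
		return err
	}
	log.Trace("adopted %s (status %d)", repo.FullName(), repo.Status)
	return nil
}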
-func AdoptRepository(ctx context.Context, doer, u *user_model.User, opts CreateRepoOptions) (*repo_model.Repository, error) { - if !doer.IsAdmin && !u.CanCreateRepo() { +func AdoptRepository(ctx context.Context, doer, owner *user_model.User, opts CreateRepoOptions) (*repo_model.Repository, error) { + if !doer.CanCreateRepoIn(owner) { return nil, repo_model.ErrReachLimitOfRepo{ - Limit: u.MaxRepoCreation, + Limit: owner.MaxRepoCreation, } } repo := &repo_model.Repository{ - OwnerID: u.ID, - Owner: u, - OwnerName: u.Name, + OwnerID: owner.ID, + Owner: owner, + OwnerName: owner.Name, Name: opts.Name, LowerName: strings.ToLower(opts.Name), Description: opts.Description, @@ -48,59 +59,52 @@ func AdoptRepository(ctx context.Context, doer, u *user_model.User, opts CreateR IsPrivate: opts.IsPrivate, IsFsckEnabled: !opts.IsMirror, CloseIssuesViaCommitInAnyBranch: setting.Repository.DefaultCloseIssuesViaCommitsInAnyBranch, - Status: opts.Status, + Status: repo_model.RepositoryBeingMigrated, IsEmpty: !opts.AutoInit, } - if err := db.WithTx(ctx, func(ctx context.Context) error { - isExist, err := gitrepo.IsRepositoryExist(ctx, repo) + // 1 - create the repository database operations first + err := db.WithTx(ctx, func(ctx context.Context) error { + return createRepositoryInDB(ctx, doer, owner, repo, false) + }) + if err != nil { + return nil, err + } + + // last - clean up if something goes wrong + // WARNING: Don't override all later err with local variables + defer func() { if err != nil { - log.Error("Unable to check if %s exists. Error: %v", repo.FullName(), err) - return err - } - if !isExist { - return repo_model.ErrRepoNotExist{ - OwnerName: u.Name, - Name: repo.Name, + // we can not use the ctx because it maybe canceled or timeout + if errDel := deleteFailedAdoptRepository(repo.ID); errDel != nil { + log.Error("Failed to delete repository %s that could not be adopted: %v", repo.FullName(), errDel) } } + }() - if err := CreateRepositoryByExample(ctx, doer, u, repo, true, false); err != nil { - return err - } - - // Re-fetch the repository from database before updating it (else it would - // override changes that were done earlier with sql) - if repo, err = repo_model.GetRepositoryByID(ctx, repo.ID); err != nil { - return fmt.Errorf("getRepositoryByID: %w", err) - } - return nil - }); err != nil { - return nil, err + // Re-fetch the repository from database before updating it (else it would + // override changes that were done earlier with sql) + if repo, err = repo_model.GetRepositoryByID(ctx, repo.ID); err != nil { + return nil, fmt.Errorf("getRepositoryByID: %w", err) } - if err := func() error { - if err := adoptRepository(ctx, repo, opts.DefaultBranch); err != nil { - return fmt.Errorf("adoptRepository: %w", err) - } + // 2 - adopt the repository from disk + if err = adoptRepository(ctx, repo, opts.DefaultBranch); err != nil { + return nil, fmt.Errorf("adoptRepository: %w", err) + } - if err := repo_module.CheckDaemonExportOK(ctx, repo); err != nil { - return fmt.Errorf("checkDaemonExportOK: %w", err) - } + // 3 - Update the git repository + if err = updateGitRepoAfterCreate(ctx, repo); err != nil { + return nil, fmt.Errorf("updateGitRepoAfterCreate: %w", err) + } - if stdout, _, err := git.NewCommand("update-server-info"). 
- RunStdString(ctx, &git.RunOpts{Dir: repo.RepoPath()}); err != nil { - log.Error("CreateRepository(git update-server-info) in %v: Stdout: %s\nError: %v", repo, stdout, err) - return fmt.Errorf("CreateRepository(git update-server-info): %w", err) - } - return nil - }(); err != nil { - if errDel := DeleteRepository(ctx, doer, repo, false /* no notify */); errDel != nil { - log.Error("Failed to delete repository %s that could not be adopted: %v", repo.FullName(), errDel) - } - return nil, err + // 4 - update repository status + repo.Status = repo_model.RepositoryReady + if err = repo_model.UpdateRepositoryColsWithAutoTime(ctx, repo, "status"); err != nil { + return nil, fmt.Errorf("UpdateRepositoryCols: %w", err) } - notify_service.AdoptRepository(ctx, doer, u, repo) + + notify_service.AdoptRepository(ctx, doer, owner, repo) return repo, nil } @@ -115,7 +119,7 @@ func adoptRepository(ctx context.Context, repo *repo_model.Repository, defaultBr return fmt.Errorf("adoptRepository: path does not already exist: %s", repo.FullName()) } - if err := gitrepo.CreateDelegateHooksForRepo(ctx, repo); err != nil { + if err := gitrepo.CreateDelegateHooks(ctx, repo); err != nil { return fmt.Errorf("createDelegateHooks: %w", err) } @@ -192,8 +196,13 @@ func adoptRepository(ctx context.Context, repo *repo_model.Repository, defaultBr return fmt.Errorf("setDefaultBranch: %w", err) } } - if err = repo_module.UpdateRepository(ctx, repo, false); err != nil { - return fmt.Errorf("updateRepository: %w", err) + + if err = repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "is_empty", "default_branch"); err != nil { + return fmt.Errorf("UpdateRepositoryCols: %w", err) + } + + if err = repo_module.UpdateRepoSize(ctx, repo); err != nil { + log.Error("Failed to update size for repository: %v", err) } return nil @@ -256,7 +265,7 @@ func checkUnadoptedRepositories(ctx context.Context, userName string, repoNamesT } return err } - repos, _, err := repo_model.GetUserRepositories(ctx, &repo_model.SearchRepoOptions{ + repos, _, err := repo_model.GetUserRepositories(ctx, repo_model.SearchRepoOptions{ Actor: ctxUser, Private: true, ListOptions: db.ListOptions{ diff --git a/services/repository/adopt_test.go b/services/repository/adopt_test.go index 123cedc1f2..86f586c748 100644 --- a/services/repository/adopt_test.go +++ b/services/repository/adopt_test.go @@ -14,6 +14,7 @@ import ( "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" "github.com/stretchr/testify/assert" ) @@ -28,7 +29,7 @@ func TestCheckUnadoptedRepositories_Add(t *testing.T) { } total := 30 - for i := 0; i < total; i++ { + for range total { unadopted.add("something") } @@ -71,7 +72,7 @@ func TestListUnadoptedRepositories_ListOptions(t *testing.T) { username := "user2" unadoptedList := []string{path.Join(username, "unadopted1"), path.Join(username, "unadopted2")} for _, unadopted := range unadoptedList { - _ = os.Mkdir(path.Join(setting.RepoRootPath, unadopted+".git"), 0o755) + _ = os.Mkdir(filepath.Join(setting.RepoRootPath, unadopted+".git"), 0o755) } opts := db.ListOptions{Page: 1, PageSize: 1} @@ -89,10 +90,36 @@ func TestListUnadoptedRepositories_ListOptions(t *testing.T) { func TestAdoptRepository(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) - assert.NoError(t, unittest.SyncDirs(filepath.Join(setting.RepoRootPath, "user2", "repo1.git"), filepath.Join(setting.RepoRootPath, "user2", "test-adopt.git"))) + user2 := 
unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}) - _, err := AdoptRepository(db.DefaultContext, user2, user2, CreateRepoOptions{Name: "test-adopt"}) + + // a successful adopt + destDir := filepath.Join(setting.RepoRootPath, user2.Name, "test-adopt.git") + assert.NoError(t, unittest.SyncDirs(filepath.Join(setting.RepoRootPath, user2.Name, "repo1.git"), destDir)) + + adoptedRepo, err := AdoptRepository(db.DefaultContext, user2, user2, CreateRepoOptions{Name: "test-adopt"}) assert.NoError(t, err) repoTestAdopt := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{Name: "test-adopt"}) assert.Equal(t, "sha1", repoTestAdopt.ObjectFormatName) + + // just delete the adopted repo's db records + err = deleteFailedAdoptRepository(adoptedRepo.ID) + assert.NoError(t, err) + + unittest.AssertNotExistsBean(t, &repo_model.Repository{OwnerName: user2.Name, Name: "test-adopt"}) + + // a failed adopt because some mock data + // remove the hooks directory and create a file so that we cannot create the hooks successfully + _ = os.RemoveAll(filepath.Join(destDir, "hooks", "update.d")) + assert.NoError(t, os.WriteFile(filepath.Join(destDir, "hooks", "update.d"), []byte("tests"), os.ModePerm)) + + adoptedRepo, err = AdoptRepository(db.DefaultContext, user2, user2, CreateRepoOptions{Name: "test-adopt"}) + assert.Error(t, err) + assert.Nil(t, adoptedRepo) + + unittest.AssertNotExistsBean(t, &repo_model.Repository{OwnerName: user2.Name, Name: "test-adopt"}) + + exist, err := util.IsExist(repo_model.RepoPath(user2.Name, "test-adopt")) + assert.NoError(t, err) + assert.True(t, exist) // the repository should be still in the disk } diff --git a/services/repository/archiver/archiver.go b/services/repository/archiver/archiver.go index d39acc080d..a657e3884c 100644 --- a/services/repository/archiver/archiver.go +++ b/services/repository/archiver/archiver.go @@ -44,7 +44,7 @@ type ErrUnknownArchiveFormat struct { // Error implements error func (err ErrUnknownArchiveFormat) Error() string { - return fmt.Sprintf("unknown format: %s", err.RequestNameType) + return "unknown format: " + err.RequestNameType } // Is implements error @@ -60,7 +60,7 @@ type RepoRefNotFoundError struct { // Error implements error. func (e RepoRefNotFoundError) Error() string { - return fmt.Sprintf("unrecognized repository reference: %s", e.RefShortName) + return "unrecognized repository reference: " + e.RefShortName } func (e RepoRefNotFoundError) Is(err error) bool { diff --git a/services/repository/archiver/archiver_test.go b/services/repository/archiver/archiver_test.go index 522f90558a..87324ad38c 100644 --- a/services/repository/archiver/archiver_test.go +++ b/services/repository/archiver/archiver_test.go @@ -33,7 +33,7 @@ func TestArchive_Basic(t *testing.T) { bogusReq, err := NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, firstCommit+".zip") assert.NoError(t, err) assert.NotNil(t, bogusReq) - assert.EqualValues(t, firstCommit+".zip", bogusReq.GetArchiveName()) + assert.Equal(t, firstCommit+".zip", bogusReq.GetArchiveName()) // Check a series of bogus requests. // Step 1, valid commit with a bad extension. 
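The Error() changes above only swap fmt.Sprintf for plain string concatenation; callers keep matching these failures through errors.Is. A sketch of telling the two NewRequest failure modes apart, assuming the archiver_service alias and assuming the Is implementations type-assert on the target as is usual in this codebase:

package example

import (
	"errors"

	"code.gitea.io/gitea/modules/git"
	archiver_service "code.gitea.io/gitea/services/repository/archiver" // assumed alias
)

// classifyArchiveRequest reports why building an archive request failed, if it failed at all.
func classifyArchiveRequest(repoID int64, gitRepo *git.Repository, uri string) string {
	_, err := archiver_service.NewRequest(repoID, gitRepo, uri)
	switch {
	case err == nil:
		return "ok"
	case errors.Is(err, archiver_service.ErrUnknownArchiveFormat{}):
		return "unsupported archive extension" // e.g. "master.invalid"
	case errors.Is(err, archiver_service.RepoRefNotFoundError{}):
		return "unknown branch, tag or commit"
	default:
		return err.Error()
	}
}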
@@ -54,12 +54,12 @@ func TestArchive_Basic(t *testing.T) { bogusReq, err = NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, "master.zip") assert.NoError(t, err) assert.NotNil(t, bogusReq) - assert.EqualValues(t, "master.zip", bogusReq.GetArchiveName()) + assert.Equal(t, "master.zip", bogusReq.GetArchiveName()) bogusReq, err = NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, "test/archive.zip") assert.NoError(t, err) assert.NotNil(t, bogusReq) - assert.EqualValues(t, "test-archive.zip", bogusReq.GetArchiveName()) + assert.Equal(t, "test-archive.zip", bogusReq.GetArchiveName()) // Now two valid requests, firstCommit with valid extensions. zipReq, err := NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, firstCommit+".zip") diff --git a/services/repository/avatar.go b/services/repository/avatar.go index 15e51d4a25..26bf6da465 100644 --- a/services/repository/avatar.go +++ b/services/repository/avatar.go @@ -40,7 +40,7 @@ func UploadAvatar(ctx context.Context, repo *repo_model.Repository, data []byte) // Users can upload the same image to other repo - prefix it with ID // Then repo will be removed - only it avatar file will be removed repo.Avatar = newAvatar - if err := repo_model.UpdateRepositoryCols(ctx, repo, "avatar"); err != nil { + if err := repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "avatar"); err != nil { return fmt.Errorf("UploadAvatar: Update repository avatar: %w", err) } @@ -77,7 +77,7 @@ func DeleteAvatar(ctx context.Context, repo *repo_model.Repository) error { defer committer.Close() repo.Avatar = "" - if err := repo_model.UpdateRepositoryCols(ctx, repo, "avatar"); err != nil { + if err := repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "avatar"); err != nil { return fmt.Errorf("DeleteAvatar: Update repository avatar: %w", err) } @@ -112,5 +112,5 @@ func generateAvatar(ctx context.Context, templateRepo, generateRepo *repo_model. return err } - return repo_model.UpdateRepositoryCols(ctx, generateRepo, "avatar") + return repo_model.UpdateRepositoryColsNoAutoTime(ctx, generateRepo, "avatar") } diff --git a/services/repository/avatar_test.go b/services/repository/avatar_test.go index bea820e85f..2dc5173eec 100644 --- a/services/repository/avatar_test.go +++ b/services/repository/avatar_test.go @@ -59,7 +59,7 @@ func TestDeleteAvatar(t *testing.T) { err = DeleteAvatar(db.DefaultContext, repo) assert.NoError(t, err) - assert.Equal(t, "", repo.Avatar) + assert.Empty(t, repo.Avatar) } func TestGenerateAvatar(t *testing.T) { diff --git a/services/repository/branch.go b/services/repository/branch.go index 8804778bd5..dd00ca7dcd 100644 --- a/services/repository/branch.go +++ b/services/repository/branch.go @@ -303,7 +303,7 @@ func SyncBranchesToDB(ctx context.Context, repoID, pusherID int64, branchNames, // For other batches, it will hit optimization 4. 
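The avatar.go hunks above are part of a broader move to the two explicit column-update helpers. A sketch of the intended split, with the timestamp behaviour assumed from the call sites rather than spelled out in this diff:

package example

import (
	"context"

	repo_model "code.gitea.io/gitea/models/repo"
)

// clearAvatarAndMarkReady shows both helpers side by side: the NoAutoTime
// variant writes only the named columns (internal bookkeeping such as the
// avatar), while the WithAutoTime variant is used where the change should
// also refresh the repository's updated time, e.g. flipping Status to ready.
func clearAvatarAndMarkReady(ctx context.Context, repo *repo_model.Repository) error {
	repo.Avatar = ""
	if err := repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "avatar"); err != nil {
		return err
	}
	repo.Status = repo_model.RepositoryReady
	return repo_model.UpdateRepositoryColsWithAutoTime(ctx, repo, "status")
}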
if len(branchNames) != len(commitIDs) { - return fmt.Errorf("branchNames and commitIDs length not match") + return errors.New("branchNames and commitIDs length not match") } return db.WithTx(ctx, func(ctx context.Context) error { @@ -663,6 +663,11 @@ func SetRepoDefaultBranch(ctx context.Context, repo *repo_model.Repository, newB } } + // clear divergence cache + if err := DelRepoDivergenceFromCache(ctx, repo.ID); err != nil { + log.Error("DelRepoDivergenceFromCache: %v", err) + } + notify_service.ChangeDefaultBranch(ctx, repo) return nil diff --git a/services/repository/check.go b/services/repository/check.go index b475fbc487..ffcd5ac749 100644 --- a/services/repository/check.go +++ b/services/repository/check.go @@ -162,7 +162,7 @@ func DeleteMissingRepositories(ctx context.Context, doer *user_model.User) error default: } log.Trace("Deleting %d/%d...", repo.OwnerID, repo.ID) - if err := DeleteRepositoryDirectly(ctx, doer, repo.ID); err != nil { + if err := DeleteRepositoryDirectly(ctx, repo.ID); err != nil { log.Error("Failed to DeleteRepository %-v: Error: %v", repo, err) if err2 := system_model.CreateRepositoryNotice("Failed to DeleteRepository %s [%d]: Error: %v", repo.FullName(), repo.ID, err); err2 != nil { log.Error("CreateRepositoryNotice: %v", err) diff --git a/services/repository/commitstatus/commitstatus.go b/services/repository/commitstatus/commitstatus.go index f369a303e6..fa7a89882a 100644 --- a/services/repository/commitstatus/commitstatus.go +++ b/services/repository/commitstatus/commitstatus.go @@ -14,17 +14,17 @@ import ( repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/cache" + "code.gitea.io/gitea/modules/commitstatus" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/json" "code.gitea.io/gitea/modules/log" repo_module "code.gitea.io/gitea/modules/repository" - api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/services/notify" ) func getCacheKey(repoID int64, brancheName string) string { - hashBytes := sha256.Sum256([]byte(fmt.Sprintf("%d:%s", repoID, brancheName))) + hashBytes := sha256.Sum256(fmt.Appendf(nil, "%d:%s", repoID, brancheName)) return fmt.Sprintf("commit_status:%x", hashBytes) } @@ -47,7 +47,7 @@ func getCommitStatusCache(repoID int64, branchName string) *commitStatusCacheVal return nil } -func updateCommitStatusCache(repoID int64, branchName string, state api.CommitStatusState, targetURL string) error { +func updateCommitStatusCache(repoID int64, branchName string, state commitstatus.CommitStatusState, targetURL string) error { c := cache.GetCache() bs, err := json.Marshal(commitStatusCacheValue{ State: state.String(), @@ -127,7 +127,7 @@ func FindReposLastestCommitStatuses(ctx context.Context, repos []*repo_model.Rep for i, repo := range repos { if cv := getCommitStatusCache(repo.ID, repo.DefaultBranch); cv != nil { results[i] = &git_model.CommitStatus{ - State: api.CommitStatusState(cv.State), + State: commitstatus.CommitStatusState(cv.State), TargetURL: cv.TargetURL, } } else { diff --git a/services/repository/contributors_graph_test.go b/services/repository/contributors_graph_test.go index 6db93f6a64..7d32b1c931 100644 --- a/services/repository/contributors_graph_test.go +++ b/services/repository/contributors_graph_test.go @@ -38,14 +38,14 @@ func TestRepository_ContributorsGraph(t *testing.T) { keys = append(keys, k) } slices.Sort(keys) - assert.EqualValues(t, []string{ + assert.Equal(t, []string{ 
"ethantkoenig@gmail.com", "jimmy.praet@telenet.be", "jon@allspice.io", "total", // generated summary }, keys) - assert.EqualValues(t, &ContributorData{ + assert.Equal(t, &ContributorData{ Name: "Ethan Koenig", AvatarLink: "/assets/img/avatar_default.png", TotalCommits: 1, @@ -58,7 +58,7 @@ func TestRepository_ContributorsGraph(t *testing.T) { }, }, }, data["ethantkoenig@gmail.com"]) - assert.EqualValues(t, &ContributorData{ + assert.Equal(t, &ContributorData{ Name: "Total", AvatarLink: "", TotalCommits: 3, diff --git a/services/repository/create.go b/services/repository/create.go index 1a6a68b35a..bed02e5d7e 100644 --- a/services/repository/create.go +++ b/services/repository/create.go @@ -17,6 +17,7 @@ import ( "code.gitea.io/gitea/models/perm" access_model "code.gitea.io/gitea/models/perm/access" repo_model "code.gitea.io/gitea/models/repo" + system_model "code.gitea.io/gitea/models/system" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/models/webhook" @@ -28,7 +29,6 @@ import ( "code.gitea.io/gitea/modules/setting" api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/templates/vars" - "code.gitea.io/gitea/modules/util" ) // CreateRepoOptions contains the create repository options @@ -100,8 +100,8 @@ func prepareRepoCommit(ctx context.Context, repo *repo_model.Repository, tmpDir // .gitignore if len(opts.Gitignores) > 0 { var buf bytes.Buffer - names := strings.Split(opts.Gitignores, ",") - for _, name := range names { + names := strings.SplitSeq(opts.Gitignores, ",") + for name := range names { data, err = options.Gitignore(name) if err != nil { return fmt.Errorf("GetRepoInitFile[%s]: %w", name, err) @@ -140,21 +140,20 @@ func prepareRepoCommit(ctx context.Context, repo *repo_model.Repository, tmpDir // InitRepository initializes README and .gitignore if needed. func initRepository(ctx context.Context, u *user_model.User, repo *repo_model.Repository, opts CreateRepoOptions) (err error) { - if err = repo_module.CheckInitRepository(ctx, repo); err != nil { - return err + // Init git bare new repository. + if err = git.InitRepository(ctx, repo.RepoPath(), true, repo.ObjectFormatName); err != nil { + return fmt.Errorf("git.InitRepository: %w", err) + } else if err = gitrepo.CreateDelegateHooks(ctx, repo); err != nil { + return fmt.Errorf("createDelegateHooks: %w", err) } // Initialize repository according to user's choice. 
if opts.AutoInit { - tmpDir, err := os.MkdirTemp(os.TempDir(), "gitea-"+repo.Name) + tmpDir, cleanup, err := setting.AppDataTempDir("git-repo-content").MkdirTempRandom("repos-" + repo.Name) if err != nil { - return fmt.Errorf("Failed to create temp dir for repository %s: %w", repo.FullName(), err) + return fmt.Errorf("failed to create temp dir for repository %s: %w", repo.FullName(), err) } - defer func() { - if err := util.RemoveAll(tmpDir); err != nil { - log.Warn("Unable to remove temporary directory: %s: Error: %v", tmpDir, err) - } - }() + defer cleanup() if err = prepareRepoCommit(ctx, repo, tmpDir, opts); err != nil { return fmt.Errorf("prepareRepoCommit: %w", err) @@ -192,18 +191,25 @@ func initRepository(ctx context.Context, u *user_model.User, repo *repo_model.Re } } - if err = UpdateRepository(ctx, repo, false); err != nil { + if err = repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "is_empty", "default_branch", "default_wiki_branch"); err != nil { return fmt.Errorf("updateRepository: %w", err) } + if err = repo_module.UpdateRepoSize(ctx, repo); err != nil { + log.Error("Failed to update size for repository: %v", err) + } + return nil } // CreateRepositoryDirectly creates a repository for the user/organization. -func CreateRepositoryDirectly(ctx context.Context, doer, u *user_model.User, opts CreateRepoOptions) (*repo_model.Repository, error) { - if !doer.IsAdmin && !u.CanCreateRepo() { +// if needsUpdateToReady is true, it will update the repository status to ready when success +func CreateRepositoryDirectly(ctx context.Context, doer, owner *user_model.User, + opts CreateRepoOptions, needsUpdateToReady bool, +) (*repo_model.Repository, error) { + if !doer.CanCreateRepoIn(owner) { return nil, repo_model.ErrReachLimitOfRepo{ - Limit: u.MaxRepoCreation, + Limit: owner.MaxRepoCreation, } } @@ -223,9 +229,9 @@ func CreateRepositoryDirectly(ctx context.Context, doer, u *user_model.User, opt } repo := &repo_model.Repository{ - OwnerID: u.ID, - Owner: u, - OwnerName: u.Name, + OwnerID: owner.ID, + Owner: owner, + OwnerName: owner.Name, Name: opts.Name, LowerName: strings.ToLower(opts.Name), Description: opts.Description, @@ -244,100 +250,91 @@ func CreateRepositoryDirectly(ctx context.Context, doer, u *user_model.User, opt ObjectFormatName: opts.ObjectFormatName, } - var rollbackRepo *repo_model.Repository - - if err := db.WithTx(ctx, func(ctx context.Context) error { - if err := CreateRepositoryByExample(ctx, doer, u, repo, false, false); err != nil { - return err - } - - // No need for init mirror. - if opts.IsMirror { - return nil - } + // 1 - create the repository database operations first + err := db.WithTx(ctx, func(ctx context.Context) error { + return createRepositoryInDB(ctx, doer, owner, repo, false) + }) + if err != nil { + return nil, err + } - isExist, err := gitrepo.IsRepositoryExist(ctx, repo) + // last - clean up if something goes wrong + // WARNING: Don't override all later err with local variables + defer func() { if err != nil { - log.Error("Unable to check if %s exists. Error: %v", repo.FullName(), err) - return err - } - if isExist { - // repo already exists - We have two or three options. - // 1. We fail stating that the directory exists - // 2. We create the db repository to go with this data and adopt the git repo - // 3. We delete it and start afresh - // - // Previously Gitea would just delete and start afresh - this was naughty. 
- // So we will now fail and delegate to other functionality to adopt or delete - log.Error("Files already exist in %s and we are not going to adopt or delete.", repo.FullName()) - return repo_model.ErrRepoFilesAlreadyExist{ - Uname: u.Name, - Name: repo.Name, - } + // we can not use the ctx because it maybe canceled or timeout + cleanupRepository(repo.ID) } + }() - if err = initRepository(ctx, doer, repo, opts); err != nil { - if err2 := gitrepo.DeleteRepository(ctx, repo); err2 != nil { - log.Error("initRepository: %v", err) - return fmt.Errorf( - "delete repo directory %s/%s failed(2): %v", u.Name, repo.Name, err2) - } - return fmt.Errorf("initRepository: %w", err) - } + // No need for init mirror. + if opts.IsMirror { + return repo, nil + } - // Initialize Issue Labels if selected - if len(opts.IssueLabels) > 0 { - if err = repo_module.InitializeLabels(ctx, repo.ID, opts.IssueLabels, false); err != nil { - rollbackRepo = repo - rollbackRepo.OwnerID = u.ID - return fmt.Errorf("InitializeLabels: %w", err) - } + // 2 - check whether the repository with the same storage exists + var isExist bool + isExist, err = gitrepo.IsRepositoryExist(ctx, repo) + if err != nil { + log.Error("Unable to check if %s exists. Error: %v", repo.FullName(), err) + return nil, err + } + if isExist { + log.Error("Files already exist in %s and we are not going to adopt or delete.", repo.FullName()) + // Don't return directly, we need err in defer to cleanupRepository + err = repo_model.ErrRepoFilesAlreadyExist{ + Uname: repo.OwnerName, + Name: repo.Name, } + return nil, err + } - if err := repo_module.CheckDaemonExportOK(ctx, repo); err != nil { - return fmt.Errorf("checkDaemonExportOK: %w", err) - } + // 3 - init git repository in storage + if err = initRepository(ctx, doer, repo, opts); err != nil { + return nil, fmt.Errorf("initRepository: %w", err) + } - if stdout, _, err := git.NewCommand("update-server-info"). 
- RunStdString(ctx, &git.RunOpts{Dir: repo.RepoPath()}); err != nil { - log.Error("CreateRepository(git update-server-info) in %v: Stdout: %s\nError: %v", repo, stdout, err) - rollbackRepo = repo - rollbackRepo.OwnerID = u.ID - return fmt.Errorf("CreateRepository(git update-server-info): %w", err) + // 4 - Initialize Issue Labels if selected + if len(opts.IssueLabels) > 0 { + if err = repo_module.InitializeLabels(ctx, repo.ID, opts.IssueLabels, false); err != nil { + return nil, fmt.Errorf("InitializeLabels: %w", err) } + } - // update licenses - var licenses []string - if len(opts.License) > 0 { - licenses = append(licenses, opts.License) + // 5 - Update the git repository + if err = updateGitRepoAfterCreate(ctx, repo); err != nil { + return nil, fmt.Errorf("updateGitRepoAfterCreate: %w", err) + } - stdout, _, err := git.NewCommand("rev-parse", "HEAD").RunStdString(ctx, &git.RunOpts{Dir: repo.RepoPath()}) - if err != nil { - log.Error("CreateRepository(git rev-parse HEAD) in %v: Stdout: %s\nError: %v", repo, stdout, err) - rollbackRepo = repo - rollbackRepo.OwnerID = u.ID - return fmt.Errorf("CreateRepository(git rev-parse HEAD): %w", err) - } - if err := repo_model.UpdateRepoLicenses(ctx, repo, stdout, licenses); err != nil { - return err - } + // 6 - update licenses + var licenses []string + if len(opts.License) > 0 { + licenses = append(licenses, opts.License) + + var stdout string + stdout, _, err = git.NewCommand("rev-parse", "HEAD").RunStdString(ctx, &git.RunOpts{Dir: repo.RepoPath()}) + if err != nil { + log.Error("CreateRepository(git rev-parse HEAD) in %v: Stdout: %s\nError: %v", repo, stdout, err) + return nil, fmt.Errorf("CreateRepository(git rev-parse HEAD): %w", err) } - return nil - }); err != nil { - if rollbackRepo != nil { - if errDelete := DeleteRepositoryDirectly(ctx, doer, rollbackRepo.ID); errDelete != nil { - log.Error("Rollback deleteRepository: %v", errDelete) - } + if err = repo_model.UpdateRepoLicenses(ctx, repo, stdout, licenses); err != nil { + return nil, err } + } - return nil, err + // 7 - update repository status to be ready + if needsUpdateToReady { + repo.Status = repo_model.RepositoryReady + if err = repo_model.UpdateRepositoryColsWithAutoTime(ctx, repo, "status"); err != nil { + return nil, fmt.Errorf("UpdateRepositoryCols: %w", err) + } } return repo, nil } -// CreateRepositoryByExample creates a repository for the user/organization. -func CreateRepositoryByExample(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository, overwriteOrAdopt, isFork bool) (err error) { +// createRepositoryInDB creates a repository for the user/organization. +func createRepositoryInDB(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository, isFork bool) (err error) { if err = repo_model.IsUsableRepoName(repo.Name); err != nil { return err } @@ -352,19 +349,6 @@ func CreateRepositoryByExample(ctx context.Context, doer, u *user_model.User, re } } - isExist, err := gitrepo.IsRepositoryExist(ctx, repo) - if err != nil { - log.Error("Unable to check if %s exists. 
Error: %v", repo.FullName(), err) - return err - } - if !overwriteOrAdopt && isExist { - log.Error("Files already exist in %s and we are not going to adopt or delete.", repo.FullName()) - return repo_model.ErrRepoFilesAlreadyExist{ - Uname: u.Name, - Name: repo.Name, - } - } - if err = db.Insert(ctx, repo); err != nil { return err } @@ -384,7 +368,8 @@ func CreateRepositoryByExample(ctx context.Context, doer, u *user_model.User, re } units := make([]repo_model.RepoUnit, 0, len(defaultUnits)) for _, tp := range defaultUnits { - if tp == unit.TypeIssues { + switch tp { + case unit.TypeIssues: units = append(units, repo_model.RepoUnit{ RepoID: repo.ID, Type: tp, @@ -394,7 +379,7 @@ func CreateRepositoryByExample(ctx context.Context, doer, u *user_model.User, re EnableDependencies: setting.Service.DefaultEnableDependencies, }, }) - } else if tp == unit.TypePullRequests { + case unit.TypePullRequests: units = append(units, repo_model.RepoUnit{ RepoID: repo.ID, Type: tp, @@ -404,13 +389,13 @@ func CreateRepositoryByExample(ctx context.Context, doer, u *user_model.User, re AllowRebaseUpdate: true, }, }) - } else if tp == unit.TypeProjects { + case unit.TypeProjects: units = append(units, repo_model.RepoUnit{ RepoID: repo.ID, Type: tp, Config: &repo_model.ProjectsConfig{ProjectsMode: repo_model.ProjectsModeAll}, }) - } else { + default: units = append(units, repo_model.RepoUnit{ RepoID: repo.ID, Type: tp, @@ -472,3 +457,26 @@ func CreateRepositoryByExample(ctx context.Context, doer, u *user_model.User, re return nil } + +func cleanupRepository(repoID int64) { + if errDelete := DeleteRepositoryDirectly(db.DefaultContext, repoID); errDelete != nil { + log.Error("cleanupRepository failed: %v", errDelete) + // add system notice + if err := system_model.CreateRepositoryNotice("DeleteRepositoryDirectly failed when cleanup repository: %v", errDelete); err != nil { + log.Error("CreateRepositoryNotice: %v", err) + } + } +} + +func updateGitRepoAfterCreate(ctx context.Context, repo *repo_model.Repository) error { + if err := checkDaemonExportOK(ctx, repo); err != nil { + return fmt.Errorf("checkDaemonExportOK: %w", err) + } + + if stdout, _, err := git.NewCommand("update-server-info"). + RunStdString(ctx, &git.RunOpts{Dir: repo.RepoPath()}); err != nil { + log.Error("CreateRepository(git update-server-info) in %v: Stdout: %s\nError: %v", repo, stdout, err) + return fmt.Errorf("CreateRepository(git update-server-info): %w", err) + } + return nil +} diff --git a/services/repository/create_test.go b/services/repository/create_test.go new file mode 100644 index 0000000000..fe464c1441 --- /dev/null +++ b/services/repository/create_test.go @@ -0,0 +1,57 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package repository + +import ( + "os" + "testing" + + "code.gitea.io/gitea/models/db" + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/util" + + "github.com/stretchr/testify/assert" +) + +func TestCreateRepositoryDirectly(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + // a successful creating repository + user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}) + + createdRepo, err := CreateRepositoryDirectly(git.DefaultContext, user2, user2, CreateRepoOptions{ + Name: "created-repo", + }, true) + assert.NoError(t, err) + assert.NotNil(t, createdRepo) + + exist, err := util.IsExist(repo_model.RepoPath(user2.Name, createdRepo.Name)) + assert.NoError(t, err) + assert.True(t, exist) + + unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerName: user2.Name, Name: createdRepo.Name}) + + err = DeleteRepositoryDirectly(db.DefaultContext, createdRepo.ID) + assert.NoError(t, err) + + // a failed creating because some mock data + // create the repository directory so that the creation will fail after database record created. + assert.NoError(t, os.MkdirAll(repo_model.RepoPath(user2.Name, createdRepo.Name), os.ModePerm)) + + createdRepo2, err := CreateRepositoryDirectly(db.DefaultContext, user2, user2, CreateRepoOptions{ + Name: "created-repo", + }, true) + assert.Nil(t, createdRepo2) + assert.Error(t, err) + + // assert the cleanup is successful + unittest.AssertNotExistsBean(t, &repo_model.Repository{OwnerName: user2.Name, Name: createdRepo.Name}) + + exist, err = util.IsExist(repo_model.RepoPath(user2.Name, createdRepo.Name)) + assert.NoError(t, err) + assert.False(t, exist) +} diff --git a/services/repository/delete.go b/services/repository/delete.go index ff74a20817..c48d6e1d56 100644 --- a/services/repository/delete.go +++ b/services/repository/delete.go @@ -27,14 +27,29 @@ import ( "code.gitea.io/gitea/modules/lfs" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/storage" + actions_service "code.gitea.io/gitea/services/actions" asymkey_service "code.gitea.io/gitea/services/asymkey" + issue_service "code.gitea.io/gitea/services/issue" "xorm.io/builder" ) +func deleteDBRepository(ctx context.Context, repoID int64) error { + if cnt, err := db.GetEngine(ctx).ID(repoID).Delete(&repo_model.Repository{}); err != nil { + return err + } else if cnt != 1 { + return repo_model.ErrRepoNotExist{ + ID: repoID, + OwnerName: "", + Name: "", + } + } + return nil +} + // DeleteRepository deletes a repository for a user or organization. 
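The new deleteDBRepository helper above reports a zero-row delete as repo_model.ErrRepoNotExist, so package-internal callers can treat an already-removed record as a no-op. A sketch, assuming repo_model.IsErrRepoNotExist is the usual accompanying check (it is not shown in this diff):

package repository

import (
	"context"

	repo_model "code.gitea.io/gitea/models/repo"
)

// deleteDBRepositoryIfPresent ignores the "row already gone" case and
// propagates every other error unchanged.
func deleteDBRepositoryIfPresent(ctx context.Context, repoID int64) error {
	if err := deleteDBRepository(ctx, repoID); err != nil && !repo_model.IsErrRepoNotExist(err) {
		return err
	}
	return nil
}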
// make sure if you call this func to close open sessions (sqlite will otherwise get a deadlock) -func DeleteRepositoryDirectly(ctx context.Context, doer *user_model.User, repoID int64, ignoreOrgTeams ...bool) error { +func DeleteRepositoryDirectly(ctx context.Context, repoID int64, ignoreOrgTeams ...bool) error { ctx, committer, err := db.TxContext(ctx) if err != nil { return err @@ -82,14 +97,8 @@ func DeleteRepositoryDirectly(ctx context.Context, doer *user_model.User, repoID } needRewriteKeysFile := deleted > 0 - if cnt, err := sess.ID(repoID).Delete(&repo_model.Repository{}); err != nil { + if err := deleteDBRepository(ctx, repoID); err != nil { return err - } else if cnt != 1 { - return repo_model.ErrRepoNotExist{ - ID: repoID, - OwnerName: "", - Name: "", - } } if org != nil && org.IsOrganization() { @@ -126,6 +135,14 @@ func DeleteRepositoryDirectly(ctx context.Context, doer *user_model.User, repoID return err } + // CleanupEphemeralRunnersByPickedTaskOfRepo deletes ephemeral global/org/user that have started any task of this repo + // The cannot pick a second task hardening for ephemeral runners expect that task objects remain available until runner deletion + // This method will delete affected ephemeral global/org/user runners + // &actions_model.ActionRunner{RepoID: repoID} does only handle ephemeral repository runners + if err := actions_service.CleanupEphemeralRunnersByPickedTaskOfRepo(ctx, repoID); err != nil { + return fmt.Errorf("cleanupEphemeralRunners: %w", err) + } + if err := db.DeleteBeans(ctx, &access_model.Access{RepoID: repo.ID}, &activities_model.Action{RepoID: repo.ID}, @@ -177,7 +194,7 @@ func DeleteRepositoryDirectly(ctx context.Context, doer *user_model.User, repoID // Delete Issues and related objects var attachmentPaths []string - if attachmentPaths, err = issues_model.DeleteIssuesByRepoID(ctx, repoID); err != nil { + if attachmentPaths, err = issue_service.DeleteIssuesByRepoID(ctx, repoID); err != nil { return err } @@ -358,7 +375,7 @@ func DeleteRepositoryDirectly(ctx context.Context, doer *user_model.User, repoID // DeleteOwnerRepositoriesDirectly calls DeleteRepositoryDirectly for all repos of the given owner func DeleteOwnerRepositoriesDirectly(ctx context.Context, owner *user_model.User) error { for { - repos, _, err := repo_model.GetUserRepositories(ctx, &repo_model.SearchRepoOptions{ + repos, _, err := repo_model.GetUserRepositories(ctx, repo_model.SearchRepoOptions{ ListOptions: db.ListOptions{ PageSize: repo_model.RepositoryListDefaultPageSize, Page: 1, @@ -374,7 +391,7 @@ func DeleteOwnerRepositoriesDirectly(ctx context.Context, owner *user_model.User break } for _, repo := range repos { - if err := DeleteRepositoryDirectly(ctx, owner, repo.ID); err != nil { + if err := DeleteRepositoryDirectly(ctx, repo.ID); err != nil { return fmt.Errorf("unable to delete repository %s for %s[%d]. 
Error: %w", repo.Name, owner.Name, owner.ID, err) } } diff --git a/services/repository/files/cherry_pick.go b/services/repository/files/cherry_pick.go index 0e069fb2ce..6818bb343d 100644 --- a/services/repository/files/cherry_pick.go +++ b/services/repository/files/cherry_pick.go @@ -5,6 +5,7 @@ package files import ( "context" + "errors" "fmt" "strings" @@ -100,7 +101,7 @@ func CherryPick(ctx context.Context, repo *repo_model.Repository, doer *user_mod } if conflict { - return nil, fmt.Errorf("failed to merge due to conflicts") + return nil, errors.New("failed to merge due to conflicts") } treeHash, err := t.WriteTree(ctx) diff --git a/services/repository/files/content.go b/services/repository/files/content.go index 0ab7422ce2..2c1e88bb59 100644 --- a/services/repository/files/content.go +++ b/services/repository/files/content.go @@ -5,17 +5,18 @@ package files import ( "context" - "fmt" + "io" "net/url" "path" "strings" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/modules/git" - "code.gitea.io/gitea/modules/gitrepo" + "code.gitea.io/gitea/modules/lfs" "code.gitea.io/gitea/modules/setting" api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/util" + "code.gitea.io/gitea/routers/api/v1/utils" ) // ContentType repo content type @@ -23,14 +24,10 @@ type ContentType string // The string representations of different content types const ( - // ContentTypeRegular regular content type (file) - ContentTypeRegular ContentType = "file" - // ContentTypeDir dir content type (dir) - ContentTypeDir ContentType = "dir" - // ContentLink link content type (symlink) - ContentTypeLink ContentType = "symlink" - // ContentTag submodule content type (submodule) - ContentTypeSubmodule ContentType = "submodule" + ContentTypeRegular ContentType = "file" // regular content type (file) + ContentTypeDir ContentType = "dir" // dir content type (dir) + ContentTypeLink ContentType = "symlink" // link content type (symlink) + ContentTypeSubmodule ContentType = "submodule" // submodule content type (submodule) ) // String gets the string of ContentType @@ -38,67 +35,52 @@ func (ct *ContentType) String() string { return string(*ct) } -// GetContentsOrList gets the meta data of a file's contents (*ContentsResponse) if treePath not a tree -// directory, otherwise a listing of file contents ([]*ContentsResponse). Ref can be a branch, commit or tag -func GetContentsOrList(ctx context.Context, repo *repo_model.Repository, treePath, ref string) (any, error) { - if repo.IsEmpty { - return make([]any, 0), nil - } - if ref == "" { - ref = repo.DefaultBranch - } - origRef := ref - - // Check that the path given in opts.treePath is valid (not a git path) - cleanTreePath := CleanUploadFileName(treePath) - if cleanTreePath == "" && treePath != "" { - return nil, ErrFilenameInvalid{ - Path: treePath, - } - } - treePath = cleanTreePath - - gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, repo) - if err != nil { - return nil, err - } - defer closer.Close() +type GetContentsOrListOptions struct { + TreePath string + IncludeSingleFileContent bool // include the file's content when the tree path is a file + IncludeLfsMetadata bool + IncludeCommitMetadata bool + IncludeCommitMessage bool +} - // Get the commit object for the ref - commit, err := gitRepo.GetCommit(ref) - if err != nil { - return nil, err +// GetContentsOrList gets the metadata of a file's contents (*ContentsResponse) if treePath not a tree +// directory, otherwise a listing of file contents ([]*ContentsResponse). 
Ref can be a branch, commit or tag +func GetContentsOrList(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, refCommit *utils.RefCommit, opts GetContentsOrListOptions) (ret api.ContentsExtResponse, _ error) { + entry, err := prepareGetContentsEntry(refCommit, &opts.TreePath) + if repo.IsEmpty && opts.TreePath == "" { + return api.ContentsExtResponse{DirContents: make([]*api.ContentsResponse, 0)}, nil } - - entry, err := commit.GetTreeEntryByPath(treePath) if err != nil { - return nil, err + return ret, err } + // get file contents if entry.Type() != "tree" { - return GetContents(ctx, repo, treePath, origRef, false) + ret.FileContents, err = getFileContentsByEntryInternal(ctx, repo, gitRepo, refCommit, entry, opts) + return ret, err } - // We are in a directory, so we return a list of FileContentResponse objects - var fileList []*api.ContentsResponse - - gitTree, err := commit.SubTree(treePath) + // list directory contents + gitTree, err := refCommit.Commit.SubTree(opts.TreePath) if err != nil { - return nil, err + return ret, err } entries, err := gitTree.ListEntries() if err != nil { - return nil, err + return ret, err } + ret.DirContents = make([]*api.ContentsResponse, 0, len(entries)) for _, e := range entries { - subTreePath := path.Join(treePath, e.Name()) - fileContentResponse, err := GetContents(ctx, repo, subTreePath, origRef, true) + subOpts := opts + subOpts.TreePath = path.Join(opts.TreePath, e.Name()) + subOpts.IncludeSingleFileContent = false // never include file content when listing a directory + fileContentResponse, err := GetFileContents(ctx, repo, gitRepo, refCommit, subOpts) if err != nil { - return nil, err + return ret, err } - fileList = append(fileList, fileContentResponse) + ret.DirContents = append(ret.DirContents, fileContentResponse) } - return fileList, nil + return ret, nil } // GetObjectTypeFromTreeEntry check what content is behind it @@ -117,86 +99,96 @@ func GetObjectTypeFromTreeEntry(entry *git.TreeEntry) ContentType { } } -// GetContents gets the meta data on a file's contents. 
Ref can be a branch, commit or tag -func GetContents(ctx context.Context, repo *repo_model.Repository, treePath, ref string, forList bool) (*api.ContentsResponse, error) { - if ref == "" { - ref = repo.DefaultBranch - } - origRef := ref - +func prepareGetContentsEntry(refCommit *utils.RefCommit, treePath *string) (*git.TreeEntry, error) { // Check that the path given in opts.treePath is valid (not a git path) - cleanTreePath := CleanUploadFileName(treePath) - if cleanTreePath == "" && treePath != "" { - return nil, ErrFilenameInvalid{ - Path: treePath, - } + cleanTreePath := CleanGitTreePath(*treePath) + if cleanTreePath == "" && *treePath != "" { + return nil, ErrFilenameInvalid{Path: *treePath} } - treePath = cleanTreePath + *treePath = cleanTreePath - gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, repo) - if err != nil { - return nil, err + // Only allow safe ref types + refType := refCommit.RefName.RefType() + if refType != git.RefTypeBranch && refType != git.RefTypeTag && refType != git.RefTypeCommit { + return nil, util.NewNotExistErrorf("no commit found for the ref [ref: %s]", refCommit.RefName) } - defer closer.Close() - // Get the commit object for the ref - commit, err := gitRepo.GetCommit(ref) - if err != nil { - return nil, err - } - commitID := commit.ID.String() - if len(ref) >= 4 && strings.HasPrefix(commitID, ref) { - ref = commit.ID.String() - } + return refCommit.Commit.GetTreeEntryByPath(*treePath) +} - entry, err := commit.GetTreeEntryByPath(treePath) +// GetFileContents gets the metadata on a file's contents. Ref can be a branch, commit or tag +func GetFileContents(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, refCommit *utils.RefCommit, opts GetContentsOrListOptions) (*api.ContentsResponse, error) { + entry, err := prepareGetContentsEntry(refCommit, &opts.TreePath) if err != nil { return nil, err } + return getFileContentsByEntryInternal(ctx, repo, gitRepo, refCommit, entry, opts) +} - refType := gitRepo.GetRefType(ref) - if refType == "invalid" { - return nil, fmt.Errorf("no commit found for the ref [ref: %s]", ref) - } - - selfURL, err := url.Parse(repo.APIURL() + "/contents/" + util.PathEscapeSegments(treePath) + "?ref=" + url.QueryEscape(origRef)) +func getFileContentsByEntryInternal(_ context.Context, repo *repo_model.Repository, gitRepo *git.Repository, refCommit *utils.RefCommit, entry *git.TreeEntry, opts GetContentsOrListOptions) (*api.ContentsResponse, error) { + refType := refCommit.RefName.RefType() + commit := refCommit.Commit + selfURL, err := url.Parse(repo.APIURL() + "/contents/" + util.PathEscapeSegments(opts.TreePath) + "?ref=" + url.QueryEscape(refCommit.InputRef)) if err != nil { return nil, err } selfURLString := selfURL.String() - err = gitRepo.AddLastCommitCache(repo.GetCommitsCountCacheKey(ref, refType != git.ObjectCommit), repo.FullName(), commitID) - if err != nil { - return nil, err - } - - lastCommit, err := commit.GetCommitByPath(treePath) - if err != nil { - return nil, err - } - // All content types have these fields in populated contentsResponse := &api.ContentsResponse{ - Name: entry.Name(), - Path: treePath, - SHA: entry.ID.String(), - LastCommitSHA: lastCommit.ID.String(), - Size: entry.Size(), - URL: &selfURLString, + Name: entry.Name(), + Path: opts.TreePath, + SHA: entry.ID.String(), + Size: entry.Size(), + URL: &selfURLString, Links: &api.FileLinksResponse{ Self: &selfURLString, }, } - // Now populate the rest of the ContentsResponse based on entry type + if 
opts.IncludeCommitMetadata || opts.IncludeCommitMessage { + err = gitRepo.AddLastCommitCache(repo.GetCommitsCountCacheKey(refCommit.InputRef, refType != git.RefTypeCommit), repo.FullName(), refCommit.CommitID) + if err != nil { + return nil, err + } + + lastCommit, err := refCommit.Commit.GetCommitByPath(opts.TreePath) + if err != nil { + return nil, err + } + + if opts.IncludeCommitMetadata { + contentsResponse.LastCommitSHA = util.ToPointer(lastCommit.ID.String()) + // GitHub doesn't have these fields in the response, but we could follow other similar APIs to name them + // https://docs.github.com/en/rest/commits/commits?apiVersion=2022-11-28#list-commits + if lastCommit.Committer != nil { + contentsResponse.LastCommitterDate = util.ToPointer(lastCommit.Committer.When) + } + if lastCommit.Author != nil { + contentsResponse.LastAuthorDate = util.ToPointer(lastCommit.Author.When) + } + } + if opts.IncludeCommitMessage { + contentsResponse.LastCommitMessage = util.ToPointer(lastCommit.Message()) + } + } + + // Now populate the rest of the ContentsResponse based on the entry type if entry.IsRegular() || entry.IsExecutable() { contentsResponse.Type = string(ContentTypeRegular) - if blobResponse, err := GetBlobBySHA(ctx, repo, gitRepo, entry.ID.String()); err != nil { - return nil, err - } else if !forList { - // We don't show the content if we are getting a list of FileContentResponses - contentsResponse.Encoding = &blobResponse.Encoding - contentsResponse.Content = &blobResponse.Content + // if it is listing the repo root dir, don't waste system resources on reading content + if opts.IncludeSingleFileContent { + blobResponse, err := GetBlobBySHA(repo, gitRepo, entry.ID.String()) + if err != nil { + return nil, err + } + contentsResponse.Encoding, contentsResponse.Content = blobResponse.Encoding, blobResponse.Content + contentsResponse.LfsOid, contentsResponse.LfsSize = blobResponse.LfsOid, blobResponse.LfsSize + } else if opts.IncludeLfsMetadata { + contentsResponse.LfsOid, contentsResponse.LfsSize, err = parsePossibleLfsPointerBlob(gitRepo, entry.ID.String()) + if err != nil { + return nil, err + } } } else if entry.IsDir() { contentsResponse.Type = string(ContentTypeDir) @@ -210,7 +202,7 @@ func GetContents(ctx context.Context, repo *repo_model.Repository, treePath, ref contentsResponse.Target = &targetFromContent } else if entry.IsSubModule() { contentsResponse.Type = string(ContentTypeSubmodule) - submodule, err := commit.GetSubModule(treePath) + submodule, err := commit.GetSubModule(opts.TreePath) if err != nil { return nil, err } @@ -220,7 +212,7 @@ func GetContents(ctx context.Context, repo *repo_model.Repository, treePath, ref } // Handle links if entry.IsRegular() || entry.IsLink() || entry.IsExecutable() { - downloadURL, err := url.Parse(repo.HTMLURL() + "/raw/" + url.PathEscape(string(refType)) + "/" + util.PathEscapeSegments(ref) + "/" + util.PathEscapeSegments(treePath)) + downloadURL, err := url.Parse(repo.HTMLURL() + "/raw/" + refCommit.RefName.RefWebLinkPath() + "/" + util.PathEscapeSegments(opts.TreePath)) if err != nil { return nil, err } @@ -228,7 +220,7 @@ func GetContents(ctx context.Context, repo *repo_model.Repository, treePath, ref contentsResponse.DownloadURL = &downloadURLString } if !entry.IsSubModule() { - htmlURL, err := url.Parse(repo.HTMLURL() + "/src/" + url.PathEscape(string(refType)) + "/" + util.PathEscapeSegments(ref) + "/" + util.PathEscapeSegments(treePath)) + htmlURL, err := url.Parse(repo.HTMLURL() + "/src/" + refCommit.RefName.RefWebLinkPath() + "/" + 
util.PathEscapeSegments(opts.TreePath)) if err != nil { return nil, err } @@ -248,49 +240,59 @@ func GetContents(ctx context.Context, repo *repo_model.Repository, treePath, ref return contentsResponse, nil } -// GetBlobBySHA get the GitBlobResponse of a repository using a sha hash. -func GetBlobBySHA(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, sha string) (*api.GitBlobResponse, error) { +func GetBlobBySHA(repo *repo_model.Repository, gitRepo *git.Repository, sha string) (*api.GitBlobResponse, error) { gitBlob, err := gitRepo.GetBlob(sha) if err != nil { return nil, err } - content := "" - if gitBlob.Size() <= setting.API.DefaultMaxBlobSize { - content, err = gitBlob.GetBlobContentBase64() - if err != nil { - return nil, err - } + ret := &api.GitBlobResponse{ + SHA: gitBlob.ID.String(), + URL: repo.APIURL() + "/git/blobs/" + url.PathEscape(gitBlob.ID.String()), + Size: gitBlob.Size(), } - return &api.GitBlobResponse{ - SHA: gitBlob.ID.String(), - URL: repo.APIURL() + "/git/blobs/" + url.PathEscape(gitBlob.ID.String()), - Size: gitBlob.Size(), - Encoding: "base64", - Content: content, - }, nil -} -// TryGetContentLanguage tries to get the (linguist) language of the file content -func TryGetContentLanguage(gitRepo *git.Repository, commitID, treePath string) (string, error) { - indexFilename, worktree, deleteTemporaryFile, err := gitRepo.ReadTreeToTemporaryIndex(commitID) - if err != nil { - return "", err + blobSize := gitBlob.Size() + if blobSize > setting.API.DefaultMaxBlobSize { + return ret, nil } - defer deleteTemporaryFile() + var originContent *strings.Builder + if 0 < blobSize && blobSize < lfs.MetaFileMaxSize { + originContent = &strings.Builder{} + } - filename2attribute2info, err := gitRepo.CheckAttribute(git.CheckAttributeOpts{ - CachedOnly: true, - Attributes: []string{git.AttributeLinguistLanguage, git.AttributeGitlabLanguage}, - Filenames: []string{treePath}, - IndexFile: indexFilename, - WorkTree: worktree, - }) + content, err := gitBlob.GetBlobContentBase64(originContent) if err != nil { - return "", err + return nil, err + } + + ret.Encoding, ret.Content = util.ToPointer("base64"), &content + if originContent != nil { + ret.LfsOid, ret.LfsSize = parsePossibleLfsPointerBuffer(strings.NewReader(originContent.String())) } + return ret, nil +} - language := git.TryReadLanguageAttribute(filename2attribute2info[treePath]) +func parsePossibleLfsPointerBuffer(r io.Reader) (*string, *int64) { + p, _ := lfs.ReadPointer(r) + if p.IsValid() { + return &p.Oid, &p.Size + } + return nil, nil +} - return language.Value(), nil +func parsePossibleLfsPointerBlob(gitRepo *git.Repository, sha string) (*string, *int64, error) { + gitBlob, err := gitRepo.GetBlob(sha) + if err != nil { + return nil, nil, err + } + if gitBlob.Size() > lfs.MetaFileMaxSize { + return nil, nil, nil // not a LFS pointer + } + buf, err := gitBlob.GetBlobContent(lfs.MetaFileMaxSize) + if err != nil { + return nil, nil, err + } + oid, size := parsePossibleLfsPointerBuffer(strings.NewReader(buf)) + return oid, size, nil } diff --git a/services/repository/files/content_test.go b/services/repository/files/content_test.go index 7cb46c0bb6..d72f918074 100644 --- a/services/repository/files/content_test.go +++ b/services/repository/files/content_test.go @@ -7,8 +7,8 @@ import ( "testing" "code.gitea.io/gitea/models/unittest" - "code.gitea.io/gitea/modules/gitrepo" api "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/services/contexttest" _ 
"code.gitea.io/gitea/models/actions" @@ -20,36 +20,6 @@ func TestMain(m *testing.M) { unittest.MainTest(m) } -func getExpectedReadmeContentsResponse() *api.ContentsResponse { - treePath := "README.md" - sha := "4b4851ad51df6a7d9f25c979345979eaeb5b349f" - encoding := "base64" - content := "IyByZXBvMQoKRGVzY3JpcHRpb24gZm9yIHJlcG8x" - selfURL := "https://try.gitea.io/api/v1/repos/user2/repo1/contents/" + treePath + "?ref=master" - htmlURL := "https://try.gitea.io/user2/repo1/src/branch/master/" + treePath - gitURL := "https://try.gitea.io/api/v1/repos/user2/repo1/git/blobs/" + sha - downloadURL := "https://try.gitea.io/user2/repo1/raw/branch/master/" + treePath - return &api.ContentsResponse{ - Name: treePath, - Path: treePath, - SHA: "4b4851ad51df6a7d9f25c979345979eaeb5b349f", - LastCommitSHA: "65f1bf27bc3bf70f64657658635e66094edbcb4d", - Type: "file", - Size: 30, - Encoding: &encoding, - Content: &content, - URL: &selfURL, - HTMLURL: &htmlURL, - GitURL: &gitURL, - DownloadURL: &downloadURL, - Links: &api.FileLinksResponse{ - Self: &selfURL, - GitURL: &gitURL, - HTMLURL: &htmlURL, - }, - } -} - func TestGetContents(t *testing.T) { unittest.PrepareTestEnv(t) ctx, _ := contexttest.MockContext(t, "user2/repo1") @@ -58,195 +28,22 @@ func TestGetContents(t *testing.T) { contexttest.LoadRepoCommit(t, ctx) contexttest.LoadUser(t, ctx, 2) contexttest.LoadGitRepo(t, ctx) - defer ctx.Repo.GitRepo.Close() - - treePath := "README.md" - ref := ctx.Repo.Repository.DefaultBranch - - expectedContentsResponse := getExpectedReadmeContentsResponse() - - t.Run("Get README.md contents with GetContents(ctx, )", func(t *testing.T) { - fileContentResponse, err := GetContents(ctx, ctx.Repo.Repository, treePath, ref, false) - assert.EqualValues(t, expectedContentsResponse, fileContentResponse) - assert.NoError(t, err) - }) - - t.Run("Get README.md contents with ref as empty string (should then use the repo's default branch) with GetContents(ctx, )", func(t *testing.T) { - fileContentResponse, err := GetContents(ctx, ctx.Repo.Repository, treePath, "", false) - assert.EqualValues(t, expectedContentsResponse, fileContentResponse) - assert.NoError(t, err) - }) -} - -func TestGetContentsOrListForDir(t *testing.T) { - unittest.PrepareTestEnv(t) - ctx, _ := contexttest.MockContext(t, "user2/repo1") - ctx.SetPathParam("id", "1") - contexttest.LoadRepo(t, ctx, 1) - contexttest.LoadRepoCommit(t, ctx) - contexttest.LoadUser(t, ctx, 2) - contexttest.LoadGitRepo(t, ctx) - defer ctx.Repo.GitRepo.Close() - - treePath := "" // root dir - ref := ctx.Repo.Repository.DefaultBranch - - readmeContentsResponse := getExpectedReadmeContentsResponse() - // because will be in a list, doesn't have encoding and content - readmeContentsResponse.Encoding = nil - readmeContentsResponse.Content = nil - - expectedContentsListResponse := []*api.ContentsResponse{ - readmeContentsResponse, - } - - t.Run("Get root dir contents with GetContentsOrList(ctx, )", func(t *testing.T) { - fileContentResponse, err := GetContentsOrList(ctx, ctx.Repo.Repository, treePath, ref) - assert.EqualValues(t, expectedContentsListResponse, fileContentResponse) - assert.NoError(t, err) - }) - - t.Run("Get root dir contents with ref as empty string (should then use the repo's default branch) with GetContentsOrList(ctx, )", func(t *testing.T) { - fileContentResponse, err := GetContentsOrList(ctx, ctx.Repo.Repository, treePath, "") - assert.EqualValues(t, expectedContentsListResponse, fileContentResponse) - assert.NoError(t, err) - }) -} - -func TestGetContentsOrListForFile(t 
*testing.T) { - unittest.PrepareTestEnv(t) - ctx, _ := contexttest.MockContext(t, "user2/repo1") - ctx.SetPathParam("id", "1") - contexttest.LoadRepo(t, ctx, 1) - contexttest.LoadRepoCommit(t, ctx) - contexttest.LoadUser(t, ctx, 2) - contexttest.LoadGitRepo(t, ctx) - defer ctx.Repo.GitRepo.Close() - - treePath := "README.md" - ref := ctx.Repo.Repository.DefaultBranch - - expectedContentsResponse := getExpectedReadmeContentsResponse() - - t.Run("Get README.md contents with GetContentsOrList(ctx, )", func(t *testing.T) { - fileContentResponse, err := GetContentsOrList(ctx, ctx.Repo.Repository, treePath, ref) - assert.EqualValues(t, expectedContentsResponse, fileContentResponse) - assert.NoError(t, err) - }) - - t.Run("Get README.md contents with ref as empty string (should then use the repo's default branch) with GetContentsOrList(ctx, )", func(t *testing.T) { - fileContentResponse, err := GetContentsOrList(ctx, ctx.Repo.Repository, treePath, "") - assert.EqualValues(t, expectedContentsResponse, fileContentResponse) - assert.NoError(t, err) - }) -} - -func TestGetContentsErrors(t *testing.T) { - unittest.PrepareTestEnv(t) - ctx, _ := contexttest.MockContext(t, "user2/repo1") - ctx.SetPathParam("id", "1") - contexttest.LoadRepo(t, ctx, 1) - contexttest.LoadRepoCommit(t, ctx) - contexttest.LoadUser(t, ctx, 2) - contexttest.LoadGitRepo(t, ctx) - defer ctx.Repo.GitRepo.Close() - - repo := ctx.Repo.Repository - treePath := "README.md" - ref := repo.DefaultBranch - - t.Run("bad treePath", func(t *testing.T) { - badTreePath := "bad/tree.md" - fileContentResponse, err := GetContents(ctx, repo, badTreePath, ref, false) - assert.Error(t, err) - assert.EqualError(t, err, "object does not exist [id: , rel_path: bad]") - assert.Nil(t, fileContentResponse) - }) - - t.Run("bad ref", func(t *testing.T) { - badRef := "bad_ref" - fileContentResponse, err := GetContents(ctx, repo, treePath, badRef, false) - assert.Error(t, err) - assert.EqualError(t, err, "object does not exist [id: "+badRef+", rel_path: ]") - assert.Nil(t, fileContentResponse) - }) -} - -func TestGetContentsOrListErrors(t *testing.T) { - unittest.PrepareTestEnv(t) - ctx, _ := contexttest.MockContext(t, "user2/repo1") - ctx.SetPathParam("id", "1") - contexttest.LoadRepo(t, ctx, 1) - contexttest.LoadRepoCommit(t, ctx) - contexttest.LoadUser(t, ctx, 2) - contexttest.LoadGitRepo(t, ctx) - defer ctx.Repo.GitRepo.Close() - repo := ctx.Repo.Repository - treePath := "README.md" - ref := repo.DefaultBranch - - t.Run("bad treePath", func(t *testing.T) { - badTreePath := "bad/tree.md" - fileContentResponse, err := GetContentsOrList(ctx, repo, badTreePath, ref) - assert.Error(t, err) - assert.EqualError(t, err, "object does not exist [id: , rel_path: bad]") - assert.Nil(t, fileContentResponse) - }) - - t.Run("bad ref", func(t *testing.T) { - badRef := "bad_ref" - fileContentResponse, err := GetContentsOrList(ctx, repo, treePath, badRef) - assert.Error(t, err) - assert.EqualError(t, err, "object does not exist [id: "+badRef+", rel_path: ]") - assert.Nil(t, fileContentResponse) - }) -} - -func TestGetContentsOrListOfEmptyRepos(t *testing.T) { - unittest.PrepareTestEnv(t) - ctx, _ := contexttest.MockContext(t, "user30/empty") - ctx.SetPathParam("id", "52") - contexttest.LoadRepo(t, ctx, 52) - contexttest.LoadUser(t, ctx, 30) - contexttest.LoadGitRepo(t, ctx) - defer ctx.Repo.GitRepo.Close() - - repo := ctx.Repo.Repository - - t.Run("empty repo", func(t *testing.T) { - contents, err := GetContentsOrList(ctx, repo, "", "") + // GetContentsOrList's behavior is 
fully tested in integration tests, so we don't need to test it here. + + t.Run("GetBlobBySHA", func(t *testing.T) { + sha := "65f1bf27bc3bf70f64657658635e66094edbcb4d" + ctx.SetPathParam("id", "1") + ctx.SetPathParam("sha", sha) + gbr, err := GetBlobBySHA(ctx.Repo.Repository, ctx.Repo.GitRepo, ctx.PathParam("sha")) + expectedGBR := &api.GitBlobResponse{ + Content: util.ToPointer("dHJlZSAyYTJmMWQ0NjcwNzI4YTJlMTAwNDllMzQ1YmQ3YTI3NjQ2OGJlYWI2CmF1dGhvciB1c2VyMSA8YWRkcmVzczFAZXhhbXBsZS5jb20+IDE0ODk5NTY0NzkgLTA0MDAKY29tbWl0dGVyIEV0aGFuIEtvZW5pZyA8ZXRoYW50a29lbmlnQGdtYWlsLmNvbT4gMTQ4OTk1NjQ3OSAtMDQwMAoKSW5pdGlhbCBjb21taXQK"), + Encoding: util.ToPointer("base64"), + URL: "https://try.gitea.io/api/v1/repos/user2/repo1/git/blobs/65f1bf27bc3bf70f64657658635e66094edbcb4d", + SHA: "65f1bf27bc3bf70f64657658635e66094edbcb4d", + Size: 180, + } assert.NoError(t, err) - assert.Empty(t, contents) + assert.Equal(t, expectedGBR, gbr) }) } - -func TestGetBlobBySHA(t *testing.T) { - unittest.PrepareTestEnv(t) - ctx, _ := contexttest.MockContext(t, "user2/repo1") - contexttest.LoadRepo(t, ctx, 1) - contexttest.LoadRepoCommit(t, ctx) - contexttest.LoadUser(t, ctx, 2) - contexttest.LoadGitRepo(t, ctx) - defer ctx.Repo.GitRepo.Close() - - sha := "65f1bf27bc3bf70f64657658635e66094edbcb4d" - ctx.SetPathParam("id", "1") - ctx.SetPathParam("sha", sha) - - gitRepo, err := gitrepo.OpenRepository(ctx, ctx.Repo.Repository) - if err != nil { - t.Fail() - } - - gbr, err := GetBlobBySHA(ctx, ctx.Repo.Repository, gitRepo, ctx.PathParam("sha")) - expectedGBR := &api.GitBlobResponse{ - Content: "dHJlZSAyYTJmMWQ0NjcwNzI4YTJlMTAwNDllMzQ1YmQ3YTI3NjQ2OGJlYWI2CmF1dGhvciB1c2VyMSA8YWRkcmVzczFAZXhhbXBsZS5jb20+IDE0ODk5NTY0NzkgLTA0MDAKY29tbWl0dGVyIEV0aGFuIEtvZW5pZyA8ZXRoYW50a29lbmlnQGdtYWlsLmNvbT4gMTQ4OTk1NjQ3OSAtMDQwMAoKSW5pdGlhbCBjb21taXQK", - Encoding: "base64", - URL: "https://try.gitea.io/api/v1/repos/user2/repo1/git/blobs/65f1bf27bc3bf70f64657658635e66094edbcb4d", - SHA: "65f1bf27bc3bf70f64657658635e66094edbcb4d", - Size: 180, - } - assert.NoError(t, err) - assert.Equal(t, expectedGBR, gbr) -} diff --git a/services/repository/files/diff.go b/services/repository/files/diff.go index 0b3550452a..50d01f9d7c 100644 --- a/services/repository/files/diff.go +++ b/services/repository/files/diff.go @@ -29,7 +29,7 @@ func GetDiffPreview(ctx context.Context, repo *repo_model.Repository, branch, tr } // Add the object to the database - objectHash, err := t.HashObject(ctx, strings.NewReader(content)) + objectHash, err := t.HashObjectAndWrite(ctx, strings.NewReader(content)) if err != nil { return nil, err } diff --git a/services/repository/files/diff_test.go b/services/repository/files/diff_test.go index a8514791cc..ae702e4189 100644 --- a/services/repository/files/diff_test.go +++ b/services/repository/files/diff_test.go @@ -118,7 +118,7 @@ func TestGetDiffPreview(t *testing.T) { assert.NoError(t, err) bs, err := json.Marshal(diff) assert.NoError(t, err) - assert.EqualValues(t, string(expectedBs), string(bs)) + assert.Equal(t, string(expectedBs), string(bs)) }) t.Run("empty branch, same results", func(t *testing.T) { @@ -128,7 +128,7 @@ func TestGetDiffPreview(t *testing.T) { assert.NoError(t, err) bs, err := json.Marshal(diff) assert.NoError(t, err) - assert.EqualValues(t, expectedBs, bs) + assert.Equal(t, expectedBs, bs) }) } diff --git a/services/repository/files/file.go b/services/repository/files/file.go index 2caa1b4946..13d171d139 100644 --- a/services/repository/files/file.go +++ b/services/repository/files/file.go @@ -5,6 +5,7 @@ package 
files import ( "context" + "errors" "fmt" "net/url" "strings" @@ -12,18 +13,40 @@ import ( repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/setting" api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/util" + "code.gitea.io/gitea/routers/api/v1/utils" ) -func GetFilesResponseFromCommit(ctx context.Context, repo *repo_model.Repository, commit *git.Commit, branch string, treeNames []string) (*api.FilesResponse, error) { - files := []*api.ContentsResponse{} - for _, file := range treeNames { - fileContents, _ := GetContents(ctx, repo, file, branch, false) // ok if fails, then will be nil +func GetContentsListFromTreePaths(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, refCommit *utils.RefCommit, treePaths []string) (files []*api.ContentsResponse) { + var size int64 + for _, treePath := range treePaths { + // ok if fails, then will be nil + fileContents, _ := GetFileContents(ctx, repo, gitRepo, refCommit, GetContentsOrListOptions{ + TreePath: treePath, + IncludeSingleFileContent: true, + IncludeCommitMetadata: true, + }) + if fileContents != nil && fileContents.Content != nil && *fileContents.Content != "" { + // if content isn't empty (e.g., due to the single blob being too large), add file size to response size + size += int64(len(*fileContents.Content)) + } + if size > setting.API.DefaultMaxResponseSize { + break // stop if max response size would be exceeded + } files = append(files, fileContents) + if len(files) == setting.API.DefaultPagingNum { + break // stop if paging num reached + } } - fileCommitResponse, _ := GetFileCommitResponse(repo, commit) // ok if fails, then will be nil - verification := GetPayloadCommitVerification(ctx, commit) + return files +} + +func GetFilesResponseFromCommit(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, refCommit *utils.RefCommit, treeNames []string) (*api.FilesResponse, error) { + files := GetContentsListFromTreePaths(ctx, repo, gitRepo, refCommit, treeNames) + fileCommitResponse, _ := GetFileCommitResponse(repo, refCommit.Commit) // ok if fails, then will be nil + verification := GetPayloadCommitVerification(ctx, refCommit.Commit) filesResponse := &api.FilesResponse{ Files: files, Commit: fileCommitResponse, @@ -32,19 +55,6 @@ func GetFilesResponseFromCommit(ctx context.Context, repo *repo_model.Repository return filesResponse, nil } -// GetFileResponseFromCommit Constructs a FileResponse from a Commit object -func GetFileResponseFromCommit(ctx context.Context, repo *repo_model.Repository, commit *git.Commit, branch, treeName string) (*api.FileResponse, error) { - fileContents, _ := GetContents(ctx, repo, treeName, branch, false) // ok if fails, then will be nil - fileCommitResponse, _ := GetFileCommitResponse(repo, commit) // ok if fails, then will be nil - verification := GetPayloadCommitVerification(ctx, commit) - fileResponse := &api.FileResponse{ - Content: fileContents, - Commit: fileCommitResponse, - Verification: verification, - } - return fileResponse, nil -} - // constructs a FileResponse with the file at the index from FilesResponse func GetFileResponseFromFilesResponse(filesResponse *api.FilesResponse, index int) *api.FileResponse { content := &api.ContentsResponse{} @@ -62,10 +72,10 @@ func GetFileResponseFromFilesResponse(filesResponse *api.FilesResponse, index in // GetFileCommitResponse Constructs a FileCommitResponse from a Commit object func GetFileCommitResponse(repo *repo_model.Repository, commit 
*git.Commit) (*api.FileCommitResponse, error) { if repo == nil { - return nil, fmt.Errorf("repo cannot be nil") + return nil, errors.New("repo cannot be nil") } if commit == nil { - return nil, fmt.Errorf("commit cannot be nil") + return nil, errors.New("commit cannot be nil") } commitURL, _ := url.Parse(repo.APIURL() + "/git/commits/" + url.PathEscape(commit.ID.String())) commitTreeURL, _ := url.Parse(repo.APIURL() + "/git/trees/" + url.PathEscape(commit.Tree.ID.String())) @@ -129,15 +139,17 @@ func (err ErrFilenameInvalid) Unwrap() error { return util.ErrInvalidArgument } -// CleanUploadFileName Trims a filename and returns empty string if it is a .git directory -func CleanUploadFileName(name string) string { - // Rebase the filename +// CleanGitTreePath cleans a tree path for git, it returns an empty string if the path is invalid (e.g. it contains a ".git" part) +func CleanGitTreePath(name string) string { name = util.PathJoinRel(name) // Git disallows any filenames to have a .git directory in them. - for _, part := range strings.Split(name, "/") { + for part := range strings.SplitSeq(name, "/") { if strings.ToLower(part) == ".git" { return "" } } + if name == "." { + name = "" + } return name } diff --git a/services/repository/files/file_test.go b/services/repository/files/file_test.go index 52c0574883..cdb6a266ff 100644 --- a/services/repository/files/file_test.go +++ b/services/repository/files/file_test.go @@ -6,115 +6,22 @@ package files import ( "testing" - "code.gitea.io/gitea/models/unittest" - "code.gitea.io/gitea/modules/gitrepo" - "code.gitea.io/gitea/modules/setting" - api "code.gitea.io/gitea/modules/structs" - "code.gitea.io/gitea/services/contexttest" - "github.com/stretchr/testify/assert" ) func TestCleanUploadFileName(t *testing.T) { - t.Run("Clean regular file", func(t *testing.T) { - name := "this/is/test" - cleanName := CleanUploadFileName(name) - expectedCleanName := name - assert.EqualValues(t, expectedCleanName, cleanName) - }) - - t.Run("Clean a .git path", func(t *testing.T) { - name := "this/is/test/.git" - cleanName := CleanUploadFileName(name) - expectedCleanName := "" - assert.EqualValues(t, expectedCleanName, cleanName) - }) -} - -func getExpectedFileResponse() *api.FileResponse { - treePath := "README.md" - sha := "4b4851ad51df6a7d9f25c979345979eaeb5b349f" - encoding := "base64" - content := "IyByZXBvMQoKRGVzY3JpcHRpb24gZm9yIHJlcG8x" - selfURL := setting.AppURL + "api/v1/repos/user2/repo1/contents/" + treePath + "?ref=master" - htmlURL := setting.AppURL + "user2/repo1/src/branch/master/" + treePath - gitURL := setting.AppURL + "api/v1/repos/user2/repo1/git/blobs/" + sha - downloadURL := setting.AppURL + "user2/repo1/raw/branch/master/" + treePath - return &api.FileResponse{ - Content: &api.ContentsResponse{ - Name: treePath, - Path: treePath, - SHA: sha, - LastCommitSHA: "65f1bf27bc3bf70f64657658635e66094edbcb4d", - Type: "file", - Size: 30, - Encoding: &encoding, - Content: &content, - URL: &selfURL, - HTMLURL: &htmlURL, - GitURL: &gitURL, - DownloadURL: &downloadURL, - Links: &api.FileLinksResponse{ - Self: &selfURL, - GitURL: &gitURL, - HTMLURL: &htmlURL, - }, - }, - Commit: &api.FileCommitResponse{ - CommitMeta: api.CommitMeta{ - URL: "https://try.gitea.io/api/v1/repos/user2/repo1/git/commits/65f1bf27bc3bf70f64657658635e66094edbcb4d", - SHA: "65f1bf27bc3bf70f64657658635e66094edbcb4d", - }, - HTMLURL: "https://try.gitea.io/user2/repo1/commit/65f1bf27bc3bf70f64657658635e66094edbcb4d", - Author: &api.CommitUser{ - Identity: api.Identity{ - Name: "user1",
Email: "address1@example.com", - }, - Date: "2017-03-19T20:47:59Z", - }, - Committer: &api.CommitUser{ - Identity: api.Identity{ - Name: "Ethan Koenig", - Email: "ethantkoenig@gmail.com", - }, - Date: "2017-03-19T20:47:59Z", - }, - Parents: []*api.CommitMeta{}, - Message: "Initial commit\n", - Tree: &api.CommitMeta{ - URL: "https://try.gitea.io/api/v1/repos/user2/repo1/git/trees/2a2f1d4670728a2e10049e345bd7a276468beab6", - SHA: "2a2f1d4670728a2e10049e345bd7a276468beab6", - }, - }, - Verification: &api.PayloadCommitVerification{ - Verified: false, - Reason: "gpg.error.not_signed_commit", - Signature: "", - Payload: "", - }, + cases := []struct { + input, expected string + }{ + {"", ""}, + {".", ""}, + {"a/./b", "a/b"}, + {"a.git", "a.git"}, + {".git/b", ""}, + {"a/.git", ""}, + {"/a/../../b", "b"}, + } + for _, c := range cases { + assert.Equal(t, c.expected, CleanGitTreePath(c.input), "input: %q", c.input) } -} - -func TestGetFileResponseFromCommit(t *testing.T) { - unittest.PrepareTestEnv(t) - ctx, _ := contexttest.MockContext(t, "user2/repo1") - ctx.SetPathParam("id", "1") - contexttest.LoadRepo(t, ctx, 1) - contexttest.LoadRepoCommit(t, ctx) - contexttest.LoadUser(t, ctx, 2) - contexttest.LoadGitRepo(t, ctx) - defer ctx.Repo.GitRepo.Close() - - repo := ctx.Repo.Repository - branch := repo.DefaultBranch - treePath := "README.md" - gitRepo, _ := gitrepo.OpenRepository(ctx, repo) - defer gitRepo.Close() - commit, _ := gitRepo.GetBranchCommit(branch) - expectedFileResponse := getExpectedFileResponse() - - fileResponse, err := GetFileResponseFromCommit(ctx, repo, commit, branch, treePath) - assert.NoError(t, err) - assert.EqualValues(t, expectedFileResponse, fileResponse) } diff --git a/services/repository/files/patch.go b/services/repository/files/patch.go index 1941adb86a..11a8744b7f 100644 --- a/services/repository/files/patch.go +++ b/services/repository/files/patch.go @@ -12,7 +12,6 @@ import ( repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" - "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/util" @@ -45,7 +44,6 @@ type ApplyDiffPatchOptions struct { NewBranch string Message string Content string - SHA string Author *IdentityOptions Committer *IdentityOptions Dates *CommitDateOptions @@ -62,29 +60,26 @@ func (opts *ApplyDiffPatchOptions) Validate(ctx context.Context, repo *repo_mode opts.NewBranch = opts.OldBranch } - gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, repo) - if err != nil { - return err - } - defer closer.Close() - // oldBranch must exist for this operation - if _, err := gitRepo.GetBranch(opts.OldBranch); err != nil { + if exist, err := git_model.IsBranchExist(ctx, repo.ID, opts.OldBranch); err != nil { return err + } else if !exist { + return git_model.ErrBranchNotExist{ + BranchName: opts.OldBranch, + } } // A NewBranch can be specified for the patch to be applied to. // Check to make sure the branch does not already exist, otherwise we can't proceed. 
// If we aren't branching to a new branch, make sure user can commit to the given branch if opts.NewBranch != opts.OldBranch { - existingBranch, err := gitRepo.GetBranch(opts.NewBranch) - if existingBranch != nil { + exist, err := git_model.IsBranchExist(ctx, repo.ID, opts.NewBranch) + if err != nil { + return err + } else if exist { return git_model.ErrBranchAlreadyExists{ BranchName: opts.NewBranch, } } - if err != nil && !git.IsErrBranchNotExist(err) { - return err - } } else { protectedBranch, err := git_model.GetFirstMatchProtectedBranchRule(ctx, repo.ID, opts.OldBranch) if err != nil { diff --git a/services/repository/files/temp_repo.go b/services/repository/files/temp_repo.go index d2c70a7a34..c2f61c8223 100644 --- a/services/repository/files/temp_repo.go +++ b/services/repository/files/temp_repo.go @@ -6,6 +6,7 @@ package files import ( "bytes" "context" + "errors" "fmt" "io" "os" @@ -29,23 +30,24 @@ type TemporaryUploadRepository struct { repo *repo_model.Repository gitRepo *git.Repository basePath string + cleanup func() } // NewTemporaryUploadRepository creates a new temporary upload repository func NewTemporaryUploadRepository(repo *repo_model.Repository) (*TemporaryUploadRepository, error) { - basePath, err := repo_module.CreateTemporaryPath("upload") + basePath, cleanup, err := repo_module.CreateTemporaryPath("upload") if err != nil { return nil, err } - t := &TemporaryUploadRepository{repo: repo, basePath: basePath} + t := &TemporaryUploadRepository{repo: repo, basePath: basePath, cleanup: cleanup} return t, nil } // Close the repository cleaning up all files func (t *TemporaryUploadRepository) Close() { defer t.gitRepo.Close() - if err := repo_module.RemoveTemporaryPath(t.basePath); err != nil { - log.Error("Failed to remove temporary path %s: %v", t.basePath, err) + if t.cleanup != nil { + t.cleanup() } } @@ -126,7 +128,7 @@ func (t *TemporaryUploadRepository) LsFiles(ctx context.Context, filenames ...st } fileList := make([]string, 0, len(filenames)) - for _, line := range bytes.Split(stdOut.Bytes(), []byte{'\000'}) { + for line := range bytes.SplitSeq(stdOut.Bytes(), []byte{'\000'}) { fileList = append(fileList, string(line)) } @@ -162,8 +164,8 @@ func (t *TemporaryUploadRepository) RemoveFilesFromIndex(ctx context.Context, fi return nil } -// HashObject writes the provided content to the object db and returns its hash -func (t *TemporaryUploadRepository) HashObject(ctx context.Context, content io.Reader) (string, error) { +// HashObjectAndWrite writes the provided content to the object db and returns its hash +func (t *TemporaryUploadRepository) HashObjectAndWrite(ctx context.Context, content io.Reader) (string, error) { stdOut := new(bytes.Buffer) stdErr := new(bytes.Buffer) @@ -291,15 +293,18 @@ func (t *TemporaryUploadRepository) CommitTree(ctx context.Context, opts *Commit } var sign bool - var keyID string + var key *git.SigningKey var signer *git.Signature if opts.ParentCommitID != "" { - sign, keyID, signer, _ = asymkey_service.SignCRUDAction(ctx, t.repo.RepoPath(), opts.DoerUser, t.basePath, opts.ParentCommitID) + sign, key, signer, _ = asymkey_service.SignCRUDAction(ctx, t.repo.RepoPath(), opts.DoerUser, t.basePath, opts.ParentCommitID) } else { - sign, keyID, signer, _ = asymkey_service.SignInitialCommit(ctx, t.repo.RepoPath(), opts.DoerUser) + sign, key, signer, _ = asymkey_service.SignInitialCommit(ctx, t.repo.RepoPath(), opts.DoerUser) } if sign { - cmdCommitTree.AddOptionFormat("-S%s", keyID) + if key.Format != "" { + cmdCommitTree.AddConfig("gpg.format", 
key.Format) + } + cmdCommitTree.AddOptionFormat("-S%s", key.KeyID) if t.repo.GetTrustModel() == repo_model.CommitterTrustModel || t.repo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel { if committerSig.Name != authorSig.Name || committerSig.Email != authorSig.Email { // Add trailers @@ -414,7 +419,7 @@ func (t *TemporaryUploadRepository) DiffIndex(ctx context.Context) (*gitdiff.Dif // GetBranchCommit Gets the commit object of the given branch func (t *TemporaryUploadRepository) GetBranchCommit(branch string) (*git.Commit, error) { if t.gitRepo == nil { - return nil, fmt.Errorf("repository has not been cloned") + return nil, errors.New("repository has not been cloned") } return t.gitRepo.GetBranchCommit(branch) } @@ -422,7 +427,7 @@ func (t *TemporaryUploadRepository) GetBranchCommit(branch string) (*git.Commit, // GetCommit Gets the commit object of the given commit ID func (t *TemporaryUploadRepository) GetCommit(commitID string) (*git.Commit, error) { if t.gitRepo == nil { - return nil, fmt.Errorf("repository has not been cloned") + return nil, errors.New("repository has not been cloned") } return t.gitRepo.GetCommit(commitID) } diff --git a/services/repository/files/tree.go b/services/repository/files/tree.go index 9142416347..f2cbacbf1c 100644 --- a/services/repository/files/tree.go +++ b/services/repository/files/tree.go @@ -6,12 +6,14 @@ package files import ( "context" "fmt" + "html/template" "net/url" "path" "sort" "strings" repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/modules/fileicon" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" @@ -92,11 +94,7 @@ func GetTreeBySHA(ctx context.Context, repo *repo_model.Repository, gitRepo *git if len(entries) > perPage { tree.Truncated = true } - if rangeStart+perPage < len(entries) { - rangeEnd = rangeStart + perPage - } else { - rangeEnd = len(entries) - } + rangeEnd = min(rangeStart+perPage, len(entries)) tree.Entries = make([]api.GitEntry, rangeEnd-rangeStart) for e := rangeStart; e < rangeEnd; e++ { i := e - rangeStart @@ -140,8 +138,13 @@ func entryModeString(entryMode git.EntryMode) string { } type TreeViewNode struct { - EntryName string `json:"entryName"` - EntryMode string `json:"entryMode"` + EntryName string `json:"entryName"` + EntryMode string `json:"entryMode"` + EntryIcon template.HTML `json:"entryIcon"` + EntryIconOpen template.HTML `json:"entryIconOpen,omitempty"` + + SymLinkedToMode string `json:"symLinkedToMode,omitempty"` // TODO: for the EntryMode="symlink" + FullPath string `json:"fullPath"` SubmoduleURL string `json:"submoduleUrl,omitempty"` Children []*TreeViewNode `json:"children,omitempty"` @@ -151,13 +154,20 @@ func (node *TreeViewNode) sortLevel() int { return util.Iif(node.EntryMode == "tree" || node.EntryMode == "commit", 0, 1) } -func newTreeViewNodeFromEntry(ctx context.Context, commit *git.Commit, parentDir string, entry *git.TreeEntry) *TreeViewNode { +func newTreeViewNodeFromEntry(ctx context.Context, renderedIconPool *fileicon.RenderedIconPool, commit *git.Commit, parentDir string, entry *git.TreeEntry) *TreeViewNode { node := &TreeViewNode{ EntryName: entry.Name(), EntryMode: entryModeString(entry.Mode()), FullPath: path.Join(parentDir, entry.Name()), } + entryInfo := fileicon.EntryInfoFromGitTreeEntry(commit, node.FullPath, entry) + node.EntryIcon = fileicon.RenderEntryIconHTML(renderedIconPool, entryInfo) + if entryInfo.EntryMode.IsDir() { + entryInfo.IsOpen = true + node.EntryIconOpen = 
fileicon.RenderEntryIconHTML(renderedIconPool, entryInfo) + } + if node.EntryMode == "commit" { if subModule, err := commit.GetSubModule(node.FullPath); err != nil { log.Error("GetSubModule: %v", err) @@ -182,7 +192,7 @@ func sortTreeViewNodes(nodes []*TreeViewNode) { }) } -func listTreeNodes(ctx context.Context, commit *git.Commit, tree *git.Tree, treePath, subPath string) ([]*TreeViewNode, error) { +func listTreeNodes(ctx context.Context, renderedIconPool *fileicon.RenderedIconPool, commit *git.Commit, tree *git.Tree, treePath, subPath string) ([]*TreeViewNode, error) { entries, err := tree.ListEntries() if err != nil { return nil, err @@ -191,14 +201,14 @@ func listTreeNodes(ctx context.Context, commit *git.Commit, tree *git.Tree, tree subPathDirName, subPathRemaining, _ := strings.Cut(subPath, "/") nodes := make([]*TreeViewNode, 0, len(entries)) for _, entry := range entries { - node := newTreeViewNodeFromEntry(ctx, commit, treePath, entry) + node := newTreeViewNodeFromEntry(ctx, renderedIconPool, commit, treePath, entry) nodes = append(nodes, node) if entry.IsDir() && subPathDirName == entry.Name() { subTreePath := treePath + "/" + node.EntryName if subTreePath[0] == '/' { subTreePath = subTreePath[1:] } - subNodes, err := listTreeNodes(ctx, commit, entry.Tree(), subTreePath, subPathRemaining) + subNodes, err := listTreeNodes(ctx, renderedIconPool, commit, entry.Tree(), subTreePath, subPathRemaining) if err != nil { log.Error("listTreeNodes: %v", err) } else { @@ -210,10 +220,10 @@ func listTreeNodes(ctx context.Context, commit *git.Commit, tree *git.Tree, tree return nodes, nil } -func GetTreeViewNodes(ctx context.Context, commit *git.Commit, treePath, subPath string) ([]*TreeViewNode, error) { +func GetTreeViewNodes(ctx context.Context, renderedIconPool *fileicon.RenderedIconPool, commit *git.Commit, treePath, subPath string) ([]*TreeViewNode, error) { entry, err := commit.GetTreeEntryByPath(treePath) if err != nil { return nil, err } - return listTreeNodes(ctx, commit, entry.Tree(), treePath, subPath) + return listTreeNodes(ctx, renderedIconPool, commit, entry.Tree(), treePath, subPath) } diff --git a/services/repository/files/tree_test.go b/services/repository/files/tree_test.go index 8ea54969ce..a53f342d40 100644 --- a/services/repository/files/tree_test.go +++ b/services/repository/files/tree_test.go @@ -4,9 +4,11 @@ package files import ( + "html/template" "testing" "code.gitea.io/gitea/models/unittest" + "code.gitea.io/gitea/modules/fileicon" "code.gitea.io/gitea/modules/git" api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/services/contexttest" @@ -49,7 +51,7 @@ func TestGetTreeBySHA(t *testing.T) { TotalCount: 1, } - assert.EqualValues(t, expectedTree, tree) + assert.Equal(t, expectedTree, tree) } func TestGetTreeViewNodes(t *testing.T) { @@ -62,40 +64,56 @@ func TestGetTreeViewNodes(t *testing.T) { contexttest.LoadGitRepo(t, ctx) defer ctx.Repo.GitRepo.Close() - treeNodes, err := GetTreeViewNodes(ctx, ctx.Repo.Commit, "", "") + renderedIconPool := fileicon.NewRenderedIconPool() + mockIconForFile := func(id string) template.HTML { + return template.HTML(`<svg class="svg git-entry-icon octicon-file" width="16" height="16" aria-hidden="true"><use xlink:href="#` + id + `"></use></svg>`) + } + mockIconForFolder := func(id string) template.HTML { + return template.HTML(`<svg class="svg git-entry-icon octicon-file-directory-fill" width="16" height="16" aria-hidden="true"><use xlink:href="#` + id + `"></use></svg>`) + } + mockOpenIconForFolder := func(id string) 
template.HTML { + return template.HTML(`<svg class="svg git-entry-icon octicon-file-directory-open-fill" width="16" height="16" aria-hidden="true"><use xlink:href="#` + id + `"></use></svg>`) + } + treeNodes, err := GetTreeViewNodes(ctx, renderedIconPool, ctx.Repo.Commit, "", "") assert.NoError(t, err) assert.Equal(t, []*TreeViewNode{ { - EntryName: "docs", - EntryMode: "tree", - FullPath: "docs", + EntryName: "docs", + EntryMode: "tree", + FullPath: "docs", + EntryIcon: mockIconForFolder(`svg-mfi-folder-docs`), + EntryIconOpen: mockOpenIconForFolder(`svg-mfi-folder-docs`), }, }, treeNodes) - treeNodes, err = GetTreeViewNodes(ctx, ctx.Repo.Commit, "", "docs/README.md") + treeNodes, err = GetTreeViewNodes(ctx, renderedIconPool, ctx.Repo.Commit, "", "docs/README.md") assert.NoError(t, err) assert.Equal(t, []*TreeViewNode{ { - EntryName: "docs", - EntryMode: "tree", - FullPath: "docs", + EntryName: "docs", + EntryMode: "tree", + FullPath: "docs", + EntryIcon: mockIconForFolder(`svg-mfi-folder-docs`), + EntryIconOpen: mockOpenIconForFolder(`svg-mfi-folder-docs`), Children: []*TreeViewNode{ { EntryName: "README.md", EntryMode: "blob", FullPath: "docs/README.md", + EntryIcon: mockIconForFile(`svg-mfi-readme`), }, }, }, }, treeNodes) - treeNodes, err = GetTreeViewNodes(ctx, ctx.Repo.Commit, "docs", "README.md") + treeNodes, err = GetTreeViewNodes(ctx, renderedIconPool, ctx.Repo.Commit, "docs", "README.md") assert.NoError(t, err) assert.Equal(t, []*TreeViewNode{ { EntryName: "README.md", EntryMode: "blob", FullPath: "docs/README.md", + EntryIcon: mockIconForFile(`svg-mfi-readme`), }, }, treeNodes) } diff --git a/services/repository/files/update.go b/services/repository/files/update.go index cade7ba2bf..e871f777e5 100644 --- a/services/repository/files/update.go +++ b/services/repository/files/update.go @@ -8,6 +8,7 @@ import ( "fmt" "io" "path" + "slices" "strings" "time" @@ -15,12 +16,14 @@ import ( repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/git/attribute" "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/lfs" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/util" + "code.gitea.io/gitea/routers/api/v1/utils" asymkey_service "code.gitea.io/gitea/services/asymkey" pull_service "code.gitea.io/gitea/services/pull" ) @@ -85,14 +88,32 @@ func (err ErrRepoFileDoesNotExist) Unwrap() error { return util.ErrNotExist } +type LazyReadSeeker interface { + io.ReadSeeker + io.Closer + OpenLazyReader() error +} + // ChangeRepoFiles adds, updates or removes multiple files in the given repository -func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *user_model.User, opts *ChangeRepoFilesOptions) (*structs.FilesResponse, error) { +func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *user_model.User, opts *ChangeRepoFilesOptions) (_ *structs.FilesResponse, errRet error) { + var addedLfsPointers []lfs.Pointer + defer func() { + if errRet != nil { + for _, lfsPointer := range addedLfsPointers { + _, err := git_model.RemoveLFSMetaObjectByOid(ctx, repo.ID, lfsPointer.Oid) + if err != nil { + log.Error("ChangeRepoFiles: RemoveLFSMetaObjectByOid failed: %v", err) + } + } + } + }() + err := repo.MustNotBeArchived() if err != nil { return nil, err } - // If no branch name is set, assume default branch + // If no branch name is set, assume the default branch if 
opts.OldBranch == "" { opts.OldBranch = repo.DefaultBranch } @@ -107,8 +128,13 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use defer closer.Close() // oldBranch must exist for this operation - if _, err := gitRepo.GetBranch(opts.OldBranch); err != nil && !repo.IsEmpty { + if exist, err := git_model.IsBranchExist(ctx, repo.ID, opts.OldBranch); err != nil { return nil, err + } else if !exist && !repo.IsEmpty { + return nil, git_model.ErrBranchNotExist{ + RepoID: repo.ID, + BranchName: opts.OldBranch, + } } var treePaths []string @@ -119,14 +145,14 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use } // Check that the path given in opts.treePath is valid (not a git path) - treePath := CleanUploadFileName(file.TreePath) + treePath := CleanGitTreePath(file.TreePath) if treePath == "" { return nil, ErrFilenameInvalid{ Path: file.TreePath, } } // If there is a fromTreePath (we are copying it), also clean it up - fromTreePath := CleanUploadFileName(file.FromTreePath) + fromTreePath := CleanGitTreePath(file.FromTreePath) if fromTreePath == "" && file.FromTreePath != "" { return nil, ErrFilenameInvalid{ Path: file.FromTreePath, @@ -145,15 +171,15 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use // Check to make sure the branch does not already exist, otherwise we can't proceed. // If we aren't branching to a new branch, make sure user can commit to the given branch if opts.NewBranch != opts.OldBranch { - existingBranch, err := gitRepo.GetBranch(opts.NewBranch) - if existingBranch != nil { + exist, err := git_model.IsBranchExist(ctx, repo.ID, opts.NewBranch) + if err != nil { + return nil, err + } + if exist { return nil, git_model.ErrBranchAlreadyExists{ BranchName: opts.NewBranch, } } - if err != nil && !git.IsErrBranchNotExist(err) { - return nil, err - } } else if err := VerifyBranchProtection(ctx, repo, doer, opts.OldBranch, treePaths); err != nil { return nil, err } @@ -196,13 +222,7 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use } // Find the file we want to delete in the index - inFilelist := false - for _, indexFile := range filesInIndex { - if indexFile == file.TreePath { - inFilelist = true - break - } - } + inFilelist := slices.Contains(filesInIndex, file.TreePath) if !inFilelist { return nil, ErrRepoFileDoesNotExist{ Path: file.TreePath, @@ -218,7 +238,7 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use return nil, err // Couldn't get a commit for the branch } - // Assigned LastCommitID in opts if it hasn't been set + // Assigned LastCommitID in "opts" if it hasn't been set if opts.LastCommitID == "" { opts.LastCommitID = commit.ID.String() } else { @@ -230,22 +250,25 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use } for _, file := range opts.Files { - if err := handleCheckErrors(file, commit, opts); err != nil { + if err = handleCheckErrors(file, commit, opts); err != nil { return nil, err } } } - contentStore := lfs.NewContentStore() + lfsContentStore := lfs.NewContentStore() for _, file := range opts.Files { switch file.Operation { - case "create", "update": - if err := CreateOrUpdateFile(ctx, t, file, contentStore, repo.ID, hasOldBranch); err != nil { + case "create", "update", "rename", "upload": + addedLfsPointer, err := modifyFile(ctx, t, file, lfsContentStore, repo.ID) + if err != nil { return nil, err } + if addedLfsPointer != nil { + addedLfsPointers = 
append(addedLfsPointers, *addedLfsPointer) + } case "delete": - // Remove the file from the index - if err := t.RemoveFilesFromIndex(ctx, file.TreePath); err != nil { + if err = t.RemoveFilesFromIndex(ctx, file.TreePath); err != nil { return nil, err } default: @@ -290,14 +313,16 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use return nil, err } - filesResponse, err := GetFilesResponseFromCommit(ctx, repo, commit, opts.NewBranch, treePaths) + // FIXME: this call seems not right, why does it need to read the file content again? + // FIXME: why does it use NewBranch as "ref"? It should use the commit ID because the response is only for this commit + filesResponse, err := GetFilesResponseFromCommit(ctx, repo, gitRepo, utils.NewRefCommit(git.RefNameFromBranch(opts.NewBranch), commit), treePaths) if err != nil { return nil, err } if repo.IsEmpty { if isEmpty, err := gitRepo.IsEmpty(); err == nil && !isEmpty { - _ = repo_model.UpdateRepositoryCols(ctx, &repo_model.Repository{ID: repo.ID, IsEmpty: false, DefaultBranch: opts.NewBranch}, "is_empty", "default_branch") + _ = repo_model.UpdateRepositoryColsWithAutoTime(ctx, &repo_model.Repository{ID: repo.ID, IsEmpty: false, DefaultBranch: opts.NewBranch}, "is_empty", "default_branch") } } @@ -363,22 +388,33 @@ func (err ErrSHAOrCommitIDNotProvided) Error() string { // handles the check for various issues for ChangeRepoFiles func handleCheckErrors(file *ChangeRepoFile, commit *git.Commit, opts *ChangeRepoFilesOptions) error { - if file.Operation == "update" || file.Operation == "delete" { - fromEntry, err := commit.GetTreeEntryByPath(file.Options.fromTreePath) - if err != nil { - return err + // check old entry (fromTreePath/fromEntry) + if file.Operation == "update" || file.Operation == "upload" || file.Operation == "delete" || file.Operation == "rename" { + var fromEntryIDString string + { + fromEntry, err := commit.GetTreeEntryByPath(file.Options.fromTreePath) + if file.Operation == "upload" && git.IsErrNotExist(err) { + fromEntry = nil + } else if err != nil { + return err + } + if fromEntry != nil { + fromEntryIDString = fromEntry.ID.String() + file.Options.executable = fromEntry.IsExecutable() // FIXME: legacy hacky approach, it shouldn't prepare the "Options" in the "check" function + } } + if file.SHA != "" { - // If a SHA was given and the SHA given doesn't match the SHA of the fromTreePath, throw error - if file.SHA != fromEntry.ID.String() { + // If the SHA given doesn't match the SHA of the fromTreePath, throw error + if file.SHA != fromEntryIDString { return pull_service.ErrSHADoesNotMatch{ Path: file.Options.treePath, GivenSHA: file.SHA, - CurrentSHA: fromEntry.ID.String(), + CurrentSHA: fromEntryIDString, } } } else if opts.LastCommitID != "" { - // If a lastCommitID was given and it doesn't match the commitID of the head of the branch throw + // If the given lastCommitID doesn't match the branch head's commitID, throw + // an error, but only if we aren't creating a new branch. if commit.ID.String() != opts.LastCommitID && opts.OldBranch == opts.NewBranch { if changed, err := commit.FileChangedSinceCommit(file.Options.treePath, opts.LastCommitID); err != nil { @@ -396,13 +432,13 @@ func handleCheckErrors(file *ChangeRepoFile, commit *git.Commit, opts *ChangeRep // haven't been made. We throw an error if one wasn't provided.
return ErrSHAOrCommitIDNotProvided{} } - file.Options.executable = fromEntry.IsExecutable() } - if file.Operation == "create" || file.Operation == "update" { - // For the path where this file will be created/updated, we need to make - // sure no parts of the path are existing files or links except for the last - // item in the path which is the file name, and that shouldn't exist IF it is - // a new file OR is being moved to a new path. + + // check new entry (treePath/treeEntry) + if file.Operation == "create" || file.Operation == "update" || file.Operation == "upload" || file.Operation == "rename" { + // For operation's target path, we need to make sure no parts of the path are existing files or links + // except for the last item in the path (which is the file name). + // And that shouldn't exist IF it is a new file OR is being moved to a new path. treePathParts := strings.Split(file.Options.treePath, "/") subTreePath := "" for index, part := range treePathParts { @@ -439,7 +475,7 @@ func handleCheckErrors(file *ChangeRepoFile, commit *git.Commit, opts *ChangeRep Type: git.EntryModeTree, } } else if file.Options.fromTreePath != file.Options.treePath || file.Operation == "create" { - // The entry shouldn't exist if we are creating new file or moving to a new path + // The entry shouldn't exist if we are creating the new file or moving to a new path return ErrRepoFileAlreadyExists{ Path: file.Options.treePath, } @@ -450,21 +486,23 @@ func handleCheckErrors(file *ChangeRepoFile, commit *git.Commit, opts *ChangeRep return nil } -// CreateOrUpdateFile handles creating or updating a file for ChangeRepoFiles -func CreateOrUpdateFile(ctx context.Context, t *TemporaryUploadRepository, file *ChangeRepoFile, contentStore *lfs.ContentStore, repoID int64, hasOldBranch bool) error { +func modifyFile(ctx context.Context, t *TemporaryUploadRepository, file *ChangeRepoFile, contentStore *lfs.ContentStore, repoID int64) (addedLfsPointer *lfs.Pointer, _ error) { + if rd, ok := file.ContentReader.(LazyReadSeeker); ok { + if err := rd.OpenLazyReader(); err != nil { + return nil, fmt.Errorf("OpenLazyReader: %w", err) + } + defer rd.Close() + } + // Get the two paths (might be the same if not moving) from the index if they exist filesInIndex, err := t.LsFiles(ctx, file.TreePath, file.FromTreePath) if err != nil { - return fmt.Errorf("UpdateRepoFile: %w", err) + return nil, fmt.Errorf("LsFiles: %w", err) } // If is a new file (not updating) then the given path shouldn't exist if file.Operation == "create" { - for _, indexFile := range filesInIndex { - if indexFile == file.TreePath { - return ErrRepoFileAlreadyExists{ - Path: file.TreePath, - } - } + if slices.Contains(filesInIndex, file.TreePath) { + return nil, ErrRepoFileAlreadyExists{Path: file.TreePath} } } @@ -472,79 +510,178 @@ func CreateOrUpdateFile(ctx context.Context, t *TemporaryUploadRepository, file if file.Options.fromTreePath != file.Options.treePath && len(filesInIndex) > 0 { for _, indexFile := range filesInIndex { if indexFile == file.Options.fromTreePath { - if err := t.RemoveFilesFromIndex(ctx, file.FromTreePath); err != nil { - return err + if err = t.RemoveFilesFromIndex(ctx, file.FromTreePath); err != nil { + return nil, err } } } } - treeObjectContentReader := file.ContentReader - var lfsMetaObject *git_model.LFSMetaObject - if setting.LFS.StartServer && hasOldBranch { - // Check there is no way this can return multiple infos - filename2attribute2info, err := t.gitRepo.CheckAttribute(git.CheckAttributeOpts{ - Attributes: 
[]string{"filter"}, - Filenames: []string{file.Options.treePath}, - CachedOnly: true, - }) + var writeObjectRet *writeRepoObjectRet + switch file.Operation { + case "create", "update", "upload": + writeObjectRet, err = writeRepoObjectForModify(ctx, t, file) + case "rename": + writeObjectRet, err = writeRepoObjectForRename(ctx, t, file) + default: + return nil, util.NewInvalidArgumentErrorf("unknown file modification operation: '%s'", file.Operation) + } + if err != nil { + return nil, err + } + + // Add the object to the index, the "file.Options.executable" is set in handleCheckErrors by the caller (legacy hacky approach) + if err = t.AddObjectToIndex(ctx, util.Iif(file.Options.executable, "100755", "100644"), writeObjectRet.ObjectHash, file.Options.treePath); err != nil { + return nil, err + } + + if writeObjectRet.LfsContent == nil { + return nil, nil // No LFS pointer, so nothing to do + } + defer writeObjectRet.LfsContent.Close() + + // Now we must store the content into an LFS object + lfsMetaObject, err := git_model.NewLFSMetaObject(ctx, repoID, writeObjectRet.LfsPointer) + if err != nil { + return nil, err + } + exist, err := contentStore.Exists(lfsMetaObject.Pointer) + if err != nil { + return nil, err + } + if !exist { + err = contentStore.Put(lfsMetaObject.Pointer, writeObjectRet.LfsContent) if err != nil { - return err + if _, errRemove := git_model.RemoveLFSMetaObjectByOid(ctx, repoID, lfsMetaObject.Oid); errRemove != nil { + return nil, fmt.Errorf("unable to remove failed inserted LFS object %s: %v (Prev Error: %w)", lfsMetaObject.Oid, errRemove, err) + } + return nil, err } + } + return &lfsMetaObject.Pointer, nil +} + +func checkIsLfsFileInGitAttributes(ctx context.Context, t *TemporaryUploadRepository, paths []string) (ret []bool, err error) { + attributesMap, err := attribute.CheckAttributes(ctx, t.gitRepo, "" /* use temp repo's working dir */, attribute.CheckAttributeOpts{ + Attributes: []string{attribute.Filter}, + Filenames: paths, + }) + if err != nil { + return nil, err + } + for _, p := range paths { + isLFSFile := attributesMap[p] != nil && attributesMap[p].Get(attribute.Filter).ToString().Value() == "lfs" + ret = append(ret, isLFSFile) + } + return ret, nil +} - if filename2attribute2info[file.Options.treePath] != nil && filename2attribute2info[file.Options.treePath]["filter"] == "lfs" { - // OK so we are supposed to LFS this data! - pointer, err := lfs.GeneratePointer(treeObjectContentReader) +type writeRepoObjectRet struct { + ObjectHash string + LfsContent io.ReadCloser // if not nil, then the caller should store its content in LfsPointer, then close it + LfsPointer lfs.Pointer +} + +// writeRepoObjectForModify hashes the git object for create or update operations +func writeRepoObjectForModify(ctx context.Context, t *TemporaryUploadRepository, file *ChangeRepoFile) (ret *writeRepoObjectRet, err error) { + ret = &writeRepoObjectRet{} + treeObjectContentReader := file.ContentReader + if setting.LFS.StartServer { + checkIsLfsFiles, err := checkIsLfsFileInGitAttributes(ctx, t, []string{file.Options.treePath}) + if err != nil { + return nil, err + } + if checkIsLfsFiles[0] { + // OK, so we are supposed to LFS this data! 
+ ret.LfsPointer, err = lfs.GeneratePointer(file.ContentReader) if err != nil { - return err + return nil, err } - lfsMetaObject = &git_model.LFSMetaObject{Pointer: pointer, RepositoryID: repoID} - treeObjectContentReader = strings.NewReader(pointer.StringContent()) + if _, err = file.ContentReader.Seek(0, io.SeekStart); err != nil { + return nil, err + } + ret.LfsContent = io.NopCloser(file.ContentReader) + treeObjectContentReader = strings.NewReader(ret.LfsPointer.StringContent()) } } - // Add the object to the database - objectHash, err := t.HashObject(ctx, treeObjectContentReader) + ret.ObjectHash, err = t.HashObjectAndWrite(ctx, treeObjectContentReader) if err != nil { - return err + return nil, err } + return ret, nil +} - // Add the object to the index - if file.Options.executable { - if err := t.AddObjectToIndex(ctx, "100755", objectHash, file.Options.treePath); err != nil { - return err - } - } else { - if err := t.AddObjectToIndex(ctx, "100644", objectHash, file.Options.treePath); err != nil { - return err +// writeRepoObjectForRename is the same as writeRepoObjectForModify but for "rename" +func writeRepoObjectForRename(ctx context.Context, t *TemporaryUploadRepository, file *ChangeRepoFile) (ret *writeRepoObjectRet, err error) { + lastCommitID, err := t.GetLastCommit(ctx) + if err != nil { + return nil, err + } + commit, err := t.GetCommit(lastCommitID) + if err != nil { + return nil, err + } + oldEntry, err := commit.GetTreeEntryByPath(file.Options.fromTreePath) + if err != nil { + return nil, err + } + + ret = &writeRepoObjectRet{ObjectHash: oldEntry.ID.String()} + if !setting.LFS.StartServer { + return ret, nil + } + + checkIsLfsFiles, err := checkIsLfsFileInGitAttributes(ctx, t, []string{file.Options.fromTreePath, file.Options.treePath}) + if err != nil { + return nil, err + } + oldIsLfs, newIsLfs := checkIsLfsFiles[0], checkIsLfsFiles[1] + + // If the old and new paths are both in lfs or both not in lfs, the object hash of the old file can be used directly + // as the object doesn't change + if oldIsLfs == newIsLfs { + return ret, nil + } + + oldEntryBlobPointerBy := func(f func(r io.Reader) (lfs.Pointer, error)) (lfsPointer lfs.Pointer, err error) { + r, err := oldEntry.Blob().DataAsync() + if err != nil { + return lfsPointer, err } + defer r.Close() + return f(r) } - if lfsMetaObject != nil { - // We have an LFS object - create it - lfsMetaObject, err = git_model.NewLFSMetaObject(ctx, lfsMetaObject.RepositoryID, lfsMetaObject.Pointer) + var treeObjectContentReader io.ReadCloser + if oldIsLfs { + // If the old is in lfs but the new isn't, read the content from lfs and add it as a normal git object + pointer, err := oldEntryBlobPointerBy(lfs.ReadPointer) if err != nil { - return err + return nil, err } - exist, err := contentStore.Exists(lfsMetaObject.Pointer) + treeObjectContentReader, err = lfs.ReadMetaObject(pointer) if err != nil { - return err + return nil, err } - if !exist { - _, err := file.ContentReader.Seek(0, io.SeekStart) - if err != nil { - return err - } - if err := contentStore.Put(lfsMetaObject.Pointer, file.ContentReader); err != nil { - if _, err2 := git_model.RemoveLFSMetaObjectByOid(ctx, repoID, lfsMetaObject.Oid); err2 != nil { - return fmt.Errorf("unable to remove failed inserted LFS object %s: %v (Prev Error: %w)", lfsMetaObject.Oid, err2, err) - } - return err - } + defer treeObjectContentReader.Close() + } else { + // If the new is in lfs but the old isn't, read the content from the git object and generate an LFS pointer for it + ret.LfsPointer, err =
oldEntryBlobPointerBy(lfs.GeneratePointer) + if err != nil { + return nil, err + } + ret.LfsContent, err = oldEntry.Blob().DataAsync() + if err != nil { + return nil, err } + treeObjectContentReader = io.NopCloser(strings.NewReader(ret.LfsPointer.StringContent())) } - - return nil + ret.ObjectHash, err = t.HashObjectAndWrite(ctx, treeObjectContentReader) + if err != nil { + return nil, err + } + return ret, nil } // VerifyBranchProtection verify the branch protection for modifying the given treePath on the given branch diff --git a/services/repository/files/upload.go b/services/repository/files/upload.go index 2e4ed1744e..b783cbd01d 100644 --- a/services/repository/files/upload.go +++ b/services/repository/files/upload.go @@ -8,14 +8,11 @@ import ( "fmt" "os" "path" - "strings" + "sync" - git_model "code.gitea.io/gitea/models/git" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" - "code.gitea.io/gitea/modules/git" - "code.gitea.io/gitea/modules/lfs" - "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/log" ) // UploadRepoFileOptions contains the uploaded repository file options @@ -31,202 +28,84 @@ type UploadRepoFileOptions struct { Committer *IdentityOptions } -type uploadInfo struct { - upload *repo_model.Upload - lfsMetaObject *git_model.LFSMetaObject +type lazyLocalFileReader struct { + *os.File + localFilename string + counter int + mu sync.Mutex } -func cleanUpAfterFailure(ctx context.Context, infos *[]uploadInfo, t *TemporaryUploadRepository, original error) error { - for _, info := range *infos { - if info.lfsMetaObject == nil { - continue - } - if !info.lfsMetaObject.Existing { - if _, err := git_model.RemoveLFSMetaObjectByOid(ctx, t.repo.ID, info.lfsMetaObject.Oid); err != nil { - original = fmt.Errorf("%w, %v", original, err) // We wrap the original error - as this is the underlying error that required the fallback - } - } - } - return original -} - -// UploadRepoFiles uploads files to the given repository -func UploadRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *user_model.User, opts *UploadRepoFileOptions) error { - if len(opts.Files) == 0 { - return nil - } +var _ LazyReadSeeker = (*lazyLocalFileReader)(nil) - uploads, err := repo_model.GetUploadsByUUIDs(ctx, opts.Files) - if err != nil { - return fmt.Errorf("GetUploadsByUUIDs [uuids: %v]: %w", opts.Files, err) - } +func (l *lazyLocalFileReader) Close() error { + l.mu.Lock() + defer l.mu.Unlock() - names := make([]string, len(uploads)) - infos := make([]uploadInfo, len(uploads)) - for i, upload := range uploads { - // Check file is not lfs locked, will return nil if lock setting not enabled - filepath := path.Join(opts.TreePath, upload.Name) - lfsLock, err := git_model.GetTreePathLock(ctx, repo.ID, filepath) - if err != nil { - return err - } - if lfsLock != nil && lfsLock.OwnerID != doer.ID { - u, err := user_model.GetUserByID(ctx, lfsLock.OwnerID) - if err != nil { - return err + if l.counter > 0 { + l.counter-- + if l.counter == 0 { + if err := l.File.Close(); err != nil { + return fmt.Errorf("close file %s: %w", l.localFilename, err) } - return git_model.ErrLFSFileLocked{RepoID: repo.ID, Path: filepath, UserName: u.Name} - } - - names[i] = upload.Name - infos[i] = uploadInfo{upload: upload} - } - - t, err := NewTemporaryUploadRepository(repo) - if err != nil { - return err - } - defer t.Close() - - hasOldBranch := true - if err = t.Clone(ctx, opts.OldBranch, true); err != nil { - if !git.IsErrBranchNotExist(err) || !repo.IsEmpty { - return err 
- } - if err = t.Init(ctx, repo.ObjectFormatName); err != nil { - return err - } - hasOldBranch = false - opts.LastCommitID = "" - } - if hasOldBranch { - if err = t.SetDefaultIndex(ctx); err != nil { - return err - } - } - - var filename2attribute2info map[string]map[string]string - if setting.LFS.StartServer { - filename2attribute2info, err = t.gitRepo.CheckAttribute(git.CheckAttributeOpts{ - Attributes: []string{"filter"}, - Filenames: names, - CachedOnly: true, - }) - if err != nil { - return err + l.File = nil } + return nil } + return fmt.Errorf("file %s already closed", l.localFilename) +} - // Copy uploaded files into repository. - for i := range infos { - if err := copyUploadedLFSFileIntoRepository(ctx, &infos[i], filename2attribute2info, t, opts.TreePath); err != nil { - return err - } - } +func (l *lazyLocalFileReader) OpenLazyReader() error { + l.mu.Lock() + defer l.mu.Unlock() - // Now write the tree - treeHash, err := t.WriteTree(ctx) - if err != nil { - return err + if l.File != nil { + l.counter++ + return nil } - // Now commit the tree - commitOpts := &CommitTreeUserOptions{ - ParentCommitID: opts.LastCommitID, - TreeHash: treeHash, - CommitMessage: opts.Message, - SignOff: opts.Signoff, - DoerUser: doer, - AuthorIdentity: opts.Author, - CommitterIdentity: opts.Committer, - } - commitHash, err := t.CommitTree(ctx, commitOpts) + file, err := os.Open(l.localFilename) if err != nil { return err } + l.File = file + l.counter = 1 + return nil +} - // Now deal with LFS objects - for i := range infos { - if infos[i].lfsMetaObject == nil { - continue - } - infos[i].lfsMetaObject, err = git_model.NewLFSMetaObject(ctx, infos[i].lfsMetaObject.RepositoryID, infos[i].lfsMetaObject.Pointer) - if err != nil { - // OK Now we need to cleanup - return cleanUpAfterFailure(ctx, &infos, t, err) - } - // Don't move the files yet - we need to ensure that - // everything can be inserted first - } - - // OK now we can insert the data into the store - there's no way to clean up the store - // once it's in there, it's in there. - contentStore := lfs.NewContentStore() - for _, info := range infos { - if err := uploadToLFSContentStore(info, contentStore); err != nil { - return cleanUpAfterFailure(ctx, &infos, t, err) - } - } - - // Then push this tree to NewBranch - if err := t.Push(ctx, doer, commitHash, opts.NewBranch); err != nil { - return err +// UploadRepoFiles uploads files to the given repository +func UploadRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *user_model.User, opts *UploadRepoFileOptions) error { + if len(opts.Files) == 0 { + return nil } - return repo_model.DeleteUploads(ctx, uploads...) -} - -func copyUploadedLFSFileIntoRepository(ctx context.Context, info *uploadInfo, filename2attribute2info map[string]map[string]string, t *TemporaryUploadRepository, treePath string) error { - file, err := os.Open(info.upload.LocalPath()) + uploads, err := repo_model.GetUploadsByUUIDs(ctx, opts.Files) if err != nil { - return err + return fmt.Errorf("GetUploadsByUUIDs [uuids: %v]: %w", opts.Files, err) } - defer file.Close() - var objectHash string - if setting.LFS.StartServer && filename2attribute2info[info.upload.Name] != nil && filename2attribute2info[info.upload.Name]["filter"] == "lfs" { - // Handle LFS - // FIXME: Inefficient! 
this should probably happen in models.Upload - pointer, err := lfs.GeneratePointer(file) - if err != nil { - return err - } - - info.lfsMetaObject = &git_model.LFSMetaObject{Pointer: pointer, RepositoryID: t.repo.ID} - - if objectHash, err = t.HashObject(ctx, strings.NewReader(pointer.StringContent())); err != nil { - return err - } - } else if objectHash, err = t.HashObject(ctx, file); err != nil { - return err + changeOpts := &ChangeRepoFilesOptions{ + LastCommitID: opts.LastCommitID, + OldBranch: opts.OldBranch, + NewBranch: opts.NewBranch, + Message: opts.Message, + Signoff: opts.Signoff, + Author: opts.Author, + Committer: opts.Committer, + } + for _, upload := range uploads { + changeOpts.Files = append(changeOpts.Files, &ChangeRepoFile{ + Operation: "upload", + TreePath: path.Join(opts.TreePath, upload.Name), + ContentReader: &lazyLocalFileReader{localFilename: upload.LocalPath()}, + }) } - // Add the object to the index - return t.AddObjectToIndex(ctx, "100644", objectHash, path.Join(treePath, info.upload.Name)) -} - -func uploadToLFSContentStore(info uploadInfo, contentStore *lfs.ContentStore) error { - if info.lfsMetaObject == nil { - return nil - } - exist, err := contentStore.Exists(info.lfsMetaObject.Pointer) + _, err = ChangeRepoFiles(ctx, repo, doer, changeOpts) if err != nil { return err } - if !exist { - file, err := os.Open(info.upload.LocalPath()) - if err != nil { - return err - } - - defer file.Close() - // FIXME: Put regenerates the hash and copies the file over. - // I guess this strictly ensures the soundness of the store but this is inefficient. - if err := contentStore.Put(info.lfsMetaObject.Pointer, file); err != nil { - // OK Now we need to cleanup - // Can't clean up the store, once uploaded there they're there. - return err - } + if err := repo_model.DeleteUploads(ctx, uploads...); err != nil { + log.Error("DeleteUploads: %v", err) } return nil } diff --git a/services/repository/fork.go b/services/repository/fork.go index 7f7364acfc..8bd3498b17 100644 --- a/services/repository/fork.go +++ b/services/repository/fork.go @@ -65,7 +65,7 @@ func ForkRepository(ctx context.Context, doer, owner *user_model.User, opts Fork } // Fork is prohibited, if user has reached maximum limit of repositories - if !owner.CanForkRepo() { + if !doer.CanForkRepoIn(owner) { return nil, repo_model.ErrReachLimitOfRepo{ Limit: owner.MaxRepoCreation, } @@ -100,114 +100,106 @@ func ForkRepository(ctx context.Context, doer, owner *user_model.User, opts Fork IsFork: true, ForkID: opts.BaseRepo.ID, ObjectFormatName: opts.BaseRepo.ObjectFormatName, + Status: repo_model.RepositoryBeingMigrated, } - oldRepoPath := opts.BaseRepo.RepoPath() - - needsRollback := false - rollbackFn := func() { - if !needsRollback { - return - } - - if exists, _ := gitrepo.IsRepositoryExist(ctx, repo); !exists { - return - } - - // As the transaction will be failed and hence database changes will be destroyed we only need - // to delete the related repository on the filesystem - if errDelete := gitrepo.DeleteRepository(ctx, repo); errDelete != nil { - log.Error("Failed to remove fork repo") - } - } - - needsRollbackInPanic := true - defer func() { - panicErr := recover() - if panicErr == nil { - return - } - - if needsRollbackInPanic { - rollbackFn() - } - panic(panicErr) - }() - - err = db.WithTx(ctx, func(txCtx context.Context) error { - if err = CreateRepositoryByExample(txCtx, doer, owner, repo, false, true); err != nil { + // 1 - Create the repository in the database + err = db.WithTx(ctx, func(ctx 
context.Context) error { + if err = createRepositoryInDB(ctx, doer, owner, repo, true); err != nil { return err } - - if err = repo_model.IncrementRepoForkNum(txCtx, opts.BaseRepo.ID); err != nil { + if err = repo_model.IncrementRepoForkNum(ctx, opts.BaseRepo.ID); err != nil { return err } // copy lfs files failure should not be ignored - if err = git_model.CopyLFS(txCtx, repo, opts.BaseRepo); err != nil { - return err - } - - needsRollback = true + return git_model.CopyLFS(ctx, repo, opts.BaseRepo) + }) + if err != nil { + return nil, err + } - cloneCmd := git.NewCommand("clone", "--bare") - if opts.SingleBranch != "" { - cloneCmd.AddArguments("--single-branch", "--branch").AddDynamicArguments(opts.SingleBranch) - } - if stdout, _, err := cloneCmd.AddDynamicArguments(oldRepoPath, repo.RepoPath()). - RunStdBytes(txCtx, &git.RunOpts{Timeout: 10 * time.Minute}); err != nil { - log.Error("Fork Repository (git clone) Failed for %v (from %v):\nStdout: %s\nError: %v", repo, opts.BaseRepo, stdout, err) - return fmt.Errorf("git clone: %w", err) + // last - clean up if something goes wrong + // WARNING: Don't override all later err with local variables + defer func() { + if err != nil { + // we can not use the ctx because it maybe canceled or timeout + cleanupRepository(repo.ID) } + }() - if err := repo_module.CheckDaemonExportOK(txCtx, repo); err != nil { - return fmt.Errorf("checkDaemonExportOK: %w", err) + // 2 - check whether the repository with the same storage exists + var isExist bool + isExist, err = gitrepo.IsRepositoryExist(ctx, repo) + if err != nil { + log.Error("Unable to check if %s exists. Error: %v", repo.FullName(), err) + return nil, err + } + if isExist { + log.Error("Files already exist in %s and we are not going to adopt or delete.", repo.FullName()) + // Don't return directly, we need err in defer to cleanupRepository + err = repo_model.ErrRepoFilesAlreadyExist{ + Uname: repo.OwnerName, + Name: repo.Name, } + return nil, err + } - if stdout, _, err := git.NewCommand("update-server-info"). - RunStdString(txCtx, &git.RunOpts{Dir: repo.RepoPath()}); err != nil { - log.Error("Fork Repository (git update-server-info) failed for %v:\nStdout: %s\nError: %v", repo, stdout, err) - return fmt.Errorf("git update-server-info: %w", err) - } + // 3 - Clone the repository + cloneCmd := git.NewCommand("clone", "--bare") + if opts.SingleBranch != "" { + cloneCmd.AddArguments("--single-branch", "--branch").AddDynamicArguments(opts.SingleBranch) + } + var stdout []byte + if stdout, _, err = cloneCmd.AddDynamicArguments(opts.BaseRepo.RepoPath(), repo.RepoPath()). 
+ RunStdBytes(ctx, &git.RunOpts{Timeout: 10 * time.Minute}); err != nil { + log.Error("Fork Repository (git clone) Failed for %v (from %v):\nStdout: %s\nError: %v", repo, opts.BaseRepo, stdout, err) + return nil, fmt.Errorf("git clone: %w", err) + } - if err = gitrepo.CreateDelegateHooksForRepo(ctx, repo); err != nil { - return fmt.Errorf("createDelegateHooks: %w", err) - } + // 4 - Update the git repository + if err = updateGitRepoAfterCreate(ctx, repo); err != nil { + return nil, fmt.Errorf("updateGitRepoAfterCreate: %w", err) + } - gitRepo, err := gitrepo.OpenRepository(txCtx, repo) - if err != nil { - return fmt.Errorf("OpenRepository: %w", err) - } - defer gitRepo.Close() + // 5 - Create hooks + if err = gitrepo.CreateDelegateHooks(ctx, repo); err != nil { + return nil, fmt.Errorf("createDelegateHooks: %w", err) + } - _, err = repo_module.SyncRepoBranchesWithRepo(txCtx, repo, gitRepo, doer.ID) - return err - }) - needsRollbackInPanic = false + // 6 - Sync the repository branches and tags + var gitRepo *git.Repository + gitRepo, err = gitrepo.OpenRepository(ctx, repo) if err != nil { - rollbackFn() - return nil, err + return nil, fmt.Errorf("OpenRepository: %w", err) } + defer gitRepo.Close() + if _, err = repo_module.SyncRepoBranchesWithRepo(ctx, repo, gitRepo, doer.ID); err != nil { + return nil, fmt.Errorf("SyncRepoBranchesWithRepo: %w", err) + } + if err = repo_module.SyncReleasesWithTags(ctx, repo, gitRepo); err != nil { + return nil, fmt.Errorf("Sync releases from git tags failed: %v", err) + } + + // 7 - Update the repository // even if below operations failed, it could be ignored. And they will be retried - if err := repo_module.UpdateRepoSize(ctx, repo); err != nil { + if err = repo_module.UpdateRepoSize(ctx, repo); err != nil { log.Error("Failed to update size for repository: %v", err) + err = nil } - if err := repo_model.CopyLanguageStat(ctx, opts.BaseRepo, repo); err != nil { + if err = repo_model.CopyLanguageStat(ctx, opts.BaseRepo, repo); err != nil { log.Error("Copy language stat from oldRepo failed: %v", err) + err = nil } - if err := repo_model.CopyLicense(ctx, opts.BaseRepo, repo); err != nil { + if err = repo_model.CopyLicense(ctx, opts.BaseRepo, repo); err != nil { return nil, err } - gitRepo, err := gitrepo.OpenRepository(ctx, repo) - if err != nil { - log.Error("Open created git repository failed: %v", err) - } else { - defer gitRepo.Close() - if err := repo_module.SyncReleasesWithTags(ctx, repo, gitRepo); err != nil { - log.Error("Sync releases from git tags failed: %v", err) - } + // 8 - update repository status to be ready + repo.Status = repo_model.RepositoryReady + if err = repo_model.UpdateRepositoryColsWithAutoTime(ctx, repo, "status"); err != nil { + return nil, fmt.Errorf("UpdateRepositoryCols: %w", err) } notify_service.ForkRepository(ctx, doer, opts.BaseRepo, repo) @@ -217,7 +209,7 @@ func ForkRepository(ctx context.Context, doer, owner *user_model.User, opts Fork // ConvertForkToNormalRepository convert the provided repo from a forked repo to normal repo func ConvertForkToNormalRepository(ctx context.Context, repo *repo_model.Repository) error { - err := db.WithTx(ctx, func(ctx context.Context) error { + return db.WithTx(ctx, func(ctx context.Context) error { repo, err := repo_model.GetRepositoryByID(ctx, repo.ID) if err != nil { return err @@ -234,16 +226,8 @@ func ConvertForkToNormalRepository(ctx context.Context, repo *repo_model.Reposit repo.IsFork = false repo.ForkID = 0 - - if err := repo_module.UpdateRepository(ctx, repo, false); err != nil { - 
log.Error("Unable to update repository %-v whilst converting from fork. Error: %v", repo, err) - return err - } - - return nil + return repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "is_fork", "fork_id") }) - - return err } type findForksOptions struct { diff --git a/services/repository/fork_test.go b/services/repository/fork_test.go index 452798b25b..5375f79028 100644 --- a/services/repository/fork_test.go +++ b/services/repository/fork_test.go @@ -4,13 +4,17 @@ package repository import ( + "os" "testing" + "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/test" + "code.gitea.io/gitea/modules/util" "github.com/stretchr/testify/assert" ) @@ -35,7 +39,7 @@ func TestForkRepository(t *testing.T) { assert.False(t, repo_model.IsErrReachLimitOfRepo(err)) // change AllowForkWithoutMaximumLimit to false for the test - setting.Repository.AllowForkWithoutMaximumLimit = false + defer test.MockVariableValue(&setting.Repository.AllowForkWithoutMaximumLimit, false)() // user has reached maximum limit of repositories user.MaxRepoCreation = 0 fork2, err := ForkRepository(git.DefaultContext, user, user, ForkRepoOptions{ @@ -46,3 +50,43 @@ func TestForkRepository(t *testing.T) { assert.Nil(t, fork2) assert.True(t, repo_model.IsErrReachLimitOfRepo(err)) } + +func TestForkRepositoryCleanup(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + // a successful fork + user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}) + repo10 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 10}) + + fork, err := ForkRepository(git.DefaultContext, user2, user2, ForkRepoOptions{ + BaseRepo: repo10, + Name: "test", + }) + assert.NoError(t, err) + assert.NotNil(t, fork) + + exist, err := util.IsExist(repo_model.RepoPath(user2.Name, "test")) + assert.NoError(t, err) + assert.True(t, exist) + + err = DeleteRepositoryDirectly(db.DefaultContext, fork.ID) + assert.NoError(t, err) + + // a failed creating because some mock data + // create the repository directory so that the creation will fail after database record created. 
+ assert.NoError(t, os.MkdirAll(repo_model.RepoPath(user2.Name, "test"), os.ModePerm)) + + fork2, err := ForkRepository(db.DefaultContext, user2, user2, ForkRepoOptions{ + BaseRepo: repo10, + Name: "test", + }) + assert.Nil(t, fork2) + assert.Error(t, err) + + // assert the cleanup is successful + unittest.AssertNotExistsBean(t, &repo_model.Repository{OwnerName: user2.Name, Name: "test"}) + + exist, err = util.IsExist(repo_model.RepoPath(user2.Name, "test")) + assert.NoError(t, err) + assert.False(t, exist) +} diff --git a/services/repository/generate.go b/services/repository/generate.go index 9d2bbb1f7f..867b5d7855 100644 --- a/services/repository/generate.go +++ b/services/repository/generate.go @@ -17,11 +17,11 @@ import ( git_model "code.gitea.io/gitea/models/git" repo_model "code.gitea.io/gitea/models/repo" - user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/log" repo_module "code.gitea.io/gitea/modules/repository" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" "github.com/gobwas/glob" @@ -42,10 +42,8 @@ type expansion struct { var defaultTransformers = []transformer{ {Name: "SNAKE", Transform: xstrings.ToSnakeCase}, {Name: "KEBAB", Transform: xstrings.ToKebabCase}, - {Name: "CAMEL", Transform: func(str string) string { - return xstrings.FirstRuneToLower(xstrings.ToCamelCase(str)) - }}, - {Name: "PASCAL", Transform: xstrings.ToCamelCase}, + {Name: "CAMEL", Transform: xstrings.ToCamelCase}, + {Name: "PASCAL", Transform: xstrings.ToPascalCase}, {Name: "LOWER", Transform: strings.ToLower}, {Name: "UPPER", Transform: strings.ToUpper}, {Name: "TITLE", Transform: util.ToTitleCase}, @@ -255,48 +253,35 @@ func generateRepoCommit(ctx context.Context, repo, templateRepo, generateRepo *r return initRepoCommit(ctx, tmpDir, repo, repo.Owner, defaultBranch) } -func generateGitContent(ctx context.Context, repo, templateRepo, generateRepo *repo_model.Repository) (err error) { - tmpDir, err := os.MkdirTemp(os.TempDir(), "gitea-"+repo.Name) +// GenerateGitContent generates git content from a template repository +func GenerateGitContent(ctx context.Context, templateRepo, generateRepo *repo_model.Repository) (err error) { + tmpDir, cleanup, err := setting.AppDataTempDir("git-repo-content").MkdirTempRandom("gitea-" + generateRepo.Name) if err != nil { - return fmt.Errorf("Failed to create temp dir for repository %s: %w", repo.FullName(), err) + return fmt.Errorf("failed to create temp dir for repository %s: %w", generateRepo.FullName(), err) } + defer cleanup() - defer func() { - if err := util.RemoveAll(tmpDir); err != nil { - log.Error("RemoveAll: %v", err) - } - }() - - if err = generateRepoCommit(ctx, repo, templateRepo, generateRepo, tmpDir); err != nil { + if err = generateRepoCommit(ctx, generateRepo, templateRepo, generateRepo, tmpDir); err != nil { return fmt.Errorf("generateRepoCommit: %w", err) } // re-fetch repo - if repo, err = repo_model.GetRepositoryByID(ctx, repo.ID); err != nil { + if generateRepo, err = repo_model.GetRepositoryByID(ctx, generateRepo.ID); err != nil { return fmt.Errorf("getRepositoryByID: %w", err) } // if there was no default branch supplied when generating the repo, use the default one from the template - if strings.TrimSpace(repo.DefaultBranch) == "" { - repo.DefaultBranch = templateRepo.DefaultBranch + if strings.TrimSpace(generateRepo.DefaultBranch) == "" { + generateRepo.DefaultBranch = templateRepo.DefaultBranch } - if err = 
gitrepo.SetDefaultBranch(ctx, repo, repo.DefaultBranch); err != nil { + if err = gitrepo.SetDefaultBranch(ctx, generateRepo, generateRepo.DefaultBranch); err != nil { return fmt.Errorf("setDefaultBranch: %w", err) } - if err = UpdateRepository(ctx, repo, false); err != nil { + if err = repo_model.UpdateRepositoryColsNoAutoTime(ctx, generateRepo, "default_branch"); err != nil { return fmt.Errorf("updateRepository: %w", err) } - return nil -} - -// GenerateGitContent generates git content from a template repository -func GenerateGitContent(ctx context.Context, templateRepo, generateRepo *repo_model.Repository) error { - if err := generateGitContent(ctx, generateRepo, templateRepo, generateRepo); err != nil { - return err - } - if err := repo_module.UpdateRepoSize(ctx, generateRepo); err != nil { return fmt.Errorf("failed to update size for repository: %w", err) } @@ -328,57 +313,6 @@ func (gro GenerateRepoOptions) IsValid() bool { gro.IssueLabels || gro.ProtectedBranch // or other items as they are added } -// generateRepository generates a repository from a template -func generateRepository(ctx context.Context, doer, owner *user_model.User, templateRepo *repo_model.Repository, opts GenerateRepoOptions) (_ *repo_model.Repository, err error) { - generateRepo := &repo_model.Repository{ - OwnerID: owner.ID, - Owner: owner, - OwnerName: owner.Name, - Name: opts.Name, - LowerName: strings.ToLower(opts.Name), - Description: opts.Description, - DefaultBranch: opts.DefaultBranch, - IsPrivate: opts.Private, - IsEmpty: !opts.GitContent || templateRepo.IsEmpty, - IsFsckEnabled: templateRepo.IsFsckEnabled, - TemplateID: templateRepo.ID, - TrustModel: templateRepo.TrustModel, - ObjectFormatName: templateRepo.ObjectFormatName, - } - - if err = CreateRepositoryByExample(ctx, doer, owner, generateRepo, false, false); err != nil { - return nil, err - } - - isExist, err := gitrepo.IsRepositoryExist(ctx, generateRepo) - if err != nil { - log.Error("Unable to check if %s exists. Error: %v", generateRepo.FullName(), err) - return nil, err - } - if isExist { - return nil, repo_model.ErrRepoFilesAlreadyExist{ - Uname: generateRepo.OwnerName, - Name: generateRepo.Name, - } - } - - if err = repo_module.CheckInitRepository(ctx, generateRepo); err != nil { - return generateRepo, err - } - - if err = repo_module.CheckDaemonExportOK(ctx, generateRepo); err != nil { - return generateRepo, fmt.Errorf("checkDaemonExportOK: %w", err) - } - - if stdout, _, err := git.NewCommand("update-server-info"). 
- RunStdString(ctx, &git.RunOpts{Dir: generateRepo.RepoPath()}); err != nil { - log.Error("GenerateRepository(git update-server-info) in %v: Stdout: %s\nError: %v", generateRepo, stdout, err) - return generateRepo, fmt.Errorf("error in GenerateRepository(git update-server-info): %w", err) - } - - return generateRepo, nil -} - var fileNameSanitizeRegexp = regexp.MustCompile(`(?i)\.\.|[<>:\"/\\|?*\x{0000}-\x{001F}]|^(con|prn|aux|nul|com\d|lpt\d)$`) // Sanitize user input to valid OS filenames diff --git a/services/repository/generate_test.go b/services/repository/generate_test.go index b0f97d0ffb..1163c392c9 100644 --- a/services/repository/generate_test.go +++ b/services/repository/generate_test.go @@ -7,6 +7,7 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) var giteaTemplate = []byte(` @@ -65,3 +66,26 @@ func TestFileNameSanitize(t *testing.T) { assert.Equal(t, "_", fileNameSanitize("\u0000")) assert.Equal(t, "目标", fileNameSanitize("目标")) } + +func TestTransformers(t *testing.T) { + cases := []struct { + name string + expected string + }{ + {"SNAKE", "abc_def_xyz"}, + {"KEBAB", "abc-def-xyz"}, + {"CAMEL", "abcDefXyz"}, + {"PASCAL", "AbcDefXyz"}, + {"LOWER", "abc_def-xyz"}, + {"UPPER", "ABC_DEF-XYZ"}, + {"TITLE", "Abc_def-Xyz"}, + } + + input := "Abc_Def-XYZ" + assert.Len(t, defaultTransformers, len(cases)) + for i, c := range cases { + tf := defaultTransformers[i] + require.Equal(t, c.name, tf.Name) + assert.Equal(t, c.expected, tf.Transform(input), "case %s", c.name) + } +} diff --git a/services/repository/gitgraph/graph_models.go b/services/repository/gitgraph/graph_models.go index c45662836b..02b0268cd9 100644 --- a/services/repository/gitgraph/graph_models.go +++ b/services/repository/gitgraph/graph_models.go @@ -121,7 +121,7 @@ func (graph *Graph) LoadAndProcessCommits(ctx context.Context, repository *repo_ return repo_model.IsOwnerMemberCollaborator(ctx, repository, user.ID) }, &keyMap) - statuses, _, err := git_model.GetLatestCommitStatus(ctx, repository.ID, c.Commit.ID.String(), db.ListOptions{}) + statuses, err := git_model.GetLatestCommitStatus(ctx, repository.ID, c.Commit.ID.String(), db.ListOptionsAll) if err != nil { log.Error("GetLatestCommitStatus: %v", err) } else { @@ -232,8 +232,8 @@ func newRefsFromRefNames(refNames []byte) []git.Reference { continue } refName := string(refNameBytes) - if strings.HasPrefix(refName, "tag: ") { - refName = strings.TrimPrefix(refName, "tag: ") + if after, ok := strings.CutPrefix(refName, "tag: "); ok { + refName = after } else { refName = strings.TrimPrefix(refName, "HEAD -> ") } diff --git a/services/repository/gitgraph/graph_test.go b/services/repository/gitgraph/graph_test.go index 4c48b94aa2..93fa1aec6a 100644 --- a/services/repository/gitgraph/graph_test.go +++ b/services/repository/gitgraph/graph_test.go @@ -6,6 +6,7 @@ package gitgraph import ( "bytes" "fmt" + "slices" "strings" "testing" @@ -117,13 +118,7 @@ func TestReleaseUnusedColors(t *testing.T) { if parser.firstAvailable == -1 { // All in use for _, color := range parser.availableColors { - found := false - for _, oldColor := range parser.oldColors { - if oldColor == color { - found = true - break - } - } + found := slices.Contains(parser.oldColors, color) if !found { t.Errorf("In testcase:\n%d\t%d\t%d %d =>\n%d\t%d\t%d %d: %d should be available but is not", testcase.availableColors, @@ -141,13 +136,7 @@ func TestReleaseUnusedColors(t *testing.T) { // Some in use for i := parser.firstInUse; i != 
parser.firstAvailable; i = (i + 1) % len(parser.availableColors) { color := parser.availableColors[i] - found := false - for _, oldColor := range parser.oldColors { - if oldColor == color { - found = true - break - } - } + found := slices.Contains(parser.oldColors, color) if !found { t.Errorf("In testcase:\n%d\t%d\t%d %d =>\n%d\t%d\t%d %d: %d should be available but is not", testcase.availableColors, @@ -163,13 +152,7 @@ func TestReleaseUnusedColors(t *testing.T) { } for i := parser.firstAvailable; i != parser.firstInUse; i = (i + 1) % len(parser.availableColors) { color := parser.availableColors[i] - found := false - for _, oldColor := range parser.oldColors { - if oldColor == color { - found = true - break - } - } + found := slices.Contains(parser.oldColors, color) if found { t.Errorf("In testcase:\n%d\t%d\t%d %d =>\n%d\t%d\t%d %d: %d should not be available but is", testcase.availableColors, diff --git a/services/repository/hooks.go b/services/repository/hooks.go index 2b3eb79153..c13b272550 100644 --- a/services/repository/hooks.go +++ b/services/repository/hooks.go @@ -31,11 +31,11 @@ func SyncRepositoryHooks(ctx context.Context) error { default: } - if err := gitrepo.CreateDelegateHooksForRepo(ctx, repo); err != nil { + if err := gitrepo.CreateDelegateHooks(ctx, repo); err != nil { return fmt.Errorf("SyncRepositoryHook: %w", err) } if repo.HasWiki() { - if err := gitrepo.CreateDelegateHooksForWiki(ctx, repo); err != nil { + if err := gitrepo.CreateDelegateHooks(ctx, repo.WikiStorageRepo()); err != nil { return fmt.Errorf("SyncRepositoryHook: %w", err) } } diff --git a/services/repository/init.go b/services/repository/init.go index bd777b8a2f..1eeeb4aa4f 100644 --- a/services/repository/init.go +++ b/services/repository/init.go @@ -42,9 +42,12 @@ func initRepoCommit(ctx context.Context, tmpPath string, repo *repo_model.Reposi cmd := git.NewCommand("commit", "--message=Initial commit"). AddOptionFormat("--author='%s <%s>'", sig.Name, sig.Email) - sign, keyID, signer, _ := asymkey_service.SignInitialCommit(ctx, tmpPath, u) + sign, key, signer, _ := asymkey_service.SignInitialCommit(ctx, tmpPath, u) if sign { - cmd.AddOptionFormat("-S%s", keyID) + if key.Format != "" { + cmd.AddConfig("gpg.format", key.Format) + } + cmd.AddOptionFormat("-S%s", key.KeyID) if repo.GetTrustModel() == repo_model.CommitterTrustModel || repo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel { // need to set the committer to the KeyID owner diff --git a/services/repository/license_test.go b/services/repository/license_test.go index 9e74a268f5..eb897f3c03 100644 --- a/services/repository/license_test.go +++ b/services/repository/license_test.go @@ -4,7 +4,6 @@ package repository import ( - "fmt" "strings" "testing" @@ -45,7 +44,7 @@ func Test_detectLicense(t *testing.T) { assert.NoError(t, err) tests = append(tests, DetectLicenseTest{ - name: fmt.Sprintf("single license test: %s", licenseName), + name: "single license test: " + licenseName, arg: string(license), want: []string{licenseName}, }) diff --git a/services/repository/merge_upstream.go b/services/repository/merge_upstream.go index 34e01df723..8d6f11372c 100644 --- a/services/repository/merge_upstream.go +++ b/services/repository/merge_upstream.go @@ -18,7 +18,7 @@ import ( ) // MergeUpstream merges the base repository's default branch into the fork repository's current branch. 
-func MergeUpstream(ctx reqctx.RequestContext, doer *user_model.User, repo *repo_model.Repository, branch string) (mergeStyle string, err error) { +func MergeUpstream(ctx reqctx.RequestContext, doer *user_model.User, repo *repo_model.Repository, branch string, ffOnly bool) (mergeStyle string, err error) { if err = repo.MustNotBeArchived(); err != nil { return "", err } @@ -45,6 +45,11 @@ func MergeUpstream(ctx reqctx.RequestContext, doer *user_model.User, repo *repo_ return "", err } + // If ff_only is requested and fast-forward failed, return error + if ffOnly { + return "", util.NewInvalidArgumentErrorf("fast-forward merge not possible: branch has diverged") + } + // TODO: FakePR: it is somewhat hacky, but it is the only way to "merge" at the moment // ideally in the future the "merge" functions should be refactored to decouple from the PullRequest fakeIssue := &issue_model.Issue{ diff --git a/services/repository/migrate.go b/services/repository/migrate.go index 1969b16a2d..0a3dc45339 100644 --- a/services/repository/migrate.go +++ b/services/repository/migrate.go @@ -13,6 +13,7 @@ import ( "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/organization" repo_model "code.gitea.io/gitea/models/repo" + unit_model "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/gitrepo" @@ -117,14 +118,8 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User, repo.Owner = u } - if err := repo_module.CheckDaemonExportOK(ctx, repo); err != nil { - return repo, fmt.Errorf("checkDaemonExportOK: %w", err) - } - - if stdout, _, err := git.NewCommand("update-server-info"). - RunStdString(ctx, &git.RunOpts{Dir: repoPath}); err != nil { - log.Error("MigrateRepositoryGitData(git update-server-info) in %v: Stdout: %s\nError: %v", repo, stdout, err) - return repo, fmt.Errorf("error in MigrateRepositoryGitData(git update-server-info): %w", err) + if err := updateGitRepoAfterCreate(ctx, repo); err != nil { + return nil, fmt.Errorf("updateGitRepoAfterCreate: %w", err) } gitRepo, err := git.OpenRepository(ctx, repoPath) @@ -141,12 +136,12 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User, if !repo.IsEmpty { if len(repo.DefaultBranch) == 0 { // Try to get HEAD branch and set it as default branch. 
- headBranch, err := gitRepo.GetHEADBranch() + headBranchName, err := git.GetDefaultBranch(ctx, repoPath) if err != nil { return repo, fmt.Errorf("GetHEADBranch: %w", err) } - if headBranch != nil { - repo.DefaultBranch = headBranch.Name + if headBranchName != "" { + repo.DefaultBranch = headBranchName } } @@ -154,9 +149,9 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User, return repo, fmt.Errorf("SyncRepoBranchesWithRepo: %v", err) } + // if releases migration are not requested, we will sync all tags here + // otherwise, the releases sync will be done out of this function if !opts.Releases { - // note: this will greatly improve release (tag) sync - // for pull-mirrors with many tags repo.IsMirror = opts.Mirror if err = repo_module.SyncReleasesWithTags(ctx, repo, gitRepo); err != nil { log.Error("Failed to synchronize tags to releases for repository: %v", err) @@ -225,10 +220,14 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User, } repo.IsMirror = true - if err = UpdateRepository(ctx, repo, false); err != nil { + if err = repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "num_watches", "is_empty", "default_branch", "default_wiki_branch", "is_mirror"); err != nil { return nil, err } + if err = repo_module.UpdateRepoSize(ctx, repo); err != nil { + log.Error("Failed to update size for repository: %v", err) + } + // this is necessary for sync local tags from remote configName := fmt.Sprintf("remote.%s.fetch", mirrorModel.GetRemoteName()) if stdout, _, err := git.NewCommand("config"). @@ -246,6 +245,19 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User, } } + var enableRepoUnits []repo_model.RepoUnit + if opts.Releases && !unit_model.TypeReleases.UnitGlobalDisabled() { + enableRepoUnits = append(enableRepoUnits, repo_model.RepoUnit{RepoID: repo.ID, Type: unit_model.TypeReleases}) + } + if opts.Wiki && !unit_model.TypeWiki.UnitGlobalDisabled() { + enableRepoUnits = append(enableRepoUnits, repo_model.RepoUnit{RepoID: repo.ID, Type: unit_model.TypeWiki}) + } + if len(enableRepoUnits) > 0 { + err = UpdateRepositoryUnits(ctx, repo, enableRepoUnits, nil) + if err != nil { + return nil, err + } + } return repo, committer.Commit() } @@ -265,11 +277,11 @@ func cleanUpMigrateGitConfig(ctx context.Context, repoPath string) error { // CleanUpMigrateInfo finishes migrating repository and/or wiki with things that don't need to be done for mirrors. 
func CleanUpMigrateInfo(ctx context.Context, repo *repo_model.Repository) (*repo_model.Repository, error) { - if err := gitrepo.CreateDelegateHooksForRepo(ctx, repo); err != nil { + if err := gitrepo.CreateDelegateHooks(ctx, repo); err != nil { return repo, fmt.Errorf("createDelegateHooks: %w", err) } if repo.HasWiki() { - if err := gitrepo.CreateDelegateHooksForWiki(ctx, repo); err != nil { + if err := gitrepo.CreateDelegateHooks(ctx, repo.WikiStorageRepo()); err != nil { return repo, fmt.Errorf("createDelegateHooks.(wiki): %w", err) } } diff --git a/services/repository/push.go b/services/repository/push.go index c40333f0a8..af3c873d15 100644 --- a/services/repository/push.go +++ b/services/repository/push.go @@ -66,7 +66,7 @@ func PushUpdates(opts []*repo_module.PushUpdateOptions) error { for _, opt := range opts { if opt.IsNewRef() && opt.IsDelRef() { - return fmt.Errorf("Old and new revisions are both NULL") + return errors.New("Old and new revisions are both NULL") } } @@ -167,8 +167,9 @@ func pushUpdates(optsList []*repo_module.PushUpdateOptions) error { } } - branch := opts.RefFullName.BranchName() if !opts.IsDelRef() { + branch := opts.RefFullName.BranchName() + log.Trace("TriggerTask '%s/%s' by %s", repo.Name, branch, pusher.Name) newCommit, err := gitRepo.GetCommit(opts.NewCommitID) @@ -176,60 +177,15 @@ func pushUpdates(optsList []*repo_module.PushUpdateOptions) error { return fmt.Errorf("gitRepo.GetCommit(%s) in %s/%s[%d]: %w", opts.NewCommitID, repo.OwnerName, repo.Name, repo.ID, err) } - refName := opts.RefName() - // Push new branch. var l []*git.Commit if opts.IsNewRef() { - if repo.IsEmpty { // Change default branch and empty status only if pushed ref is non-empty branch. - repo.DefaultBranch = refName - repo.IsEmpty = false - if repo.DefaultBranch != setting.Repository.DefaultBranch { - if err := gitrepo.SetDefaultBranch(ctx, repo, repo.DefaultBranch); err != nil { - return err - } - } - // Update the is empty and default_branch columns - if err := repo_model.UpdateRepositoryCols(ctx, repo, "default_branch", "is_empty"); err != nil { - return fmt.Errorf("UpdateRepositoryCols: %w", err) - } - } - - l, err = newCommit.CommitsBeforeLimit(10) - if err != nil { - return fmt.Errorf("newCommit.CommitsBeforeLimit: %w", err) - } - notify_service.CreateRef(ctx, pusher, repo, opts.RefFullName, opts.NewCommitID) + l, err = pushNewBranch(ctx, repo, pusher, opts, newCommit) } else { - l, err = newCommit.CommitsBeforeUntil(opts.OldCommitID) - if err != nil { - return fmt.Errorf("newCommit.CommitsBeforeUntil: %w", err) - } - - isForcePush, err := newCommit.IsForcePush(opts.OldCommitID) - if err != nil { - log.Error("IsForcePush %s:%s failed: %v", repo.FullName(), branch, err) - } - - // only update branch can trigger pull request task because the pull request hasn't been created yet when creaing a branch - go pull_service.AddTestPullRequestTask(pull_service.TestPullRequestOptions{ - RepoID: repo.ID, - Doer: pusher, - Branch: branch, - IsSync: true, - IsForcePush: isForcePush, - OldCommitID: opts.OldCommitID, - NewCommitID: opts.NewCommitID, - }) - - if isForcePush { - log.Trace("Push %s is a force push", opts.NewCommitID) - - cache.Remove(repo.GetCommitsCountCacheKey(opts.RefName(), true)) - } else { - // TODO: increment update the commit count cache but not remove - cache.Remove(repo.GetCommitsCountCacheKey(opts.RefName(), true)) - } + l, err = pushUpdateBranch(ctx, repo, pusher, opts, newCommit) + } + if err != nil { + return err } // delete cache for divergence @@ -246,36 +202,11 @@ 
func pushUpdates(optsList []*repo_module.PushUpdateOptions) error { commits := repo_module.GitToPushCommits(l) commits.HeadCommit = repo_module.CommitToPushCommit(newCommit) - if err := issue_service.UpdateIssuesCommit(ctx, pusher, repo, commits.Commits, refName); err != nil { + if err := issue_service.UpdateIssuesCommit(ctx, pusher, repo, commits.Commits, opts.RefName()); err != nil { log.Error("updateIssuesCommit: %v", err) } - oldCommitID := opts.OldCommitID - if oldCommitID == objectFormat.EmptyObjectID().String() && len(commits.Commits) > 0 { - oldCommit, err := gitRepo.GetCommit(commits.Commits[len(commits.Commits)-1].Sha1) - if err != nil && !git.IsErrNotExist(err) { - log.Error("unable to GetCommit %s from %-v: %v", oldCommitID, repo, err) - } - if oldCommit != nil { - for i := 0; i < oldCommit.ParentCount(); i++ { - commitID, _ := oldCommit.ParentID(i) - if !commitID.IsZero() { - oldCommitID = commitID.String() - break - } - } - } - } - - if oldCommitID == objectFormat.EmptyObjectID().String() && repo.DefaultBranch != branch { - oldCommitID = repo.DefaultBranch - } - - if oldCommitID != objectFormat.EmptyObjectID().String() { - commits.CompareURL = repo.ComposeCompareURL(oldCommitID, opts.NewCommitID) - } else { - commits.CompareURL = "" - } + commits.CompareURL = getCompareURL(repo, gitRepo, objectFormat, commits.Commits, opts) if len(commits.Commits) > setting.UI.FeedMaxCommitNum { commits.Commits = commits.Commits[:setting.UI.FeedMaxCommitNum] @@ -288,12 +219,7 @@ func pushUpdates(optsList []*repo_module.PushUpdateOptions) error { log.Error("repo_module.CacheRef %s/%s failed: %v", repo.ID, branch, err) } } else { - notify_service.DeleteRef(ctx, pusher, repo, opts.RefFullName) - - if err := pull_service.AdjustPullsCausedByBranchDeleted(ctx, pusher, repo, branch); err != nil { - // close all related pulls - log.Error("close related pull request failed: %v", err) - } + pushDeleteBranch(ctx, repo, pusher, opts) } // Even if user delete a branch on a repository which he didn't watch, he will be watch that. @@ -304,8 +230,11 @@ func pushUpdates(optsList []*repo_module.PushUpdateOptions) error { log.Trace("Non-tag and non-branch commits pushed.") } } - if err := PushUpdateAddDeleteTags(ctx, repo, gitRepo, addTags, delTags); err != nil { - return fmt.Errorf("PushUpdateAddDeleteTags: %w", err) + + if len(addTags)+len(delTags) > 0 { + if err := PushUpdateAddDeleteTags(ctx, repo, gitRepo, pusher, addTags, delTags); err != nil { + return fmt.Errorf("PushUpdateAddDeleteTags: %w", err) + } } // Change repository last updated time. 
@@ -316,18 +245,114 @@ func pushUpdates(optsList []*repo_module.PushUpdateOptions) error { return nil } +func getCompareURL(repo *repo_model.Repository, gitRepo *git.Repository, objectFormat git.ObjectFormat, commits []*repo_module.PushCommit, opts *repo_module.PushUpdateOptions) string { + oldCommitID := opts.OldCommitID + if oldCommitID == objectFormat.EmptyObjectID().String() && len(commits) > 0 { + oldCommit, err := gitRepo.GetCommit(commits[len(commits)-1].Sha1) + if err != nil && !git.IsErrNotExist(err) { + log.Error("unable to GetCommit %s from %-v: %v", oldCommitID, repo, err) + } + if oldCommit != nil { + for i := 0; i < oldCommit.ParentCount(); i++ { + commitID, _ := oldCommit.ParentID(i) + if !commitID.IsZero() { + oldCommitID = commitID.String() + break + } + } + } + } + + if oldCommitID == objectFormat.EmptyObjectID().String() && repo.DefaultBranch != opts.RefFullName.BranchName() { + oldCommitID = repo.DefaultBranch + } + + if oldCommitID != objectFormat.EmptyObjectID().String() { + return repo.ComposeCompareURL(oldCommitID, opts.NewCommitID) + } + return "" +} + +func pushNewBranch(ctx context.Context, repo *repo_model.Repository, pusher *user_model.User, opts *repo_module.PushUpdateOptions, newCommit *git.Commit) ([]*git.Commit, error) { + if repo.IsEmpty { // Change default branch and empty status only if pushed ref is non-empty branch. + repo.DefaultBranch = opts.RefName() + repo.IsEmpty = false + if repo.DefaultBranch != setting.Repository.DefaultBranch { + if err := gitrepo.SetDefaultBranch(ctx, repo, repo.DefaultBranch); err != nil { + return nil, err + } + } + // Update the is empty and default_branch columns + if err := repo_model.UpdateRepositoryColsWithAutoTime(ctx, repo, "default_branch", "is_empty"); err != nil { + return nil, fmt.Errorf("UpdateRepositoryCols: %w", err) + } + } + + l, err := newCommit.CommitsBeforeLimit(10) + if err != nil { + return nil, fmt.Errorf("newCommit.CommitsBeforeLimit: %w", err) + } + notify_service.CreateRef(ctx, pusher, repo, opts.RefFullName, opts.NewCommitID) + return l, nil +} + +func pushUpdateBranch(_ context.Context, repo *repo_model.Repository, pusher *user_model.User, opts *repo_module.PushUpdateOptions, newCommit *git.Commit) ([]*git.Commit, error) { + l, err := newCommit.CommitsBeforeUntil(opts.OldCommitID) + if err != nil { + return nil, fmt.Errorf("newCommit.CommitsBeforeUntil: %w", err) + } + + branch := opts.RefFullName.BranchName() + + isForcePush, err := newCommit.IsForcePush(opts.OldCommitID) + if err != nil { + log.Error("IsForcePush %s:%s failed: %v", repo.FullName(), branch, err) + } + + // only update branch can trigger pull request task because the pull request hasn't been created yet when creating a branch + go pull_service.AddTestPullRequestTask(pull_service.TestPullRequestOptions{ + RepoID: repo.ID, + Doer: pusher, + Branch: branch, + IsSync: true, + IsForcePush: isForcePush, + OldCommitID: opts.OldCommitID, + NewCommitID: opts.NewCommitID, + }) + + if isForcePush { + log.Trace("Push %s is a force push", opts.NewCommitID) + + cache.Remove(repo.GetCommitsCountCacheKey(opts.RefName(), true)) + } else { + // TODO: increment update the commit count cache but not remove + cache.Remove(repo.GetCommitsCountCacheKey(opts.RefName(), true)) + } + + return l, nil +} + +func pushDeleteBranch(ctx context.Context, repo *repo_model.Repository, pusher *user_model.User, opts *repo_module.PushUpdateOptions) { + notify_service.DeleteRef(ctx, pusher, repo, opts.RefFullName) + + if err := 
pull_service.AdjustPullsCausedByBranchDeleted(ctx, pusher, repo, opts.RefFullName.BranchName()); err != nil { + // close all related pulls + log.Error("close related pull request failed: %v", err) + } +} + // PushUpdateAddDeleteTags updates a number of added and delete tags -func PushUpdateAddDeleteTags(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, addTags, delTags []string) error { +func PushUpdateAddDeleteTags(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, pusher *user_model.User, addTags, delTags []string) error { return db.WithTx(ctx, func(ctx context.Context) error { - if err := repo_model.PushUpdateDeleteTagsContext(ctx, repo, delTags); err != nil { + if err := repo_model.PushUpdateDeleteTags(ctx, repo, delTags); err != nil { return err } - return pushUpdateAddTags(ctx, repo, gitRepo, addTags) + return pushUpdateAddTags(ctx, repo, gitRepo, pusher, addTags) }) } // pushUpdateAddTags updates a number of add tags -func pushUpdateAddTags(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, tags []string) error { +func pushUpdateAddTags(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, pusher *user_model.User, tags []string) error { if len(tags) == 0 { return nil } @@ -353,14 +378,12 @@ func pushUpdateAddTags(ctx context.Context, repo *repo_model.Repository, gitRepo newReleases := make([]*repo_model.Release, 0, len(lowerTags)-len(relMap)) - emailToUser := make(map[string]*user_model.User) - for i, lowerTag := range lowerTags { tag, err := gitRepo.GetTag(tags[i]) if err != nil { return fmt.Errorf("GetTag: %w", err) } - commit, err := tag.Commit(gitRepo) + commit, err := gitRepo.GetTagCommit(tag.Name) if err != nil { return fmt.Errorf("Commit: %w", err) } @@ -372,29 +395,12 @@ func pushUpdateAddTags(ctx context.Context, repo *repo_model.Repository, gitRepo if sig == nil { sig = commit.Committer } - var author *user_model.User - createdAt := time.Unix(1, 0) + createdAt := time.Unix(1, 0) if sig != nil { - var ok bool - author, ok = emailToUser[sig.Email] - if !ok { - author, err = user_model.GetUserByEmail(ctx, sig.Email) - if err != nil && !user_model.IsErrUserNotExist(err) { - return fmt.Errorf("GetUserByEmail: %w", err) - } - if author != nil { - emailToUser[sig.Email] = author - } - } createdAt = sig.When } - commitsCount, err := commit.CommitsCount() - if err != nil { - return fmt.Errorf("CommitsCount: %w", err) - } - rel, has := relMap[lowerTag] parts := strings.SplitN(tag.Message, "\n", 2) @@ -410,31 +416,26 @@ func pushUpdateAddTags(ctx context.Context, repo *repo_model.Repository, gitRepo LowerTagName: lowerTag, Target: "", Sha1: commit.ID.String(), - NumCommits: commitsCount, + NumCommits: -1, // the commits count will be updated when the UI needs it Note: note, IsDraft: false, IsPrerelease: false, IsTag: true, + PublisherID: pusher.ID, CreatedUnix: timeutil.TimeStamp(createdAt.Unix()), } - if author != nil { - rel.PublisherID = author.ID - } newReleases = append(newReleases, rel) } else { rel.Sha1 = commit.ID.String() rel.CreatedUnix = timeutil.TimeStamp(createdAt.Unix()) - rel.NumCommits = commitsCount if rel.IsTag { rel.Title = parts[0] rel.Note = note - if author != nil { - rel.PublisherID = author.ID - } } else { rel.IsDraft = false } + rel.PublisherID = pusher.ID if err = repo_model.UpdateRelease(ctx, rel); err != nil { return fmt.Errorf("Update: %w", err) } diff --git a/services/repository/repository.go b/services/repository/repository.go index fcc617979e..e574dc6c01 
100644 --- a/services/repository/repository.go +++ b/services/repository/repository.go @@ -5,22 +5,29 @@ package repository import ( "context" + "errors" "fmt" + "os" + "path/filepath" + "strings" + activities_model "code.gitea.io/gitea/models/activities" "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/git" issues_model "code.gitea.io/gitea/models/issues" "code.gitea.io/gitea/models/organization" + access_model "code.gitea.io/gitea/models/perm/access" repo_model "code.gitea.io/gitea/models/repo" - system_model "code.gitea.io/gitea/models/system" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/graceful" + issue_indexer "code.gitea.io/gitea/modules/indexer/issues" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/queue" repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/util" notify_service "code.gitea.io/gitea/services/notify" pull_service "code.gitea.io/gitea/services/pull" ) @@ -40,7 +47,7 @@ type WebSearchResults struct { // CreateRepository creates a repository for the user/organization. func CreateRepository(ctx context.Context, doer, owner *user_model.User, opts CreateRepoOptions) (*repo_model.Repository, error) { - repo, err := CreateRepositoryDirectly(ctx, doer, owner, opts) + repo, err := CreateRepositoryDirectly(ctx, doer, owner, opts, true) if err != nil { // No need to rollback here we should do this in CreateRepository... return nil, err @@ -62,7 +69,7 @@ func DeleteRepository(ctx context.Context, doer *user_model.User, repo *repo_mod notify_service.DeleteRepository(ctx, doer, repo) } - return DeleteRepositoryDirectly(ctx, doer, repo.ID) + return DeleteRepositoryDirectly(ctx, repo.ID) } // PushCreateRepo creates a repository when a new repository is pushed to an appropriate namespace @@ -72,10 +79,10 @@ func PushCreateRepo(ctx context.Context, authUser, owner *user_model.User, repoN if ok, err := organization.CanCreateOrgRepo(ctx, owner.ID, authUser.ID); err != nil { return nil, err } else if !ok { - return nil, fmt.Errorf("cannot push-create repository for org") + return nil, errors.New("cannot push-create repository for org") } } else if authUser.ID != owner.ID { - return nil, fmt.Errorf("cannot push-create repository for another user") + return nil, errors.New("cannot push-create repository for another user") } } @@ -94,15 +101,13 @@ func PushCreateRepo(ctx context.Context, authUser, owner *user_model.User, repoN func Init(ctx context.Context) error { licenseUpdaterQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "repo_license_updater", repoLicenseUpdater) if licenseUpdaterQueue == nil { - return fmt.Errorf("unable to create repo_license_updater queue") + return errors.New("unable to create repo_license_updater queue") } go graceful.GetManager().RunWithCancel(licenseUpdaterQueue) if err := repo_module.LoadRepoConfig(); err != nil { return err } - system_model.RemoveAllWithNotice(ctx, "Clean up temporary repository uploads", setting.Repository.Upload.TempPath) - system_model.RemoveAllWithNotice(ctx, "Clean up temporary repositories", repo_module.LocalCopyPath()) if err := initPushQueue(); err != nil { return err } @@ -111,42 +116,107 @@ func Init(ctx context.Context) error { // UpdateRepository updates a repository func UpdateRepository(ctx context.Context, repo *repo_model.Repository, visibilityChanged bool) (err error) { - ctx, committer, err := 
db.TxContext(ctx) - if err != nil { - return err - } - defer committer.Close() - - if err = repo_module.UpdateRepository(ctx, repo, visibilityChanged); err != nil { - return fmt.Errorf("updateRepository: %w", err) - } - - return committer.Commit() + return db.WithTx(ctx, func(ctx context.Context) error { + if err = updateRepository(ctx, repo, visibilityChanged); err != nil { + return fmt.Errorf("updateRepository: %w", err) + } + return nil + }) } -func UpdateRepositoryVisibility(ctx context.Context, repo *repo_model.Repository, isPrivate bool) (err error) { - ctx, committer, err := db.TxContext(ctx) - if err != nil { - return err - } +func MakeRepoPublic(ctx context.Context, repo *repo_model.Repository) (err error) { + return db.WithTx(ctx, func(ctx context.Context) error { + repo.IsPrivate = false + if err := repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "is_private"); err != nil { + return err + } - defer committer.Close() + if err = repo.LoadOwner(ctx); err != nil { + return fmt.Errorf("LoadOwner: %w", err) + } + if repo.Owner.IsOrganization() { + // Organization repository need to recalculate access table when visibility is changed. + if err = access_model.RecalculateTeamAccesses(ctx, repo, 0); err != nil { + return fmt.Errorf("recalculateTeamAccesses: %w", err) + } + } - repo.IsPrivate = isPrivate + // Create/Remove git-daemon-export-ok for git-daemon... + if err := checkDaemonExportOK(ctx, repo); err != nil { + return err + } - if err = repo_module.UpdateRepository(ctx, repo, true); err != nil { - return fmt.Errorf("UpdateRepositoryVisibility: %w", err) - } + forkRepos, err := repo_model.GetRepositoriesByForkID(ctx, repo.ID) + if err != nil { + return fmt.Errorf("getRepositoriesByForkID: %w", err) + } - return committer.Commit() -} + if repo.Owner.Visibility != structs.VisibleTypePrivate { + for i := range forkRepos { + if err = MakeRepoPublic(ctx, forkRepos[i]); err != nil { + return fmt.Errorf("MakeRepoPublic[%d]: %w", forkRepos[i].ID, err) + } + } + } -func MakeRepoPublic(ctx context.Context, repo *repo_model.Repository) (err error) { - return UpdateRepositoryVisibility(ctx, repo, false) + // If visibility is changed, we need to update the issue indexer. + // Since the data in the issue indexer have field to indicate if the repo is public or not. + issue_indexer.UpdateRepoIndexer(ctx, repo.ID) + + return nil + }) } func MakeRepoPrivate(ctx context.Context, repo *repo_model.Repository) (err error) { - return UpdateRepositoryVisibility(ctx, repo, true) + return db.WithTx(ctx, func(ctx context.Context) error { + repo.IsPrivate = true + if err := repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "is_private"); err != nil { + return err + } + + if err = repo.LoadOwner(ctx); err != nil { + return fmt.Errorf("LoadOwner: %w", err) + } + if repo.Owner.IsOrganization() { + // Organization repository need to recalculate access table when visibility is changed. + if err = access_model.RecalculateTeamAccesses(ctx, repo, 0); err != nil { + return fmt.Errorf("recalculateTeamAccesses: %w", err) + } + } + + // If repo has become private, we need to set its actions to private. + _, err = db.GetEngine(ctx).Where("repo_id = ?", repo.ID).Cols("is_private").Update(&activities_model.Action{ + IsPrivate: true, + }) + if err != nil { + return err + } + + if err = repo_model.ClearRepoStars(ctx, repo.ID); err != nil { + return err + } + + // Create/Remove git-daemon-export-ok for git-daemon... 
+ if err := checkDaemonExportOK(ctx, repo); err != nil { + return err + } + + forkRepos, err := repo_model.GetRepositoriesByForkID(ctx, repo.ID) + if err != nil { + return fmt.Errorf("getRepositoriesByForkID: %w", err) + } + for i := range forkRepos { + if err = MakeRepoPrivate(ctx, forkRepos[i]); err != nil { + return fmt.Errorf("MakeRepoPrivate[%d]: %w", forkRepos[i].ID, err) + } + } + + // If visibility is changed, we need to update the issue indexer. + // Since the data in the issue indexer have field to indicate if the repo is public or not. + issue_indexer.UpdateRepoIndexer(ctx, repo.ID) + + return nil + }) } // LinkedRepository returns the linked repo if any @@ -172,3 +242,97 @@ func LinkedRepository(ctx context.Context, a *repo_model.Attachment) (*repo_mode } return nil, -1, nil } + +// checkDaemonExportOK creates/removes git-daemon-export-ok for git-daemon... +func checkDaemonExportOK(ctx context.Context, repo *repo_model.Repository) error { + if err := repo.LoadOwner(ctx); err != nil { + return err + } + + // Create/Remove git-daemon-export-ok for git-daemon... + daemonExportFile := filepath.Join(repo.RepoPath(), `git-daemon-export-ok`) + + isExist, err := util.IsExist(daemonExportFile) + if err != nil { + log.Error("Unable to check if %s exists. Error: %v", daemonExportFile, err) + return err + } + + isPublic := !repo.IsPrivate && repo.Owner.Visibility == structs.VisibleTypePublic + if !isPublic && isExist { + if err = util.Remove(daemonExportFile); err != nil { + log.Error("Failed to remove %s: %v", daemonExportFile, err) + } + } else if isPublic && !isExist { + if f, err := os.Create(daemonExportFile); err != nil { + log.Error("Failed to create %s: %v", daemonExportFile, err) + } else { + f.Close() + } + } + + return nil +} + +// updateRepository updates a repository with db context +func updateRepository(ctx context.Context, repo *repo_model.Repository, visibilityChanged bool) (err error) { + repo.LowerName = strings.ToLower(repo.Name) + + e := db.GetEngine(ctx) + + if _, err = e.ID(repo.ID).NoAutoTime().AllCols().Update(repo); err != nil { + return fmt.Errorf("update: %w", err) + } + + if err = repo_module.UpdateRepoSize(ctx, repo); err != nil { + log.Error("Failed to update size for repository: %v", err) + } + + if visibilityChanged { + if err = repo.LoadOwner(ctx); err != nil { + return fmt.Errorf("LoadOwner: %w", err) + } + if repo.Owner.IsOrganization() { + // Organization repository need to recalculate access table when visibility is changed. + if err = access_model.RecalculateTeamAccesses(ctx, repo, 0); err != nil { + return fmt.Errorf("recalculateTeamAccesses: %w", err) + } + } + + // If repo has become private, we need to set its actions to private. + if repo.IsPrivate { + _, err = e.Where("repo_id = ?", repo.ID).Cols("is_private").Update(&activities_model.Action{ + IsPrivate: true, + }) + if err != nil { + return err + } + + if err = repo_model.ClearRepoStars(ctx, repo.ID); err != nil { + return err + } + } + + // Create/Remove git-daemon-export-ok for git-daemon... 
+ if err := checkDaemonExportOK(ctx, repo); err != nil { + return err + } + + forkRepos, err := repo_model.GetRepositoriesByForkID(ctx, repo.ID) + if err != nil { + return fmt.Errorf("getRepositoriesByForkID: %w", err) + } + for i := range forkRepos { + forkRepos[i].IsPrivate = repo.IsPrivate || repo.Owner.Visibility == structs.VisibleTypePrivate + if err = updateRepository(ctx, forkRepos[i], true); err != nil { + return fmt.Errorf("updateRepository[%d]: %w", forkRepos[i].ID, err) + } + } + + // If visibility is changed, we need to update the issue indexer. + // Since the data in the issue indexer have field to indicate if the repo is public or not. + issue_indexer.UpdateRepoIndexer(ctx, repo.ID) + } + + return nil +} diff --git a/services/repository/repository_test.go b/services/repository/repository_test.go index 892a11a23e..8f9fdf8fa1 100644 --- a/services/repository/repository_test.go +++ b/services/repository/repository_test.go @@ -6,6 +6,7 @@ package repository import ( "testing" + activities_model "code.gitea.io/gitea/models/activities" "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" @@ -40,3 +41,23 @@ func TestLinkedRepository(t *testing.T) { }) } } + +func TestUpdateRepositoryVisibilityChanged(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + // Get sample repo and change visibility + repo, err := repo_model.GetRepositoryByID(db.DefaultContext, 9) + assert.NoError(t, err) + repo.IsPrivate = true + + // Update it + err = updateRepository(db.DefaultContext, repo, true) + assert.NoError(t, err) + + // Check visibility of action has become private + act := activities_model.Action{} + _, err = db.GetEngine(db.DefaultContext).ID(3).Get(&act) + + assert.NoError(t, err) + assert.True(t, act.IsPrivate) +} diff --git a/services/repository/template.go b/services/repository/template.go index 36a680c8e2..6906a60083 100644 --- a/services/repository/template.go +++ b/services/repository/template.go @@ -5,12 +5,17 @@ package repository import ( "context" + "fmt" + "strings" "code.gitea.io/gitea/models/db" git_model "code.gitea.io/gitea/models/git" issues_model "code.gitea.io/gitea/models/issues" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/gitrepo" + "code.gitea.io/gitea/modules/log" notify_service "code.gitea.io/gitea/services/notify" ) @@ -63,70 +68,124 @@ func GenerateProtectedBranch(ctx context.Context, templateRepo, generateRepo *re // GenerateRepository generates a repository from a template func GenerateRepository(ctx context.Context, doer, owner *user_model.User, templateRepo *repo_model.Repository, opts GenerateRepoOptions) (_ *repo_model.Repository, err error) { - if !doer.IsAdmin && !owner.CanCreateRepo() { + if !doer.CanCreateRepoIn(owner) { return nil, repo_model.ErrReachLimitOfRepo{ Limit: owner.MaxRepoCreation, } } - var generateRepo *repo_model.Repository - if err = db.WithTx(ctx, func(ctx context.Context) error { - generateRepo, err = generateRepository(ctx, doer, owner, templateRepo, opts) + generateRepo := &repo_model.Repository{ + OwnerID: owner.ID, + Owner: owner, + OwnerName: owner.Name, + Name: opts.Name, + LowerName: strings.ToLower(opts.Name), + Description: opts.Description, + DefaultBranch: opts.DefaultBranch, + IsPrivate: opts.Private, + IsEmpty: !opts.GitContent || templateRepo.IsEmpty, + IsFsckEnabled: templateRepo.IsFsckEnabled, + TemplateID: templateRepo.ID, + TrustModel: 
templateRepo.TrustModel, + ObjectFormatName: templateRepo.ObjectFormatName, + Status: repo_model.RepositoryBeingMigrated, + } + + // 1 - Create the repository in the database + if err := db.WithTx(ctx, func(ctx context.Context) error { + return createRepositoryInDB(ctx, doer, owner, generateRepo, false) + }); err != nil { + return nil, err + } + + // last - clean up the repository if something goes wrong + defer func() { if err != nil { - return err + // we can not use the ctx because it maybe canceled or timeout + cleanupRepository(generateRepo.ID) } + }() - // Git Content - if opts.GitContent && !templateRepo.IsEmpty { - if err = GenerateGitContent(ctx, templateRepo, generateRepo); err != nil { - return err - } + // 2 - check whether the repository with the same storage exists + isExist, err := gitrepo.IsRepositoryExist(ctx, generateRepo) + if err != nil { + log.Error("Unable to check if %s exists. Error: %v", generateRepo.FullName(), err) + return nil, err + } + if isExist { + // Don't return directly, we need err in defer to cleanupRepository + err = repo_model.ErrRepoFilesAlreadyExist{ + Uname: generateRepo.OwnerName, + Name: generateRepo.Name, } + return nil, err + } - // Topics - if opts.Topics { - if err = repo_model.GenerateTopics(ctx, templateRepo, generateRepo); err != nil { - return err - } + // 3 -Init git bare new repository. + if err = git.InitRepository(ctx, generateRepo.RepoPath(), true, generateRepo.ObjectFormatName); err != nil { + return nil, fmt.Errorf("git.InitRepository: %w", err) + } else if err = gitrepo.CreateDelegateHooks(ctx, generateRepo); err != nil { + return nil, fmt.Errorf("createDelegateHooks: %w", err) + } + + // 4 - Update the git repository + if err = updateGitRepoAfterCreate(ctx, generateRepo); err != nil { + return nil, fmt.Errorf("updateGitRepoAfterCreate: %w", err) + } + + // 5 - generate the repository contents according to the template + // Git Content + if opts.GitContent && !templateRepo.IsEmpty { + if err = GenerateGitContent(ctx, templateRepo, generateRepo); err != nil { + return nil, err } + } - // Git Hooks - if opts.GitHooks { - if err = GenerateGitHooks(ctx, templateRepo, generateRepo); err != nil { - return err - } + // Topics + if opts.Topics { + if err = repo_model.GenerateTopics(ctx, templateRepo, generateRepo); err != nil { + return nil, err } + } - // Webhooks - if opts.Webhooks { - if err = GenerateWebhooks(ctx, templateRepo, generateRepo); err != nil { - return err - } + // Git Hooks + if opts.GitHooks { + if err = GenerateGitHooks(ctx, templateRepo, generateRepo); err != nil { + return nil, err } + } - // Avatar - if opts.Avatar && len(templateRepo.Avatar) > 0 { - if err = generateAvatar(ctx, templateRepo, generateRepo); err != nil { - return err - } + // Webhooks + if opts.Webhooks { + if err = GenerateWebhooks(ctx, templateRepo, generateRepo); err != nil { + return nil, err } + } - // Issue Labels - if opts.IssueLabels { - if err = GenerateIssueLabels(ctx, templateRepo, generateRepo); err != nil { - return err - } + // Avatar + if opts.Avatar && len(templateRepo.Avatar) > 0 { + if err = generateAvatar(ctx, templateRepo, generateRepo); err != nil { + return nil, err } + } - if opts.ProtectedBranch { - if err = GenerateProtectedBranch(ctx, templateRepo, generateRepo); err != nil { - return err - } + // Issue Labels + if opts.IssueLabels { + if err = GenerateIssueLabels(ctx, templateRepo, generateRepo); err != nil { + return nil, err } + } - return nil - }); err != nil { - return nil, err + if opts.ProtectedBranch { + if err = 
GenerateProtectedBranch(ctx, templateRepo, generateRepo); err != nil { + return nil, err + } + } + + // 6 - update repository status to be ready + generateRepo.Status = repo_model.RepositoryReady + if err = repo_model.UpdateRepositoryColsWithAutoTime(ctx, generateRepo, "status"); err != nil { + return nil, fmt.Errorf("UpdateRepositoryCols: %w", err) } notify_service.CreateRepository(ctx, doer, owner, generateRepo) diff --git a/services/repository/transfer.go b/services/repository/transfer.go index 3940b2a142..5ad63cca67 100644 --- a/services/repository/transfer.go +++ b/services/repository/transfer.go @@ -20,10 +20,22 @@ import ( "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/globallock" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" notify_service "code.gitea.io/gitea/services/notify" ) +type LimitReachedError struct{ Limit int } + +func (LimitReachedError) Error() string { + return "Repository limit has been reached" +} + +func IsRepositoryLimitReached(err error) bool { + _, ok := err.(LimitReachedError) + return ok +} + func getRepoWorkingLockKey(repoID int64) string { return fmt.Sprintf("repo_working_%d", repoID) } @@ -49,6 +61,11 @@ func AcceptTransferOwnership(ctx context.Context, repo *repo_model.Repository, d return err } + if !doer.CanCreateRepoIn(repoTransfer.Recipient) { + limit := util.Iif(repoTransfer.Recipient.MaxRepoCreation >= 0, repoTransfer.Recipient.MaxRepoCreation, setting.Repository.MaxCreationLimit) + return LimitReachedError{Limit: limit} + } + if !repoTransfer.CanUserAcceptOrRejectTransfer(ctx, doer) { return util.ErrPermissionDenied } @@ -143,7 +160,7 @@ func transferOwnership(ctx context.Context, doer *user_model.User, newOwnerName repo.OwnerName = newOwner.Name // Update repository. 
- if err := repo_model.UpdateRepositoryCols(ctx, repo, "owner_id", "owner_name"); err != nil { + if err := repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "owner_id", "owner_name"); err != nil { return fmt.Errorf("update owner: %w", err) } @@ -287,7 +304,7 @@ func transferOwnership(ctx context.Context, doer *user_model.User, newOwnerName return fmt.Errorf("deleteRepositoryTransfer: %w", err) } repo.Status = repo_model.RepositoryReady - if err := repo_model.UpdateRepositoryCols(ctx, repo, "status"); err != nil { + if err := repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "status"); err != nil { return err } @@ -331,12 +348,13 @@ func changeRepositoryName(ctx context.Context, repo *repo_model.Repository, newR return fmt.Errorf("IsRepositoryExist: %w", err) } else if has { return repo_model.ErrRepoAlreadyExist{ - Uname: repo.Owner.Name, + Uname: repo.OwnerName, Name: newRepoName, } } - if err = gitrepo.RenameRepository(ctx, repo, newRepoName); err != nil { + if err = gitrepo.RenameRepository(ctx, repo, + repo_model.StorageRepo(repo_model.RelativePath(repo.OwnerName, newRepoName))); err != nil { return fmt.Errorf("rename repository directory: %w", err) } @@ -398,6 +416,11 @@ func StartRepositoryTransfer(ctx context.Context, doer, newOwner *user_model.Use return err } + if !doer.CanForkRepoIn(newOwner) { + limit := util.Iif(newOwner.MaxRepoCreation >= 0, newOwner.MaxRepoCreation, setting.Repository.MaxCreationLimit) + return LimitReachedError{Limit: limit} + } + var isDirectTransfer bool oldOwnerName := repo.OwnerName @@ -472,7 +495,7 @@ func RejectRepositoryTransfer(ctx context.Context, repo *repo_model.Repository, } repo.Status = repo_model.RepositoryReady - if err := repo_model.UpdateRepositoryCols(ctx, repo, "status"); err != nil { + if err := repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "status"); err != nil { return err } @@ -520,7 +543,7 @@ func CancelRepositoryTransfer(ctx context.Context, repoTransfer *repo_model.Repo } repoTransfer.Repo.Status = repo_model.RepositoryReady - if err := repo_model.UpdateRepositoryCols(ctx, repoTransfer.Repo, "status"); err != nil { + if err := repo_model.UpdateRepositoryColsNoAutoTime(ctx, repoTransfer.Repo, "status"); err != nil { return err } diff --git a/services/repository/transfer_test.go b/services/repository/transfer_test.go index 16a8fb6e1e..80a073e9f9 100644 --- a/services/repository/transfer_test.go +++ b/services/repository/transfer_test.go @@ -1,4 +1,4 @@ -// Copyright 2019 The Gitea Authors. All rights reserved. +// Copyright 2025 The Gitea Authors. All rights reserved. 
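Both the AcceptTransferOwnership and StartRepositoryTransfer checks above compute the reported limit the same way: the recipient's own MaxRepoCreation wins when it is non-negative, otherwise the instance-wide setting.Repository.MaxCreationLimit applies. A small sketch of that selection, where iif is a simplified stand-in for Gitea's util.Iif:

package main

import "fmt"

// iif is a generic ternary helper, analogous to util.Iif.
func iif[T any](cond bool, trueVal, falseVal T) T {
	if cond {
		return trueVal
	}
	return falseVal
}

// effectiveRepoLimit picks the per-user limit when it is set (>= 0), else the global one.
func effectiveRepoLimit(userMax, globalMax int) int {
	return iif(userMax >= 0, userMax, globalMax)
}

func main() {
	fmt.Println(effectiveRepoLimit(-1, 50)) // 50: fall back to the global default
	fmt.Println(effectiveRepoLimit(0, 50))  // 0: the recipient may not take on more repositories
}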
// SPDX-License-Identifier: MIT package repository @@ -14,11 +14,14 @@ import ( repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/test" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/services/feed" notify_service "code.gitea.io/gitea/services/notify" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) var notifySync sync.Once @@ -125,3 +128,40 @@ func TestRepositoryTransfer(t *testing.T) { err = RejectRepositoryTransfer(db.DefaultContext, repo2, doer) assert.True(t, repo_model.IsErrNoPendingTransfer(err)) } + +// Test transfer rejections +func TestRepositoryTransferRejection(t *testing.T) { + require.NoError(t, unittest.PrepareTestDatabase()) + // Set limit to 0 repositories so no repositories can be transferred + defer test.MockVariableValue(&setting.Repository.MaxCreationLimit, 0)() + + // Admin case + doerAdmin := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1}) + repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 5}) + + transfer, err := repo_model.GetPendingRepositoryTransfer(db.DefaultContext, repo) + require.NoError(t, err) + require.NotNil(t, transfer) + require.NoError(t, transfer.LoadRecipient(db.DefaultContext)) + + require.True(t, doerAdmin.CanCreateRepoIn(transfer.Recipient)) // admin is not subject to limits + + // Administrator should not be affected by the limits so transfer should be successful + assert.NoError(t, AcceptTransferOwnership(db.DefaultContext, repo, doerAdmin)) + + // Non admin user case + doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 10}) + repo = unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 21}) + + transfer, err = repo_model.GetPendingRepositoryTransfer(db.DefaultContext, repo) + require.NoError(t, err) + require.NotNil(t, transfer) + require.NoError(t, transfer.LoadRecipient(db.DefaultContext)) + + require.False(t, doer.CanCreateRepoIn(transfer.Recipient)) // regular user is subject to limits + + // Cannot accept because of the limit + err = AcceptTransferOwnership(db.DefaultContext, repo, doer) + assert.Error(t, err) + assert.True(t, IsRepositoryLimitReached(err)) +} diff --git a/services/task/task.go b/services/task/task.go index c90ee91270..ee5fa1f348 100644 --- a/services/task/task.go +++ b/services/task/task.go @@ -5,6 +5,7 @@ package task import ( "context" + "errors" "fmt" admin_model "code.gitea.io/gitea/models/admin" @@ -41,7 +42,7 @@ func Run(ctx context.Context, t *admin_model.Task) error { func Init() error { taskQueue = queue.CreateSimpleQueue(graceful.GetManager().ShutdownContext(), "task", handler) if taskQueue == nil { - return fmt.Errorf("unable to create task queue") + return errors.New("unable to create task queue") } go graceful.GetManager().RunWithCancel(taskQueue) return nil @@ -110,7 +111,7 @@ func CreateMigrateTask(ctx context.Context, doer, u *user_model.User, opts base. 
IsPrivate: opts.Private || setting.Repository.ForcePrivate, IsMirror: opts.Mirror, Status: repo_model.RepositoryBeingMigrated, - }) + }, false) if err != nil { task.EndTime = timeutil.TimeStampNow() task.Status = structs.TaskStatusFailed diff --git a/services/user/update.go b/services/user/update.go index 4a39f4f783..d7354542bf 100644 --- a/services/user/update.go +++ b/services/user/update.go @@ -15,6 +15,26 @@ import ( "code.gitea.io/gitea/modules/structs" ) +type UpdateOptionField[T any] struct { + FieldValue T + FromSync bool +} + +func UpdateOptionFieldFromValue[T any](value T) optional.Option[UpdateOptionField[T]] { + return optional.Some(UpdateOptionField[T]{FieldValue: value}) +} + +func UpdateOptionFieldFromSync[T any](value T) optional.Option[UpdateOptionField[T]] { + return optional.Some(UpdateOptionField[T]{FieldValue: value, FromSync: true}) +} + +func UpdateOptionFieldFromPtr[T any](value *T) optional.Option[UpdateOptionField[T]] { + if value == nil { + return optional.None[UpdateOptionField[T]]() + } + return UpdateOptionFieldFromValue(*value) +} + type UpdateOptions struct { KeepEmailPrivate optional.Option[bool] FullName optional.Option[string] @@ -32,7 +52,7 @@ type UpdateOptions struct { DiffViewStyle optional.Option[string] AllowCreateOrganization optional.Option[bool] IsActive optional.Option[bool] - IsAdmin optional.Option[bool] + IsAdmin optional.Option[UpdateOptionField[bool]] EmailNotificationsPreference optional.Option[string] SetLastLogin bool RepoAdminChangeTeamAccess optional.Option[bool] @@ -111,13 +131,18 @@ func UpdateUser(ctx context.Context, u *user_model.User, opts *UpdateOptions) er cols = append(cols, "is_restricted") } if opts.IsAdmin.Has() { - if !opts.IsAdmin.Value() && user_model.IsLastAdminUser(ctx, u) { - return user_model.ErrDeleteLastAdminUser{UID: u.ID} + if opts.IsAdmin.Value().FieldValue /* true */ { + u.IsAdmin = opts.IsAdmin.Value().FieldValue // set IsAdmin=true + cols = append(cols, "is_admin") + } else if !user_model.IsLastAdminUser(ctx, u) /* not the last admin */ { + u.IsAdmin = opts.IsAdmin.Value().FieldValue // it's safe to change it from false to true (not the last admin) + cols = append(cols, "is_admin") + } else /* IsAdmin=false but this is the last admin user */ { //nolint:gocritic // make it easier to read + if !opts.IsAdmin.Value().FromSync { + return user_model.ErrDeleteLastAdminUser{UID: u.ID} + } + // else: syncing from external-source, this user is the last admin, so skip the "IsAdmin=false" change } - - u.IsAdmin = opts.IsAdmin.Value() - - cols = append(cols, "is_admin") } if opts.Visibility.Has() { diff --git a/services/user/update_test.go b/services/user/update_test.go index fc24a6c212..27513e8040 100644 --- a/services/user/update_test.go +++ b/services/user/update_test.go @@ -22,7 +22,11 @@ func TestUpdateUser(t *testing.T) { admin := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1}) assert.Error(t, UpdateUser(db.DefaultContext, admin, &UpdateOptions{ - IsAdmin: optional.Some(false), + IsAdmin: UpdateOptionFieldFromValue(false), + })) + + assert.NoError(t, UpdateUser(db.DefaultContext, admin, &UpdateOptions{ + IsAdmin: UpdateOptionFieldFromSync(false), })) user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 28}) @@ -38,7 +42,7 @@ func TestUpdateUser(t *testing.T) { MaxRepoCreation: optional.Some(10), IsRestricted: optional.Some(true), IsActive: optional.Some(false), - IsAdmin: optional.Some(true), + IsAdmin: UpdateOptionFieldFromValue(true), Visibility: optional.Some(structs.VisibleTypePrivate), 
KeepActivityPrivate: optional.Some(true), Language: optional.Some("lang"), @@ -60,7 +64,7 @@ func TestUpdateUser(t *testing.T) { assert.Equal(t, opts.MaxRepoCreation.Value(), user.MaxRepoCreation) assert.Equal(t, opts.IsRestricted.Value(), user.IsRestricted) assert.Equal(t, opts.IsActive.Value(), user.IsActive) - assert.Equal(t, opts.IsAdmin.Value(), user.IsAdmin) + assert.Equal(t, opts.IsAdmin.Value().FieldValue, user.IsAdmin) assert.Equal(t, opts.Visibility.Value(), user.Visibility) assert.Equal(t, opts.KeepActivityPrivate.Value(), user.KeepActivityPrivate) assert.Equal(t, opts.Language.Value(), user.Language) @@ -80,7 +84,7 @@ func TestUpdateUser(t *testing.T) { assert.Equal(t, opts.MaxRepoCreation.Value(), user.MaxRepoCreation) assert.Equal(t, opts.IsRestricted.Value(), user.IsRestricted) assert.Equal(t, opts.IsActive.Value(), user.IsActive) - assert.Equal(t, opts.IsAdmin.Value(), user.IsAdmin) + assert.Equal(t, opts.IsAdmin.Value().FieldValue, user.IsAdmin) assert.Equal(t, opts.Visibility.Value(), user.Visibility) assert.Equal(t, opts.KeepActivityPrivate.Value(), user.KeepActivityPrivate) assert.Equal(t, opts.Language.Value(), user.Language) diff --git a/services/user/user.go b/services/user/user.go index 1aeebff142..c7252430de 100644 --- a/services/user/user.go +++ b/services/user/user.go @@ -20,6 +20,7 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/storage" + "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/services/agit" asymkey_service "code.gitea.io/gitea/services/asymkey" @@ -177,8 +178,8 @@ func DeleteUser(ctx context.Context, u *user_model.User, purge bool) error { PageSize: repo_model.RepositoryListDefaultPageSize, Page: 1, }, - UserID: u.ID, - IncludePrivate: true, + UserID: u.ID, + IncludeVisibility: structs.VisibleTypePrivate, }) if err != nil { return fmt.Errorf("unable to find org list for %s[%d]. Error: %w", u.Name, u.ID, err) diff --git a/services/user/user_test.go b/services/user/user_test.go index 162a735cd4..28a0df8628 100644 --- a/services/user/user_test.go +++ b/services/user/user_test.go @@ -150,7 +150,7 @@ func TestRenameUser(t *testing.T) { redirectUID, err := user_model.LookupUserRedirect(db.DefaultContext, oldUsername) assert.NoError(t, err) - assert.EqualValues(t, user.ID, redirectUID) + assert.Equal(t, user.ID, redirectUID) unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerID: user.ID, OwnerName: user.Name}) }) diff --git a/services/versioned_migration/migration.go b/services/versioned_migration/migration.go index daec89d7c1..b66d853531 100644 --- a/services/versioned_migration/migration.go +++ b/services/versioned_migration/migration.go @@ -1,7 +1,7 @@ // Copyright 2025 The Gitea Authors. All rights reserved. 
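The IsAdmin handling above reduces to three cases: granting admin rights is always written, demoting a user who is not the last admin is written, and demoting the last admin is rejected for a manual edit but silently skipped when the value comes from an external-source sync (FromSync). A self-contained sketch of that decision, using simplified stand-ins rather than the real user_model types:

package main

import (
	"errors"
	"fmt"
)

// UpdateOptionField mirrors the generic option wrapper added in services/user/update.go.
type UpdateOptionField[T any] struct {
	FieldValue T
	FromSync   bool
}

var errLastAdmin = errors.New("cannot demote the last admin user")

// applyIsAdmin reports whether the is_admin column should be updated.
func applyIsAdmin(opt UpdateOptionField[bool], isLastAdmin bool) (bool, error) {
	switch {
	case opt.FieldValue: // setting IsAdmin=true is always allowed
		return true, nil
	case !isLastAdmin: // demoting someone who is not the last admin is allowed
		return true, nil
	case opt.FromSync: // last admin, but the change comes from a sync: skip silently
		return false, nil
	default: // last admin, manual change: refuse
		return false, errLastAdmin
	}
}

func main() {
	fmt.Println(applyIsAdmin(UpdateOptionField[bool]{FieldValue: false}, true))                 // false cannot demote the last admin user
	fmt.Println(applyIsAdmin(UpdateOptionField[bool]{FieldValue: false, FromSync: true}, true)) // false <nil>
}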
// SPDX-License-Identifier: MIT -package versioned_migration //nolint +package versioned_migration import ( "context" diff --git a/services/webhook/deliver.go b/services/webhook/deliver.go index df32d5741e..e8e6ed19c1 100644 --- a/services/webhook/deliver.go +++ b/services/webhook/deliver.go @@ -10,6 +10,7 @@ import ( "crypto/sha256" "crypto/tls" "encoding/hex" + "errors" "fmt" "io" "net/http" @@ -41,7 +42,7 @@ func newDefaultRequest(ctx context.Context, w *webhook_model.Webhook, t *webhook case http.MethodPost: switch w.ContentType { case webhook_model.ContentTypeJSON: - req, err = http.NewRequest("POST", w.URL, strings.NewReader(t.PayloadContent)) + req, err = http.NewRequest(http.MethodPost, w.URL, strings.NewReader(t.PayloadContent)) if err != nil { return nil, nil, err } @@ -52,7 +53,7 @@ func newDefaultRequest(ctx context.Context, w *webhook_model.Webhook, t *webhook "payload": []string{t.PayloadContent}, } - req, err = http.NewRequest("POST", w.URL, strings.NewReader(forms.Encode())) + req, err = http.NewRequest(http.MethodPost, w.URL, strings.NewReader(forms.Encode())) if err != nil { return nil, nil, err } @@ -69,7 +70,7 @@ func newDefaultRequest(ctx context.Context, w *webhook_model.Webhook, t *webhook vals := u.Query() vals["payload"] = []string{t.PayloadContent} u.RawQuery = vals.Encode() - req, err = http.NewRequest("GET", u.String(), nil) + req, err = http.NewRequest(http.MethodGet, u.String(), nil) if err != nil { return nil, nil, err } @@ -81,7 +82,7 @@ func newDefaultRequest(ctx context.Context, w *webhook_model.Webhook, t *webhook return nil, nil, err } url := fmt.Sprintf("%s/%s", w.URL, url.PathEscape(txnID)) - req, err = http.NewRequest("PUT", url, strings.NewReader(t.PayloadContent)) + req, err = http.NewRequest(http.MethodPut, url, strings.NewReader(t.PayloadContent)) if err != nil { return nil, nil, err } @@ -328,7 +329,7 @@ func Init() error { hookQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "webhook_sender", handler) if hookQueue == nil { - return fmt.Errorf("unable to create webhook_sender queue") + return errors.New("unable to create webhook_sender queue") } go graceful.GetManager().RunWithCancel(hookQueue) diff --git a/services/webhook/deliver_test.go b/services/webhook/deliver_test.go index 6a74b1455c..1d32d7b772 100644 --- a/services/webhook/deliver_test.go +++ b/services/webhook/deliver_test.go @@ -64,7 +64,7 @@ func TestWebhookProxy(t *testing.T) { } for _, tt := range tests { t.Run(tt.req, func(t *testing.T) { - req, err := http.NewRequest("POST", tt.req, nil) + req, err := http.NewRequest(http.MethodPost, tt.req, nil) require.NoError(t, err) u, err := webhookProxy(allowedHostMatcher)(req) @@ -91,7 +91,7 @@ func TestWebhookDeliverAuthorizationHeader(t *testing.T) { s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { assert.Equal(t, "/webhook", r.URL.Path) assert.Equal(t, "Bearer s3cr3t-t0ken", r.Header.Get("Authorization")) - w.WriteHeader(200) + w.WriteHeader(http.StatusOK) done <- struct{}{} })) t.Cleanup(s.Close) @@ -138,7 +138,7 @@ func TestWebhookDeliverHookTask(t *testing.T) { case "/webhook/66d222a5d6349e1311f551e50722d837e30fce98": // Version 1 assert.Equal(t, "push", r.Header.Get("X-GitHub-Event")) - assert.Equal(t, "", r.Header.Get("Content-Type")) + assert.Empty(t, r.Header.Get("Content-Type")) body, err := io.ReadAll(r.Body) assert.NoError(t, err) assert.Equal(t, `{"data": 42}`, string(body)) @@ -152,11 +152,11 @@ func TestWebhookDeliverHookTask(t *testing.T) { assert.Len(t, body, 
2147) default: - w.WriteHeader(404) + w.WriteHeader(http.StatusNotFound) t.Fatalf("unexpected url path %s", r.URL.Path) return } - w.WriteHeader(200) + w.WriteHeader(http.StatusOK) done <- struct{}{} })) t.Cleanup(s.Close) diff --git a/services/webhook/dingtalk.go b/services/webhook/dingtalk.go index 5afca8d65a..5bbc610fe5 100644 --- a/services/webhook/dingtalk.go +++ b/services/webhook/dingtalk.go @@ -30,7 +30,7 @@ func (dc dingtalkConvertor) Create(p *api.CreatePayload) (DingtalkPayload, error refName := git.RefName(p.Ref).ShortName() title := fmt.Sprintf("[%s] %s %s created", p.Repo.FullName, p.RefType, refName) - return createDingtalkPayload(title, title, fmt.Sprintf("view ref %s", refName), p.Repo.HTMLURL+"/src/"+util.PathEscapeSegments(refName)), nil + return createDingtalkPayload(title, title, "view ref "+refName, p.Repo.HTMLURL+"/src/"+util.PathEscapeSegments(refName)), nil } // Delete implements PayloadConvertor Delete method @@ -39,14 +39,14 @@ func (dc dingtalkConvertor) Delete(p *api.DeletePayload) (DingtalkPayload, error refName := git.RefName(p.Ref).ShortName() title := fmt.Sprintf("[%s] %s %s deleted", p.Repo.FullName, p.RefType, refName) - return createDingtalkPayload(title, title, fmt.Sprintf("view ref %s", refName), p.Repo.HTMLURL+"/src/"+util.PathEscapeSegments(refName)), nil + return createDingtalkPayload(title, title, "view ref "+refName, p.Repo.HTMLURL+"/src/"+util.PathEscapeSegments(refName)), nil } // Fork implements PayloadConvertor Fork method func (dc dingtalkConvertor) Fork(p *api.ForkPayload) (DingtalkPayload, error) { title := fmt.Sprintf("%s is forked to %s", p.Forkee.FullName, p.Repo.FullName) - return createDingtalkPayload(title, title, fmt.Sprintf("view forked repo %s", p.Repo.FullName), p.Repo.HTMLURL), nil + return createDingtalkPayload(title, title, "view forked repo "+p.Repo.FullName, p.Repo.HTMLURL), nil } // Push implements PayloadConvertor Push method @@ -176,6 +176,12 @@ func (dc dingtalkConvertor) Status(p *api.CommitStatusPayload) (DingtalkPayload, return createDingtalkPayload(text, text, "Status Changed", p.TargetURL), nil } +func (dingtalkConvertor) WorkflowRun(p *api.WorkflowRunPayload) (DingtalkPayload, error) { + text, _ := getWorkflowRunPayloadInfo(p, noneLinkFormatter, true) + + return createDingtalkPayload(text, text, "Workflow Run", p.WorkflowRun.HTMLURL), nil +} + func (dingtalkConvertor) WorkflowJob(p *api.WorkflowJobPayload) (DingtalkPayload, error) { text, _ := getWorkflowJobPayloadInfo(p, noneLinkFormatter, true) diff --git a/services/webhook/discord.go b/services/webhook/discord.go index 0a7eb0b166..0426964181 100644 --- a/services/webhook/discord.go +++ b/services/webhook/discord.go @@ -101,6 +101,13 @@ var ( redColor = color("ff3232") ) +// https://discord.com/developers/docs/resources/message#embed-object-embed-limits +// Discord has some limits in place for the embeds. +// According to some tests, there is no consistent limit for different character sets. +// For example: 4096 ASCII letters are allowed, but only 2490 emoji characters are allowed. +// To keep it simple, we currently truncate at 2000. 
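The 2000-character cap defined just below is enforced with util.TruncateRunes, i.e. rune-based truncation, so multi-byte characters are not cut in half. A minimal sketch of such a helper; truncateRunes here is a simplified stand-in, not the actual util implementation:

package main

import "fmt"

// truncateRunes cuts s to at most limit runes (not bytes), so a multi-byte
// character is never split in the middle.
func truncateRunes(s string, limit int) string {
	r := []rune(s)
	if len(r) <= limit {
		return s
	}
	return string(r[:limit])
}

func main() {
	fmt.Println(truncateRunes("héllo wörld", 5)) // héllo
}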
+const discordDescriptionCharactersLimit = 2000 + type discordConvertor struct { Username string AvatarURL string @@ -271,6 +278,12 @@ func (d discordConvertor) Status(p *api.CommitStatusPayload) (DiscordPayload, er return d.createPayload(p.Sender, text, "", p.TargetURL, color), nil } +func (d discordConvertor) WorkflowRun(p *api.WorkflowRunPayload) (DiscordPayload, error) { + text, color := getWorkflowRunPayloadInfo(p, noneLinkFormatter, false) + + return d.createPayload(p.Sender, text, "", p.WorkflowRun.HTMLURL, color), nil +} + func (d discordConvertor) WorkflowJob(p *api.WorkflowJobPayload) (DiscordPayload, error) { text, color := getWorkflowJobPayloadInfo(p, noneLinkFormatter, false) @@ -298,7 +311,7 @@ func parseHookPullRequestEventType(event webhook_module.HookEventType) (string, case webhook_module.HookEventPullRequestReviewApproved: return "approved", nil case webhook_module.HookEventPullRequestReviewRejected: - return "rejected", nil + return "requested changes", nil case webhook_module.HookEventPullRequestReviewComment: return "comment", nil default: @@ -313,7 +326,7 @@ func (d discordConvertor) createPayload(s *api.User, title, text, url string, co Embeds: []DiscordEmbed{ { Title: title, - Description: text, + Description: util.TruncateRunes(text, discordDescriptionCharactersLimit), URL: url, Color: color, Author: DiscordEmbedAuthor{ diff --git a/services/webhook/feishu.go b/services/webhook/feishu.go index 274aaf90b3..b6ee80c44c 100644 --- a/services/webhook/feishu.go +++ b/services/webhook/feishu.go @@ -5,9 +5,13 @@ package webhook import ( "context" + "crypto/hmac" + "crypto/sha256" + "encoding/base64" "fmt" "net/http" "strings" + "time" webhook_model "code.gitea.io/gitea/models/webhook" "code.gitea.io/gitea/modules/git" @@ -16,10 +20,12 @@ import ( ) type ( - // FeishuPayload represents + // FeishuPayload represents the payload for Feishu webhook FeishuPayload struct { - MsgType string `json:"msg_type"` // text / post / image / share_chat / interactive / file /audio / media - Content struct { + Timestamp int64 `json:"timestamp,omitempty"` // Unix timestamp for signature verification + Sign string `json:"sign,omitempty"` // Signature for verification + MsgType string `json:"msg_type"` // text / post / image / share_chat / interactive / file /audio / media + Content struct { Text string `json:"text"` } `json:"content"` } @@ -172,15 +178,41 @@ func (fc feishuConvertor) Status(p *api.CommitStatusPayload) (FeishuPayload, err return newFeishuTextPayload(text), nil } +func (feishuConvertor) WorkflowRun(p *api.WorkflowRunPayload) (FeishuPayload, error) { + text, _ := getWorkflowRunPayloadInfo(p, noneLinkFormatter, true) + + return newFeishuTextPayload(text), nil +} + func (feishuConvertor) WorkflowJob(p *api.WorkflowJobPayload) (FeishuPayload, error) { text, _ := getWorkflowJobPayloadInfo(p, noneLinkFormatter, true) return newFeishuTextPayload(text), nil } +// feishuGenSign generates a signature for Feishu webhook +// https://open.feishu.cn/document/client-docs/bot-v3/add-custom-bot +func feishuGenSign(secret string, timestamp int64) string { + // key="{timestamp}\n{secret}", then hmac-sha256, then base64 encode + stringToSign := fmt.Sprintf("%d\n%s", timestamp, secret) + h := hmac.New(sha256.New, []byte(stringToSign)) + return base64.StdEncoding.EncodeToString(h.Sum(nil)) +} + func newFeishuRequest(_ context.Context, w *webhook_model.Webhook, t *webhook_model.HookTask) (*http.Request, []byte, error) { - var pc payloadConvertor[FeishuPayload] = feishuConvertor{} - return 
newJSONRequest(pc, w, t, true) + payload, err := newPayload(feishuConvertor{}, []byte(t.PayloadContent), t.EventType) + if err != nil { + return nil, nil, err + } + + // Add timestamp and signature if secret is provided + if w.Secret != "" { + timestamp := time.Now().Unix() + payload.Timestamp = timestamp + payload.Sign = feishuGenSign(w.Secret, timestamp) + } + + return prepareJSONRequest(payload, w, t, false /* no default headers */) } func init() { diff --git a/services/webhook/feishu_test.go b/services/webhook/feishu_test.go index c4249bdb30..7e200ea132 100644 --- a/services/webhook/feishu_test.go +++ b/services/webhook/feishu_test.go @@ -168,6 +168,7 @@ func TestFeishuJSONPayload(t *testing.T) { URL: "https://feishu.example.com/", Meta: `{}`, HTTPMethod: "POST", + Secret: "secret", } task := &webhook_model.HookTask{ HookID: hook.ID, @@ -183,10 +184,13 @@ func TestFeishuJSONPayload(t *testing.T) { assert.Equal(t, "POST", req.Method) assert.Equal(t, "https://feishu.example.com/", req.URL.String()) - assert.Equal(t, "sha256=", req.Header.Get("X-Hub-Signature-256")) assert.Equal(t, "application/json", req.Header.Get("Content-Type")) var body FeishuPayload err = json.NewDecoder(req.Body).Decode(&body) assert.NoError(t, err) assert.Equal(t, "[test/repo:test] \r\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1\r\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) commit message - user1", body.Content.Text) + assert.Equal(t, feishuGenSign(hook.Secret, body.Timestamp), body.Sign) + + // a separate sign test, the result is generated by official python code, so the algo must be correct + assert.Equal(t, "rWZ84lcag1x9aBFhn1gtV4ZN+4gme3pilfQNMk86vKg=", feishuGenSign("a", 1)) } diff --git a/services/webhook/general.go b/services/webhook/general.go index ea75038faf..be457e46f5 100644 --- a/services/webhook/general.go +++ b/services/webhook/general.go @@ -39,19 +39,20 @@ func getPullRequestInfo(p *api.PullRequestPayload) (title, link, by, operator, o for i, user := range assignList { assignStringList[i] = user.UserName } - if p.Action == api.HookIssueAssigned { + switch p.Action { + case api.HookIssueAssigned: operateResult = fmt.Sprintf("%s assign this to %s", p.Sender.UserName, assignList[len(assignList)-1].UserName) - } else if p.Action == api.HookIssueUnassigned { - operateResult = fmt.Sprintf("%s unassigned this for someone", p.Sender.UserName) - } else if p.Action == api.HookIssueMilestoned { + case api.HookIssueUnassigned: + operateResult = p.Sender.UserName + " unassigned this for someone" + case api.HookIssueMilestoned: operateResult = fmt.Sprintf("%s/milestone/%d", p.Repository.HTMLURL, p.PullRequest.Milestone.ID) } link = p.PullRequest.HTMLURL - by = fmt.Sprintf("PullRequest by %s", p.PullRequest.Poster.UserName) + by = "PullRequest by " + p.PullRequest.Poster.UserName if len(assignStringList) > 0 { - assignees = fmt.Sprintf("Assignees: %s", strings.Join(assignStringList, ", ")) + assignees = "Assignees: " + strings.Join(assignStringList, ", ") } - operator = fmt.Sprintf("Operator: %s", p.Sender.UserName) + operator = "Operator: " + p.Sender.UserName return title, link, by, operator, operateResult, assignees } @@ -64,19 +65,20 @@ func getIssuesInfo(p *api.IssuePayload) (issueTitle, link, by, operator, operate for i, user := range assignList { assignStringList[i] = user.UserName } - if p.Action == api.HookIssueAssigned { + switch p.Action { + case api.HookIssueAssigned: operateResult = 
fmt.Sprintf("%s assign this to %s", p.Sender.UserName, assignList[len(assignList)-1].UserName) - } else if p.Action == api.HookIssueUnassigned { - operateResult = fmt.Sprintf("%s unassigned this for someone", p.Sender.UserName) - } else if p.Action == api.HookIssueMilestoned { + case api.HookIssueUnassigned: + operateResult = p.Sender.UserName + " unassigned this for someone" + case api.HookIssueMilestoned: operateResult = fmt.Sprintf("%s/milestone/%d", p.Repository.HTMLURL, p.Issue.Milestone.ID) } link = p.Issue.HTMLURL - by = fmt.Sprintf("Issue by %s", p.Issue.Poster.UserName) + by = "Issue by " + p.Issue.Poster.UserName if len(assignStringList) > 0 { - assignees = fmt.Sprintf("Assignees: %s", strings.Join(assignStringList, ", ")) + assignees = "Assignees: " + strings.Join(assignStringList, ", ") } - operator = fmt.Sprintf("Operator: %s", p.Sender.UserName) + operator = "Operator: " + p.Sender.UserName return issueTitle, link, by, operator, operateResult, assignees } @@ -85,11 +87,11 @@ func getIssuesCommentInfo(p *api.IssueCommentPayload) (title, link, by, operator title = fmt.Sprintf("[Comment-%s #%d]: %s\n%s", p.Repository.FullName, p.Issue.Index, p.Action, p.Issue.Title) link = p.Issue.HTMLURL if p.IsPull { - by = fmt.Sprintf("PullRequest by %s", p.Issue.Poster.UserName) + by = "PullRequest by " + p.Issue.Poster.UserName } else { - by = fmt.Sprintf("Issue by %s", p.Issue.Poster.UserName) + by = "Issue by " + p.Issue.Poster.UserName } - operator = fmt.Sprintf("Operator: %s", p.Sender.UserName) + operator = "Operator: " + p.Sender.UserName return title, link, by, operator } @@ -133,7 +135,7 @@ func getIssuesPayloadInfo(p *api.IssuePayload, linkFormatter linkFormatter, with text = fmt.Sprintf("[%s] Issue milestone cleared: %s", repoLink, titleLink) } if withSender { - text += fmt.Sprintf(" by %s", linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName)) + text += " by " + linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName) } if p.Action == api.HookIssueOpened || p.Action == api.HookIssueEdited { @@ -198,7 +200,7 @@ func getPullRequestPayloadInfo(p *api.PullRequestPayload, linkFormatter linkForm text = fmt.Sprintf("[%s] Pull request review request removed: %s", repoLink, titleLink) } if withSender { - text += fmt.Sprintf(" by %s", linkFormatter(setting.AppURL+p.Sender.UserName, p.Sender.UserName)) + text += " by " + linkFormatter(setting.AppURL+p.Sender.UserName, p.Sender.UserName) } return text, issueTitle, extraMarkdown, color @@ -220,7 +222,7 @@ func getReleasePayloadInfo(p *api.ReleasePayload, linkFormatter linkFormatter, w color = redColor } if withSender { - text += fmt.Sprintf(" by %s", linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName)) + text += " by " + linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName) } return text, color @@ -249,7 +251,7 @@ func getWikiPayloadInfo(p *api.WikiPayload, linkFormatter linkFormatter, withSen } if withSender { - text += fmt.Sprintf(" by %s", linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName)) + text += " by " + linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName) } return text, color, pageLink @@ -285,7 +287,7 @@ func getIssueCommentPayloadInfo(p *api.IssueCommentPayload, linkFormatter linkFo color = redColor } if withSender { - text += fmt.Sprintf(" by %s", linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName)) + text += " by " + 
linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName) } return text, issueTitle, color @@ -296,14 +298,14 @@ func getPackagePayloadInfo(p *api.PackagePayload, linkFormatter linkFormatter, w switch p.Action { case api.HookPackageCreated: - text = fmt.Sprintf("Package created: %s", refLink) + text = "Package created: " + refLink color = greenColor case api.HookPackageDeleted: - text = fmt.Sprintf("Package deleted: %s", refLink) + text = "Package deleted: " + refLink color = redColor } if withSender { - text += fmt.Sprintf(" by %s", linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName)) + text += " by " + linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName) } return text, color @@ -316,15 +318,46 @@ func getStatusPayloadInfo(p *api.CommitStatusPayload, linkFormatter linkFormatte color = greenColor if withSender { if user_model.IsGiteaActionsUserName(p.Sender.UserName) { - text += fmt.Sprintf(" by %s", p.Sender.FullName) + text += " by " + p.Sender.FullName } else { - text += fmt.Sprintf(" by %s", linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName)) + text += " by " + linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName) } } return text, color } +func getWorkflowRunPayloadInfo(p *api.WorkflowRunPayload, linkFormatter linkFormatter, withSender bool) (text string, color int) { + description := p.WorkflowRun.Conclusion + if description == "" { + description = p.WorkflowRun.Status + } + refLink := linkFormatter(p.WorkflowRun.HTMLURL, fmt.Sprintf("%s(#%d)", p.WorkflowRun.DisplayTitle, p.WorkflowRun.ID)+"["+base.ShortSha(p.WorkflowRun.HeadSha)+"]:"+description) + + text = fmt.Sprintf("Workflow Run %s: %s", p.Action, refLink) + switch description { + case "waiting": + color = orangeColor + case "queued": + color = orangeColorLight + case "success": + color = greenColor + case "failure": + color = redColor + case "cancelled": + color = yellowColor + case "skipped": + color = purpleColor + default: + color = greyColor + } + if withSender { + text += " by " + linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName) + } + + return text, color +} + func getWorkflowJobPayloadInfo(p *api.WorkflowJobPayload, linkFormatter linkFormatter, withSender bool) (text string, color int) { description := p.WorkflowJob.Conclusion if description == "" { @@ -350,7 +383,7 @@ func getWorkflowJobPayloadInfo(p *api.WorkflowJobPayload, linkFormatter linkForm color = greyColor } if withSender { - text += fmt.Sprintf(" by %s", linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName)) + text += " by " + linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName) } return text, color diff --git a/services/webhook/matrix.go b/services/webhook/matrix.go index 5bc7ba097e..3e9163f78c 100644 --- a/services/webhook/matrix.go +++ b/services/webhook/matrix.go @@ -252,6 +252,12 @@ func (m matrixConvertor) Status(p *api.CommitStatusPayload) (MatrixPayload, erro return m.newPayload(text) } +func (m matrixConvertor) WorkflowRun(p *api.WorkflowRunPayload) (MatrixPayload, error) { + text, _ := getWorkflowRunPayloadInfo(p, htmlLinkFormatter, true) + + return m.newPayload(text) +} + func (m matrixConvertor) WorkflowJob(p *api.WorkflowJobPayload) (MatrixPayload, error) { text, _ := getWorkflowJobPayloadInfo(p, htmlLinkFormatter, true) diff --git a/services/webhook/msteams.go b/services/webhook/msteams.go index 
f70e235f20..450a544b42 100644 --- a/services/webhook/msteams.go +++ b/services/webhook/msteams.go @@ -8,6 +8,7 @@ import ( "fmt" "net/http" "net/url" + "strconv" "strings" webhook_model "code.gitea.io/gitea/models/webhook" @@ -73,7 +74,7 @@ func (m msteamsConvertor) Create(p *api.CreatePayload) (MSTeamsPayload, error) { "", p.Repo.HTMLURL+"/src/"+util.PathEscapeSegments(refName), greenColor, - &MSTeamsFact{fmt.Sprintf("%s:", p.RefType), refName}, + &MSTeamsFact{p.RefType + ":", refName}, ), nil } @@ -90,7 +91,7 @@ func (m msteamsConvertor) Delete(p *api.DeletePayload) (MSTeamsPayload, error) { "", p.Repo.HTMLURL+"/src/"+util.PathEscapeSegments(refName), yellowColor, - &MSTeamsFact{fmt.Sprintf("%s:", p.RefType), refName}, + &MSTeamsFact{p.RefType + ":", refName}, ), nil } @@ -148,7 +149,7 @@ func (m msteamsConvertor) Push(p *api.PushPayload) (MSTeamsPayload, error) { text, titleLink, greenColor, - &MSTeamsFact{"Commit count:", fmt.Sprintf("%d", p.TotalCommits)}, + &MSTeamsFact{"Commit count:", strconv.Itoa(p.TotalCommits)}, ), nil } @@ -163,7 +164,7 @@ func (m msteamsConvertor) Issue(p *api.IssuePayload) (MSTeamsPayload, error) { extraMarkdown, p.Issue.HTMLURL, color, - &MSTeamsFact{"Issue #:", fmt.Sprintf("%d", p.Issue.ID)}, + &MSTeamsFact{"Issue #:", strconv.FormatInt(p.Issue.ID, 10)}, ), nil } @@ -178,7 +179,7 @@ func (m msteamsConvertor) IssueComment(p *api.IssueCommentPayload) (MSTeamsPaylo p.Comment.Body, p.Comment.HTMLURL, color, - &MSTeamsFact{"Issue #:", fmt.Sprintf("%d", p.Issue.ID)}, + &MSTeamsFact{"Issue #:", strconv.FormatInt(p.Issue.ID, 10)}, ), nil } @@ -193,7 +194,7 @@ func (m msteamsConvertor) PullRequest(p *api.PullRequestPayload) (MSTeamsPayload extraMarkdown, p.PullRequest.HTMLURL, color, - &MSTeamsFact{"Pull request #:", fmt.Sprintf("%d", p.PullRequest.ID)}, + &MSTeamsFact{"Pull request #:", strconv.FormatInt(p.PullRequest.ID, 10)}, ), nil } @@ -230,7 +231,7 @@ func (m msteamsConvertor) Review(p *api.PullRequestPayload, event webhook_module text, p.PullRequest.HTMLURL, color, - &MSTeamsFact{"Pull request #:", fmt.Sprintf("%d", p.PullRequest.ID)}, + &MSTeamsFact{"Pull request #:", strconv.FormatInt(p.PullRequest.ID, 10)}, ), nil } @@ -317,6 +318,20 @@ func (m msteamsConvertor) Status(p *api.CommitStatusPayload) (MSTeamsPayload, er ), nil } +func (msteamsConvertor) WorkflowRun(p *api.WorkflowRunPayload) (MSTeamsPayload, error) { + title, color := getWorkflowRunPayloadInfo(p, noneLinkFormatter, false) + + return createMSTeamsPayload( + p.Repo, + p.Sender, + title, + "", + p.WorkflowRun.HTMLURL, + color, + &MSTeamsFact{"WorkflowRun:", p.WorkflowRun.DisplayTitle}, + ), nil +} + func (msteamsConvertor) WorkflowJob(p *api.WorkflowJobPayload) (MSTeamsPayload, error) { title, color := getWorkflowJobPayloadInfo(p, noneLinkFormatter, false) diff --git a/services/webhook/msteams_test.go b/services/webhook/msteams_test.go index d5020d3ff5..0d98b94bad 100644 --- a/services/webhook/msteams_test.go +++ b/services/webhook/msteams_test.go @@ -335,7 +335,7 @@ func TestMSTeamsPayload(t *testing.T) { assert.Equal(t, "[test/repo] New wiki page 'index' (Wiki change comment)", pl.Summary) assert.Len(t, pl.Sections, 1) assert.Equal(t, "user1", pl.Sections[0].ActivitySubtitle) - assert.Equal(t, "", pl.Sections[0].Text) + assert.Empty(t, pl.Sections[0].Text) assert.Len(t, pl.Sections[0].Facts, 2) for _, fact := range pl.Sections[0].Facts { if fact.Name == "Repository:" { @@ -356,7 +356,7 @@ func TestMSTeamsPayload(t *testing.T) { assert.Equal(t, "[test/repo] Wiki page 'index' edited (Wiki change 
comment)", pl.Summary) assert.Len(t, pl.Sections, 1) assert.Equal(t, "user1", pl.Sections[0].ActivitySubtitle) - assert.Equal(t, "", pl.Sections[0].Text) + assert.Empty(t, pl.Sections[0].Text) assert.Len(t, pl.Sections[0].Facts, 2) for _, fact := range pl.Sections[0].Facts { if fact.Name == "Repository:" { diff --git a/services/webhook/notifier.go b/services/webhook/notifier.go index 9e3f21de29..672abd5c95 100644 --- a/services/webhook/notifier.go +++ b/services/webhook/notifier.go @@ -5,10 +5,8 @@ package webhook import ( "context" - "fmt" actions_model "code.gitea.io/gitea/models/actions" - "code.gitea.io/gitea/models/db" git_model "code.gitea.io/gitea/models/git" issues_model "code.gitea.io/gitea/models/issues" "code.gitea.io/gitea/models/organization" @@ -18,6 +16,7 @@ import ( repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/httplib" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/repository" @@ -295,6 +294,43 @@ func (m *webhookNotifier) NewIssue(ctx context.Context, issue *issues_model.Issu } } +func (m *webhookNotifier) DeleteIssue(ctx context.Context, doer *user_model.User, issue *issues_model.Issue) { + permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, doer) + if issue.IsPull { + if err := issue.LoadPullRequest(ctx); err != nil { + log.Error("LoadPullRequest: %v", err) + return + } + if err := PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, webhook_module.HookEventPullRequest, &api.PullRequestPayload{ + Action: api.HookIssueDeleted, + Index: issue.Index, + PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, doer), + Repository: convert.ToRepo(ctx, issue.Repo, permission), + Sender: convert.ToUser(ctx, doer, nil), + }); err != nil { + log.Error("PrepareWebhooks: %v", err) + } + } else { + if err := issue.LoadRepo(ctx); err != nil { + log.Error("issue.LoadRepo: %v", err) + return + } + if err := issue.LoadPoster(ctx); err != nil { + log.Error("issue.LoadPoster: %v", err) + return + } + if err := PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, webhook_module.HookEventIssues, &api.IssuePayload{ + Action: api.HookIssueDeleted, + Index: issue.Index, + Issue: convert.ToAPIIssue(ctx, issue.Poster, issue), + Repository: convert.ToRepo(ctx, issue.Repo, permission), + Sender: convert.ToUser(ctx, doer, nil), + }); err != nil { + log.Error("PrepareWebhooks: %v", err) + } + } +} + func (m *webhookNotifier) NewPullRequest(ctx context.Context, pull *issues_model.PullRequest, mentions []*user_model.User) { if err := pull.LoadIssue(ctx); err != nil { log.Error("pull.LoadIssue: %v", err) @@ -956,72 +992,61 @@ func (*webhookNotifier) WorkflowJobStatusUpdate(ctx context.Context, repo *repo_ org = convert.ToOrganization(ctx, organization.OrgFromUser(repo.Owner)) } - err := job.LoadAttributes(ctx) + status, _ := convert.ToActionsStatus(job.Status) + + convertedJob, err := convert.ToActionWorkflowJob(ctx, repo, task, job) if err != nil { - log.Error("Error loading job attributes: %v", err) + log.Error("ToActionWorkflowJob: %v", err) return } - jobIndex := 0 - jobs, err := actions_model.GetRunJobsByRunID(ctx, job.RunID) + if err := PrepareWebhooks(ctx, source, webhook_module.HookEventWorkflowJob, &api.WorkflowJobPayload{ + Action: status, + WorkflowJob: convertedJob, + Organization: org, + Repo: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeOwner}), + Sender: 
convert.ToUser(ctx, sender, nil), + }); err != nil { + log.Error("PrepareWebhooks: %v", err) + } +} + +func (*webhookNotifier) WorkflowRunStatusUpdate(ctx context.Context, repo *repo_model.Repository, sender *user_model.User, run *actions_model.ActionRun) { + source := EventSource{ + Repository: repo, + Owner: repo.Owner, + } + + var org *api.Organization + if repo.Owner.IsOrganization() { + org = convert.ToOrganization(ctx, organization.OrgFromUser(repo.Owner)) + } + + status := convert.ToWorkflowRunAction(run.Status) + + gitRepo, err := gitrepo.OpenRepository(ctx, repo) if err != nil { - log.Error("Error loading getting run jobs: %v", err) + log.Error("OpenRepository: %v", err) return } - for i, j := range jobs { - if j.ID == job.ID { - jobIndex = i - break - } - } + defer gitRepo.Close() - status, conclusion := toActionStatus(job.Status) - var runnerID int64 - var runnerName string - var steps []*api.ActionWorkflowStep + convertedWorkflow, err := convert.GetActionWorkflow(ctx, gitRepo, repo, run.WorkflowID) + if err != nil { + log.Error("GetActionWorkflow: %v", err) + return + } - if task != nil { - runnerID = task.RunnerID - if runner, ok, _ := db.GetByID[actions_model.ActionRunner](ctx, runnerID); ok { - runnerName = runner.Name - } - for i, step := range task.Steps { - stepStatus, stepConclusion := toActionStatus(job.Status) - steps = append(steps, &api.ActionWorkflowStep{ - Name: step.Name, - Number: int64(i), - Status: stepStatus, - Conclusion: stepConclusion, - StartedAt: step.Started.AsTime().UTC(), - CompletedAt: step.Stopped.AsTime().UTC(), - }) - } + convertedRun, err := convert.ToActionWorkflowRun(ctx, repo, run) + if err != nil { + log.Error("ToActionWorkflowRun: %v", err) + return } - if err := PrepareWebhooks(ctx, source, webhook_module.HookEventWorkflowJob, &api.WorkflowJobPayload{ - Action: status, - WorkflowJob: &api.ActionWorkflowJob{ - ID: job.ID, - // missing api endpoint for this location - URL: fmt.Sprintf("%s/actions/runs/%d/jobs/%d", repo.APIURL(), job.RunID, job.ID), - HTMLURL: fmt.Sprintf("%s/jobs/%d", job.Run.HTMLURL(), jobIndex), - RunID: job.RunID, - // Missing api endpoint for this location, artifacts are available under a nested url - RunURL: fmt.Sprintf("%s/actions/runs/%d", repo.APIURL(), job.RunID), - Name: job.Name, - Labels: job.RunsOn, - RunAttempt: job.Attempt, - HeadSha: job.Run.CommitSHA, - HeadBranch: git.RefName(job.Run.Ref).BranchName(), - Status: status, - Conclusion: conclusion, - RunnerID: runnerID, - RunnerName: runnerName, - Steps: steps, - CreatedAt: job.Created.AsTime().UTC(), - StartedAt: job.Started.AsTime().UTC(), - CompletedAt: job.Stopped.AsTime().UTC(), - }, + if err := PrepareWebhooks(ctx, source, webhook_module.HookEventWorkflowRun, &api.WorkflowRunPayload{ + Action: status, + Workflow: convertedWorkflow, + WorkflowRun: convertedRun, Organization: org, Repo: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeOwner}), Sender: convert.ToUser(ctx, sender, nil), @@ -1029,29 +1054,3 @@ func (*webhookNotifier) WorkflowJobStatusUpdate(ctx context.Context, repo *repo_ log.Error("PrepareWebhooks: %v", err) } } - -func toActionStatus(status actions_model.Status) (string, string) { - var action string - var conclusion string - switch status { - // This is a naming conflict of the webhook between Gitea and GitHub Actions - case actions_model.StatusWaiting: - action = "queued" - case actions_model.StatusBlocked: - action = "waiting" - case actions_model.StatusRunning: - action = "in_progress" - } - if status.IsDone() 
{ - action = "completed" - switch status { - case actions_model.StatusSuccess: - conclusion = "success" - case actions_model.StatusCancelled: - conclusion = "cancelled" - case actions_model.StatusFailure: - conclusion = "failure" - } - } - return action, conclusion -} diff --git a/services/webhook/packagist.go b/services/webhook/packagist.go index 8829d95da6..e6a00b0293 100644 --- a/services/webhook/packagist.go +++ b/services/webhook/packagist.go @@ -114,6 +114,10 @@ func (pc packagistConvertor) Status(_ *api.CommitStatusPayload) (PackagistPayloa return PackagistPayload{}, nil } +func (pc packagistConvertor) WorkflowRun(_ *api.WorkflowRunPayload) (PackagistPayload, error) { + return PackagistPayload{}, nil +} + func (pc packagistConvertor) WorkflowJob(_ *api.WorkflowJobPayload) (PackagistPayload, error) { return PackagistPayload{}, nil } diff --git a/services/webhook/packagist_test.go b/services/webhook/packagist_test.go index 638dcfd302..4e77f29edc 100644 --- a/services/webhook/packagist_test.go +++ b/services/webhook/packagist_test.go @@ -210,5 +210,5 @@ func TestPackagistEmptyPayload(t *testing.T) { var body PackagistPayload err = json.NewDecoder(req.Body).Decode(&body) assert.NoError(t, err) - assert.Equal(t, "", body.PackagistRepository.URL) + assert.Empty(t, body.PackagistRepository.URL) } diff --git a/services/webhook/payloader.go b/services/webhook/payloader.go index adb7243fb1..b607bf3250 100644 --- a/services/webhook/payloader.go +++ b/services/webhook/payloader.go @@ -29,6 +29,7 @@ type payloadConvertor[T any] interface { Wiki(*api.WikiPayload) (T, error) Package(*api.PackagePayload) (T, error) Status(*api.CommitStatusPayload) (T, error) + WorkflowRun(*api.WorkflowRunPayload) (T, error) WorkflowJob(*api.WorkflowJobPayload) (T, error) } @@ -81,6 +82,8 @@ func newPayload[T any](rc payloadConvertor[T], data []byte, event webhook_module return convertUnmarshalledJSON(rc.Package, data) case webhook_module.HookEventStatus: return convertUnmarshalledJSON(rc.Status, data) + case webhook_module.HookEventWorkflowRun: + return convertUnmarshalledJSON(rc.WorkflowRun, data) case webhook_module.HookEventWorkflowJob: return convertUnmarshalledJSON(rc.WorkflowJob, data) } @@ -92,7 +95,10 @@ func newJSONRequest[T any](pc payloadConvertor[T], w *webhook_model.Webhook, t * if err != nil { return nil, nil, err } + return prepareJSONRequest(payload, w, t, withDefaultHeaders) +} +func prepareJSONRequest[T any](payload T, w *webhook_model.Webhook, t *webhook_model.HookTask, withDefaultHeaders bool) (*http.Request, []byte, error) { body, err := json.MarshalIndent(payload, "", " ") if err != nil { return nil, nil, err diff --git a/services/webhook/slack.go b/services/webhook/slack.go index 589ef3fe9b..3d645a55d0 100644 --- a/services/webhook/slack.go +++ b/services/webhook/slack.go @@ -173,6 +173,12 @@ func (s slackConvertor) Status(p *api.CommitStatusPayload) (SlackPayload, error) return s.createPayload(text, nil), nil } +func (s slackConvertor) WorkflowRun(p *api.WorkflowRunPayload) (SlackPayload, error) { + text, _ := getWorkflowRunPayloadInfo(p, SlackLinkFormatter, true) + + return s.createPayload(text, nil), nil +} + func (s slackConvertor) WorkflowJob(p *api.WorkflowJobPayload) (SlackPayload, error) { text, _ := getWorkflowJobPayloadInfo(p, SlackLinkFormatter, true) diff --git a/services/webhook/telegram.go b/services/webhook/telegram.go index ca74eabe1c..fdd428b45c 100644 --- a/services/webhook/telegram.go +++ b/services/webhook/telegram.go @@ -180,6 +180,12 @@ func (t telegramConvertor) 
Status(p *api.CommitStatusPayload) (TelegramPayload, return createTelegramPayloadHTML(text), nil } +func (telegramConvertor) WorkflowRun(p *api.WorkflowRunPayload) (TelegramPayload, error) { + text, _ := getWorkflowRunPayloadInfo(p, htmlLinkFormatter, true) + + return createTelegramPayloadHTML(text), nil +} + func (telegramConvertor) WorkflowJob(p *api.WorkflowJobPayload) (TelegramPayload, error) { text, _ := getWorkflowJobPayloadInfo(p, htmlLinkFormatter, true) @@ -189,7 +195,7 @@ func (telegramConvertor) WorkflowJob(p *api.WorkflowJobPayload) (TelegramPayload func createTelegramPayloadHTML(msgHTML string) TelegramPayload { // https://core.telegram.org/bots/api#formatting-options return TelegramPayload{ - Message: strings.TrimSpace(markup.Sanitize(msgHTML)), + Message: strings.TrimSpace(string(markup.Sanitize(msgHTML))), ParseMode: "HTML", DisableWebPreview: true, } diff --git a/services/webhook/webhook_test.go b/services/webhook/webhook_test.go index 6bac02712b..5a805347e3 100644 --- a/services/webhook/webhook_test.go +++ b/services/webhook/webhook_test.go @@ -13,6 +13,7 @@ import ( webhook_model "code.gitea.io/gitea/models/webhook" "code.gitea.io/gitea/modules/setting" api "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/test" webhook_module "code.gitea.io/gitea/modules/webhook" "code.gitea.io/gitea/services/convert" @@ -84,7 +85,8 @@ func TestPrepareWebhooksBranchFilterNoMatch(t *testing.T) { func TestWebhookUserMail(t *testing.T) { require.NoError(t, unittest.PrepareTestDatabase()) - setting.Service.NoReplyAddress = "no-reply.com" + defer test.MockVariableValue(&setting.Service.NoReplyAddress, "no-reply.com")() + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1}) assert.Equal(t, user.GetPlaceholderEmail(), convert.ToUser(db.DefaultContext, user, nil).Email) assert.Equal(t, user.Email, convert.ToUser(db.DefaultContext, user, user).Email) diff --git a/services/webhook/wechatwork.go b/services/webhook/wechatwork.go index 2b19822caf..1875317406 100644 --- a/services/webhook/wechatwork.go +++ b/services/webhook/wechatwork.go @@ -181,6 +181,12 @@ func (wc wechatworkConvertor) Status(p *api.CommitStatusPayload) (WechatworkPayl return newWechatworkMarkdownPayload(text), nil } +func (wc wechatworkConvertor) WorkflowRun(p *api.WorkflowRunPayload) (WechatworkPayload, error) { + text, _ := getWorkflowRunPayloadInfo(p, noneLinkFormatter, true) + + return newWechatworkMarkdownPayload(text), nil +} + func (wc wechatworkConvertor) WorkflowJob(p *api.WorkflowJobPayload) (WechatworkPayload, error) { text, _ := getWorkflowJobPayloadInfo(p, noneLinkFormatter, true) diff --git a/services/webtheme/webtheme.go b/services/webtheme/webtheme.go index 58aea3bc74..4e89d6dbac 100644 --- a/services/webtheme/webtheme.go +++ b/services/webtheme/webtheme.go @@ -70,11 +70,11 @@ func parseThemeMetaInfoToMap(cssContent string) map[string]string { m := map[string]string{} for _, item := range matchedItems { v := item[3] - if strings.HasPrefix(v, `"`) { - v = strings.TrimSuffix(strings.TrimPrefix(v, `"`), `"`) + if after, ok := strings.CutPrefix(v, `"`); ok { + v = strings.TrimSuffix(after, `"`) v = strings.ReplaceAll(v, `\"`, `"`) - } else if strings.HasPrefix(v, `'`) { - v = strings.TrimSuffix(strings.TrimPrefix(v, `'`), `'`) + } else if after, ok := strings.CutPrefix(v, `'`); ok { + v = strings.TrimSuffix(after, `'`) v = strings.ReplaceAll(v, `\'`, `'`) } m[item[2]] = v diff --git a/services/wiki/wiki.go b/services/wiki/wiki.go index a3fe07927d..0a955406e2 100644 --- 
a/services/wiki/wiki.go +++ b/services/wiki/wiki.go @@ -41,7 +41,7 @@ func InitWiki(ctx context.Context, repo *repo_model.Repository) error { if err := git.InitRepository(ctx, repo.WikiPath(), true, repo.ObjectFormatName); err != nil { return fmt.Errorf("InitRepository: %w", err) - } else if err = gitrepo.CreateDelegateHooksForWiki(ctx, repo); err != nil { + } else if err = gitrepo.CreateDelegateHooks(ctx, repo.WikiStorageRepo()); err != nil { return fmt.Errorf("createDelegateHooks: %w", err) } else if _, _, err = git.NewCommand("symbolic-ref", "HEAD").AddDynamicArguments(git.BranchPrefix+repo.DefaultWikiBranch).RunStdString(ctx, &git.RunOpts{Dir: repo.WikiPath()}); err != nil { return fmt.Errorf("unable to set default wiki branch to %q: %w", repo.DefaultWikiBranch, err) @@ -100,17 +100,13 @@ func updateWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model return fmt.Errorf("InitWiki: %w", err) } - hasDefaultBranch := gitrepo.IsWikiBranchExist(ctx, repo, repo.DefaultWikiBranch) + hasDefaultBranch := gitrepo.IsBranchExist(ctx, repo.WikiStorageRepo(), repo.DefaultWikiBranch) - basePath, err := repo_module.CreateTemporaryPath("update-wiki") + basePath, cleanup, err := repo_module.CreateTemporaryPath("update-wiki") if err != nil { return err } - defer func() { - if err := repo_module.RemoveTemporaryPath(basePath); err != nil { - log.Error("Merge: RemoveTemporaryPath: %s", err) - } - }() + defer cleanup() cloneOpts := git.CloneRepoOptions{ Bare: true, @@ -198,7 +194,7 @@ func updateWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model sign, signingKey, signer, _ := asymkey_service.SignWikiCommit(ctx, repo, doer) if sign { - commitTreeOpts.KeyID = signingKey + commitTreeOpts.Key = signingKey if repo.GetTrustModel() == repo_model.CommitterTrustModel || repo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel { committer = signer } @@ -264,15 +260,11 @@ func DeleteWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model return fmt.Errorf("InitWiki: %w", err) } - basePath, err := repo_module.CreateTemporaryPath("update-wiki") + basePath, cleanup, err := repo_module.CreateTemporaryPath("update-wiki") if err != nil { return err } - defer func() { - if err := repo_module.RemoveTemporaryPath(basePath); err != nil { - log.Error("Merge: RemoveTemporaryPath: %s", err) - } - }() + defer cleanup() if err := git.Clone(ctx, repo.WikiPath(), basePath, git.CloneRepoOptions{ Bare: true, @@ -324,7 +316,7 @@ func DeleteWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model sign, signingKey, signer, _ := asymkey_service.SignWikiCommit(ctx, repo, doer) if sign { - commitTreeOpts.KeyID = signingKey + commitTreeOpts.Key = signingKey if repo.GetTrustModel() == repo_model.CommitterTrustModel || repo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel { committer = signer } @@ -373,7 +365,7 @@ func ChangeDefaultWikiBranch(ctx context.Context, repo *repo_model.Repository, n } return db.WithTx(ctx, func(ctx context.Context) error { repo.DefaultWikiBranch = newBranch - if err := repo_model.UpdateRepositoryCols(ctx, repo, "default_wiki_branch"); err != nil { + if err := repo_model.UpdateRepositoryColsNoAutoTime(ctx, repo, "default_wiki_branch"); err != nil { return fmt.Errorf("unable to update database: %w", err) } @@ -381,7 +373,7 @@ func ChangeDefaultWikiBranch(ctx context.Context, repo *repo_model.Repository, n return nil } - oldDefBranch, err := gitrepo.GetWikiDefaultBranch(ctx, repo) + oldDefBranch, err := gitrepo.GetDefaultBranch(ctx, 
repo.WikiStorageRepo()) if err != nil { return fmt.Errorf("unable to get default branch: %w", err) } @@ -389,7 +381,7 @@ func ChangeDefaultWikiBranch(ctx context.Context, repo *repo_model.Repository, n return nil } - gitRepo, err := gitrepo.OpenWikiRepository(ctx, repo) + gitRepo, err := gitrepo.OpenRepository(ctx, repo.WikiStorageRepo()) if errors.Is(err, util.ErrNotExist) { return nil // no git repo on storage, no need to do anything else } else if err != nil { diff --git a/services/wiki/wiki_test.go b/services/wiki/wiki_test.go index e8b89f5e97..6ea3ca9c1b 100644 --- a/services/wiki/wiki_test.go +++ b/services/wiki/wiki_test.go @@ -26,7 +26,7 @@ func TestMain(m *testing.M) { func TestWebPathSegments(t *testing.T) { a := WebPathSegments("a%2Fa/b+c/d-e/f-g.-") - assert.EqualValues(t, []string{"a/a", "b c", "d e", "f-g"}, a) + assert.Equal(t, []string{"a/a", "b c", "d e", "f-g"}, a) } func TestUserTitleToWebPath(t *testing.T) { @@ -63,7 +63,7 @@ func TestWebPathToDisplayName(t *testing.T) { {"a b", "a%20b.md"}, } { _, displayName := WebPathToUserTitle(test.WebPath) - assert.EqualValues(t, test.Expected, displayName) + assert.Equal(t, test.Expected, displayName) } } @@ -80,7 +80,7 @@ func TestWebPathToGitPath(t *testing.T) { {"2000-01-02-meeting.md", "2000-01-02+meeting"}, {"2000-01-02 meeting.-.md", "2000-01-02%20meeting.-"}, } { - assert.EqualValues(t, test.Expected, WebPathToGitPath(test.WikiName)) + assert.Equal(t, test.Expected, WebPathToGitPath(test.WikiName)) } } @@ -116,9 +116,9 @@ func TestGitPathToWebPath(t *testing.T) { func TestUserWebGitPathConsistency(t *testing.T) { maxLen := 20 b := make([]byte, maxLen) - for i := 0; i < 1000; i++ { + for range 1000 { l := rand.Intn(maxLen) - for j := 0; j < l; j++ { + for j := range l { r := rand.Intn(0x80-0x20) + 0x20 b[j] = byte(r) } @@ -134,9 +134,9 @@ func TestUserWebGitPathConsistency(t *testing.T) { _, userTitle1 := WebPathToUserTitle(webPath1) gitPath1 := WebPathToGitPath(webPath1) - assert.EqualValues(t, userTitle, userTitle1, "UserTitle for userTitle: %q", userTitle) - assert.EqualValues(t, webPath, webPath1, "WebPath for userTitle: %q", userTitle) - assert.EqualValues(t, gitPath, gitPath1, "GitPath for userTitle: %q", userTitle) + assert.Equal(t, userTitle, userTitle1, "UserTitle for userTitle: %q", userTitle) + assert.Equal(t, webPath, webPath1, "WebPath for userTitle: %q", userTitle) + assert.Equal(t, gitPath, gitPath1, "GitPath for userTitle: %q", userTitle) } } @@ -166,7 +166,7 @@ func TestRepository_AddWikiPage(t *testing.T) { webPath := UserTitleToWebPath("", userTitle) assert.NoError(t, AddWikiPage(git.DefaultContext, doer, repo, webPath, wikiContent, commitMsg)) // Now need to show that the page has been added: - gitRepo, err := gitrepo.OpenWikiRepository(git.DefaultContext, repo) + gitRepo, err := gitrepo.OpenRepository(git.DefaultContext, repo.WikiStorageRepo()) require.NoError(t, err) defer gitRepo.Close() @@ -175,7 +175,7 @@ func TestRepository_AddWikiPage(t *testing.T) { gitPath := WebPathToGitPath(webPath) entry, err := masterTree.GetTreeEntryByPath(gitPath) assert.NoError(t, err) - assert.EqualValues(t, gitPath, entry.Name(), "%s not added correctly", userTitle) + assert.Equal(t, gitPath, entry.Name(), "%s not added correctly", userTitle) }) } @@ -213,14 +213,14 @@ func TestRepository_EditWikiPage(t *testing.T) { assert.NoError(t, EditWikiPage(git.DefaultContext, doer, repo, "Home", webPath, newWikiContent, commitMsg)) // Now need to show that the page has been added: - gitRepo, err := 
gitrepo.OpenWikiRepository(git.DefaultContext, repo) + gitRepo, err := gitrepo.OpenRepository(git.DefaultContext, repo.WikiStorageRepo()) assert.NoError(t, err) masterTree, err := gitRepo.GetTree(repo.DefaultWikiBranch) assert.NoError(t, err) gitPath := WebPathToGitPath(webPath) entry, err := masterTree.GetTreeEntryByPath(gitPath) assert.NoError(t, err) - assert.EqualValues(t, gitPath, entry.Name(), "%s not edited correctly", newWikiName) + assert.Equal(t, gitPath, entry.Name(), "%s not edited correctly", newWikiName) if newWikiName != "Home" { _, err := masterTree.GetTreeEntryByPath("Home.md") @@ -237,7 +237,7 @@ func TestRepository_DeleteWikiPage(t *testing.T) { assert.NoError(t, DeleteWikiPage(git.DefaultContext, doer, repo, "Home")) // Now need to show that the page has been added: - gitRepo, err := gitrepo.OpenWikiRepository(git.DefaultContext, repo) + gitRepo, err := gitrepo.OpenRepository(git.DefaultContext, repo.WikiStorageRepo()) require.NoError(t, err) defer gitRepo.Close() @@ -251,7 +251,7 @@ func TestRepository_DeleteWikiPage(t *testing.T) { func TestPrepareWikiFileName(t *testing.T) { unittest.PrepareTestEnv(t) repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}) - gitRepo, err := gitrepo.OpenWikiRepository(git.DefaultContext, repo) + gitRepo, err := gitrepo.OpenRepository(git.DefaultContext, repo.WikiStorageRepo()) require.NoError(t, err) defer gitRepo.Close() @@ -290,7 +290,7 @@ func TestPrepareWikiFileName(t *testing.T) { t.Errorf("expect to find an escaped file but we could not detect one") } } - assert.EqualValues(t, tt.wikiPath, newWikiPath) + assert.Equal(t, tt.wikiPath, newWikiPath) }) } } @@ -312,13 +312,13 @@ func TestPrepareWikiFileName_FirstPage(t *testing.T) { existence, newWikiPath, err := prepareGitPath(gitRepo, "master", "Home") assert.False(t, existence) assert.NoError(t, err) - assert.EqualValues(t, "Home.md", newWikiPath) + assert.Equal(t, "Home.md", newWikiPath) } func TestWebPathConversion(t *testing.T) { assert.Equal(t, "path/wiki", WebPathToURLPath(WebPath("path/wiki"))) assert.Equal(t, "wiki", WebPathToURLPath(WebPath("wiki"))) - assert.Equal(t, "", WebPathToURLPath(WebPath(""))) + assert.Empty(t, WebPathToURLPath(WebPath(""))) } func TestWebPathFromRequest(t *testing.T) { |
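The wiki changes above also adopt the new repo_module.CreateTemporaryPath signature, which returns a cleanup function alongside the path so callers can simply defer it instead of pairing every call with RemoveTemporaryPath. A minimal sketch of that pattern, with createTemporaryPath as a simplified stand-in for the Gitea helper:

package main

import (
	"fmt"
	"log"
	"os"
)

// createTemporaryPath returns a temporary directory together with a cleanup
// function that removes it, so callers can defer the cleanup right away.
func createTemporaryPath(prefix string) (string, func(), error) {
	dir, err := os.MkdirTemp("", prefix)
	if err != nil {
		return "", func() {}, err
	}
	cleanup := func() {
		if err := os.RemoveAll(dir); err != nil {
			log.Printf("failed to remove temporary path %s: %v", dir, err)
		}
	}
	return dir, cleanup, nil
}

func main() {
	basePath, cleanup, err := createTemporaryPath("update-wiki")
	if err != nil {
		log.Fatal(err)
	}
	defer cleanup()
	fmt.Println("working in", basePath)
}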