Diffstat (limited to 'models')
490 files changed, 4768 insertions, 3873 deletions
diff --git a/models/actions/artifact.go b/models/actions/artifact.go index 0bc66ba24e..757bd13acd 100644 --- a/models/actions/artifact.go +++ b/models/actions/artifact.go @@ -30,6 +30,25 @@ const ( ArtifactStatusDeleted // 6, ArtifactStatusDeleted is the status of an artifact that is deleted ) +func (status ArtifactStatus) ToString() string { + switch status { + case ArtifactStatusUploadPending: + return "upload is not yet completed" + case ArtifactStatusUploadConfirmed: + return "upload is completed" + case ArtifactStatusUploadError: + return "upload failed" + case ArtifactStatusExpired: + return "expired" + case ArtifactStatusPendingDeletion: + return "pending deletion" + case ArtifactStatusDeleted: + return "deleted" + default: + return "unknown" + } +} + func init() { db.RegisterModel(new(ActionArtifact)) } @@ -48,7 +67,7 @@ type ActionArtifact struct { ContentEncoding string // The content encoding of the artifact ArtifactPath string `xorm:"index unique(runid_name_path)"` // The path to the artifact when runner uploads it ArtifactName string `xorm:"index unique(runid_name_path)"` // The name of the artifact when runner uploads it - Status int64 `xorm:"index"` // The status of the artifact, uploading, expired or need-delete + Status ArtifactStatus `xorm:"index"` // The status of the artifact, uploading, expired or need-delete CreatedUnix timeutil.TimeStamp `xorm:"created"` UpdatedUnix timeutil.TimeStamp `xorm:"updated index"` ExpiredUnix timeutil.TimeStamp `xorm:"index"` // The time when the artifact will be expired @@ -68,7 +87,7 @@ func CreateArtifact(ctx context.Context, t *ActionTask, artifactName, artifactPa RepoID: t.RepoID, OwnerID: t.OwnerID, CommitSHA: t.CommitSHA, - Status: int64(ArtifactStatusUploadPending), + Status: ArtifactStatusUploadPending, ExpiredUnix: timeutil.TimeStamp(time.Now().Unix() + timeutil.Day*expiredDays), } if _, err := db.GetEngine(ctx).Insert(artifact); err != nil { @@ -108,12 +127,19 @@ func UpdateArtifactByID(ctx context.Context, id int64, art *ActionArtifact) erro type FindArtifactsOptions struct { db.ListOptions - RepoID int64 - RunID int64 - ArtifactName string - Status int + RepoID int64 + RunID int64 + ArtifactName string + Status int + FinalizedArtifactsV4 bool } +func (opts FindArtifactsOptions) ToOrders() string { + return "id" +} + +var _ db.FindOptionsOrder = (*FindArtifactsOptions)(nil) + func (opts FindArtifactsOptions) ToConds() builder.Cond { cond := builder.NewCond() if opts.RepoID > 0 { @@ -128,11 +154,15 @@ func (opts FindArtifactsOptions) ToConds() builder.Cond { if opts.Status > 0 { cond = cond.And(builder.Eq{"status": opts.Status}) } + if opts.FinalizedArtifactsV4 { + cond = cond.And(builder.Eq{"status": ArtifactStatusUploadConfirmed}.Or(builder.Eq{"status": ArtifactStatusExpired})) + cond = cond.And(builder.Eq{"content_encoding": "application/zip"}) + } return cond } -// ActionArtifactMeta is the meta data of an artifact +// ActionArtifactMeta is the meta-data of an artifact type ActionArtifactMeta struct { ArtifactName string FileSize int64 @@ -166,18 +196,18 @@ func ListPendingDeleteArtifacts(ctx context.Context, limit int) ([]*ActionArtifa // SetArtifactExpired sets an artifact to expired func SetArtifactExpired(ctx context.Context, artifactID int64) error { - _, err := db.GetEngine(ctx).Where("id=? AND status = ?", artifactID, ArtifactStatusUploadConfirmed).Cols("status").Update(&ActionArtifact{Status: int64(ArtifactStatusExpired)}) + _, err := db.GetEngine(ctx).Where("id=? 
AND status = ?", artifactID, ArtifactStatusUploadConfirmed).Cols("status").Update(&ActionArtifact{Status: ArtifactStatusExpired}) return err } // SetArtifactNeedDelete sets an artifact to need-delete, cron job will delete it func SetArtifactNeedDelete(ctx context.Context, runID int64, name string) error { - _, err := db.GetEngine(ctx).Where("run_id=? AND artifact_name=? AND status = ?", runID, name, ArtifactStatusUploadConfirmed).Cols("status").Update(&ActionArtifact{Status: int64(ArtifactStatusPendingDeletion)}) + _, err := db.GetEngine(ctx).Where("run_id=? AND artifact_name=? AND status = ?", runID, name, ArtifactStatusUploadConfirmed).Cols("status").Update(&ActionArtifact{Status: ArtifactStatusPendingDeletion}) return err } // SetArtifactDeleted sets an artifact to deleted func SetArtifactDeleted(ctx context.Context, artifactID int64) error { - _, err := db.GetEngine(ctx).ID(artifactID).Cols("status").Update(&ActionArtifact{Status: int64(ArtifactStatusDeleted)}) + _, err := db.GetEngine(ctx).ID(artifactID).Cols("status").Update(&ActionArtifact{Status: ArtifactStatusDeleted}) return err } diff --git a/models/actions/run.go b/models/actions/run.go index a224a910ab..f0ab61b200 100644 --- a/models/actions/run.go +++ b/models/actions/run.go @@ -5,6 +5,7 @@ package actions import ( "context" + "errors" "fmt" "slices" "strings" @@ -15,6 +16,7 @@ import ( user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/setting" api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/util" @@ -88,7 +90,7 @@ func (run *ActionRun) RefLink() string { if refName.IsPull() { return run.Repo.Link() + "/pulls/" + refName.ShortName() } - return git.RefURL(run.Repo.Link(), run.Ref) + return run.Repo.Link() + "/src/" + refName.RefWebLinkPath() } // PrettyRef return #id for pull ref or ShortName for others @@ -154,7 +156,7 @@ func (run *ActionRun) GetPushEventPayload() (*api.PushPayload, error) { } func (run *ActionRun) GetPullRequestEventPayload() (*api.PullRequestPayload, error) { - if run.Event == webhook_module.HookEventPullRequest || run.Event == webhook_module.HookEventPullRequestSync { + if run.Event.IsPullRequest() { var payload api.PullRequestPayload if err := json.Unmarshal([]byte(run.EventPayload), &payload); err != nil { return nil, err @@ -164,12 +166,24 @@ func (run *ActionRun) GetPullRequestEventPayload() (*api.PullRequestPayload, err return nil, fmt.Errorf("event %s is not a pull request event", run.Event) } +func (run *ActionRun) GetWorkflowRunEventPayload() (*api.WorkflowRunPayload, error) { + if run.Event == webhook_module.HookEventWorkflowRun { + var payload api.WorkflowRunPayload + if err := json.Unmarshal([]byte(run.EventPayload), &payload); err != nil { + return nil, err + } + return &payload, nil + } + return nil, fmt.Errorf("event %s is not a workflow run event", run.Event) +} + func (run *ActionRun) IsSchedule() bool { return run.ScheduleID > 0 } func updateRepoRunsNumbers(ctx context.Context, repo *repo_model.Repository) error { _, err := db.GetEngine(ctx).ID(repo.ID). + NoAutoTime(). SetExpr("num_action_runs", builder.Select("count(*)").From("action_run"). Where(builder.Eq{"repo_id": repo.ID}), @@ -194,7 +208,7 @@ func updateRepoRunsNumbers(ctx context.Context, repo *repo_model.Repository) err // CancelPreviousJobs cancels all previous jobs of the same repository, reference, workflow, and event. 
// It's useful when a new run is triggered, and all previous runs needn't be continued anymore. -func CancelPreviousJobs(ctx context.Context, repoID int64, ref, workflowID string, event webhook_module.HookEventType) error { +func CancelPreviousJobs(ctx context.Context, repoID int64, ref, workflowID string, event webhook_module.HookEventType) ([]*ActionRunJob, error) { // Find all runs in the specified repository, reference, and workflow with non-final status runs, total, err := db.FindAndCount[ActionRun](ctx, FindRunOptions{ RepoID: repoID, @@ -204,14 +218,16 @@ func CancelPreviousJobs(ctx context.Context, repoID int64, ref, workflowID strin Status: []Status{StatusRunning, StatusWaiting, StatusBlocked}, }) if err != nil { - return err + return nil, err } // If there are no runs found, there's no need to proceed with cancellation, so return nil. if total == 0 { - return nil + return nil, nil } + cancelledJobs := make([]*ActionRunJob, 0, total) + // Iterate over each found run and cancel its associated jobs. for _, run := range runs { // Find all jobs associated with the current run. @@ -219,7 +235,7 @@ func CancelPreviousJobs(ctx context.Context, repoID int64, ref, workflowID strin RunID: run.ID, }) if err != nil { - return err + return cancelledJobs, err } // Iterate over each job and attempt to cancel it. @@ -238,27 +254,29 @@ func CancelPreviousJobs(ctx context.Context, repoID int64, ref, workflowID strin // Update the job's status and stopped time in the database. n, err := UpdateRunJob(ctx, job, builder.Eq{"task_id": 0}, "status", "stopped") if err != nil { - return err + return cancelledJobs, err } // If the update affected 0 rows, it means the job has changed in the meantime, so we need to try again. if n == 0 { - return fmt.Errorf("job has changed, try again") + return cancelledJobs, errors.New("job has changed, try again") } + cancelledJobs = append(cancelledJobs, job) // Continue with the next job. continue } // If the job has an associated task, try to stop the task, effectively cancelling the job. if err := StopTask(ctx, job.TaskID, StatusCancelled); err != nil { - return err + return cancelledJobs, err } + cancelledJobs = append(cancelledJobs, job) } } // Return nil to indicate successful cancellation of all running and waiting jobs. - return nil + return cancelledJobs, nil } // InsertRun inserts a run @@ -337,13 +355,13 @@ func InsertRun(ctx context.Context, run *ActionRun, jobs []*jobparser.SingleWork return committer.Commit() } -func GetRunByID(ctx context.Context, id int64) (*ActionRun, error) { +func GetRunByRepoAndID(ctx context.Context, repoID, runID int64) (*ActionRun, error) { var run ActionRun - has, err := db.GetEngine(ctx).Where("id=?", id).Get(&run) + has, err := db.GetEngine(ctx).Where("id=? AND repo_id=?", runID, repoID).Get(&run) if err != nil { return nil, err } else if !has { - return nil, fmt.Errorf("run with id %d: %w", id, util.ErrNotExist) + return nil, fmt.Errorf("run with id %d: %w", runID, util.ErrNotExist) } return &run, nil @@ -408,23 +426,16 @@ func UpdateRun(ctx context.Context, run *ActionRun, cols ...string) error { return err } if affected == 0 { - return fmt.Errorf("run has changed") + return errors.New("run has changed") // It's impossible that the run is not found, since Gitea never deletes runs. 
} if run.Status != 0 || slices.Contains(cols, "status") { if run.RepoID == 0 { - run, err = GetRunByID(ctx, run.ID) - if err != nil { - return err - } + setting.PanicInDevOrTesting("RepoID should not be 0") } - if run.Repo == nil { - repo, err := repo_model.GetRepositoryByID(ctx, run.RepoID) - if err != nil { - return err - } - run.Repo = repo + if err = run.LoadRepo(ctx); err != nil { + return err } if err := updateRepoRunsNumbers(ctx, run.Repo); err != nil { return err diff --git a/models/actions/run_job.go b/models/actions/run_job.go index de4b6aab66..bad895036d 100644 --- a/models/actions/run_job.go +++ b/models/actions/run_job.go @@ -10,6 +10,7 @@ import ( "time" "code.gitea.io/gitea/models/db" + repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/util" @@ -19,11 +20,12 @@ import ( // ActionRunJob represents a job of a run type ActionRunJob struct { ID int64 - RunID int64 `xorm:"index"` - Run *ActionRun `xorm:"-"` - RepoID int64 `xorm:"index"` - OwnerID int64 `xorm:"index"` - CommitSHA string `xorm:"index"` + RunID int64 `xorm:"index"` + Run *ActionRun `xorm:"-"` + RepoID int64 `xorm:"index"` + Repo *repo_model.Repository `xorm:"-"` + OwnerID int64 `xorm:"index"` + CommitSHA string `xorm:"index"` IsForkPullRequest bool Name string `xorm:"VARCHAR(255)"` Attempt int64 @@ -49,7 +51,7 @@ func (job *ActionRunJob) Duration() time.Duration { func (job *ActionRunJob) LoadRun(ctx context.Context) error { if job.Run == nil { - run, err := GetRunByID(ctx, job.RunID) + run, err := GetRunByRepoAndID(ctx, job.RepoID, job.RunID) if err != nil { return err } @@ -58,6 +60,17 @@ func (job *ActionRunJob) LoadRun(ctx context.Context) error { return nil } +func (job *ActionRunJob) LoadRepo(ctx context.Context) error { + if job.Repo == nil { + repo, err := repo_model.GetRepositoryByID(ctx, job.RepoID) + if err != nil { + return err + } + job.Repo = repo + } + return nil +} + // LoadAttributes load Run if not loaded func (job *ActionRunJob) LoadAttributes(ctx context.Context) error { if job == nil { @@ -83,7 +96,7 @@ func GetRunJobByID(ctx context.Context, id int64) (*ActionRunJob, error) { return &job, nil } -func GetRunJobsByRunID(ctx context.Context, runID int64) ([]*ActionRunJob, error) { +func GetRunJobsByRunID(ctx context.Context, runID int64) (ActionJobList, error) { var jobs []*ActionRunJob if err := db.GetEngine(ctx).Where("run_id=?", runID).OrderBy("id").Find(&jobs); err != nil { return nil, err @@ -129,7 +142,7 @@ func UpdateRunJob(ctx context.Context, job *ActionRunJob, cond builder.Cond, col { // Other goroutines may aggregate the status of the run and update it too. // So we need load the run and its jobs before updating the run. 
- run, err := GetRunByID(ctx, job.RunID) + run, err := GetRunByRepoAndID(ctx, job.RepoID, job.RunID) if err != nil { return 0, err } @@ -172,10 +185,10 @@ func AggregateJobStatus(jobs []*ActionRunJob) Status { return StatusSuccess case hasCancelled: return StatusCancelled - case hasFailure: - return StatusFailure case hasRunning: return StatusRunning + case hasFailure: + return StatusFailure case hasWaiting: return StatusWaiting case hasBlocked: diff --git a/models/actions/run_job_list.go b/models/actions/run_job_list.go index 6c5d3b3252..5f7bb62878 100644 --- a/models/actions/run_job_list.go +++ b/models/actions/run_job_list.go @@ -7,6 +7,7 @@ import ( "context" "code.gitea.io/gitea/models/db" + repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/timeutil" @@ -21,7 +22,33 @@ func (jobs ActionJobList) GetRunIDs() []int64 { }) } +func (jobs ActionJobList) LoadRepos(ctx context.Context) error { + repoIDs := container.FilterSlice(jobs, func(j *ActionRunJob) (int64, bool) { + return j.RepoID, j.RepoID != 0 && j.Repo == nil + }) + if len(repoIDs) == 0 { + return nil + } + + repos := make(map[int64]*repo_model.Repository, len(repoIDs)) + if err := db.GetEngine(ctx).In("id", repoIDs).Find(&repos); err != nil { + return err + } + for _, j := range jobs { + if j.RepoID > 0 && j.Repo == nil { + j.Repo = repos[j.RepoID] + } + } + return nil +} + func (jobs ActionJobList) LoadRuns(ctx context.Context, withRepo bool) error { + if withRepo { + if err := jobs.LoadRepos(ctx); err != nil { + return err + } + } + runIDs := jobs.GetRunIDs() runs := make(map[int64]*ActionRun, len(runIDs)) if err := db.GetEngine(ctx).In("id", runIDs).Find(&runs); err != nil { @@ -30,15 +57,9 @@ func (jobs ActionJobList) LoadRuns(ctx context.Context, withRepo bool) error { for _, j := range jobs { if j.RunID > 0 && j.Run == nil { j.Run = runs[j.RunID] + j.Run.Repo = j.Repo } } - if withRepo { - var runsList RunList = make([]*ActionRun, 0, len(runs)) - for _, r := range runs { - runsList = append(runsList, r) - } - return runsList.LoadRepos(ctx) - } return nil } @@ -59,22 +80,31 @@ type FindRunJobOptions struct { func (opts FindRunJobOptions) ToConds() builder.Cond { cond := builder.NewCond() if opts.RunID > 0 { - cond = cond.And(builder.Eq{"run_id": opts.RunID}) + cond = cond.And(builder.Eq{"`action_run_job`.run_id": opts.RunID}) } if opts.RepoID > 0 { - cond = cond.And(builder.Eq{"repo_id": opts.RepoID}) - } - if opts.OwnerID > 0 { - cond = cond.And(builder.Eq{"owner_id": opts.OwnerID}) + cond = cond.And(builder.Eq{"`action_run_job`.repo_id": opts.RepoID}) } if opts.CommitSHA != "" { - cond = cond.And(builder.Eq{"commit_sha": opts.CommitSHA}) + cond = cond.And(builder.Eq{"`action_run_job`.commit_sha": opts.CommitSHA}) } if len(opts.Statuses) > 0 { - cond = cond.And(builder.In("status", opts.Statuses)) + cond = cond.And(builder.In("`action_run_job`.status", opts.Statuses)) } if opts.UpdatedBefore > 0 { - cond = cond.And(builder.Lt{"updated": opts.UpdatedBefore}) + cond = cond.And(builder.Lt{"`action_run_job`.updated": opts.UpdatedBefore}) } return cond } + +func (opts FindRunJobOptions) ToJoins() []db.JoinFunc { + if opts.OwnerID > 0 { + return []db.JoinFunc{ + func(sess db.Engine) error { + sess.Join("INNER", "repository", "repository.id = repo_id AND repository.owner_id = ?", opts.OwnerID) + return nil + }, + } + } + return nil +} diff --git a/models/actions/run_job_status_test.go b/models/actions/run_job_status_test.go index 523d38327e..2a5eb00a6f 100644 --- 
a/models/actions/run_job_status_test.go +++ b/models/actions/run_job_status_test.go @@ -58,14 +58,14 @@ func TestAggregateJobStatus(t *testing.T) { {[]Status{StatusCancelled, StatusRunning}, StatusCancelled}, {[]Status{StatusCancelled, StatusBlocked}, StatusCancelled}, - // failure with other status, fail fast - // Should "running" win? Maybe no: old code does make "running" win, but GitHub does fail fast. + // failure with other status, usually fail fast, but "running" wins to match GitHub's behavior + // another reason that we can't make "failure" wins over "running": it would cause a weird behavior that user cannot cancel a workflow or get current running workflows correctly by filter after a job fail. {[]Status{StatusFailure}, StatusFailure}, {[]Status{StatusFailure, StatusSuccess}, StatusFailure}, {[]Status{StatusFailure, StatusSkipped}, StatusFailure}, {[]Status{StatusFailure, StatusCancelled}, StatusCancelled}, {[]Status{StatusFailure, StatusWaiting}, StatusFailure}, - {[]Status{StatusFailure, StatusRunning}, StatusFailure}, + {[]Status{StatusFailure, StatusRunning}, StatusRunning}, {[]Status{StatusFailure, StatusBlocked}, StatusFailure}, // skipped with other status diff --git a/models/actions/run_list.go b/models/actions/run_list.go index 4046c7d369..12c55e538e 100644 --- a/models/actions/run_list.go +++ b/models/actions/run_list.go @@ -10,6 +10,7 @@ import ( repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/container" + "code.gitea.io/gitea/modules/translation" webhook_module "code.gitea.io/gitea/modules/webhook" "xorm.io/builder" @@ -71,39 +72,50 @@ type FindRunOptions struct { TriggerEvent webhook_module.HookEventType Approved bool // not util.OptionalBool, it works only when it's true Status []Status + CommitSHA string } func (opts FindRunOptions) ToConds() builder.Cond { cond := builder.NewCond() if opts.RepoID > 0 { - cond = cond.And(builder.Eq{"repo_id": opts.RepoID}) - } - if opts.OwnerID > 0 { - cond = cond.And(builder.Eq{"owner_id": opts.OwnerID}) + cond = cond.And(builder.Eq{"`action_run`.repo_id": opts.RepoID}) } if opts.WorkflowID != "" { - cond = cond.And(builder.Eq{"workflow_id": opts.WorkflowID}) + cond = cond.And(builder.Eq{"`action_run`.workflow_id": opts.WorkflowID}) } if opts.TriggerUserID > 0 { - cond = cond.And(builder.Eq{"trigger_user_id": opts.TriggerUserID}) + cond = cond.And(builder.Eq{"`action_run`.trigger_user_id": opts.TriggerUserID}) } if opts.Approved { - cond = cond.And(builder.Gt{"approved_by": 0}) + cond = cond.And(builder.Gt{"`action_run`.approved_by": 0}) } if len(opts.Status) > 0 { - cond = cond.And(builder.In("status", opts.Status)) + cond = cond.And(builder.In("`action_run`.status", opts.Status)) } if opts.Ref != "" { - cond = cond.And(builder.Eq{"ref": opts.Ref}) + cond = cond.And(builder.Eq{"`action_run`.ref": opts.Ref}) } if opts.TriggerEvent != "" { - cond = cond.And(builder.Eq{"trigger_event": opts.TriggerEvent}) + cond = cond.And(builder.Eq{"`action_run`.trigger_event": opts.TriggerEvent}) + } + if opts.CommitSHA != "" { + cond = cond.And(builder.Eq{"`action_run`.commit_sha": opts.CommitSHA}) } return cond } +func (opts FindRunOptions) ToJoins() []db.JoinFunc { + if opts.OwnerID > 0 { + return []db.JoinFunc{func(sess db.Engine) error { + sess.Join("INNER", "repository", "repository.id = repo_id AND repository.owner_id = ?", opts.OwnerID) + return nil + }} + } + return nil +} + func (opts FindRunOptions) ToOrders() string { - return "`id` DESC" + return 
"`action_run`.`id` DESC" } type StatusInfo struct { @@ -112,14 +124,14 @@ type StatusInfo struct { } // GetStatusInfoList returns a slice of StatusInfo -func GetStatusInfoList(ctx context.Context) []StatusInfo { +func GetStatusInfoList(ctx context.Context, lang translation.Locale) []StatusInfo { // same as those in aggregateJobStatus allStatus := []Status{StatusSuccess, StatusFailure, StatusWaiting, StatusRunning} statusInfoList := make([]StatusInfo, 0, 4) for _, s := range allStatus { statusInfoList = append(statusInfoList, StatusInfo{ Status: int(s), - DisplayedStatus: s.String(), + DisplayedStatus: s.LocaleString(lang), }) } return statusInfoList diff --git a/models/actions/runner.go b/models/actions/runner.go index 0d5464a5be..81d4249ae0 100644 --- a/models/actions/runner.go +++ b/models/actions/runner.go @@ -5,6 +5,7 @@ package actions import ( "context" + "errors" "fmt" "strings" "time" @@ -14,6 +15,7 @@ import ( "code.gitea.io/gitea/models/shared/types" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/optional" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/translation" "code.gitea.io/gitea/modules/util" @@ -57,6 +59,8 @@ type ActionRunner struct { // Store labels defined in state file (default: .runner file) of `act_runner` AgentLabels []string `xorm:"TEXT"` + // Store if this is a runner that only ever get one single job assigned + Ephemeral bool `xorm:"ephemeral NOT NULL DEFAULT false"` Created timeutil.TimeStamp `xorm:"created"` Updated timeutil.TimeStamp `xorm:"updated"` @@ -84,9 +88,10 @@ func (r *ActionRunner) BelongsToOwnerType() types.OwnerType { return types.OwnerTypeRepository } if r.OwnerID != 0 { - if r.Owner.Type == user_model.UserTypeOrganization { + switch r.Owner.Type { + case user_model.UserTypeOrganization: return types.OwnerTypeOrganization - } else if r.Owner.Type == user_model.UserTypeIndividual { + case user_model.UserTypeIndividual: return types.OwnerTypeIndividual } } @@ -120,8 +125,15 @@ func (r *ActionRunner) IsOnline() bool { return false } -// Editable checks if the runner is editable by the user -func (r *ActionRunner) Editable(ownerID, repoID int64) bool { +// EditableInContext checks if the runner is editable by the "context" owner/repo +// ownerID == 0 and repoID == 0 means "admin" context, any runner including global runners could be edited +// ownerID == 0 and repoID != 0 means "repo" context, any runner belonging to the given repo could be edited +// ownerID != 0 and repoID == 0 means "owner(org/user)" context, any runner belonging to the given user/org could be edited +// ownerID != 0 and repoID != 0 means "owner" OR "repo" context, legacy behavior, but we should forbid using it +func (r *ActionRunner) EditableInContext(ownerID, repoID int64) bool { + if ownerID != 0 && repoID != 0 { + setting.PanicInDevOrTesting("ownerID and repoID should not be both set") + } if ownerID == 0 && repoID == 0 { return true } @@ -165,8 +177,15 @@ func init() { db.RegisterModel(&ActionRunner{}) } +// FindRunnerOptions +// ownerID == 0 and repoID == 0 means any runner including global runners +// repoID != 0 and WithAvailable == false means any runner for the given repo +// repoID != 0 and WithAvailable == true means any runner for the given repo, parent user/org, and global runners +// ownerID != 0 and repoID == 0 and WithAvailable == false means any runner for the given user/org +// ownerID != 0 and repoID == 0 and WithAvailable == true means any runner for the given user/org and 
global runners type FindRunnerOptions struct { db.ListOptions + IDs []int64 RepoID int64 OwnerID int64 // it will be ignored if RepoID is set Sort string @@ -178,6 +197,14 @@ type FindRunnerOptions struct { func (opts FindRunnerOptions) ToConds() builder.Cond { cond := builder.NewCond() + if len(opts.IDs) > 0 { + if len(opts.IDs) == 1 { + cond = cond.And(builder.Eq{"id": opts.IDs[0]}) + } else { + cond = cond.And(builder.In("id", opts.IDs)) + } + } + if opts.RepoID > 0 { c := builder.NewCond().And(builder.Eq{"repo_id": opts.RepoID}) if opts.WithAvailable { @@ -272,6 +299,23 @@ func DeleteRunner(ctx context.Context, id int64) error { return err } +// DeleteEphemeralRunner deletes a ephemeral runner by given ID. +func DeleteEphemeralRunner(ctx context.Context, id int64) error { + runner, err := GetRunnerByID(ctx, id) + if err != nil { + if errors.Is(err, util.ErrNotExist) { + return nil + } + return err + } + if !runner.Ephemeral { + return nil + } + + _, err = db.DeleteByID[ActionRunner](ctx, id) + return err +} + // CreateRunner creates new runner. func CreateRunner(ctx context.Context, t *ActionRunner) error { if t.OwnerID != 0 && t.RepoID != 0 { @@ -328,3 +372,17 @@ func FixRunnersWithoutBelongingRepo(ctx context.Context) (int64, error) { } return res.RowsAffected() } + +func CountWrongRepoLevelRunners(ctx context.Context) (int64, error) { + var result int64 + _, err := db.GetEngine(ctx).SQL("SELECT count(`id`) FROM `action_runner` WHERE `repo_id` > 0 AND `owner_id` > 0").Get(&result) + return result, err +} + +func UpdateWrongRepoLevelRunners(ctx context.Context) (int64, error) { + result, err := db.GetEngine(ctx).Exec("UPDATE `action_runner` SET `owner_id` = 0 WHERE `repo_id` > 0 AND `owner_id` > 0") + if err != nil { + return 0, err + } + return result.RowsAffected() +} diff --git a/models/actions/runner_token_test.go b/models/actions/runner_token_test.go index 159805e5f7..21614b7086 100644 --- a/models/actions/runner_token_test.go +++ b/models/actions/runner_token_test.go @@ -17,7 +17,7 @@ func TestGetLatestRunnerToken(t *testing.T) { token := unittest.AssertExistsAndLoadBean(t, &ActionRunnerToken{ID: 3}) expectedToken, err := GetLatestRunnerToken(db.DefaultContext, 1, 0) assert.NoError(t, err) - assert.EqualValues(t, expectedToken, token) + assert.Equal(t, expectedToken, token) } func TestNewRunnerToken(t *testing.T) { @@ -26,7 +26,7 @@ func TestNewRunnerToken(t *testing.T) { assert.NoError(t, err) expectedToken, err := GetLatestRunnerToken(db.DefaultContext, 1, 0) assert.NoError(t, err) - assert.EqualValues(t, expectedToken, token) + assert.Equal(t, expectedToken, token) } func TestUpdateRunnerToken(t *testing.T) { @@ -36,5 +36,5 @@ func TestUpdateRunnerToken(t *testing.T) { assert.NoError(t, UpdateRunnerToken(db.DefaultContext, token)) expectedToken, err := GetLatestRunnerToken(db.DefaultContext, 1, 0) assert.NoError(t, err) - assert.EqualValues(t, expectedToken, token) + assert.Equal(t, expectedToken, token) } diff --git a/models/actions/schedule.go b/models/actions/schedule.go index e2cc32eedc..2edf483fe0 100644 --- a/models/actions/schedule.go +++ b/models/actions/schedule.go @@ -43,15 +43,12 @@ func init() { // GetSchedulesMapByIDs returns the schedules by given id slice. 
func GetSchedulesMapByIDs(ctx context.Context, ids []int64) (map[int64]*ActionSchedule, error) { schedules := make(map[int64]*ActionSchedule, len(ids)) + if len(ids) == 0 { + return schedules, nil + } return schedules, db.GetEngine(ctx).In("id", ids).Find(&schedules) } -// GetReposMapByIDs returns the repos by given id slice. -func GetReposMapByIDs(ctx context.Context, ids []int64) (map[int64]*repo_model.Repository, error) { - repos := make(map[int64]*repo_model.Repository, len(ids)) - return repos, db.GetEngine(ctx).In("id", ids).Find(&repos) -} - // CreateScheduleTask creates new schedule task. func CreateScheduleTask(ctx context.Context, rows []*ActionSchedule) error { // Return early if there are no rows to insert @@ -120,21 +117,22 @@ func DeleteScheduleTaskByRepo(ctx context.Context, id int64) error { return committer.Commit() } -func CleanRepoScheduleTasks(ctx context.Context, repo *repo_model.Repository) error { +func CleanRepoScheduleTasks(ctx context.Context, repo *repo_model.Repository) ([]*ActionRunJob, error) { // If actions disabled when there is schedule task, this will remove the outdated schedule tasks // There is no other place we can do this because the app.ini will be changed manually if err := DeleteScheduleTaskByRepo(ctx, repo.ID); err != nil { - return fmt.Errorf("DeleteCronTaskByRepo: %v", err) + return nil, fmt.Errorf("DeleteCronTaskByRepo: %v", err) } // cancel running cron jobs of this repository and delete old schedules - if err := CancelPreviousJobs( + jobs, err := CancelPreviousJobs( ctx, repo.ID, repo.DefaultBranch, "", webhook_module.HookEventSchedule, - ); err != nil { - return fmt.Errorf("CancelPreviousJobs: %v", err) + ) + if err != nil { + return jobs, fmt.Errorf("CancelPreviousJobs: %v", err) } - return nil + return jobs, nil } diff --git a/models/actions/schedule_spec_list.go b/models/actions/schedule_spec_list.go index f7dac72f8b..e26b2c1120 100644 --- a/models/actions/schedule_spec_list.go +++ b/models/actions/schedule_spec_list.go @@ -32,7 +32,7 @@ func (specs SpecList) LoadSchedules(ctx context.Context) error { } repoIDs := specs.GetRepoIDs() - repos, err := GetReposMapByIDs(ctx, repoIDs) + repos, err := repo_model.GetRepositoriesMapByIDs(ctx, repoIDs) if err != nil { return err } diff --git a/models/actions/status.go b/models/actions/status.go index eda2234137..2b1d70613c 100644 --- a/models/actions/status.go +++ b/models/actions/status.go @@ -4,6 +4,8 @@ package actions import ( + "slices" + "code.gitea.io/gitea/modules/translation" runnerv1 "code.gitea.io/actions-proto-go/runner/v1" @@ -88,12 +90,7 @@ func (s Status) IsBlocked() bool { // In returns whether s is one of the given statuses func (s Status) In(statuses ...Status) bool { - for _, v := range statuses { - if s == v { - return true - } - } - return false + return slices.Contains(statuses, s) } func (s Status) AsResult() runnerv1.Result { diff --git a/models/actions/task.go b/models/actions/task.go index 9f13ff94c9..e0756b10c2 100644 --- a/models/actions/task.go +++ b/models/actions/task.go @@ -6,6 +6,7 @@ package actions import ( "context" "crypto/subtle" + "errors" "fmt" "time" @@ -277,14 +278,13 @@ func CreateTaskForRunner(ctx context.Context, runner *ActionRunner) (*ActionTask return nil, false, err } - var workflowJob *jobparser.Job - if gots, err := jobparser.Parse(job.WorkflowPayload); err != nil { + parsedWorkflows, err := jobparser.Parse(job.WorkflowPayload) + if err != nil { return nil, false, fmt.Errorf("parse workflow of job %d: %w", job.ID, err) - } else if len(gots) != 1 { 
+ } else if len(parsedWorkflows) != 1 { return nil, false, fmt.Errorf("workflow of job %d: not single workflow", job.ID) - } else { //nolint:revive - _, workflowJob = gots[0].Job() } + _, workflowJob := parsedWorkflows[0].Job() if _, err := e.Insert(task); err != nil { return nil, false, err @@ -335,6 +335,11 @@ func UpdateTask(ctx context.Context, task *ActionTask, cols ...string) error { sess.Cols(cols...) } _, err := sess.Update(task) + + // Automatically delete the ephemeral runner if the task is done + if err == nil && task.Status.IsDone() && util.SliceContainsString(cols, "status") { + return DeleteEphemeralRunner(ctx, task.RunnerID) + } return err } @@ -361,7 +366,7 @@ func UpdateTaskByState(ctx context.Context, runnerID int64, state *runnerv1.Task } else if !has { return nil, util.ErrNotExist } else if runnerID != task.RunnerID { - return nil, fmt.Errorf("invalid runner for task") + return nil, errors.New("invalid runner for task") } if task.Status.IsDone() { diff --git a/models/actions/task_list.go b/models/actions/task_list.go index df4b43c5ef..0c80397899 100644 --- a/models/actions/task_list.go +++ b/models/actions/task_list.go @@ -48,6 +48,7 @@ func (tasks TaskList) LoadAttributes(ctx context.Context) error { type FindTaskOptions struct { db.ListOptions RepoID int64 + JobID int64 OwnerID int64 CommitSHA string Status Status @@ -61,6 +62,9 @@ func (opts FindTaskOptions) ToConds() builder.Cond { if opts.RepoID > 0 { cond = cond.And(builder.Eq{"repo_id": opts.RepoID}) } + if opts.JobID > 0 { + cond = cond.And(builder.Eq{"job_id": opts.JobID}) + } if opts.OwnerID > 0 { cond = cond.And(builder.Eq{"owner_id": opts.OwnerID}) } diff --git a/models/actions/utils.go b/models/actions/utils.go index 12657942fc..f6ba661ae3 100644 --- a/models/actions/utils.go +++ b/models/actions/utils.go @@ -82,3 +82,22 @@ func calculateDuration(started, stopped timeutil.TimeStamp, status Status) time. 
} return timeSince(s).Truncate(time.Second) } + +// best effort function to convert an action schedule to action run, to be used in GenerateGiteaContext +func (s *ActionSchedule) ToActionRun() *ActionRun { + return &ActionRun{ + Title: s.Title, + RepoID: s.RepoID, + Repo: s.Repo, + OwnerID: s.OwnerID, + WorkflowID: s.WorkflowID, + TriggerUserID: s.TriggerUserID, + TriggerUser: s.TriggerUser, + Ref: s.Ref, + CommitSHA: s.CommitSHA, + Event: s.Event, + EventPayload: s.EventPayload, + Created: s.Created, + Updated: s.Updated, + } +} diff --git a/models/actions/variable.go b/models/actions/variable.go index d0f917d923..7154843c17 100644 --- a/models/actions/variable.go +++ b/models/actions/variable.go @@ -6,10 +6,12 @@ package actions import ( "context" "strings" + "unicode/utf8" "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/timeutil" + "code.gitea.io/gitea/modules/util" "xorm.io/builder" ) @@ -32,32 +34,46 @@ type ActionVariable struct { RepoID int64 `xorm:"INDEX UNIQUE(owner_repo_name)"` Name string `xorm:"UNIQUE(owner_repo_name) NOT NULL"` Data string `xorm:"LONGTEXT NOT NULL"` + Description string `xorm:"TEXT"` CreatedUnix timeutil.TimeStamp `xorm:"created NOT NULL"` UpdatedUnix timeutil.TimeStamp `xorm:"updated"` } +const ( + VariableDataMaxLength = 65536 + VariableDescriptionMaxLength = 4096 +) + func init() { db.RegisterModel(new(ActionVariable)) } -func InsertVariable(ctx context.Context, ownerID, repoID int64, name, data string) (*ActionVariable, error) { +func InsertVariable(ctx context.Context, ownerID, repoID int64, name, data, description string) (*ActionVariable, error) { if ownerID != 0 && repoID != 0 { // It's trying to create a variable that belongs to a repository, but OwnerID has been set accidentally. // Remove OwnerID to avoid confusion; it's not worth returning an error here. ownerID = 0 } + if utf8.RuneCountInString(data) > VariableDataMaxLength { + return nil, util.NewInvalidArgumentErrorf("data too long") + } + + description = util.TruncateRunes(description, VariableDescriptionMaxLength) + variable := &ActionVariable{ - OwnerID: ownerID, - RepoID: repoID, - Name: strings.ToUpper(name), - Data: data, + OwnerID: ownerID, + RepoID: repoID, + Name: strings.ToUpper(name), + Data: data, + Description: description, } return variable, db.Insert(ctx, variable) } type FindVariablesOpts struct { db.ListOptions + IDs []int64 RepoID int64 OwnerID int64 // it will be ignored if RepoID is set Name string @@ -65,6 +81,15 @@ type FindVariablesOpts struct { func (opts FindVariablesOpts) ToConds() builder.Cond { cond := builder.NewCond() + + if len(opts.IDs) > 0 { + if len(opts.IDs) == 1 { + cond = cond.And(builder.Eq{"id": opts.IDs[0]}) + } else { + cond = cond.And(builder.In("id", opts.IDs)) + } + } + // Since we now support instance-level variables, // there is no need to check for null values for `owner_id` and `repo_id` cond = cond.And(builder.Eq{"repo_id": opts.RepoID}) @@ -85,12 +110,18 @@ func FindVariables(ctx context.Context, opts FindVariablesOpts) ([]*ActionVariab return db.Find[ActionVariable](ctx, opts) } -func UpdateVariable(ctx context.Context, variable *ActionVariable) (bool, error) { - count, err := db.GetEngine(ctx).ID(variable.ID).Cols("name", "data"). 
- Update(&ActionVariable{ - Name: variable.Name, - Data: variable.Data, - }) +func UpdateVariableCols(ctx context.Context, variable *ActionVariable, cols ...string) (bool, error) { + if utf8.RuneCountInString(variable.Data) > VariableDataMaxLength { + return false, util.NewInvalidArgumentErrorf("data too long") + } + + variable.Description = util.TruncateRunes(variable.Description, VariableDescriptionMaxLength) + + variable.Name = strings.ToUpper(variable.Name) + count, err := db.GetEngine(ctx). + ID(variable.ID). + Cols(cols...). + Update(variable) return count != 0, err } @@ -137,3 +168,17 @@ func GetVariablesOfRun(ctx context.Context, run *ActionRun) (map[string]string, return variables, nil } + +func CountWrongRepoLevelVariables(ctx context.Context) (int64, error) { + var result int64 + _, err := db.GetEngine(ctx).SQL("SELECT count(`id`) FROM `action_variable` WHERE `repo_id` > 0 AND `owner_id` > 0").Get(&result) + return result, err +} + +func UpdateWrongRepoLevelVariables(ctx context.Context) (int64, error) { + result, err := db.GetEngine(ctx).Exec("UPDATE `action_variable` SET `owner_id` = 0 WHERE `repo_id` > 0 AND `owner_id` > 0") + if err != nil { + return 0, err + } + return result.RowsAffected() +} diff --git a/models/activities/action.go b/models/activities/action.go index 8304210188..1a0dfe6412 100644 --- a/models/activities/action.go +++ b/models/activities/action.go @@ -9,6 +9,7 @@ import ( "fmt" "net/url" "path" + "slices" "strconv" "strings" "time" @@ -16,9 +17,7 @@ import ( "code.gitea.io/gitea/models/db" issues_model "code.gitea.io/gitea/models/issues" "code.gitea.io/gitea/models/organization" - access_model "code.gitea.io/gitea/models/perm/access" repo_model "code.gitea.io/gitea/models/repo" - "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" @@ -72,9 +71,9 @@ func (at ActionType) String() string { case ActionRenameRepo: return "rename_repo" case ActionStarRepo: - return "star_repo" + return "star_repo" // will not displayed in feeds.tmpl case ActionWatchRepo: - return "watch_repo" + return "watch_repo" // will not displayed in feeds.tmpl case ActionCommitRepo: return "commit_repo" case ActionCreateIssue: @@ -127,12 +126,7 @@ func (at ActionType) String() string { } func (at ActionType) InActions(actions ...string) bool { - for _, action := range actions { - if action == at.String() { - return true - } - } - return false + return slices.Contains(actions, at.String()) } // Action represents user operation type and other information to @@ -174,7 +168,10 @@ func (a *Action) TableIndices() []*schemas.Index { cuIndex := schemas.NewIndex("c_u", schemas.IndexType) cuIndex.AddColumn("user_id", "is_deleted") - indices := []*schemas.Index{actUserIndex, repoIndex, cudIndex, cuIndex} + actUserUserIndex := schemas.NewIndex("au_c_u", schemas.IndexType) + actUserUserIndex.AddColumn("act_user_id", "created_unix", "user_id") + + indices := []*schemas.Index{actUserIndex, repoIndex, cudIndex, cuIndex, actUserUserIndex} return indices } @@ -190,7 +187,7 @@ func (a *Action) LoadActUser(ctx context.Context) { return } var err error - a.ActUser, err = user_model.GetUserByID(ctx, a.ActUserID) + a.ActUser, err = user_model.GetPossibleUserByID(ctx, a.ActUserID) if err == nil { return } else if user_model.IsErrUserNotExist(err) { @@ -200,15 +197,13 @@ func (a *Action) LoadActUser(ctx context.Context) { } } -func (a *Action) LoadRepo(ctx context.Context) { +func (a *Action) LoadRepo(ctx 
context.Context) error { if a.Repo != nil { - return + return nil } var err error a.Repo, err = repo_model.GetRepositoryByID(ctx, a.RepoID) - if err != nil { - log.Error("repo_model.GetRepositoryByID(%d): %v", a.RepoID, err) - } + return err } // GetActFullName gets the action's user full name. @@ -250,7 +245,7 @@ func (a *Action) GetActDisplayNameTitle(ctx context.Context) string { // GetRepoUserName returns the name of the action repository owner. func (a *Action) GetRepoUserName(ctx context.Context) string { - a.LoadRepo(ctx) + _ = a.LoadRepo(ctx) if a.Repo == nil { return "(non-existing-repo)" } @@ -265,7 +260,7 @@ func (a *Action) ShortRepoUserName(ctx context.Context) string { // GetRepoName returns the name of the action repository. func (a *Action) GetRepoName(ctx context.Context) string { - a.LoadRepo(ctx) + _ = a.LoadRepo(ctx) if a.Repo == nil { return "(non-existing-repo)" } @@ -355,7 +350,7 @@ func (a *Action) GetBranch() string { // GetRefLink returns the action's ref link. func (a *Action) GetRefLink(ctx context.Context) string { - return git.RefURL(a.GetRepoLink(ctx), a.RefName) + return a.GetRepoLink(ctx) + "/src/" + git.RefName(a.RefName).RefWebLinkPath() } // GetTag returns the action's repository tag. @@ -446,6 +441,7 @@ type GetFeedsOptions struct { OnlyPerformedBy bool // only actions performed by requested user IncludeDeleted bool // include deleted actions Date string // the day we want activity for: YYYY-MM-DD + DontCount bool // do counting in GetFeeds } // ActivityReadable return whether doer can read activities of user @@ -454,6 +450,24 @@ func ActivityReadable(user, doer *user_model.User) bool { doer != nil && (doer.IsAdmin || user.ID == doer.ID) } +func FeedDateCond(opts GetFeedsOptions) builder.Cond { + cond := builder.NewCond() + if opts.Date == "" { + return cond + } + + dateLow, err := time.ParseInLocation("2006-01-02", opts.Date, setting.DefaultUILocation) + if err != nil { + log.Warn("Unable to parse %s, filter not applied: %v", opts.Date, err) + } else { + dateHigh := dateLow.Add(86399000000000) // 23h59m59s + + cond = cond.And(builder.Gte{"`action`.created_unix": dateLow.Unix()}) + cond = cond.And(builder.Lte{"`action`.created_unix": dateHigh.Unix()}) + } + return cond +} + func ActivityQueryCondition(ctx context.Context, opts GetFeedsOptions) (builder.Cond, error) { cond := builder.NewCond() @@ -511,8 +525,8 @@ func ActivityQueryCondition(ctx context.Context, opts GetFeedsOptions) (builder. } if opts.RequestedTeam != nil { - env := repo_model.AccessibleTeamReposEnv(ctx, organization.OrgFromUser(opts.RequestedUser), opts.RequestedTeam) - teamRepoIDs, err := env.RepoIDs(1, opts.RequestedUser.NumRepos) + env := repo_model.AccessibleTeamReposEnv(organization.OrgFromUser(opts.RequestedUser), opts.RequestedTeam) + teamRepoIDs, err := env.RepoIDs(ctx) if err != nil { return nil, fmt.Errorf("GetTeamRepositories: %w", err) } @@ -534,17 +548,7 @@ func ActivityQueryCondition(ctx context.Context, opts GetFeedsOptions) (builder. 
cond = cond.And(builder.Eq{"is_deleted": false}) } - if opts.Date != "" { - dateLow, err := time.ParseInLocation("2006-01-02", opts.Date, setting.DefaultUILocation) - if err != nil { - log.Warn("Unable to parse %s, filter not applied: %v", opts.Date, err) - } else { - dateHigh := dateLow.Add(86399000000000) // 23h59m59s - - cond = cond.And(builder.Gte{"`action`.created_unix": dateLow.Unix()}) - cond = cond.And(builder.Lte{"`action`.created_unix": dateHigh.Unix()}) - } - } + cond = cond.And(FeedDateCond(opts)) return cond, nil } @@ -559,130 +563,6 @@ func DeleteOldActions(ctx context.Context, olderThan time.Duration) (err error) return err } -// NotifyWatchers creates batch of actions for every watcher. -// It could insert duplicate actions for a repository action, like this: -// * Original action: UserID=1 (the real actor), ActUserID=1 -// * Organization action: UserID=100 (the repo's org), ActUserID=1 -// * Watcher action: UserID=20 (a user who is watching a repo), ActUserID=1 -func NotifyWatchers(ctx context.Context, actions ...*Action) error { - var watchers []*repo_model.Watch - var repo *repo_model.Repository - var err error - var permCode []bool - var permIssue []bool - var permPR []bool - - e := db.GetEngine(ctx) - - for _, act := range actions { - repoChanged := repo == nil || repo.ID != act.RepoID - - if repoChanged { - // Add feeds for user self and all watchers. - watchers, err = repo_model.GetWatchers(ctx, act.RepoID) - if err != nil { - return fmt.Errorf("get watchers: %w", err) - } - } - - // Add feed for actioner. - act.UserID = act.ActUserID - if _, err = e.Insert(act); err != nil { - return fmt.Errorf("insert new actioner: %w", err) - } - - if repoChanged { - act.LoadRepo(ctx) - repo = act.Repo - - // check repo owner exist. - if err := act.Repo.LoadOwner(ctx); err != nil { - return fmt.Errorf("can't get repo owner: %w", err) - } - } else if act.Repo == nil { - act.Repo = repo - } - - // Add feed for organization - if act.Repo.Owner.IsOrganization() && act.ActUserID != act.Repo.Owner.ID { - act.ID = 0 - act.UserID = act.Repo.Owner.ID - if err = db.Insert(ctx, act); err != nil { - return fmt.Errorf("insert new actioner: %w", err) - } - } - - if repoChanged { - permCode = make([]bool, len(watchers)) - permIssue = make([]bool, len(watchers)) - permPR = make([]bool, len(watchers)) - for i, watcher := range watchers { - user, err := user_model.GetUserByID(ctx, watcher.UserID) - if err != nil { - permCode[i] = false - permIssue[i] = false - permPR[i] = false - continue - } - perm, err := access_model.GetUserRepoPermission(ctx, repo, user) - if err != nil { - permCode[i] = false - permIssue[i] = false - permPR[i] = false - continue - } - permCode[i] = perm.CanRead(unit.TypeCode) - permIssue[i] = perm.CanRead(unit.TypeIssues) - permPR[i] = perm.CanRead(unit.TypePullRequests) - } - } - - for i, watcher := range watchers { - if act.ActUserID == watcher.UserID { - continue - } - act.ID = 0 - act.UserID = watcher.UserID - act.Repo.Units = nil - - switch act.OpType { - case ActionCommitRepo, ActionPushTag, ActionDeleteTag, ActionPublishRelease, ActionDeleteBranch: - if !permCode[i] { - continue - } - case ActionCreateIssue, ActionCommentIssue, ActionCloseIssue, ActionReopenIssue: - if !permIssue[i] { - continue - } - case ActionCreatePullRequest, ActionCommentPull, ActionMergePullRequest, ActionClosePullRequest, ActionReopenPullRequest, ActionAutoMergePullRequest: - if !permPR[i] { - continue - } - } - - if err = db.Insert(ctx, act); err != nil { - return fmt.Errorf("insert new action: 
%w", err) - } - } - } - return nil -} - -// NotifyWatchersActions creates batch of actions for every watcher. -func NotifyWatchersActions(ctx context.Context, acts []*Action) error { - ctx, committer, err := db.TxContext(ctx) - if err != nil { - return err - } - defer committer.Close() - for _, act := range acts { - if err := NotifyWatchers(ctx, act); err != nil { - return err - } - } - return committer.Commit() -} - // DeleteIssueActions delete all actions related with issueID func DeleteIssueActions(ctx context.Context, repoID, issueID, issueIndex int64) error { // delete actions assigned to this issue diff --git a/models/activities/action_list.go b/models/activities/action_list.go index 5f9acb8f2a..b52cf7ee49 100644 --- a/models/activities/action_list.go +++ b/models/activities/action_list.go @@ -5,6 +5,7 @@ package activities import ( "context" + "errors" "fmt" "strconv" @@ -205,12 +206,34 @@ func (actions ActionList) LoadIssues(ctx context.Context) error { // GetFeeds returns actions according to the provided options func GetFeeds(ctx context.Context, opts GetFeedsOptions) (ActionList, int64, error) { if opts.RequestedUser == nil && opts.RequestedTeam == nil && opts.RequestedRepo == nil { - return nil, 0, fmt.Errorf("need at least one of these filters: RequestedUser, RequestedTeam, RequestedRepo") + return nil, 0, errors.New("need at least one of these filters: RequestedUser, RequestedTeam, RequestedRepo") } - cond, err := ActivityQueryCondition(ctx, opts) - if err != nil { - return nil, 0, err + var err error + var cond builder.Cond + // if the actor is the requested user or is an administrator, we can skip the ActivityQueryCondition + if opts.Actor != nil && opts.RequestedUser != nil && (opts.Actor.IsAdmin || opts.Actor.ID == opts.RequestedUser.ID) { + cond = builder.Eq{ + "user_id": opts.RequestedUser.ID, + }.And( + FeedDateCond(opts), + ) + + if !opts.IncludeDeleted { + cond = cond.And(builder.Eq{"is_deleted": false}) + } + + if !opts.IncludePrivate { + cond = cond.And(builder.Eq{"is_private": false}) + } + if opts.OnlyPerformedBy { + cond = cond.And(builder.Eq{"act_user_id": opts.RequestedUser.ID}) + } + } else { + cond, err = ActivityQueryCondition(ctx, opts) + if err != nil { + return nil, 0, err + } } actions := make([]*Action, 0, opts.PageSize) @@ -221,7 +244,11 @@ func GetFeeds(ctx context.Context, opts GetFeedsOptions) (ActionList, int64, err sess := db.GetEngine(ctx).Where(cond) sess = db.SetSessionPagination(sess, &opts) - count, err = sess.Desc("`action`.created_unix").FindAndCount(&actions) + if opts.DontCount { + err = sess.Desc("`action`.created_unix").Find(&actions) + } else { + count, err = sess.Desc("`action`.created_unix").FindAndCount(&actions) + } if err != nil { return nil, 0, fmt.Errorf("FindAndCount: %w", err) } @@ -235,11 +262,13 @@ func GetFeeds(ctx context.Context, opts GetFeedsOptions) (ActionList, int64, err return nil, 0, fmt.Errorf("Find(actionsIDs): %w", err) } - count, err = db.GetEngine(ctx).Where(cond). - Table("action"). - Cols("`action`.id").Count() - if err != nil { - return nil, 0, fmt.Errorf("Count: %w", err) + if !opts.DontCount { + count, err = db.GetEngine(ctx).Where(cond). + Table("action"). 
+ Cols("`action`.id").Count() + if err != nil { + return nil, 0, fmt.Errorf("Count: %w", err) + } } if err := db.GetEngine(ctx).In("`action`.id", actionIDs).Desc("`action`.created_unix").Find(&actions); err != nil { @@ -253,3 +282,9 @@ func GetFeeds(ctx context.Context, opts GetFeedsOptions) (ActionList, int64, err return actions, count, nil } + +func CountUserFeeds(ctx context.Context, userID int64) (int64, error) { + return db.GetEngine(ctx).Where("user_id = ?", userID). + And("is_deleted = ?", false). + Count(&Action{}) +} diff --git a/models/activities/action_test.go b/models/activities/action_test.go index 9cfe981656..ff311ac891 100644 --- a/models/activities/action_test.go +++ b/models/activities/action_test.go @@ -82,43 +82,6 @@ func TestActivityReadable(t *testing.T) { } } -func TestNotifyWatchers(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - - action := &activities_model.Action{ - ActUserID: 8, - RepoID: 1, - OpType: activities_model.ActionStarRepo, - } - assert.NoError(t, activities_model.NotifyWatchers(db.DefaultContext, action)) - - // One watchers are inactive, thus action is only created for user 8, 1, 4, 11 - unittest.AssertExistsAndLoadBean(t, &activities_model.Action{ - ActUserID: action.ActUserID, - UserID: 8, - RepoID: action.RepoID, - OpType: action.OpType, - }) - unittest.AssertExistsAndLoadBean(t, &activities_model.Action{ - ActUserID: action.ActUserID, - UserID: 1, - RepoID: action.RepoID, - OpType: action.OpType, - }) - unittest.AssertExistsAndLoadBean(t, &activities_model.Action{ - ActUserID: action.ActUserID, - UserID: 4, - RepoID: action.RepoID, - OpType: action.OpType, - }) - unittest.AssertExistsAndLoadBean(t, &activities_model.Action{ - ActUserID: action.ActUserID, - UserID: 11, - RepoID: action.RepoID, - OpType: action.OpType, - }) -} - func TestConsistencyUpdateAction(t *testing.T) { if !setting.Database.Type.IsSQLite3() { t.Skip("Test is only for SQLite database.") @@ -167,7 +130,7 @@ func TestDeleteIssueActions(t *testing.T) { // load an issue issue := unittest.AssertExistsAndLoadBean(t, &issue_model.Issue{ID: 4}) - assert.NotEqualValues(t, issue.ID, issue.Index) // it needs to use different ID/Index to test the DeleteIssueActions to delete some actions by IssueIndex + assert.NotEqual(t, issue.ID, issue.Index) // it needs to use different ID/Index to test the DeleteIssueActions to delete some actions by IssueIndex // insert a comment err := db.Insert(db.DefaultContext, &issue_model.Comment{Type: issue_model.CommentTypeComment, IssueID: issue.ID}) diff --git a/models/activities/notification_list.go b/models/activities/notification_list.go index 0cbb91df3c..b47f5dc404 100644 --- a/models/activities/notification_list.go +++ b/models/activities/notification_list.go @@ -208,10 +208,7 @@ func (nl NotificationList) LoadRepos(ctx context.Context) (repo_model.Repository repos := make(map[int64]*repo_model.Repository, len(repoIDs)) left := len(repoIDs) for left > 0 { - limit := db.DefaultMaxInSize - if left < limit { - limit = left - } + limit := min(left, db.DefaultMaxInSize) rows, err := db.GetEngine(ctx). In("id", repoIDs[:limit]). Rows(new(repo_model.Repository)) @@ -282,10 +279,7 @@ func (nl NotificationList) LoadIssues(ctx context.Context) ([]int, error) { issues := make(map[int64]*issues_model.Issue, len(issueIDs)) left := len(issueIDs) for left > 0 { - limit := db.DefaultMaxInSize - if left < limit { - limit = left - } + limit := min(left, db.DefaultMaxInSize) rows, err := db.GetEngine(ctx). In("id", issueIDs[:limit]). 
Rows(new(issues_model.Issue)) @@ -377,10 +371,7 @@ func (nl NotificationList) LoadUsers(ctx context.Context) ([]int, error) { users := make(map[int64]*user_model.User, len(userIDs)) left := len(userIDs) for left > 0 { - limit := db.DefaultMaxInSize - if left < limit { - limit = left - } + limit := min(left, db.DefaultMaxInSize) rows, err := db.GetEngine(ctx). In("id", userIDs[:limit]). Rows(new(user_model.User)) @@ -428,10 +419,7 @@ func (nl NotificationList) LoadComments(ctx context.Context) ([]int, error) { comments := make(map[int64]*issues_model.Comment, len(commentIDs)) left := len(commentIDs) for left > 0 { - limit := db.DefaultMaxInSize - if left < limit { - limit = left - } + limit := min(left, db.DefaultMaxInSize) rows, err := db.GetEngine(ctx). In("id", commentIDs[:limit]). Rows(new(issues_model.Comment)) diff --git a/models/activities/notification_test.go b/models/activities/notification_test.go index 52f0eacba1..5d2a29bc36 100644 --- a/models/activities/notification_test.go +++ b/models/activities/notification_test.go @@ -44,11 +44,11 @@ func TestNotificationsForUser(t *testing.T) { assert.NoError(t, err) if assert.Len(t, notfs, 3) { assert.EqualValues(t, 5, notfs[0].ID) - assert.EqualValues(t, user.ID, notfs[0].UserID) + assert.Equal(t, user.ID, notfs[0].UserID) assert.EqualValues(t, 4, notfs[1].ID) - assert.EqualValues(t, user.ID, notfs[1].UserID) + assert.Equal(t, user.ID, notfs[1].UserID) assert.EqualValues(t, 2, notfs[2].ID) - assert.EqualValues(t, user.ID, notfs[2].UserID) + assert.Equal(t, user.ID, notfs[2].UserID) } } @@ -58,7 +58,7 @@ func TestNotification_GetRepo(t *testing.T) { repo, err := notf.GetRepo(db.DefaultContext) assert.NoError(t, err) assert.Equal(t, repo, notf.Repository) - assert.EqualValues(t, notf.RepoID, repo.ID) + assert.Equal(t, notf.RepoID, repo.ID) } func TestNotification_GetIssue(t *testing.T) { @@ -67,7 +67,7 @@ func TestNotification_GetIssue(t *testing.T) { issue, err := notf.GetIssue(db.DefaultContext) assert.NoError(t, err) assert.Equal(t, issue, notf.Issue) - assert.EqualValues(t, notf.IssueID, issue.ID) + assert.Equal(t, notf.IssueID, issue.ID) } func TestGetNotificationCount(t *testing.T) { @@ -136,5 +136,5 @@ func TestSetIssueReadBy(t *testing.T) { nt, err := activities_model.GetIssueNotification(db.DefaultContext, user.ID, issue.ID) assert.NoError(t, err) - assert.EqualValues(t, activities_model.NotificationStatusRead, nt.Status) + assert.Equal(t, activities_model.NotificationStatusRead, nt.Status) } diff --git a/models/activities/repo_activity.go b/models/activities/repo_activity.go index 3ccdbd47d3..aeaa452c9e 100644 --- a/models/activities/repo_activity.go +++ b/models/activities/repo_activity.go @@ -139,10 +139,7 @@ func GetActivityStatsTopAuthors(ctx context.Context, repo *repo_model.Repository return v[i].Commits > v[j].Commits }) - cnt := count - if cnt > len(v) { - cnt = len(v) - } + cnt := min(count, len(v)) return v[:cnt], nil } diff --git a/models/activities/statistic.go b/models/activities/statistic.go index ff81ad78a1..940651d359 100644 --- a/models/activities/statistic.go +++ b/models/activities/statistic.go @@ -17,13 +17,16 @@ import ( repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/models/webhook" + "code.gitea.io/gitea/modules/optional" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/structs" ) // Statistic contains the database statistics type Statistic struct { Counter struct { - User, Org, PublicKey, + UsersActive, UsersNotActive, + 
Org, PublicKey, Repo, Watch, Star, Access, Issue, IssueClosed, IssueOpen, Comment, Oauth, Follow, @@ -53,8 +56,20 @@ type IssueByRepositoryCount struct { // GetStatistic returns the database statistics func GetStatistic(ctx context.Context) (stats Statistic) { e := db.GetEngine(ctx) - stats.Counter.User = user_model.CountUsers(ctx, nil) - stats.Counter.Org, _ = db.Count[organization.Organization](ctx, organization.FindOrgOptions{IncludePrivate: true}) + + // Number of active users + usersActiveOpts := user_model.CountUserFilter{ + IsActive: optional.Some(true), + } + stats.Counter.UsersActive = user_model.CountUsers(ctx, &usersActiveOpts) + + // Number of inactive users + usersNotActiveOpts := user_model.CountUserFilter{ + IsActive: optional.Some(false), + } + stats.Counter.UsersNotActive = user_model.CountUsers(ctx, &usersNotActiveOpts) + + stats.Counter.Org, _ = db.Count[organization.Organization](ctx, organization.FindOrgOptions{IncludeVisibility: structs.VisibleTypePrivate}) stats.Counter.PublicKey, _ = e.Count(new(asymkey_model.PublicKey)) stats.Counter.Repo, _ = repo_model.CountRepositories(ctx, repo_model.CountRepositoryOptions{}) stats.Counter.Watch, _ = e.Count(new(repo_model.Watch)) diff --git a/models/activities/user_heatmap.go b/models/activities/user_heatmap.go index 1f8f0f590e..ef67838be7 100644 --- a/models/activities/user_heatmap.go +++ b/models/activities/user_heatmap.go @@ -66,7 +66,7 @@ func getUserHeatmapData(ctx context.Context, user *user_model.User, team *organi Select(groupBy+" AS timestamp, count(user_id) as contributions"). Table("action"). Where(cond). - And("created_unix > ?", timeutil.TimeStampNow()-31536000). + And("created_unix > ?", timeutil.TimeStampNow()-(366+7)*86400). // (366+7) days to include the first week for the heatmap GroupBy(groupByName). OrderBy("timestamp"). Find(&hdata) diff --git a/models/admin/task.go b/models/admin/task.go index 10f8e6d570..0541a8ec78 100644 --- a/models/admin/task.go +++ b/models/admin/task.go @@ -44,7 +44,7 @@ func init() { // TranslatableMessage represents JSON struct that can be translated with a Locale type TranslatableMessage struct { Format string - Args []any `json:"omitempty"` + Args []any `json:",omitempty"` } // LoadRepo loads repository of the task diff --git a/models/asymkey/error.go b/models/asymkey/error.go index 2e65d76612..b765624579 100644 --- a/models/asymkey/error.go +++ b/models/asymkey/error.go @@ -25,7 +25,7 @@ func (err ErrKeyUnableVerify) Error() string { } // ErrKeyIsPrivate is returned when the provided key is a private key not a public key -var ErrKeyIsPrivate = util.NewSilentWrapErrorf(util.ErrInvalidArgument, "the provided key is a private key") +var ErrKeyIsPrivate = util.ErrorWrap(util.ErrInvalidArgument, "the provided key is a private key") // ErrKeyNotExist represents a "KeyNotExist" kind of error. type ErrKeyNotExist struct { @@ -132,7 +132,7 @@ func IsErrGPGKeyParsing(err error) bool { } func (err ErrGPGKeyParsing) Error() string { - return fmt.Sprintf("failed to parse gpg key %s", err.ParseError.Error()) + return "failed to parse gpg key " + err.ParseError.Error() } // ErrGPGKeyNotExist represents a "GPGKeyNotExist" kind of error. 
diff --git a/models/asymkey/gpg_key.go b/models/asymkey/gpg_key.go index 5236b2d450..220f46ad1d 100644 --- a/models/asymkey/gpg_key.go +++ b/models/asymkey/gpg_key.go @@ -5,6 +5,7 @@ package asymkey import ( "context" + "errors" "fmt" "strings" "time" @@ -13,8 +14,8 @@ import ( user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/timeutil" - "github.com/keybase/go-crypto/openpgp" - "github.com/keybase/go-crypto/openpgp/packet" + "github.com/ProtonMail/go-crypto/openpgp" + "github.com/ProtonMail/go-crypto/openpgp/packet" "xorm.io/builder" ) @@ -106,7 +107,7 @@ func GPGKeyToEntity(ctx context.Context, k *GPGKey) (*openpgp.Entity, error) { if err != nil { return nil, err } - keys, err := checkArmoredGPGKeyString(impKey.Content) + keys, err := CheckArmoredGPGKeyString(impKey.Content) if err != nil { return nil, err } @@ -115,7 +116,7 @@ func GPGKeyToEntity(ctx context.Context, k *GPGKey) (*openpgp.Entity, error) { // parseSubGPGKey parse a sub Key func parseSubGPGKey(ownerID int64, primaryID string, pubkey *packet.PublicKey, expiry time.Time) (*GPGKey, error) { - content, err := base64EncPubKey(pubkey) + content, err := Base64EncPubKey(pubkey) if err != nil { return nil, err } @@ -141,7 +142,11 @@ func parseGPGKey(ctx context.Context, ownerID int64, e *openpgp.Entity, verified // Parse Subkeys subkeys := make([]*GPGKey, len(e.Subkeys)) for i, k := range e.Subkeys { - subs, err := parseSubGPGKey(ownerID, pubkey.KeyIdString(), k.PublicKey, expiry) + subkeyExpiry := expiry + if k.Sig.KeyLifetimeSecs != nil { + subkeyExpiry = k.PublicKey.CreationTime.Add(time.Duration(*k.Sig.KeyLifetimeSecs) * time.Second) + } + subs, err := parseSubGPGKey(ownerID, pubkey.KeyIdString(), k.PublicKey, subkeyExpiry) if err != nil { return nil, ErrGPGKeyParsing{ParseError: err} } @@ -156,7 +161,7 @@ func parseGPGKey(ctx context.Context, ownerID int64, e *openpgp.Entity, verified emails := make([]*user_model.EmailAddress, 0, len(e.Identities)) for _, ident := range e.Identities { - if ident.Revocation != nil { + if ident.Revoked(time.Now()) { continue } email := strings.ToLower(strings.TrimSpace(ident.UserId.Email)) @@ -179,7 +184,7 @@ func parseGPGKey(ctx context.Context, ownerID int64, e *openpgp.Entity, verified } } - content, err := base64EncPubKey(pubkey) + content, err := Base64EncPubKey(pubkey) if err != nil { return nil, err } @@ -203,7 +208,7 @@ func parseGPGKey(ctx context.Context, ownerID int64, e *openpgp.Entity, verified // deleteGPGKey does the actual key deletion func deleteGPGKey(ctx context.Context, keyID string) (int64, error) { if keyID == "" { - return 0, fmt.Errorf("empty KeyId forbidden") // Should never happen but just to be sure + return 0, errors.New("empty KeyId forbidden") // Should never happen but just to be sure } // Delete imported key n, err := db.GetEngine(ctx).Where("key_id=?", keyID).Delete(new(GPGKeyImport)) @@ -236,32 +241,9 @@ func DeleteGPGKey(ctx context.Context, doer *user_model.User, id int64) (err err return committer.Commit() } -func checkKeyEmails(ctx context.Context, email string, keys ...*GPGKey) (bool, string) { - uid := int64(0) - var userEmails []*user_model.EmailAddress - var user *user_model.User - for _, key := range keys { - for _, e := range key.Emails { - if e.IsActivated && (email == "" || strings.EqualFold(e.Email, email)) { - return true, e.Email - } - } - if key.Verified && key.OwnerID != 0 { - if uid != key.OwnerID { - userEmails, _ = user_model.GetEmailAddresses(ctx, key.OwnerID) - uid = key.OwnerID - user = &user_model.User{ID: uid} - 
_, _ = user_model.GetUser(ctx, user) - } - for _, e := range userEmails { - if e.IsActivated && (email == "" || strings.EqualFold(e.Email, email)) { - return true, e.Email - } - } - if user.KeepEmailPrivate && strings.EqualFold(email, user.GetEmail()) { - return true, user.GetEmail() - } - } - } - return false, email +func FindGPGKeyWithSubKeys(ctx context.Context, keyID string) ([]*GPGKey, error) { + return db.Find[GPGKey](ctx, FindGPGKeyOptions{ + KeyID: keyID, + IncludeSubKeys: true, + }) } diff --git a/models/asymkey/gpg_key_add.go b/models/asymkey/gpg_key_add.go index 11124b1366..1c7d2c1da2 100644 --- a/models/asymkey/gpg_key_add.go +++ b/models/asymkey/gpg_key_add.go @@ -10,7 +10,7 @@ import ( "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/modules/log" - "github.com/keybase/go-crypto/openpgp" + "github.com/ProtonMail/go-crypto/openpgp" ) // __________________ ________ ____ __. @@ -67,7 +67,7 @@ func addGPGSubKey(ctx context.Context, key *GPGKey) (err error) { // AddGPGKey adds new public key to database. func AddGPGKey(ctx context.Context, ownerID int64, content, token, signature string) ([]*GPGKey, error) { - ekeys, err := checkArmoredGPGKeyString(content) + ekeys, err := CheckArmoredGPGKeyString(content) if err != nil { return nil, err } @@ -83,15 +83,15 @@ func AddGPGKey(ctx context.Context, ownerID int64, content, token, signature str verified := false // Handle provided signature if signature != "" { - signer, err := openpgp.CheckArmoredDetachedSignature(ekeys, strings.NewReader(token), strings.NewReader(signature)) + signer, err := openpgp.CheckArmoredDetachedSignature(ekeys, strings.NewReader(token), strings.NewReader(signature), nil) if err != nil { - signer, err = openpgp.CheckArmoredDetachedSignature(ekeys, strings.NewReader(token+"\n"), strings.NewReader(signature)) + signer, err = openpgp.CheckArmoredDetachedSignature(ekeys, strings.NewReader(token+"\n"), strings.NewReader(signature), nil) } if err != nil { - signer, err = openpgp.CheckArmoredDetachedSignature(ekeys, strings.NewReader(token+"\r\n"), strings.NewReader(signature)) + signer, err = openpgp.CheckArmoredDetachedSignature(ekeys, strings.NewReader(token+"\r\n"), strings.NewReader(signature), nil) } if err != nil { - log.Error("Unable to validate token signature. Error: %v", err) + log.Debug("AddGPGKey CheckArmoredDetachedSignature failed: %v", err) return nil, ErrGPGInvalidTokenSignature{ ID: ekeys[0].PrimaryKey.KeyIdString(), Wrapped: err, diff --git a/models/asymkey/gpg_key_commit_verification.go b/models/asymkey/gpg_key_commit_verification.go index 26fad3bb3f..39ec893606 100644 --- a/models/asymkey/gpg_key_commit_verification.go +++ b/models/asymkey/gpg_key_commit_verification.go @@ -4,19 +4,15 @@ package asymkey import ( - "context" + "errors" "fmt" "hash" - "strings" - "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" - "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" - "code.gitea.io/gitea/modules/setting" - "github.com/keybase/go-crypto/openpgp/packet" + "github.com/ProtonMail/go-crypto/openpgp/packet" ) // __________________ ________ ____ __. @@ -70,267 +66,10 @@ const ( NoKeyFound = "gpg.error.no_gpg_keys_found" ) -// ParseCommitsWithSignature checks if signaute of commits are corresponding to users gpg keys. 
-func ParseCommitsWithSignature(ctx context.Context, oldCommits []*user_model.UserCommit, repoTrustModel repo_model.TrustModelType, isOwnerMemberCollaborator func(*user_model.User) (bool, error)) []*SignCommit { - newCommits := make([]*SignCommit, 0, len(oldCommits)) - keyMap := map[string]bool{} - - for _, c := range oldCommits { - signCommit := &SignCommit{ - UserCommit: c, - Verification: ParseCommitWithSignature(ctx, c.Commit), - } - - _ = CalculateTrustStatus(signCommit.Verification, repoTrustModel, isOwnerMemberCollaborator, &keyMap) - - newCommits = append(newCommits, signCommit) - } - return newCommits -} - -// ParseCommitWithSignature check if signature is good against keystore. -func ParseCommitWithSignature(ctx context.Context, c *git.Commit) *CommitVerification { - var committer *user_model.User - if c.Committer != nil { - var err error - // Find Committer account - committer, err = user_model.GetUserByEmail(ctx, c.Committer.Email) // This finds the user by primary email or activated email so commit will not be valid if email is not - if err != nil { // Skipping not user for committer - committer = &user_model.User{ - Name: c.Committer.Name, - Email: c.Committer.Email, - } - // We can expect this to often be an ErrUserNotExist. in the case - // it is not, however, it is important to log it. - if !user_model.IsErrUserNotExist(err) { - log.Error("GetUserByEmail: %v", err) - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Reason: "gpg.error.no_committer_account", - } - } - } - } - - // If no signature just report the committer - if c.Signature == nil { - return &CommitVerification{ - CommittingUser: committer, - Verified: false, // Default value - Reason: "gpg.error.not_signed_commit", // Default value - } - } - - // If this a SSH signature handle it differently - if strings.HasPrefix(c.Signature.Signature, "-----BEGIN SSH SIGNATURE-----") { - return ParseCommitWithSSHSignature(ctx, c, committer) - } - - // Parsing signature - sig, err := extractSignature(c.Signature.Signature) - if err != nil { // Skipping failed to extract sign - log.Error("SignatureRead err: %v", err) - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Reason: "gpg.error.extract_sign", - } - } - - keyID := tryGetKeyIDFromSignature(sig) - defaultReason := NoKeyFound - - // First check if the sig has a keyID and if so just look at that - if commitVerification := hashAndVerifyForKeyID( - ctx, - sig, - c.Signature.Payload, - committer, - keyID, - setting.AppName, - ""); commitVerification != nil { - if commitVerification.Reason == BadSignature { - defaultReason = BadSignature - } else { - return commitVerification - } - } - - // Now try to associate the signature with the committer, if present - if committer.ID != 0 { - keys, err := db.Find[GPGKey](ctx, FindGPGKeyOptions{ - OwnerID: committer.ID, - }) - if err != nil { // Skipping failed to get gpg keys of user - log.Error("ListGPGKeys: %v", err) - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Reason: "gpg.error.failed_retrieval_gpg_keys", - } - } - - if err := GPGKeyList(keys).LoadSubKeys(ctx); err != nil { - log.Error("LoadSubKeys: %v", err) - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Reason: "gpg.error.failed_retrieval_gpg_keys", - } - } - - committerEmailAddresses, _ := user_model.GetEmailAddresses(ctx, committer.ID) - activated := false - for _, e := range committerEmailAddresses { - if e.IsActivated && strings.EqualFold(e.Email, 
c.Committer.Email) { - activated = true - break - } - } - - for _, k := range keys { - // Pre-check (& optimization) that emails attached to key can be attached to the committer email and can validate - canValidate := false - email := "" - if k.Verified && activated { - canValidate = true - email = c.Committer.Email - } - if !canValidate { - for _, e := range k.Emails { - if e.IsActivated && strings.EqualFold(e.Email, c.Committer.Email) { - canValidate = true - email = e.Email - break - } - } - } - if !canValidate { - continue // Skip this key - } - - commitVerification := hashAndVerifyWithSubKeysCommitVerification(sig, c.Signature.Payload, k, committer, committer, email) - if commitVerification != nil { - return commitVerification - } - } - } - - if setting.Repository.Signing.SigningKey != "" && setting.Repository.Signing.SigningKey != "default" && setting.Repository.Signing.SigningKey != "none" { - // OK we should try the default key - gpgSettings := git.GPGSettings{ - Sign: true, - KeyID: setting.Repository.Signing.SigningKey, - Name: setting.Repository.Signing.SigningName, - Email: setting.Repository.Signing.SigningEmail, - } - if err := gpgSettings.LoadPublicKeyContent(); err != nil { - log.Error("Error getting default signing key: %s %v", gpgSettings.KeyID, err) - } else if commitVerification := verifyWithGPGSettings(ctx, &gpgSettings, sig, c.Signature.Payload, committer, keyID); commitVerification != nil { - if commitVerification.Reason == BadSignature { - defaultReason = BadSignature - } else { - return commitVerification - } - } - } - - defaultGPGSettings, err := c.GetRepositoryDefaultPublicGPGKey(false) - if err != nil { - log.Error("Error getting default public gpg key: %v", err) - } else if defaultGPGSettings == nil { - log.Warn("Unable to get defaultGPGSettings for unattached commit: %s", c.ID.String()) - } else if defaultGPGSettings.Sign { - if commitVerification := verifyWithGPGSettings(ctx, defaultGPGSettings, sig, c.Signature.Payload, committer, keyID); commitVerification != nil { - if commitVerification.Reason == BadSignature { - defaultReason = BadSignature - } else { - return commitVerification - } - } - } - - return &CommitVerification{ // Default at this stage - CommittingUser: committer, - Verified: false, - Warning: defaultReason != NoKeyFound, - Reason: defaultReason, - SigningKey: &GPGKey{ - KeyID: keyID, - }, - } -} - -func verifyWithGPGSettings(ctx context.Context, gpgSettings *git.GPGSettings, sig *packet.Signature, payload string, committer *user_model.User, keyID string) *CommitVerification { - // First try to find the key in the db - if commitVerification := hashAndVerifyForKeyID(ctx, sig, payload, committer, gpgSettings.KeyID, gpgSettings.Name, gpgSettings.Email); commitVerification != nil { - return commitVerification - } - - // Otherwise we have to parse the key - ekeys, err := checkArmoredGPGKeyString(gpgSettings.PublicKeyContent) - if err != nil { - log.Error("Unable to get default signing key: %v", err) - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Reason: "gpg.error.generate_hash", - } - } - for _, ekey := range ekeys { - pubkey := ekey.PrimaryKey - content, err := base64EncPubKey(pubkey) - if err != nil { - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Reason: "gpg.error.generate_hash", - } - } - k := &GPGKey{ - Content: content, - CanSign: pubkey.CanSign(), - KeyID: pubkey.KeyIdString(), - } - for _, subKey := range ekey.Subkeys { - content, err := base64EncPubKey(subKey.PublicKey) - 
if err != nil { - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Reason: "gpg.error.generate_hash", - } - } - k.SubsKey = append(k.SubsKey, &GPGKey{ - Content: content, - CanSign: subKey.PublicKey.CanSign(), - KeyID: subKey.PublicKey.KeyIdString(), - }) - } - if commitVerification := hashAndVerifyWithSubKeysCommitVerification(sig, payload, k, committer, &user_model.User{ - Name: gpgSettings.Name, - Email: gpgSettings.Email, - }, gpgSettings.Email); commitVerification != nil { - return commitVerification - } - if keyID == k.KeyID { - // This is a bad situation ... We have a key id that matches our default key but the signature doesn't match. - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Warning: true, - Reason: BadSignature, - } - } - } - return nil -} - func verifySign(s *packet.Signature, h hash.Hash, k *GPGKey) error { // Check if key can sign if !k.CanSign { - return fmt.Errorf("key can not sign") + return errors.New("key can not sign") } // Decode key pkey, err := base64DecPubKey(k.Content) @@ -369,7 +108,7 @@ func hashAndVerifyWithSubKeys(sig *packet.Signature, payload string, k *GPGKey) return nil, nil } -func hashAndVerifyWithSubKeysCommitVerification(sig *packet.Signature, payload string, k *GPGKey, committer, signer *user_model.User, email string) *CommitVerification { +func HashAndVerifyWithSubKeysCommitVerification(sig *packet.Signature, payload string, k *GPGKey, committer, signer *user_model.User, email string) *CommitVerification { key, err := hashAndVerifyWithSubKeys(sig, payload, k) if err != nil { // Skipping failed to generate hash return &CommitVerification{ @@ -392,78 +131,6 @@ func hashAndVerifyWithSubKeysCommitVerification(sig *packet.Signature, payload s return nil } -func hashAndVerifyForKeyID(ctx context.Context, sig *packet.Signature, payload string, committer *user_model.User, keyID, name, email string) *CommitVerification { - if keyID == "" { - return nil - } - keys, err := db.Find[GPGKey](ctx, FindGPGKeyOptions{ - KeyID: keyID, - IncludeSubKeys: true, - }) - if err != nil { - log.Error("GetGPGKeysByKeyID: %v", err) - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Reason: "gpg.error.failed_retrieval_gpg_keys", - } - } - if len(keys) == 0 { - return nil - } - for _, key := range keys { - var primaryKeys []*GPGKey - if key.PrimaryKeyID != "" { - primaryKeys, err = db.Find[GPGKey](ctx, FindGPGKeyOptions{ - KeyID: key.PrimaryKeyID, - IncludeSubKeys: true, - }) - if err != nil { - log.Error("GetGPGKeysByKeyID: %v", err) - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Reason: "gpg.error.failed_retrieval_gpg_keys", - } - } - } - - activated, email := checkKeyEmails(ctx, email, append([]*GPGKey{key}, primaryKeys...)...) - if !activated { - continue - } - - signer := &user_model.User{ - Name: name, - Email: email, - } - if key.OwnerID != 0 { - owner, err := user_model.GetUserByID(ctx, key.OwnerID) - if err == nil { - signer = owner - } else if !user_model.IsErrUserNotExist(err) { - log.Error("Failed to user_model.GetUserByID: %d for key ID: %d (%s) %v", key.OwnerID, key.ID, key.KeyID, err) - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Reason: "gpg.error.no_committer_account", - } - } - } - commitVerification := hashAndVerifyWithSubKeysCommitVerification(sig, payload, key, committer, signer, email) - if commitVerification != nil { - return commitVerification - } - } - // This is a bad situation ... 
We have a key id that is in our database but the signature doesn't match. - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Warning: true, - Reason: BadSignature, - } -} - // CalculateTrustStatus will calculate the TrustStatus for a commit verification within a repository // There are several trust models in Gitea func CalculateTrustStatus(verification *CommitVerification, repoTrustModel repo_model.TrustModelType, isOwnerMemberCollaborator func(*user_model.User) (bool, error), keyMap *map[string]bool) error { diff --git a/models/asymkey/gpg_key_common.go b/models/asymkey/gpg_key_common.go index 28cb8f4e76..76f52a3ca4 100644 --- a/models/asymkey/gpg_key_common.go +++ b/models/asymkey/gpg_key_common.go @@ -7,15 +7,16 @@ import ( "bytes" "crypto" "encoding/base64" + "errors" "fmt" "hash" "io" "strings" "time" - "github.com/keybase/go-crypto/openpgp" - "github.com/keybase/go-crypto/openpgp/armor" - "github.com/keybase/go-crypto/openpgp/packet" + "github.com/ProtonMail/go-crypto/openpgp" + "github.com/ProtonMail/go-crypto/openpgp/armor" + "github.com/ProtonMail/go-crypto/openpgp/packet" ) // __________________ ________ ____ __. @@ -33,9 +34,9 @@ import ( // This file provides common functions relating to GPG Keys -// checkArmoredGPGKeyString checks if the given key string is a valid GPG armored key. +// CheckArmoredGPGKeyString checks if the given key string is a valid GPG armored key. // The function returns the actual public key on success -func checkArmoredGPGKeyString(content string) (openpgp.EntityList, error) { +func CheckArmoredGPGKeyString(content string) (openpgp.EntityList, error) { list, err := openpgp.ReadArmoredKeyRing(strings.NewReader(content)) if err != nil { return nil, ErrGPGKeyParsing{err} @@ -43,8 +44,8 @@ func checkArmoredGPGKeyString(content string) (openpgp.EntityList, error) { return list, nil } -// base64EncPubKey encode public key content to base 64 -func base64EncPubKey(pubkey *packet.PublicKey) (string, error) { +// Base64EncPubKey encode public key content to base 64 +func Base64EncPubKey(pubkey *packet.PublicKey) (string, error) { var w bytes.Buffer err := pubkey.Serialize(&w) if err != nil { @@ -75,12 +76,12 @@ func base64DecPubKey(content string) (*packet.PublicKey, error) { // Check type pkey, ok := p.(*packet.PublicKey) if !ok { - return nil, fmt.Errorf("key is not a public key") + return nil, errors.New("key is not a public key") } return pkey, nil } -// getExpiryTime extract the expire time of primary key based on sig +// getExpiryTime extract the expiry time of primary key based on sig func getExpiryTime(e *openpgp.Entity) time.Time { expiry := time.Time{} // Extract self-sign for expire date based on : https://github.com/golang/crypto/blob/master/openpgp/keys.go#L165 @@ -88,12 +89,12 @@ func getExpiryTime(e *openpgp.Entity) time.Time { for _, ident := range e.Identities { if selfSig == nil { selfSig = ident.SelfSignature - } else if ident.SelfSignature.IsPrimaryId != nil && *ident.SelfSignature.IsPrimaryId { + } else if ident.SelfSignature != nil && ident.SelfSignature.IsPrimaryId != nil && *ident.SelfSignature.IsPrimaryId { selfSig = ident.SelfSignature break } } - if selfSig.KeyLifetimeSecs != nil { + if selfSig != nil && selfSig.KeyLifetimeSecs != nil { expiry = e.PrimaryKey.CreationTime.Add(time.Duration(*selfSig.KeyLifetimeSecs) * time.Second) } return expiry @@ -119,23 +120,23 @@ func readArmoredSign(r io.Reader) (body io.Reader, err error) { return block.Body, nil } -func extractSignature(s string) (*packet.Signature, 
error) { +func ExtractSignature(s string) (*packet.Signature, error) { r, err := readArmoredSign(strings.NewReader(s)) if err != nil { - return nil, fmt.Errorf("Failed to read signature armor") + return nil, errors.New("Failed to read signature armor") } p, err := packet.Read(r) if err != nil { - return nil, fmt.Errorf("Failed to read signature packet") + return nil, errors.New("Failed to read signature packet") } sig, ok := p.(*packet.Signature) if !ok { - return nil, fmt.Errorf("Packet is not a signature") + return nil, errors.New("Packet is not a signature") } return sig, nil } -func tryGetKeyIDFromSignature(sig *packet.Signature) string { +func TryGetKeyIDFromSignature(sig *packet.Signature) string { if sig.IssuerKeyId != nil && (*sig.IssuerKeyId) != 0 { return fmt.Sprintf("%016X", *sig.IssuerKeyId) } diff --git a/models/asymkey/gpg_key_test.go b/models/asymkey/gpg_key_test.go index d3fbb01d82..408cf15763 100644 --- a/models/asymkey/gpg_key_test.go +++ b/models/asymkey/gpg_key_test.go @@ -13,8 +13,10 @@ import ( "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/util" - "github.com/keybase/go-crypto/openpgp/packet" + "github.com/ProtonMail/go-crypto/openpgp" + "github.com/ProtonMail/go-crypto/openpgp/packet" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestCheckArmoredGPGKeyString(t *testing.T) { @@ -49,7 +51,7 @@ MkM/fdpyc2hY7Dl/+qFmN5MG5yGmMpQcX+RNNR222ibNC1D3wg== =i9b7 -----END PGP PUBLIC KEY BLOCK-----` - key, err := checkArmoredGPGKeyString(testGPGArmor) + key, err := CheckArmoredGPGKeyString(testGPGArmor) assert.NoError(t, err, "Could not parse a valid GPG public armored rsa key", key) // TODO verify value of key } @@ -70,7 +72,7 @@ OyjLLnFQiVmq7kEA/0z0CQe3ZQiQIq5zrs7Nh1XRkFAo8GlU/SGC9XFFi722 =ZiSe -----END PGP PUBLIC KEY BLOCK-----` - key, err := checkArmoredGPGKeyString(testGPGArmor) + key, err := CheckArmoredGPGKeyString(testGPGArmor) assert.NoError(t, err, "Could not parse a valid GPG public armored brainpoolP256r1 key", key) // TODO verify value of key } @@ -106,15 +108,14 @@ Av844q/BfRuVsJsK1NDNG09LC30B0l3LKBqlrRmRTUMHtgchdX2dY+p7GPOoSzlR MkM/fdpyc2hY7Dl/+qFmN5MG5yGmMpQcX+RNNR222ibNC1D3wg== =i9b7 -----END PGP PUBLIC KEY BLOCK-----` - keys, err := checkArmoredGPGKeyString(testGPGArmor) - if !assert.NotEmpty(t, keys) { - return - } + keys, err := CheckArmoredGPGKeyString(testGPGArmor) + require.NotEmpty(t, keys) + ekey := keys[0] assert.NoError(t, err, "Could not parse a valid GPG armored key", ekey) pubkey := ekey.PrimaryKey - content, err := base64EncPubKey(pubkey) + content, err := Base64EncPubKey(pubkey) assert.NoError(t, err, "Could not base64 encode a valid PublicKey content", ekey) key := &GPGKey{ @@ -175,9 +176,9 @@ committer Antoine GIRARD <sapk@sapk.fr> 1489013107 +0100 Unknown GPG key with good email ` // Reading Sign - goodSig, err := extractSignature(testGoodSigArmor) + goodSig, err := ExtractSignature(testGoodSigArmor) assert.NoError(t, err, "Could not parse a valid GPG armored signature", testGoodSigArmor) - badSig, err := extractSignature(testBadSigArmor) + badSig, err := ExtractSignature(testBadSigArmor) assert.NoError(t, err, "Could not parse a valid GPG armored signature", testBadSigArmor) // Generating hash of commit @@ -385,7 +386,7 @@ epiDVQ== =VSKJ -----END PGP PUBLIC KEY BLOCK----- ` - keys, err := checkArmoredGPGKeyString(testIssue6599) + keys, err := CheckArmoredGPGKeyString(testIssue6599) assert.NoError(t, err) if assert.NotEmpty(t, keys) { ekey := keys[0] @@ -395,11 +396,33 @@ epiDVQ== } func 
TestTryGetKeyIDFromSignature(t *testing.T) { - assert.Empty(t, tryGetKeyIDFromSignature(&packet.Signature{})) - assert.Equal(t, "038D1A3EADDBEA9C", tryGetKeyIDFromSignature(&packet.Signature{ + assert.Empty(t, TryGetKeyIDFromSignature(&packet.Signature{})) + assert.Equal(t, "038D1A3EADDBEA9C", TryGetKeyIDFromSignature(&packet.Signature{ IssuerKeyId: util.ToPointer(uint64(0x38D1A3EADDBEA9C)), })) - assert.Equal(t, "038D1A3EADDBEA9C", tryGetKeyIDFromSignature(&packet.Signature{ + assert.Equal(t, "038D1A3EADDBEA9C", TryGetKeyIDFromSignature(&packet.Signature{ IssuerFingerprint: []uint8{0xb, 0x23, 0x24, 0xc7, 0xe6, 0xfe, 0x4f, 0x3a, 0x6, 0x26, 0xc1, 0x21, 0x3, 0x8d, 0x1a, 0x3e, 0xad, 0xdb, 0xea, 0x9c}, })) } + +func TestParseGPGKey(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + assert.NoError(t, db.Insert(db.DefaultContext, &user_model.EmailAddress{UID: 1, Email: "email1@example.com", IsActivated: true})) + + // create a key for test email + e, err := openpgp.NewEntity("name", "comment", "email1@example.com", nil) + require.NoError(t, err) + k, err := parseGPGKey(db.DefaultContext, 1, e, true) + require.NoError(t, err) + assert.NotEmpty(t, k.KeyID) + assert.NotEmpty(t, k.Emails) // the key is valid, matches the email + + // then revoke the key + for _, id := range e.Identities { + id.Revocations = append(id.Revocations, &packet.Signature{RevocationReason: util.ToPointer(packet.KeyCompromised)}) + } + k, err = parseGPGKey(db.DefaultContext, 1, e, true) + require.NoError(t, err) + assert.NotEmpty(t, k.KeyID) + assert.Empty(t, k.Emails) // the key is revoked, matches no email +} diff --git a/models/asymkey/gpg_key_verify.go b/models/asymkey/gpg_key_verify.go index 01812a2d54..5ab2fd8081 100644 --- a/models/asymkey/gpg_key_verify.go +++ b/models/asymkey/gpg_key_verify.go @@ -50,7 +50,7 @@ func VerifyGPGKey(ctx context.Context, ownerID int64, keyID, token, signature st return "", err } - sig, err := extractSignature(signature) + sig, err := ExtractSignature(signature) if err != nil { return "", ErrGPGInvalidTokenSignature{ ID: key.KeyID, @@ -85,7 +85,7 @@ func VerifyGPGKey(ctx context.Context, ownerID int64, keyID, token, signature st } if signer == nil { - log.Error("Unable to validate token signature. Error: %v", err) + log.Debug("VerifyGPGKey failed: no signer") return "", ErrGPGInvalidTokenSignature{ ID: key.KeyID, } diff --git a/models/asymkey/ssh_key_commit_verification.go b/models/asymkey/ssh_key_commit_verification.go deleted file mode 100644 index 27c6df3578..0000000000 --- a/models/asymkey/ssh_key_commit_verification.go +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright 2021 The Gitea Authors. All rights reserved. -// SPDX-License-Identifier: MIT - -package asymkey - -import ( - "bytes" - "context" - "fmt" - "strings" - - "code.gitea.io/gitea/models/db" - user_model "code.gitea.io/gitea/models/user" - "code.gitea.io/gitea/modules/git" - "code.gitea.io/gitea/modules/log" - - "github.com/42wim/sshsig" -) - -// ParseCommitWithSSHSignature check if signature is good against keystore. 
-func ParseCommitWithSSHSignature(ctx context.Context, c *git.Commit, committer *user_model.User) *CommitVerification { - // Now try to associate the signature with the committer, if present - if committer.ID != 0 { - keys, err := db.Find[PublicKey](ctx, FindPublicKeyOptions{ - OwnerID: committer.ID, - NotKeytype: KeyTypePrincipal, - }) - if err != nil { // Skipping failed to get ssh keys of user - log.Error("ListPublicKeys: %v", err) - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Reason: "gpg.error.failed_retrieval_gpg_keys", - } - } - - committerEmailAddresses, err := user_model.GetEmailAddresses(ctx, committer.ID) - if err != nil { - log.Error("GetEmailAddresses: %v", err) - } - - activated := false - for _, e := range committerEmailAddresses { - if e.IsActivated && strings.EqualFold(e.Email, c.Committer.Email) { - activated = true - break - } - } - - for _, k := range keys { - if k.Verified && activated { - commitVerification := verifySSHCommitVerification(c.Signature.Signature, c.Signature.Payload, k, committer, committer, c.Committer.Email) - if commitVerification != nil { - return commitVerification - } - } - } - } - - return &CommitVerification{ - CommittingUser: committer, - Verified: false, - Reason: NoKeyFound, - } -} - -func verifySSHCommitVerification(sig, payload string, k *PublicKey, committer, signer *user_model.User, email string) *CommitVerification { - if err := sshsig.Verify(bytes.NewBuffer([]byte(payload)), []byte(sig), []byte(k.Content), "git"); err != nil { - return nil - } - - return &CommitVerification{ // Everything is ok - CommittingUser: committer, - Verified: true, - Reason: fmt.Sprintf("%s / %s", signer.Name, k.Fingerprint), - SigningUser: signer, - SigningSSHKey: k, - SigningEmail: email, - } -} diff --git a/models/asymkey/ssh_key_fingerprint.go b/models/asymkey/ssh_key_fingerprint.go index 1ed3b5df2a..4dcfe1f279 100644 --- a/models/asymkey/ssh_key_fingerprint.go +++ b/models/asymkey/ssh_key_fingerprint.go @@ -6,27 +6,13 @@ package asymkey import ( "context" "fmt" - "strings" "code.gitea.io/gitea/models/db" - "code.gitea.io/gitea/modules/log" - "code.gitea.io/gitea/modules/process" - "code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/modules/util" "golang.org/x/crypto/ssh" "xorm.io/builder" ) -// ___________.__ .__ __ -// \_ _____/|__| ____ ____ ________________________|__| _____/ |_ -// | __) | |/ \ / ___\_/ __ \_ __ \____ \_ __ \ |/ \ __\ -// | \ | | | \/ /_/ > ___/| | \/ |_> > | \/ | | \ | -// \___ / |__|___| /\___ / \___ >__| | __/|__| |__|___| /__| -// \/ \//_____/ \/ |__| \/ -// -// This file contains functions for fingerprinting SSH keys -// // The database is used in checkKeyFingerprint however most of these functions probably belong in a module // checkKeyFingerprint only checks if key fingerprint has been used as public key, @@ -41,29 +27,6 @@ func checkKeyFingerprint(ctx context.Context, fingerprint string) error { return nil } -func calcFingerprintSSHKeygen(publicKeyContent string) (string, error) { - // Calculate fingerprint. 
- tmpPath, err := writeTmpKeyFile(publicKeyContent) - if err != nil { - return "", err - } - defer func() { - if err := util.Remove(tmpPath); err != nil { - log.Warn("Unable to remove temporary key file: %s: Error: %v", tmpPath, err) - } - }() - stdout, stderr, err := process.GetManager().Exec("AddPublicKey", "ssh-keygen", "-lf", tmpPath) - if err != nil { - if strings.Contains(stderr, "is not a public key file") { - return "", ErrKeyUnableVerify{stderr} - } - return "", util.NewInvalidArgumentErrorf("'ssh-keygen -lf %s' failed with error '%s': %s", tmpPath, err, stderr) - } else if len(stdout) < 2 { - return "", util.NewInvalidArgumentErrorf("not enough output for calculating fingerprint: %s", stdout) - } - return strings.Split(stdout, " ")[1], nil -} - func calcFingerprintNative(publicKeyContent string) (string, error) { // Calculate fingerprint. pk, _, _, _, err := ssh.ParseAuthorizedKey([]byte(publicKeyContent)) @@ -75,15 +38,12 @@ func calcFingerprintNative(publicKeyContent string) (string, error) { // CalcFingerprint calculate public key's fingerprint func CalcFingerprint(publicKeyContent string) (string, error) { - // Call the method based on configuration - useNative := setting.SSH.KeygenPath == "" - calcFn := util.Iif(useNative, calcFingerprintNative, calcFingerprintSSHKeygen) - fp, err := calcFn(publicKeyContent) + fp, err := calcFingerprintNative(publicKeyContent) if err != nil { if IsErrKeyUnableVerify(err) { return "", err } - return "", fmt.Errorf("CalcFingerprint(%s): %w", util.Iif(useNative, "native", "ssh-keygen"), err) + return "", fmt.Errorf("CalcFingerprint: %w", err) } return fp, nil } diff --git a/models/asymkey/ssh_key_parse.go b/models/asymkey/ssh_key_parse.go index 94b1cf112b..fc39f28624 100644 --- a/models/asymkey/ssh_key_parse.go +++ b/models/asymkey/ssh_key_parse.go @@ -10,14 +10,12 @@ import ( "encoding/base64" "encoding/binary" "encoding/pem" + "errors" "fmt" "math/big" - "os" - "strconv" "strings" "code.gitea.io/gitea/modules/log" - "code.gitea.io/gitea/modules/process" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" @@ -93,7 +91,7 @@ func parseKeyString(content string) (string, error) { block, _ := pem.Decode([]byte(content)) if block == nil { - return "", fmt.Errorf("failed to parse PEM block containing the public key") + return "", errors.New("failed to parse PEM block containing the public key") } if strings.Contains(block.Type, "PRIVATE") { return "", ErrKeyIsPrivate @@ -174,20 +172,9 @@ func CheckPublicKeyString(content string) (_ string, err error) { return content, nil } - var ( - fnName string - keyType string - length int - ) - if len(setting.SSH.KeygenPath) == 0 { - fnName = "SSHNativeParsePublicKey" - keyType, length, err = SSHNativeParsePublicKey(content) - } else { - fnName = "SSHKeyGenParsePublicKey" - keyType, length, err = SSHKeyGenParsePublicKey(content) - } + keyType, length, err := SSHNativeParsePublicKey(content) if err != nil { - return "", fmt.Errorf("%s: %w", fnName, err) + return "", fmt.Errorf("SSHNativeParsePublicKey: %w", err) } log.Trace("Key info [native: %v]: %s-%d", setting.SSH.StartBuiltinServer, keyType, length) @@ -221,7 +208,7 @@ func SSHNativeParsePublicKey(keyLine string) (string, int, error) { // The ssh library can parse the key, so next we find out what key exactly we have. 
switch pkey.Type() { - case ssh.KeyAlgoDSA: + case ssh.KeyAlgoDSA: //nolint:staticcheck // it's deprecated rawPub := struct { Name string P, Q, G, Y *big.Int @@ -257,56 +244,3 @@ func SSHNativeParsePublicKey(keyLine string) (string, int, error) { } return "", 0, fmt.Errorf("unsupported key length detection for type: %s", pkey.Type()) } - -// writeTmpKeyFile writes key content to a temporary file -// and returns the name of that file, along with any possible errors. -func writeTmpKeyFile(content string) (string, error) { - tmpFile, err := os.CreateTemp(setting.SSH.KeyTestPath, "gitea_keytest") - if err != nil { - return "", fmt.Errorf("TempFile: %w", err) - } - defer tmpFile.Close() - - if _, err = tmpFile.WriteString(content); err != nil { - return "", fmt.Errorf("WriteString: %w", err) - } - return tmpFile.Name(), nil -} - -// SSHKeyGenParsePublicKey extracts key type and length using ssh-keygen. -func SSHKeyGenParsePublicKey(key string) (string, int, error) { - tmpName, err := writeTmpKeyFile(key) - if err != nil { - return "", 0, fmt.Errorf("writeTmpKeyFile: %w", err) - } - defer func() { - if err := util.Remove(tmpName); err != nil { - log.Warn("Unable to remove temporary key file: %s: Error: %v", tmpName, err) - } - }() - - keygenPath := setting.SSH.KeygenPath - if len(keygenPath) == 0 { - keygenPath = "ssh-keygen" - } - - stdout, stderr, err := process.GetManager().Exec("SSHKeyGenParsePublicKey", keygenPath, "-lf", tmpName) - if err != nil { - return "", 0, fmt.Errorf("fail to parse public key: %s - %s", err, stderr) - } - if strings.Contains(stdout, "is not a public key file") { - return "", 0, ErrKeyUnableVerify{stdout} - } - - fields := strings.Split(stdout, " ") - if len(fields) < 4 { - return "", 0, fmt.Errorf("invalid public key line: %s", stdout) - } - - keyType := strings.Trim(fields[len(fields)-1], "()\r\n") - length, err := strconv.ParseInt(fields[0], 10, 32) - if err != nil { - return "", 0, err - } - return strings.ToLower(keyType), int(length), nil -} diff --git a/models/asymkey/ssh_key_test.go b/models/asymkey/ssh_key_test.go index 3650f1892f..21e4ddf62e 100644 --- a/models/asymkey/ssh_key_test.go +++ b/models/asymkey/ssh_key_test.go @@ -42,29 +42,7 @@ func Test_SSHParsePublicKey(t *testing.T) { keyTypeN, lengthN, err := SSHNativeParsePublicKey(tc.content) assert.NoError(t, err) assert.Equal(t, tc.keyType, keyTypeN) - assert.EqualValues(t, tc.length, lengthN) - }) - if tc.skipSSHKeygen { - return - } - t.Run("SSHKeygen", func(t *testing.T) { - keyTypeK, lengthK, err := SSHKeyGenParsePublicKey(tc.content) - if err != nil { - // Some servers do not support ecdsa format. 
- if !strings.Contains(err.Error(), "line 1 too long:") { - assert.FailNow(t, "%v", err) - } - } - assert.Equal(t, tc.keyType, keyTypeK) - assert.EqualValues(t, tc.length, lengthK) - }) - t.Run("SSHParseKeyNative", func(t *testing.T) { - keyTypeK, lengthK, err := SSHNativeParsePublicKey(tc.content) - if err != nil { - assert.FailNow(t, "%v", err) - } - assert.Equal(t, tc.keyType, keyTypeK) - assert.EqualValues(t, tc.length, lengthK) + assert.Equal(t, tc.length, lengthN) }) }) } @@ -186,14 +164,6 @@ func Test_calcFingerprint(t *testing.T) { assert.NoError(t, err) assert.Equal(t, tc.fp, fpN) }) - if tc.skipSSHKeygen { - return - } - t.Run("SSHKeygen", func(t *testing.T) { - fpK, err := calcFingerprintSSHKeygen(tc.content) - assert.NoError(t, err) - assert.Equal(t, tc.fp, fpK) - }) }) } } diff --git a/models/asymkey/ssh_key_verify.go b/models/asymkey/ssh_key_verify.go index 208288c77b..0cf29ca9f1 100644 --- a/models/asymkey/ssh_key_verify.go +++ b/models/asymkey/ssh_key_verify.go @@ -4,8 +4,8 @@ package asymkey import ( - "bytes" "context" + "strings" "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/modules/log" @@ -30,12 +30,12 @@ func VerifySSHKey(ctx context.Context, ownerID int64, fingerprint, token, signat return "", ErrKeyNotExist{} } - err = sshsig.Verify(bytes.NewBuffer([]byte(token)), []byte(signature), []byte(key.Content), "gitea") + err = sshsig.Verify(strings.NewReader(token), []byte(signature), []byte(key.Content), "gitea") if err != nil { // edge case for Windows based shells that will add CR LF if piped to ssh-keygen command // see https://github.com/PowerShell/PowerShell/issues/5974 - if sshsig.Verify(bytes.NewBuffer([]byte(token+"\r\n")), []byte(signature), []byte(key.Content), "gitea") != nil { - log.Error("Unable to validate token signature. 
Error: %v", err) + if sshsig.Verify(strings.NewReader(token+"\r\n"), []byte(signature), []byte(key.Content), "gitea") != nil { + log.Debug("VerifySSHKey sshsig.Verify failed: %v", err) return "", ErrSSHInvalidTokenSignature{ Fingerprint: key.Fingerprint, } diff --git a/models/auth/access_token_scope.go b/models/auth/access_token_scope.go index 897ff3fc9e..3eae19b2a5 100644 --- a/models/auth/access_token_scope.go +++ b/models/auth/access_token_scope.go @@ -5,6 +5,7 @@ package auth import ( "fmt" + "slices" "strings" "code.gitea.io/gitea/models/perm" @@ -14,7 +15,7 @@ import ( type AccessTokenScopeCategory int const ( - AccessTokenScopeCategoryActivityPub = iota + AccessTokenScopeCategoryActivityPub AccessTokenScopeCategory = iota AccessTokenScopeCategoryAdmin AccessTokenScopeCategoryMisc // WARN: this is now just a placeholder, don't remove it which will change the following values AccessTokenScopeCategoryNotification @@ -193,6 +194,14 @@ var accessTokenScopes = map[AccessTokenScopeLevel]map[AccessTokenScopeCategory]A }, } +func GetAccessTokenCategories() (res []string) { + for _, cat := range accessTokenScopes[Read] { + res = append(res, strings.TrimPrefix(string(cat), "read:")) + } + slices.Sort(res) + return res +} + // GetRequiredScopes gets the specific scopes for a given level and categories func GetRequiredScopes(level AccessTokenScopeLevel, scopeCategories ...AccessTokenScopeCategory) []AccessTokenScope { scopes := make([]AccessTokenScope, 0, len(scopeCategories)) @@ -204,12 +213,7 @@ func GetRequiredScopes(level AccessTokenScopeLevel, scopeCategories ...AccessTok // ContainsCategory checks if a list of categories contains a specific category func ContainsCategory(categories []AccessTokenScopeCategory, category AccessTokenScopeCategory) bool { - for _, c := range categories { - if c == category { - return true - } - } - return false + return slices.Contains(categories, category) } // GetScopeLevelFromAccessMode converts permission access mode to scope level @@ -270,6 +274,9 @@ func (s AccessTokenScope) parse() (accessTokenScopeBitmap, error) { // StringSlice returns the AccessTokenScope as a []string func (s AccessTokenScope) StringSlice() []string { + if s == "" { + return nil + } return strings.Split(string(s), ",") } @@ -283,6 +290,10 @@ func (s AccessTokenScope) Normalize() (AccessTokenScope, error) { return bitmap.toScope(), nil } +func (s AccessTokenScope) HasPermissionScope() bool { + return s != "" && s != AccessTokenScopePublicOnly +} + // PublicOnly checks if this token scope is limited to public resources func (s AccessTokenScope) PublicOnly() (bool, error) { bitmap, err := s.parse() diff --git a/models/auth/access_token_scope_test.go b/models/auth/access_token_scope_test.go index a6097e45d7..b93c25528f 100644 --- a/models/auth/access_token_scope_test.go +++ b/models/auth/access_token_scope_test.go @@ -17,6 +17,7 @@ type scopeTestNormalize struct { } func TestAccessTokenScope_Normalize(t *testing.T) { + assert.Equal(t, []string{"activitypub", "admin", "issue", "misc", "notification", "organization", "package", "repository", "user"}, GetAccessTokenCategories()) tests := []scopeTestNormalize{ {"", "", nil}, {"write:misc,write:notification,read:package,write:notification,public-only", "public-only,write:misc,write:notification,read:package", nil}, @@ -25,13 +26,13 @@ func TestAccessTokenScope_Normalize(t *testing.T) { {"write:activitypub,write:admin,write:misc,write:notification,write:organization,write:package,write:issue,write:repository,write:user,public-only", 
"public-only,all", nil}, } - for _, scope := range []string{"activitypub", "admin", "misc", "notification", "organization", "package", "issue", "repository", "user"} { + for _, scope := range GetAccessTokenCategories() { tests = append(tests, - scopeTestNormalize{AccessTokenScope(fmt.Sprintf("read:%s", scope)), AccessTokenScope(fmt.Sprintf("read:%s", scope)), nil}, - scopeTestNormalize{AccessTokenScope(fmt.Sprintf("write:%s", scope)), AccessTokenScope(fmt.Sprintf("write:%s", scope)), nil}, - scopeTestNormalize{AccessTokenScope(fmt.Sprintf("write:%[1]s,read:%[1]s", scope)), AccessTokenScope(fmt.Sprintf("write:%s", scope)), nil}, - scopeTestNormalize{AccessTokenScope(fmt.Sprintf("read:%[1]s,write:%[1]s", scope)), AccessTokenScope(fmt.Sprintf("write:%s", scope)), nil}, - scopeTestNormalize{AccessTokenScope(fmt.Sprintf("read:%[1]s,write:%[1]s,write:%[1]s", scope)), AccessTokenScope(fmt.Sprintf("write:%s", scope)), nil}, + scopeTestNormalize{AccessTokenScope("read:" + scope), AccessTokenScope("read:" + scope), nil}, + scopeTestNormalize{AccessTokenScope("write:" + scope), AccessTokenScope("write:" + scope), nil}, + scopeTestNormalize{AccessTokenScope(fmt.Sprintf("write:%[1]s,read:%[1]s", scope)), AccessTokenScope("write:" + scope), nil}, + scopeTestNormalize{AccessTokenScope(fmt.Sprintf("read:%[1]s,write:%[1]s", scope)), AccessTokenScope("write:" + scope), nil}, + scopeTestNormalize{AccessTokenScope(fmt.Sprintf("read:%[1]s,write:%[1]s,write:%[1]s", scope)), AccessTokenScope("write:" + scope), nil}, ) } @@ -59,23 +60,23 @@ func TestAccessTokenScope_HasScope(t *testing.T) { {"public-only", "read:issue", false, nil}, } - for _, scope := range []string{"activitypub", "admin", "misc", "notification", "organization", "package", "issue", "repository", "user"} { + for _, scope := range GetAccessTokenCategories() { tests = append(tests, scopeTestHasScope{ - AccessTokenScope(fmt.Sprintf("read:%s", scope)), - AccessTokenScope(fmt.Sprintf("read:%s", scope)), true, nil, + AccessTokenScope("read:" + scope), + AccessTokenScope("read:" + scope), true, nil, }, scopeTestHasScope{ - AccessTokenScope(fmt.Sprintf("write:%s", scope)), - AccessTokenScope(fmt.Sprintf("write:%s", scope)), true, nil, + AccessTokenScope("write:" + scope), + AccessTokenScope("write:" + scope), true, nil, }, scopeTestHasScope{ - AccessTokenScope(fmt.Sprintf("write:%s", scope)), - AccessTokenScope(fmt.Sprintf("read:%s", scope)), true, nil, + AccessTokenScope("write:" + scope), + AccessTokenScope("read:" + scope), true, nil, }, scopeTestHasScope{ - AccessTokenScope(fmt.Sprintf("read:%s", scope)), - AccessTokenScope(fmt.Sprintf("write:%s", scope)), false, nil, + AccessTokenScope("read:" + scope), + AccessTokenScope("write:" + scope), false, nil, }, ) } diff --git a/models/auth/auth_token.go b/models/auth/auth_token.go index 81f07d1a83..54ff5a0d75 100644 --- a/models/auth/auth_token.go +++ b/models/auth/auth_token.go @@ -15,7 +15,7 @@ import ( var ErrAuthTokenNotExist = util.NewNotExistErrorf("auth token does not exist") -type AuthToken struct { //nolint:revive +type AuthToken struct { //nolint:revive // export stutter ID string `xorm:"pk"` TokenHash string UserID int64 `xorm:"INDEX"` diff --git a/models/auth/oauth2.go b/models/auth/oauth2.go index c270e4856e..c2b6690116 100644 --- a/models/auth/oauth2.go +++ b/models/auth/oauth2.go @@ -12,6 +12,7 @@ import ( "fmt" "net" "net/url" + "slices" "strings" "code.gitea.io/gitea/models/db" @@ -511,12 +512,7 @@ func (grant *OAuth2Grant) IncreaseCounter(ctx context.Context) error { // ScopeContains 
returns true if the grant scope contains the specified scope func (grant *OAuth2Grant) ScopeContains(scope string) bool { - for _, currentScope := range strings.Split(grant.Scope, " ") { - if scope == currentScope { - return true - } - } - return false + return slices.Contains(strings.Split(grant.Scope, " "), scope) } // SetNonce updates the current nonce value of a grant diff --git a/models/auth/oauth2_test.go b/models/auth/oauth2_test.go index 43daa0b5ec..c6626b283e 100644 --- a/models/auth/oauth2_test.go +++ b/models/auth/oauth2_test.go @@ -25,7 +25,7 @@ func TestOAuth2Application_GenerateClientSecret(t *testing.T) { func BenchmarkOAuth2Application_GenerateClientSecret(b *testing.B) { assert.NoError(b, unittest.PrepareTestDatabase()) app := unittest.AssertExistsAndLoadBean(b, &auth_model.OAuth2Application{ID: 1}) - for i := 0; i < b.N; i++ { + for b.Loop() { _, _ = app.GenerateClientSecret(db.DefaultContext) } } @@ -126,7 +126,7 @@ func TestOAuth2Application_CreateGrant(t *testing.T) { assert.NotNil(t, grant) assert.Equal(t, int64(2), grant.UserID) assert.Equal(t, int64(1), grant.ApplicationID) - assert.Equal(t, "", grant.Scope) + assert.Empty(t, grant.Scope) } //////////////////// Grant diff --git a/models/auth/source.go b/models/auth/source.go index a3a250cd91..7d7bc0f03c 100644 --- a/models/auth/source.go +++ b/models/auth/source.go @@ -58,6 +58,15 @@ var Names = map[Type]string{ // Config represents login config as far as the db is concerned type Config interface { convert.Conversion + SetAuthSource(*Source) +} + +type ConfigBase struct { + AuthSource *Source +} + +func (p *ConfigBase) SetAuthSource(s *Source) { + p.AuthSource = s } // SkipVerifiable configurations provide a IsSkipVerify to check if SkipVerify is set @@ -104,19 +113,15 @@ func RegisterTypeConfig(typ Type, exemplar Config) { } } -// SourceSettable configurations can have their authSource set on them -type SourceSettable interface { - SetAuthSource(*Source) -} - // Source represents an external way for authorizing users. type Source struct { - ID int64 `xorm:"pk autoincr"` - Type Type - Name string `xorm:"UNIQUE"` - IsActive bool `xorm:"INDEX NOT NULL DEFAULT false"` - IsSyncEnabled bool `xorm:"INDEX NOT NULL DEFAULT false"` - Cfg convert.Conversion `xorm:"TEXT"` + ID int64 `xorm:"pk autoincr"` + Type Type + Name string `xorm:"UNIQUE"` + IsActive bool `xorm:"INDEX NOT NULL DEFAULT false"` + IsSyncEnabled bool `xorm:"INDEX NOT NULL DEFAULT false"` + TwoFactorPolicy string `xorm:"two_factor_policy NOT NULL DEFAULT ''"` + Cfg Config `xorm:"TEXT"` CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` @@ -140,9 +145,7 @@ func (source *Source) BeforeSet(colName string, val xorm.Cell) { return } source.Cfg = constructor() - if settable, ok := source.Cfg.(SourceSettable); ok { - settable.SetAuthSource(source) - } + source.Cfg.SetAuthSource(source) } } @@ -200,6 +203,10 @@ func (source *Source) SkipVerify() bool { return ok && skipVerifiable.IsSkipVerify() } +func (source *Source) TwoFactorShouldSkip() bool { + return source.TwoFactorPolicy == "skip" +} + // CreateSource inserts a AuthSource in the DB if not already // existing with the given name. 
func CreateSource(ctx context.Context, source *Source) error { @@ -223,9 +230,7 @@ func CreateSource(ctx context.Context, source *Source) error { return nil } - if settable, ok := source.Cfg.(SourceSettable); ok { - settable.SetAuthSource(source) - } + source.Cfg.SetAuthSource(source) registerableSource, ok := source.Cfg.(RegisterableSource) if !ok { @@ -320,9 +325,7 @@ func UpdateSource(ctx context.Context, source *Source) error { return nil } - if settable, ok := source.Cfg.(SourceSettable); ok { - settable.SetAuthSource(source) - } + source.Cfg.SetAuthSource(source) registerableSource, ok := source.Cfg.(RegisterableSource) if !ok { diff --git a/models/auth/source_test.go b/models/auth/source_test.go index 84aede0a6b..64c7460b64 100644 --- a/models/auth/source_test.go +++ b/models/auth/source_test.go @@ -19,6 +19,8 @@ import ( ) type TestSource struct { + auth_model.ConfigBase + Provider string ClientID string ClientSecret string diff --git a/models/auth/twofactor.go b/models/auth/twofactor.go index d0c341a192..200ce7c7c0 100644 --- a/models/auth/twofactor.go +++ b/models/auth/twofactor.go @@ -164,3 +164,13 @@ func DeleteTwoFactorByID(ctx context.Context, id, userID int64) error { } return nil } + +func HasTwoFactorOrWebAuthn(ctx context.Context, id int64) (bool, error) { + has, err := HasTwoFactorByUID(ctx, id) + if err != nil { + return false, err + } else if has { + return true, nil + } + return HasWebAuthnRegistrationsByUID(ctx, id) +} diff --git a/models/db/context.go b/models/db/context.go index 51627712b1..ad99ada8c8 100644 --- a/models/db/context.go +++ b/models/db/context.go @@ -67,7 +67,7 @@ func contextSafetyCheck(e Engine) { _ = e.SQL("SELECT 1").Iterate(&m{}, func(int, any) error { callers := make([]uintptr, 32) callerNum := runtime.Callers(1, callers) - for i := 0; i < callerNum; i++ { + for i := range callerNum { if funcName := runtime.FuncForPC(callers[i]).Name(); funcName == "xorm.io/xorm.(*Session).Iterate" { contextSafetyDeniedFuncPCs = append(contextSafetyDeniedFuncPCs, callers[i]) } @@ -82,7 +82,7 @@ func contextSafetyCheck(e Engine) { // it should be very fast: xxxx ns/op callers := make([]uintptr, 32) callerNum := runtime.Callers(3, callers) // skip 3: runtime.Callers, contextSafetyCheck, GetEngine - for i := 0; i < callerNum; i++ { + for i := range callerNum { if slices.Contains(contextSafetyDeniedFuncPCs, callers[i]) { panic(errors.New("using database context in an iterator would cause corrupted results")) } @@ -178,6 +178,15 @@ func WithTx(parentCtx context.Context, f func(ctx context.Context) error) error return txWithNoCheck(parentCtx, f) } +// WithTx2 is similar to WithTx, but it has two return values: result and error. 
+func WithTx2[T any](parentCtx context.Context, f func(ctx context.Context) (T, error)) (ret T, errRet error) { + errRet = WithTx(parentCtx, func(ctx context.Context) (errInner error) { + ret, errInner = f(ctx) + return errInner + }) + return ret, errRet +} + func txWithNoCheck(parentCtx context.Context, f func(ctx context.Context) error) error { sess := xormEngine.NewSession() defer sess.Close() @@ -289,6 +298,9 @@ func FindIDs(ctx context.Context, tableName, idCol string, cond builder.Cond) ([ // DecrByIDs decreases the given column for entities of the "bean" type with one of the given ids by one // Timestamps of the entities won't be updated func DecrByIDs(ctx context.Context, ids []int64, decrCol string, bean any) error { + if len(ids) == 0 { + return nil + } _, err := GetEngine(ctx).Decr(decrCol).In("id", ids).NoAutoCondition().NoAutoTime().Update(bean) return err } diff --git a/models/db/context_test.go b/models/db/context_test.go index e8c6b74d93..a6bd11d2ae 100644 --- a/models/db/context_test.go +++ b/models/db/context_test.go @@ -118,7 +118,7 @@ func TestContextSafety(t *testing.T) { }) return nil }) - assert.EqualValues(t, testCount, actualCount) + assert.Equal(t, testCount, actualCount) // deny the bad usages assert.PanicsWithError(t, "using database context in an iterator would cause corrupted results", func() { diff --git a/models/db/engine.go b/models/db/engine.go index 91015f7038..ba287d58f0 100755 --- a/models/db/engine.go +++ b/models/db/engine.go @@ -127,7 +127,7 @@ func IsTableNotEmpty(beanOrTableName any) (bool, error) { // DeleteAllRecords will delete all the records of this table func DeleteAllRecords(tableName string) error { - _, err := xormEngine.Exec(fmt.Sprintf("DELETE FROM %s", tableName)) + _, err := xormEngine.Exec("DELETE FROM " + tableName) return err } diff --git a/models/db/engine_hook.go b/models/db/engine_hook.go index b4c543c3dd..8709a2c2a1 100644 --- a/models/db/engine_hook.go +++ b/models/db/engine_hook.go @@ -7,28 +7,41 @@ import ( "context" "time" + "code.gitea.io/gitea/modules/gtprof" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" "xorm.io/xorm/contexts" ) -type SlowQueryHook struct { +type EngineHook struct { Threshold time.Duration Logger log.Logger } -var _ contexts.Hook = (*SlowQueryHook)(nil) +var _ contexts.Hook = (*EngineHook)(nil) -func (*SlowQueryHook) BeforeProcess(c *contexts.ContextHook) (context.Context, error) { - return c.Ctx, nil +func (*EngineHook) BeforeProcess(c *contexts.ContextHook) (context.Context, error) { + ctx, _ := gtprof.GetTracer().Start(c.Ctx, gtprof.TraceSpanDatabase) + return ctx, nil } -func (h *SlowQueryHook) AfterProcess(c *contexts.ContextHook) error { +func (h *EngineHook) AfterProcess(c *contexts.ContextHook) error { + span := gtprof.GetContextSpan(c.Ctx) + if span != nil { + // Do not record SQL parameters here: + // * It shouldn't expose the parameters because they contain sensitive information, end users need to report the trace details safely. + // * Some parameters contain quite long texts, waste memory and are difficult to display. + span.SetAttributeString(gtprof.TraceAttrDbSQL, c.SQL) + span.End() + } else { + setting.PanicInDevOrTesting("span in database engine hook is nil") + } if c.ExecuteTime >= h.Threshold { // 8 is the amount of skips passed to runtime.Caller, so that in the log the correct function // is being displayed (the function that ultimately wants to execute the query in the code) // instead of the function of the slow query hook being called. 
- h.Logger.Log(8, log.WARN, "[Slow SQL Query] %s %v - %v", c.SQL, c.Args, c.ExecuteTime) + h.Logger.Log(8, &log.Event{Level: log.WARN}, "[Slow SQL Query] %s %v - %v", c.SQL, c.Args, c.ExecuteTime) } return nil } diff --git a/models/db/engine_init.go b/models/db/engine_init.go index da85018957..bb02aff274 100644 --- a/models/db/engine_init.go +++ b/models/db/engine_init.go @@ -42,9 +42,10 @@ func newXORMEngine() (*xorm.Engine, error) { if err != nil { return nil, err } - if setting.Database.Type == "mysql" { + switch setting.Database.Type { + case "mysql": engine.Dialect().SetParams(map[string]string{"rowFormat": "DYNAMIC"}) - } else if setting.Database.Type == "mssql" { + case "mssql": engine.Dialect().SetParams(map[string]string{"DEFAULT_VARCHAR": "nvarchar"}) } engine.SetSchema(setting.Database.Schema) @@ -72,7 +73,7 @@ func InitEngine(ctx context.Context) error { xe.SetDefaultContext(ctx) if setting.Database.SlowQueryThreshold > 0 { - xe.AddHook(&SlowQueryHook{ + xe.AddHook(&EngineHook{ Threshold: setting.Database.SlowQueryThreshold, Logger: log.GetLogger("xorm"), }) @@ -105,7 +106,7 @@ func UnsetDefaultEngine() { // When called from the "doctor" command, the migration function is a version check // that prevents the doctor from fixing anything in the database if the migration level // is different from the expected value. -func InitEngineWithMigration(ctx context.Context, migrateFunc func(*xorm.Engine) error) (err error) { +func InitEngineWithMigration(ctx context.Context, migrateFunc func(context.Context, *xorm.Engine) error) (err error) { if err = InitEngine(ctx); err != nil { return err } @@ -122,7 +123,7 @@ func InitEngineWithMigration(ctx context.Context, migrateFunc func(*xorm.Engine) // Installation should only be being re-run if users want to recover an old database. // However, we should think carefully about should we support re-install on an installed instance, // as there may be other problems due to secret reinitialization. - if err = migrateFunc(xormEngine); err != nil { + if err = migrateFunc(ctx, xormEngine); err != nil { return fmt.Errorf("migrate: %w", err) } diff --git a/models/db/engine_test.go b/models/db/engine_test.go index e3dbfbdc24..a236f83735 100644 --- a/models/db/engine_test.go +++ b/models/db/engine_test.go @@ -15,6 +15,7 @@ import ( _ "code.gitea.io/gitea/cmd" // for TestPrimaryKeys "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestDumpDatabase(t *testing.T) { @@ -51,7 +52,7 @@ func TestDeleteOrphanedObjects(t *testing.T) { countAfter, err := db.GetEngine(db.DefaultContext).Count(&issues_model.PullRequest{}) assert.NoError(t, err) - assert.EqualValues(t, countBefore, countAfter) + assert.Equal(t, countBefore, countAfter) } func TestPrimaryKeys(t *testing.T) { @@ -62,9 +63,7 @@ func TestPrimaryKeys(t *testing.T) { // Import "code.gitea.io/gitea/cmd" to make sure each db.RegisterModel in init functions has been called. 
beans, err := db.NamesToBean() - if err != nil { - t.Fatal(err) - } + require.NoError(t, err) whitelist := map[string]string{ "the_table_name_to_skip_checking": "Write a note here to explain why", @@ -79,8 +78,6 @@ func TestPrimaryKeys(t *testing.T) { t.Logf("ignore %q because %q", table.Name, why) continue } - if len(table.PrimaryKeys) == 0 { - t.Errorf("table %q has no primary key", table.Name) - } + assert.NotEmpty(t, table.PrimaryKeys, "table %q has no primary key", table.Name) } } diff --git a/models/db/error.go b/models/db/error.go index 665e970e17..d47c7adac4 100644 --- a/models/db/error.go +++ b/models/db/error.go @@ -65,7 +65,7 @@ func (err ErrNotExist) Error() string { if err.ID != 0 { return fmt.Sprintf("%s does not exist [id: %d]", name, err.ID) } - return fmt.Sprintf("%s does not exist", name) + return name + " does not exist" } // Unwrap unwraps this as a ErrNotExist err diff --git a/models/db/list_test.go b/models/db/list_test.go index 45194611f8..170473a968 100644 --- a/models/db/list_test.go +++ b/models/db/list_test.go @@ -47,6 +47,6 @@ func TestFind(t *testing.T) { repoUnits, newCnt, err := db.FindAndCount[repo_model.RepoUnit](db.DefaultContext, opts) assert.NoError(t, err) - assert.EqualValues(t, cnt, newCnt) + assert.Equal(t, cnt, newCnt) assert.Len(t, repoUnits, repoUnitCount) } diff --git a/models/db/log.go b/models/db/log.go index 307788ea2e..a9df6f541d 100644 --- a/models/db/log.go +++ b/models/db/log.go @@ -29,7 +29,7 @@ const stackLevel = 8 // Log a message with defined skip and at logging level func (l *XORMLogBridge) Log(skip int, level log.Level, format string, v ...any) { - l.logger.Log(skip+1, level, format, v...) + l.logger.Log(skip+1, &log.Event{Level: level}, format, v...) } // Debug show debug log diff --git a/models/db/name.go b/models/db/name.go index 5f98edbb28..48c7fdbce5 100644 --- a/models/db/name.go +++ b/models/db/name.go @@ -5,14 +5,13 @@ package db import ( "fmt" + "slices" "strings" "unicode/utf8" "code.gitea.io/gitea/modules/util" ) -var ErrNameEmpty = util.SilentWrap{Message: "name is empty", Err: util.ErrInvalidArgument} - // ErrNameReserved represents a "reserved name" error. type ErrNameReserved struct { Name string @@ -79,13 +78,11 @@ func (err ErrNameCharsNotAllowed) Unwrap() error { func IsUsableName(reservedNames, reservedPatterns []string, name string) error { name = strings.TrimSpace(strings.ToLower(name)) if utf8.RuneCountInString(name) == 0 { - return ErrNameEmpty + return util.NewInvalidArgumentErrorf("name is empty") } - for i := range reservedNames { - if name == reservedNames[i] { - return ErrNameReserved{name} - } + if slices.Contains(reservedNames, name) { + return ErrNameReserved{name} } for _, pat := range reservedPatterns { diff --git a/models/db/search.go b/models/db/search.go index e0a1b6bde9..44d54f21fc 100644 --- a/models/db/search.go +++ b/models/db/search.go @@ -29,7 +29,3 @@ const ( // NoConditionID means a condition to filter the records which don't match any id. // eg: "milestone_id=-1" means "find the items without any milestone. const NoConditionID int64 = -1 - -// NonExistingID means a condition to match no result (eg: a non-existing user) -// It doesn't use -1 or -2 because they are used as builtin users. 
-const NonExistingID int64 = -1000000 diff --git a/models/db/sql_postgres_with_schema.go b/models/db/sql_postgres_with_schema.go index 64b61b2ef3..812fe4a6a6 100644 --- a/models/db/sql_postgres_with_schema.go +++ b/models/db/sql_postgres_with_schema.go @@ -39,7 +39,7 @@ func (d *postgresSchemaDriver) Open(name string) (driver.Conn, error) { // golangci lint is incorrect here - there is no benefit to using driver.ExecerContext here // and in any case pq does not implement it - if execer, ok := conn.(driver.Execer); ok { //nolint:staticcheck + if execer, ok := conn.(driver.Execer); ok { //nolint:staticcheck // see above _, err := execer.Exec(`SELECT set_config( 'search_path', $1 || ',' || current_setting('search_path'), @@ -64,7 +64,7 @@ func (d *postgresSchemaDriver) Open(name string) (driver.Conn, error) { // driver.String.ConvertValue will never return err for string // golangci lint is incorrect here - there is no benefit to using stmt.ExecWithContext here - _, err = stmt.Exec([]driver.Value{schemaValue}) //nolint:staticcheck + _, err = stmt.Exec([]driver.Value{schemaValue}) //nolint:staticcheck // see above if err != nil { _ = conn.Close() return nil, err diff --git a/models/dbfs/dbfile.go b/models/dbfs/dbfile.go index dd27b5c36b..eaf506fbe6 100644 --- a/models/dbfs/dbfile.go +++ b/models/dbfs/dbfile.go @@ -46,10 +46,7 @@ func (f *file) readAt(fileMeta *dbfsMeta, offset int64, p []byte) (n int, err er blobPos := int(offset % f.blockSize) blobOffset := offset - int64(blobPos) blobRemaining := int(f.blockSize) - blobPos - needRead := len(p) - if needRead > blobRemaining { - needRead = blobRemaining - } + needRead := min(len(p), blobRemaining) if blobOffset+int64(blobPos)+int64(needRead) > fileMeta.FileSize { needRead = int(fileMeta.FileSize - blobOffset - int64(blobPos)) } @@ -66,14 +63,8 @@ func (f *file) readAt(fileMeta *dbfsMeta, offset int64, p []byte) (n int, err er blobData = nil } - canCopy := len(blobData) - blobPos - if canCopy <= 0 { - canCopy = 0 - } - realRead := needRead - if realRead > canCopy { - realRead = canCopy - } + canCopy := max(len(blobData)-blobPos, 0) + realRead := min(needRead, canCopy) if realRead > 0 { copy(p[:realRead], fileData.BlobData[blobPos:blobPos+realRead]) } @@ -113,10 +104,7 @@ func (f *file) Write(p []byte) (n int, err error) { blobPos := int(f.offset % f.blockSize) blobOffset := f.offset - int64(blobPos) blobRemaining := int(f.blockSize) - blobPos - needWrite := len(p) - if needWrite > blobRemaining { - needWrite = blobRemaining - } + needWrite := min(len(p), blobRemaining) buf := make([]byte, f.blockSize) readBytes, err := f.readAt(fileMeta, blobOffset, buf) if err != nil && !errors.Is(err, io.EOF) { diff --git a/models/dbfs/dbfs_test.go b/models/dbfs/dbfs_test.go index 96cb1014c7..0257d2bd15 100644 --- a/models/dbfs/dbfs_test.go +++ b/models/dbfs/dbfs_test.go @@ -31,15 +31,15 @@ func TestDbfsBasic(t *testing.T) { n, err := f.Write([]byte("0123456789")) // blocks: 0123 4567 89 assert.NoError(t, err) - assert.EqualValues(t, 10, n) + assert.Equal(t, 10, n) _, err = f.Seek(0, io.SeekStart) assert.NoError(t, err) buf, err := io.ReadAll(f) assert.NoError(t, err) - assert.EqualValues(t, 10, n) - assert.EqualValues(t, "0123456789", string(buf)) + assert.Equal(t, 10, n) + assert.Equal(t, "0123456789", string(buf)) // write some new data _, err = f.Seek(1, io.SeekStart) @@ -50,14 +50,14 @@ func TestDbfsBasic(t *testing.T) { // read from offset buf, err = io.ReadAll(f) assert.NoError(t, err) - assert.EqualValues(t, "9", string(buf)) + assert.Equal(t, "9", 
string(buf)) // read all _, err = f.Seek(0, io.SeekStart) assert.NoError(t, err) buf, err = io.ReadAll(f) assert.NoError(t, err) - assert.EqualValues(t, "0bcdefghi9", string(buf)) + assert.Equal(t, "0bcdefghi9", string(buf)) // write to new size _, err = f.Seek(-1, io.SeekEnd) @@ -68,7 +68,7 @@ func TestDbfsBasic(t *testing.T) { assert.NoError(t, err) buf, err = io.ReadAll(f) assert.NoError(t, err) - assert.EqualValues(t, "0bcdefghiJKLMNOP", string(buf)) + assert.Equal(t, "0bcdefghiJKLMNOP", string(buf)) // write beyond EOF and fill with zero _, err = f.Seek(5, io.SeekCurrent) @@ -79,7 +79,7 @@ func TestDbfsBasic(t *testing.T) { assert.NoError(t, err) buf, err = io.ReadAll(f) assert.NoError(t, err) - assert.EqualValues(t, "0bcdefghiJKLMNOP\x00\x00\x00\x00\x00xyzu", string(buf)) + assert.Equal(t, "0bcdefghiJKLMNOP\x00\x00\x00\x00\x00xyzu", string(buf)) // write to the block with zeros _, err = f.Seek(-6, io.SeekCurrent) @@ -90,7 +90,7 @@ func TestDbfsBasic(t *testing.T) { assert.NoError(t, err) buf, err = io.ReadAll(f) assert.NoError(t, err) - assert.EqualValues(t, "0bcdefghiJKLMNOP\x00\x00\x00ABCDzu", string(buf)) + assert.Equal(t, "0bcdefghiJKLMNOP\x00\x00\x00ABCDzu", string(buf)) assert.NoError(t, f.Close()) @@ -117,7 +117,7 @@ func TestDbfsBasic(t *testing.T) { assert.NoError(t, err) stat, err := f.Stat() assert.NoError(t, err) - assert.EqualValues(t, "test.txt", stat.Name()) + assert.Equal(t, "test.txt", stat.Name()) assert.EqualValues(t, 0, stat.Size()) _, err = f.Write([]byte("0123456789")) assert.NoError(t, err) @@ -144,7 +144,7 @@ func TestDbfsReadWrite(t *testing.T) { line, err := f2r.ReadString('\n') assert.NoError(t, err) - assert.EqualValues(t, "line 1\n", line) + assert.Equal(t, "line 1\n", line) _, err = f2r.ReadString('\n') assert.ErrorIs(t, err, io.EOF) @@ -153,7 +153,7 @@ func TestDbfsReadWrite(t *testing.T) { line, err = f2r.ReadString('\n') assert.NoError(t, err) - assert.EqualValues(t, "line 2\n", line) + assert.Equal(t, "line 2\n", line) _, err = f2r.ReadString('\n') assert.ErrorIs(t, err, io.EOF) } @@ -186,5 +186,5 @@ func TestDbfsSeekWrite(t *testing.T) { buf, err := io.ReadAll(fr) assert.NoError(t, err) - assert.EqualValues(t, "111333", string(buf)) + assert.Equal(t, "111333", string(buf)) } diff --git a/models/fixtures/access.yml b/models/fixtures/access.yml index 4171e31fef..596046e950 100644 --- a/models/fixtures/access.yml +++ b/models/fixtures/access.yml @@ -171,3 +171,9 @@ user_id: 40 repo_id: 61 mode: 4 + +- + id: 30 + user_id: 40 + repo_id: 1 + mode: 2 diff --git a/models/fixtures/action_artifact.yml b/models/fixtures/action_artifact.yml index 2c51c11ebd..ee8ef0d5ce 100644 --- a/models/fixtures/action_artifact.yml +++ b/models/fixtures/action_artifact.yml @@ -11,6 +11,24 @@ content_encoding: "" artifact_path: "abc.txt" artifact_name: "artifact-download" + status: 2 + created_unix: 1712338649 + updated_unix: 1712338649 + expired_unix: 1720114649 + +- + id: 2 + run_id: 791 + runner_id: 1 + repo_id: 4 + owner_id: 1 + commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0 + storage_path: "" + file_size: 1024 + file_compressed_size: 1024 + content_encoding: "30/20/1712348022422036662.chunk" + artifact_path: "abc.txt" + artifact_name: "artifact-download-incomplete" status: 1 created_unix: 1712338649 updated_unix: 1712338649 @@ -69,3 +87,57 @@ created_unix: 1730330775 updated_unix: 1730330775 expired_unix: 1738106775 + +- + id: 23 + run_id: 793 + runner_id: 1 + repo_id: 2 + owner_id: 2 + commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0 + storage_path: 
"27/5/1730330775594233150.chunk" + file_size: 1024 + file_compressed_size: 1024 + content_encoding: "application/zip" + artifact_path: "artifact-v4-download.zip" + artifact_name: "artifact-v4-download" + status: 2 + created_unix: 1730330775 + updated_unix: 1730330775 + expired_unix: 1738106775 + +- + id: 24 + run_id: 795 + runner_id: 1 + repo_id: 2 + owner_id: 2 + commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0 + storage_path: "27/5/1730330775594233150.chunk" + file_size: 1024 + file_compressed_size: 1024 + content_encoding: "application/zip" + artifact_path: "artifact-795-1.zip" + artifact_name: "artifact-795-1" + status: 2 + created_unix: 1730330775 + updated_unix: 1730330775 + expired_unix: 1738106775 + +- + id: 25 + run_id: 795 + runner_id: 1 + repo_id: 2 + owner_id: 2 + commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0 + storage_path: "27/5/1730330775594233150.chunk" + file_size: 1024 + file_compressed_size: 1024 + content_encoding: "application/zip" + artifact_path: "artifact-795-2.zip" + artifact_name: "artifact-795-2" + status: 2 + created_unix: 1730330775 + updated_unix: 1730330775 + expired_unix: 1738106775 diff --git a/models/fixtures/action_run.yml b/models/fixtures/action_run.yml index 1db849352f..09dfa6cccb 100644 --- a/models/fixtures/action_run.yml +++ b/models/fixtures/action_run.yml @@ -9,6 +9,7 @@ ref: "refs/heads/master" commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0" event: "push" + trigger_event: "push" is_fork_pull_request: 0 status: 1 started: 1683636528 @@ -28,6 +29,7 @@ ref: "refs/heads/master" commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0" event: "push" + trigger_event: "push" is_fork_pull_request: 0 status: 1 started: 1683636528 @@ -47,8 +49,9 @@ ref: "refs/heads/master" commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0" event: "push" + trigger_event: "push" is_fork_pull_request: 0 - status: 1 + status: 6 # running started: 1683636528 stopped: 1683636626 created: 1683636108 @@ -66,6 +69,47 @@ ref: "refs/heads/test" commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0" event: "push" + trigger_event: "push" + is_fork_pull_request: 0 + status: 1 + started: 1683636528 + stopped: 1683636626 + created: 1683636108 + updated: 1683636626 + need_approval: 0 + approved_by: 0 +- + id: 802 + title: "workflow run list" + repo_id: 5 + owner_id: 3 + workflow_id: "test.yaml" + index: 191 + trigger_user_id: 1 + ref: "refs/heads/test" + commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0" + event: "push" + trigger_event: "push" + is_fork_pull_request: 0 + status: 1 + started: 1683636528 + stopped: 1683636626 + created: 1683636108 + updated: 1683636626 + need_approval: 0 + approved_by: 0 +- + id: 803 + title: "workflow run list for user" + repo_id: 2 + owner_id: 0 + workflow_id: "test.yaml" + index: 192 + trigger_user_id: 1 + ref: "refs/heads/test" + commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0" + event: "push" + trigger_event: "push" is_fork_pull_request: 0 status: 1 started: 1683636528 @@ -74,3 +118,24 @@ updated: 1683636626 need_approval: 0 approved_by: 0 + +- + id: 795 + title: "to be deleted (test)" + repo_id: 2 + owner_id: 2 + workflow_id: "test.yaml" + index: 191 + trigger_user_id: 1 + ref: "refs/heads/test" + commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0" + event: "push" + trigger_event: "push" + is_fork_pull_request: 0 + status: 2 + started: 1683636528 + stopped: 1683636626 + created: 1683636108 + updated: 1683636626 + need_approval: 0 + approved_by: 0 diff --git a/models/fixtures/action_run_job.yml 
b/models/fixtures/action_run_job.yml index 8837e6ec2d..6c06d94aa4 100644 --- a/models/fixtures/action_run_job.yml +++ b/models/fixtures/action_run_job.yml @@ -69,3 +69,63 @@ status: 5 started: 1683636528 stopped: 1683636626 + +- + id: 198 + run_id: 795 + repo_id: 2 + owner_id: 2 + commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0 + is_fork_pull_request: 0 + name: job_1 + attempt: 1 + job_id: job_1 + task_id: 53 + status: 1 + started: 1683636528 + stopped: 1683636626 + +- + id: 199 + run_id: 795 + repo_id: 2 + owner_id: 2 + commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0 + is_fork_pull_request: 0 + name: job_2 + attempt: 1 + job_id: job_2 + task_id: 54 + status: 2 + started: 1683636528 + stopped: 1683636626 +- + id: 203 + run_id: 802 + repo_id: 5 + owner_id: 0 + commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0 + is_fork_pull_request: 0 + name: job2 + attempt: 1 + job_id: job2 + needs: '["job1"]' + task_id: 51 + status: 5 + started: 1683636528 + stopped: 1683636626 +- + id: 204 + run_id: 803 + repo_id: 2 + owner_id: 0 + commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0 + is_fork_pull_request: 0 + name: job2 + attempt: 1 + job_id: job2 + needs: '["job1"]' + task_id: 51 + status: 5 + started: 1683636528 + stopped: 1683636626 diff --git a/models/fixtures/action_runner.yml b/models/fixtures/action_runner.yml new file mode 100644 index 0000000000..ecb7214006 --- /dev/null +++ b/models/fixtures/action_runner.yml @@ -0,0 +1,51 @@ +- + id: 34346 + name: runner_to_be_deleted-user + uuid: 3EF231BD-FBB7-4E4B-9602-E6F28363EF18 + token_hash: 3EF231BD-FBB7-4E4B-9602-E6F28363EF18 + version: "1.0.0" + owner_id: 1 + repo_id: 0 + description: "This runner is going to be deleted" + agent_labels: '["runner_to_be_deleted","linux"]' +- + id: 34347 + name: runner_to_be_deleted-org + uuid: 3EF231BD-FBB7-4E4B-9602-E6F28363EF19 + token_hash: 3EF231BD-FBB7-4E4B-9602-E6F28363EF19 + version: "1.0.0" + owner_id: 3 + repo_id: 0 + description: "This runner is going to be deleted" + agent_labels: '["runner_to_be_deleted","linux"]' +- + id: 34348 + name: runner_to_be_deleted-repo1 + uuid: 3EF231BD-FBB7-4E4B-9602-E6F28363EF20 + token_hash: 3EF231BD-FBB7-4E4B-9602-E6F28363EF20 + version: "1.0.0" + owner_id: 0 + repo_id: 1 + description: "This runner is going to be deleted" + agent_labels: '["runner_to_be_deleted","linux"]' +- + id: 34349 + name: runner_to_be_deleted + uuid: 3EF231BD-FBB7-4E4B-9602-E6F28363EF17 + token_hash: 3EF231BD-FBB7-4E4B-9602-E6F28363EF17 + version: "1.0.0" + owner_id: 0 + repo_id: 0 + description: "This runner is going to be deleted" + agent_labels: '["runner_to_be_deleted","linux"]' +- + id: 34350 + name: runner_to_be_deleted-org-ephemeral + uuid: 3FF231BD-FBB7-4E4B-9602-E6F28363EF20 + token_hash: 3FF231BD-FBB7-4E4B-9602-E6F28363EF20 + ephemeral: true + version: "1.0.0" + owner_id: 3 + repo_id: 0 + description: "This runner is going to be deleted" + agent_labels: '["runner_to_be_deleted","linux"]' diff --git a/models/fixtures/action_task.yml b/models/fixtures/action_task.yml index 506a47d8a0..c79fb07050 100644 --- a/models/fixtures/action_task.yml +++ b/models/fixtures/action_task.yml @@ -117,3 +117,63 @@ log_length: 707 log_size: 90179 log_expired: 0 +- + id: 52 + job_id: 196 + attempt: 1 + runner_id: 34350 + status: 6 # running + started: 1683636528 + stopped: 1683636626 + repo_id: 4 + owner_id: 1 + commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0 + is_fork_pull_request: 0 + token_hash: 
f8d3962425466b6709b9ac51446f93260c54afe8e7b6d3686e34f991fb8a8953822b0deed86fe41a103f34bc48dbc4784222 + token_salt: ffffffffff + token_last_eight: ffffffff + log_filename: artifact-test2/2f/47.log + log_in_storage: 1 + log_length: 707 + log_size: 90179 + log_expired: 0 +- + id: 53 + job_id: 198 + attempt: 1 + runner_id: 1 + status: 1 + started: 1683636528 + stopped: 1683636626 + repo_id: 2 + owner_id: 2 + commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0 + is_fork_pull_request: 0 + token_hash: b8d3962425466b6709b9ac51446f93260c54afe8e7b6d3686e34f991fb8a8953822b0deed86fe41a103f34bc48dbc4784223 + token_salt: ffffffffff + token_last_eight: ffffffff + log_filename: artifact-test2/2f/47.log + log_in_storage: 1 + log_length: 0 + log_size: 0 + log_expired: 0 +- + id: 54 + job_id: 199 + attempt: 1 + runner_id: 1 + status: 2 + started: 1683636528 + stopped: 1683636626 + repo_id: 2 + owner_id: 2 + commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0 + is_fork_pull_request: 0 + token_hash: b8d3962425466b6709b9ac51446f93260c54afe8e7b6d3686e34f991fb8a8953822b0deed86fe41a103f34bc48dbc4784224 + token_salt: ffffffffff + token_last_eight: ffffffff + log_filename: artifact-test2/2f/47.log + log_in_storage: 1 + log_length: 0 + log_size: 0 + log_expired: 0 diff --git a/models/fixtures/branch.yml b/models/fixtures/branch.yml index 17b1869ab6..03e21d04b4 100644 --- a/models/fixtures/branch.yml +++ b/models/fixtures/branch.yml @@ -93,3 +93,123 @@ is_deleted: false deleted_by_id: 0 deleted_unix: 0 + +- + id: 16 + repo_id: 16 + name: 'master' + commit_id: '69554a64c1e6030f051e5c3f94bfbd773cd6a324' + commit_message: 'not signed commit' + commit_time: 1502042309 + pusher_id: 2 + is_deleted: false + deleted_by_id: 0 + deleted_unix: 0 + +- + id: 17 + repo_id: 16 + name: 'not-signed' + commit_id: '69554a64c1e6030f051e5c3f94bfbd773cd6a324' + commit_message: 'not signed commit' + commit_time: 1502042309 + pusher_id: 2 + is_deleted: false + deleted_by_id: 0 + deleted_unix: 0 + +- + id: 18 + repo_id: 16 + name: 'good-sign-not-yet-validated' + commit_id: '27566bd5738fc8b4e3fef3c5e72cce608537bd95' + commit_message: 'good signed commit (with not yet validated email)' + commit_time: 1502042234 + pusher_id: 2 + is_deleted: false + deleted_by_id: 0 + deleted_unix: 0 + +- + id: 19 + repo_id: 16 + name: 'good-sign' + commit_id: 'f27c2b2b03dcab38beaf89b0ab4ff61f6de63441' + commit_message: 'good signed commit' + commit_time: 1502042101 + pusher_id: 2 + is_deleted: false + deleted_by_id: 0 + deleted_unix: 0 + +- + id: 20 + repo_id: 1 + name: 'feature/1' + commit_id: '65f1bf27bc3bf70f64657658635e66094edbcb4d' + commit_message: 'Initial commit' + commit_time: 1489950479 + pusher_id: 2 + is_deleted: false + deleted_by_id: 0 + deleted_unix: 0 + +- + id: 21 + repo_id: 49 + name: 'master' + commit_id: 'aacbdfe9e1c4b47f60abe81849045fa4e96f1d75' + commit_message: "Add 'test/test.txt'" + commit_time: 1572535577 + pusher_id: 2 + is_deleted: false + deleted_by_id: 0 + deleted_unix: 0 + +- + id: 22 + repo_id: 1 + name: 'develop' + commit_id: '65f1bf27bc3bf70f64657658635e66094edbcb4d' + commit_message: "Initial commit" + commit_time: 1489927679 + pusher_id: 1 + is_deleted: false + deleted_by_id: 0 + deleted_unix: 0 + +- + id: 23 + repo_id: 3 + name: 'master' + commit_id: '2a47ca4b614a9f5a43abbd5ad851a54a616ffee6' + commit_message: "init project" + commit_time: 1497448461 + pusher_id: 1 + is_deleted: false + deleted_by_id: 0 + deleted_unix: 0 + +- + id: 24 + repo_id: 3 + name: 'test_branch' + commit_id: 'd22b4d4daa5be07329fcef6ed458f00cf3392da0' 
+ commit_message: "test commit" + commit_time: 1602935385 + pusher_id: 1 + is_deleted: false + deleted_by_id: 0 + deleted_unix: 0 + +- + id: 25 + repo_id: 54 + name: 'master' + commit_id: '73cf03db6ece34e12bf91e8853dc58f678f2f82d' + commit_message: 'Initial commit' + commit_time: 1671663402 + pusher_id: 2 + is_deleted: false + deleted_by_id: 0 + deleted_unix: 0 diff --git a/models/fixtures/commit_status.yml b/models/fixtures/commit_status.yml index 20d57975ef..87c652e53a 100644 --- a/models/fixtures/commit_status.yml +++ b/models/fixtures/commit_status.yml @@ -7,6 +7,7 @@ target_url: https://example.com/builds/ description: My awesome CI-service context: ci/awesomeness + context_hash: c65f4d64a3b14a3eced0c9b36799e66e1bd5ced7 creator_id: 2 - @@ -18,6 +19,7 @@ target_url: https://example.com/converage/ description: My awesome Coverage service context: cov/awesomeness + context_hash: 3929ac7bccd3fa1bf9b38ddedb77973b1b9a8cfe creator_id: 2 - @@ -29,6 +31,7 @@ target_url: https://example.com/converage/ description: My awesome Coverage service context: cov/awesomeness + context_hash: 3929ac7bccd3fa1bf9b38ddedb77973b1b9a8cfe creator_id: 2 - @@ -40,6 +43,7 @@ target_url: https://example.com/builds/ description: My awesome CI-service context: ci/awesomeness + context_hash: c65f4d64a3b14a3eced0c9b36799e66e1bd5ced7 creator_id: 2 - @@ -51,4 +55,5 @@ target_url: https://example.com/builds/ description: My awesome deploy service context: deploy/awesomeness + context_hash: ae9547713a6665fc4261d0756904932085a41cf2 creator_id: 2 diff --git a/models/fixtures/email_address.yml b/models/fixtures/email_address.yml index b2a0432635..0f6bd9ee6d 100644 --- a/models/fixtures/email_address.yml +++ b/models/fixtures/email_address.yml @@ -81,7 +81,7 @@ - id: 11 uid: 4 - email: user4@example.com + email: User4@Example.Com lower_email: user4@example.com is_activated: true is_primary: true diff --git a/models/fixtures/hook_task.yml b/models/fixtures/hook_task.yml index d573406b36..6023719b1e 100644 --- a/models/fixtures/hook_task.yml +++ b/models/fixtures/hook_task.yml @@ -18,7 +18,7 @@ id: 2 hook_id: 1 uuid: uuid2 - is_delivered: false + is_delivered: true - id: 3 diff --git a/models/fixtures/issue_pin.yml b/models/fixtures/issue_pin.yml new file mode 100644 index 0000000000..14b7a72d84 --- /dev/null +++ b/models/fixtures/issue_pin.yml @@ -0,0 +1,6 @@ +- + id: 1 + repo_id: 2 + issue_id: 4 + is_pull: false + pin_order: 1 diff --git a/models/fixtures/repo_transfer.yml b/models/fixtures/repo_transfer.yml index db92c95248..b12e6b207f 100644 --- a/models/fixtures/repo_transfer.yml +++ b/models/fixtures/repo_transfer.yml @@ -21,3 +21,11 @@ repo_id: 32 created_unix: 1553610671 updated_unix: 1553610671 + +- + id: 4 + doer_id: 3 + recipient_id: 1 + repo_id: 5 + created_unix: 1553610671 + updated_unix: 1553610671 diff --git a/models/fixtures/repository.yml b/models/fixtures/repository.yml index bbb028eb7b..552a78cbd2 100644 --- a/models/fixtures/repository.yml +++ b/models/fixtures/repository.yml @@ -1695,19 +1695,6 @@ close_issues_via_commit_in_any_branch: false - - id: 59 - owner_id: 2 - owner_name: user2 - lower_name: test_commit_revert - name: test_commit_revert - default_branch: main - is_empty: false - is_archived: false - is_private: true - status: 0 - num_issues: 0 - -- id: 60 owner_id: 40 owner_name: user40 diff --git a/models/fixtures/user.yml b/models/fixtures/user.yml index b3bece5589..976a236011 100644 --- a/models/fixtures/user.yml +++ b/models/fixtures/user.yml @@ -67,7 +67,7 @@ num_followers: 2 num_following: 1 
num_stars: 2 - num_repos: 15 + num_repos: 14 num_teams: 0 num_members: 0 visibility: 0 diff --git a/models/fixtures/webhook.yml b/models/fixtures/webhook.yml index f62bae1f31..ec282914b8 100644 --- a/models/fixtures/webhook.yml +++ b/models/fixtures/webhook.yml @@ -1,7 +1,7 @@ - id: 1 repo_id: 1 - url: www.example.com/url1 + url: https://www.example.com/url1 content_type: 1 # json events: '{"push_only":true,"send_everything":false,"choose_events":false,"events":{"create":false,"push":true,"pull_request":false}}' is_active: true @@ -9,7 +9,7 @@ - id: 2 repo_id: 1 - url: www.example.com/url2 + url: https://www.example.com/url2 content_type: 1 # json events: '{"push_only":false,"send_everything":false,"choose_events":false,"events":{"create":false,"push":true,"pull_request":true}}' is_active: false @@ -18,14 +18,35 @@ id: 3 owner_id: 3 repo_id: 3 - url: www.example.com/url3 + url: https://www.example.com/url3 content_type: 1 # json events: '{"push_only":false,"send_everything":false,"choose_events":false,"events":{"create":false,"push":true,"pull_request":true}}' is_active: true + - id: 4 repo_id: 2 - url: www.example.com/url4 + url: https://www.example.com/url4 + content_type: 1 # json + events: '{"push_only":true,"branch_filter":"{master,feature*}"}' + is_active: true + +- + id: 5 + repo_id: 0 + owner_id: 0 + url: https://www.example.com/url5 + content_type: 1 # json + events: '{"push_only":true,"branch_filter":"{master,feature*}"}' + is_active: true + is_system_webhook: true + +- + id: 6 + repo_id: 0 + owner_id: 0 + url: https://www.example.com/url6 content_type: 1 # json events: '{"push_only":true,"branch_filter":"{master,feature*}"}' is_active: true + is_system_webhook: false diff --git a/models/git/branch.go b/models/git/branch.go index e683ce47e6..07c94a8ba5 100644 --- a/models/git/branch.go +++ b/models/git/branch.go @@ -167,9 +167,24 @@ func GetBranch(ctx context.Context, repoID int64, branchName string) (*Branch, e BranchName: branchName, } } + // FIXME: this design is not right: it doesn't check `branch.IsDeleted`, it doesn't make sense to make callers to check IsDeleted again and again. + // It causes inconsistency with `GetBranches` and `git.GetBranch`, and will lead to strange bugs + // In the future, there should be 2 functions: `GetBranchExisting` and `GetBranchWithDeleted` return &branch, nil } +// IsBranchExist returns true if the branch exists in the repository. 
+func IsBranchExist(ctx context.Context, repoID int64, branchName string) (bool, error) { + var branch Branch + has, err := db.GetEngine(ctx).Where("repo_id=?", repoID).And("name=?", branchName).Get(&branch) + if err != nil { + return false, err + } else if !has { + return false, nil + } + return !branch.IsDeleted, nil +} + func GetBranches(ctx context.Context, repoID int64, branchNames []string, includeDeleted bool) ([]*Branch, error) { branches := make([]*Branch, 0, len(branchNames)) @@ -220,6 +235,11 @@ func GetDeletedBranchByID(ctx context.Context, repoID, branchID int64) (*Branch, return &branch, nil } +func DeleteRepoBranches(ctx context.Context, repoID int64) error { + _, err := db.GetEngine(ctx).Where("repo_id=?", repoID).Delete(new(Branch)) + return err +} + func DeleteBranches(ctx context.Context, repoID, doerID int64, branchIDs []int64) error { return db.WithTx(ctx, func(ctx context.Context) error { branches := make([]*Branch, 0, len(branchIDs)) @@ -440,6 +460,8 @@ type FindRecentlyPushedNewBranchesOptions struct { } type RecentlyPushedNewBranch struct { + BranchRepo *repo_model.Repository + BranchName string BranchDisplayName string BranchLink string BranchCompareURL string @@ -465,7 +487,7 @@ func FindRecentlyPushedNewBranches(ctx context.Context, doer *user_model.User, o ForkFrom: opts.BaseRepo.ID, Archived: optional.Some(false), } - repoCond := repo_model.SearchRepositoryCondition(&repoOpts).And(repo_model.AccessibleRepositoryCondition(doer, unit.TypeCode)) + repoCond := repo_model.SearchRepositoryCondition(repoOpts).And(repo_model.AccessibleRepositoryCondition(doer, unit.TypeCode)) if opts.Repo.ID == opts.BaseRepo.ID { // should also include the base repo's branches repoCond = repoCond.Or(builder.Eq{"id": opts.BaseRepo.ID}) @@ -540,7 +562,9 @@ func FindRecentlyPushedNewBranches(ctx context.Context, doer *user_model.User, o branchDisplayName = fmt.Sprintf("%s:%s", branch.Repo.FullName(), branchDisplayName) } newBranches = append(newBranches, &RecentlyPushedNewBranch{ + BranchRepo: branch.Repo, BranchDisplayName: branchDisplayName, + BranchName: branch.Name, BranchLink: fmt.Sprintf("%s/src/branch/%s", branch.Repo.Link(), util.PathEscapeSegments(branch.Name)), BranchCompareURL: branch.Repo.ComposeBranchCompareURL(opts.BaseRepo, branch.Name), CommitTime: branch.CommitTime, diff --git a/models/git/branch_test.go b/models/git/branch_test.go index b8ea663e81..252dcc5690 100644 --- a/models/git/branch_test.go +++ b/models/git/branch_test.go @@ -21,7 +21,7 @@ import ( func TestAddDeletedBranch(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}) - assert.EqualValues(t, git.Sha1ObjectFormat.Name(), repo.ObjectFormatName) + assert.Equal(t, git.Sha1ObjectFormat.Name(), repo.ObjectFormatName) firstBranch := unittest.AssertExistsAndLoadBean(t, &git_model.Branch{ID: 1}) assert.True(t, firstBranch.IsDeleted) diff --git a/models/git/commit_status.go b/models/git/commit_status.go index 0579a41209..f85e1b15e5 100644 --- a/models/git/commit_status.go +++ b/models/git/commit_status.go @@ -17,10 +17,10 @@ import ( "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/commitstatus" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" - api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/translation" @@ 
-30,17 +30,17 @@ import ( // CommitStatus holds a single Status of a single Commit type CommitStatus struct { - ID int64 `xorm:"pk autoincr"` - Index int64 `xorm:"INDEX UNIQUE(repo_sha_index)"` - RepoID int64 `xorm:"INDEX UNIQUE(repo_sha_index)"` - Repo *repo_model.Repository `xorm:"-"` - State api.CommitStatusState `xorm:"VARCHAR(7) NOT NULL"` - SHA string `xorm:"VARCHAR(64) NOT NULL INDEX UNIQUE(repo_sha_index)"` - TargetURL string `xorm:"TEXT"` - Description string `xorm:"TEXT"` - ContextHash string `xorm:"VARCHAR(64) index"` - Context string `xorm:"TEXT"` - Creator *user_model.User `xorm:"-"` + ID int64 `xorm:"pk autoincr"` + Index int64 `xorm:"INDEX UNIQUE(repo_sha_index)"` + RepoID int64 `xorm:"INDEX UNIQUE(repo_sha_index)"` + Repo *repo_model.Repository `xorm:"-"` + State commitstatus.CommitStatusState `xorm:"VARCHAR(7) NOT NULL"` + SHA string `xorm:"VARCHAR(64) NOT NULL INDEX UNIQUE(repo_sha_index)"` + TargetURL string `xorm:"TEXT"` + Description string `xorm:"TEXT"` + ContextHash string `xorm:"VARCHAR(64) index"` + Context string `xorm:"TEXT"` + Creator *user_model.User `xorm:"-"` CreatorID int64 CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` @@ -222,7 +222,7 @@ func (status *CommitStatus) HideActionsURL(ctx context.Context) { } } - prefix := fmt.Sprintf("%s/actions", status.Repo.Link()) + prefix := status.Repo.Link() + "/actions" if strings.HasPrefix(status.TargetURL, prefix) { status.TargetURL = "" } @@ -230,22 +230,25 @@ func (status *CommitStatus) HideActionsURL(ctx context.Context) { // CalcCommitStatus returns commit status state via some status, the commit statues should order by id desc func CalcCommitStatus(statuses []*CommitStatus) *CommitStatus { - var lastStatus *CommitStatus - state := api.CommitStatusSuccess + if len(statuses) == 0 { + return nil + } + + states := make(commitstatus.CommitStatusStates, 0, len(statuses)) + targetURL := "" for _, status := range statuses { - if status.State.NoBetterThan(state) { - state = status.State - lastStatus = status + states = append(states, status.State) + if status.TargetURL != "" { + targetURL = status.TargetURL } } - if lastStatus == nil { - if len(statuses) > 0 { - lastStatus = statuses[0] - } else { - lastStatus = &CommitStatus{} - } + + return &CommitStatus{ + RepoID: statuses[0].RepoID, + SHA: statuses[0].SHA, + State: states.Combine(), + TargetURL: targetURL, } - return lastStatus } // CommitStatusOptions holds the options for query commit statuses @@ -298,27 +301,37 @@ type CommitStatusIndex struct { MaxIndex int64 `xorm:"index"` } +func makeRepoCommitQuery(ctx context.Context, repoID int64, sha string) *xorm.Session { + return db.GetEngine(ctx).Table(&CommitStatus{}). + Where("repo_id = ?", repoID).And("sha = ?", sha) +} + // GetLatestCommitStatus returns all statuses with a unique context for a given commit. -func GetLatestCommitStatus(ctx context.Context, repoID int64, sha string, listOptions db.ListOptions) ([]*CommitStatus, int64, error) { - getBase := func() *xorm.Session { - return db.GetEngine(ctx).Table(&CommitStatus{}). - Where("repo_id = ?", repoID).And("sha = ?", sha) - } +func GetLatestCommitStatus(ctx context.Context, repoID int64, sha string, listOptions db.ListOptions) ([]*CommitStatus, error) { indices := make([]int64, 0, 10) - sess := getBase().Select("max( `index` ) as `index`"). - GroupBy("context_hash").OrderBy("max( `index` ) desc") + sess := makeRepoCommitQuery(ctx, repoID, sha). + Select("max( `index` ) as `index`"). + GroupBy("context_hash"). 
+ OrderBy("max( `index` ) desc") if !listOptions.IsListAll() { sess = db.SetSessionPagination(sess, &listOptions) } - count, err := sess.FindAndCount(&indices) - if err != nil { - return nil, count, err + if err := sess.Find(&indices); err != nil { + return nil, err } statuses := make([]*CommitStatus, 0, len(indices)) if len(indices) == 0 { - return statuses, count, nil + return statuses, nil } - return statuses, count, getBase().And(builder.In("`index`", indices)).Find(&statuses) + err := makeRepoCommitQuery(ctx, repoID, sha).And(builder.In("`index`", indices)).Find(&statuses) + return statuses, err +} + +func CountLatestCommitStatus(ctx context.Context, repoID int64, sha string) (int64, error) { + return makeRepoCommitQuery(ctx, repoID, sha). + Select("count(context_hash)"). + GroupBy("context_hash"). + Count() } // GetLatestCommitStatusForPairs returns all statuses with a unique context for a given list of repo-sha pairs @@ -453,9 +466,8 @@ func NewCommitStatus(ctx context.Context, opts NewCommitStatusOptions) error { return fmt.Errorf("NewCommitStatus[nil, %s]: no repository specified", opts.SHA) } - repoPath := opts.Repo.RepoPath() if opts.Creator == nil { - return fmt.Errorf("NewCommitStatus[%s, %s]: no user specified", repoPath, opts.SHA) + return fmt.Errorf("NewCommitStatus[%s, %s]: no user specified", opts.Repo.FullName(), opts.SHA) } ctx, committer, err := db.TxContext(ctx) @@ -477,13 +489,13 @@ func NewCommitStatus(ctx context.Context, opts NewCommitStatusOptions) error { opts.CommitStatus.CreatorID = opts.Creator.ID opts.CommitStatus.RepoID = opts.Repo.ID opts.CommitStatus.Index = idx - log.Debug("NewCommitStatus[%s, %s]: %d", repoPath, opts.SHA, opts.CommitStatus.Index) + log.Debug("NewCommitStatus[%s, %s]: %d", opts.Repo.FullName(), opts.SHA, opts.CommitStatus.Index) opts.CommitStatus.ContextHash = hashCommitStatusContext(opts.CommitStatus.Context) // Insert new CommitStatus if _, err = db.GetEngine(ctx).Insert(opts.CommitStatus); err != nil { - return fmt.Errorf("insert CommitStatus[%s, %s]: %w", repoPath, opts.SHA, err) + return fmt.Errorf("insert CommitStatus[%s, %s]: %w", opts.Repo.FullName(), opts.SHA, err) } return committer.Commit() @@ -496,47 +508,11 @@ type SignCommitWithStatuses struct { *asymkey_model.SignCommit } -// ParseCommitsWithStatus checks commits latest statuses and calculates its worst status state -func ParseCommitsWithStatus(ctx context.Context, oldCommits []*asymkey_model.SignCommit, repo *repo_model.Repository) []*SignCommitWithStatuses { - newCommits := make([]*SignCommitWithStatuses, 0, len(oldCommits)) - - for _, c := range oldCommits { - commit := &SignCommitWithStatuses{ - SignCommit: c, - } - statuses, _, err := GetLatestCommitStatus(ctx, repo.ID, commit.ID.String(), db.ListOptions{}) - if err != nil { - log.Error("GetLatestCommitStatus: %v", err) - } else { - commit.Statuses = statuses - commit.Status = CalcCommitStatus(statuses) - } - - newCommits = append(newCommits, commit) - } - return newCommits -} - // hashCommitStatusContext hash context func hashCommitStatusContext(context string) string { return fmt.Sprintf("%x", sha1.Sum([]byte(context))) } -// ConvertFromGitCommit converts git commits into SignCommitWithStatuses -func ConvertFromGitCommit(ctx context.Context, commits []*git.Commit, repo *repo_model.Repository) []*SignCommitWithStatuses { - return ParseCommitsWithStatus(ctx, - asymkey_model.ParseCommitsWithSignature( - ctx, - user_model.ValidateCommitsWithEmails(ctx, commits), - repo.GetTrustModel(), - func(user *user_model.User) 
(bool, error) { - return repo_model.IsOwnerMemberCollaborator(ctx, repo, user.ID) - }, - ), - repo, - ) -} - // CommitStatusesHideActionsURL hide Gitea Actions urls func CommitStatusesHideActionsURL(ctx context.Context, statuses []*CommitStatus) { idToRepos := make(map[int64]*repo_model.Repository) diff --git a/models/git/commit_status_summary.go b/models/git/commit_status_summary.go index 7603e7aa65..dd416fa015 100644 --- a/models/git/commit_status_summary.go +++ b/models/git/commit_status_summary.go @@ -7,19 +7,19 @@ import ( "context" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/modules/commitstatus" "code.gitea.io/gitea/modules/setting" - api "code.gitea.io/gitea/modules/structs" "xorm.io/builder" ) // CommitStatusSummary holds the latest commit Status of a single Commit type CommitStatusSummary struct { - ID int64 `xorm:"pk autoincr"` - RepoID int64 `xorm:"INDEX UNIQUE(repo_id_sha)"` - SHA string `xorm:"VARCHAR(64) NOT NULL INDEX UNIQUE(repo_id_sha)"` - State api.CommitStatusState `xorm:"VARCHAR(7) NOT NULL"` - TargetURL string `xorm:"TEXT"` + ID int64 `xorm:"pk autoincr"` + RepoID int64 `xorm:"INDEX UNIQUE(repo_id_sha)"` + SHA string `xorm:"VARCHAR(64) NOT NULL INDEX UNIQUE(repo_id_sha)"` + State commitstatus.CommitStatusState `xorm:"VARCHAR(7) NOT NULL"` + TargetURL string `xorm:"TEXT"` } func init() { @@ -55,11 +55,15 @@ func GetLatestCommitStatusForRepoAndSHAs(ctx context.Context, repoSHAs []RepoSHA } func UpdateCommitStatusSummary(ctx context.Context, repoID int64, sha string) error { - commitStatuses, _, err := GetLatestCommitStatus(ctx, repoID, sha, db.ListOptionsAll) + commitStatuses, err := GetLatestCommitStatus(ctx, repoID, sha, db.ListOptionsAll) if err != nil { return err } - state := CalcCommitStatus(commitStatuses) + // it guarantees that commitStatuses is not empty because this function is always called after a commit status is created + if len(commitStatuses) == 0 { + setting.PanicInDevOrTesting("no commit statuses found for repo %d and sha %s", repoID, sha) + } + state := CalcCommitStatus(commitStatuses) // non-empty commitStatuses is guaranteed // mysql will return 0 when update a record which state hasn't been changed which behaviour is different from other database, // so we need to use insert in on duplicate if setting.Database.Type.IsMySQL() { diff --git a/models/git/commit_status_test.go b/models/git/commit_status_test.go index 37d785e938..4c0f5e891b 100644 --- a/models/git/commit_status_test.go +++ b/models/git/commit_status_test.go @@ -14,9 +14,9 @@ import ( repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/commitstatus" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/gitrepo" - "code.gitea.io/gitea/modules/structs" "github.com/stretchr/testify/assert" ) @@ -26,7 +26,7 @@ func TestGetCommitStatuses(t *testing.T) { repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}) - sha1 := "1234123412341234123412341234123412341234" + sha1 := "1234123412341234123412341234123412341234" // the mocked commit ID in test fixtures statuses, maxResults, err := db.FindAndCount[git_model.CommitStatus](db.DefaultContext, &git_model.CommitStatusOptions{ ListOptions: db.ListOptions{Page: 1, PageSize: 50}, @@ -38,23 +38,23 @@ func TestGetCommitStatuses(t *testing.T) { assert.Len(t, statuses, 5) assert.Equal(t, "ci/awesomeness", statuses[0].Context) - assert.Equal(t, structs.CommitStatusPending, statuses[0].State) + assert.Equal(t, 
commitstatus.CommitStatusPending, statuses[0].State) assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/statuses/1234123412341234123412341234123412341234", statuses[0].APIURL(db.DefaultContext)) assert.Equal(t, "cov/awesomeness", statuses[1].Context) - assert.Equal(t, structs.CommitStatusWarning, statuses[1].State) + assert.Equal(t, commitstatus.CommitStatusWarning, statuses[1].State) assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/statuses/1234123412341234123412341234123412341234", statuses[1].APIURL(db.DefaultContext)) assert.Equal(t, "cov/awesomeness", statuses[2].Context) - assert.Equal(t, structs.CommitStatusSuccess, statuses[2].State) + assert.Equal(t, commitstatus.CommitStatusSuccess, statuses[2].State) assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/statuses/1234123412341234123412341234123412341234", statuses[2].APIURL(db.DefaultContext)) assert.Equal(t, "ci/awesomeness", statuses[3].Context) - assert.Equal(t, structs.CommitStatusFailure, statuses[3].State) + assert.Equal(t, commitstatus.CommitStatusFailure, statuses[3].State) assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/statuses/1234123412341234123412341234123412341234", statuses[3].APIURL(db.DefaultContext)) assert.Equal(t, "deploy/awesomeness", statuses[4].Context) - assert.Equal(t, structs.CommitStatusError, statuses[4].State) + assert.Equal(t, commitstatus.CommitStatusError, statuses[4].State) assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/statuses/1234123412341234123412341234123412341234", statuses[4].APIURL(db.DefaultContext)) statuses, maxResults, err = db.FindAndCount[git_model.CommitStatus](db.DefaultContext, &git_model.CommitStatusOptions{ @@ -75,110 +75,110 @@ func Test_CalcCommitStatus(t *testing.T) { { statuses: []*git_model.CommitStatus{ { - State: structs.CommitStatusPending, + State: commitstatus.CommitStatusPending, }, }, expected: &git_model.CommitStatus{ - State: structs.CommitStatusPending, + State: commitstatus.CommitStatusPending, }, }, { statuses: []*git_model.CommitStatus{ { - State: structs.CommitStatusSuccess, + State: commitstatus.CommitStatusSuccess, }, { - State: structs.CommitStatusPending, + State: commitstatus.CommitStatusPending, }, }, expected: &git_model.CommitStatus{ - State: structs.CommitStatusPending, + State: commitstatus.CommitStatusPending, }, }, { statuses: []*git_model.CommitStatus{ { - State: structs.CommitStatusSuccess, + State: commitstatus.CommitStatusSuccess, }, { - State: structs.CommitStatusPending, + State: commitstatus.CommitStatusPending, }, { - State: structs.CommitStatusSuccess, + State: commitstatus.CommitStatusSuccess, }, }, expected: &git_model.CommitStatus{ - State: structs.CommitStatusPending, + State: commitstatus.CommitStatusPending, }, }, { statuses: []*git_model.CommitStatus{ { - State: structs.CommitStatusError, + State: commitstatus.CommitStatusError, }, { - State: structs.CommitStatusPending, + State: commitstatus.CommitStatusPending, }, { - State: structs.CommitStatusSuccess, + State: commitstatus.CommitStatusSuccess, }, }, expected: &git_model.CommitStatus{ - State: structs.CommitStatusError, + State: commitstatus.CommitStatusFailure, }, }, { statuses: []*git_model.CommitStatus{ { - State: structs.CommitStatusWarning, + State: commitstatus.CommitStatusWarning, }, { - State: structs.CommitStatusPending, + State: commitstatus.CommitStatusPending, }, { - State: structs.CommitStatusSuccess, + State: commitstatus.CommitStatusSuccess, }, }, expected: &git_model.CommitStatus{ - State: 
structs.CommitStatusWarning, + State: commitstatus.CommitStatusPending, }, }, { statuses: []*git_model.CommitStatus{ { - State: structs.CommitStatusSuccess, + State: commitstatus.CommitStatusSuccess, }, { - State: structs.CommitStatusSuccess, + State: commitstatus.CommitStatusSuccess, }, { - State: structs.CommitStatusSuccess, + State: commitstatus.CommitStatusSuccess, }, }, expected: &git_model.CommitStatus{ - State: structs.CommitStatusSuccess, + State: commitstatus.CommitStatusSuccess, }, }, { statuses: []*git_model.CommitStatus{ { - State: structs.CommitStatusFailure, + State: commitstatus.CommitStatusFailure, }, { - State: structs.CommitStatusError, + State: commitstatus.CommitStatusError, }, { - State: structs.CommitStatusWarning, + State: commitstatus.CommitStatusWarning, }, }, expected: &git_model.CommitStatus{ - State: structs.CommitStatusError, + State: commitstatus.CommitStatusFailure, }, }, } for _, kase := range kases { - assert.Equal(t, kase.expected, git_model.CalcCommitStatus(kase.statuses)) + assert.Equal(t, kase.expected, git_model.CalcCommitStatus(kase.statuses), "statuses: %v", kase.statuses) } } @@ -208,7 +208,7 @@ func TestFindRepoRecentCommitStatusContexts(t *testing.T) { Creator: user2, SHA: commit.ID, CommitStatus: &git_model.CommitStatus{ - State: structs.CommitStatusFailure, + State: commitstatus.CommitStatusFailure, TargetURL: "https://example.com/tests/", Context: "compliance/lint-backend", }, @@ -220,7 +220,7 @@ func TestFindRepoRecentCommitStatusContexts(t *testing.T) { Creator: user2, SHA: commit.ID, CommitStatus: &git_model.CommitStatus{ - State: structs.CommitStatusSuccess, + State: commitstatus.CommitStatusSuccess, TargetURL: "https://example.com/tests/", Context: "compliance/lint-backend", }, @@ -256,3 +256,26 @@ func TestCommitStatusesHideActionsURL(t *testing.T) { assert.Empty(t, statuses[0].TargetURL) assert.Equal(t, "https://mycicd.org/1", statuses[1].TargetURL) } + +func TestGetCountLatestCommitStatus(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}) + + sha1 := "1234123412341234123412341234123412341234" // the mocked commit ID in test fixtures + + commitStatuses, err := git_model.GetLatestCommitStatus(db.DefaultContext, repo1.ID, sha1, db.ListOptions{ + Page: 1, + PageSize: 2, + }) + assert.NoError(t, err) + assert.Len(t, commitStatuses, 2) + assert.Equal(t, commitstatus.CommitStatusFailure, commitStatuses[0].State) + assert.Equal(t, "ci/awesomeness", commitStatuses[0].Context) + assert.Equal(t, commitstatus.CommitStatusError, commitStatuses[1].State) + assert.Equal(t, "deploy/awesomeness", commitStatuses[1].Context) + + count, err := git_model.CountLatestCommitStatus(db.DefaultContext, repo1.ID, sha1) + assert.NoError(t, err) + assert.EqualValues(t, 3, count) +} diff --git a/models/git/lfs.go b/models/git/lfs.go index bb6361050a..e4fa2b446a 100644 --- a/models/git/lfs.go +++ b/models/git/lfs.go @@ -112,7 +112,6 @@ type LFSMetaObject struct { ID int64 `xorm:"pk autoincr"` lfs.Pointer `xorm:"extends"` RepositoryID int64 `xorm:"UNIQUE(s) INDEX NOT NULL"` - Existing bool `xorm:"-"` CreatedUnix timeutil.TimeStamp `xorm:"created"` UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` } @@ -146,7 +145,6 @@ func NewLFSMetaObject(ctx context.Context, repoID int64, p lfs.Pointer) (*LFSMet if err != nil { return nil, err } else if exist { - m.Existing = true return m, committer.Commit() } diff --git a/models/git/protected_branch.go 
b/models/git/protected_branch.go index a3caed73c4..55bbe6938c 100644 --- a/models/git/protected_branch.go +++ b/models/git/protected_branch.go @@ -246,7 +246,7 @@ func (protectBranch *ProtectedBranch) GetUnprotectedFilePatterns() []glob.Glob { func getFilePatterns(filePatterns string) []glob.Glob { extarr := make([]glob.Glob, 0, 10) - for _, expr := range strings.Split(strings.ToLower(filePatterns), ";") { + for expr := range strings.SplitSeq(strings.ToLower(filePatterns), ";") { expr = strings.TrimSpace(expr) if expr != "" { if g, err := glob.Compile(expr, '.', '/'); err != nil { @@ -518,7 +518,7 @@ func updateTeamWhitelist(ctx context.Context, repo *repo_model.Repository, curre return currentWhitelist, nil } - teams, err := organization.GetTeamsWithAccessToRepo(ctx, repo.OwnerID, repo.ID, perm.AccessModeRead) + teams, err := organization.GetTeamsWithAccessToAnyRepoUnit(ctx, repo.OwnerID, repo.ID, perm.AccessModeRead, unit.TypeCode, unit.TypePullRequests) if err != nil { return nil, fmt.Errorf("GetTeamsWithAccessToRepo [org_id: %d, repo_id: %d]: %v", repo.OwnerID, repo.ID, err) } diff --git a/models/git/protected_branch_list_test.go b/models/git/protected_branch_list_test.go index a46402c543..298c2fa074 100644 --- a/models/git/protected_branch_list_test.go +++ b/models/git/protected_branch_list_test.go @@ -70,7 +70,7 @@ func TestBranchRuleMatchPriority(t *testing.T) { assert.Error(t, fmt.Errorf("no matched rules but expected %s[%d]", kase.Rules[kase.ExpectedMatchIdx], kase.ExpectedMatchIdx)) } } else { - assert.EqualValues(t, kase.Rules[kase.ExpectedMatchIdx], matchedPB.RuleName) + assert.Equal(t, kase.Rules[kase.ExpectedMatchIdx], matchedPB.RuleName) } } } diff --git a/models/git/protected_branch_test.go b/models/git/protected_branch_test.go index e1c91d927d..367992081d 100644 --- a/models/git/protected_branch_test.go +++ b/models/git/protected_branch_test.go @@ -74,7 +74,7 @@ func TestBranchRuleMatch(t *testing.T) { } else { infact = " not" } - assert.EqualValues(t, kase.ExpectedMatch, pb.Match(kase.BranchName), + assert.Equal(t, kase.ExpectedMatch, pb.Match(kase.BranchName), "%s should%s match %s but it is%s", kase.BranchName, should, kase.Rule, infact, ) } diff --git a/models/issues/comment.go b/models/issues/comment.go index a7ec8f57fc..4fdb0c1808 100644 --- a/models/issues/comment.go +++ b/models/issues/comment.go @@ -9,6 +9,7 @@ import ( "context" "fmt" "html/template" + "slices" "strconv" "unicode/utf8" @@ -19,8 +20,6 @@ import ( repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/container" - "code.gitea.io/gitea/modules/gitrepo" - "code.gitea.io/gitea/modules/json" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/optional" "code.gitea.io/gitea/modules/references" @@ -112,8 +111,8 @@ const ( CommentTypePRScheduledToAutoMerge // 34 pr was scheduled to auto merge when checks succeed CommentTypePRUnScheduledToAutoMerge // 35 pr was un scheduled to auto merge when checks succeed - CommentTypePin // 36 pin Issue - CommentTypeUnpin // 37 unpin Issue + CommentTypePin // 36 pin Issue/PullRequest + CommentTypeUnpin // 37 unpin Issue/PullRequest CommentTypeChangeTimeEstimate // 38 Change time estimate ) @@ -198,12 +197,7 @@ func (t CommentType) HasMailReplySupport() bool { } func (t CommentType) CountedAsConversation() bool { - for _, ct := range ConversationCountedCommentType() { - if t == ct { - return true - } - } - return false + return slices.Contains(ConversationCountedCommentType(), t) } // 
ConversationCountedCommentType returns the comment types that are counted as a conversation @@ -616,7 +610,7 @@ func UpdateCommentAttachments(ctx context.Context, c *Comment, uuids []string) e if err != nil { return fmt.Errorf("getAttachmentsByUUIDs [uuids: %v]: %w", uuids, err) } - for i := 0; i < len(attachments); i++ { + for i := range attachments { attachments[i].IssueID = c.IssueID attachments[i].CommentID = c.ID if err := repo_model.UpdateAttachment(ctx, attachments[i]); err != nil { @@ -721,7 +715,8 @@ func (c *Comment) LoadReactions(ctx context.Context, repo *repo_model.Repository return nil } -func (c *Comment) loadReview(ctx context.Context) (err error) { +// LoadReview loads the associated review +func (c *Comment) LoadReview(ctx context.Context) (err error) { if c.ReviewID == 0 { return nil } @@ -738,11 +733,6 @@ func (c *Comment) loadReview(ctx context.Context) (err error) { return nil } -// LoadReview loads the associated review -func (c *Comment) LoadReview(ctx context.Context) error { - return c.loadReview(ctx) -} - // DiffSide returns "previous" if Comment.Line is a LOC of the previous changes and "proposed" if it is a LOC of the proposed changes. func (c *Comment) DiffSide() string { if c.Line < 0 { @@ -774,41 +764,6 @@ func (c *Comment) CodeCommentLink(ctx context.Context) string { return fmt.Sprintf("%s/files#%s", c.Issue.Link(), c.HashTag()) } -// LoadPushCommits Load push commits -func (c *Comment) LoadPushCommits(ctx context.Context) (err error) { - if c.Content == "" || c.Commits != nil || c.Type != CommentTypePullRequestPush { - return nil - } - - var data PushActionContent - - err = json.Unmarshal([]byte(c.Content), &data) - if err != nil { - return err - } - - c.IsForcePush = data.IsForcePush - - if c.IsForcePush { - if len(data.CommitIDs) != 2 { - return nil - } - c.OldCommit = data.CommitIDs[0] - c.NewCommit = data.CommitIDs[1] - } else { - gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, c.Issue.Repo) - if err != nil { - return err - } - defer closer.Close() - - c.Commits = git_model.ConvertFromGitCommit(ctx, gitRepo.GetCommitsFromIDs(data.CommitIDs), c.Issue.Repo) - c.CommitsNum = int64(len(c.Commits)) - } - - return err -} - // CreateComment creates comment with context func CreateComment(ctx context.Context, opts *CreateCommentOptions) (_ *Comment, err error) { ctx, committer, err := db.TxContext(ctx) @@ -897,7 +852,7 @@ func updateCommentInfos(ctx context.Context, opts *CreateCommentOptions, comment } if comment.ReviewID != 0 { if comment.Review == nil { - if err := comment.loadReview(ctx); err != nil { + if err := comment.LoadReview(ctx); err != nil { return err } } diff --git a/models/issues/comment_code.go b/models/issues/comment_code.go index 67a77ceb13..55e67a1243 100644 --- a/models/issues/comment_code.go +++ b/models/issues/comment_code.go @@ -5,6 +5,7 @@ package issues import ( "context" + "strconv" "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/renderhelper" @@ -86,8 +87,10 @@ func findCodeComments(ctx context.Context, opts FindCommentsOptions, issue *Issu ids = append(ids, comment.ReviewID) } } - if err := e.In("id", ids).Find(&reviews); err != nil { - return nil, err + if len(ids) > 0 { + if err := e.In("id", ids).Find(&reviews); err != nil { + return nil, err + } } n := 0 @@ -112,7 +115,9 @@ func findCodeComments(ctx context.Context, opts FindCommentsOptions, issue *Issu } var err error - rctx := renderhelper.NewRenderContextRepoComment(ctx, issue.Repo) + rctx := renderhelper.NewRenderContextRepoComment(ctx, 
issue.Repo, renderhelper.RepoCommentOptions{ + FootnoteContextID: strconv.FormatInt(comment.ID, 10), + }) if comment.RenderedContent, err = markdown.RenderString(rctx, comment.Content); err != nil { return nil, err } diff --git a/models/issues/comment_list.go b/models/issues/comment_list.go index 61ac1c8f56..f6c485449f 100644 --- a/models/issues/comment_list.go +++ b/models/issues/comment_list.go @@ -26,14 +26,14 @@ func (comments CommentList) LoadPosters(ctx context.Context) error { return c.PosterID, c.Poster == nil && c.PosterID > 0 }) - posterMaps, err := getPostersByIDs(ctx, posterIDs) + posterMaps, err := user_model.GetUsersMapByIDs(ctx, posterIDs) if err != nil { return err } for _, comment := range comments { if comment.Poster == nil { - comment.Poster = getPoster(comment.PosterID, posterMaps) + comment.Poster = user_model.GetPossibleUserFromMap(comment.PosterID, posterMaps) } } return nil @@ -41,7 +41,7 @@ func (comments CommentList) LoadPosters(ctx context.Context) error { func (comments CommentList) getLabelIDs() []int64 { return container.FilterSlice(comments, func(comment *Comment) (int64, bool) { - return comment.LabelID, comment.LabelID > 0 + return comment.LabelID, comment.LabelID > 0 && comment.Label == nil }) } @@ -51,13 +51,13 @@ func (comments CommentList) loadLabels(ctx context.Context) error { } labelIDs := comments.getLabelIDs() + if len(labelIDs) == 0 { + return nil + } commentLabels := make(map[int64]*Label, len(labelIDs)) left := len(labelIDs) for left > 0 { - limit := db.DefaultMaxInSize - if left < limit { - limit = left - } + limit := min(left, db.DefaultMaxInSize) rows, err := db.GetEngine(ctx). In("id", labelIDs[:limit]). Rows(new(Label)) @@ -104,10 +104,7 @@ func (comments CommentList) loadMilestones(ctx context.Context) error { milestoneMaps := make(map[int64]*Milestone, len(milestoneIDs)) left := len(milestoneIDs) for left > 0 { - limit := db.DefaultMaxInSize - if left < limit { - limit = left - } + limit := min(left, db.DefaultMaxInSize) err := db.GetEngine(ctx). In("id", milestoneIDs[:limit]). Find(&milestoneMaps) @@ -118,8 +115,8 @@ func (comments CommentList) loadMilestones(ctx context.Context) error { milestoneIDs = milestoneIDs[limit:] } - for _, issue := range comments { - issue.Milestone = milestoneMaps[issue.MilestoneID] + for _, comment := range comments { + comment.Milestone = milestoneMaps[comment.MilestoneID] } return nil } @@ -143,10 +140,7 @@ func (comments CommentList) loadOldMilestones(ctx context.Context) error { milestoneMaps := make(map[int64]*Milestone, len(milestoneIDs)) left := len(milestoneIDs) for left > 0 { - limit := db.DefaultMaxInSize - if left < limit { - limit = left - } + limit := min(left, db.DefaultMaxInSize) err := db.GetEngine(ctx). In("id", milestoneIDs[:limit]). Find(&milestoneMaps) @@ -175,13 +169,13 @@ func (comments CommentList) loadAssignees(ctx context.Context) error { } assigneeIDs := comments.getAssigneeIDs() + if len(assigneeIDs) == 0 { + return nil + } assignees := make(map[int64]*user_model.User, len(assigneeIDs)) left := len(assigneeIDs) for left > 0 { - limit := db.DefaultMaxInSize - if left < limit { - limit = left - } + limit := min(left, db.DefaultMaxInSize) rows, err := db.GetEngine(ctx). In("id", assigneeIDs[:limit]). 
Rows(new(user_model.User)) @@ -250,10 +244,7 @@ func (comments CommentList) LoadIssues(ctx context.Context) error { issues := make(map[int64]*Issue, len(issueIDs)) left := len(issueIDs) for left > 0 { - limit := db.DefaultMaxInSize - if left < limit { - limit = left - } + limit := min(left, db.DefaultMaxInSize) rows, err := db.GetEngine(ctx). In("id", issueIDs[:limit]). Rows(new(Issue)) @@ -301,13 +292,13 @@ func (comments CommentList) loadDependentIssues(ctx context.Context) error { e := db.GetEngine(ctx) issueIDs := comments.getDependentIssueIDs() + if len(issueIDs) == 0 { + return nil + } issues := make(map[int64]*Issue, len(issueIDs)) left := len(issueIDs) for left > 0 { - limit := db.DefaultMaxInSize - if left < limit { - limit = left - } + limit := min(left, db.DefaultMaxInSize) rows, err := e. In("id", issueIDs[:limit]). Rows(new(Issue)) @@ -383,10 +374,7 @@ func (comments CommentList) LoadAttachments(ctx context.Context) (err error) { commentsIDs := comments.getAttachmentCommentIDs() left := len(commentsIDs) for left > 0 { - limit := db.DefaultMaxInSize - if left < limit { - limit = left - } + limit := min(left, db.DefaultMaxInSize) rows, err := db.GetEngine(ctx). In("comment_id", commentsIDs[:limit]). Rows(new(repo_model.Attachment)) @@ -427,6 +415,9 @@ func (comments CommentList) loadReviews(ctx context.Context) error { } reviewIDs := comments.getReviewIDs() + if len(reviewIDs) == 0 { + return nil + } reviews := make(map[int64]*Review, len(reviewIDs)) if err := db.GetEngine(ctx).In("id", reviewIDs).Find(&reviews); err != nil { return err diff --git a/models/issues/comment_test.go b/models/issues/comment_test.go index ae0bc3ce17..c08e3b970d 100644 --- a/models/issues/comment_test.go +++ b/models/issues/comment_test.go @@ -34,10 +34,10 @@ func TestCreateComment(t *testing.T) { assert.NoError(t, err) then := time.Now().Unix() - assert.EqualValues(t, issues_model.CommentTypeComment, comment.Type) - assert.EqualValues(t, "Hello", comment.Content) - assert.EqualValues(t, issue.ID, comment.IssueID) - assert.EqualValues(t, doer.ID, comment.PosterID) + assert.Equal(t, issues_model.CommentTypeComment, comment.Type) + assert.Equal(t, "Hello", comment.Content) + assert.Equal(t, issue.ID, comment.IssueID) + assert.Equal(t, doer.ID, comment.PosterID) unittest.AssertInt64InRange(t, now, then, int64(comment.CreatedUnix)) unittest.AssertExistsAndLoadBean(t, comment) // assert actually added to DB @@ -58,9 +58,9 @@ func Test_UpdateCommentAttachment(t *testing.T) { assert.NoError(t, err) attachment2 := unittest.AssertExistsAndLoadBean(t, &repo_model.Attachment{ID: attachment.ID}) - assert.EqualValues(t, attachment.Name, attachment2.Name) - assert.EqualValues(t, comment.ID, attachment2.CommentID) - assert.EqualValues(t, comment.IssueID, attachment2.IssueID) + assert.Equal(t, attachment.Name, attachment2.Name) + assert.Equal(t, comment.ID, attachment2.CommentID) + assert.Equal(t, comment.IssueID, attachment2.IssueID) } func TestFetchCodeComments(t *testing.T) { @@ -111,7 +111,7 @@ func TestMigrate_InsertIssueComments(t *testing.T) { assert.NoError(t, err) issueModified := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1}) - assert.EqualValues(t, issue.NumComments+1, issueModified.NumComments) + assert.Equal(t, issue.NumComments+1, issueModified.NumComments) unittest.CheckConsistencyFor(t, &issues_model.Issue{}) } @@ -122,5 +122,5 @@ func Test_UpdateIssueNumComments(t *testing.T) { assert.NoError(t, issues_model.UpdateIssueNumComments(db.DefaultContext, issue2.ID)) issue2 = 
unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 2}) - assert.EqualValues(t, 1, issue2.NumComments) + assert.Equal(t, 1, issue2.NumComments) } diff --git a/models/issues/issue.go b/models/issues/issue.go index fe347c2715..a86d50ca9d 100644 --- a/models/issues/issue.go +++ b/models/issues/issue.go @@ -10,6 +10,7 @@ import ( "html/template" "regexp" "slices" + "strconv" "code.gitea.io/gitea/models/db" project_model "code.gitea.io/gitea/models/project" @@ -17,6 +18,7 @@ import ( user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/optional" "code.gitea.io/gitea/modules/setting" api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/timeutil" @@ -46,23 +48,6 @@ func (err ErrIssueNotExist) Unwrap() error { return util.ErrNotExist } -// ErrIssueIsClosed represents a "IssueIsClosed" kind of error. -type ErrIssueIsClosed struct { - ID int64 - RepoID int64 - Index int64 -} - -// IsErrIssueIsClosed checks if an error is a ErrIssueNotExist. -func IsErrIssueIsClosed(err error) bool { - _, ok := err.(ErrIssueIsClosed) - return ok -} - -func (err ErrIssueIsClosed) Error() string { - return fmt.Sprintf("issue is closed [id: %d, repo_id: %d, index: %d]", err.ID, err.RepoID, err.Index) -} - // ErrNewIssueInsert is used when the INSERT statement in newIssue fails type ErrNewIssueInsert struct { OriginalError error @@ -78,22 +63,6 @@ func (err ErrNewIssueInsert) Error() string { return err.OriginalError.Error() } -// ErrIssueWasClosed is used when close a closed issue -type ErrIssueWasClosed struct { - ID int64 - Index int64 -} - -// IsErrIssueWasClosed checks if an error is a ErrIssueWasClosed. -func IsErrIssueWasClosed(err error) bool { - _, ok := err.(ErrIssueWasClosed) - return ok -} - -func (err ErrIssueWasClosed) Error() string { - return fmt.Sprintf("Issue [%d] %d was already closed", err.ID, err.Index) -} - var ErrIssueAlreadyChanged = util.NewInvalidArgumentErrorf("the issue is already changed") // Issue represents an issue or pull request of repository. @@ -129,7 +98,7 @@ type Issue struct { // TODO: RemoveIssueRef: see "repo/issue/branch_selector_field.tmpl" Ref string - PinOrder int `xorm:"DEFAULT 0"` + PinOrder int `xorm:"-"` // 0 means not loaded, -1 means loaded but not pinned DeadlineUnix timeutil.TimeStamp `xorm:"INDEX"` @@ -271,6 +240,9 @@ func (issue *Issue) loadCommentsByType(ctx context.Context, tp CommentType) (err IssueID: issue.ID, Type: tp, }) + for _, comment := range issue.Comments { + comment.Issue = issue + } return err } @@ -320,6 +292,23 @@ func (issue *Issue) LoadMilestone(ctx context.Context) (err error) { return nil } +func (issue *Issue) LoadPinOrder(ctx context.Context) error { + if issue.PinOrder != 0 { + return nil + } + issuePin, err := GetIssuePin(ctx, issue) + if err != nil && !db.IsErrNotExist(err) { + return err + } + + if issuePin != nil { + issue.PinOrder = issuePin.PinOrder + } else { + issue.PinOrder = -1 + } + return nil +} + // LoadAttributes loads the attribute of this issue. 
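The PinOrder handling above introduces a small sentinel convention: the field is no longer a stored column (xorm:"-"), 0 means "not loaded yet", -1 means "loaded but not pinned", and a positive value is the pin position, so callers are expected to call LoadPinOrder before reading it. A minimal caller-side sketch of that convention, assuming issues_model is code.gitea.io/gitea/models/issues; renderPinBadge is an invented helper, not part of this change:

package sketch

import (
	"context"
	"fmt"

	issues_model "code.gitea.io/gitea/models/issues"
)

// renderPinBadge shows the expected call order: load the pin order first,
// then interpret the sentinel values.
func renderPinBadge(ctx context.Context, issue *issues_model.Issue) (string, error) {
	// PinOrder is no longer loaded with the issue row, so fetch it explicitly.
	if err := issue.LoadPinOrder(ctx); err != nil {
		return "", err
	}
	// After loading, -1 means "not pinned" and a positive value is the position.
	// The IsPinned helper added just below does the same check, plus a dev/test
	// panic when PinOrder was never loaded.
	if issue.PinOrder <= 0 {
		return "", nil
	}
	return fmt.Sprintf("pinned at position %d", issue.PinOrder), nil
}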
func (issue *Issue) LoadAttributes(ctx context.Context) (err error) { if err = issue.LoadRepo(ctx); err != nil { @@ -359,6 +348,10 @@ func (issue *Issue) LoadAttributes(ctx context.Context) (err error) { return err } + if err = issue.LoadPinOrder(ctx); err != nil { + return err + } + if err = issue.Comments.LoadAttributes(ctx); err != nil { return err } @@ -371,6 +364,14 @@ func (issue *Issue) LoadAttributes(ctx context.Context) (err error) { return issue.loadReactions(ctx) } +// IsPinned returns if a Issue is pinned +func (issue *Issue) IsPinned() bool { + if issue.PinOrder == 0 { + setting.PanicInDevOrTesting("issue's pinorder has not been loaded") + } + return issue.PinOrder > 0 +} + func (issue *Issue) ResetAttributesLoaded() { issue.isLabelsLoaded = false issue.isMilestoneLoaded = false @@ -531,6 +532,45 @@ func GetIssueByIndex(ctx context.Context, repoID, index int64) (*Issue, error) { return issue, nil } +func isPullToCond(isPull optional.Option[bool]) builder.Cond { + if isPull.Has() { + return builder.Eq{"is_pull": isPull.Value()} + } + return builder.NewCond() +} + +func FindLatestUpdatedIssues(ctx context.Context, repoID int64, isPull optional.Option[bool], pageSize int) (IssueList, error) { + issues := make([]*Issue, 0, pageSize) + err := db.GetEngine(ctx).Where("repo_id = ?", repoID). + And(isPullToCond(isPull)). + OrderBy("updated_unix DESC"). + Limit(pageSize). + Find(&issues) + return issues, err +} + +func FindIssuesSuggestionByKeyword(ctx context.Context, repoID int64, keyword string, isPull optional.Option[bool], excludedID int64, pageSize int) (IssueList, error) { + cond := builder.NewCond() + if excludedID > 0 { + cond = cond.And(builder.Neq{"`id`": excludedID}) + } + + // It seems that GitHub searches both title and content (maybe sorting by the search engine's ranking system?) + // The first PR (https://github.com/go-gitea/gitea/pull/32327) uses "search indexer" to search "name(title) + content" + // But it seems that searching "content" (especially LIKE by DB engine) generates worse (unusable) results. + // So now (https://github.com/go-gitea/gitea/pull/33538) it only searches "name(title)", leave the improvements to the future. + cond = cond.And(db.BuildCaseInsensitiveLike("`name`", keyword)) + + issues := make([]*Issue, 0, pageSize) + err := db.GetEngine(ctx).Where("repo_id = ?", repoID). + And(isPullToCond(isPull)). + And(cond). + OrderBy("updated_unix DESC, `index` DESC"). + Limit(pageSize). + Find(&issues) + return issues, err +} + // GetIssueWithAttrsByIndex returns issue by index in a repository. func GetIssueWithAttrsByIndex(ctx context.Context, repoID, index int64) (*Issue, error) { issue, err := GetIssueByIndex(ctx, repoID, index) @@ -556,6 +596,9 @@ func GetIssueByID(ctx context.Context, id int64) (*Issue, error) { // If keepOrder is true, the order of the returned issues will be the same as the given IDs. 
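FindLatestUpdatedIssues and FindIssuesSuggestionByKeyword above both take an optional.Option[bool] that isPullToCond turns into either an is_pull equality or no condition at all, and the suggestion query deliberately matches the title only. A hedged usage sketch for the suggestion query; suggestIssueRefs is an invented caller name, the package paths are the ones used in this file:

package sketch

import (
	"context"

	issues_model "code.gitea.io/gitea/models/issues"
	"code.gitea.io/gitea/modules/optional"
)

// suggestIssueRefs returns up to five recently updated issues or pull requests
// whose title matches the keyword, excluding the issue currently being edited.
func suggestIssueRefs(ctx context.Context, repoID, currentIssueID int64, keyword string) (issues_model.IssueList, error) {
	// optional.None[bool]() keeps both issues and pull requests;
	// optional.Some(true) would restrict the suggestions to pull requests only.
	return issues_model.FindIssuesSuggestionByKeyword(ctx, repoID, keyword, optional.None[bool](), currentIssueID, 5)
}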
func GetIssuesByIDs(ctx context.Context, issueIDs []int64, keepOrder ...bool) (IssueList, error) { issues := make([]*Issue, 0, len(issueIDs)) + if len(issueIDs) == 0 { + return issues, nil + } if err := db.GetEngine(ctx).In("id", issueIDs).Find(&issues); err != nil { return nil, err @@ -710,190 +753,6 @@ func (issue *Issue) HasOriginalAuthor() bool { return issue.OriginalAuthor != "" && issue.OriginalAuthorID != 0 } -var ErrIssueMaxPinReached = util.NewInvalidArgumentErrorf("the max number of pinned issues has been readched") - -// IsPinned returns if a Issue is pinned -func (issue *Issue) IsPinned() bool { - return issue.PinOrder != 0 -} - -// Pin pins a Issue -func (issue *Issue) Pin(ctx context.Context, user *user_model.User) error { - // If the Issue is already pinned, we don't need to pin it twice - if issue.IsPinned() { - return nil - } - - var maxPin int - _, err := db.GetEngine(ctx).SQL("SELECT MAX(pin_order) FROM issue WHERE repo_id = ? AND is_pull = ?", issue.RepoID, issue.IsPull).Get(&maxPin) - if err != nil { - return err - } - - // Check if the maximum allowed Pins reached - if maxPin >= setting.Repository.Issue.MaxPinned { - return ErrIssueMaxPinReached - } - - _, err = db.GetEngine(ctx).Table("issue"). - Where("id = ?", issue.ID). - Update(map[string]any{ - "pin_order": maxPin + 1, - }) - if err != nil { - return err - } - - // Add the pin event to the history - opts := &CreateCommentOptions{ - Type: CommentTypePin, - Doer: user, - Repo: issue.Repo, - Issue: issue, - } - if _, err = CreateComment(ctx, opts); err != nil { - return err - } - - return nil -} - -// UnpinIssue unpins a Issue -func (issue *Issue) Unpin(ctx context.Context, user *user_model.User) error { - // If the Issue is not pinned, we don't need to unpin it - if !issue.IsPinned() { - return nil - } - - // This sets the Pin for all Issues that come after the unpined Issue to the correct value - _, err := db.GetEngine(ctx).Exec("UPDATE issue SET pin_order = pin_order - 1 WHERE repo_id = ? AND is_pull = ? AND pin_order > ?", issue.RepoID, issue.IsPull, issue.PinOrder) - if err != nil { - return err - } - - _, err = db.GetEngine(ctx).Table("issue"). - Where("id = ?", issue.ID). - Update(map[string]any{ - "pin_order": 0, - }) - if err != nil { - return err - } - - // Add the unpin event to the history - opts := &CreateCommentOptions{ - Type: CommentTypeUnpin, - Doer: user, - Repo: issue.Repo, - Issue: issue, - } - if _, err = CreateComment(ctx, opts); err != nil { - return err - } - - return nil -} - -// PinOrUnpin pins or unpins a Issue -func (issue *Issue) PinOrUnpin(ctx context.Context, user *user_model.User) error { - if !issue.IsPinned() { - return issue.Pin(ctx, user) - } - - return issue.Unpin(ctx, user) -} - -// MovePin moves a Pinned Issue to a new Position -func (issue *Issue) MovePin(ctx context.Context, newPosition int) error { - // If the Issue is not pinned, we can't move them - if !issue.IsPinned() { - return nil - } - - if newPosition < 1 { - return fmt.Errorf("The Position can't be lower than 1") - } - - dbctx, committer, err := db.TxContext(ctx) - if err != nil { - return err - } - defer committer.Close() - - var maxPin int - _, err = db.GetEngine(dbctx).SQL("SELECT MAX(pin_order) FROM issue WHERE repo_id = ? 
AND is_pull = ?", issue.RepoID, issue.IsPull).Get(&maxPin) - if err != nil { - return err - } - - // If the new Position bigger than the current Maximum, set it to the Maximum - if newPosition > maxPin+1 { - newPosition = maxPin + 1 - } - - // Lower the Position of all Pinned Issue that came after the current Position - _, err = db.GetEngine(dbctx).Exec("UPDATE issue SET pin_order = pin_order - 1 WHERE repo_id = ? AND is_pull = ? AND pin_order > ?", issue.RepoID, issue.IsPull, issue.PinOrder) - if err != nil { - return err - } - - // Higher the Position of all Pinned Issues that comes after the new Position - _, err = db.GetEngine(dbctx).Exec("UPDATE issue SET pin_order = pin_order + 1 WHERE repo_id = ? AND is_pull = ? AND pin_order >= ?", issue.RepoID, issue.IsPull, newPosition) - if err != nil { - return err - } - - _, err = db.GetEngine(dbctx).Table("issue"). - Where("id = ?", issue.ID). - Update(map[string]any{ - "pin_order": newPosition, - }) - if err != nil { - return err - } - - return committer.Commit() -} - -// GetPinnedIssues returns the pinned Issues for the given Repo and type -func GetPinnedIssues(ctx context.Context, repoID int64, isPull bool) (IssueList, error) { - issues := make(IssueList, 0) - - err := db.GetEngine(ctx). - Table("issue"). - Where("repo_id = ?", repoID). - And("is_pull = ?", isPull). - And("pin_order > 0"). - OrderBy("pin_order"). - Find(&issues) - if err != nil { - return nil, err - } - - err = issues.LoadAttributes(ctx) - if err != nil { - return nil, err - } - - return issues, nil -} - -// IsNewPinAllowed returns if a new Issue or Pull request can be pinned -func IsNewPinAllowed(ctx context.Context, repoID int64, isPull bool) (bool, error) { - var maxPin int - _, err := db.GetEngine(ctx).SQL("SELECT COUNT(pin_order) FROM issue WHERE repo_id = ? AND is_pull = ? AND pin_order > 0", repoID, isPull).Get(&maxPin) - if err != nil { - return false, err - } - - return maxPin < setting.Repository.Issue.MaxPinned, nil -} - -// IsErrIssueMaxPinReached returns if the error is, that the User can't pin more Issues -func IsErrIssueMaxPinReached(err error) bool { - return err == ErrIssueMaxPinReached -} - // InsertIssues insert issues to database func InsertIssues(ctx context.Context, issues ...*Issue) error { ctx, committer, err := db.TxContext(ctx) @@ -957,7 +816,7 @@ func ChangeIssueTimeEstimate(ctx context.Context, issue *Issue, doer *user_model Doer: doer, Repo: issue.Repo, Issue: issue, - Content: fmt.Sprintf("%d", timeEstimate), + Content: strconv.FormatInt(timeEstimate, 10), } if _, err := CreateComment(ctx, opts); err != nil { return fmt.Errorf("createComment: %w", err) diff --git a/models/issues/issue_label.go b/models/issues/issue_label.go index 10fc821454..f082079e07 100644 --- a/models/issues/issue_label.go +++ b/models/issues/issue_label.go @@ -206,6 +206,7 @@ func DeleteIssueLabel(ctx context.Context, issue *Issue, label *Label, doer *use } issue.Labels = nil + issue.isLabelsLoaded = false return issue.LoadLabels(ctx) } diff --git a/models/issues/issue_list.go b/models/issues/issue_list.go index 22a4548adc..26b93189b8 100644 --- a/models/issues/issue_list.go +++ b/models/issues/issue_list.go @@ -42,10 +42,7 @@ func (issues IssueList) LoadRepositories(ctx context.Context) (repo_model.Reposi repoMaps := make(map[int64]*repo_model.Repository, len(repoIDs)) left := len(repoIDs) for left > 0 { - limit := db.DefaultMaxInSize - if left < limit { - limit = left - } + limit := min(left, db.DefaultMaxInSize) err := db.GetEngine(ctx). In("id", repoIDs[:limit]). 
Find(&repoMaps) @@ -81,53 +78,19 @@ func (issues IssueList) LoadPosters(ctx context.Context) error { return issue.PosterID, issue.Poster == nil && issue.PosterID > 0 }) - posterMaps, err := getPostersByIDs(ctx, posterIDs) + posterMaps, err := user_model.GetUsersMapByIDs(ctx, posterIDs) if err != nil { return err } for _, issue := range issues { if issue.Poster == nil { - issue.Poster = getPoster(issue.PosterID, posterMaps) + issue.Poster = user_model.GetPossibleUserFromMap(issue.PosterID, posterMaps) } } return nil } -func getPostersByIDs(ctx context.Context, posterIDs []int64) (map[int64]*user_model.User, error) { - posterMaps := make(map[int64]*user_model.User, len(posterIDs)) - left := len(posterIDs) - for left > 0 { - limit := db.DefaultMaxInSize - if left < limit { - limit = left - } - err := db.GetEngine(ctx). - In("id", posterIDs[:limit]). - Find(&posterMaps) - if err != nil { - return nil, err - } - left -= limit - posterIDs = posterIDs[limit:] - } - return posterMaps, nil -} - -func getPoster(posterID int64, posterMaps map[int64]*user_model.User) *user_model.User { - if posterID == user_model.ActionsUserID { - return user_model.NewActionsUser() - } - if posterID <= 0 { - return nil - } - poster, ok := posterMaps[posterID] - if !ok { - return user_model.NewGhostUser() - } - return poster -} - func (issues IssueList) getIssueIDs() []int64 { ids := make([]int64, 0, len(issues)) for _, issue := range issues { @@ -150,10 +113,7 @@ func (issues IssueList) LoadLabels(ctx context.Context) error { issueIDs := issues.getIssueIDs() left := len(issueIDs) for left > 0 { - limit := db.DefaultMaxInSize - if left < limit { - limit = left - } + limit := min(left, db.DefaultMaxInSize) rows, err := db.GetEngine(ctx).Table("label"). Join("LEFT", "issue_label", "issue_label.label_id = label.id"). In("issue_label.issue_id", issueIDs[:limit]). @@ -205,10 +165,7 @@ func (issues IssueList) LoadMilestones(ctx context.Context) error { milestoneMaps := make(map[int64]*Milestone, len(milestoneIDs)) left := len(milestoneIDs) for left > 0 { - limit := db.DefaultMaxInSize - if left < limit { - limit = left - } + limit := min(left, db.DefaultMaxInSize) err := db.GetEngine(ctx). In("id", milestoneIDs[:limit]). Find(&milestoneMaps) @@ -237,10 +194,7 @@ func (issues IssueList) LoadProjects(ctx context.Context) error { } for left > 0 { - limit := db.DefaultMaxInSize - if left < limit { - limit = left - } + limit := min(left, db.DefaultMaxInSize) projects := make([]*projectWithIssueID, 0, limit) err := db.GetEngine(ctx). @@ -279,10 +233,7 @@ func (issues IssueList) LoadAssignees(ctx context.Context) error { issueIDs := issues.getIssueIDs() left := len(issueIDs) for left > 0 { - limit := db.DefaultMaxInSize - if left < limit { - limit = left - } + limit := min(left, db.DefaultMaxInSize) rows, err := db.GetEngine(ctx).Table("issue_assignees"). Join("INNER", "`user`", "`user`.id = `issue_assignees`.assignee_id"). In("`issue_assignees`.issue_id", issueIDs[:limit]).OrderBy(user_model.GetOrderByName()). @@ -340,10 +291,7 @@ func (issues IssueList) LoadPullRequests(ctx context.Context) error { pullRequestMaps := make(map[int64]*PullRequest, len(issuesIDs)) left := len(issuesIDs) for left > 0 { - limit := db.DefaultMaxInSize - if left < limit { - limit = left - } + limit := min(left, db.DefaultMaxInSize) rows, err := db.GetEngine(ctx). In("issue_id", issuesIDs[:limit]). 
Rows(new(PullRequest)) @@ -388,10 +336,7 @@ func (issues IssueList) LoadAttachments(ctx context.Context) (err error) { issuesIDs := issues.getIssueIDs() left := len(issuesIDs) for left > 0 { - limit := db.DefaultMaxInSize - if left < limit { - limit = left - } + limit := min(left, db.DefaultMaxInSize) rows, err := db.GetEngine(ctx). In("issue_id", issuesIDs[:limit]). Rows(new(repo_model.Attachment)) @@ -433,10 +378,7 @@ func (issues IssueList) loadComments(ctx context.Context, cond builder.Cond) (er issuesIDs := issues.getIssueIDs() left := len(issuesIDs) for left > 0 { - limit := db.DefaultMaxInSize - if left < limit { - limit = left - } + limit := min(left, db.DefaultMaxInSize) rows, err := db.GetEngine(ctx).Table("comment"). Join("INNER", "issue", "issue.id = comment.issue_id"). In("issue.id", issuesIDs[:limit]). @@ -500,10 +442,7 @@ func (issues IssueList) loadTotalTrackedTimes(ctx context.Context) (err error) { left := len(ids) for left > 0 { - limit := db.DefaultMaxInSize - if left < limit { - limit = left - } + limit := min(left, db.DefaultMaxInSize) // select issue_id, sum(time) from tracked_time where issue_id in (<issue ids in current page>) group by issue_id rows, err := db.GetEngine(ctx).Table("tracked_time"). @@ -540,6 +479,39 @@ func (issues IssueList) loadTotalTrackedTimes(ctx context.Context) (err error) { return nil } +func (issues IssueList) LoadPinOrder(ctx context.Context) error { + if len(issues) == 0 { + return nil + } + + issueIDs := container.FilterSlice(issues, func(issue *Issue) (int64, bool) { + return issue.ID, issue.PinOrder == 0 + }) + if len(issueIDs) == 0 { + return nil + } + issuePins, err := GetIssuePinsByIssueIDs(ctx, issueIDs) + if err != nil { + return err + } + + for _, issue := range issues { + if issue.PinOrder != 0 { + continue + } + for _, pin := range issuePins { + if pin.IssueID == issue.ID { + issue.PinOrder = pin.PinOrder + break + } + } + if issue.PinOrder == 0 { + issue.PinOrder = -1 + } + } + return nil +} + // loadAttributes loads all attributes, expect for attachments and comments func (issues IssueList) LoadAttributes(ctx context.Context) error { if _, err := issues.LoadRepositories(ctx); err != nil { diff --git a/models/issues/issue_list_test.go b/models/issues/issue_list_test.go index 9069e1012d..5b4d2ca5ab 100644 --- a/models/issues/issue_list_test.go +++ b/models/issues/issue_list_test.go @@ -27,7 +27,7 @@ func TestIssueList_LoadRepositories(t *testing.T) { assert.NoError(t, err) assert.Len(t, repos, 2) for _, issue := range issueList { - assert.EqualValues(t, issue.RepoID, issue.Repo.ID) + assert.Equal(t, issue.RepoID, issue.Repo.ID) } } @@ -41,28 +41,28 @@ func TestIssueList_LoadAttributes(t *testing.T) { assert.NoError(t, issueList.LoadAttributes(db.DefaultContext)) for _, issue := range issueList { - assert.EqualValues(t, issue.RepoID, issue.Repo.ID) + assert.Equal(t, issue.RepoID, issue.Repo.ID) for _, label := range issue.Labels { - assert.EqualValues(t, issue.RepoID, label.RepoID) + assert.Equal(t, issue.RepoID, label.RepoID) unittest.AssertExistsAndLoadBean(t, &issues_model.IssueLabel{IssueID: issue.ID, LabelID: label.ID}) } if issue.PosterID > 0 { - assert.EqualValues(t, issue.PosterID, issue.Poster.ID) + assert.Equal(t, issue.PosterID, issue.Poster.ID) } if issue.AssigneeID > 0 { - assert.EqualValues(t, issue.AssigneeID, issue.Assignee.ID) + assert.Equal(t, issue.AssigneeID, issue.Assignee.ID) } if issue.MilestoneID > 0 { - assert.EqualValues(t, issue.MilestoneID, issue.Milestone.ID) + assert.Equal(t, issue.MilestoneID, 
issue.Milestone.ID) } if issue.IsPull { - assert.EqualValues(t, issue.ID, issue.PullRequest.IssueID) + assert.Equal(t, issue.ID, issue.PullRequest.IssueID) } for _, attachment := range issue.Attachments { - assert.EqualValues(t, issue.ID, attachment.IssueID) + assert.Equal(t, issue.ID, attachment.IssueID) } for _, comment := range issue.Comments { - assert.EqualValues(t, issue.ID, comment.IssueID) + assert.Equal(t, issue.ID, comment.IssueID) } if issue.ID == int64(1) { assert.Equal(t, int64(400), issue.TotalTrackedTime) diff --git a/models/issues/issue_lock.go b/models/issues/issue_lock.go index b21629b529..fa0d128f74 100644 --- a/models/issues/issue_lock.go +++ b/models/issues/issue_lock.go @@ -12,8 +12,14 @@ import ( // IssueLockOptions defines options for locking and/or unlocking an issue/PR type IssueLockOptions struct { - Doer *user_model.User - Issue *Issue + Doer *user_model.User + Issue *Issue + + // Reason is the doer-provided comment message for the locked issue + // GitHub doesn't support changing the "reasons" by config file, so GitHub has pre-defined "reason" enum values. + // Gitea is not like GitHub, it allows site admin to define customized "reasons" in the config file. + // So the API caller might not know what kind of "reasons" are valid, and the customized reasons are not translatable. + // To make things clear and simple: doer have the chance to use any reason they like, we do not do validation. Reason string } diff --git a/models/issues/issue_pin.go b/models/issues/issue_pin.go new file mode 100644 index 0000000000..ae6195b05d --- /dev/null +++ b/models/issues/issue_pin.go @@ -0,0 +1,246 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package issues + +import ( + "context" + "errors" + "sort" + + "code.gitea.io/gitea/models/db" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" +) + +type IssuePin struct { + ID int64 `xorm:"pk autoincr"` + RepoID int64 `xorm:"UNIQUE(s) NOT NULL"` + IssueID int64 `xorm:"UNIQUE(s) NOT NULL"` + IsPull bool `xorm:"NOT NULL"` + PinOrder int `xorm:"DEFAULT 0"` +} + +var ErrIssueMaxPinReached = util.NewInvalidArgumentErrorf("the max number of pinned issues has been readched") + +// IsErrIssueMaxPinReached returns if the error is, that the User can't pin more Issues +func IsErrIssueMaxPinReached(err error) bool { + return err == ErrIssueMaxPinReached +} + +func init() { + db.RegisterModel(new(IssuePin)) +} + +func GetIssuePin(ctx context.Context, issue *Issue) (*IssuePin, error) { + pin := new(IssuePin) + has, err := db.GetEngine(ctx). + Where("repo_id = ?", issue.RepoID). 
+ And("issue_id = ?", issue.ID).Get(pin) + if err != nil { + return nil, err + } else if !has { + return nil, db.ErrNotExist{ + Resource: "IssuePin", + ID: issue.ID, + } + } + return pin, nil +} + +func GetIssuePinsByIssueIDs(ctx context.Context, issueIDs []int64) ([]IssuePin, error) { + var pins []IssuePin + if err := db.GetEngine(ctx).In("issue_id", issueIDs).Find(&pins); err != nil { + return nil, err + } + return pins, nil +} + +// Pin pins a Issue +func PinIssue(ctx context.Context, issue *Issue, user *user_model.User) error { + return db.WithTx(ctx, func(ctx context.Context) error { + pinnedIssuesNum, err := getPinnedIssuesNum(ctx, issue.RepoID, issue.IsPull) + if err != nil { + return err + } + + // Check if the maximum allowed Pins reached + if pinnedIssuesNum >= setting.Repository.Issue.MaxPinned { + return ErrIssueMaxPinReached + } + + pinnedIssuesMaxPinOrder, err := getPinnedIssuesMaxPinOrder(ctx, issue.RepoID, issue.IsPull) + if err != nil { + return err + } + + if _, err = db.GetEngine(ctx).Insert(&IssuePin{ + RepoID: issue.RepoID, + IssueID: issue.ID, + IsPull: issue.IsPull, + PinOrder: pinnedIssuesMaxPinOrder + 1, + }); err != nil { + return err + } + + // Add the pin event to the history + _, err = CreateComment(ctx, &CreateCommentOptions{ + Type: CommentTypePin, + Doer: user, + Repo: issue.Repo, + Issue: issue, + }) + return err + }) +} + +// UnpinIssue unpins a Issue +func UnpinIssue(ctx context.Context, issue *Issue, user *user_model.User) error { + return db.WithTx(ctx, func(ctx context.Context) error { + // This sets the Pin for all Issues that come after the unpined Issue to the correct value + cnt, err := db.GetEngine(ctx).Where("issue_id=?", issue.ID).Delete(new(IssuePin)) + if err != nil { + return err + } + if cnt == 0 { + return nil + } + + // Add the unpin event to the history + _, err = CreateComment(ctx, &CreateCommentOptions{ + Type: CommentTypeUnpin, + Doer: user, + Repo: issue.Repo, + Issue: issue, + }) + return err + }) +} + +func getPinnedIssuesNum(ctx context.Context, repoID int64, isPull bool) (int, error) { + var pinnedIssuesNum int + _, err := db.GetEngine(ctx).SQL("SELECT count(pin_order) FROM issue_pin WHERE repo_id = ? AND is_pull = ?", repoID, isPull).Get(&pinnedIssuesNum) + return pinnedIssuesNum, err +} + +func getPinnedIssuesMaxPinOrder(ctx context.Context, repoID int64, isPull bool) (int, error) { + var maxPinnedIssuesMaxPinOrder int + _, err := db.GetEngine(ctx).SQL("SELECT max(pin_order) FROM issue_pin WHERE repo_id = ? AND is_pull = ?", repoID, isPull).Get(&maxPinnedIssuesMaxPinOrder) + return maxPinnedIssuesMaxPinOrder, err +} + +// MovePin moves a Pinned Issue to a new Position +func MovePin(ctx context.Context, issue *Issue, newPosition int) error { + if newPosition < 1 { + return errors.New("The Position can't be lower than 1") + } + + issuePin, err := GetIssuePin(ctx, issue) + if err != nil { + return err + } + if issuePin.PinOrder == newPosition { + return nil + } + + return db.WithTx(ctx, func(ctx context.Context) error { + if issuePin.PinOrder > newPosition { // move the issue to a lower position + _, err = db.GetEngine(ctx).Exec("UPDATE issue_pin SET pin_order = pin_order + 1 WHERE repo_id = ? AND is_pull = ? AND pin_order >= ? 
AND pin_order < ?", issue.RepoID, issue.IsPull, newPosition, issuePin.PinOrder) + } else { // move the issue to a higher position + // Lower the Position of all Pinned Issue that came after the current Position + _, err = db.GetEngine(ctx).Exec("UPDATE issue_pin SET pin_order = pin_order - 1 WHERE repo_id = ? AND is_pull = ? AND pin_order > ? AND pin_order <= ?", issue.RepoID, issue.IsPull, issuePin.PinOrder, newPosition) + } + if err != nil { + return err + } + + _, err = db.GetEngine(ctx). + Table("issue_pin"). + Where("id = ?", issuePin.ID). + Update(map[string]any{ + "pin_order": newPosition, + }) + return err + }) +} + +func GetPinnedIssueIDs(ctx context.Context, repoID int64, isPull bool) ([]int64, error) { + var issuePins []IssuePin + if err := db.GetEngine(ctx). + Table("issue_pin"). + Where("repo_id = ?", repoID). + And("is_pull = ?", isPull). + Find(&issuePins); err != nil { + return nil, err + } + + sort.Slice(issuePins, func(i, j int) bool { + return issuePins[i].PinOrder < issuePins[j].PinOrder + }) + + var ids []int64 + for _, pin := range issuePins { + ids = append(ids, pin.IssueID) + } + return ids, nil +} + +func GetIssuePinsByRepoID(ctx context.Context, repoID int64, isPull bool) ([]*IssuePin, error) { + var pins []*IssuePin + if err := db.GetEngine(ctx).Where("repo_id = ? AND is_pull = ?", repoID, isPull).Find(&pins); err != nil { + return nil, err + } + return pins, nil +} + +// GetPinnedIssues returns the pinned Issues for the given Repo and type +func GetPinnedIssues(ctx context.Context, repoID int64, isPull bool) (IssueList, error) { + issuePins, err := GetIssuePinsByRepoID(ctx, repoID, isPull) + if err != nil { + return nil, err + } + if len(issuePins) == 0 { + return IssueList{}, nil + } + ids := make([]int64, 0, len(issuePins)) + for _, pin := range issuePins { + ids = append(ids, pin.IssueID) + } + + issues := make(IssueList, 0, len(ids)) + if err := db.GetEngine(ctx).In("id", ids).Find(&issues); err != nil { + return nil, err + } + for _, issue := range issues { + for _, pin := range issuePins { + if pin.IssueID == issue.ID { + issue.PinOrder = pin.PinOrder + break + } + } + if (!setting.IsProd || setting.IsInTesting) && issue.PinOrder == 0 { + panic("It should not happen that a pinned Issue has no PinOrder") + } + } + sort.Slice(issues, func(i, j int) bool { + return issues[i].PinOrder < issues[j].PinOrder + }) + + if err = issues.LoadAttributes(ctx); err != nil { + return nil, err + } + + return issues, nil +} + +// IsNewPinAllowed returns if a new Issue or Pull request can be pinned +func IsNewPinAllowed(ctx context.Context, repoID int64, isPull bool) (bool, error) { + var maxPin int + _, err := db.GetEngine(ctx).SQL("SELECT COUNT(pin_order) FROM issue_pin WHERE repo_id = ? 
AND is_pull = ?", repoID, isPull).Get(&maxPin) + if err != nil { + return false, err + } + + return maxPin < setting.Repository.Issue.MaxPinned, nil +} diff --git a/models/issues/issue_project.go b/models/issues/issue_project.go index c4515fd898..0185244783 100644 --- a/models/issues/issue_project.go +++ b/models/issues/issue_project.go @@ -38,13 +38,30 @@ func (issue *Issue) projectID(ctx context.Context) int64 { } // ProjectColumnID return project column id if issue was assigned to one -func (issue *Issue) ProjectColumnID(ctx context.Context) int64 { +func (issue *Issue) ProjectColumnID(ctx context.Context) (int64, error) { var ip project_model.ProjectIssue has, err := db.GetEngine(ctx).Where("issue_id=?", issue.ID).Get(&ip) - if err != nil || !has { - return 0 + if err != nil { + return 0, err + } else if !has { + return 0, nil + } + return ip.ProjectColumnID, nil +} + +func LoadProjectIssueColumnMap(ctx context.Context, projectID, defaultColumnID int64) (map[int64]int64, error) { + issues := make([]project_model.ProjectIssue, 0) + if err := db.GetEngine(ctx).Where("project_id=?", projectID).Find(&issues); err != nil { + return nil, err + } + result := make(map[int64]int64, len(issues)) + for _, issue := range issues { + if issue.ProjectColumnID == 0 { + issue.ProjectColumnID = defaultColumnID + } + result[issue.IssueID] = issue.ProjectColumnID } - return ip.ProjectColumnID + return result, nil } // LoadIssuesFromColumn load issues assigned to this column @@ -59,11 +76,11 @@ func LoadIssuesFromColumn(ctx context.Context, b *project_model.Column, opts *Is } if b.Default { - issues, err := Issues(ctx, &IssuesOptions{ - ProjectColumnID: db.NoConditionID, - ProjectID: b.ProjectID, - SortType: "project-column-sorting", - }) + issues, err := Issues(ctx, opts.Copy(func(o *IssuesOptions) { + o.ProjectColumnID = db.NoConditionID + o.ProjectID = b.ProjectID + o.SortType = "project-column-sorting" + })) if err != nil { return nil, err } @@ -77,19 +94,6 @@ func LoadIssuesFromColumn(ctx context.Context, b *project_model.Column, opts *Is return issueList, nil } -// LoadIssuesFromColumnList load issues assigned to the columns -func LoadIssuesFromColumnList(ctx context.Context, bs project_model.ColumnList, opts *IssuesOptions) (map[int64]IssueList, error) { - issuesMap := make(map[int64]IssueList, len(bs)) - for i := range bs { - il, err := LoadIssuesFromColumn(ctx, bs[i], opts) - if err != nil { - return nil, err - } - issuesMap[bs[i].ID] = il - } - return issuesMap, nil -} - // IssueAssignOrRemoveProject changes the project associated with an issue // If newProjectID is 0, the issue is removed from the project func IssueAssignOrRemoveProject(ctx context.Context, issue *Issue, doer *user_model.User, newProjectID, newColumnID int64) error { @@ -110,7 +114,7 @@ func IssueAssignOrRemoveProject(ctx context.Context, issue *Issue, doer *user_mo return util.NewPermissionDeniedErrorf("issue %d can't be accessed by project %d", issue.ID, newProject.ID) } if newColumnID == 0 { - newDefaultColumn, err := newProject.GetDefaultColumn(ctx) + newDefaultColumn, err := newProject.MustDefaultColumn(ctx) if err != nil { return err } diff --git a/models/issues/issue_search.go b/models/issues/issue_search.go index f1cd125d49..79bd6a19b0 100644 --- a/models/issues/issue_search.go +++ b/models/issues/issue_search.go @@ -21,14 +21,16 @@ import ( "xorm.io/xorm" ) +const ScopeSortPrefix = "scope-" + // IssuesOptions represents options of an issue. 
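LoadProjectIssueColumnMap above replaces per-issue ProjectColumnID lookups with a single query per project, substituting the default column ID for rows that have no explicit column. A sketch of how a board renderer might consume it; groupIssuesByColumn is illustrative only and not part of this change:

package sketch

import (
	"context"

	issues_model "code.gitea.io/gitea/models/issues"
)

// groupIssuesByColumn buckets a project's issues per column using the
// one-query map instead of calling ProjectColumnID for every issue.
func groupIssuesByColumn(ctx context.Context, projectID, defaultColumnID int64, issues issues_model.IssueList) (map[int64]issues_model.IssueList, error) {
	columnOfIssue, err := issues_model.LoadProjectIssueColumnMap(ctx, projectID, defaultColumnID)
	if err != nil {
		return nil, err
	}
	grouped := make(map[int64]issues_model.IssueList, len(columnOfIssue))
	for _, issue := range issues {
		// A zero value here means the issue is not assigned to this project at all.
		grouped[columnOfIssue[issue.ID]] = append(grouped[columnOfIssue[issue.ID]], issue)
	}
	return grouped, nil
}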
-type IssuesOptions struct { //nolint +type IssuesOptions struct { //nolint:revive // export stutter Paginator *db.ListOptions RepoIDs []int64 // overwrites RepoCond if the length is not 0 AllPublic bool // include also all public repositories RepoCond builder.Cond - AssigneeID optional.Option[int64] - PosterID optional.Option[int64] + AssigneeID string // "(none)" or "(any)" or a user ID + PosterID string // "(none)" or "(any)" or a user ID MentionedID int64 ReviewRequestedID int64 ReviewedID int64 @@ -49,9 +51,9 @@ type IssuesOptions struct { //nolint // prioritize issues from this repo PriorityRepoID int64 IsArchived optional.Option[bool] - Org *organization.Organization // issues permission scope - Team *organization.Team // issues permission scope - User *user_model.User // issues permission scope + Owner *user_model.User // issues permission scope, it could be an organization or a user + Team *organization.Team // issues permission scope + Doer *user_model.User // issues permission scope } // Copy returns a copy of the options. @@ -70,11 +72,24 @@ func (o *IssuesOptions) Copy(edit ...func(options *IssuesOptions)) *IssuesOption // applySorts sort an issues-related session based on the provided // sortType string func applySorts(sess *xorm.Session, sortType string, priorityRepoID int64) { + // Since this sortType is dynamically created, it has to be treated specially. + if after, ok := strings.CutPrefix(sortType, ScopeSortPrefix); ok { + scope := after + sess.Join("LEFT", "issue_label", "issue.id = issue_label.issue_id") + // "exclusive_order=0" means "no order is set", so exclude it from the JOIN criteria and then "LEFT JOIN" result is also null + sess.Join("LEFT", "label", "label.id = issue_label.label_id AND label.exclusive_order <> 0 AND label.name LIKE ?", scope+"/%") + // Use COALESCE to make sure we sort NULL last regardless of backend DB (2147483647 == max int) + sess.OrderBy("COALESCE(label.exclusive_order, 2147483647) ASC").Desc("issue.id") + return + } + switch sortType { case "oldest": sess.Asc("issue.created_unix").Asc("issue.id") case "recentupdate": sess.Desc("issue.updated_unix").Desc("issue.created_unix").Desc("issue.id") + case "recentclose": + sess.Desc("issue.closed_unix").Desc("issue.created_unix").Desc("issue.id") case "leastupdate": sess.Asc("issue.updated_unix").Asc("issue.created_unix").Asc("issue.id") case "mostcomment": @@ -273,8 +288,12 @@ func applyConditions(sess *xorm.Session, opts *IssuesOptions) { applyLabelsCondition(sess, opts) - if opts.User != nil { - sess.And(issuePullAccessibleRepoCond("issue.repo_id", opts.User.ID, opts.Org, opts.Team, opts.IsPull.Value())) + if opts.Owner != nil { + sess.And(repo_model.UserOwnedRepoCond(opts.Owner.ID)) + } + + if opts.Doer != nil && !opts.Doer.IsAdmin { + sess.And(issuePullAccessibleRepoCond("issue.repo_id", opts.Doer.ID, opts.Owner, opts.Team, opts.IsPull.Value())) } } @@ -321,20 +340,20 @@ func teamUnitsRepoCond(id string, userID, orgID, teamID int64, units ...unit.Typ } // issuePullAccessibleRepoCond userID must not be zero, this condition require join repository table -func issuePullAccessibleRepoCond(repoIDstr string, userID int64, org *organization.Organization, team *organization.Team, isPull bool) builder.Cond { +func issuePullAccessibleRepoCond(repoIDstr string, userID int64, owner *user_model.User, team *organization.Team, isPull bool) builder.Cond { cond := builder.NewCond() unitType := unit.TypeIssues if isPull { unitType = unit.TypePullRequests } - if org != nil { + if owner != nil && 
owner.IsOrganization() { if team != nil { - cond = cond.And(teamUnitsRepoCond(repoIDstr, userID, org.ID, team.ID, unitType)) // special team member repos + cond = cond.And(teamUnitsRepoCond(repoIDstr, userID, owner.ID, team.ID, unitType)) // special team member repos } else { cond = cond.And( builder.Or( - repo_model.UserOrgUnitRepoCond(repoIDstr, userID, org.ID, unitType), // team member repos - repo_model.UserOrgPublicUnitRepoCond(userID, org.ID), // user org public non-member repos, TODO: check repo has issues + repo_model.UserOrgUnitRepoCond(repoIDstr, userID, owner.ID, unitType), // team member repos + repo_model.UserOrgPublicUnitRepoCond(userID, owner.ID), // user org public non-member repos, TODO: check repo has issues ), ) } @@ -352,26 +371,25 @@ func issuePullAccessibleRepoCond(repoIDstr string, userID int64, org *organizati return cond } -func applyAssigneeCondition(sess *xorm.Session, assigneeID optional.Option[int64]) { +func applyAssigneeCondition(sess *xorm.Session, assigneeID string) { // old logic: 0 is also treated as "not filtering assignee", because the "assignee" was read as FormInt64 - if !assigneeID.Has() || assigneeID.Value() == 0 { - return - } - if assigneeID.Value() == db.NoConditionID { + if assigneeID == "(none)" { sess.Where("issue.id NOT IN (SELECT issue_id FROM issue_assignees)") - } else { + } else if assigneeID == "(any)" { + sess.Where("issue.id IN (SELECT issue_id FROM issue_assignees)") + } else if assigneeIDInt64, _ := strconv.ParseInt(assigneeID, 10, 64); assigneeIDInt64 > 0 { sess.Join("INNER", "issue_assignees", "issue.id = issue_assignees.issue_id"). - And("issue_assignees.assignee_id = ?", assigneeID.Value()) + And("issue_assignees.assignee_id = ?", assigneeIDInt64) } } -func applyPosterCondition(sess *xorm.Session, posterID optional.Option[int64]) { - if !posterID.Has() { - return - } - // poster doesn't need to support db.NoConditionID(-1), so just use the value as-is - if posterID.Has() { - sess.And("issue.poster_id=?", posterID.Value()) +func applyPosterCondition(sess *xorm.Session, posterID string) { + // Actually every issue has a poster. + // The "(none)" is for internal usage only: when doer tries to search non-existing user as poster, use "(none)" to return empty result. + if posterID == "(none)" { + sess.And("issue.poster_id=0") + } else if posterIDInt64, _ := strconv.ParseInt(posterID, 10, 64); posterIDInt64 > 0 { + sess.And("issue.poster_id=?", posterIDInt64) } } diff --git a/models/issues/issue_stats.go b/models/issues/issue_stats.go index 9ef9347a16..adedaa3d3a 100644 --- a/models/issues/issue_stats.go +++ b/models/issues/issue_stats.go @@ -94,10 +94,7 @@ func GetIssueStats(ctx context.Context, opts *IssuesOptions) (*IssueStats, error // ids in a temporary table and join from them. 
accum := &IssueStats{} for i := 0; i < len(opts.IssueIDs); { - chunk := i + MaxQueryParameters - if chunk > len(opts.IssueIDs) { - chunk = len(opts.IssueIDs) - } + chunk := min(i+MaxQueryParameters, len(opts.IssueIDs)) stats, err := getIssueStatsChunk(ctx, opts, opts.IssueIDs[i:chunk]) if err != nil { return nil, err @@ -107,7 +104,7 @@ func GetIssueStats(ctx context.Context, opts *IssuesOptions) (*IssueStats, error accum.YourRepositoriesCount += stats.YourRepositoriesCount accum.AssignCount += stats.AssignCount accum.CreateCount += stats.CreateCount - accum.OpenCount += stats.MentionCount + accum.MentionCount += stats.MentionCount accum.ReviewRequestedCount += stats.ReviewRequestedCount accum.ReviewedCount += stats.ReviewedCount i = chunk diff --git a/models/issues/issue_test.go b/models/issues/issue_test.go index dbbb1e4179..1c5db55bbc 100644 --- a/models/issues/issue_test.go +++ b/models/issues/issue_test.go @@ -4,8 +4,8 @@ package issues_test import ( - "context" "fmt" + "slices" "sort" "sync" "testing" @@ -16,7 +16,6 @@ import ( repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" - "code.gitea.io/gitea/modules/optional" "code.gitea.io/gitea/modules/setting" "github.com/stretchr/testify/assert" @@ -143,8 +142,8 @@ func TestUpdateIssueCols(t *testing.T) { then := time.Now().Unix() updatedIssue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: issue.ID}) - assert.EqualValues(t, newTitle, updatedIssue.Title) - assert.EqualValues(t, prevContent, updatedIssue.Content) + assert.Equal(t, newTitle, updatedIssue.Title) + assert.Equal(t, prevContent, updatedIssue.Content) unittest.AssertInt64InRange(t, now, then, int64(updatedIssue.UpdatedUnix)) } @@ -156,7 +155,7 @@ func TestIssues(t *testing.T) { }{ { issues_model.IssuesOptions{ - AssigneeID: optional.Some(int64(1)), + AssigneeID: "1", SortType: "oldest", }, []int64{1, 6}, @@ -203,7 +202,7 @@ func TestIssues(t *testing.T) { assert.NoError(t, err) if assert.Len(t, issues, len(test.ExpectedIssueIDs)) { for i, issue := range issues { - assert.EqualValues(t, test.ExpectedIssueIDs[i], issue.ID) + assert.Equal(t, test.ExpectedIssueIDs[i], issue.ID) } } } @@ -236,10 +235,10 @@ func testInsertIssue(t *testing.T, title, content string, expectIndex int64) *is has, err := db.GetEngine(db.DefaultContext).ID(issue.ID).Get(&newIssue) assert.NoError(t, err) assert.True(t, has) - assert.EqualValues(t, issue.Title, newIssue.Title) - assert.EqualValues(t, issue.Content, newIssue.Content) + assert.Equal(t, issue.Title, newIssue.Title) + assert.Equal(t, issue.Content, newIssue.Content) if expectIndex > 0 { - assert.EqualValues(t, expectIndex, newIssue.Index) + assert.Equal(t, expectIndex, newIssue.Index) } }) return &newIssue @@ -272,8 +271,8 @@ func TestIssue_ResolveMentions(t *testing.T) { for i, user := range resolved { ids[i] = user.ID } - sort.Slice(ids, func(i, j int) bool { return ids[i] < ids[j] }) - assert.EqualValues(t, expected, ids) + slices.Sort(ids) + assert.Equal(t, expected, ids) } // Public repo, existing user @@ -294,7 +293,7 @@ func TestResourceIndex(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) var wg sync.WaitGroup - for i := 0; i < 100; i++ { + for i := range 100 { wg.Add(1) go func(i int) { testInsertIssue(t, fmt.Sprintf("issue %d", i+1), "my issue", 0) @@ -316,7 +315,7 @@ func TestCorrectIssueStats(t *testing.T) { issueAmount := issues_model.MaxQueryParameters + 10 var wg sync.WaitGroup - for i := 0; i < issueAmount; i++ { + for i 
:= range issueAmount { wg.Add(1) go func(i int) { testInsertIssue(t, fmt.Sprintf("Issue %d", i+1), "Bugs are nasty", 0) @@ -326,7 +325,7 @@ func TestCorrectIssueStats(t *testing.T) { wg.Wait() // Now we will get all issueID's that match the "Bugs are nasty" query. - issues, err := issues_model.Issues(context.TODO(), &issues_model.IssuesOptions{ + issues, err := issues_model.Issues(t.Context(), &issues_model.IssuesOptions{ Paginator: &db.ListOptions{ PageSize: issueAmount, }, @@ -394,28 +393,28 @@ func TestIssueLoadAttributes(t *testing.T) { for _, issue := range issueList { assert.NoError(t, issue.LoadAttributes(db.DefaultContext)) - assert.EqualValues(t, issue.RepoID, issue.Repo.ID) + assert.Equal(t, issue.RepoID, issue.Repo.ID) for _, label := range issue.Labels { - assert.EqualValues(t, issue.RepoID, label.RepoID) + assert.Equal(t, issue.RepoID, label.RepoID) unittest.AssertExistsAndLoadBean(t, &issues_model.IssueLabel{IssueID: issue.ID, LabelID: label.ID}) } if issue.PosterID > 0 { - assert.EqualValues(t, issue.PosterID, issue.Poster.ID) + assert.Equal(t, issue.PosterID, issue.Poster.ID) } if issue.AssigneeID > 0 { - assert.EqualValues(t, issue.AssigneeID, issue.Assignee.ID) + assert.Equal(t, issue.AssigneeID, issue.Assignee.ID) } if issue.MilestoneID > 0 { - assert.EqualValues(t, issue.MilestoneID, issue.Milestone.ID) + assert.Equal(t, issue.MilestoneID, issue.Milestone.ID) } if issue.IsPull { - assert.EqualValues(t, issue.ID, issue.PullRequest.IssueID) + assert.Equal(t, issue.ID, issue.PullRequest.IssueID) } for _, attachment := range issue.Attachments { - assert.EqualValues(t, issue.ID, attachment.IssueID) + assert.Equal(t, issue.ID, attachment.IssueID) } for _, comment := range issue.Comments { - assert.EqualValues(t, issue.ID, comment.IssueID) + assert.Equal(t, issue.ID, comment.IssueID) } if issue.ID == int64(1) { assert.Equal(t, int64(400), issue.TotalTrackedTime) diff --git a/models/issues/issue_update.go b/models/issues/issue_update.go index 479834045c..9b99787e3b 100644 --- a/models/issues/issue_update.go +++ b/models/issues/issue_update.go @@ -5,16 +5,14 @@ package issues import ( "context" + "errors" "fmt" "strings" "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/organization" - "code.gitea.io/gitea/models/perm" access_model "code.gitea.io/gitea/models/perm/access" - project_model "code.gitea.io/gitea/models/project" repo_model "code.gitea.io/gitea/models/repo" - system_model "code.gitea.io/gitea/models/system" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" @@ -28,38 +26,40 @@ import ( // UpdateIssueCols updates cols of issue func UpdateIssueCols(ctx context.Context, issue *Issue, cols ...string) error { - if _, err := db.GetEngine(ctx).ID(issue.ID).Cols(cols...).Update(issue); err != nil { - return err - } - return nil + _, err := db.GetEngine(ctx).ID(issue.ID).Cols(cols...).Update(issue) + return err } -func ChangeIssueStatus(ctx context.Context, issue *Issue, doer *user_model.User, isClosed, isMergePull bool) (*Comment, error) { - // Reload the issue - currentIssue, err := GetIssueByID(ctx, issue.ID) - if err != nil { - return nil, err - } +// ErrIssueIsClosed is used when close a closed issue +type ErrIssueIsClosed struct { + ID int64 + RepoID int64 + Index int64 + IsPull bool +} - // Nothing should be performed if current status is same as target status - if currentIssue.IsClosed == isClosed { - if !issue.IsPull { - return nil, ErrIssueWasClosed{ - ID: issue.ID, - } - } - return nil, 
ErrPullWasClosed{ - ID: issue.ID, - } - } +// IsErrIssueIsClosed checks if an error is a ErrIssueIsClosed. +func IsErrIssueIsClosed(err error) bool { + _, ok := err.(ErrIssueIsClosed) + return ok +} - issue.IsClosed = isClosed - return doChangeIssueStatus(ctx, issue, doer, isMergePull) +func (err ErrIssueIsClosed) Error() string { + return fmt.Sprintf("%s [id: %d, repo_id: %d, index: %d] is already closed", util.Iif(err.IsPull, "Pull Request", "Issue"), err.ID, err.RepoID, err.Index) } -func doChangeIssueStatus(ctx context.Context, issue *Issue, doer *user_model.User, isMergePull bool) (*Comment, error) { +func SetIssueAsClosed(ctx context.Context, issue *Issue, doer *user_model.User, isMergePull bool) (*Comment, error) { + if issue.IsClosed { + return nil, ErrIssueIsClosed{ + ID: issue.ID, + RepoID: issue.RepoID, + Index: issue.Index, + IsPull: issue.IsPull, + } + } + // Check for open dependencies - if issue.IsClosed && issue.Repo.IsDependenciesEnabled(ctx) { + if issue.Repo.IsDependenciesEnabled(ctx) { // only check if dependencies are enabled and we're about to close an issue, otherwise reopening an issue would fail when there are unsatisfied dependencies noDeps, err := IssueNoDependenciesLeft(ctx, issue) if err != nil { @@ -71,16 +71,63 @@ func doChangeIssueStatus(ctx context.Context, issue *Issue, doer *user_model.Use } } - if issue.IsClosed { - issue.ClosedUnix = timeutil.TimeStampNow() - } else { - issue.ClosedUnix = 0 + issue.IsClosed = true + issue.ClosedUnix = timeutil.TimeStampNow() + + if cnt, err := db.GetEngine(ctx).ID(issue.ID).Cols("is_closed", "closed_unix"). + Where("is_closed = ?", false). + Update(issue); err != nil { + return nil, err + } else if cnt != 1 { + return nil, ErrIssueAlreadyChanged } - if err := UpdateIssueCols(ctx, issue, "is_closed", "closed_unix"); err != nil { + return updateIssueNumbers(ctx, issue, doer, util.Iif(isMergePull, CommentTypeMergePull, CommentTypeClose)) +} + +// ErrIssueIsOpen is used when reopen an opened issue +type ErrIssueIsOpen struct { + ID int64 + RepoID int64 + IsPull bool + Index int64 +} + +// IsErrIssueIsOpen checks if an error is a ErrIssueIsOpen. +func IsErrIssueIsOpen(err error) bool { + _, ok := err.(ErrIssueIsOpen) + return ok +} + +func (err ErrIssueIsOpen) Error() string { + return fmt.Sprintf("%s [id: %d, repo_id: %d, index: %d] is already open", util.Iif(err.IsPull, "Pull Request", "Issue"), err.ID, err.RepoID, err.Index) +} + +func setIssueAsReopen(ctx context.Context, issue *Issue, doer *user_model.User) (*Comment, error) { + if !issue.IsClosed { + return nil, ErrIssueIsOpen{ + ID: issue.ID, + RepoID: issue.RepoID, + Index: issue.Index, + IsPull: issue.IsPull, + } + } + + issue.IsClosed = false + issue.ClosedUnix = 0 + + if cnt, err := db.GetEngine(ctx).ID(issue.ID).Cols("is_closed", "closed_unix"). + Where("is_closed = ?", true). 
+ Update(issue); err != nil { return nil, err + } else if cnt != 1 { + return nil, ErrIssueAlreadyChanged } + return updateIssueNumbers(ctx, issue, doer, CommentTypeReopen) +} + +func updateIssueNumbers(ctx context.Context, issue *Issue, doer *user_model.User, cmtType CommentType) (*Comment, error) { // Update issue count of labels if err := issue.LoadLabels(ctx); err != nil { return nil, err @@ -103,14 +150,6 @@ func doChangeIssueStatus(ctx context.Context, issue *Issue, doer *user_model.Use return nil, err } - // New action comment - cmtType := CommentTypeClose - if !issue.IsClosed { - cmtType = CommentTypeReopen - } else if isMergePull { - cmtType = CommentTypeMergePull - } - return CreateComment(ctx, &CreateCommentOptions{ Type: cmtType, Doer: doer, @@ -134,7 +173,7 @@ func CloseIssue(ctx context.Context, issue *Issue, doer *user_model.User) (*Comm } defer committer.Close() - comment, err := ChangeIssueStatus(ctx, issue, doer, true, false) + comment, err := SetIssueAsClosed(ctx, issue, doer, false) if err != nil { return nil, err } @@ -159,7 +198,7 @@ func ReopenIssue(ctx context.Context, issue *Issue, doer *user_model.User) (*Com } defer committer.Close() - comment, err := ChangeIssueStatus(ctx, issue, doer, false, false) + comment, err := setIssueAsReopen(ctx, issue, doer) if err != nil { return nil, err } @@ -265,7 +304,7 @@ func UpdateIssueAttachments(ctx context.Context, issueID int64, uuids []string) if err != nil { return fmt.Errorf("getAttachmentsByUUIDs [uuids: %v]: %w", uuids, err) } - for i := 0; i < len(attachments); i++ { + for i := range attachments { attachments[i].IssueID = issueID if err := repo_model.UpdateAttachment(ctx, attachments[i]); err != nil { return fmt.Errorf("update attachment [id: %d]: %w", attachments[i].ID, err) @@ -345,10 +384,10 @@ func NewIssueWithIndex(ctx context.Context, doer *user_model.User, opts NewIssue } if opts.Issue.Index <= 0 { - return fmt.Errorf("no issue index provided") + return errors.New("no issue index provided") } if opts.Issue.ID > 0 { - return fmt.Errorf("issue exist") + return errors.New("issue exist") } if _, err := e.Insert(opts.Issue); err != nil { @@ -570,7 +609,7 @@ func ResolveIssueMentionsByVisibility(ctx context.Context, issue *Issue, doer *u unittype = unit.TypePullRequests } for _, team := range teams { - if team.AccessMode >= perm.AccessModeAdmin { + if team.HasAdminAccess() { checked = append(checked, team.ID) resolved[issue.Repo.Owner.LowerName+"/"+team.LowerName] = true continue @@ -674,137 +713,13 @@ func UpdateReactionsMigrationsByType(ctx context.Context, gitServiceType api.Git return err } -// DeleteIssuesByRepoID deletes issues by repositories id -func DeleteIssuesByRepoID(ctx context.Context, repoID int64) (attachmentPaths []string, err error) { - // MariaDB has a performance bug: https://jira.mariadb.org/browse/MDEV-16289 - // so here it uses "DELETE ... WHERE IN" with pre-queried IDs. 
- sess := db.GetEngine(ctx) - - for { - issueIDs := make([]int64, 0, db.DefaultMaxInSize) - - err := sess.Table(&Issue{}).Where("repo_id = ?", repoID).OrderBy("id").Limit(db.DefaultMaxInSize).Cols("id").Find(&issueIDs) - if err != nil { - return nil, err - } - - if len(issueIDs) == 0 { - break - } - - // Delete content histories - _, err = sess.In("issue_id", issueIDs).Delete(&ContentHistory{}) - if err != nil { - return nil, err - } - - // Delete comments and attachments - _, err = sess.In("issue_id", issueIDs).Delete(&Comment{}) - if err != nil { - return nil, err - } - - // Dependencies for issues in this repository - _, err = sess.In("issue_id", issueIDs).Delete(&IssueDependency{}) - if err != nil { - return nil, err - } - - // Delete dependencies for issues in other repositories - _, err = sess.In("dependency_id", issueIDs).Delete(&IssueDependency{}) - if err != nil { - return nil, err - } - - _, err = sess.In("issue_id", issueIDs).Delete(&IssueUser{}) - if err != nil { - return nil, err - } - - _, err = sess.In("issue_id", issueIDs).Delete(&Reaction{}) - if err != nil { - return nil, err - } - - _, err = sess.In("issue_id", issueIDs).Delete(&IssueWatch{}) - if err != nil { - return nil, err - } - - _, err = sess.In("issue_id", issueIDs).Delete(&Stopwatch{}) - if err != nil { - return nil, err - } - - _, err = sess.In("issue_id", issueIDs).Delete(&TrackedTime{}) - if err != nil { - return nil, err - } - - _, err = sess.In("issue_id", issueIDs).Delete(&project_model.ProjectIssue{}) - if err != nil { - return nil, err - } - - _, err = sess.In("dependent_issue_id", issueIDs).Delete(&Comment{}) - if err != nil { - return nil, err - } - - var attachments []*repo_model.Attachment - err = sess.In("issue_id", issueIDs).Find(&attachments) - if err != nil { - return nil, err - } - - for j := range attachments { - attachmentPaths = append(attachmentPaths, attachments[j].RelativePath()) - } - - _, err = sess.In("issue_id", issueIDs).Delete(&repo_model.Attachment{}) - if err != nil { - return nil, err - } - - _, err = sess.In("id", issueIDs).Delete(&Issue{}) - if err != nil { - return nil, err - } - } - - return attachmentPaths, err -} - -// DeleteOrphanedIssues delete issues without a repo -func DeleteOrphanedIssues(ctx context.Context) error { - var attachmentPaths []string - err := db.WithTx(ctx, func(ctx context.Context) error { - var ids []int64 - - if err := db.GetEngine(ctx).Table("issue").Distinct("issue.repo_id"). - Join("LEFT", "repository", "issue.repo_id=repository.id"). - Where(builder.IsNull{"repository.id"}).GroupBy("issue.repo_id"). - Find(&ids); err != nil { - return err - } - - for i := range ids { - paths, err := DeleteIssuesByRepoID(ctx, ids[i]) - if err != nil { - return err - } - attachmentPaths = append(attachmentPaths, paths...) - } - - return nil - }) - if err != nil { - return err - } - - // Remove issue attachment files. - for i := range attachmentPaths { - system_model.RemoveAllWithNotice(ctx, "Delete issue attachment", attachmentPaths[i]) +func GetOrphanedIssueRepoIDs(ctx context.Context) ([]int64, error) { + var repoIDs []int64 + if err := db.GetEngine(ctx).Table("issue").Distinct("issue.repo_id"). + Join("LEFT", "repository", "issue.repo_id=repository.id"). + Where(builder.IsNull{"repository.id"}). 
+ Find(&repoIDs); err != nil { + return nil, err } - return nil + return repoIDs, nil } diff --git a/models/issues/label.go b/models/issues/label.go index b9d24bbe99..cfbe100926 100644 --- a/models/issues/label.go +++ b/models/issues/label.go @@ -87,6 +87,7 @@ type Label struct { OrgID int64 `xorm:"INDEX"` Name string Exclusive bool + ExclusiveOrder int `xorm:"DEFAULT 0"` // 0 means no exclusive order Description string Color string `xorm:"VARCHAR(7)"` NumIssues int @@ -236,7 +237,7 @@ func UpdateLabel(ctx context.Context, l *Label) error { } l.Color = color - return updateLabelCols(ctx, l, "name", "description", "color", "exclusive", "archived_unix") + return updateLabelCols(ctx, l, "name", "description", "color", "exclusive", "exclusive_order", "archived_unix") } // DeleteLabel delete a label @@ -299,6 +300,9 @@ func GetLabelByID(ctx context.Context, labelID int64) (*Label, error) { // GetLabelsByIDs returns a list of labels by IDs func GetLabelsByIDs(ctx context.Context, labelIDs []int64, cols ...string) ([]*Label, error) { labels := make([]*Label, 0, len(labelIDs)) + if len(labelIDs) == 0 { + return labels, nil + } return labels, db.GetEngine(ctx).Table("label"). In("id", labelIDs). Asc("name"). @@ -375,6 +379,9 @@ func BuildLabelNamesIssueIDsCondition(labelNames []string) *builder.Builder { // it silently ignores label IDs that do not belong to the repository. func GetLabelsInRepoByIDs(ctx context.Context, repoID int64, labelIDs []int64) ([]*Label, error) { labels := make([]*Label, 0, len(labelIDs)) + if len(labelIDs) == 0 { + return labels, nil + } return labels, db.GetEngine(ctx). Where("repo_id = ?", repoID). In("id", labelIDs). @@ -447,6 +454,9 @@ func GetLabelInOrgByID(ctx context.Context, orgID, labelID int64) (*Label, error // it silently ignores label IDs that do not belong to the organization. func GetLabelsInOrgByIDs(ctx context.Context, orgID int64, labelIDs []int64) ([]*Label, error) { labels := make([]*Label, 0, len(labelIDs)) + if len(labelIDs) == 0 { + return labels, nil + } return labels, db.GetEngine(ctx). Where("org_id = ?", orgID). In("id", labelIDs). 
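
The guard added above to GetLabelsByIDs, GetLabelsInRepoByIDs and GetLabelsInOrgByIDs returns early so an empty labelIDs slice never reaches the IN() query builder. A minimal, self-contained sketch of that same pattern follows; the findLabelsByIDs helper and its stubbed query function are illustrative only and are not part of this patch:

package main

import "fmt"

// findLabelsByIDs sketches the early-return guard: when the caller passes no
// IDs, hand back an empty (but non-nil) slice immediately instead of building
// a degenerate "IN ()" query. queryByIDs stands in for the real xorm call.
func findLabelsByIDs(ids []int64, queryByIDs func([]int64) ([]string, error)) ([]string, error) {
	labels := make([]string, 0, len(ids))
	if len(ids) == 0 {
		return labels, nil // nothing to look up; skip the database round trip
	}
	found, err := queryByIDs(ids)
	if err != nil {
		return nil, err
	}
	return append(labels, found...), nil
}

func main() {
	// With an empty ID list the stub query is never invoked.
	labels, err := findLabelsByIDs(nil, func([]int64) ([]string, error) {
		panic("should not be called for an empty ID list")
	})
	fmt.Println(labels, err) // prints: [] <nil>
}
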
diff --git a/models/issues/label_test.go b/models/issues/label_test.go index 185fa11bbc..226036d543 100644 --- a/models/issues/label_test.go +++ b/models/issues/label_test.go @@ -20,7 +20,7 @@ func TestLabel_CalOpenIssues(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) label := unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 1}) label.CalOpenIssues() - assert.EqualValues(t, 2, label.NumOpenIssues) + assert.Equal(t, 2, label.NumOpenIssues) } func TestLabel_LoadSelectedLabelsAfterClick(t *testing.T) { @@ -154,7 +154,7 @@ func TestGetLabelsByRepoID(t *testing.T) { assert.NoError(t, err) assert.Len(t, labels, len(expectedIssueIDs)) for i, label := range labels { - assert.EqualValues(t, expectedIssueIDs[i], label.ID) + assert.Equal(t, expectedIssueIDs[i], label.ID) } } testSuccess(1, "leastissues", []int64{2, 1}) @@ -221,7 +221,7 @@ func TestGetLabelsByOrgID(t *testing.T) { assert.NoError(t, err) assert.Len(t, labels, len(expectedIssueIDs)) for i, label := range labels { - assert.EqualValues(t, expectedIssueIDs[i], label.ID) + assert.Equal(t, expectedIssueIDs[i], label.ID) } } testSuccess(3, "leastissues", []int64{3, 4}) @@ -267,10 +267,10 @@ func TestUpdateLabel(t *testing.T) { label.Name = update.Name assert.NoError(t, issues_model.UpdateLabel(db.DefaultContext, update)) newLabel := unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 1}) - assert.EqualValues(t, label.ID, newLabel.ID) - assert.EqualValues(t, label.Color, newLabel.Color) - assert.EqualValues(t, label.Name, newLabel.Name) - assert.EqualValues(t, label.Description, newLabel.Description) + assert.Equal(t, label.ID, newLabel.ID) + assert.Equal(t, label.Color, newLabel.Color) + assert.Equal(t, label.Name, newLabel.Name) + assert.Equal(t, label.Description, newLabel.Description) assert.EqualValues(t, 0, newLabel.ArchivedUnix) unittest.CheckConsistencyFor(t, &issues_model.Label{}, &repo_model.Repository{}) } @@ -313,7 +313,7 @@ func TestNewIssueLabel(t *testing.T) { Content: "1", }) label = unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 2}) - assert.EqualValues(t, prevNumIssues+1, label.NumIssues) + assert.Equal(t, prevNumIssues+1, label.NumIssues) // re-add existing IssueLabel assert.NoError(t, issues_model.NewIssueLabel(db.DefaultContext, issue, label, doer)) @@ -366,11 +366,11 @@ func TestNewIssueLabels(t *testing.T) { }) unittest.AssertExistsAndLoadBean(t, &issues_model.IssueLabel{IssueID: issue.ID, LabelID: label1.ID}) label1 = unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 1}) - assert.EqualValues(t, 3, label1.NumIssues) - assert.EqualValues(t, 1, label1.NumClosedIssues) + assert.Equal(t, 3, label1.NumIssues) + assert.Equal(t, 1, label1.NumClosedIssues) label2 = unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 2}) - assert.EqualValues(t, 1, label2.NumIssues) - assert.EqualValues(t, 1, label2.NumClosedIssues) + assert.Equal(t, 1, label2.NumIssues) + assert.Equal(t, 1, label2.NumClosedIssues) // corner case: test empty slice assert.NoError(t, issues_model.NewIssueLabels(db.DefaultContext, issue, []*issues_model.Label{}, doer)) @@ -408,8 +408,8 @@ func TestDeleteIssueLabel(t *testing.T) { LabelID: labelID, }, `content=''`) label = unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: labelID}) - assert.EqualValues(t, expectedNumIssues, label.NumIssues) - assert.EqualValues(t, expectedNumClosedIssues, label.NumClosedIssues) + assert.Equal(t, expectedNumIssues, label.NumIssues) + assert.Equal(t, expectedNumClosedIssues, label.NumClosedIssues) } 
testSuccess(1, 1, 2) testSuccess(2, 5, 2) diff --git a/models/issues/milestone_test.go b/models/issues/milestone_test.go index 28cd0c028b..f73355c27d 100644 --- a/models/issues/milestone_test.go +++ b/models/issues/milestone_test.go @@ -69,7 +69,7 @@ func TestGetMilestonesByRepoID(t *testing.T) { assert.Len(t, milestones, n) for _, milestone := range milestones { - assert.EqualValues(t, repoID, milestone.RepoID) + assert.Equal(t, repoID, milestone.RepoID) } } test(1, api.StateOpen) @@ -327,7 +327,7 @@ func TestUpdateMilestone(t *testing.T) { milestone.Content = "newMilestoneContent" assert.NoError(t, issues_model.UpdateMilestone(db.DefaultContext, milestone, milestone.IsClosed)) milestone = unittest.AssertExistsAndLoadBean(t, &issues_model.Milestone{ID: 1}) - assert.EqualValues(t, "newMilestoneName", milestone.Name) + assert.Equal(t, "newMilestoneName", milestone.Name) unittest.CheckConsistencyFor(t, &issues_model.Milestone{}) } @@ -364,7 +364,7 @@ func TestMigrate_InsertMilestones(t *testing.T) { assert.NoError(t, err) unittest.AssertExistsAndLoadBean(t, ms) repoModified := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: repo.ID}) - assert.EqualValues(t, repo.NumMilestones+1, repoModified.NumMilestones) + assert.Equal(t, repo.NumMilestones+1, repoModified.NumMilestones) unittest.CheckConsistencyFor(t, &issues_model.Milestone{}) } diff --git a/models/issues/pull.go b/models/issues/pull.go index 8c43eb19dd..0ff32e2473 100644 --- a/models/issues/pull.go +++ b/models/issues/pull.go @@ -6,10 +6,10 @@ package issues import ( "context" + "errors" "fmt" "io" "regexp" - "strconv" "strings" "code.gitea.io/gitea/models/db" @@ -80,22 +80,6 @@ func (err ErrPullRequestAlreadyExists) Unwrap() error { return util.ErrAlreadyExist } -// ErrPullWasClosed is used close a closed pull request -type ErrPullWasClosed struct { - ID int64 - Index int64 -} - -// IsErrPullWasClosed checks if an error is a ErrErrPullWasClosed. -func IsErrPullWasClosed(err error) bool { - _, ok := err.(ErrPullWasClosed) - return ok -} - -func (err ErrPullWasClosed) Error() string { - return fmt.Sprintf("Pull request [%d] %d was already closed", err.ID, err.Index) -} - // PullRequestType defines pull request type type PullRequestType int @@ -119,27 +103,6 @@ const ( PullRequestStatusAncestor ) -func (status PullRequestStatus) String() string { - switch status { - case PullRequestStatusConflict: - return "CONFLICT" - case PullRequestStatusChecking: - return "CHECKING" - case PullRequestStatusMergeable: - return "MERGEABLE" - case PullRequestStatusManuallyMerged: - return "MANUALLY_MERGED" - case PullRequestStatusError: - return "ERROR" - case PullRequestStatusEmpty: - return "EMPTY" - case PullRequestStatusAncestor: - return "ANCESTOR" - default: - return strconv.Itoa(int(status)) - } -} - // PullRequestFlow the flow of pull request type PullRequestFlow int @@ -301,7 +264,7 @@ func (pr *PullRequest) LoadRequestedReviewers(ctx context.Context) error { return nil } - reviews, err := GetReviewsByIssueID(ctx, pr.Issue.ID) + reviews, _, err := GetReviewsByIssueID(ctx, pr.Issue.ID) if err != nil { return err } @@ -320,7 +283,7 @@ func (pr *PullRequest) LoadRequestedReviewers(ctx context.Context) error { // LoadRequestedReviewersTeams loads the requested reviewers teams. 
func (pr *PullRequest) LoadRequestedReviewersTeams(ctx context.Context) error { - reviews, err := GetReviewsByIssueID(ctx, pr.Issue.ID) + reviews, _, err := GetReviewsByIssueID(ctx, pr.Issue.ID) if err != nil { return err } @@ -686,12 +649,6 @@ func GetAllUnmergedAgitPullRequestByPoster(ctx context.Context, uid int64) ([]*P return pulls, err } -// Update updates all fields of pull request. -func (pr *PullRequest) Update(ctx context.Context) error { - _, err := db.GetEngine(ctx).ID(pr.ID).AllCols().Update(pr) - return err -} - // UpdateCols updates specific fields of pull request. func (pr *PullRequest) UpdateCols(ctx context.Context, cols ...string) error { _, err := db.GetEngine(ctx).ID(pr.ID).Cols(cols...).Update(pr) @@ -748,7 +705,7 @@ func (pr *PullRequest) GetWorkInProgressPrefix(ctx context.Context) string { // UpdateCommitDivergence update Divergence of a pull request func (pr *PullRequest) UpdateCommitDivergence(ctx context.Context, ahead, behind int) error { if pr.ID == 0 { - return fmt.Errorf("pull ID is 0") + return errors.New("pull ID is 0") } pr.CommitsAhead = ahead pr.CommitsBehind = behind @@ -941,7 +898,7 @@ func ParseCodeOwnersLine(ctx context.Context, tokens []string) (*CodeOwnerRule, if strings.Contains(user, "/") { s := strings.Split(user, "/") if len(s) != 2 { - warnings = append(warnings, fmt.Sprintf("incorrect codeowner group: %s", user)) + warnings = append(warnings, "incorrect codeowner group: "+user) continue } orgName := s[0] @@ -949,12 +906,12 @@ func ParseCodeOwnersLine(ctx context.Context, tokens []string) (*CodeOwnerRule, org, err := org_model.GetOrgByName(ctx, orgName) if err != nil { - warnings = append(warnings, fmt.Sprintf("incorrect codeowner organization: %s", user)) + warnings = append(warnings, "incorrect codeowner organization: "+user) continue } teams, err := org.LoadTeams(ctx) if err != nil { - warnings = append(warnings, fmt.Sprintf("incorrect codeowner team: %s", user)) + warnings = append(warnings, "incorrect codeowner team: "+user) continue } @@ -966,7 +923,7 @@ func ParseCodeOwnersLine(ctx context.Context, tokens []string) (*CodeOwnerRule, } else { u, err := user_model.GetUserByName(ctx, user) if err != nil { - warnings = append(warnings, fmt.Sprintf("incorrect codeowner user: %s", user)) + warnings = append(warnings, "incorrect codeowner user: "+user) continue } rule.Users = append(rule.Users, u) diff --git a/models/issues/pull_list.go b/models/issues/pull_list.go index 59010aa9d0..84f9f6166d 100644 --- a/models/issues/pull_list.go +++ b/models/issues/pull_list.go @@ -28,11 +28,16 @@ type PullRequestsOptions struct { Labels []int64 MilestoneID int64 PosterID int64 + BaseBranch string } func listPullRequestStatement(ctx context.Context, baseRepoID int64, opts *PullRequestsOptions) *xorm.Session { sess := db.GetEngine(ctx).Where("pull_request.base_repo_id=?", baseRepoID) + if opts.BaseBranch != "" { + sess.And("pull_request.base_branch=?", opts.BaseBranch) + } + sess.Join("INNER", "issue", "pull_request.issue_id = issue.id") switch opts.State { case "closed", "open": @@ -56,7 +61,7 @@ func listPullRequestStatement(ctx context.Context, baseRepoID int64, opts *PullR } // GetUnmergedPullRequestsByHeadInfo returns all pull requests that are open and has not been merged -func GetUnmergedPullRequestsByHeadInfo(ctx context.Context, repoID int64, branch string) ([]*PullRequest, error) { +func GetUnmergedPullRequestsByHeadInfo(ctx context.Context, repoID int64, branch string) (PullRequestList, error) { prs := make([]*PullRequest, 0, 2) sess := 
db.GetEngine(ctx). Join("INNER", "issue", "issue.id = pull_request.issue_id"). @@ -111,7 +116,7 @@ func HasUnmergedPullRequestsByHeadInfo(ctx context.Context, repoID int64, branch // GetUnmergedPullRequestsByBaseInfo returns all pull requests that are open and has not been merged // by given base information (repo and branch). -func GetUnmergedPullRequestsByBaseInfo(ctx context.Context, repoID int64, branch string) ([]*PullRequest, error) { +func GetUnmergedPullRequestsByBaseInfo(ctx context.Context, repoID int64, branch string) (PullRequestList, error) { prs := make([]*PullRequest, 0, 2) return prs, db.GetEngine(ctx). Where("base_repo_id=? AND base_branch=? AND has_merged=? AND issue.is_closed=?", @@ -147,7 +152,8 @@ func PullRequests(ctx context.Context, baseRepoID int64, opts *PullRequestsOptio applySorts(findSession, opts.SortType, 0) findSession = db.SetSessionPagination(findSession, opts) prs := make([]*PullRequest, 0, opts.PageSize) - return prs, maxResults, findSession.Find(&prs) + found := findSession.Find(&prs) + return prs, maxResults, found } // PullRequestList defines a list of pull requests @@ -166,6 +172,23 @@ func (prs PullRequestList) getRepositoryIDs() []int64 { return repoIDs.Values() } +func (prs PullRequestList) SetBaseRepo(baseRepo *repo_model.Repository) { + for _, pr := range prs { + if pr.BaseRepo == nil { + pr.BaseRepo = baseRepo + } + } +} + +func (prs PullRequestList) SetHeadRepo(headRepo *repo_model.Repository) { + for _, pr := range prs { + if pr.HeadRepo == nil { + pr.HeadRepo = headRepo + pr.isHeadRepoLoaded = true + } + } +} + func (prs PullRequestList) LoadRepositories(ctx context.Context) error { repoIDs := prs.getRepositoryIDs() reposMap := make(map[int64]*repo_model.Repository, len(repoIDs)) diff --git a/models/issues/pull_list_test.go b/models/issues/pull_list_test.go index c7a898ca4e..eb2de006d6 100644 --- a/models/issues/pull_list_test.go +++ b/models/issues/pull_list_test.go @@ -16,11 +16,11 @@ import ( func TestPullRequestList_LoadAttributes(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) - prs := []*issues_model.PullRequest{ + prs := issues_model.PullRequestList{ unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1}), unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}), } - assert.NoError(t, issues_model.PullRequestList(prs).LoadAttributes(db.DefaultContext)) + assert.NoError(t, prs.LoadAttributes(db.DefaultContext)) for _, pr := range prs { assert.NotNil(t, pr.Issue) assert.Equal(t, pr.IssueID, pr.Issue.ID) @@ -32,26 +32,26 @@ func TestPullRequestList_LoadAttributes(t *testing.T) { func TestPullRequestList_LoadReviewCommentsCounts(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) - prs := []*issues_model.PullRequest{ + prs := issues_model.PullRequestList{ unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1}), unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}), } - reviewComments, err := issues_model.PullRequestList(prs).LoadReviewCommentsCounts(db.DefaultContext) + reviewComments, err := prs.LoadReviewCommentsCounts(db.DefaultContext) assert.NoError(t, err) assert.Len(t, reviewComments, 2) for _, pr := range prs { - assert.EqualValues(t, 1, reviewComments[pr.IssueID]) + assert.Equal(t, 1, reviewComments[pr.IssueID]) } } func TestPullRequestList_LoadReviews(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) - prs := []*issues_model.PullRequest{ + prs := issues_model.PullRequestList{ unittest.AssertExistsAndLoadBean(t, 
&issues_model.PullRequest{ID: 1}), unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}), } - reviewList, err := issues_model.PullRequestList(prs).LoadReviews(db.DefaultContext) + reviewList, err := prs.LoadReviews(db.DefaultContext) assert.NoError(t, err) // 1, 7, 8, 9, 10, 22 assert.Len(t, reviewList, 6) diff --git a/models/issues/pull_test.go b/models/issues/pull_test.go index 090659864a..39efaa5792 100644 --- a/models/issues/pull_test.go +++ b/models/issues/pull_test.go @@ -14,6 +14,7 @@ import ( "code.gitea.io/gitea/modules/setting" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestPullRequest_LoadAttributes(t *testing.T) { @@ -76,6 +77,47 @@ func TestPullRequestsNewest(t *testing.T) { } } +func TestPullRequests_Closed_RecentSortType(t *testing.T) { + // Issue ID | Closed At. | Updated At + // 2 | 1707270001 | 1707270001 + // 3 | 1707271000 | 1707279999 + // 11 | 1707279999 | 1707275555 + tests := []struct { + sortType string + expectedIssueIDOrder []int64 + }{ + {"recentupdate", []int64{3, 11, 2}}, + {"recentclose", []int64{11, 3, 2}}, + } + + assert.NoError(t, unittest.PrepareTestDatabase()) + _, err := db.Exec(db.DefaultContext, "UPDATE issue SET closed_unix = 1707270001, updated_unix = 1707270001, is_closed = true WHERE id = 2") + require.NoError(t, err) + _, err = db.Exec(db.DefaultContext, "UPDATE issue SET closed_unix = 1707271000, updated_unix = 1707279999, is_closed = true WHERE id = 3") + require.NoError(t, err) + _, err = db.Exec(db.DefaultContext, "UPDATE issue SET closed_unix = 1707279999, updated_unix = 1707275555, is_closed = true WHERE id = 11") + require.NoError(t, err) + + for _, test := range tests { + t.Run(test.sortType, func(t *testing.T) { + prs, _, err := issues_model.PullRequests(db.DefaultContext, 1, &issues_model.PullRequestsOptions{ + ListOptions: db.ListOptions{ + Page: 1, + }, + State: "closed", + SortType: test.sortType, + }) + require.NoError(t, err) + + if assert.Len(t, prs, len(test.expectedIssueIDOrder)) { + for i := range test.expectedIssueIDOrder { + assert.Equal(t, test.expectedIssueIDOrder[i], prs[i].IssueID) + } + } + }) + } +} + func TestLoadRequestedReviewers(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) @@ -206,19 +248,6 @@ func TestGetPullRequestByIssueID(t *testing.T) { assert.True(t, issues_model.IsErrPullRequestNotExist(err)) } -func TestPullRequest_Update(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1}) - pr.BaseBranch = "baseBranch" - pr.HeadBranch = "headBranch" - pr.Update(db.DefaultContext) - - pr = unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: pr.ID}) - assert.Equal(t, "baseBranch", pr.BaseBranch) - assert.Equal(t, "headBranch", pr.HeadBranch) - unittest.CheckConsistencyFor(t, pr) -} - func TestPullRequest_UpdateCols(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) pr := &issues_model.PullRequest{ @@ -285,7 +314,7 @@ func TestDeleteOrphanedObjects(t *testing.T) { countAfter, err := db.GetEngine(db.DefaultContext).Count(&issues_model.PullRequest{}) assert.NoError(t, err) - assert.EqualValues(t, countBefore, countAfter) + assert.Equal(t, countBefore, countAfter) } func TestParseCodeOwnersLine(t *testing.T) { @@ -318,7 +347,7 @@ func TestGetApprovers(t *testing.T) { setting.Repository.PullRequest.DefaultMergeMessageOfficialApproversOnly = false approvers := pr.GetApprovers(db.DefaultContext) expected := "Reviewed-by: User Five 
<user5@example.com>\nReviewed-by: Org Six <org6@example.com>\n" - assert.EqualValues(t, expected, approvers) + assert.Equal(t, expected, approvers) } func TestGetPullRequestByMergedCommit(t *testing.T) { diff --git a/models/issues/reaction.go b/models/issues/reaction.go index 11b3c6be20..f24001fd23 100644 --- a/models/issues/reaction.go +++ b/models/issues/reaction.go @@ -7,6 +7,7 @@ import ( "bytes" "context" "fmt" + "strings" "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" @@ -321,6 +322,11 @@ func valuesUser(m map[int64]*user_model.User) []*user_model.User { return values } +// newMigrationOriginalUser creates and returns a fake user for external user +func newMigrationOriginalUser(name string) *user_model.User { + return &user_model.User{ID: 0, Name: name, LowerName: strings.ToLower(name)} +} + // LoadUsers loads reactions' all users func (list ReactionList) LoadUsers(ctx context.Context, repo *repo_model.Repository) ([]*user_model.User, error) { if len(list) == 0 { @@ -338,7 +344,7 @@ func (list ReactionList) LoadUsers(ctx context.Context, repo *repo_model.Reposit for _, reaction := range list { if reaction.OriginalAuthor != "" { - reaction.User = user_model.NewReplaceUser(fmt.Sprintf("%s(%s)", reaction.OriginalAuthor, repo.OriginalServiceType.Name())) + reaction.User = newMigrationOriginalUser(fmt.Sprintf("%s(%s)", reaction.OriginalAuthor, repo.OriginalServiceType.Name())) } else if user, ok := userMaps[reaction.UserID]; ok { reaction.User = user } else { diff --git a/models/issues/review.go b/models/issues/review.go index 3e787273be..71fdb7456f 100644 --- a/models/issues/review.go +++ b/models/issues/review.go @@ -5,6 +5,7 @@ package issues import ( "context" + "errors" "fmt" "slices" "strings" @@ -374,7 +375,7 @@ func CreateReview(ctx context.Context, opts CreateReviewOptions) (*Review, error review.Type = ReviewTypeRequest review.ReviewerTeamID = opts.ReviewerTeam.ID } else { - return nil, fmt.Errorf("provide either reviewer or reviewer team") + return nil, errors.New("provide either reviewer or reviewer team") } if _, err := sess.Insert(review); err != nil { @@ -663,7 +664,7 @@ func AddReviewRequest(ctx context.Context, issue *Issue, reviewer, doer *user_mo } if review != nil { - // skip it when reviewer hase been request to review + // skip it when reviewer has been request to review if review.Type == ReviewTypeRequest { return nil, committer.Commit() // still commit the transaction, or committer.Close() will rollback it, even if it's a reused transaction. 
} @@ -930,17 +931,19 @@ func MarkConversation(ctx context.Context, comment *Comment, doer *user_model.Us } // CanMarkConversation Add or remove Conversation mark for a code comment permission check -// the PR writer , offfcial reviewer and poster can do it +// the PR writer , official reviewer and poster can do it func CanMarkConversation(ctx context.Context, issue *Issue, doer *user_model.User) (permResult bool, err error) { if doer == nil || issue == nil { - return false, fmt.Errorf("issue or doer is nil") + return false, errors.New("issue or doer is nil") } + if err = issue.LoadRepo(ctx); err != nil { + return false, err + } + if issue.Repo.IsArchived { + return false, nil + } if doer.ID != issue.PosterID { - if err = issue.LoadRepo(ctx); err != nil { - return false, err - } - p, err := access_model.GetUserRepoPermission(ctx, issue.Repo, doer) if err != nil { return false, err @@ -970,11 +973,11 @@ func DeleteReview(ctx context.Context, r *Review) error { defer committer.Close() if r.ID == 0 { - return fmt.Errorf("review is not allowed to be 0") + return errors.New("review is not allowed to be 0") } if r.Type == ReviewTypeRequest { - return fmt.Errorf("review request can not be deleted using this method") + return errors.New("review request can not be deleted using this method") } opts := FindCommentsOptions{ diff --git a/models/issues/review_list.go b/models/issues/review_list.go index bc7d7ec0f0..bbb8c489fa 100644 --- a/models/issues/review_list.go +++ b/models/issues/review_list.go @@ -5,6 +5,8 @@ package issues import ( "context" + "slices" + "sort" "code.gitea.io/gitea/models/db" organization_model "code.gitea.io/gitea/models/organization" @@ -20,7 +22,7 @@ type ReviewList []*Review // LoadReviewers loads reviewers func (reviews ReviewList) LoadReviewers(ctx context.Context) error { reviewerIDs := make([]int64, len(reviews)) - for i := 0; i < len(reviews); i++ { + for i := range reviews { reviewerIDs[i] = reviews[i].ReviewerID } reviewers, err := user_model.GetPossibleUserByIDs(ctx, reviewerIDs) @@ -153,43 +155,60 @@ func CountReviews(ctx context.Context, opts FindReviewOptions) (int64, error) { return db.GetEngine(ctx).Where(opts.toCond()).Count(&Review{}) } -// GetReviewersFromOriginalAuthorsByIssueID gets the latest review of each original authors for a pull request -func GetReviewersFromOriginalAuthorsByIssueID(ctx context.Context, issueID int64) (ReviewList, error) { +// GetReviewsByIssueID gets the latest review of each reviewer for a pull request +// The first returned parameter is the latest review of each individual reviewer or team +// The second returned parameter is the latest review of each original author which is migrated from other systems +// The reviews are sorted by updated time +func GetReviewsByIssueID(ctx context.Context, issueID int64) (latestReviews, migratedOriginalReviews ReviewList, err error) { reviews := make([]*Review, 0, 10) - // Get latest review of each reviewer, sorted in order they were made - if err := db.GetEngine(ctx).SQL("SELECT * FROM review WHERE id IN (SELECT max(id) as id FROM review WHERE issue_id = ? AND reviewer_team_id = 0 AND type in (?, ?, ?) AND original_author_id <> 0 GROUP BY issue_id, original_author_id) ORDER BY review.updated_unix ASC", - issueID, ReviewTypeApprove, ReviewTypeReject, ReviewTypeRequest). 
- Find(&reviews); err != nil { - return nil, err + // Get all reviews for the issue id + if err := db.GetEngine(ctx).Where("issue_id=?", issueID).OrderBy("updated_unix ASC").Find(&reviews); err != nil { + return nil, nil, err } - return reviews, nil -} - -// GetReviewsByIssueID gets the latest review of each reviewer for a pull request -func GetReviewsByIssueID(ctx context.Context, issueID int64) (ReviewList, error) { - reviews := make([]*Review, 0, 10) - - sess := db.GetEngine(ctx) + // filter them in memory to get the latest review of each reviewer + // Since the reviews should not be too many for one issue, less than 100 commonly, it's acceptable to do this in memory + // And since there are too less indexes in review table, it will be very slow to filter in the database + reviewersMap := make(map[int64][]*Review) // key is reviewer id + originalReviewersMap := make(map[int64][]*Review) // key is original author id + reviewTeamsMap := make(map[int64][]*Review) // key is reviewer team id + countedReivewTypes := []ReviewType{ReviewTypeApprove, ReviewTypeReject, ReviewTypeRequest} + for _, review := range reviews { + if review.ReviewerTeamID == 0 && slices.Contains(countedReivewTypes, review.Type) && !review.Dismissed { + if review.OriginalAuthorID != 0 { + originalReviewersMap[review.OriginalAuthorID] = append(originalReviewersMap[review.OriginalAuthorID], review) + } else { + reviewersMap[review.ReviewerID] = append(reviewersMap[review.ReviewerID], review) + } + } else if review.ReviewerTeamID != 0 && review.OriginalAuthorID == 0 { + reviewTeamsMap[review.ReviewerTeamID] = append(reviewTeamsMap[review.ReviewerTeamID], review) + } + } - // Get latest review of each reviewer, sorted in order they were made - if err := sess.SQL("SELECT * FROM review WHERE id IN (SELECT max(id) as id FROM review WHERE issue_id = ? AND reviewer_team_id = 0 AND type in (?, ?, ?) AND dismissed = ? AND original_author_id = 0 GROUP BY issue_id, reviewer_id) ORDER BY review.updated_unix ASC", - issueID, ReviewTypeApprove, ReviewTypeReject, ReviewTypeRequest, false). - Find(&reviews); err != nil { - return nil, err + individualReviews := make([]*Review, 0, 10) + for _, reviews := range reviewersMap { + individualReviews = append(individualReviews, reviews[len(reviews)-1]) } + sort.Slice(individualReviews, func(i, j int) bool { + return individualReviews[i].UpdatedUnix < individualReviews[j].UpdatedUnix + }) - teamReviewRequests := make([]*Review, 0, 5) - if err := sess.SQL("SELECT * FROM review WHERE id IN (SELECT max(id) as id FROM review WHERE issue_id = ? AND reviewer_team_id <> 0 AND original_author_id = 0 GROUP BY issue_id, reviewer_team_id) ORDER BY review.updated_unix ASC", - issueID). - Find(&teamReviewRequests); err != nil { - return nil, err + originalReviews := make([]*Review, 0, 10) + for _, reviews := range originalReviewersMap { + originalReviews = append(originalReviews, reviews[len(reviews)-1]) } + sort.Slice(originalReviews, func(i, j int) bool { + return originalReviews[i].UpdatedUnix < originalReviews[j].UpdatedUnix + }) - if len(teamReviewRequests) > 0 { - reviews = append(reviews, teamReviewRequests...) 
+ teamReviewRequests := make([]*Review, 0, 5) + for _, reviews := range reviewTeamsMap { + teamReviewRequests = append(teamReviewRequests, reviews[len(reviews)-1]) } + sort.Slice(teamReviewRequests, func(i, j int) bool { + return teamReviewRequests[i].UpdatedUnix < teamReviewRequests[j].UpdatedUnix + }) - return reviews, nil + return append(individualReviews, teamReviewRequests...), originalReviews, nil } diff --git a/models/issues/review_test.go b/models/issues/review_test.go index 50330e3ff2..2588b8ba41 100644 --- a/models/issues/review_test.go +++ b/models/issues/review_test.go @@ -162,8 +162,9 @@ func TestGetReviewersByIssueID(t *testing.T) { }, ) - allReviews, err := issues_model.GetReviewsByIssueID(db.DefaultContext, issue.ID) + allReviews, migratedReviews, err := issues_model.GetReviewsByIssueID(db.DefaultContext, issue.ID) assert.NoError(t, err) + assert.Empty(t, migratedReviews) for _, review := range allReviews { assert.NoError(t, review.LoadReviewer(db.DefaultContext)) } diff --git a/models/issues/stopwatch.go b/models/issues/stopwatch.go index 629af95b57..761b8f91a0 100644 --- a/models/issues/stopwatch.go +++ b/models/issues/stopwatch.go @@ -5,7 +5,6 @@ package issues import ( "context" - "fmt" "time" "code.gitea.io/gitea/models/db" @@ -15,20 +14,6 @@ import ( "code.gitea.io/gitea/modules/util" ) -// ErrIssueStopwatchNotExist represents an error that stopwatch is not exist -type ErrIssueStopwatchNotExist struct { - UserID int64 - IssueID int64 -} - -func (err ErrIssueStopwatchNotExist) Error() string { - return fmt.Sprintf("issue stopwatch doesn't exist[uid: %d, issue_id: %d", err.UserID, err.IssueID) -} - -func (err ErrIssueStopwatchNotExist) Unwrap() error { - return util.ErrNotExist -} - // Stopwatch represents a stopwatch for time tracking. type Stopwatch struct { ID int64 `xorm:"pk autoincr"` @@ -46,11 +31,6 @@ func (s Stopwatch) Seconds() int64 { return int64(timeutil.TimeStampNow() - s.CreatedUnix) } -// Duration returns a human-readable duration string based on local server time -func (s Stopwatch) Duration() string { - return util.SecToTime(s.Seconds()) -} - func getStopwatch(ctx context.Context, userID, issueID int64) (sw *Stopwatch, exists bool, err error) { sw = new(Stopwatch) exists, err = db.GetEngine(ctx). 
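
The reworked GetReviewsByIssueID shown above loads every review for the issue once and then selects the latest review per reviewer, per original author and per team in memory, rather than through GROUP BY subqueries. A rough, self-contained sketch of that map-then-sort selection; the review struct and latestPerReviewer helper below are simplified stand-ins, not the actual model types:

package main

import (
	"fmt"
	"sort"
)

// review is a stripped-down stand-in for a pull request review record.
type review struct {
	ReviewerID  int64
	UpdatedUnix int64
}

// latestPerReviewer keeps only the most recent review of each reviewer and
// returns them ordered by update time, mirroring the map-then-sort approach.
func latestPerReviewer(all []review) []review {
	byReviewer := map[int64]review{}
	for _, r := range all {
		if cur, ok := byReviewer[r.ReviewerID]; !ok || r.UpdatedUnix > cur.UpdatedUnix {
			byReviewer[r.ReviewerID] = r
		}
	}
	out := make([]review, 0, len(byReviewer))
	for _, r := range byReviewer {
		out = append(out, r)
	}
	sort.Slice(out, func(i, j int) bool { return out[i].UpdatedUnix < out[j].UpdatedUnix })
	return out
}

func main() {
	fmt.Println(latestPerReviewer([]review{
		{ReviewerID: 1, UpdatedUnix: 10},
		{ReviewerID: 2, UpdatedUnix: 12},
		{ReviewerID: 1, UpdatedUnix: 15}, // supersedes reviewer 1's earlier review
	}))
	// prints: [{2 12} {1 15}]
}

The patch itself relies on the rows already being ordered by updated_unix and keeps the last entry of each group; comparing UpdatedUnix directly, as in this sketch, yields the same latest-per-reviewer result.
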
@@ -60,13 +40,11 @@ func getStopwatch(ctx context.Context, userID, issueID int64) (sw *Stopwatch, ex return sw, exists, err } -// UserIDCount is a simple coalition of UserID and Count type UserStopwatch struct { UserID int64 StopWatches []*Stopwatch } -// GetUIDsAndNotificationCounts between the two provided times func GetUIDsAndStopwatch(ctx context.Context) ([]*UserStopwatch, error) { sws := []*Stopwatch{} if err := db.GetEngine(ctx).Where("issue_id != 0").Find(&sws); err != nil { @@ -92,7 +70,7 @@ func GetUIDsAndStopwatch(ctx context.Context) ([]*UserStopwatch, error) { return res, nil } -// GetUserStopwatches return list of all stopwatches of a user +// GetUserStopwatches return list of the user's all stopwatches func GetUserStopwatches(ctx context.Context, userID int64, listOptions db.ListOptions) ([]*Stopwatch, error) { sws := make([]*Stopwatch, 0, 8) sess := db.GetEngine(ctx).Where("stopwatch.user_id = ?", userID) @@ -107,7 +85,7 @@ func GetUserStopwatches(ctx context.Context, userID int64, listOptions db.ListOp return sws, nil } -// CountUserStopwatches return count of all stopwatches of a user +// CountUserStopwatches return count of the user's all stopwatches func CountUserStopwatches(ctx context.Context, userID int64) (int64, error) { return db.GetEngine(ctx).Where("user_id = ?", userID).Count(&Stopwatch{}) } @@ -141,43 +119,21 @@ func HasUserStopwatch(ctx context.Context, userID int64) (exists bool, sw *Stopw return exists, sw, issue, err } -// FinishIssueStopwatchIfPossible if stopwatch exist then finish it otherwise ignore -func FinishIssueStopwatchIfPossible(ctx context.Context, user *user_model.User, issue *Issue) error { - _, exists, err := getStopwatch(ctx, user.ID, issue.ID) - if err != nil { - return err - } - if !exists { - return nil - } - return FinishIssueStopwatch(ctx, user, issue) -} - -// CreateOrStopIssueStopwatch create an issue stopwatch if it's not exist, otherwise finish it -func CreateOrStopIssueStopwatch(ctx context.Context, user *user_model.User, issue *Issue) error { - _, exists, err := getStopwatch(ctx, user.ID, issue.ID) - if err != nil { - return err - } - if exists { - return FinishIssueStopwatch(ctx, user, issue) - } - return CreateIssueStopwatch(ctx, user, issue) -} - -// FinishIssueStopwatch if stopwatch exist then finish it otherwise return an error -func FinishIssueStopwatch(ctx context.Context, user *user_model.User, issue *Issue) error { +// FinishIssueStopwatch if stopwatch exists, then finish it. 
+func FinishIssueStopwatch(ctx context.Context, user *user_model.User, issue *Issue) (ok bool, err error) { sw, exists, err := getStopwatch(ctx, user.ID, issue.ID) if err != nil { - return err + return false, err + } else if !exists { + return false, nil } - if !exists { - return ErrIssueStopwatchNotExist{ - UserID: user.ID, - IssueID: issue.ID, - } + if err = finishIssueStopwatch(ctx, user, issue, sw); err != nil { + return false, err } + return true, nil +} +func finishIssueStopwatch(ctx context.Context, user *user_model.User, issue *Issue, sw *Stopwatch) error { // Create tracked time out of the time difference between start date and actual date timediff := time.Now().Unix() - int64(sw.CreatedUnix) @@ -189,101 +145,81 @@ func FinishIssueStopwatch(ctx context.Context, user *user_model.User, issue *Iss Time: timediff, } - if err := db.Insert(ctx, tt); err != nil { + if err := issue.LoadRepo(ctx); err != nil { return err } - - if err := issue.LoadRepo(ctx); err != nil { + if err := db.Insert(ctx, tt); err != nil { return err } - if _, err := CreateComment(ctx, &CreateCommentOptions{ Doer: user, Issue: issue, Repo: issue.Repo, - Content: util.SecToTime(timediff), + Content: util.SecToHours(timediff), Type: CommentTypeStopTracking, TimeID: tt.ID, }); err != nil { return err } - _, err = db.DeleteByBean(ctx, sw) + _, err := db.DeleteByBean(ctx, sw) return err } -// CreateIssueStopwatch creates a stopwatch if not exist, otherwise return an error -func CreateIssueStopwatch(ctx context.Context, user *user_model.User, issue *Issue) error { - if err := issue.LoadRepo(ctx); err != nil { - return err - } - - // if another stopwatch is running: stop it - exists, _, otherIssue, err := HasUserStopwatch(ctx, user.ID) - if err != nil { - return err - } - if exists { - if err := FinishIssueStopwatch(ctx, user, otherIssue); err != nil { - return err +// CreateIssueStopwatch creates a stopwatch if the issue doesn't have the user's stopwatch. +// It also stops any other stopwatch that might be running for the user. +func CreateIssueStopwatch(ctx context.Context, user *user_model.User, issue *Issue) (ok bool, err error) { + { // if another issue's stopwatch is running: stop it; if this issue has a stopwatch: return an error. + exists, otherStopWatch, otherIssue, err := HasUserStopwatch(ctx, user.ID) + if err != nil { + return false, err + } + if exists { + if otherStopWatch.IssueID == issue.ID { + // don't allow starting stopwatch for the same issue + return false, nil + } + // stop the other issue's stopwatch + if err = finishIssueStopwatch(ctx, user, otherIssue, otherStopWatch); err != nil { + return false, err + } } } - // Create stopwatch - sw := &Stopwatch{ - UserID: user.ID, - IssueID: issue.ID, - } - - if err := db.Insert(ctx, sw); err != nil { - return err + if err = issue.LoadRepo(ctx); err != nil { + return false, err } - - if err := issue.LoadRepo(ctx); err != nil { - return err + if err = db.Insert(ctx, &Stopwatch{UserID: user.ID, IssueID: issue.ID}); err != nil { + return false, err } - - if _, err := CreateComment(ctx, &CreateCommentOptions{ + if _, err = CreateComment(ctx, &CreateCommentOptions{ Doer: user, Issue: issue, Repo: issue.Repo, Type: CommentTypeStartTracking, }); err != nil { - return err + return false, err } - - return nil + return true, nil } // CancelStopwatch removes the given stopwatch and logs it into issue's timeline. 
-func CancelStopwatch(ctx context.Context, user *user_model.User, issue *Issue) error { - ctx, committer, err := db.TxContext(ctx) - if err != nil { - return err - } - defer committer.Close() - if err := cancelStopwatch(ctx, user, issue); err != nil { - return err - } - return committer.Commit() -} - -func cancelStopwatch(ctx context.Context, user *user_model.User, issue *Issue) error { - e := db.GetEngine(ctx) - sw, exists, err := getStopwatch(ctx, user.ID, issue.ID) - if err != nil { - return err - } - - if exists { - if _, err := e.Delete(sw); err != nil { +func CancelStopwatch(ctx context.Context, user *user_model.User, issue *Issue) (ok bool, err error) { + err = db.WithTx(ctx, func(ctx context.Context) error { + e := db.GetEngine(ctx) + sw, exists, err := getStopwatch(ctx, user.ID, issue.ID) + if err != nil { return err + } else if !exists { + return nil } - if err := issue.LoadRepo(ctx); err != nil { + if err = issue.LoadRepo(ctx); err != nil { return err } - - if _, err := CreateComment(ctx, &CreateCommentOptions{ + if _, err = e.Delete(sw); err != nil { + return err + } + if _, err = CreateComment(ctx, &CreateCommentOptions{ Doer: user, Issue: issue, Repo: issue.Repo, @@ -291,6 +227,8 @@ func cancelStopwatch(ctx context.Context, user *user_model.User, issue *Issue) e }); err != nil { return err } - } - return nil + ok = true + return nil + }) + return ok, err } diff --git a/models/issues/stopwatch_test.go b/models/issues/stopwatch_test.go index a1bf9dc931..6333c10234 100644 --- a/models/issues/stopwatch_test.go +++ b/models/issues/stopwatch_test.go @@ -10,7 +10,6 @@ import ( issues_model "code.gitea.io/gitea/models/issues" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" - "code.gitea.io/gitea/modules/timeutil" "github.com/stretchr/testify/assert" ) @@ -18,26 +17,22 @@ import ( func TestCancelStopwatch(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) - user1, err := user_model.GetUserByID(db.DefaultContext, 1) - assert.NoError(t, err) - - issue1, err := issues_model.GetIssueByID(db.DefaultContext, 1) - assert.NoError(t, err) - issue2, err := issues_model.GetIssueByID(db.DefaultContext, 2) - assert.NoError(t, err) + user1 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1}) + issue1 := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1}) - err = issues_model.CancelStopwatch(db.DefaultContext, user1, issue1) + ok, err := issues_model.CancelStopwatch(db.DefaultContext, user1, issue1) assert.NoError(t, err) + assert.True(t, ok) unittest.AssertNotExistsBean(t, &issues_model.Stopwatch{UserID: user1.ID, IssueID: issue1.ID}) + unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{Type: issues_model.CommentTypeCancelTracking, PosterID: user1.ID, IssueID: issue1.ID}) - _ = unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{Type: issues_model.CommentTypeCancelTracking, PosterID: user1.ID, IssueID: issue1.ID}) - - assert.NoError(t, issues_model.CancelStopwatch(db.DefaultContext, user1, issue2)) + ok, err = issues_model.CancelStopwatch(db.DefaultContext, user1, issue1) + assert.NoError(t, err) + assert.False(t, ok) } func TestStopwatchExists(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) - assert.True(t, issues_model.StopwatchExists(db.DefaultContext, 1, 1)) assert.False(t, issues_model.StopwatchExists(db.DefaultContext, 1, 2)) } @@ -58,21 +53,35 @@ func TestHasUserStopwatch(t *testing.T) { func TestCreateOrStopIssueStopwatch(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) - 
user2, err := user_model.GetUserByID(db.DefaultContext, 2) + user4 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4}) + issue1 := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1}) + issue3 := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 3}) + + // create a new stopwatch + ok, err := issues_model.CreateIssueStopwatch(db.DefaultContext, user4, issue1) assert.NoError(t, err) - org3, err := user_model.GetUserByID(db.DefaultContext, 3) + assert.True(t, ok) + unittest.AssertExistsAndLoadBean(t, &issues_model.Stopwatch{UserID: user4.ID, IssueID: issue1.ID}) + // should not create a second stopwatch for the same issue + ok, err = issues_model.CreateIssueStopwatch(db.DefaultContext, user4, issue1) assert.NoError(t, err) - - issue1, err := issues_model.GetIssueByID(db.DefaultContext, 1) + assert.False(t, ok) + // on a different issue, it will finish the existing stopwatch and create a new one + ok, err = issues_model.CreateIssueStopwatch(db.DefaultContext, user4, issue3) assert.NoError(t, err) - issue2, err := issues_model.GetIssueByID(db.DefaultContext, 2) + assert.True(t, ok) + unittest.AssertNotExistsBean(t, &issues_model.Stopwatch{UserID: user4.ID, IssueID: issue1.ID}) + unittest.AssertExistsAndLoadBean(t, &issues_model.Stopwatch{UserID: user4.ID, IssueID: issue3.ID}) + + // user2 already has a stopwatch in test fixture + user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}) + issue2 := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 2}) + ok, err = issues_model.FinishIssueStopwatch(db.DefaultContext, user2, issue2) assert.NoError(t, err) - - assert.NoError(t, issues_model.CreateOrStopIssueStopwatch(db.DefaultContext, org3, issue1)) - sw := unittest.AssertExistsAndLoadBean(t, &issues_model.Stopwatch{UserID: 3, IssueID: 1}) - assert.LessOrEqual(t, sw.CreatedUnix, timeutil.TimeStampNow()) - - assert.NoError(t, issues_model.CreateOrStopIssueStopwatch(db.DefaultContext, user2, issue2)) - unittest.AssertNotExistsBean(t, &issues_model.Stopwatch{UserID: 2, IssueID: 2}) - unittest.AssertExistsAndLoadBean(t, &issues_model.TrackedTime{UserID: 2, IssueID: 2}) + assert.True(t, ok) + unittest.AssertNotExistsBean(t, &issues_model.Stopwatch{UserID: user2.ID, IssueID: issue2.ID}) + unittest.AssertExistsAndLoadBean(t, &issues_model.TrackedTime{UserID: user2.ID, IssueID: issue2.ID}) + ok, err = issues_model.FinishIssueStopwatch(db.DefaultContext, user2, issue2) + assert.NoError(t, err) + assert.False(t, ok) } diff --git a/models/issues/tracked_time.go b/models/issues/tracked_time.go index ea404d36cd..2afbe272ed 100644 --- a/models/issues/tracked_time.go +++ b/models/issues/tracked_time.go @@ -350,10 +350,7 @@ func GetIssueTotalTrackedTime(ctx context.Context, opts *IssuesOptions, isClosed // we get the statistics in smaller chunks and get accumulates var accum int64 for i := 0; i < len(opts.IssueIDs); { - chunk := i + MaxQueryParameters - if chunk > len(opts.IssueIDs) { - chunk = len(opts.IssueIDs) - } + chunk := min(i+MaxQueryParameters, len(opts.IssueIDs)) time, err := getIssueTotalTrackedTimeChunk(ctx, opts, isClosed, opts.IssueIDs[i:chunk]) if err != nil { return 0, err diff --git a/models/migrations/base/db.go b/models/migrations/base/db.go index eb1c44a79e..479a46379c 100644 --- a/models/migrations/base/db.go +++ b/models/migrations/base/db.go @@ -52,7 +52,7 @@ func RecreateTable(sess *xorm.Session, bean any) error { // TODO: This will not work if there are foreign keys tableName := sess.Engine().TableName(bean) - tempTableName := 
fmt.Sprintf("tmp_recreate__%s", tableName) + tempTableName := "tmp_recreate__" + tableName // We need to move the old table away and create a new one with the correct columns // We will need to do this in stages to prevent data loss @@ -82,7 +82,7 @@ func RecreateTable(sess *xorm.Session, bean any) error { } newTableColumns := table.Columns() if len(newTableColumns) == 0 { - return fmt.Errorf("no columns in new table") + return errors.New("no columns in new table") } hasID := false for _, column := range newTableColumns { @@ -518,7 +518,7 @@ func ModifyColumn(x *xorm.Engine, tableName string, col *schemas.Column) error { func removeAllWithRetry(dir string) error { var err error - for i := 0; i < 20; i++ { + for range 20 { err = os.RemoveAll(dir) if err == nil { break @@ -552,11 +552,11 @@ func deleteDB() error { } defer db.Close() - if _, err = db.Exec(fmt.Sprintf("DROP DATABASE IF EXISTS %s", setting.Database.Name)); err != nil { + if _, err = db.Exec("DROP DATABASE IF EXISTS " + setting.Database.Name); err != nil { return err } - if _, err = db.Exec(fmt.Sprintf("CREATE DATABASE IF NOT EXISTS %s", setting.Database.Name)); err != nil { + if _, err = db.Exec("CREATE DATABASE IF NOT EXISTS " + setting.Database.Name); err != nil { return err } return nil @@ -568,11 +568,11 @@ func deleteDB() error { } defer db.Close() - if _, err = db.Exec(fmt.Sprintf("DROP DATABASE IF EXISTS %s", setting.Database.Name)); err != nil { + if _, err = db.Exec("DROP DATABASE IF EXISTS " + setting.Database.Name); err != nil { return err } - if _, err = db.Exec(fmt.Sprintf("CREATE DATABASE %s", setting.Database.Name)); err != nil { + if _, err = db.Exec("CREATE DATABASE " + setting.Database.Name); err != nil { return err } db.Close() @@ -594,7 +594,7 @@ func deleteDB() error { if !schrows.Next() { // Create and setup a DB schema - _, err = db.Exec(fmt.Sprintf("CREATE SCHEMA %s", setting.Database.Schema)) + _, err = db.Exec("CREATE SCHEMA " + setting.Database.Schema) if err != nil { return err } diff --git a/models/migrations/base/tests.go b/models/migrations/base/tests.go index fe6de9c517..33fd1df707 100644 --- a/models/migrations/base/tests.go +++ b/models/migrations/base/tests.go @@ -1,7 +1,6 @@ // Copyright 2022 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -//nolint:forbidigo package base import ( @@ -15,6 +14,7 @@ import ( "code.gitea.io/gitea/models/unittest" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/tempdir" "code.gitea.io/gitea/modules/test" "code.gitea.io/gitea/modules/testlogger" @@ -105,7 +105,7 @@ func MainTest(m *testing.M) { giteaConf := os.Getenv("GITEA_CONF") if giteaConf == "" { giteaConf = filepath.Join(filepath.Dir(setting.AppPath), "tests/sqlite.ini") - fmt.Printf("Environment variable $GITEA_CONF not set - defaulting to %s\n", giteaConf) + _, _ = fmt.Fprintf(os.Stderr, "Environment variable $GITEA_CONF not set - defaulting to %s\n", giteaConf) } if !filepath.IsAbs(giteaConf) { @@ -114,15 +114,16 @@ func MainTest(m *testing.M) { setting.CustomConf = giteaConf } - tmpDataPath, err := os.MkdirTemp("", "data") + tmpDataPath, cleanup, err := tempdir.OsTempDir("gitea-test").MkdirTempRandom("data") if err != nil { testlogger.Fatalf("Unable to create temporary data path %v\n", err) } + defer cleanup() setting.CustomPath = filepath.Join(setting.AppWorkPath, "custom") setting.AppDataPath = tmpDataPath - unittest.InitSettings() + unittest.InitSettingsForTesting() if err = git.InitFull(context.Background()); err != nil { testlogger.Fatalf("Unable to InitFull: %v\n", err) } @@ -132,10 +133,7 @@ func MainTest(m *testing.M) { exitStatus := m.Run() if err := removeAllWithRetry(setting.RepoRootPath); err != nil { - fmt.Fprintf(os.Stderr, "os.RemoveAll: %v\n", err) - } - if err := removeAllWithRetry(tmpDataPath); err != nil { - fmt.Fprintf(os.Stderr, "os.RemoveAll: %v\n", err) + _, _ = fmt.Fprintf(os.Stderr, "os.RemoveAll: %v\n", err) } os.Exit(exitStatus) } diff --git a/models/migrations/migrations.go b/models/migrations/migrations.go index 52d10c4fe8..176372486e 100644 --- a/models/migrations/migrations.go +++ b/models/migrations/migrations.go @@ -6,6 +6,7 @@ package migrations import ( "context" + "errors" "fmt" "code.gitea.io/gitea/models/migrations/v1_10" @@ -22,6 +23,7 @@ import ( "code.gitea.io/gitea/models/migrations/v1_21" "code.gitea.io/gitea/models/migrations/v1_22" "code.gitea.io/gitea/models/migrations/v1_23" + "code.gitea.io/gitea/models/migrations/v1_24" "code.gitea.io/gitea/models/migrations/v1_6" "code.gitea.io/gitea/models/migrations/v1_7" "code.gitea.io/gitea/models/migrations/v1_8" @@ -369,6 +371,17 @@ func prepareMigrationTasks() []*migration { newMigration(309, "Improve Notification table indices", v1_23.ImproveNotificationTableIndices), newMigration(310, "Add Priority to ProtectedBranch", v1_23.AddPriorityToProtectedBranch), newMigration(311, "Add TimeEstimate to Issue table", v1_23.AddTimeEstimateColumnToIssueTable), + + // Gitea 1.23.0-rc0 ends at migration ID number 311 (database version 312) + newMigration(312, "Add DeleteBranchAfterMerge to AutoMerge", v1_24.AddDeleteBranchAfterMergeForAutoMerge), + newMigration(313, "Move PinOrder from issue table to a new table issue_pin", v1_24.MovePinOrderToTableIssuePin), + newMigration(314, "Update OwnerID as zero for repository level action tables", v1_24.UpdateOwnerIDOfRepoLevelActionsTables), + newMigration(315, "Add Ephemeral to ActionRunner", v1_24.AddEphemeralToActionRunner), + newMigration(316, "Add description for secrets and variables", v1_24.AddDescriptionForSecretsAndVariables), + newMigration(317, "Add new index for action for heatmap", v1_24.AddNewIndexForUserDashboard), + newMigration(318, "Add anonymous_access_mode for repo_unit", v1_24.AddRepoUnitAnonymousAccessMode), 
+ newMigration(319, "Add ExclusiveOrder to Label table", v1_24.AddExclusiveOrderColumnToLabelTable), + newMigration(320, "Migrate two_factor_policy to login_source table", v1_24.MigrateSkipTwoFactor), } return preparedMigrations } @@ -407,14 +420,14 @@ func ExpectedDBVersion() int64 { } // EnsureUpToDate will check if the db is at the correct version -func EnsureUpToDate(x *xorm.Engine) error { +func EnsureUpToDate(ctx context.Context, x *xorm.Engine) error { currentDB, err := GetCurrentDBVersion(x) if err != nil { return err } if currentDB < 0 { - return fmt.Errorf("database has not been initialized") + return errors.New("database has not been initialized") } if minDBVersion > currentDB { diff --git a/models/migrations/migrations_test.go b/models/migrations/migrations_test.go index e66b015b3d..8649d116f5 100644 --- a/models/migrations/migrations_test.go +++ b/models/migrations/migrations_test.go @@ -22,7 +22,7 @@ func TestMigrations(t *testing.T) { assert.EqualValues(t, 71, migrationIDNumberToDBVersion(70)) - assert.EqualValues(t, []*migration{{idNumber: 70}, {idNumber: 71}}, getPendingMigrations(70, preparedMigrations)) - assert.EqualValues(t, []*migration{{idNumber: 71}}, getPendingMigrations(71, preparedMigrations)) - assert.EqualValues(t, []*migration{}, getPendingMigrations(72, preparedMigrations)) + assert.Equal(t, []*migration{{idNumber: 70}, {idNumber: 71}}, getPendingMigrations(70, preparedMigrations)) + assert.Equal(t, []*migration{{idNumber: 71}}, getPendingMigrations(71, preparedMigrations)) + assert.Equal(t, []*migration{}, getPendingMigrations(72, preparedMigrations)) } diff --git a/models/migrations/v1_10/v100.go b/models/migrations/v1_10/v100.go index 5d2fd8e244..1742bea296 100644 --- a/models/migrations/v1_10/v100.go +++ b/models/migrations/v1_10/v100.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_10 //nolint +package v1_10 import ( "net/url" diff --git a/models/migrations/v1_10/v101.go b/models/migrations/v1_10/v101.go index f023a2a0e7..6c8dfe2486 100644 --- a/models/migrations/v1_10/v101.go +++ b/models/migrations/v1_10/v101.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_10 //nolint +package v1_10 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_10/v88.go b/models/migrations/v1_10/v88.go index 7e86ac364f..eb8e81c19e 100644 --- a/models/migrations/v1_10/v88.go +++ b/models/migrations/v1_10/v88.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_10 //nolint +package v1_10 import ( "crypto/sha1" diff --git a/models/migrations/v1_10/v89.go b/models/migrations/v1_10/v89.go index d5f27ffdc6..0df2a6e17b 100644 --- a/models/migrations/v1_10/v89.go +++ b/models/migrations/v1_10/v89.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_10 //nolint +package v1_10 import "xorm.io/xorm" diff --git a/models/migrations/v1_10/v90.go b/models/migrations/v1_10/v90.go index 295d4b1c1b..5521a97e32 100644 --- a/models/migrations/v1_10/v90.go +++ b/models/migrations/v1_10/v90.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_10 //nolint +package v1_10 import "xorm.io/xorm" diff --git a/models/migrations/v1_10/v91.go b/models/migrations/v1_10/v91.go index 48cac2de70..08db6c2742 100644 --- a/models/migrations/v1_10/v91.go +++ b/models/migrations/v1_10/v91.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_10 //nolint +package v1_10 import "xorm.io/xorm" diff --git a/models/migrations/v1_10/v92.go b/models/migrations/v1_10/v92.go index 9080108594..b6c04a9234 100644 --- a/models/migrations/v1_10/v92.go +++ b/models/migrations/v1_10/v92.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_10 //nolint +package v1_10 import ( "xorm.io/builder" diff --git a/models/migrations/v1_10/v93.go b/models/migrations/v1_10/v93.go index ee59a8db39..c131be9a8d 100644 --- a/models/migrations/v1_10/v93.go +++ b/models/migrations/v1_10/v93.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_10 //nolint +package v1_10 import "xorm.io/xorm" diff --git a/models/migrations/v1_10/v94.go b/models/migrations/v1_10/v94.go index c131af162b..13b7d7b303 100644 --- a/models/migrations/v1_10/v94.go +++ b/models/migrations/v1_10/v94.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_10 //nolint +package v1_10 import "xorm.io/xorm" diff --git a/models/migrations/v1_10/v95.go b/models/migrations/v1_10/v95.go index 3b1f67fd9c..86b52026bf 100644 --- a/models/migrations/v1_10/v95.go +++ b/models/migrations/v1_10/v95.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_10 //nolint +package v1_10 import "xorm.io/xorm" diff --git a/models/migrations/v1_10/v96.go b/models/migrations/v1_10/v96.go index 34c8240031..ca35a169c4 100644 --- a/models/migrations/v1_10/v96.go +++ b/models/migrations/v1_10/v96.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_10 //nolint +package v1_10 import ( "path/filepath" diff --git a/models/migrations/v1_10/v97.go b/models/migrations/v1_10/v97.go index dee45b32e3..5872bb63e5 100644 --- a/models/migrations/v1_10/v97.go +++ b/models/migrations/v1_10/v97.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_10 //nolint +package v1_10 import "xorm.io/xorm" diff --git a/models/migrations/v1_10/v98.go b/models/migrations/v1_10/v98.go index bdd9aed089..d21c326459 100644 --- a/models/migrations/v1_10/v98.go +++ b/models/migrations/v1_10/v98.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_10 //nolint +package v1_10 import "xorm.io/xorm" diff --git a/models/migrations/v1_10/v99.go b/models/migrations/v1_10/v99.go index ebe6597f7c..223c188057 100644 --- a/models/migrations/v1_10/v99.go +++ b/models/migrations/v1_10/v99.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_10 //nolint +package v1_10 import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_11/v102.go b/models/migrations/v1_11/v102.go index 9358e4cef3..e52290afb0 100644 --- a/models/migrations/v1_11/v102.go +++ b/models/migrations/v1_11/v102.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_11 //nolint +package v1_11 import ( "code.gitea.io/gitea/models/migrations/base" diff --git a/models/migrations/v1_11/v103.go b/models/migrations/v1_11/v103.go index 53527dac58..a515710160 100644 --- a/models/migrations/v1_11/v103.go +++ b/models/migrations/v1_11/v103.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_11 //nolint +package v1_11 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_11/v104.go b/models/migrations/v1_11/v104.go index 3e8ee64bc1..3b0d3c64b2 100644 --- a/models/migrations/v1_11/v104.go +++ b/models/migrations/v1_11/v104.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_11 //nolint +package v1_11 import ( "code.gitea.io/gitea/models/migrations/base" diff --git a/models/migrations/v1_11/v105.go b/models/migrations/v1_11/v105.go index b91340c30a..d86973a0f6 100644 --- a/models/migrations/v1_11/v105.go +++ b/models/migrations/v1_11/v105.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_11 //nolint +package v1_11 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_11/v106.go b/models/migrations/v1_11/v106.go index ecb11cdd1e..edffe18683 100644 --- a/models/migrations/v1_11/v106.go +++ b/models/migrations/v1_11/v106.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_11 //nolint +package v1_11 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_11/v107.go b/models/migrations/v1_11/v107.go index f0bfe5862c..a158e3bb50 100644 --- a/models/migrations/v1_11/v107.go +++ b/models/migrations/v1_11/v107.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_11 //nolint +package v1_11 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_11/v108.go b/models/migrations/v1_11/v108.go index a85096234d..8f14504ceb 100644 --- a/models/migrations/v1_11/v108.go +++ b/models/migrations/v1_11/v108.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_11 //nolint +package v1_11 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_11/v109.go b/models/migrations/v1_11/v109.go index ea565ccda3..f7616aec7b 100644 --- a/models/migrations/v1_11/v109.go +++ b/models/migrations/v1_11/v109.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_11 //nolint +package v1_11 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_11/v110.go b/models/migrations/v1_11/v110.go index 81afa1331d..512f728c03 100644 --- a/models/migrations/v1_11/v110.go +++ b/models/migrations/v1_11/v110.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_11 //nolint +package v1_11 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_11/v111.go b/models/migrations/v1_11/v111.go index ff108479a9..c27465f051 100644 --- a/models/migrations/v1_11/v111.go +++ b/models/migrations/v1_11/v111.go @@ -1,10 +1,11 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_11 //nolint +package v1_11 import ( "fmt" + "slices" "xorm.io/xorm" ) @@ -344,10 +345,8 @@ func AddBranchProtectionCanPushAndEnableWhitelist(x *xorm.Engine) error { } return AccessModeWrite <= perm.UnitsMode[UnitTypeCode], nil } - for _, id := range protectedBranch.ApprovalsWhitelistUserIDs { - if id == reviewer.ID { - return true, nil - } + if slices.Contains(protectedBranch.ApprovalsWhitelistUserIDs, reviewer.ID) { + return true, nil } // isUserInTeams diff --git a/models/migrations/v1_11/v112.go b/models/migrations/v1_11/v112.go index 0857663119..fe45cf9222 100644 --- a/models/migrations/v1_11/v112.go +++ b/models/migrations/v1_11/v112.go @@ -1,12 +1,12 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_11 //nolint +package v1_11 import ( - "fmt" "path/filepath" + "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" @@ -31,7 +31,7 @@ func RemoveAttachmentMissedRepo(x *xorm.Engine) error { for i := 0; i < len(attachments); i++ { uuid := attachments[i].UUID if err = util.RemoveAll(filepath.Join(setting.Attachment.Storage.Path, uuid[0:1], uuid[1:2], uuid)); err != nil { - fmt.Printf("Error: %v", err) //nolint:forbidigo + log.Warn("Unable to remove attachment file by UUID %s: %v", uuid, err) } } diff --git a/models/migrations/v1_11/v113.go b/models/migrations/v1_11/v113.go index dea344a44f..a4d54f66fb 100644 --- a/models/migrations/v1_11/v113.go +++ b/models/migrations/v1_11/v113.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_11 //nolint +package v1_11 import ( "fmt" diff --git a/models/migrations/v1_11/v114.go b/models/migrations/v1_11/v114.go index 95adcee989..9467a8a90c 100644 --- a/models/migrations/v1_11/v114.go +++ b/models/migrations/v1_11/v114.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_11 //nolint +package v1_11 import ( "net/url" diff --git a/models/migrations/v1_11/v115.go b/models/migrations/v1_11/v115.go index 8c631cfd0b..5933c0520f 100644 --- a/models/migrations/v1_11/v115.go +++ b/models/migrations/v1_11/v115.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_11 //nolint +package v1_11 import ( "crypto/md5" @@ -146,7 +146,7 @@ func copyOldAvatarToNewLocation(userID int64, oldAvatar string) (string, error) return "", fmt.Errorf("io.ReadAll: %w", err) } - newAvatar := fmt.Sprintf("%x", md5.Sum([]byte(fmt.Sprintf("%d-%x", userID, md5.Sum(data))))) + newAvatar := fmt.Sprintf("%x", md5.Sum(fmt.Appendf(nil, "%d-%x", userID, md5.Sum(data)))) if newAvatar == oldAvatar { return newAvatar, nil } diff --git a/models/migrations/v1_11/v116.go b/models/migrations/v1_11/v116.go index 85aa76c1e0..729fbad18b 100644 --- a/models/migrations/v1_11/v116.go +++ b/models/migrations/v1_11/v116.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. 
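The AddBranchProtectionCanPushAndEnableWhitelist hunk above (v111.go) replaces a hand-written membership loop with slices.Contains from the standard library (Go 1.21+); the v112.go hunk in the same range also swaps a bare fmt.Printf for a proper log.Warn call. A minimal sketch of the slices.Contains pattern, with the field and variable names borrowed from the hunk:

	package v1_11

	import "slices"

	// isWhitelisted reports whether reviewerID is in the approvals whitelist.
	// Equivalent to the removed for-range loop: linear scan, first match wins.
	func isWhitelisted(approvalsWhitelistUserIDs []int64, reviewerID int64) bool {
		return slices.Contains(approvalsWhitelistUserIDs, reviewerID)
	}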
// SPDX-License-Identifier: MIT -package v1_11 //nolint +package v1_11 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_12/v117.go b/models/migrations/v1_12/v117.go index 8eadcdef2b..73b58ca34b 100644 --- a/models/migrations/v1_12/v117.go +++ b/models/migrations/v1_12/v117.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_12 //nolint +package v1_12 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_12/v118.go b/models/migrations/v1_12/v118.go index eb022dc5e4..e8b4249743 100644 --- a/models/migrations/v1_12/v118.go +++ b/models/migrations/v1_12/v118.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_12 //nolint +package v1_12 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_12/v119.go b/models/migrations/v1_12/v119.go index 60bfe6a57d..b4bf29a935 100644 --- a/models/migrations/v1_12/v119.go +++ b/models/migrations/v1_12/v119.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_12 //nolint +package v1_12 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_12/v120.go b/models/migrations/v1_12/v120.go index 3f7ed8d373..14d515f5a7 100644 --- a/models/migrations/v1_12/v120.go +++ b/models/migrations/v1_12/v120.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_12 //nolint +package v1_12 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_12/v121.go b/models/migrations/v1_12/v121.go index 175ec9164d..a28ae4e1c9 100644 --- a/models/migrations/v1_12/v121.go +++ b/models/migrations/v1_12/v121.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_12 //nolint +package v1_12 import "xorm.io/xorm" diff --git a/models/migrations/v1_12/v122.go b/models/migrations/v1_12/v122.go index 6e31d863a1..bc1b175f6a 100644 --- a/models/migrations/v1_12/v122.go +++ b/models/migrations/v1_12/v122.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_12 //nolint +package v1_12 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_12/v123.go b/models/migrations/v1_12/v123.go index b0c3af07a3..52b10bb850 100644 --- a/models/migrations/v1_12/v123.go +++ b/models/migrations/v1_12/v123.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_12 //nolint +package v1_12 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_12/v124.go b/models/migrations/v1_12/v124.go index d2ba03ffe0..9a93f436d4 100644 --- a/models/migrations/v1_12/v124.go +++ b/models/migrations/v1_12/v124.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_12 //nolint +package v1_12 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_12/v125.go b/models/migrations/v1_12/v125.go index ec4ffaab25..7f582ecff5 100644 --- a/models/migrations/v1_12/v125.go +++ b/models/migrations/v1_12/v125.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. 
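A few files back, the copyOldAvatarToNewLocation hunk in v1_11/v115.go rewrites []byte(fmt.Sprintf(...)) as fmt.Appendf(nil, ...) (available since Go 1.19), which formats directly into a byte slice and skips the intermediate string allocation. A small sketch of the same idea, using a hypothetical helper name:

	package main

	import (
		"crypto/md5"
		"fmt"
	)

	// avatarHash formats straight into a []byte before hashing; no temporary string.
	func avatarHash(userID int64, data []byte) string {
		sum := md5.Sum(fmt.Appendf(nil, "%d-%x", userID, md5.Sum(data)))
		return fmt.Sprintf("%x", sum)
	}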
// SPDX-License-Identifier: MIT -package v1_12 //nolint +package v1_12 import ( "fmt" diff --git a/models/migrations/v1_12/v126.go b/models/migrations/v1_12/v126.go index ca9ec3aa3f..64fd7f7478 100644 --- a/models/migrations/v1_12/v126.go +++ b/models/migrations/v1_12/v126.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_12 //nolint +package v1_12 import ( "xorm.io/builder" diff --git a/models/migrations/v1_12/v127.go b/models/migrations/v1_12/v127.go index 00e391dc87..9bd78db95e 100644 --- a/models/migrations/v1_12/v127.go +++ b/models/migrations/v1_12/v127.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_12 //nolint +package v1_12 import ( "fmt" diff --git a/models/migrations/v1_12/v128.go b/models/migrations/v1_12/v128.go index 44d44a26c5..e7dbff3766 100644 --- a/models/migrations/v1_12/v128.go +++ b/models/migrations/v1_12/v128.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_12 //nolint +package v1_12 import ( "fmt" @@ -82,17 +82,17 @@ func FixMergeBase(x *xorm.Engine) error { if !pr.HasMerged { var err error - pr.MergeBase, _, err = git.NewCommand(git.DefaultContext, "merge-base").AddDashesAndList(pr.BaseBranch, gitRefName).RunStdString(&git.RunOpts{Dir: repoPath}) + pr.MergeBase, _, err = git.NewCommand("merge-base").AddDashesAndList(pr.BaseBranch, gitRefName).RunStdString(git.DefaultContext, &git.RunOpts{Dir: repoPath}) if err != nil { var err2 error - pr.MergeBase, _, err2 = git.NewCommand(git.DefaultContext, "rev-parse").AddDynamicArguments(git.BranchPrefix + pr.BaseBranch).RunStdString(&git.RunOpts{Dir: repoPath}) + pr.MergeBase, _, err2 = git.NewCommand("rev-parse").AddDynamicArguments(git.BranchPrefix+pr.BaseBranch).RunStdString(git.DefaultContext, &git.RunOpts{Dir: repoPath}) if err2 != nil { log.Error("Unable to get merge base for PR ID %d, Index %d in %s/%s. Error: %v & %v", pr.ID, pr.Index, baseRepo.OwnerName, baseRepo.Name, err, err2) continue } } } else { - parentsString, _, err := git.NewCommand(git.DefaultContext, "rev-list", "--parents", "-n", "1").AddDynamicArguments(pr.MergedCommitID).RunStdString(&git.RunOpts{Dir: repoPath}) + parentsString, _, err := git.NewCommand("rev-list", "--parents", "-n", "1").AddDynamicArguments(pr.MergedCommitID).RunStdString(git.DefaultContext, &git.RunOpts{Dir: repoPath}) if err != nil { log.Error("Unable to get parents for merged PR ID %d, Index %d in %s/%s. Error: %v", pr.ID, pr.Index, baseRepo.OwnerName, baseRepo.Name, err) continue @@ -104,9 +104,9 @@ func FixMergeBase(x *xorm.Engine) error { refs := append([]string{}, parents[1:]...) refs = append(refs, gitRefName) - cmd := git.NewCommand(git.DefaultContext, "merge-base").AddDashesAndList(refs...) + cmd := git.NewCommand("merge-base").AddDashesAndList(refs...) - pr.MergeBase, _, err = cmd.RunStdString(&git.RunOpts{Dir: repoPath}) + pr.MergeBase, _, err = cmd.RunStdString(git.DefaultContext, &git.RunOpts{Dir: repoPath}) if err != nil { log.Error("Unable to get merge base for merged PR ID %d, Index %d in %s/%s. Error: %v", pr.ID, pr.Index, baseRepo.OwnerName, baseRepo.Name, err) continue diff --git a/models/migrations/v1_12/v129.go b/models/migrations/v1_12/v129.go index cf228242b9..3e4d3aca68 100644 --- a/models/migrations/v1_12/v129.go +++ b/models/migrations/v1_12/v129.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. 
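The FixMergeBase hunk above (v128.go, and RefixMergeBase in v134.go further down) tracks an internal API change in Gitea's git module: the context argument moves from git.NewCommand to the Run* call. A sketch of the new calling convention, restricted to the calls that appear in the hunk (repoPath, baseBranch and gitRefName are assumed inputs):

	package v1_12

	import (
		"code.gitea.io/gitea/modules/git"
		"code.gitea.io/gitea/modules/log"
	)

	// mergeBaseOf shows the post-change convention: NewCommand no longer takes a
	// context; the context is supplied where the command actually runs.
	func mergeBaseOf(repoPath, baseBranch, gitRefName string) string {
		mergeBase, _, err := git.NewCommand("merge-base").
			AddDashesAndList(baseBranch, gitRefName).
			RunStdString(git.DefaultContext, &git.RunOpts{Dir: repoPath})
		if err != nil {
			log.Error("Unable to get merge base in %s: %v", repoPath, err)
			return ""
		}
		return mergeBase
	}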
// SPDX-License-Identifier: MIT -package v1_12 //nolint +package v1_12 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_12/v130.go b/models/migrations/v1_12/v130.go index 391810c7ca..107bb756fd 100644 --- a/models/migrations/v1_12/v130.go +++ b/models/migrations/v1_12/v130.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_12 //nolint +package v1_12 import ( "code.gitea.io/gitea/modules/json" diff --git a/models/migrations/v1_12/v131.go b/models/migrations/v1_12/v131.go index 5184bc3590..1266c2f185 100644 --- a/models/migrations/v1_12/v131.go +++ b/models/migrations/v1_12/v131.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_12 //nolint +package v1_12 import ( "fmt" diff --git a/models/migrations/v1_12/v132.go b/models/migrations/v1_12/v132.go index 3b2b28f7ab..8b1ae6db93 100644 --- a/models/migrations/v1_12/v132.go +++ b/models/migrations/v1_12/v132.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_12 //nolint +package v1_12 import ( "fmt" diff --git a/models/migrations/v1_12/v133.go b/models/migrations/v1_12/v133.go index c9087fc8c1..69e20597d8 100644 --- a/models/migrations/v1_12/v133.go +++ b/models/migrations/v1_12/v133.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_12 //nolint +package v1_12 import "xorm.io/xorm" diff --git a/models/migrations/v1_12/v134.go b/models/migrations/v1_12/v134.go index 3d1c82f09e..09d743964d 100644 --- a/models/migrations/v1_12/v134.go +++ b/models/migrations/v1_12/v134.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_12 //nolint +package v1_12 import ( "fmt" @@ -79,7 +79,7 @@ func RefixMergeBase(x *xorm.Engine) error { gitRefName := fmt.Sprintf("refs/pull/%d/head", pr.Index) - parentsString, _, err := git.NewCommand(git.DefaultContext, "rev-list", "--parents", "-n", "1").AddDynamicArguments(pr.MergedCommitID).RunStdString(&git.RunOpts{Dir: repoPath}) + parentsString, _, err := git.NewCommand("rev-list", "--parents", "-n", "1").AddDynamicArguments(pr.MergedCommitID).RunStdString(git.DefaultContext, &git.RunOpts{Dir: repoPath}) if err != nil { log.Error("Unable to get parents for merged PR ID %d, Index %d in %s/%s. Error: %v", pr.ID, pr.Index, baseRepo.OwnerName, baseRepo.Name, err) continue @@ -92,9 +92,9 @@ func RefixMergeBase(x *xorm.Engine) error { // we should recalculate refs := append([]string{}, parents[1:]...) refs = append(refs, gitRefName) - cmd := git.NewCommand(git.DefaultContext, "merge-base").AddDashesAndList(refs...) + cmd := git.NewCommand("merge-base").AddDashesAndList(refs...) - pr.MergeBase, _, err = cmd.RunStdString(&git.RunOpts{Dir: repoPath}) + pr.MergeBase, _, err = cmd.RunStdString(git.DefaultContext, &git.RunOpts{Dir: repoPath}) if err != nil { log.Error("Unable to get merge base for merged PR ID %d, Index %d in %s/%s. Error: %v", pr.ID, pr.Index, baseRepo.OwnerName, baseRepo.Name, err) continue diff --git a/models/migrations/v1_12/v135.go b/models/migrations/v1_12/v135.go index 8898011df5..5df0ad7fc4 100644 --- a/models/migrations/v1_12/v135.go +++ b/models/migrations/v1_12/v135.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_12 //nolint +package v1_12 import ( "fmt" diff --git a/models/migrations/v1_12/v136.go b/models/migrations/v1_12/v136.go index d91ff92feb..0f53278b46 100644 --- a/models/migrations/v1_12/v136.go +++ b/models/migrations/v1_12/v136.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_12 //nolint +package v1_12 import ( "fmt" diff --git a/models/migrations/v1_12/v137.go b/models/migrations/v1_12/v137.go index 0d86b72010..9d38483488 100644 --- a/models/migrations/v1_12/v137.go +++ b/models/migrations/v1_12/v137.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_12 //nolint +package v1_12 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_12/v138.go b/models/migrations/v1_12/v138.go index 8c8d353f40..4485adeb2d 100644 --- a/models/migrations/v1_12/v138.go +++ b/models/migrations/v1_12/v138.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_12 //nolint +package v1_12 import ( "fmt" diff --git a/models/migrations/v1_12/v139.go b/models/migrations/v1_12/v139.go index 279aa7df87..a3799841ac 100644 --- a/models/migrations/v1_12/v139.go +++ b/models/migrations/v1_12/v139.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_12 //nolint +package v1_12 import ( "code.gitea.io/gitea/modules/setting" diff --git a/models/migrations/v1_13/v140.go b/models/migrations/v1_13/v140.go index 2d3337012d..a9a047bca9 100644 --- a/models/migrations/v1_13/v140.go +++ b/models/migrations/v1_13/v140.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_13 //nolint +package v1_13 import ( "fmt" @@ -21,12 +21,7 @@ func FixLanguageStatsToSaveSize(x *xorm.Engine) error { // RepoIndexerType specifies the repository indexer type type RepoIndexerType int - const ( - // RepoIndexerTypeCode code indexer - 0 - RepoIndexerTypeCode RepoIndexerType = iota //nolint:unused - // RepoIndexerTypeStats repository stats indexer - 1 - RepoIndexerTypeStats - ) + const RepoIndexerTypeStats RepoIndexerType = 1 // RepoIndexerStatus see models/repo_indexer.go type RepoIndexerStatus struct { @@ -46,7 +41,7 @@ func FixLanguageStatsToSaveSize(x *xorm.Engine) error { } // Delete language stats - if _, err := x.Exec(fmt.Sprintf("%s language_stat", truncExpr)); err != nil { + if _, err := x.Exec(truncExpr + " language_stat"); err != nil { return err } diff --git a/models/migrations/v1_13/v141.go b/models/migrations/v1_13/v141.go index ae211e0e44..b54bc1727c 100644 --- a/models/migrations/v1_13/v141.go +++ b/models/migrations/v1_13/v141.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_13 //nolint +package v1_13 import ( "fmt" diff --git a/models/migrations/v1_13/v142.go b/models/migrations/v1_13/v142.go index 7c7c01ad47..d08a0ae0bf 100644 --- a/models/migrations/v1_13/v142.go +++ b/models/migrations/v1_13/v142.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. 
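The FixLanguageStatsToSaveSize hunk above (v140.go) collapses an iota block whose first constant was unused into a single typed constant, and replaces fmt.Sprintf("%s language_stat", truncExpr) with plain concatenation; the IncreaseLanguageField hunks that follow (v145.go) do the same for their ALTER TABLE statements. When a format string contains nothing but %s placeholders, concatenation is simpler and avoids the formatter entirely. A tiny illustration, reusing names from the hunk but not taken from the diff:

	// truncateLanguageStat mirrors the simplification: no format verbs, no Sprintf.
	func truncateLanguageStat(truncExpr string) string {
		return truncExpr + " language_stat" // was fmt.Sprintf("%s language_stat", truncExpr)
	}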
// SPDX-License-Identifier: MIT -package v1_13 //nolint +package v1_13 import ( "code.gitea.io/gitea/modules/log" diff --git a/models/migrations/v1_13/v143.go b/models/migrations/v1_13/v143.go index 885768dff3..b9a856ed0f 100644 --- a/models/migrations/v1_13/v143.go +++ b/models/migrations/v1_13/v143.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_13 //nolint +package v1_13 import ( "code.gitea.io/gitea/modules/log" diff --git a/models/migrations/v1_13/v144.go b/models/migrations/v1_13/v144.go index f5a0bc5751..9352d78bc8 100644 --- a/models/migrations/v1_13/v144.go +++ b/models/migrations/v1_13/v144.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_13 //nolint +package v1_13 import ( "code.gitea.io/gitea/modules/log" diff --git a/models/migrations/v1_13/v145.go b/models/migrations/v1_13/v145.go index 8acb29bf33..86ebb4f9d9 100644 --- a/models/migrations/v1_13/v145.go +++ b/models/migrations/v1_13/v145.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_13 //nolint +package v1_13 import ( "fmt" @@ -42,7 +42,7 @@ func IncreaseLanguageField(x *xorm.Engine) error { switch { case setting.Database.Type.IsMySQL(): - if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE language_stat MODIFY COLUMN language %s", sqlType)); err != nil { + if _, err := sess.Exec("ALTER TABLE language_stat MODIFY COLUMN language " + sqlType); err != nil { return err } case setting.Database.Type.IsMSSQL(): @@ -64,7 +64,7 @@ func IncreaseLanguageField(x *xorm.Engine) error { return fmt.Errorf("Drop table `language_stat` constraint `%s`: %w", constraint, err) } } - if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE language_stat ALTER COLUMN language %s", sqlType)); err != nil { + if _, err := sess.Exec("ALTER TABLE language_stat ALTER COLUMN language " + sqlType); err != nil { return err } // Finally restore the constraint @@ -72,7 +72,7 @@ func IncreaseLanguageField(x *xorm.Engine) error { return err } case setting.Database.Type.IsPostgreSQL(): - if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE language_stat ALTER COLUMN language TYPE %s", sqlType)); err != nil { + if _, err := sess.Exec("ALTER TABLE language_stat ALTER COLUMN language TYPE " + sqlType); err != nil { return err } } diff --git a/models/migrations/v1_13/v146.go b/models/migrations/v1_13/v146.go index 7d9a878704..355c772c26 100644 --- a/models/migrations/v1_13/v146.go +++ b/models/migrations/v1_13/v146.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_13 //nolint +package v1_13 import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_13/v147.go b/models/migrations/v1_13/v147.go index 510ef39b28..0059c06220 100644 --- a/models/migrations/v1_13/v147.go +++ b/models/migrations/v1_13/v147.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_13 //nolint +package v1_13 import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_13/v148.go b/models/migrations/v1_13/v148.go index 7bb8ab700b..d276db3d61 100644 --- a/models/migrations/v1_13/v148.go +++ b/models/migrations/v1_13/v148.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_13 //nolint +package v1_13 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_13/v149.go b/models/migrations/v1_13/v149.go index 2a1db04cbb..a96b8e5ac7 100644 --- a/models/migrations/v1_13/v149.go +++ b/models/migrations/v1_13/v149.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_13 //nolint +package v1_13 import ( "fmt" diff --git a/models/migrations/v1_13/v150.go b/models/migrations/v1_13/v150.go index d5ba489566..590ea72903 100644 --- a/models/migrations/v1_13/v150.go +++ b/models/migrations/v1_13/v150.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_13 //nolint +package v1_13 import ( "code.gitea.io/gitea/models/migrations/base" diff --git a/models/migrations/v1_13/v151.go b/models/migrations/v1_13/v151.go index 25af1d03ec..454929534f 100644 --- a/models/migrations/v1_13/v151.go +++ b/models/migrations/v1_13/v151.go @@ -1,10 +1,11 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_13 //nolint +package v1_13 import ( "context" + "errors" "fmt" "strings" @@ -113,7 +114,7 @@ func SetDefaultPasswordToArgon2(x *xorm.Engine) error { newTableColumns := table.Columns() if len(newTableColumns) == 0 { - return fmt.Errorf("no columns in new table") + return errors.New("no columns in new table") } hasID := false for _, column := range newTableColumns { diff --git a/models/migrations/v1_13/v152.go b/models/migrations/v1_13/v152.go index 502c82a40d..648e26446f 100644 --- a/models/migrations/v1_13/v152.go +++ b/models/migrations/v1_13/v152.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_13 //nolint +package v1_13 import "xorm.io/xorm" diff --git a/models/migrations/v1_13/v153.go b/models/migrations/v1_13/v153.go index 0b2dd3eb62..e5462fc162 100644 --- a/models/migrations/v1_13/v153.go +++ b/models/migrations/v1_13/v153.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_13 //nolint +package v1_13 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_13/v154.go b/models/migrations/v1_13/v154.go index 60cc56713e..5477d1b889 100644 --- a/models/migrations/v1_13/v154.go +++ b/models/migrations/v1_13/v154.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_13 //nolint +package v1_13 import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_14/main_test.go b/models/migrations/v1_14/main_test.go index 7a091b9b9a..978f88577c 100644 --- a/models/migrations/v1_14/main_test.go +++ b/models/migrations/v1_14/main_test.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( "testing" diff --git a/models/migrations/v1_14/v155.go b/models/migrations/v1_14/v155.go index e814f59938..505a9ae033 100644 --- a/models/migrations/v1_14/v155.go +++ b/models/migrations/v1_14/v155.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. 
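SetDefaultPasswordToArgon2 above (v151.go), like several later hunks (v158.go, v222.go), swaps fmt.Errorf for errors.New when the message contains no format verbs: there is nothing to format, so the plain constructor is the idiomatic choice. A minimal sketch, loosely modelled on the column check in that hunk:

	package main

	import (
		"errors"
		"fmt"
	)

	func checkColumns(columns []string) error {
		if len(columns) == 0 {
			// Constant message, no verbs: errors.New, not fmt.Errorf.
			return errors.New("no columns in new table")
		}
		if columns[0] != "id" {
			// Dynamic detail still warrants fmt.Errorf.
			return fmt.Errorf("unexpected first column %q", columns[0])
		}
		return nil
	}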
// SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( "fmt" diff --git a/models/migrations/v1_14/v156.go b/models/migrations/v1_14/v156.go index 2cf4954a15..2fa5819610 100644 --- a/models/migrations/v1_14/v156.go +++ b/models/migrations/v1_14/v156.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( "fmt" diff --git a/models/migrations/v1_14/v157.go b/models/migrations/v1_14/v157.go index 7187278d29..2c5625ebbd 100644 --- a/models/migrations/v1_14/v157.go +++ b/models/migrations/v1_14/v157.go @@ -1,24 +1,13 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( "xorm.io/xorm" ) func FixRepoTopics(x *xorm.Engine) error { - type Topic struct { //nolint:unused - ID int64 `xorm:"pk autoincr"` - Name string `xorm:"UNIQUE VARCHAR(25)"` - RepoCount int - } - - type RepoTopic struct { //nolint:unused - RepoID int64 `xorm:"pk"` - TopicID int64 `xorm:"pk"` - } - type Repository struct { ID int64 `xorm:"pk autoincr"` Topics []string `xorm:"TEXT JSON"` diff --git a/models/migrations/v1_14/v158.go b/models/migrations/v1_14/v158.go index 1094d8abf7..3c57e8e3da 100644 --- a/models/migrations/v1_14/v158.go +++ b/models/migrations/v1_14/v158.go @@ -1,10 +1,10 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( - "fmt" + "errors" "strconv" "code.gitea.io/gitea/modules/log" @@ -82,7 +82,7 @@ func UpdateCodeCommentReplies(x *xorm.Engine) error { sqlCmd = "SELECT TOP " + strconv.Itoa(batchSize) + " * FROM #temp_comments WHERE " + "(id NOT IN ( SELECT TOP " + strconv.Itoa(start) + " id FROM #temp_comments ORDER BY id )) ORDER BY id" default: - return fmt.Errorf("Unsupported database type") + return errors.New("Unsupported database type") } if err := sess.SQL(sqlCmd).Find(&comments); err != nil { diff --git a/models/migrations/v1_14/v159.go b/models/migrations/v1_14/v159.go index 149ae0f6a8..e6f6f0f061 100644 --- a/models/migrations/v1_14/v159.go +++ b/models/migrations/v1_14/v159.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( "code.gitea.io/gitea/models/migrations/base" diff --git a/models/migrations/v1_14/v160.go b/models/migrations/v1_14/v160.go index 4dea91b514..73f3798954 100644 --- a/models/migrations/v1_14/v160.go +++ b/models/migrations/v1_14/v160.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_14/v161.go b/models/migrations/v1_14/v161.go index ac7e821a80..eb92dee77c 100644 --- a/models/migrations/v1_14/v161.go +++ b/models/migrations/v1_14/v161.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( "context" diff --git a/models/migrations/v1_14/v162.go b/models/migrations/v1_14/v162.go index 2e4e0b8eb0..a0ddd36d55 100644 --- a/models/migrations/v1_14/v162.go +++ b/models/migrations/v1_14/v162.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( "code.gitea.io/gitea/models/migrations/base" diff --git a/models/migrations/v1_14/v163.go b/models/migrations/v1_14/v163.go index 0cd8ba68c8..84c35190b7 100644 --- a/models/migrations/v1_14/v163.go +++ b/models/migrations/v1_14/v163.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( "code.gitea.io/gitea/models/migrations/base" diff --git a/models/migrations/v1_14/v164.go b/models/migrations/v1_14/v164.go index 54f6951427..d2fd9b8464 100644 --- a/models/migrations/v1_14/v164.go +++ b/models/migrations/v1_14/v164.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( "fmt" diff --git a/models/migrations/v1_14/v165.go b/models/migrations/v1_14/v165.go index 926350cdf7..6e1b34156b 100644 --- a/models/migrations/v1_14/v165.go +++ b/models/migrations/v1_14/v165.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( "code.gitea.io/gitea/models/migrations/base" @@ -16,10 +16,7 @@ func ConvertHookTaskTypeToVarcharAndTrim(x *xorm.Engine) error { return nil } - type HookTask struct { //nolint:unused - Typ string `xorm:"VARCHAR(16) index"` - } - + // HookTask: Typ string `xorm:"VARCHAR(16) index"` if err := base.ModifyColumn(x, "hook_task", &schemas.Column{ Name: "typ", SQLType: schemas.SQLType{ @@ -42,10 +39,7 @@ func ConvertHookTaskTypeToVarcharAndTrim(x *xorm.Engine) error { return err } - type Webhook struct { //nolint:unused - Type string `xorm:"VARCHAR(16) index"` - } - + // Webhook: Type string `xorm:"VARCHAR(16) index"` if err := base.ModifyColumn(x, "webhook", &schemas.Column{ Name: "type", SQLType: schemas.SQLType{ diff --git a/models/migrations/v1_14/v166.go b/models/migrations/v1_14/v166.go index e5731582fd..4c106bd7da 100644 --- a/models/migrations/v1_14/v166.go +++ b/models/migrations/v1_14/v166.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( "crypto/sha256" diff --git a/models/migrations/v1_14/v167.go b/models/migrations/v1_14/v167.go index 9d416f6a32..d77bbc401e 100644 --- a/models/migrations/v1_14/v167.go +++ b/models/migrations/v1_14/v167.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( "fmt" diff --git a/models/migrations/v1_14/v168.go b/models/migrations/v1_14/v168.go index a30a8859f7..aa93eec19b 100644 --- a/models/migrations/v1_14/v168.go +++ b/models/migrations/v1_14/v168.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import "xorm.io/xorm" diff --git a/models/migrations/v1_14/v169.go b/models/migrations/v1_14/v169.go index 5b81bb58b1..4f9df0d96f 100644 --- a/models/migrations/v1_14/v169.go +++ b/models/migrations/v1_14/v169.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_14/v170.go b/models/migrations/v1_14/v170.go index 7b6498a3e9..a2ff4623e1 100644 --- a/models/migrations/v1_14/v170.go +++ b/models/migrations/v1_14/v170.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( "fmt" diff --git a/models/migrations/v1_14/v171.go b/models/migrations/v1_14/v171.go index 51a35a02ad..7b200e960a 100644 --- a/models/migrations/v1_14/v171.go +++ b/models/migrations/v1_14/v171.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( "fmt" diff --git a/models/migrations/v1_14/v172.go b/models/migrations/v1_14/v172.go index 0f9bef902a..bbd61d87b2 100644 --- a/models/migrations/v1_14/v172.go +++ b/models/migrations/v1_14/v172.go @@ -1,7 +1,7 @@ // Copyright 2020 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_14/v173.go b/models/migrations/v1_14/v173.go index 2d9eee9197..7752fbe966 100644 --- a/models/migrations/v1_14/v173.go +++ b/models/migrations/v1_14/v173.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( "fmt" diff --git a/models/migrations/v1_14/v174.go b/models/migrations/v1_14/v174.go index c839e15db8..4049e43070 100644 --- a/models/migrations/v1_14/v174.go +++ b/models/migrations/v1_14/v174.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( "fmt" diff --git a/models/migrations/v1_14/v175.go b/models/migrations/v1_14/v175.go index 70d72b2600..92ed130473 100644 --- a/models/migrations/v1_14/v175.go +++ b/models/migrations/v1_14/v175.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( "fmt" diff --git a/models/migrations/v1_14/v176.go b/models/migrations/v1_14/v176.go index 1ed49f75fa..ef5dce9a02 100644 --- a/models/migrations/v1_14/v176.go +++ b/models/migrations/v1_14/v176.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_14/v176_test.go b/models/migrations/v1_14/v176_test.go index ea3e750d7f..5c1db4db71 100644 --- a/models/migrations/v1_14/v176_test.go +++ b/models/migrations/v1_14/v176_test.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( "testing" diff --git a/models/migrations/v1_14/v177.go b/models/migrations/v1_14/v177.go index 6e1838f369..96676bf8d9 100644 --- a/models/migrations/v1_14/v177.go +++ b/models/migrations/v1_14/v177.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( "fmt" diff --git a/models/migrations/v1_14/v177_test.go b/models/migrations/v1_14/v177_test.go index 5568a18fec..263f69f338 100644 --- a/models/migrations/v1_14/v177_test.go +++ b/models/migrations/v1_14/v177_test.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_14 //nolint +package v1_14 import ( "testing" diff --git a/models/migrations/v1_15/main_test.go b/models/migrations/v1_15/main_test.go index 366f19788e..d01585e997 100644 --- a/models/migrations/v1_15/main_test.go +++ b/models/migrations/v1_15/main_test.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_15 //nolint +package v1_15 import ( "testing" diff --git a/models/migrations/v1_15/v178.go b/models/migrations/v1_15/v178.go index 6d236eb049..ca3a5c262e 100644 --- a/models/migrations/v1_15/v178.go +++ b/models/migrations/v1_15/v178.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_15 //nolint +package v1_15 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_15/v179.go b/models/migrations/v1_15/v179.go index f6b142eb42..d6fb86ffec 100644 --- a/models/migrations/v1_15/v179.go +++ b/models/migrations/v1_15/v179.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_15 //nolint +package v1_15 import ( "code.gitea.io/gitea/models/migrations/base" diff --git a/models/migrations/v1_15/v180.go b/models/migrations/v1_15/v180.go index c71e771861..dd132f8330 100644 --- a/models/migrations/v1_15/v180.go +++ b/models/migrations/v1_15/v180.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_15 //nolint +package v1_15 import ( "code.gitea.io/gitea/modules/json" diff --git a/models/migrations/v1_15/v181.go b/models/migrations/v1_15/v181.go index 2185ed0213..fb1d3d7a75 100644 --- a/models/migrations/v1_15/v181.go +++ b/models/migrations/v1_15/v181.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_15 //nolint +package v1_15 import ( "strings" diff --git a/models/migrations/v1_15/v181_test.go b/models/migrations/v1_15/v181_test.go index 1b075be7a0..73b5c1f3d6 100644 --- a/models/migrations/v1_15/v181_test.go +++ b/models/migrations/v1_15/v181_test.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_15 //nolint +package v1_15 import ( "strings" @@ -49,7 +49,7 @@ func Test_AddPrimaryEmail2EmailAddress(t *testing.T) { assert.NoError(t, err) assert.True(t, has) assert.True(t, emailAddress.IsPrimary) - assert.EqualValues(t, user.IsActive, emailAddress.IsActivated) - assert.EqualValues(t, user.ID, emailAddress.UID) + assert.Equal(t, user.IsActive, emailAddress.IsActivated) + assert.Equal(t, user.ID, emailAddress.UID) } } diff --git a/models/migrations/v1_15/v182.go b/models/migrations/v1_15/v182.go index 9ca500c0f9..f53ff11df9 100644 --- a/models/migrations/v1_15/v182.go +++ b/models/migrations/v1_15/v182.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. 
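The test hunk above (v181_test.go), and the later ones in v189_test.go, v193_test.go, v210_test.go and v233_test.go, tighten assertions: testify's assert.EqualValues passes when the two values are equal after a type conversion, while assert.Equal also requires the types to match, so it is the stricter check whenever both operands already share a type (the same sweep prefers assert.Empty over comparing against an empty string). A small standalone illustration, not taken from the diff:

	package v1_15

	import (
		"testing"

		"github.com/stretchr/testify/assert"
	)

	func TestAssertFlavours(t *testing.T) {
		var id32 int32 = 7
		var id64 int64 = 7

		assert.EqualValues(t, id64, id32) // passes: values match after conversion
		// assert.Equal(t, id64, id32)    // would fail: int64 vs int32
		assert.Equal(t, int64(7), id64)   // same type and same value
		assert.Empty(t, "")               // clearer than assert.Equal(t, "", s)
	}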
// SPDX-License-Identifier: MIT -package v1_15 //nolint +package v1_15 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_15/v182_test.go b/models/migrations/v1_15/v182_test.go index 75ef8e1cd8..5fc6a0c467 100644 --- a/models/migrations/v1_15/v182_test.go +++ b/models/migrations/v1_15/v182_test.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_15 //nolint +package v1_15 import ( "testing" diff --git a/models/migrations/v1_15/v183.go b/models/migrations/v1_15/v183.go index effad1b467..5d0582f03d 100644 --- a/models/migrations/v1_15/v183.go +++ b/models/migrations/v1_15/v183.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_15 //nolint +package v1_15 import ( "fmt" diff --git a/models/migrations/v1_15/v184.go b/models/migrations/v1_15/v184.go index 4b3dd1467a..2823bc1f7a 100644 --- a/models/migrations/v1_15/v184.go +++ b/models/migrations/v1_15/v184.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_15 //nolint +package v1_15 import ( "context" diff --git a/models/migrations/v1_15/v185.go b/models/migrations/v1_15/v185.go index e5878ec193..60af59edca 100644 --- a/models/migrations/v1_15/v185.go +++ b/models/migrations/v1_15/v185.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_15 //nolint +package v1_15 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_15/v186.go b/models/migrations/v1_15/v186.go index 01aab3add5..67dc97d13d 100644 --- a/models/migrations/v1_15/v186.go +++ b/models/migrations/v1_15/v186.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_15 //nolint +package v1_15 import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_15/v187.go b/models/migrations/v1_15/v187.go index 21cd6772b7..5fd90c65fb 100644 --- a/models/migrations/v1_15/v187.go +++ b/models/migrations/v1_15/v187.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_15 //nolint +package v1_15 import ( "code.gitea.io/gitea/models/migrations/base" diff --git a/models/migrations/v1_15/v188.go b/models/migrations/v1_15/v188.go index 71e45cab0e..4494e6ff05 100644 --- a/models/migrations/v1_15/v188.go +++ b/models/migrations/v1_15/v188.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_15 //nolint +package v1_15 import "xorm.io/xorm" diff --git a/models/migrations/v1_16/main_test.go b/models/migrations/v1_16/main_test.go index 817a0c13a4..7f93d6e9e5 100644 --- a/models/migrations/v1_16/main_test.go +++ b/models/migrations/v1_16/main_test.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "testing" diff --git a/models/migrations/v1_16/v189.go b/models/migrations/v1_16/v189.go index 5649645051..6bc99e58ab 100644 --- a/models/migrations/v1_16/v189.go +++ b/models/migrations/v1_16/v189.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "encoding/binary" diff --git a/models/migrations/v1_16/v189_test.go b/models/migrations/v1_16/v189_test.go index 32ef821d27..fb56ac8e11 100644 --- a/models/migrations/v1_16/v189_test.go +++ b/models/migrations/v1_16/v189_test.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "testing" @@ -75,8 +75,8 @@ func Test_UnwrapLDAPSourceCfg(t *testing.T) { return } - assert.EqualValues(t, expected, converted, "UnwrapLDAPSourceCfg failed for %d", source.ID) - assert.EqualValues(t, source.ID%2 == 0, source.IsActive, "UnwrapLDAPSourceCfg failed for %d", source.ID) + assert.Equal(t, expected, converted, "UnwrapLDAPSourceCfg failed for %d", source.ID) + assert.Equal(t, source.ID%2 == 0, source.IsActive, "UnwrapLDAPSourceCfg failed for %d", source.ID) } } } diff --git a/models/migrations/v1_16/v190.go b/models/migrations/v1_16/v190.go index 5953802849..1eb6b6ddb4 100644 --- a/models/migrations/v1_16/v190.go +++ b/models/migrations/v1_16/v190.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "fmt" diff --git a/models/migrations/v1_16/v191.go b/models/migrations/v1_16/v191.go index c618783c08..957c82e484 100644 --- a/models/migrations/v1_16/v191.go +++ b/models/migrations/v1_16/v191.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "code.gitea.io/gitea/modules/setting" diff --git a/models/migrations/v1_16/v192.go b/models/migrations/v1_16/v192.go index 2d5d158a09..9d03fbe3c8 100644 --- a/models/migrations/v1_16/v192.go +++ b/models/migrations/v1_16/v192.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "code.gitea.io/gitea/models/migrations/base" diff --git a/models/migrations/v1_16/v193.go b/models/migrations/v1_16/v193.go index 8d3ce7a558..a5af2de380 100644 --- a/models/migrations/v1_16/v193.go +++ b/models/migrations/v1_16/v193.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_16/v193_test.go b/models/migrations/v1_16/v193_test.go index b279967a2c..2e827f0550 100644 --- a/models/migrations/v1_16/v193_test.go +++ b/models/migrations/v1_16/v193_test.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "testing" @@ -62,7 +62,7 @@ func Test_AddRepoIDForAttachment(t *testing.T) { has, err := x.ID(attach.IssueID).Get(&issue) assert.NoError(t, err) assert.True(t, has) - assert.EqualValues(t, attach.RepoID, issue.RepoID) + assert.Equal(t, attach.RepoID, issue.RepoID) } var releaseAttachments []*NewAttachment @@ -75,6 +75,6 @@ func Test_AddRepoIDForAttachment(t *testing.T) { has, err := x.ID(attach.ReleaseID).Get(&release) assert.NoError(t, err) assert.True(t, has) - assert.EqualValues(t, attach.RepoID, release.RepoID) + assert.Equal(t, attach.RepoID, release.RepoID) } } diff --git a/models/migrations/v1_16/v194.go b/models/migrations/v1_16/v194.go index 6aa13c50cf..2e4ed8340e 100644 --- a/models/migrations/v1_16/v194.go +++ b/models/migrations/v1_16/v194.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "fmt" diff --git a/models/migrations/v1_16/v195.go b/models/migrations/v1_16/v195.go index 6d7e94141e..4fd42b7bd2 100644 --- a/models/migrations/v1_16/v195.go +++ b/models/migrations/v1_16/v195.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "fmt" diff --git a/models/migrations/v1_16/v195_test.go b/models/migrations/v1_16/v195_test.go index 742397bf32..946e06e399 100644 --- a/models/migrations/v1_16/v195_test.go +++ b/models/migrations/v1_16/v195_test.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "testing" diff --git a/models/migrations/v1_16/v196.go b/models/migrations/v1_16/v196.go index 7cbafc61e5..6c9caa100f 100644 --- a/models/migrations/v1_16/v196.go +++ b/models/migrations/v1_16/v196.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "fmt" diff --git a/models/migrations/v1_16/v197.go b/models/migrations/v1_16/v197.go index 97888b2847..862bdfdcbd 100644 --- a/models/migrations/v1_16/v197.go +++ b/models/migrations/v1_16/v197.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_16/v198.go b/models/migrations/v1_16/v198.go index 115bb313a0..f35ede138a 100644 --- a/models/migrations/v1_16/v198.go +++ b/models/migrations/v1_16/v198.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "fmt" diff --git a/models/migrations/v1_16/v199.go b/models/migrations/v1_16/v199.go index 6adcf890af..4020352f2b 100644 --- a/models/migrations/v1_16/v199.go +++ b/models/migrations/v1_16/v199.go @@ -1,6 +1,6 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 // We used to use a table `remote_version` to store information for updater, now we use `AppState`, so this migration task is a no-op now. diff --git a/models/migrations/v1_16/v200.go b/models/migrations/v1_16/v200.go index c08c20e51d..de57fad8fe 100644 --- a/models/migrations/v1_16/v200.go +++ b/models/migrations/v1_16/v200.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "fmt" diff --git a/models/migrations/v1_16/v201.go b/models/migrations/v1_16/v201.go index 35e0c9f2fb..2c43698b0c 100644 --- a/models/migrations/v1_16/v201.go +++ b/models/migrations/v1_16/v201.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_16/v202.go b/models/migrations/v1_16/v202.go index 6ba36152f1..d8c8fdcadc 100644 --- a/models/migrations/v1_16/v202.go +++ b/models/migrations/v1_16/v202.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "fmt" diff --git a/models/migrations/v1_16/v203.go b/models/migrations/v1_16/v203.go index e8e6b52453..c3241cba57 100644 --- a/models/migrations/v1_16/v203.go +++ b/models/migrations/v1_16/v203.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_16/v204.go b/models/migrations/v1_16/v204.go index ece03e1305..4d375307e7 100644 --- a/models/migrations/v1_16/v204.go +++ b/models/migrations/v1_16/v204.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import "xorm.io/xorm" diff --git a/models/migrations/v1_16/v205.go b/models/migrations/v1_16/v205.go index d6c577083c..78241bad5b 100644 --- a/models/migrations/v1_16/v205.go +++ b/models/migrations/v1_16/v205.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "code.gitea.io/gitea/models/migrations/base" diff --git a/models/migrations/v1_16/v206.go b/models/migrations/v1_16/v206.go index 581a7d76e9..01a9c386eb 100644 --- a/models/migrations/v1_16/v206.go +++ b/models/migrations/v1_16/v206.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "fmt" diff --git a/models/migrations/v1_16/v207.go b/models/migrations/v1_16/v207.go index 91208f066c..19126ead1f 100644 --- a/models/migrations/v1_16/v207.go +++ b/models/migrations/v1_16/v207.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_16/v208.go b/models/migrations/v1_16/v208.go index 1a11ef096a..fb643324f4 100644 --- a/models/migrations/v1_16/v208.go +++ b/models/migrations/v1_16/v208.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_16/v209.go b/models/migrations/v1_16/v209.go index be3100e02a..230838647b 100644 --- a/models/migrations/v1_16/v209.go +++ b/models/migrations/v1_16/v209.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_16/v210.go b/models/migrations/v1_16/v210.go index 51b7d81e99..0b94baf8e3 100644 --- a/models/migrations/v1_16/v210.go +++ b/models/migrations/v1_16/v210.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "encoding/base32" diff --git a/models/migrations/v1_16/v210_test.go b/models/migrations/v1_16/v210_test.go index d43fb03106..3b4ac7aa4b 100644 --- a/models/migrations/v1_16/v210_test.go +++ b/models/migrations/v1_16/v210_test.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_16 //nolint +package v1_16 import ( "testing" @@ -71,5 +71,5 @@ func Test_RemigrateU2FCredentials(t *testing.T) { return } - assert.EqualValues(t, expected, got) + assert.Equal(t, expected, got) } diff --git a/models/migrations/v1_17/main_test.go b/models/migrations/v1_17/main_test.go index 79cb3fa078..571a4f55a3 100644 --- a/models/migrations/v1_17/main_test.go +++ b/models/migrations/v1_17/main_test.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_17 //nolint +package v1_17 import ( "testing" diff --git a/models/migrations/v1_17/v211.go b/models/migrations/v1_17/v211.go index 9b72c8610b..517cf19388 100644 --- a/models/migrations/v1_17/v211.go +++ b/models/migrations/v1_17/v211.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_17 //nolint +package v1_17 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_17/v212.go b/models/migrations/v1_17/v212.go index e3f9437121..788792211f 100644 --- a/models/migrations/v1_17/v212.go +++ b/models/migrations/v1_17/v212.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_17 //nolint +package v1_17 import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_17/v213.go b/models/migrations/v1_17/v213.go index bb3f466e52..b2bbdf7279 100644 --- a/models/migrations/v1_17/v213.go +++ b/models/migrations/v1_17/v213.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_17 //nolint +package v1_17 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_17/v214.go b/models/migrations/v1_17/v214.go index 2268164919..1925324f0f 100644 --- a/models/migrations/v1_17/v214.go +++ b/models/migrations/v1_17/v214.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_17 //nolint +package v1_17 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_17/v215.go b/models/migrations/v1_17/v215.go index b338f85417..748539225d 100644 --- a/models/migrations/v1_17/v215.go +++ b/models/migrations/v1_17/v215.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_17 //nolint +package v1_17 import ( "code.gitea.io/gitea/models/pull" diff --git a/models/migrations/v1_17/v216.go b/models/migrations/v1_17/v216.go index 268f472a42..37aeacb6fc 100644 --- a/models/migrations/v1_17/v216.go +++ b/models/migrations/v1_17/v216.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_17 //nolint +package v1_17 // This migration added non-ideal indices to the action table which on larger datasets slowed things down // it has been superseded by v218.go diff --git a/models/migrations/v1_17/v217.go b/models/migrations/v1_17/v217.go index 3f970b68a5..04626bcbc5 100644 --- a/models/migrations/v1_17/v217.go +++ b/models/migrations/v1_17/v217.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_17 //nolint +package v1_17 import ( "code.gitea.io/gitea/modules/setting" diff --git a/models/migrations/v1_17/v218.go b/models/migrations/v1_17/v218.go index 4c05a9b539..17d4cd89d4 100644 --- a/models/migrations/v1_17/v218.go +++ b/models/migrations/v1_17/v218.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_17 //nolint +package v1_17 import ( "code.gitea.io/gitea/modules/setting" diff --git a/models/migrations/v1_17/v219.go b/models/migrations/v1_17/v219.go index d266029fd9..6e335cb813 100644 --- a/models/migrations/v1_17/v219.go +++ b/models/migrations/v1_17/v219.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_17 //nolint +package v1_17 import ( "time" diff --git a/models/migrations/v1_17/v220.go b/models/migrations/v1_17/v220.go index 904ddc5192..4ac8c58e1e 100644 --- a/models/migrations/v1_17/v220.go +++ b/models/migrations/v1_17/v220.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_17 //nolint +package v1_17 import ( packages_model "code.gitea.io/gitea/models/packages" diff --git a/models/migrations/v1_17/v221.go b/models/migrations/v1_17/v221.go index 9e159388bd..9e6a67eb18 100644 --- a/models/migrations/v1_17/v221.go +++ b/models/migrations/v1_17/v221.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_17 //nolint +package v1_17 import ( "encoding/base32" diff --git a/models/migrations/v1_17/v221_test.go b/models/migrations/v1_17/v221_test.go index 9ca54142e2..a2dc0fae55 100644 --- a/models/migrations/v1_17/v221_test.go +++ b/models/migrations/v1_17/v221_test.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_17 //nolint +package v1_17 import ( "encoding/base32" diff --git a/models/migrations/v1_17/v222.go b/models/migrations/v1_17/v222.go index 2ffb94eb1c..a5ea537d8a 100644 --- a/models/migrations/v1_17/v222.go +++ b/models/migrations/v1_17/v222.go @@ -1,10 +1,11 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_17 //nolint +package v1_17 import ( "context" + "errors" "fmt" "code.gitea.io/gitea/models/migrations/base" @@ -29,7 +30,7 @@ func DropOldCredentialIDColumn(x *xorm.Engine) error { } if !credentialIDBytesExists { // looks like 221 hasn't properly run - return fmt.Errorf("webauthn_credential does not have a credential_id_bytes column... it is not safe to run this migration") + return errors.New("webauthn_credential does not have a credential_id_bytes column... 
it is not safe to run this migration") } // Create webauthnCredential table diff --git a/models/migrations/v1_17/v223.go b/models/migrations/v1_17/v223.go index 018451ee4c..b2bfb76551 100644 --- a/models/migrations/v1_17/v223.go +++ b/models/migrations/v1_17/v223.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_17 //nolint +package v1_17 import ( "context" diff --git a/models/migrations/v1_18/main_test.go b/models/migrations/v1_18/main_test.go index f71a21d1fb..ebcfb45a94 100644 --- a/models/migrations/v1_18/main_test.go +++ b/models/migrations/v1_18/main_test.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_18 //nolint +package v1_18 import ( "testing" diff --git a/models/migrations/v1_18/v224.go b/models/migrations/v1_18/v224.go index f3d522b91a..6dc12020ea 100644 --- a/models/migrations/v1_18/v224.go +++ b/models/migrations/v1_18/v224.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_18 //nolint +package v1_18 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_18/v225.go b/models/migrations/v1_18/v225.go index b0ac3777fc..bc6117e38f 100644 --- a/models/migrations/v1_18/v225.go +++ b/models/migrations/v1_18/v225.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_18 //nolint +package v1_18 import ( "code.gitea.io/gitea/modules/setting" diff --git a/models/migrations/v1_18/v226.go b/models/migrations/v1_18/v226.go index f87e24b11d..8ed9761476 100644 --- a/models/migrations/v1_18/v226.go +++ b/models/migrations/v1_18/v226.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_18 //nolint +package v1_18 import ( "xorm.io/builder" diff --git a/models/migrations/v1_18/v227.go b/models/migrations/v1_18/v227.go index 5fe5dcd0c9..3aca686d59 100644 --- a/models/migrations/v1_18/v227.go +++ b/models/migrations/v1_18/v227.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_18 //nolint +package v1_18 import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_18/v228.go b/models/migrations/v1_18/v228.go index 3e7a36de15..b13f6461bd 100644 --- a/models/migrations/v1_18/v228.go +++ b/models/migrations/v1_18/v228.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_18 //nolint +package v1_18 import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_18/v229.go b/models/migrations/v1_18/v229.go index 10d9f35097..bc15e01390 100644 --- a/models/migrations/v1_18/v229.go +++ b/models/migrations/v1_18/v229.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_18 //nolint +package v1_18 import ( "fmt" diff --git a/models/migrations/v1_18/v229_test.go b/models/migrations/v1_18/v229_test.go index d489328c00..5722dd3557 100644 --- a/models/migrations/v1_18/v229_test.go +++ b/models/migrations/v1_18/v229_test.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_18 //nolint +package v1_18 import ( "testing" diff --git a/models/migrations/v1_18/v230.go b/models/migrations/v1_18/v230.go index ea5b4d02e1..078fce7643 100644 --- a/models/migrations/v1_18/v230.go +++ b/models/migrations/v1_18/v230.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_18 //nolint +package v1_18 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_18/v230_test.go b/models/migrations/v1_18/v230_test.go index 40db4c2ffe..25b2f6525d 100644 --- a/models/migrations/v1_18/v230_test.go +++ b/models/migrations/v1_18/v230_test.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_18 //nolint +package v1_18 import ( "testing" diff --git a/models/migrations/v1_19/main_test.go b/models/migrations/v1_19/main_test.go index 59f42af111..87e807be6e 100644 --- a/models/migrations/v1_19/main_test.go +++ b/models/migrations/v1_19/main_test.go @@ -1,7 +1,7 @@ // Copyright 2021 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_19 //nolint +package v1_19 import ( "testing" diff --git a/models/migrations/v1_19/v231.go b/models/migrations/v1_19/v231.go index 79e46132f0..8ef1e4e743 100644 --- a/models/migrations/v1_19/v231.go +++ b/models/migrations/v1_19/v231.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_19 //nolint +package v1_19 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_19/v232.go b/models/migrations/v1_19/v232.go index 9caf587c1e..493dbc6df8 100644 --- a/models/migrations/v1_19/v232.go +++ b/models/migrations/v1_19/v232.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_19 //nolint +package v1_19 import ( "code.gitea.io/gitea/modules/setting" diff --git a/models/migrations/v1_19/v233.go b/models/migrations/v1_19/v233.go index ba4cd8e20b..9eb6d40509 100644 --- a/models/migrations/v1_19/v233.go +++ b/models/migrations/v1_19/v233.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_19 //nolint +package v1_19 import ( "fmt" diff --git a/models/migrations/v1_19/v233_test.go b/models/migrations/v1_19/v233_test.go index 32c10ab0f4..7436ff7483 100644 --- a/models/migrations/v1_19/v233_test.go +++ b/models/migrations/v1_19/v233_test.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_19 //nolint +package v1_19 import ( "testing" @@ -64,7 +64,7 @@ func Test_AddHeaderAuthorizationEncryptedColWebhook(t *testing.T) { assert.Equal(t, e.Meta, got[i].Meta) if e.HeaderAuthorization == "" { - assert.Equal(t, "", got[i].HeaderAuthorizationEncrypted) + assert.Empty(t, got[i].HeaderAuthorizationEncrypted) } else { cipherhex := got[i].HeaderAuthorizationEncrypted cleartext, err := secret.DecryptSecret(setting.SecretKey, cipherhex) diff --git a/models/migrations/v1_19/v234.go b/models/migrations/v1_19/v234.go index 728a580807..3475384d6f 100644 --- a/models/migrations/v1_19/v234.go +++ b/models/migrations/v1_19/v234.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_19 //nolint +package v1_19 import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_19/v235.go b/models/migrations/v1_19/v235.go index 3715de3920..297d90f65a 100644 --- a/models/migrations/v1_19/v235.go +++ b/models/migrations/v1_19/v235.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_19 //nolint +package v1_19 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_19/v236.go b/models/migrations/v1_19/v236.go index f172a85b1f..0ed4d97a27 100644 --- a/models/migrations/v1_19/v236.go +++ b/models/migrations/v1_19/v236.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_19 //nolint +package v1_19 import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_19/v237.go b/models/migrations/v1_19/v237.go index b23c765aa5..cf30226ccd 100644 --- a/models/migrations/v1_19/v237.go +++ b/models/migrations/v1_19/v237.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_19 //nolint +package v1_19 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_19/v238.go b/models/migrations/v1_19/v238.go index 266e6cea58..de681bfc7a 100644 --- a/models/migrations/v1_19/v238.go +++ b/models/migrations/v1_19/v238.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_19 //nolint +package v1_19 import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_19/v239.go b/models/migrations/v1_19/v239.go index 10076f2401..8f4a65be95 100644 --- a/models/migrations/v1_19/v239.go +++ b/models/migrations/v1_19/v239.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_19 //nolint +package v1_19 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_19/v240.go b/models/migrations/v1_19/v240.go index 4505f86299..7fdbaeb9dc 100644 --- a/models/migrations/v1_19/v240.go +++ b/models/migrations/v1_19/v240.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_19 //nolint +package v1_19 import ( "code.gitea.io/gitea/models/db" diff --git a/models/migrations/v1_19/v241.go b/models/migrations/v1_19/v241.go index a617d6fd2f..e35801a057 100644 --- a/models/migrations/v1_19/v241.go +++ b/models/migrations/v1_19/v241.go @@ -1,7 +1,7 @@ // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_19 //nolint +package v1_19 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_19/v242.go b/models/migrations/v1_19/v242.go index 4470835214..e9e759eaaa 100644 --- a/models/migrations/v1_19/v242.go +++ b/models/migrations/v1_19/v242.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_19 //nolint +package v1_19 import ( "code.gitea.io/gitea/modules/setting" diff --git a/models/migrations/v1_19/v243.go b/models/migrations/v1_19/v243.go index 55bbfafb2f..9c3f372594 100644 --- a/models/migrations/v1_19/v243.go +++ b/models/migrations/v1_19/v243.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_19 //nolint +package v1_19 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_20/main_test.go b/models/migrations/v1_20/main_test.go index 92a1a9f622..2fd63a7118 100644 --- a/models/migrations/v1_20/main_test.go +++ b/models/migrations/v1_20/main_test.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_20 //nolint +package v1_20 import ( "testing" diff --git a/models/migrations/v1_20/v244.go b/models/migrations/v1_20/v244.go index 977566ad7d..76cdccaca5 100644 --- a/models/migrations/v1_20/v244.go +++ b/models/migrations/v1_20/v244.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_20 //nolint +package v1_20 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_20/v245.go b/models/migrations/v1_20/v245.go index ab58d12880..4acb11416c 100644 --- a/models/migrations/v1_20/v245.go +++ b/models/migrations/v1_20/v245.go @@ -1,11 +1,10 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_20 //nolint +package v1_20 import ( "context" - "fmt" "code.gitea.io/gitea/models/migrations/base" "code.gitea.io/gitea/modules/setting" @@ -57,7 +56,7 @@ func RenameWebhookOrgToOwner(x *xorm.Engine) error { return err } sqlType := x.Dialect().SQLType(inferredTable.GetColumn("org_id")) - if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE `webhook` CHANGE org_id owner_id %s", sqlType)); err != nil { + if _, err := sess.Exec("ALTER TABLE `webhook` CHANGE org_id owner_id " + sqlType); err != nil { return err } case setting.Database.Type.IsMSSQL(): diff --git a/models/migrations/v1_20/v246.go b/models/migrations/v1_20/v246.go index e6340ef079..22bf723404 100644 --- a/models/migrations/v1_20/v246.go +++ b/models/migrations/v1_20/v246.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_20 //nolint +package v1_20 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_20/v247.go b/models/migrations/v1_20/v247.go index 59fc5c46b5..4f82937e18 100644 --- a/models/migrations/v1_20/v247.go +++ b/models/migrations/v1_20/v247.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_20 //nolint +package v1_20 import ( "code.gitea.io/gitea/modules/log" diff --git a/models/migrations/v1_20/v248.go b/models/migrations/v1_20/v248.go index 40555210e7..4f2091e4bc 100644 --- a/models/migrations/v1_20/v248.go +++ b/models/migrations/v1_20/v248.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_20 //nolint +package v1_20 import "xorm.io/xorm" diff --git a/models/migrations/v1_20/v249.go b/models/migrations/v1_20/v249.go index 02951a74d6..c6d3a177ca 100644 --- a/models/migrations/v1_20/v249.go +++ b/models/migrations/v1_20/v249.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_20 //nolint +package v1_20 import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_20/v250.go b/models/migrations/v1_20/v250.go index 86388ef0b8..ec45e6e5c3 100644 --- a/models/migrations/v1_20/v250.go +++ b/models/migrations/v1_20/v250.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_20 //nolint +package v1_20 import ( "strings" diff --git a/models/migrations/v1_20/v251.go b/models/migrations/v1_20/v251.go index 7743248a3f..a274c22a73 100644 --- a/models/migrations/v1_20/v251.go +++ b/models/migrations/v1_20/v251.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_20 //nolint +package v1_20 import ( "code.gitea.io/gitea/modules/log" diff --git a/models/migrations/v1_20/v252.go b/models/migrations/v1_20/v252.go index ab61cd9b8b..d6aa602753 100644 --- a/models/migrations/v1_20/v252.go +++ b/models/migrations/v1_20/v252.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_20 //nolint +package v1_20 import ( "code.gitea.io/gitea/modules/log" diff --git a/models/migrations/v1_20/v253.go b/models/migrations/v1_20/v253.go index 96c494bd8d..c96454dbf9 100644 --- a/models/migrations/v1_20/v253.go +++ b/models/migrations/v1_20/v253.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_20 //nolint +package v1_20 import ( "code.gitea.io/gitea/modules/log" diff --git a/models/migrations/v1_20/v254.go b/models/migrations/v1_20/v254.go index 1e26979a5b..9cdbfb3916 100644 --- a/models/migrations/v1_20/v254.go +++ b/models/migrations/v1_20/v254.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_20 //nolint +package v1_20 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_20/v255.go b/models/migrations/v1_20/v255.go index 14b70f8f96..caf198700e 100644 --- a/models/migrations/v1_20/v255.go +++ b/models/migrations/v1_20/v255.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_20 //nolint +package v1_20 import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_20/v256.go b/models/migrations/v1_20/v256.go index 822153b93e..7b84c1e154 100644 --- a/models/migrations/v1_20/v256.go +++ b/models/migrations/v1_20/v256.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_20 //nolint +package v1_20 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_20/v257.go b/models/migrations/v1_20/v257.go index 6c6ca4c748..9d5f7c07df 100644 --- a/models/migrations/v1_20/v257.go +++ b/models/migrations/v1_20/v257.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_20 //nolint +package v1_20 import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_20/v258.go b/models/migrations/v1_20/v258.go index 47174ce805..1d3faffdae 100644 --- a/models/migrations/v1_20/v258.go +++ b/models/migrations/v1_20/v258.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_20 //nolint +package v1_20 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_20/v259.go b/models/migrations/v1_20/v259.go index 5b8ced4ad7..9e0dc9b61d 100644 --- a/models/migrations/v1_20/v259.go +++ b/models/migrations/v1_20/v259.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_20 //nolint +package v1_20 import ( "fmt" @@ -329,7 +329,7 @@ func ConvertScopedAccessTokens(x *xorm.Engine) error { for _, token := range tokens { var scopes []string allNewScopesMap := make(map[AccessTokenScope]bool) - for _, oldScope := range strings.Split(token.Scope, ",") { + for oldScope := range strings.SplitSeq(token.Scope, ",") { if newScopes, exists := accessTokenScopeMap[OldAccessTokenScope(oldScope)]; exists { for _, newScope := range newScopes { allNewScopesMap[newScope] = true diff --git a/models/migrations/v1_20/v259_test.go b/models/migrations/v1_20/v259_test.go index 5bc9a71391..0bf63719e5 100644 --- a/models/migrations/v1_20/v259_test.go +++ b/models/migrations/v1_20/v259_test.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_20 //nolint +package v1_20 import ( "sort" @@ -96,7 +96,7 @@ func Test_ConvertScopedAccessTokens(t *testing.T) { tokens := make([]AccessToken, 0) err = x.Find(&tokens) assert.NoError(t, err) - assert.Equal(t, len(tests), len(tokens)) + assert.Len(t, tokens, len(tests)) // sort the tokens (insertion order by auto-incrementing primary key) sort.Slice(tokens, func(i, j int) bool { diff --git a/models/migrations/v1_21/main_test.go b/models/migrations/v1_21/main_test.go index 9afdea1677..536a7ade08 100644 --- a/models/migrations/v1_21/main_test.go +++ b/models/migrations/v1_21/main_test.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_21 //nolint +package v1_21 import ( "testing" diff --git a/models/migrations/v1_21/v260.go b/models/migrations/v1_21/v260.go index 6ca52c5998..8540c58ae8 100644 --- a/models/migrations/v1_21/v260.go +++ b/models/migrations/v1_21/v260.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_21 //nolint +package v1_21 import ( "code.gitea.io/gitea/models/migrations/base" diff --git a/models/migrations/v1_21/v261.go b/models/migrations/v1_21/v261.go index 4ec1160d0b..122b98eb93 100644 --- a/models/migrations/v1_21/v261.go +++ b/models/migrations/v1_21/v261.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_21 //nolint +package v1_21 import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_21/v262.go b/models/migrations/v1_21/v262.go index 23e900572a..6e88e29b9d 100644 --- a/models/migrations/v1_21/v262.go +++ b/models/migrations/v1_21/v262.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_21 //nolint +package v1_21 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_21/v263.go b/models/migrations/v1_21/v263.go index 2c7cbadf0d..55c418bde0 100644 --- a/models/migrations/v1_21/v263.go +++ b/models/migrations/v1_21/v263.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_21 //nolint +package v1_21 import ( "fmt" diff --git a/models/migrations/v1_21/v264.go b/models/migrations/v1_21/v264.go index e81a17ad6d..7fc0ec6024 100644 --- a/models/migrations/v1_21/v264.go +++ b/models/migrations/v1_21/v264.go @@ -1,11 +1,11 @@ // Copyright 2023 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_21 //nolint +package v1_21 import ( "context" - "fmt" + "errors" "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/modules/timeutil" @@ -57,7 +57,7 @@ func AddBranchTable(x *xorm.Engine) error { if err != nil { return err } else if !has { - return fmt.Errorf("no admin user found") + return errors.New("no admin user found") } branches := make([]Branch, 0, 100) diff --git a/models/migrations/v1_21/v265.go b/models/migrations/v1_21/v265.go index 800eb95f72..b6892acc27 100644 --- a/models/migrations/v1_21/v265.go +++ b/models/migrations/v1_21/v265.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_21 //nolint +package v1_21 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_21/v266.go b/models/migrations/v1_21/v266.go index 79a5f5e14c..440549e868 100644 --- a/models/migrations/v1_21/v266.go +++ b/models/migrations/v1_21/v266.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_21 //nolint +package v1_21 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_21/v267.go b/models/migrations/v1_21/v267.go index bc0e954bdc..394139a17e 100644 --- a/models/migrations/v1_21/v267.go +++ b/models/migrations/v1_21/v267.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_21 //nolint +package v1_21 import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_21/v268.go b/models/migrations/v1_21/v268.go index 332793ff07..b677d2383e 100644 --- a/models/migrations/v1_21/v268.go +++ b/models/migrations/v1_21/v268.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_21 //nolint +package v1_21 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_21/v269.go b/models/migrations/v1_21/v269.go index 475ec02380..042040927d 100644 --- a/models/migrations/v1_21/v269.go +++ b/models/migrations/v1_21/v269.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_21 //nolint +package v1_21 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_21/v270.go b/models/migrations/v1_21/v270.go index b9cc84d3ac..ab7c5660ba 100644 --- a/models/migrations/v1_21/v270.go +++ b/models/migrations/v1_21/v270.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_21 //nolint +package v1_21 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_21/v271.go b/models/migrations/v1_21/v271.go index 098f6499d5..05e1af1351 100644 --- a/models/migrations/v1_21/v271.go +++ b/models/migrations/v1_21/v271.go @@ -1,7 +1,8 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_21 //nolint +package v1_21 + import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_21/v272.go b/models/migrations/v1_21/v272.go index a729c49f1b..14c1e0c4b0 100644 --- a/models/migrations/v1_21/v272.go +++ b/models/migrations/v1_21/v272.go @@ -1,7 +1,8 @@ // Copyright 2023 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_21 //nolint +package v1_21 + import ( "xorm.io/xorm" ) diff --git a/models/migrations/v1_21/v273.go b/models/migrations/v1_21/v273.go index 61c79f4a76..e614a56a7d 100644 --- a/models/migrations/v1_21/v273.go +++ b/models/migrations/v1_21/v273.go @@ -1,7 +1,8 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_21 //nolint +package v1_21 + import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_21/v274.go b/models/migrations/v1_21/v274.go index df5994f159..d0b557a151 100644 --- a/models/migrations/v1_21/v274.go +++ b/models/migrations/v1_21/v274.go @@ -1,7 +1,8 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_21 //nolint +package v1_21 + import ( "time" diff --git a/models/migrations/v1_21/v275.go b/models/migrations/v1_21/v275.go index 78804a59d6..2bfe5c72fa 100644 --- a/models/migrations/v1_21/v275.go +++ b/models/migrations/v1_21/v275.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_21 //nolint +package v1_21 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_21/v276.go b/models/migrations/v1_21/v276.go index 15177bf040..3ab7e22cd0 100644 --- a/models/migrations/v1_21/v276.go +++ b/models/migrations/v1_21/v276.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_21 //nolint +package v1_21 import ( "context" @@ -172,7 +172,7 @@ func getRemoteAddress(ownerName, repoName, remoteName string) (string, error) { return "", fmt.Errorf("get remote %s's address of %s/%s failed: %v", remoteName, ownerName, repoName, err) } - u, err := giturl.Parse(remoteURL) + u, err := giturl.ParseGitURL(remoteURL) if err != nil { return "", err } diff --git a/models/migrations/v1_21/v277.go b/models/migrations/v1_21/v277.go index 12529160b7..0c102eddde 100644 --- a/models/migrations/v1_21/v277.go +++ b/models/migrations/v1_21/v277.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_21 //nolint +package v1_21 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_21/v278.go b/models/migrations/v1_21/v278.go index d6a462d1e7..846f228678 100644 --- a/models/migrations/v1_21/v278.go +++ b/models/migrations/v1_21/v278.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_21 //nolint +package v1_21 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_21/v279.go b/models/migrations/v1_21/v279.go index 2abd1bbe84..beb39effe1 100644 --- a/models/migrations/v1_21/v279.go +++ b/models/migrations/v1_21/v279.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_21 //nolint +package v1_21 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_22/main_test.go b/models/migrations/v1_22/main_test.go index efd8dbaa8c..ac8facd6aa 100644 --- a/models/migrations/v1_22/main_test.go +++ b/models/migrations/v1_22/main_test.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 import ( "testing" diff --git a/models/migrations/v1_22/v280.go b/models/migrations/v1_22/v280.go index a8ee4a3bf7..2271cb6089 100644 --- a/models/migrations/v1_22/v280.go +++ b/models/migrations/v1_22/v280.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_22/v281.go b/models/migrations/v1_22/v281.go index fc1866aa83..129ec2cba0 100644 --- a/models/migrations/v1_22/v281.go +++ b/models/migrations/v1_22/v281.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_22/v282.go b/models/migrations/v1_22/v282.go index baad9e0916..eed64c30f7 100644 --- a/models/migrations/v1_22/v282.go +++ b/models/migrations/v1_22/v282.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_22/v283.go b/models/migrations/v1_22/v283.go index 0a45c51245..0eca031b65 100644 --- a/models/migrations/v1_22/v283.go +++ b/models/migrations/v1_22/v283.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 import ( "fmt" diff --git a/models/migrations/v1_22/v283_test.go b/models/migrations/v1_22/v283_test.go index e89a7cbfc2..743f860466 100644 --- a/models/migrations/v1_22/v283_test.go +++ b/models/migrations/v1_22/v283_test.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 import ( "testing" diff --git a/models/migrations/v1_22/v284.go b/models/migrations/v1_22/v284.go index 2b95078980..31b38f6aed 100644 --- a/models/migrations/v1_22/v284.go +++ b/models/migrations/v1_22/v284.go @@ -1,7 +1,8 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 + import ( "xorm.io/xorm" ) diff --git a/models/migrations/v1_22/v285.go b/models/migrations/v1_22/v285.go index a55cc17c04..fed89f670e 100644 --- a/models/migrations/v1_22/v285.go +++ b/models/migrations/v1_22/v285.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 import ( "time" diff --git a/models/migrations/v1_22/v286.go b/models/migrations/v1_22/v286.go index 1fcde33202..f3ba50dbb6 100644 --- a/models/migrations/v1_22/v286.go +++ b/models/migrations/v1_22/v286.go @@ -1,6 +1,6 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 import ( "errors" diff --git a/models/migrations/v1_22/v286_test.go b/models/migrations/v1_22/v286_test.go index 1f213ddb6e..b4a50f6fcb 100644 --- a/models/migrations/v1_22/v286_test.go +++ b/models/migrations/v1_22/v286_test.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 import ( "testing" @@ -108,11 +108,11 @@ func Test_RepositoryFormat(t *testing.T) { ok, err := x.ID(2).Get(repo) assert.NoError(t, err) assert.True(t, ok) - assert.EqualValues(t, "sha1", repo.ObjectFormatName) + assert.Equal(t, "sha1", repo.ObjectFormatName) repo = new(Repository) ok, err = x.ID(id).Get(repo) assert.NoError(t, err) assert.True(t, ok) - assert.EqualValues(t, "sha256", repo.ObjectFormatName) + assert.Equal(t, "sha256", repo.ObjectFormatName) } diff --git a/models/migrations/v1_22/v287.go b/models/migrations/v1_22/v287.go index c8b1593286..5fd901f9de 100644 --- a/models/migrations/v1_22/v287.go +++ b/models/migrations/v1_22/v287.go @@ -1,7 +1,7 @@ // Copyright 2023 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_22/v287_test.go b/models/migrations/v1_22/v287_test.go index 9c7b10947d..2b42a33c38 100644 --- a/models/migrations/v1_22/v287_test.go +++ b/models/migrations/v1_22/v287_test.go @@ -1,10 +1,10 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 import ( - "fmt" + "strconv" "testing" "code.gitea.io/gitea/models/migrations/base" @@ -50,7 +50,7 @@ func Test_UpdateBadgeColName(t *testing.T) { for i, e := range oldBadges { got := got[i+1] // 1 is in the badge.yml assert.Equal(t, e.ID, got.ID) - assert.Equal(t, fmt.Sprintf("%d", e.ID), got.Slug) + assert.Equal(t, strconv.FormatInt(e.ID, 10), got.Slug) } // TODO: check if badges have been updated diff --git a/models/migrations/v1_22/v288.go b/models/migrations/v1_22/v288.go index 7c93bfcc66..26c850c218 100644 --- a/models/migrations/v1_22/v288.go +++ b/models/migrations/v1_22/v288.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_22/v289.go b/models/migrations/v1_22/v289.go index b9941aadd9..78689a4ffa 100644 --- a/models/migrations/v1_22/v289.go +++ b/models/migrations/v1_22/v289.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 import "xorm.io/xorm" diff --git a/models/migrations/v1_22/v290.go b/models/migrations/v1_22/v290.go index 9c54d4e87c..0f4d78410c 100644 --- a/models/migrations/v1_22/v290.go +++ b/models/migrations/v1_22/v290.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_22/v291.go b/models/migrations/v1_22/v291.go index 74726fae96..823a644a95 100644 --- a/models/migrations/v1_22/v291.go +++ b/models/migrations/v1_22/v291.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 import "xorm.io/xorm" diff --git a/models/migrations/v1_22/v292.go b/models/migrations/v1_22/v292.go index beca556aee..440f48ce80 100644 --- a/models/migrations/v1_22/v292.go +++ b/models/migrations/v1_22/v292.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 // NOTE: noop the original migration has bug which some projects will be skip, so // these projects will have no default board. diff --git a/models/migrations/v1_22/v293.go b/models/migrations/v1_22/v293.go index 53cc719294..5299b8618f 100644 --- a/models/migrations/v1_22/v293.go +++ b/models/migrations/v1_22/v293.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 import ( "code.gitea.io/gitea/modules/setting" diff --git a/models/migrations/v1_22/v293_test.go b/models/migrations/v1_22/v293_test.go index cfe4345143..2c8f7683a8 100644 --- a/models/migrations/v1_22/v293_test.go +++ b/models/migrations/v1_22/v293_test.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 import ( "testing" diff --git a/models/migrations/v1_22/v294.go b/models/migrations/v1_22/v294.go index 20e261fb1b..8776e51a16 100644 --- a/models/migrations/v1_22/v294.go +++ b/models/migrations/v1_22/v294.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 import ( "fmt" diff --git a/models/migrations/v1_22/v294_test.go b/models/migrations/v1_22/v294_test.go index a1d702cb77..1cf03d6120 100644 --- a/models/migrations/v1_22/v294_test.go +++ b/models/migrations/v1_22/v294_test.go @@ -1,10 +1,9 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 import ( - "slices" "testing" "code.gitea.io/gitea/models/migrations/base" @@ -44,7 +43,7 @@ func Test_AddUniqueIndexForProjectIssue(t *testing.T) { for _, index := range tables[0].Indexes { if index.Type == schemas.UniqueType { found = true - slices.Equal(index.Cols, []string{"project_id", "issue_id"}) + assert.ElementsMatch(t, index.Cols, []string{"project_id", "issue_id"}) break } } diff --git a/models/migrations/v1_22/v295.go b/models/migrations/v1_22/v295.go index 17bdadb4ad..319b1a399b 100644 --- a/models/migrations/v1_22/v295.go +++ b/models/migrations/v1_22/v295.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 import "xorm.io/xorm" diff --git a/models/migrations/v1_22/v296.go b/models/migrations/v1_22/v296.go index 1ecacab95f..75350f9f65 100644 --- a/models/migrations/v1_22/v296.go +++ b/models/migrations/v1_22/v296.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 import "xorm.io/xorm" diff --git a/models/migrations/v1_22/v297.go b/models/migrations/v1_22/v297.go index 7d4b506925..9a4405f266 100644 --- a/models/migrations/v1_22/v297.go +++ b/models/migrations/v1_22/v297.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 import ( "code.gitea.io/gitea/models/perm" diff --git a/models/migrations/v1_22/v298.go b/models/migrations/v1_22/v298.go index b9f3b95ade..7700173a00 100644 --- a/models/migrations/v1_22/v298.go +++ b/models/migrations/v1_22/v298.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_22 //nolint +package v1_22 import "xorm.io/xorm" diff --git a/models/migrations/v1_23/main_test.go b/models/migrations/v1_23/main_test.go index b7948bd4dd..f7b2caed83 100644 --- a/models/migrations/v1_23/main_test.go +++ b/models/migrations/v1_23/main_test.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_23 //nolint +package v1_23 import ( "testing" diff --git a/models/migrations/v1_23/v299.go b/models/migrations/v1_23/v299.go index f6db960c3b..11021d8855 100644 --- a/models/migrations/v1_23/v299.go +++ b/models/migrations/v1_23/v299.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_23 //nolint +package v1_23 import "xorm.io/xorm" @@ -14,5 +14,9 @@ func AddContentVersionToIssueAndComment(x *xorm.Engine) error { ContentVersion int `xorm:"NOT NULL DEFAULT 0"` } - return x.Sync(new(Comment), new(Issue)) + _, err := x.SyncWithOptions(xorm.SyncOptions{ + IgnoreConstrains: true, + IgnoreIndices: true, + }, new(Comment), new(Issue)) + return err } diff --git a/models/migrations/v1_23/v300.go b/models/migrations/v1_23/v300.go index f1f1cccdbf..13c6489c5e 100644 --- a/models/migrations/v1_23/v300.go +++ b/models/migrations/v1_23/v300.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_23 //nolint +package v1_23 import "xorm.io/xorm" @@ -13,5 +13,9 @@ func AddForcePushBranchProtection(x *xorm.Engine) error { ForcePushAllowlistTeamIDs []int64 `xorm:"JSON TEXT"` ForcePushAllowlistDeployKeys bool `xorm:"NOT NULL DEFAULT false"` } - return x.Sync(new(ProtectedBranch)) + _, err := x.SyncWithOptions(xorm.SyncOptions{ + IgnoreConstrains: true, + IgnoreIndices: true, + }, new(ProtectedBranch)) + return err } diff --git a/models/migrations/v1_23/v301.go b/models/migrations/v1_23/v301.go index b7797f6c6b..ed8e9ef059 100644 --- a/models/migrations/v1_23/v301.go +++ b/models/migrations/v1_23/v301.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_23 //nolint +package v1_23 import "xorm.io/xorm" @@ -10,5 +10,9 @@ func AddSkipSecondaryAuthColumnToOAuth2ApplicationTable(x *xorm.Engine) error { type oauth2Application struct { SkipSecondaryAuthorization bool `xorm:"NOT NULL DEFAULT FALSE"` } - return x.Sync(new(oauth2Application)) + _, err := x.SyncWithOptions(xorm.SyncOptions{ + IgnoreConstrains: true, + IgnoreIndices: true, + }, new(oauth2Application)) + return err } diff --git a/models/migrations/v1_23/v302.go b/models/migrations/v1_23/v302.go index d7ea03eb3d..e4a50b3ec8 100644 --- a/models/migrations/v1_23/v302.go +++ b/models/migrations/v1_23/v302.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_23 //nolint +package v1_23 import ( "code.gitea.io/gitea/modules/timeutil" @@ -14,5 +14,8 @@ func AddIndexToActionTaskStoppedLogExpired(x *xorm.Engine) error { Stopped timeutil.TimeStamp `xorm:"index(stopped_log_expired)"` LogExpired bool `xorm:"index(stopped_log_expired)"` } - return x.Sync(new(ActionTask)) + _, err := x.SyncWithOptions(xorm.SyncOptions{ + IgnoreDropIndices: true, + }, new(ActionTask)) + return err } diff --git a/models/migrations/v1_23/v302_test.go b/models/migrations/v1_23/v302_test.go new file mode 100644 index 0000000000..b008b6fc03 --- /dev/null +++ b/models/migrations/v1_23/v302_test.go @@ -0,0 +1,51 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package v1_23 + +import ( + "testing" + + "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/modules/timeutil" + + "github.com/stretchr/testify/assert" +) + +func Test_AddIndexToActionTaskStoppedLogExpired(t *testing.T) { + type ActionTask struct { + ID int64 + JobID int64 + Attempt int64 + RunnerID int64 `xorm:"index"` + Status int `xorm:"index"` + Started timeutil.TimeStamp `xorm:"index"` + Stopped timeutil.TimeStamp `xorm:"index(stopped_log_expired)"` + + RepoID int64 `xorm:"index"` + OwnerID int64 `xorm:"index"` + CommitSHA string `xorm:"index"` + IsForkPullRequest bool + + Token string `xorm:"-"` + TokenHash string `xorm:"UNIQUE"` // sha256 of token + TokenSalt string + TokenLastEight string `xorm:"index token_last_eight"` + + LogFilename string // file name of log + LogInStorage bool // read log from database or from storage + LogLength int64 // lines count + LogSize int64 // blob size + LogIndexes []int64 `xorm:"LONGBLOB"` // line number to offset + LogExpired bool `xorm:"index(stopped_log_expired)"` // files that are too old will be deleted + + Created timeutil.TimeStamp `xorm:"created"` + Updated timeutil.TimeStamp `xorm:"updated index"` + } + + // Prepare and load the testing database + x, deferable := base.PrepareTestEnv(t, 0, new(ActionTask)) + defer deferable() + + assert.NoError(t, AddIndexToActionTaskStoppedLogExpired(x)) +} diff --git a/models/migrations/v1_23/v303.go b/models/migrations/v1_23/v303.go index adfe917d3f..dc541a9535 100644 --- a/models/migrations/v1_23/v303.go +++ b/models/migrations/v1_23/v303.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_23 //nolint +package v1_23 import ( "xorm.io/xorm" @@ -19,5 +19,9 @@ func AddCommentMetaDataColumn(x *xorm.Engine) error { CommentMetaData *CommentMetaData `xorm:"JSON TEXT"` // put all non-index metadata in a single field } - return x.Sync(new(Comment)) + _, err := x.SyncWithOptions(xorm.SyncOptions{ + IgnoreConstrains: true, + IgnoreIndices: true, + }, new(Comment)) + return err } diff --git a/models/migrations/v1_23/v304.go b/models/migrations/v1_23/v304.go index 65cffedbd9..35d4d4881a 100644 --- a/models/migrations/v1_23/v304.go +++ b/models/migrations/v1_23/v304.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_23 //nolint +package v1_23 import "xorm.io/xorm" @@ -9,5 +9,8 @@ func AddIndexForReleaseSha1(x *xorm.Engine) error { type Release struct { Sha1 string `xorm:"INDEX VARCHAR(64)"` } - return x.Sync(new(Release)) + _, err := x.SyncWithOptions(xorm.SyncOptions{ + IgnoreDropIndices: true, + }, new(Release)) + return err } diff --git a/models/migrations/v1_23/v304_test.go b/models/migrations/v1_23/v304_test.go new file mode 100644 index 0000000000..c3dfa5e7e7 --- /dev/null +++ b/models/migrations/v1_23/v304_test.go @@ -0,0 +1,40 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package v1_23 + +import ( + "testing" + + "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/modules/timeutil" + + "github.com/stretchr/testify/assert" +) + +func Test_AddIndexForReleaseSha1(t *testing.T) { + type Release struct { + ID int64 `xorm:"pk autoincr"` + RepoID int64 `xorm:"INDEX UNIQUE(n)"` + PublisherID int64 `xorm:"INDEX"` + TagName string `xorm:"INDEX UNIQUE(n)"` + OriginalAuthor string + OriginalAuthorID int64 `xorm:"index"` + LowerTagName string + Target string + Title string + Sha1 string `xorm:"VARCHAR(64)"` + NumCommits int64 + Note string `xorm:"TEXT"` + IsDraft bool `xorm:"NOT NULL DEFAULT false"` + IsPrerelease bool `xorm:"NOT NULL DEFAULT false"` + IsTag bool `xorm:"NOT NULL DEFAULT false"` // will be true only if the record is a tag and has no related releases + CreatedUnix timeutil.TimeStamp `xorm:"INDEX"` + } + + // Prepare and load the testing database + x, deferable := base.PrepareTestEnv(t, 0, new(Release)) + defer deferable() + + assert.NoError(t, AddIndexForReleaseSha1(x)) +} diff --git a/models/migrations/v1_23/v305.go b/models/migrations/v1_23/v305.go index 4d881192b2..3762279de1 100644 --- a/models/migrations/v1_23/v305.go +++ b/models/migrations/v1_23/v305.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_23 //nolint +package v1_23 import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_23/v306.go b/models/migrations/v1_23/v306.go index 276b438e95..c5c89dbeb8 100644 --- a/models/migrations/v1_23/v306.go +++ b/models/migrations/v1_23/v306.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_23 //nolint +package v1_23 import "xorm.io/xorm" @@ -9,5 +9,9 @@ func AddBlockAdminMergeOverrideBranchProtection(x *xorm.Engine) error { type ProtectedBranch struct { BlockAdminMergeOverride bool `xorm:"NOT NULL DEFAULT false"` } - return x.Sync(new(ProtectedBranch)) + _, err := x.SyncWithOptions(xorm.SyncOptions{ + IgnoreConstrains: true, + IgnoreIndices: true, + }, new(ProtectedBranch)) + return err } diff --git a/models/migrations/v1_23/v307.go b/models/migrations/v1_23/v307.go index ef7f5f2c3f..54a69d250b 100644 --- a/models/migrations/v1_23/v307.go +++ b/models/migrations/v1_23/v307.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_23 //nolint +package v1_23 import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_23/v308.go b/models/migrations/v1_23/v308.go index 1e8a9b0af2..695fdfcc2d 100644 --- a/models/migrations/v1_23/v308.go +++ b/models/migrations/v1_23/v308.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_23 //nolint +package v1_23 import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_23/v309.go b/models/migrations/v1_23/v309.go index 5b39398443..e629b718a8 100644 --- a/models/migrations/v1_23/v309.go +++ b/models/migrations/v1_23/v309.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_23 //nolint +package v1_23 import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_23/v310.go b/models/migrations/v1_23/v310.go index 394417f5a0..074b1c54d3 100644 --- a/models/migrations/v1_23/v310.go +++ b/models/migrations/v1_23/v310.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_23 //nolint +package v1_23 import ( "xorm.io/xorm" @@ -12,5 +12,9 @@ func AddPriorityToProtectedBranch(x *xorm.Engine) error { Priority int64 `xorm:"NOT NULL DEFAULT 0"` } - return x.Sync(new(ProtectedBranch)) + _, err := x.SyncWithOptions(xorm.SyncOptions{ + IgnoreConstrains: true, + IgnoreIndices: true, + }, new(ProtectedBranch)) + return err } diff --git a/models/migrations/v1_23/v311.go b/models/migrations/v1_23/v311.go index 0fc1ac8c0e..ef48085c79 100644 --- a/models/migrations/v1_23/v311.go +++ b/models/migrations/v1_23/v311.go @@ -1,7 +1,7 @@ // Copyright 2024 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_23 //nolint +package v1_23 import ( "xorm.io/xorm" @@ -11,6 +11,9 @@ func AddTimeEstimateColumnToIssueTable(x *xorm.Engine) error { type Issue struct { TimeEstimate int64 `xorm:"NOT NULL DEFAULT 0"` } - - return x.Sync(new(Issue)) + _, err := x.SyncWithOptions(xorm.SyncOptions{ + IgnoreConstrains: true, + IgnoreIndices: true, + }, new(Issue)) + return err } diff --git a/models/migrations/v1_24/v312.go b/models/migrations/v1_24/v312.go new file mode 100644 index 0000000000..823b0eae40 --- /dev/null +++ b/models/migrations/v1_24/v312.go @@ -0,0 +1,25 @@ +// Copyright 2024 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package v1_24 + +import ( + "xorm.io/xorm" +) + +type pullAutoMerge struct { + DeleteBranchAfterMerge bool +} + +// TableName return database table name for xorm +func (pullAutoMerge) TableName() string { + return "pull_auto_merge" +} + +func AddDeleteBranchAfterMergeForAutoMerge(x *xorm.Engine) error { + _, err := x.SyncWithOptions(xorm.SyncOptions{ + IgnoreConstrains: true, + IgnoreIndices: true, + }, new(pullAutoMerge)) + return err +} diff --git a/models/migrations/v1_24/v313.go b/models/migrations/v1_24/v313.go new file mode 100644 index 0000000000..7e6cda6bfd --- /dev/null +++ b/models/migrations/v1_24/v313.go @@ -0,0 +1,31 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package v1_24 + +import ( + "code.gitea.io/gitea/models/migrations/base" + + "xorm.io/xorm" +) + +func MovePinOrderToTableIssuePin(x *xorm.Engine) error { + type IssuePin struct { + ID int64 `xorm:"pk autoincr"` + RepoID int64 `xorm:"UNIQUE(s) NOT NULL"` + IssueID int64 `xorm:"UNIQUE(s) NOT NULL"` + IsPull bool `xorm:"NOT NULL"` + PinOrder int `xorm:"DEFAULT 0"` + } + + if err := x.Sync(new(IssuePin)); err != nil { + return err + } + + if _, err := x.Exec("INSERT INTO issue_pin (repo_id, issue_id, is_pull, pin_order) SELECT repo_id, id, is_pull, pin_order FROM issue WHERE pin_order > 0"); err != nil { + return err + } + sess := x.NewSession() + defer sess.Close() + return base.DropTableColumns(sess, "issue", "pin_order") +} diff --git a/models/migrations/v1_24/v314.go b/models/migrations/v1_24/v314.go new file mode 100644 index 0000000000..51cb2e34aa --- /dev/null +++ b/models/migrations/v1_24/v314.go @@ -0,0 +1,19 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package v1_24 + +import ( + "xorm.io/xorm" +) + +func UpdateOwnerIDOfRepoLevelActionsTables(x *xorm.Engine) error { + if _, err := x.Exec("UPDATE `action_runner` SET `owner_id` = 0 WHERE `repo_id` > 0 AND `owner_id` > 0"); err != nil { + return err + } + if _, err := x.Exec("UPDATE `action_variable` SET `owner_id` = 0 WHERE `repo_id` > 0 AND `owner_id` > 0"); err != nil { + return err + } + _, err := x.Exec("UPDATE `secret` SET `owner_id` = 0 WHERE `repo_id` > 0 AND `owner_id` > 0") + return err +} diff --git a/models/migrations/v1_24/v315.go b/models/migrations/v1_24/v315.go new file mode 100644 index 0000000000..52b9b44785 --- /dev/null +++ b/models/migrations/v1_24/v315.go @@ -0,0 +1,19 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package v1_24 + +import ( + "xorm.io/xorm" +) + +func AddEphemeralToActionRunner(x *xorm.Engine) error { + type ActionRunner struct { + Ephemeral bool `xorm:"ephemeral NOT NULL DEFAULT false"` + } + _, err := x.SyncWithOptions(xorm.SyncOptions{ + IgnoreConstrains: true, + IgnoreIndices: true, + }, new(ActionRunner)) + return err +} diff --git a/models/migrations/v1_24/v316.go b/models/migrations/v1_24/v316.go new file mode 100644 index 0000000000..14e888f9ee --- /dev/null +++ b/models/migrations/v1_24/v316.go @@ -0,0 +1,24 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package v1_24 + +import ( + "xorm.io/xorm" +) + +func AddDescriptionForSecretsAndVariables(x *xorm.Engine) error { + type Secret struct { + Description string `xorm:"TEXT"` + } + + type ActionVariable struct { + Description string `xorm:"TEXT"` + } + + _, err := x.SyncWithOptions(xorm.SyncOptions{ + IgnoreConstrains: true, + IgnoreIndices: true, + }, new(Secret), new(ActionVariable)) + return err +} diff --git a/models/migrations/v1_24/v317.go b/models/migrations/v1_24/v317.go new file mode 100644 index 0000000000..a13db2dd27 --- /dev/null +++ b/models/migrations/v1_24/v317.go @@ -0,0 +1,56 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package v1_24 + +import ( + "code.gitea.io/gitea/modules/timeutil" + + "xorm.io/xorm" + "xorm.io/xorm/schemas" +) + +type improveActionTableIndicesAction struct { + ID int64 `xorm:"pk autoincr"` + UserID int64 `xorm:"INDEX"` // Receiver user id. + OpType int + ActUserID int64 // Action user id. 
+ RepoID int64 + CommentID int64 `xorm:"INDEX"` + IsDeleted bool `xorm:"NOT NULL DEFAULT false"` + RefName string + IsPrivate bool `xorm:"NOT NULL DEFAULT false"` + Content string `xorm:"TEXT"` + CreatedUnix timeutil.TimeStamp `xorm:"created"` +} + +// TableName sets the name of this table +func (*improveActionTableIndicesAction) TableName() string { + return "action" +} + +// TableIndices implements xorm's TableIndices interface +func (a *improveActionTableIndicesAction) TableIndices() []*schemas.Index { + repoIndex := schemas.NewIndex("r_u_d", schemas.IndexType) + repoIndex.AddColumn("repo_id", "user_id", "is_deleted") + + actUserIndex := schemas.NewIndex("au_r_c_u_d", schemas.IndexType) + actUserIndex.AddColumn("act_user_id", "repo_id", "created_unix", "user_id", "is_deleted") + + cudIndex := schemas.NewIndex("c_u_d", schemas.IndexType) + cudIndex.AddColumn("created_unix", "user_id", "is_deleted") + + cuIndex := schemas.NewIndex("c_u", schemas.IndexType) + cuIndex.AddColumn("user_id", "is_deleted") + + actUserUserIndex := schemas.NewIndex("au_c_u", schemas.IndexType) + actUserUserIndex.AddColumn("act_user_id", "created_unix", "user_id") + + indices := []*schemas.Index{actUserIndex, repoIndex, cudIndex, cuIndex, actUserUserIndex} + + return indices +} + +func AddNewIndexForUserDashboard(x *xorm.Engine) error { + return x.Sync(new(improveActionTableIndicesAction)) +} diff --git a/models/migrations/v1_24/v318.go b/models/migrations/v1_24/v318.go new file mode 100644 index 0000000000..9b4a540960 --- /dev/null +++ b/models/migrations/v1_24/v318.go @@ -0,0 +1,21 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package v1_24 + +import ( + "code.gitea.io/gitea/models/perm" + + "xorm.io/xorm" +) + +func AddRepoUnitAnonymousAccessMode(x *xorm.Engine) error { + type RepoUnit struct { //revive:disable-line:exported + AnonymousAccessMode perm.AccessMode `xorm:"NOT NULL DEFAULT 0"` + } + _, err := x.SyncWithOptions(xorm.SyncOptions{ + IgnoreConstrains: true, + IgnoreIndices: true, + }, new(RepoUnit)) + return err +} diff --git a/models/migrations/v1_24/v319.go b/models/migrations/v1_24/v319.go new file mode 100644 index 0000000000..648081f74e --- /dev/null +++ b/models/migrations/v1_24/v319.go @@ -0,0 +1,19 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package v1_24 + +import ( + "xorm.io/xorm" +) + +func AddExclusiveOrderColumnToLabelTable(x *xorm.Engine) error { + type Label struct { + ExclusiveOrder int `xorm:"DEFAULT 0"` + } + _, err := x.SyncWithOptions(xorm.SyncOptions{ + IgnoreConstrains: true, + IgnoreIndices: true, + }, new(Label)) + return err +} diff --git a/models/migrations/v1_24/v320.go b/models/migrations/v1_24/v320.go new file mode 100644 index 0000000000..ebef71939c --- /dev/null +++ b/models/migrations/v1_24/v320.go @@ -0,0 +1,57 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package v1_24 + +import ( + "code.gitea.io/gitea/modules/json" + + "xorm.io/xorm" +) + +func MigrateSkipTwoFactor(x *xorm.Engine) error { + type LoginSource struct { + TwoFactorPolicy string `xorm:"two_factor_policy NOT NULL DEFAULT ''"` + } + _, err := x.SyncWithOptions( + xorm.SyncOptions{ + IgnoreConstrains: true, + IgnoreIndices: true, + }, + new(LoginSource), + ) + if err != nil { + return err + } + + type LoginSourceSimple struct { + ID int64 + Cfg string + } + + var loginSources []LoginSourceSimple + err = x.Table("login_source").Find(&loginSources) + if err != nil { + return err + } + + for _, source := range loginSources { + if source.Cfg == "" { + continue + } + + var cfg map[string]any + err = json.Unmarshal([]byte(source.Cfg), &cfg) + if err != nil { + return err + } + + if cfg["SkipLocalTwoFA"] == true { + _, err = x.Exec("UPDATE login_source SET two_factor_policy = 'skip' WHERE id = ?", source.ID) + if err != nil { + return err + } + } + } + return nil +} diff --git a/models/migrations/v1_6/v70.go b/models/migrations/v1_6/v70.go index 74434a84a1..41f0966942 100644 --- a/models/migrations/v1_6/v70.go +++ b/models/migrations/v1_6/v70.go @@ -1,7 +1,7 @@ // Copyright 2018 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_6 //nolint +package v1_6 import ( "fmt" diff --git a/models/migrations/v1_6/v71.go b/models/migrations/v1_6/v71.go index 586187228b..2b11f57c92 100644 --- a/models/migrations/v1_6/v71.go +++ b/models/migrations/v1_6/v71.go @@ -1,7 +1,7 @@ // Copyright 2018 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_6 //nolint +package v1_6 import ( "fmt" diff --git a/models/migrations/v1_6/v72.go b/models/migrations/v1_6/v72.go index 04cef9a170..9fad88a1b6 100644 --- a/models/migrations/v1_6/v72.go +++ b/models/migrations/v1_6/v72.go @@ -1,7 +1,7 @@ // Copyright 2018 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_6 //nolint +package v1_6 import ( "fmt" diff --git a/models/migrations/v1_7/v73.go b/models/migrations/v1_7/v73.go index b5a748aae3..e0b7a28537 100644 --- a/models/migrations/v1_7/v73.go +++ b/models/migrations/v1_7/v73.go @@ -1,7 +1,7 @@ // Copyright 2018 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_7 //nolint +package v1_7 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_7/v74.go b/models/migrations/v1_7/v74.go index f0567e3c9b..376be37a24 100644 --- a/models/migrations/v1_7/v74.go +++ b/models/migrations/v1_7/v74.go @@ -1,7 +1,7 @@ // Copyright 2018 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_7 //nolint +package v1_7 import "xorm.io/xorm" diff --git a/models/migrations/v1_7/v75.go b/models/migrations/v1_7/v75.go index fa7430970c..ef11575466 100644 --- a/models/migrations/v1_7/v75.go +++ b/models/migrations/v1_7/v75.go @@ -1,7 +1,7 @@ // Copyright 2018 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_7 //nolint +package v1_7 import ( "xorm.io/builder" diff --git a/models/migrations/v1_8/v76.go b/models/migrations/v1_8/v76.go index d3fbd94deb..81e9307549 100644 --- a/models/migrations/v1_8/v76.go +++ b/models/migrations/v1_8/v76.go @@ -1,7 +1,7 @@ // Copyright 2018 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_8 //nolint +package v1_8 import ( "fmt" diff --git a/models/migrations/v1_8/v77.go b/models/migrations/v1_8/v77.go index 8b19993924..4fe5ebe635 100644 --- a/models/migrations/v1_8/v77.go +++ b/models/migrations/v1_8/v77.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_8 //nolint +package v1_8 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_8/v78.go b/models/migrations/v1_8/v78.go index 8f041c1484..e67f464131 100644 --- a/models/migrations/v1_8/v78.go +++ b/models/migrations/v1_8/v78.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_8 //nolint +package v1_8 import ( "code.gitea.io/gitea/models/migrations/base" diff --git a/models/migrations/v1_8/v79.go b/models/migrations/v1_8/v79.go index eb3a9ed0f4..3f50114d5a 100644 --- a/models/migrations/v1_8/v79.go +++ b/models/migrations/v1_8/v79.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_8 //nolint +package v1_8 import ( "code.gitea.io/gitea/modules/setting" diff --git a/models/migrations/v1_8/v80.go b/models/migrations/v1_8/v80.go index cebbbead28..6f9df47a93 100644 --- a/models/migrations/v1_8/v80.go +++ b/models/migrations/v1_8/v80.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_8 //nolint +package v1_8 import "xorm.io/xorm" diff --git a/models/migrations/v1_8/v81.go b/models/migrations/v1_8/v81.go index a100dc1ef7..3c2acc6458 100644 --- a/models/migrations/v1_8/v81.go +++ b/models/migrations/v1_8/v81.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_8 //nolint +package v1_8 import ( "fmt" diff --git a/models/migrations/v1_9/v82.go b/models/migrations/v1_9/v82.go index 26806dd645..c685d3b86b 100644 --- a/models/migrations/v1_9/v82.go +++ b/models/migrations/v1_9/v82.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_9 //nolint +package v1_9 import ( "fmt" diff --git a/models/migrations/v1_9/v83.go b/models/migrations/v1_9/v83.go index 10e6c45875..a0cd57f7c5 100644 --- a/models/migrations/v1_9/v83.go +++ b/models/migrations/v1_9/v83.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_9 //nolint +package v1_9 import ( "code.gitea.io/gitea/modules/timeutil" diff --git a/models/migrations/v1_9/v84.go b/models/migrations/v1_9/v84.go index c7155fe9cf..423915ae57 100644 --- a/models/migrations/v1_9/v84.go +++ b/models/migrations/v1_9/v84.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_9 //nolint +package v1_9 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_9/v85.go b/models/migrations/v1_9/v85.go index a23d7c5d6e..48e1cd5dc4 100644 --- a/models/migrations/v1_9/v85.go +++ b/models/migrations/v1_9/v85.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_9 //nolint +package v1_9 import ( "fmt" diff --git a/models/migrations/v1_9/v86.go b/models/migrations/v1_9/v86.go index cf2725d158..9464ff0cf6 100644 --- a/models/migrations/v1_9/v86.go +++ b/models/migrations/v1_9/v86.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -package v1_9 //nolint +package v1_9 import ( "xorm.io/xorm" diff --git a/models/migrations/v1_9/v87.go b/models/migrations/v1_9/v87.go index fa01b6e5e3..81a4ebf80d 100644 --- a/models/migrations/v1_9/v87.go +++ b/models/migrations/v1_9/v87.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package v1_9 //nolint +package v1_9 import ( "xorm.io/xorm" diff --git a/models/organization/org.go b/models/organization/org.go index 3e55a36758..0f3aef146c 100644 --- a/models/organization/org.go +++ b/models/organization/org.go @@ -178,12 +178,6 @@ func (org *Organization) HomeLink() string { return org.AsUser().HomeLink() } -// CanCreateRepo returns if user login can create a repository -// NOTE: functions calling this assume a failure due to repository count limit; if new checks are added, those functions should be revised -func (org *Organization) CanCreateRepo() bool { - return org.AsUser().CanCreateRepo() -} - // FindOrgMembersOpts represensts find org members conditions type FindOrgMembersOpts struct { db.ListOptions @@ -608,8 +602,3 @@ func getUserTeamIDsQueryBuilder(orgID, userID int64) *builder.Builder { "team_user.uid": userID, }) } - -// TeamsWithAccessToRepo returns all teams that have given access level to the repository. -func (org *Organization) TeamsWithAccessToRepo(ctx context.Context, repoID int64, mode perm.AccessMode) ([]*Team, error) { - return GetTeamsWithAccessToRepo(ctx, org.ID, repoID, mode) -} diff --git a/models/organization/org_list.go b/models/organization/org_list.go index 4c4168af1f..81457191fe 100644 --- a/models/organization/org_list.go +++ b/models/organization/org_list.go @@ -50,8 +50,8 @@ type SearchOrganizationsOptions struct { // FindOrgOptions finds orgs options type FindOrgOptions struct { db.ListOptions - UserID int64 - IncludePrivate bool + UserID int64 + IncludeVisibility structs.VisibleType } func queryUserOrgIDs(userID int64, includePrivate bool) *builder.Builder { @@ -65,11 +65,10 @@ func queryUserOrgIDs(userID int64, includePrivate bool) *builder.Builder { func (opts FindOrgOptions) ToConds() builder.Cond { var cond builder.Cond = builder.Eq{"`user`.`type`": user_model.UserTypeOrganization} if opts.UserID > 0 { - cond = cond.And(builder.In("`user`.`id`", queryUserOrgIDs(opts.UserID, opts.IncludePrivate))) - } - if !opts.IncludePrivate { - cond = cond.And(builder.Eq{"`user`.visibility": structs.VisibleTypePublic}) + cond = cond.And(builder.In("`user`.`id`", queryUserOrgIDs(opts.UserID, opts.IncludeVisibility == structs.VisibleTypePrivate))) } + // public=0, limited=1, private=2 + cond = cond.And(builder.Lte{"`user`.visibility": opts.IncludeVisibility}) return cond } @@ -77,6 +76,16 @@ func (opts FindOrgOptions) ToOrders() string { return "`user`.lower_name ASC" } +func DoerViewOtherVisibility(doer, other *user_model.User) structs.VisibleType { + if doer == nil || other == nil { + return structs.VisibleTypePublic + } + if doer.IsAdmin || doer.ID == other.ID { + return structs.VisibleTypePrivate + } + return structs.VisibleTypeLimited +} + // GetOrgsCanCreateRepoByUserID returns a list of organizations where given user ID // are allowed to create repos. func GetOrgsCanCreateRepoByUserID(ctx context.Context, userID int64) ([]*Organization, error) { @@ -124,6 +133,7 @@ func GetUserOrgsList(ctx context.Context, user *user_model.User) ([]*MinimalOrg, if err := db.GetEngine(ctx).Select(columnsStr). Table("user"). 
Where(builder.In("`user`.`id`", queryUserOrgIDs(user.ID, true))). + OrderBy("`user`.lower_name ASC"). Find(&orgs); err != nil { return nil, err } diff --git a/models/organization/org_list_test.go b/models/organization/org_list_test.go index 0f0f8a4bcd..a2a25c6f91 100644 --- a/models/organization/org_list_test.go +++ b/models/organization/org_list_test.go @@ -10,25 +10,32 @@ import ( "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/structs" "github.com/stretchr/testify/assert" ) -func TestCountOrganizations(t *testing.T) { +func TestOrgList(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) + t.Run("CountOrganizations", testCountOrganizations) + t.Run("FindOrgs", testFindOrgs) + t.Run("GetUserOrgsList", testGetUserOrgsList) + t.Run("LoadOrgListTeams", testLoadOrgListTeams) + t.Run("DoerViewOtherVisibility", testDoerViewOtherVisibility) +} + +func testCountOrganizations(t *testing.T) { expected, err := db.GetEngine(db.DefaultContext).Where("type=?", user_model.UserTypeOrganization).Count(&organization.Organization{}) assert.NoError(t, err) - cnt, err := db.Count[organization.Organization](db.DefaultContext, organization.FindOrgOptions{IncludePrivate: true}) + cnt, err := db.Count[organization.Organization](db.DefaultContext, organization.FindOrgOptions{IncludeVisibility: structs.VisibleTypePrivate}) assert.NoError(t, err) assert.Equal(t, expected, cnt) } -func TestFindOrgs(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - +func testFindOrgs(t *testing.T) { orgs, err := db.Find[organization.Organization](db.DefaultContext, organization.FindOrgOptions{ - UserID: 4, - IncludePrivate: true, + UserID: 4, + IncludeVisibility: structs.VisibleTypePrivate, }) assert.NoError(t, err) if assert.Len(t, orgs, 1) { @@ -36,33 +43,30 @@ func TestFindOrgs(t *testing.T) { } orgs, err = db.Find[organization.Organization](db.DefaultContext, organization.FindOrgOptions{ - UserID: 4, - IncludePrivate: false, + UserID: 4, }) assert.NoError(t, err) assert.Empty(t, orgs) total, err := db.Count[organization.Organization](db.DefaultContext, organization.FindOrgOptions{ - UserID: 4, - IncludePrivate: true, + UserID: 4, + IncludeVisibility: structs.VisibleTypePrivate, }) assert.NoError(t, err) assert.EqualValues(t, 1, total) } -func TestGetUserOrgsList(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) +func testGetUserOrgsList(t *testing.T) { orgs, err := organization.GetUserOrgsList(db.DefaultContext, &user_model.User{ID: 4}) assert.NoError(t, err) if assert.Len(t, orgs, 1) { assert.EqualValues(t, 3, orgs[0].ID) // repo_id: 3 is in the team, 32 is public, 5 is private with no team - assert.EqualValues(t, 2, orgs[0].NumRepos) + assert.Equal(t, 2, orgs[0].NumRepos) } } -func TestLoadOrgListTeams(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) +func testLoadOrgListTeams(t *testing.T) { orgs, err := organization.GetUserOrgsList(db.DefaultContext, &user_model.User{ID: 4}) assert.NoError(t, err) assert.Len(t, orgs, 1) @@ -71,3 +75,10 @@ func TestLoadOrgListTeams(t *testing.T) { assert.Len(t, teamsMap, 1) assert.Len(t, teamsMap[3], 5) } + +func testDoerViewOtherVisibility(t *testing.T) { + assert.Equal(t, structs.VisibleTypePublic, organization.DoerViewOtherVisibility(nil, nil)) + assert.Equal(t, structs.VisibleTypeLimited, organization.DoerViewOtherVisibility(&user_model.User{ID: 1}, &user_model.User{ID: 2})) + assert.Equal(t, 
structs.VisibleTypePrivate, organization.DoerViewOtherVisibility(&user_model.User{ID: 1}, &user_model.User{ID: 1})) + assert.Equal(t, structs.VisibleTypePrivate, organization.DoerViewOtherVisibility(&user_model.User{ID: 1, IsAdmin: true}, &user_model.User{ID: 2})) +} diff --git a/models/organization/org_test.go b/models/organization/org_test.go index 2c5b4090df..234325a8cd 100644 --- a/models/organization/org_test.go +++ b/models/organization/org_test.go @@ -16,6 +16,7 @@ import ( "code.gitea.io/gitea/modules/structs" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestUser_IsOwnedBy(t *testing.T) { @@ -134,7 +135,7 @@ func TestIsOrganizationOwner(t *testing.T) { test := func(orgID, userID int64, expected bool) { isOwner, err := organization.IsOrganizationOwner(db.DefaultContext, orgID, userID) assert.NoError(t, err) - assert.EqualValues(t, expected, isOwner) + assert.Equal(t, expected, isOwner) } test(3, 2, true) test(3, 3, false) @@ -148,7 +149,7 @@ func TestIsOrganizationMember(t *testing.T) { test := func(orgID, userID int64, expected bool) { isMember, err := organization.IsOrganizationMember(db.DefaultContext, orgID, userID) assert.NoError(t, err) - assert.EqualValues(t, expected, isMember) + assert.Equal(t, expected, isMember) } test(3, 2, true) test(3, 3, false) @@ -163,7 +164,7 @@ func TestIsPublicMembership(t *testing.T) { test := func(orgID, userID int64, expected bool) { isMember, err := organization.IsPublicMembership(db.DefaultContext, orgID, userID) assert.NoError(t, err) - assert.EqualValues(t, expected, isMember) + assert.Equal(t, expected, isMember) } test(3, 2, true) test(3, 3, false) @@ -180,9 +181,8 @@ func TestRestrictedUserOrgMembers(t *testing.T) { ID: 29, IsRestricted: true, }) - if !assert.True(t, restrictedUser.IsRestricted) { - return // ensure fixtures return restricted user - } + // ensure fixtures return restricted user + require.True(t, restrictedUser.IsRestricted) testCases := []struct { name string @@ -237,7 +237,7 @@ func TestRestrictedUserOrgMembers(t *testing.T) { memberUIDs = append(memberUIDs, member.UID) } slices.Sort(memberUIDs) - assert.EqualValues(t, tc.expectedUIDs, memberUIDs) + assert.Equal(t, tc.expectedUIDs, memberUIDs) }) } } @@ -255,7 +255,7 @@ func TestGetOrgUsersByOrgID(t *testing.T) { sort.Slice(orgUsers, func(i, j int) bool { return orgUsers[i].ID < orgUsers[j].ID }) - assert.EqualValues(t, []*organization.OrgUser{{ + assert.Equal(t, []*organization.OrgUser{{ ID: 1, OrgID: 3, UID: 2, @@ -320,9 +320,9 @@ func TestAccessibleReposEnv_CountRepos(t *testing.T) { testSuccess := func(userID, expectedCount int64) { env, err := repo_model.AccessibleReposEnv(db.DefaultContext, org, userID) assert.NoError(t, err) - count, err := env.CountRepos() + count, err := env.CountRepos(db.DefaultContext) assert.NoError(t, err) - assert.EqualValues(t, expectedCount, count) + assert.Equal(t, expectedCount, count) } testSuccess(2, 3) testSuccess(4, 2) @@ -334,7 +334,7 @@ func TestAccessibleReposEnv_RepoIDs(t *testing.T) { testSuccess := func(userID int64, expectedRepoIDs []int64) { env, err := repo_model.AccessibleReposEnv(db.DefaultContext, org, userID) assert.NoError(t, err) - repoIDs, err := env.RepoIDs(1, 100) + repoIDs, err := env.RepoIDs(db.DefaultContext) assert.NoError(t, err) assert.Equal(t, expectedRepoIDs, repoIDs) } @@ -342,32 +342,13 @@ func TestAccessibleReposEnv_RepoIDs(t *testing.T) { testSuccess(4, []int64{3, 32}) } -func TestAccessibleReposEnv_Repos(t *testing.T) { - assert.NoError(t, 
unittest.PrepareTestDatabase()) - org := unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 3}) - testSuccess := func(userID int64, expectedRepoIDs []int64) { - env, err := repo_model.AccessibleReposEnv(db.DefaultContext, org, userID) - assert.NoError(t, err) - repos, err := env.Repos(1, 100) - assert.NoError(t, err) - expectedRepos := make(repo_model.RepositoryList, len(expectedRepoIDs)) - for i, repoID := range expectedRepoIDs { - expectedRepos[i] = unittest.AssertExistsAndLoadBean(t, - &repo_model.Repository{ID: repoID}) - } - assert.Equal(t, expectedRepos, repos) - } - testSuccess(2, []int64{3, 5, 32}) - testSuccess(4, []int64{3, 32}) -} - func TestAccessibleReposEnv_MirrorRepos(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) org := unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 3}) testSuccess := func(userID int64, expectedRepoIDs []int64) { env, err := repo_model.AccessibleReposEnv(db.DefaultContext, org, userID) assert.NoError(t, err) - repos, err := env.MirrorRepos() + repos, err := env.MirrorRepos(db.DefaultContext) assert.NoError(t, err) expectedRepos := make(repo_model.RepositoryList, len(expectedRepoIDs)) for i, repoID := range expectedRepoIDs { diff --git a/models/organization/org_user.go b/models/organization/org_user.go index 08d936d922..4d7527c15f 100644 --- a/models/organization/org_user.go +++ b/models/organization/org_user.go @@ -78,7 +78,7 @@ func IsOrganizationAdmin(ctx context.Context, orgID, uid int64) (bool, error) { return false, err } for _, t := range teams { - if t.AccessMode >= perm.AccessModeAdmin { + if t.HasAdminAccess() { return true, nil } } diff --git a/models/organization/org_user_test.go b/models/organization/org_user_test.go index c5110b2a34..689544430d 100644 --- a/models/organization/org_user_test.go +++ b/models/organization/org_user_test.go @@ -139,7 +139,7 @@ func TestAddOrgUser(t *testing.T) { unittest.AssertExistsAndLoadBean(t, ou) assert.Equal(t, isPublic, ou.IsPublic) org = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: orgID}) - assert.EqualValues(t, expectedNumMembers, org.NumMembers) + assert.Equal(t, expectedNumMembers, org.NumMembers) } setting.Service.DefaultOrgMemberVisible = false diff --git a/models/organization/org_worktime.go b/models/organization/org_worktime.go new file mode 100644 index 0000000000..7b57182a8a --- /dev/null +++ b/models/organization/org_worktime.go @@ -0,0 +1,103 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package organization + +import ( + "sort" + + "code.gitea.io/gitea/models/db" + + "xorm.io/builder" +) + +type WorktimeSumByRepos struct { + RepoName string + SumTime int64 +} + +func GetWorktimeByRepos(org *Organization, unitFrom, unixTo int64) (results []WorktimeSumByRepos, err error) { + err = db.GetEngine(db.DefaultContext). + Select("repository.name AS repo_name, SUM(tracked_time.time) AS sum_time"). + Table("tracked_time"). + Join("INNER", "issue", "tracked_time.issue_id = issue.id"). + Join("INNER", "repository", "issue.repo_id = repository.id"). + Where(builder.Eq{"repository.owner_id": org.ID}). + And(builder.Eq{"tracked_time.deleted": false}). + And(builder.Gte{"tracked_time.created_unix": unitFrom}). + And(builder.Lte{"tracked_time.created_unix": unixTo}). + GroupBy("repository.name"). + OrderBy("repository.name"). 
+ Find(&results) + return results, err +} + +type WorktimeSumByMilestones struct { + RepoName string + MilestoneName string + MilestoneID int64 + MilestoneDeadline int64 + SumTime int64 + HideRepoName bool +} + +func GetWorktimeByMilestones(org *Organization, unitFrom, unixTo int64) (results []WorktimeSumByMilestones, err error) { + err = db.GetEngine(db.DefaultContext). + Select("repository.name AS repo_name, milestone.name AS milestone_name, milestone.id AS milestone_id, milestone.deadline_unix as milestone_deadline, SUM(tracked_time.time) AS sum_time"). + Table("tracked_time"). + Join("INNER", "issue", "tracked_time.issue_id = issue.id"). + Join("INNER", "repository", "issue.repo_id = repository.id"). + Join("LEFT", "milestone", "issue.milestone_id = milestone.id"). + Where(builder.Eq{"repository.owner_id": org.ID}). + And(builder.Eq{"tracked_time.deleted": false}). + And(builder.Gte{"tracked_time.created_unix": unitFrom}). + And(builder.Lte{"tracked_time.created_unix": unixTo}). + GroupBy("repository.name, milestone.name, milestone.deadline_unix, milestone.id"). + OrderBy("repository.name, milestone.deadline_unix, milestone.id"). + Find(&results) + + // TODO: pgsql: NULL values are sorted last in default ascending order, so we need to sort them manually again. + sort.Slice(results, func(i, j int) bool { + if results[i].RepoName != results[j].RepoName { + return results[i].RepoName < results[j].RepoName + } + if results[i].MilestoneDeadline != results[j].MilestoneDeadline { + return results[i].MilestoneDeadline < results[j].MilestoneDeadline + } + return results[i].MilestoneID < results[j].MilestoneID + }) + + // Show only the first RepoName, for nicer output. + prevRepoName := "" + for i := 0; i < len(results); i++ { + res := &results[i] + res.MilestoneDeadline = 0 // clear the deadline because we do not really need it + if prevRepoName == res.RepoName { + res.HideRepoName = true + } + prevRepoName = res.RepoName + } + return results, err +} + +type WorktimeSumByMembers struct { + UserName string + SumTime int64 +} + +func GetWorktimeByMembers(org *Organization, unitFrom, unixTo int64) (results []WorktimeSumByMembers, err error) { + err = db.GetEngine(db.DefaultContext). + Select("`user`.name AS user_name, SUM(tracked_time.time) AS sum_time"). + Table("tracked_time"). + Join("INNER", "issue", "tracked_time.issue_id = issue.id"). + Join("INNER", "repository", "issue.repo_id = repository.id"). + Join("INNER", "`user`", "tracked_time.user_id = `user`.id"). + Where(builder.Eq{"repository.owner_id": org.ID}). + And(builder.Eq{"tracked_time.deleted": false}). + And(builder.Gte{"tracked_time.created_unix": unitFrom}). + And(builder.Lte{"tracked_time.created_unix": unixTo}). + GroupBy("`user`.name"). + OrderBy("sum_time DESC"). 
+ Find(&results) + return results, err +} diff --git a/models/organization/team.go b/models/organization/team.go index 96666da39a..7f3a9b3829 100644 --- a/models/organization/team.go +++ b/models/organization/team.go @@ -113,7 +113,7 @@ func (t *Team) LoadUnits(ctx context.Context) (err error) { // GetUnitNames returns the team units names func (t *Team) GetUnitNames() (res []string) { - if t.AccessMode >= perm.AccessModeAdmin { + if t.HasAdminAccess() { return unit.AllUnitKeyNames() } @@ -126,7 +126,7 @@ func (t *Team) GetUnitNames() (res []string) { // GetUnitsMap returns the team units permissions func (t *Team) GetUnitsMap() map[string]string { m := make(map[string]string) - if t.AccessMode >= perm.AccessModeAdmin { + if t.HasAdminAccess() { for _, u := range unit.Units { m[u.NameKey] = t.AccessMode.ToString() } @@ -153,6 +153,10 @@ func (t *Team) IsMember(ctx context.Context, userID int64) bool { return isMember } +func (t *Team) HasAdminAccess() bool { + return t.AccessMode >= perm.AccessModeAdmin +} + // LoadMembers returns paginated members in team of organization. func (t *Team) LoadMembers(ctx context.Context) (err error) { t.Members, err = GetTeamMembers(ctx, &SearchMembersOptions{ @@ -238,22 +242,6 @@ func GetTeamByID(ctx context.Context, teamID int64) (*Team, error) { return t, nil } -// GetTeamNamesByID returns team's lower name from a list of team ids. -func GetTeamNamesByID(ctx context.Context, teamIDs []int64) ([]string, error) { - if len(teamIDs) == 0 { - return []string{}, nil - } - - var teamNames []string - err := db.GetEngine(ctx).Table("team"). - Select("lower_name"). - In("id", teamIDs). - Asc("name"). - Find(&teamNames) - - return teamNames, err -} - // IncrTeamRepoNum increases the number of repos for the given team by 1 func IncrTeamRepoNum(ctx context.Context, teamID int64) error { _, err := db.GetEngine(ctx).Incr("num_repos").ID(teamID).Update(new(Team)) diff --git a/models/organization/team_list.go b/models/organization/team_list.go index 6f2a922e95..0274f9c5ba 100644 --- a/models/organization/team_list.go +++ b/models/organization/team_list.go @@ -133,5 +133,8 @@ func GetTeamsByOrgIDs(ctx context.Context, orgIDs []int64) (TeamList, error) { func GetTeamsByIDs(ctx context.Context, teamIDs []int64) (map[int64]*Team, error) { teams := make(map[int64]*Team, len(teamIDs)) + if len(teamIDs) == 0 { + return teams, nil + } return teams, db.GetEngine(ctx).Where(builder.In("`id`", teamIDs)).Find(&teams) } diff --git a/models/organization/team_repo.go b/models/organization/team_repo.go index 53edd203a8..b3e266dbc7 100644 --- a/models/organization/team_repo.go +++ b/models/organization/team_repo.go @@ -9,6 +9,8 @@ import ( "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/perm" "code.gitea.io/gitea/models/unit" + + "xorm.io/builder" ) // TeamRepo represents an team-repository relation. @@ -48,26 +50,27 @@ func RemoveTeamRepo(ctx context.Context, teamID, repoID int64) error { return err } -// GetTeamsWithAccessToRepo returns all teams in an organization that have given access level to the repository. -func GetTeamsWithAccessToRepo(ctx context.Context, orgID, repoID int64, mode perm.AccessMode) ([]*Team, error) { +// GetTeamsWithAccessToAnyRepoUnit returns all teams in an organization that have given access level to the repository special unit. +// This function is only used for finding some teams that can be used as branch protection allowlist or reviewers, it isn't really used for access control. 
+// FIXME: TEAM-UNIT-PERMISSION this logic is not complete, search the fixme keyword to see more details +func GetTeamsWithAccessToAnyRepoUnit(ctx context.Context, orgID, repoID int64, mode perm.AccessMode, unitType unit.Type, unitTypesMore ...unit.Type) ([]*Team, error) { teams := make([]*Team, 0, 5) - return teams, db.GetEngine(ctx).Where("team.authorize >= ?", mode). - Join("INNER", "team_repo", "team_repo.team_id = team.id"). - And("team_repo.org_id = ?", orgID). - And("team_repo.repo_id = ?", repoID). - OrderBy("name"). - Find(&teams) -} -// GetTeamsWithAccessToRepoUnit returns all teams in an organization that have given access level to the repository special unit. -func GetTeamsWithAccessToRepoUnit(ctx context.Context, orgID, repoID int64, mode perm.AccessMode, unitType unit.Type) ([]*Team, error) { - teams := make([]*Team, 0, 5) - return teams, db.GetEngine(ctx).Where("team_unit.access_mode >= ?", mode). + sub := builder.Select("team_id").From("team_unit"). + Where(builder.Expr("team_unit.team_id = team.id")). + And(builder.In("team_unit.type", append([]unit.Type{unitType}, unitTypesMore...))). + And(builder.Expr("team_unit.access_mode >= ?", mode)) + + err := db.GetEngine(ctx). Join("INNER", "team_repo", "team_repo.team_id = team.id"). - Join("INNER", "team_unit", "team_unit.team_id = team.id"). And("team_repo.org_id = ?", orgID). And("team_repo.repo_id = ?", repoID). - And("team_unit.type = ?", unitType). + And(builder.Or( + builder.Expr("team.authorize >= ?", mode), + builder.In("team.id", sub), + )). OrderBy("name"). Find(&teams) + + return teams, err } diff --git a/models/organization/team_repo_test.go b/models/organization/team_repo_test.go index c0d6750df9..73a06a93c2 100644 --- a/models/organization/team_repo_test.go +++ b/models/organization/team_repo_test.go @@ -22,7 +22,7 @@ func TestGetTeamsWithAccessToRepoUnit(t *testing.T) { org41 := unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 41}) repo61 := unittest.AssertExistsAndLoadBean(t, &repo.Repository{ID: 61}) - teams, err := organization.GetTeamsWithAccessToRepoUnit(db.DefaultContext, org41.ID, repo61.ID, perm.AccessModeRead, unit.TypePullRequests) + teams, err := organization.GetTeamsWithAccessToAnyRepoUnit(db.DefaultContext, org41.ID, repo61.ID, perm.AccessModeRead, unit.TypePullRequests) assert.NoError(t, err) if assert.Len(t, teams, 2) { assert.EqualValues(t, 21, teams[0].ID) diff --git a/models/organization/team_test.go b/models/organization/team_test.go index deaabbfa2c..b0bf842584 100644 --- a/models/organization/team_test.go +++ b/models/organization/team_test.go @@ -77,7 +77,7 @@ func TestGetTeam(t *testing.T) { testSuccess := func(orgID int64, name string) { team, err := organization.GetTeam(db.DefaultContext, orgID, name) assert.NoError(t, err) - assert.EqualValues(t, orgID, team.OrgID) + assert.Equal(t, orgID, team.OrgID) assert.Equal(t, name, team.Name) } testSuccess(3, "Owners") @@ -95,7 +95,7 @@ func TestGetTeamByID(t *testing.T) { testSuccess := func(teamID int64) { team, err := organization.GetTeamByID(db.DefaultContext, teamID) assert.NoError(t, err) - assert.EqualValues(t, teamID, team.ID) + assert.Equal(t, teamID, team.ID) } testSuccess(1) testSuccess(2) @@ -163,7 +163,7 @@ func TestGetUserOrgTeams(t *testing.T) { teams, err := organization.GetUserOrgTeams(db.DefaultContext, orgID, userID) assert.NoError(t, err) for _, team := range teams { - assert.EqualValues(t, orgID, team.OrgID) + assert.Equal(t, orgID, team.OrgID) unittest.AssertExistsAndLoadBean(t, 
&organization.TeamUser{TeamID: team.ID, UID: userID}) } } diff --git a/models/packages/container/const.go b/models/packages/container/const.go deleted file mode 100644 index 0dfbda051d..0000000000 --- a/models/packages/container/const.go +++ /dev/null @@ -1,9 +0,0 @@ -// Copyright 2022 The Gitea Authors. All rights reserved. -// SPDX-License-Identifier: MIT - -package container - -const ( - ManifestFilename = "manifest.json" - UploadVersion = "_upload" -) diff --git a/models/packages/container/search.go b/models/packages/container/search.go index 5df35117ce..9321d9eb41 100644 --- a/models/packages/container/search.go +++ b/models/packages/container/search.go @@ -25,6 +25,7 @@ type BlobSearchOptions struct { Digest string Tag string IsManifest bool + OnlyLead bool Repository string } @@ -43,7 +44,10 @@ func (opts *BlobSearchOptions) toConds() builder.Cond { cond = cond.And(builder.Eq{"package_version.lower_version": strings.ToLower(opts.Tag)}) } if opts.IsManifest { - cond = cond.And(builder.Eq{"package_file.lower_name": ManifestFilename}) + cond = cond.And(builder.Eq{"package_file.lower_name": container_module.ManifestFilename}) + } + if opts.OnlyLead { + cond = cond.And(builder.Eq{"package_file.is_lead": true}) } if opts.Digest != "" { var propsCond builder.Cond = builder.Eq{ @@ -73,11 +77,9 @@ func GetContainerBlob(ctx context.Context, opts *BlobSearchOptions) (*packages.P pfds, err := getContainerBlobsLimit(ctx, opts, 1) if err != nil { return nil, err - } - if len(pfds) != 1 { + } else if len(pfds) == 0 { return nil, ErrContainerBlobNotExist } - return pfds[0], nil } @@ -233,7 +235,7 @@ func SearchImageTags(ctx context.Context, opts *ImageTagsSearchOptions) ([]*pack func SearchExpiredUploadedBlobs(ctx context.Context, olderThan time.Duration) ([]*packages.PackageFile, error) { var cond builder.Cond = builder.Eq{ "package_version.is_internal": true, - "package_version.lower_version": UploadVersion, + "package_version.lower_version": container_module.UploadVersion, "package.type": packages.TypeContainer, } cond = cond.And(builder.Lt{"package_file.created_unix": time.Now().Add(-olderThan).Unix()}) diff --git a/models/packages/descriptor.go b/models/packages/descriptor.go index 803b73c968..2d43dc3046 100644 --- a/models/packages/descriptor.go +++ b/models/packages/descriptor.go @@ -11,6 +11,7 @@ import ( repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/cache" "code.gitea.io/gitea/modules/json" "code.gitea.io/gitea/modules/packages/alpine" "code.gitea.io/gitea/modules/packages/arch" @@ -102,19 +103,26 @@ func (pd *PackageDescriptor) CalculateBlobSize() int64 { // GetPackageDescriptor gets the package description for a version func GetPackageDescriptor(ctx context.Context, pv *PackageVersion) (*PackageDescriptor, error) { - p, err := GetPackageByID(ctx, pv.PackageID) + return GetPackageDescriptorWithCache(ctx, pv, cache.NewEphemeralCache()) +} + +func GetPackageDescriptorWithCache(ctx context.Context, pv *PackageVersion, c *cache.EphemeralCache) (*PackageDescriptor, error) { + p, err := cache.GetWithEphemeralCache(ctx, c, "package", pv.PackageID, GetPackageByID) if err != nil { return nil, err } - o, err := user_model.GetUserByID(ctx, p.OwnerID) + o, err := cache.GetWithEphemeralCache(ctx, c, "user", p.OwnerID, user_model.GetUserByID) if err != nil { return nil, err } - repository, err := repo_model.GetRepositoryByID(ctx, p.RepoID) - if err != nil && !repo_model.IsErrRepoNotExist(err) { - return nil, err + var repository 
*repo_model.Repository + if p.RepoID > 0 { + repository, err = cache.GetWithEphemeralCache(ctx, c, "repo", p.RepoID, repo_model.GetRepositoryByID) + if err != nil && !repo_model.IsErrRepoNotExist(err) { + return nil, err + } } - creator, err := user_model.GetUserByID(ctx, pv.CreatorID) + creator, err := cache.GetWithEphemeralCache(ctx, c, "user", pv.CreatorID, user_model.GetUserByID) if err != nil { if errors.Is(err, util.ErrNotExist) { creator = user_model.NewGhostUser() @@ -142,9 +150,13 @@ func GetPackageDescriptor(ctx context.Context, pv *PackageVersion) (*PackageDesc return nil, err } - pfds, err := GetPackageFileDescriptors(ctx, pfs) - if err != nil { - return nil, err + pfds := make([]*PackageFileDescriptor, 0, len(pfs)) + for _, pf := range pfs { + pfd, err := getPackageFileDescriptor(ctx, pf, c) + if err != nil { + return nil, err + } + pfds = append(pfds, pfd) } var metadata any @@ -194,7 +206,7 @@ func GetPackageDescriptor(ctx context.Context, pv *PackageVersion) (*PackageDesc case TypeVagrant: metadata = &vagrant.Metadata{} default: - panic(fmt.Sprintf("unknown package type: %s", string(p.Type))) + panic("unknown package type: " + string(p.Type)) } if metadata != nil { if err := json.Unmarshal([]byte(pv.MetadataJSON), &metadata); err != nil { @@ -218,7 +230,11 @@ func GetPackageDescriptor(ctx context.Context, pv *PackageVersion) (*PackageDesc // GetPackageFileDescriptor gets a package file descriptor for a package file func GetPackageFileDescriptor(ctx context.Context, pf *PackageFile) (*PackageFileDescriptor, error) { - pb, err := GetBlobByID(ctx, pf.BlobID) + return getPackageFileDescriptor(ctx, pf, cache.NewEphemeralCache()) +} + +func getPackageFileDescriptor(ctx context.Context, pf *PackageFile, c *cache.EphemeralCache) (*PackageFileDescriptor, error) { + pb, err := cache.GetWithEphemeralCache(ctx, c, "package_file_blob", pf.BlobID, GetBlobByID) if err != nil { return nil, err } @@ -248,9 +264,13 @@ func GetPackageFileDescriptors(ctx context.Context, pfs []*PackageFile) ([]*Pack // GetPackageDescriptors gets the package descriptions for the versions func GetPackageDescriptors(ctx context.Context, pvs []*PackageVersion) ([]*PackageDescriptor, error) { + return getPackageDescriptors(ctx, pvs, cache.NewEphemeralCache()) +} + +func getPackageDescriptors(ctx context.Context, pvs []*PackageVersion, c *cache.EphemeralCache) ([]*PackageDescriptor, error) { pds := make([]*PackageDescriptor, 0, len(pvs)) for _, pv := range pvs { - pd, err := GetPackageDescriptor(ctx, pv) + pd, err := GetPackageDescriptorWithCache(ctx, pv, c) if err != nil { return nil, err } diff --git a/models/packages/nuget/search.go b/models/packages/nuget/search.go index 7a505ff08f..a4b23f31d5 100644 --- a/models/packages/nuget/search.go +++ b/models/packages/nuget/search.go @@ -33,7 +33,7 @@ func SearchVersions(ctx context.Context, opts *packages_model.PackageSearchOptio Where(cond). 
OrderBy("package.name ASC") if opts.Paginator != nil { - skip, take := opts.GetSkipTake() + skip, take := opts.Paginator.GetSkipTake() inner = inner.Limit(take, skip) } diff --git a/models/packages/package.go b/models/packages/package.go index 31e1277a6e..38d1cdcf66 100644 --- a/models/packages/package.go +++ b/models/packages/package.go @@ -127,7 +127,7 @@ func (pt Type) Name() string { case TypeVagrant: return "Vagrant" } - panic(fmt.Sprintf("unknown package type: %s", string(pt))) + panic("unknown package type: " + string(pt)) } // SVGName gets the name of the package type svg image @@ -178,7 +178,7 @@ func (pt Type) SVGName() string { case TypeVagrant: return "gitea-vagrant" } - panic(fmt.Sprintf("unknown package type: %s", string(pt))) + panic("unknown package type: " + string(pt)) } // Package represents a package @@ -228,6 +228,11 @@ func SetRepositoryLink(ctx context.Context, packageID, repoID int64) error { return err } +func UnlinkRepository(ctx context.Context, packageID int64) error { + _, err := db.GetEngine(ctx).ID(packageID).Cols("repo_id").Update(&Package{RepoID: 0}) + return err +} + // UnlinkRepositoryFromAllPackages unlinks every package from the repository func UnlinkRepositoryFromAllPackages(ctx context.Context, repoID int64) error { _, err := db.GetEngine(ctx).Where("repo_id = ?", repoID).Cols("repo_id").Update(&Package{}) diff --git a/models/packages/package_file.go b/models/packages/package_file.go index 270cb32fdf..bf877485d6 100644 --- a/models/packages/package_file.go +++ b/models/packages/package_file.go @@ -115,6 +115,11 @@ func DeleteFileByID(ctx context.Context, fileID int64) error { return err } +func UpdateFile(ctx context.Context, pf *PackageFile, cols []string) error { + _, err := db.GetEngine(ctx).ID(pf.ID).Cols(cols...).Update(pf) + return err +} + // PackageFileSearchOptions are options for SearchXXX methods type PackageFileSearchOptions struct { OwnerID int64 diff --git a/models/packages/package_property.go b/models/packages/package_property.go index e0170016cf..7ddbfd97e9 100644 --- a/models/packages/package_property.go +++ b/models/packages/package_property.go @@ -66,6 +66,20 @@ func UpdateProperty(ctx context.Context, pp *PackageProperty) error { return err } +func InsertOrUpdateProperty(ctx context.Context, refType PropertyType, refID int64, name, value string) error { + pp := PackageProperty{RefType: refType, RefID: refID, Name: name} + ok, err := db.GetEngine(ctx).Get(&pp) + if err != nil { + return err + } + if ok { + _, err = db.GetEngine(ctx).Where("ref_type=? AND ref_id=? AND name=?", refType, refID, name).Cols("value").Update(&PackageProperty{Value: value}) + return err + } + _, err = InsertProperty(ctx, refType, refID, name, value) + return err +} + // DeleteAllProperties deletes all properties of a ref func DeleteAllProperties(ctx context.Context, refType PropertyType, refID int64) error { _, err := db.GetEngine(ctx).Where("ref_type = ? AND ref_id = ?", refType, refID).Delete(&PackageProperty{}) @@ -78,8 +92,8 @@ func DeletePropertyByID(ctx context.Context, propertyID int64) error { return err } -// DeletePropertyByName deletes properties by name -func DeletePropertyByName(ctx context.Context, refType PropertyType, refID int64, name string) error { +// DeletePropertiesByName deletes properties by name +func DeletePropertiesByName(ctx context.Context, refType PropertyType, refID int64, name string) error { _, err := db.GetEngine(ctx).Where("ref_type = ? AND ref_id = ? 
AND name = ?", refType, refID, name).Delete(&PackageProperty{}) return err } diff --git a/models/packages/package_version.go b/models/packages/package_version.go index 278e8e3a86..5672e0efbf 100644 --- a/models/packages/package_version.go +++ b/models/packages/package_version.go @@ -14,6 +14,7 @@ import ( "code.gitea.io/gitea/modules/util" "xorm.io/builder" + "xorm.io/xorm" ) // ErrDuplicatePackageVersion indicates a duplicated package version error @@ -187,7 +188,7 @@ type PackageSearchOptions struct { HasFileWithName string // only results are found which are associated with a file with the specific name HasFiles optional.Option[bool] // only results are found which have associated files Sort VersionSort - db.Paginator + Paginator db.Paginator } func (opts *PackageSearchOptions) ToConds() builder.Cond { @@ -279,9 +280,19 @@ func (opts *PackageSearchOptions) configureOrderBy(e db.Engine) { default: e.Desc("package_version.created_unix") } + e.Desc("package_version.id") // Sort by id for stable order with duplicates in the other field +} - // Sort by id for stable order with duplicates in the other field - e.Asc("package_version.id") +func searchVersionsBySession(sess *xorm.Session, opts *PackageSearchOptions) ([]*PackageVersion, int64, error) { + opts.configureOrderBy(sess) + pvs := make([]*PackageVersion, 0, 10) + if opts.Paginator != nil { + sess = db.SetSessionPagination(sess, opts.Paginator) + count, err := sess.FindAndCount(&pvs) + return pvs, count, err + } + err := sess.Find(&pvs) + return pvs, int64(len(pvs)), err } // SearchVersions gets all versions of packages matching the search options @@ -291,16 +302,7 @@ func SearchVersions(ctx context.Context, opts *PackageSearchOptions) ([]*Package Table("package_version"). Join("INNER", "package", "package.id = package_version.package_id"). Where(opts.ToConds()) - - opts.configureOrderBy(sess) - - if opts.Paginator != nil { - sess = db.SetSessionPagination(sess, opts) - } - - pvs := make([]*PackageVersion, 0, 10) - count, err := sess.FindAndCount(&pvs) - return pvs, count, err + return searchVersionsBySession(sess, opts) } // SearchLatestVersions gets the latest version of every package matching the search options @@ -318,15 +320,7 @@ func SearchLatestVersions(ctx context.Context, opts *PackageSearchOptions) ([]*P Join("INNER", "package", "package.id = package_version.package_id"). 
Where(builder.In("package_version.id", in)) - opts.configureOrderBy(sess) - - if opts.Paginator != nil { - sess = db.SetSessionPagination(sess, opts) - } - - pvs := make([]*PackageVersion, 0, 10) - count, err := sess.FindAndCount(&pvs) - return pvs, count, err + return searchVersionsBySession(sess, opts) } // ExistVersion checks if a version matching the search options exist diff --git a/models/perm/access/repo_permission.go b/models/perm/access/repo_permission.go index 0ed116a132..7de43ecd07 100644 --- a/models/perm/access/repo_permission.go +++ b/models/perm/access/repo_permission.go @@ -15,6 +15,7 @@ import ( "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" ) @@ -25,7 +26,8 @@ type Permission struct { units []*repo_model.RepoUnit unitsMode map[unit.Type]perm_model.AccessMode - everyoneAccessMode map[unit.Type]perm_model.AccessMode + everyoneAccessMode map[unit.Type]perm_model.AccessMode // the unit's minimal access mode for every signed-in user + anonymousAccessMode map[unit.Type]perm_model.AccessMode // the unit's minimal access mode for anonymous (non-signed-in) user } // IsOwner returns true if current user is the owner of repository. @@ -39,7 +41,8 @@ func (p *Permission) IsAdmin() bool { } // HasAnyUnitAccess returns true if the user might have at least one access mode to any unit of this repository. -// It doesn't count the "everyone access mode". +// It doesn't count the "public(anonymous/everyone) access mode". +// TODO: most calls to this function should be replaced with `HasAnyUnitAccessOrPublicAccess` func (p *Permission) HasAnyUnitAccess() bool { for _, v := range p.unitsMode { if v >= perm_model.AccessModeRead { @@ -49,13 +52,22 @@ func (p *Permission) HasAnyUnitAccess() bool { return p.AccessMode >= perm_model.AccessModeRead } -func (p *Permission) HasAnyUnitAccessOrEveryoneAccess() bool { +func (p *Permission) HasAnyUnitPublicAccess() bool { + for _, v := range p.anonymousAccessMode { + if v >= perm_model.AccessModeRead { + return true + } + } for _, v := range p.everyoneAccessMode { if v >= perm_model.AccessModeRead { return true } } - return p.HasAnyUnitAccess() + return false +} + +func (p *Permission) HasAnyUnitAccessOrPublicAccess() bool { + return p.HasAnyUnitPublicAccess() || p.HasAnyUnitAccess() } // HasUnits returns true if the permission contains attached units @@ -73,14 +85,16 @@ func (p *Permission) GetFirstUnitRepoID() int64 { } // UnitAccessMode returns current user access mode to the specify unit of the repository -// It also considers "everyone access mode" +// It also considers "public (anonymous/everyone) access mode" func (p *Permission) UnitAccessMode(unitType unit.Type) perm_model.AccessMode { // if the units map contains the access mode, use it, but admin/owner mode could override it if m, ok := p.unitsMode[unitType]; ok { return util.Iif(p.AccessMode >= perm_model.AccessModeAdmin, p.AccessMode, m) } // if the units map does not contain the access mode, return the default access mode if the unit exists - unitDefaultAccessMode := max(p.AccessMode, p.everyoneAccessMode[unitType]) + unitDefaultAccessMode := p.AccessMode + unitDefaultAccessMode = max(unitDefaultAccessMode, p.anonymousAccessMode[unitType]) + unitDefaultAccessMode = max(unitDefaultAccessMode, p.everyoneAccessMode[unitType]) hasUnit := slices.ContainsFunc(p.units, func(u *repo_model.RepoUnit) bool { return u.Type == unitType }) return util.Iif(hasUnit, 
unitDefaultAccessMode, perm_model.AccessModeNone) } @@ -152,7 +166,7 @@ func (p *Permission) ReadableUnitTypes() []unit.Type { } func (p *Permission) LogString() string { - format := "<Permission AccessMode=%s, %d Units, %d UnitsMode(s): [ " + format := "<Permission AccessMode=%s, %d Units, %d UnitsMode(s): [" args := []any{p.AccessMode.ToString(), len(p.units), len(p.unitsMode)} for i, u := range p.units { @@ -164,27 +178,77 @@ func (p *Permission) LogString() string { config = err.Error() } } - format += "\nUnits[%d]: ID: %d RepoID: %d Type: %s Config: %s" + format += "\n\tunits[%d]: ID=%d RepoID=%d Type=%s Config=%s" args = append(args, i, u.ID, u.RepoID, u.Type.LogString(), config) } for key, value := range p.unitsMode { - format += "\nUnitMode[%-v]: %-v" + format += "\n\tunitsMode[%-v]: %-v" args = append(args, key.LogString(), value.LogString()) } - format += " ]>" + format += "\n\tanonymousAccessMode: %-v" + args = append(args, p.anonymousAccessMode) + format += "\n\teveryoneAccessMode: %-v" + args = append(args, p.everyoneAccessMode) + format += "\n\t]>" return fmt.Sprintf(format, args...) } -func applyEveryoneRepoPermission(user *user_model.User, perm *Permission) { +func applyPublicAccessPermission(unitType unit.Type, accessMode perm_model.AccessMode, modeMap *map[unit.Type]perm_model.AccessMode) { + if setting.Repository.ForcePrivate { + return + } + if accessMode >= perm_model.AccessModeRead && accessMode > (*modeMap)[unitType] { + if *modeMap == nil { + *modeMap = make(map[unit.Type]perm_model.AccessMode) + } + (*modeMap)[unitType] = accessMode + } +} + +func finalProcessRepoUnitPermission(user *user_model.User, perm *Permission) { + // apply public (anonymous) access permissions + for _, u := range perm.units { + applyPublicAccessPermission(u.Type, u.AnonymousAccessMode, &perm.anonymousAccessMode) + } + if user == nil || user.ID <= 0 { + // for anonymous access, it could be: + // AccessMode is None or Read, units has repo units, unitModes is nil return } + + // apply public (everyone) access permissions for _, u := range perm.units { - if u.EveryoneAccessMode >= perm_model.AccessModeRead && u.EveryoneAccessMode > perm.everyoneAccessMode[u.Type] { - if perm.everyoneAccessMode == nil { - perm.everyoneAccessMode = make(map[unit.Type]perm_model.AccessMode) + applyPublicAccessPermission(u.Type, u.EveryoneAccessMode, &perm.everyoneAccessMode) + } + + if perm.unitsMode == nil { + // if unitsMode is not set, then it means that the default p.AccessMode applies to all units + return + } + + // remove no permission units + origPermUnits := perm.units + perm.units = make([]*repo_model.RepoUnit, 0, len(perm.units)) + for _, u := range origPermUnits { + shouldKeep := false + for t := range perm.unitsMode { + if shouldKeep = u.Type == t; shouldKeep { + break + } + } + for t := range perm.anonymousAccessMode { + if shouldKeep = shouldKeep || u.Type == t; shouldKeep { + break } - perm.everyoneAccessMode[u.Type] = u.EveryoneAccessMode + } + for t := range perm.everyoneAccessMode { + if shouldKeep = shouldKeep || u.Type == t; shouldKeep { + break + } + } + if shouldKeep { + perm.units = append(perm.units, u) } } } @@ -193,11 +257,9 @@ func applyEveryoneRepoPermission(user *user_model.User, perm *Permission) { func GetUserRepoPermission(ctx context.Context, repo *repo_model.Repository, user *user_model.User) (perm Permission, err error) { defer func() { if err == nil { - applyEveryoneRepoPermission(user, &perm) - } - if log.IsTrace() { - log.Trace("Permission Loaded for user %-v in repo %-v, 
permissions: %-+v", user, repo, perm) + finalProcessRepoUnitPermission(user, &perm) } + log.Trace("Permission Loaded for user %-v in repo %-v, permissions: %-+v", user, repo, perm) }() if err = repo.LoadUnits(ctx); err != nil { @@ -206,7 +268,6 @@ func GetUserRepoPermission(ctx context.Context, repo *repo_model.Repository, use perm.units = repo.Units // anonymous user visit private repo. - // TODO: anonymous user visit public unit of private repo??? if user == nil && repo.IsPrivate { perm.AccessMode = perm_model.AccessModeNone return perm, nil @@ -225,7 +286,8 @@ func GetUserRepoPermission(ctx context.Context, repo *repo_model.Repository, use } // Prevent strangers from checking out public repo of private organization/users - // Allow user if they are collaborator of a repo within a private user or a private organization but not a member of the organization itself + // Allow user if they are a collaborator of a repo within a private user or a private organization but not a member of the organization itself + // TODO: rename it to "IsOwnerVisibleToDoer" if !organization.HasOrgOrUserVisible(ctx, repo.Owner, user) && !isCollaborator { perm.AccessMode = perm_model.AccessModeNone return perm, nil @@ -243,7 +305,7 @@ func GetUserRepoPermission(ctx context.Context, repo *repo_model.Repository, use return perm, nil } - // plain user + // plain user TODO: this check should be replaced, only need to check collaborator access mode perm.AccessMode, err = accessLevel(ctx, user, repo) if err != nil { return perm, err @@ -253,6 +315,19 @@ func GetUserRepoPermission(ctx context.Context, repo *repo_model.Repository, use return perm, nil } + // now: the owner is visible to doer, if the repo is public, then the min access mode is read + minAccessMode := util.Iif(!repo.IsPrivate && !user.IsRestricted, perm_model.AccessModeRead, perm_model.AccessModeNone) + perm.AccessMode = max(perm.AccessMode, minAccessMode) + + // get units mode from teams + teams, err := organization.GetUserRepoTeams(ctx, repo.OwnerID, user.ID, repo.ID) + if err != nil { + return perm, err + } + if len(teams) == 0 { + return perm, nil + } + perm.unitsMode = make(map[unit.Type]perm_model.AccessMode) // Collaborators on organization @@ -262,15 +337,9 @@ func GetUserRepoPermission(ctx context.Context, repo *repo_model.Repository, use } } - // get units mode from teams - teams, err := organization.GetUserRepoTeams(ctx, repo.OwnerID, user.ID, repo.ID) - if err != nil { - return perm, err - } - // if user in an owner team for _, team := range teams { - if team.AccessMode >= perm_model.AccessModeAdmin { + if team.HasAdminAccess() { perm.AccessMode = perm_model.AccessModeOwner perm.unitsMode = nil return perm, nil @@ -278,29 +347,12 @@ func GetUserRepoPermission(ctx context.Context, repo *repo_model.Repository, use } for _, u := range repo.Units { - var found bool for _, team := range teams { + unitAccessMode := minAccessMode if teamMode, exist := team.UnitAccessModeEx(ctx, u.Type); exist { - perm.unitsMode[u.Type] = max(perm.unitsMode[u.Type], teamMode) - found = true - } - } - - // for a public repo on an organization, a non-restricted user has read permission on non-team defined units. 
- if !found && !repo.IsPrivate && !user.IsRestricted { - if _, ok := perm.unitsMode[u.Type]; !ok { - perm.unitsMode[u.Type] = perm_model.AccessModeRead - } - } - } - - // remove no permission units - perm.units = make([]*repo_model.RepoUnit, 0, len(repo.Units)) - for t := range perm.unitsMode { - for _, u := range repo.Units { - if u.Type == t { - perm.units = append(perm.units, u) + unitAccessMode = max(perm.unitsMode[u.Type], unitAccessMode, teamMode) } + perm.unitsMode[u.Type] = unitAccessMode } } @@ -348,7 +400,7 @@ func IsUserRepoAdmin(ctx context.Context, repo *repo_model.Repository, user *use } for _, team := range teams { - if team.AccessMode >= perm_model.AccessModeAdmin { + if team.HasAdminAccess() { return true, nil } } @@ -357,13 +409,13 @@ func IsUserRepoAdmin(ctx context.Context, repo *repo_model.Repository, user *use // AccessLevel returns the Access a user has to a repository. Will return NoneAccess if the // user does not have access. -func AccessLevel(ctx context.Context, user *user_model.User, repo *repo_model.Repository) (perm_model.AccessMode, error) { //nolint +func AccessLevel(ctx context.Context, user *user_model.User, repo *repo_model.Repository) (perm_model.AccessMode, error) { //nolint:revive // export stutter return AccessLevelUnit(ctx, user, repo, unit.TypeCode) } // AccessLevelUnit returns the Access a user has to a repository's. Will return NoneAccess if the // user does not have access. -func AccessLevelUnit(ctx context.Context, user *user_model.User, repo *repo_model.Repository, unitType unit.Type) (perm_model.AccessMode, error) { //nolint +func AccessLevelUnit(ctx context.Context, user *user_model.User, repo *repo_model.Repository, unitType unit.Type) (perm_model.AccessMode, error) { //nolint:revive // export stutter perm, err := GetUserRepoPermission(ctx, repo, user) if err != nil { return perm_model.AccessModeNone, err @@ -471,3 +523,7 @@ func CheckRepoUnitUser(ctx context.Context, repo *repo_model.Repository, user *u return perm.CanRead(unitType) } + +func PermissionNoAccess() Permission { + return Permission{AccessMode: perm_model.AccessModeNone} +} diff --git a/models/perm/access/repo_permission_test.go b/models/perm/access/repo_permission_test.go index 50070c4368..c8675b1ded 100644 --- a/models/perm/access/repo_permission_test.go +++ b/models/perm/access/repo_permission_test.go @@ -6,12 +6,16 @@ package access import ( "testing" + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" perm_model "code.gitea.io/gitea/models/perm" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" + "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestHasAnyUnitAccess(t *testing.T) { @@ -22,14 +26,21 @@ func TestHasAnyUnitAccess(t *testing.T) { units: []*repo_model.RepoUnit{{Type: unit.TypeWiki}}, } assert.False(t, perm.HasAnyUnitAccess()) - assert.False(t, perm.HasAnyUnitAccessOrEveryoneAccess()) + assert.False(t, perm.HasAnyUnitAccessOrPublicAccess()) perm = Permission{ units: []*repo_model.RepoUnit{{Type: unit.TypeWiki}}, everyoneAccessMode: map[unit.Type]perm_model.AccessMode{unit.TypeIssues: perm_model.AccessModeRead}, } assert.False(t, perm.HasAnyUnitAccess()) - assert.True(t, perm.HasAnyUnitAccessOrEveryoneAccess()) + assert.True(t, perm.HasAnyUnitAccessOrPublicAccess()) + + perm = Permission{ + units: []*repo_model.RepoUnit{{Type: unit.TypeWiki}}, + anonymousAccessMode: 
map[unit.Type]perm_model.AccessMode{unit.TypeIssues: perm_model.AccessModeRead}, + } + assert.False(t, perm.HasAnyUnitAccess()) + assert.True(t, perm.HasAnyUnitAccessOrPublicAccess()) perm = Permission{ AccessMode: perm_model.AccessModeRead, @@ -43,23 +54,32 @@ func TestHasAnyUnitAccess(t *testing.T) { assert.True(t, perm.HasAnyUnitAccess()) } -func TestApplyEveryoneRepoPermission(t *testing.T) { +func TestApplyPublicAccessRepoPermission(t *testing.T) { perm := Permission{ AccessMode: perm_model.AccessModeNone, units: []*repo_model.RepoUnit{ {Type: unit.TypeWiki, EveryoneAccessMode: perm_model.AccessModeRead}, }, } - applyEveryoneRepoPermission(nil, &perm) + finalProcessRepoUnitPermission(nil, &perm) assert.False(t, perm.CanRead(unit.TypeWiki)) perm = Permission{ AccessMode: perm_model.AccessModeNone, units: []*repo_model.RepoUnit{ + {Type: unit.TypeWiki, AnonymousAccessMode: perm_model.AccessModeRead}, + }, + } + finalProcessRepoUnitPermission(nil, &perm) + assert.True(t, perm.CanRead(unit.TypeWiki)) + + perm = Permission{ + AccessMode: perm_model.AccessModeNone, + units: []*repo_model.RepoUnit{ {Type: unit.TypeWiki, EveryoneAccessMode: perm_model.AccessModeRead}, }, } - applyEveryoneRepoPermission(&user_model.User{ID: 0}, &perm) + finalProcessRepoUnitPermission(&user_model.User{ID: 0}, &perm) assert.False(t, perm.CanRead(unit.TypeWiki)) perm = Permission{ @@ -68,7 +88,7 @@ func TestApplyEveryoneRepoPermission(t *testing.T) { {Type: unit.TypeWiki, EveryoneAccessMode: perm_model.AccessModeRead}, }, } - applyEveryoneRepoPermission(&user_model.User{ID: 1}, &perm) + finalProcessRepoUnitPermission(&user_model.User{ID: 1}, &perm) assert.True(t, perm.CanRead(unit.TypeWiki)) perm = Permission{ @@ -77,20 +97,22 @@ func TestApplyEveryoneRepoPermission(t *testing.T) { {Type: unit.TypeWiki, EveryoneAccessMode: perm_model.AccessModeRead}, }, } - applyEveryoneRepoPermission(&user_model.User{ID: 1}, &perm) + finalProcessRepoUnitPermission(&user_model.User{ID: 1}, &perm) // it should work the same as "EveryoneAccessMode: none" because the default AccessMode should be applied to units assert.True(t, perm.CanWrite(unit.TypeWiki)) perm = Permission{ units: []*repo_model.RepoUnit{ + {Type: unit.TypeCode}, // will be removed {Type: unit.TypeWiki, EveryoneAccessMode: perm_model.AccessModeRead}, }, unitsMode: map[unit.Type]perm_model.AccessMode{ unit.TypeWiki: perm_model.AccessModeWrite, }, } - applyEveryoneRepoPermission(&user_model.User{ID: 1}, &perm) + finalProcessRepoUnitPermission(&user_model.User{ID: 1}, &perm) assert.True(t, perm.CanWrite(unit.TypeWiki)) + assert.Len(t, perm.units, 1) } func TestUnitAccessMode(t *testing.T) { @@ -134,3 +156,45 @@ func TestUnitAccessMode(t *testing.T) { } assert.Equal(t, perm_model.AccessModeRead, perm.UnitAccessMode(unit.TypeWiki), "has unit, and map, use map") } + +func TestGetUserRepoPermission(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + ctx := t.Context() + repo32 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 32}) // org public repo + require.NoError(t, repo32.LoadOwner(ctx)) + require.True(t, repo32.Owner.IsOrganization()) + + require.NoError(t, db.TruncateBeans(ctx, &organization.Team{}, &organization.TeamUser{}, &organization.TeamRepo{}, &organization.TeamUnit{})) + org := repo32.Owner + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4}) + team := &organization.Team{OrgID: org.ID, LowerName: "test_team"} + require.NoError(t, db.Insert(ctx, team)) + + t.Run("DoerInTeamWithNoRepo", func(t *testing.T) { + 
require.NoError(t, db.Insert(ctx, &organization.TeamUser{OrgID: org.ID, TeamID: team.ID, UID: user.ID})) + perm, err := GetUserRepoPermission(ctx, repo32, user) + require.NoError(t, err) + assert.Equal(t, perm_model.AccessModeRead, perm.AccessMode) + assert.Nil(t, perm.unitsMode) // doer in the team, but has no access to the repo + }) + + require.NoError(t, db.Insert(ctx, &organization.TeamRepo{OrgID: org.ID, TeamID: team.ID, RepoID: repo32.ID})) + require.NoError(t, db.Insert(ctx, &organization.TeamUnit{OrgID: org.ID, TeamID: team.ID, Type: unit.TypeCode, AccessMode: perm_model.AccessModeNone})) + t.Run("DoerWithTeamUnitAccessNone", func(t *testing.T) { + perm, err := GetUserRepoPermission(ctx, repo32, user) + require.NoError(t, err) + assert.Equal(t, perm_model.AccessModeRead, perm.AccessMode) + assert.Equal(t, perm_model.AccessModeRead, perm.unitsMode[unit.TypeCode]) + assert.Equal(t, perm_model.AccessModeRead, perm.unitsMode[unit.TypeIssues]) + }) + + require.NoError(t, db.TruncateBeans(ctx, &organization.TeamUnit{})) + require.NoError(t, db.Insert(ctx, &organization.TeamUnit{OrgID: org.ID, TeamID: team.ID, Type: unit.TypeCode, AccessMode: perm_model.AccessModeWrite})) + t.Run("DoerWithTeamUnitAccessWrite", func(t *testing.T) { + perm, err := GetUserRepoPermission(ctx, repo32, user) + require.NoError(t, err) + assert.Equal(t, perm_model.AccessModeRead, perm.AccessMode) + assert.Equal(t, perm_model.AccessModeWrite, perm.unitsMode[unit.TypeCode]) + assert.Equal(t, perm_model.AccessModeRead, perm.unitsMode[unit.TypeIssues]) + }) +} diff --git a/models/project/column.go b/models/project/column.go index 222f448599..9b9d874997 100644 --- a/models/project/column.go +++ b/models/project/column.go @@ -48,6 +48,8 @@ type Column struct { ProjectID int64 `xorm:"INDEX NOT NULL"` CreatorID int64 `xorm:"NOT NULL"` + NumIssues int64 `xorm:"-"` + CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` } @@ -57,20 +59,6 @@ func (Column) TableName() string { return "project_board" // TODO: the legacy table name should be project_column } -// NumIssues return counter of all issues assigned to the column -func (c *Column) NumIssues(ctx context.Context) int { - total, err := db.GetEngine(ctx).Table("project_issue"). - Where("project_id=?", c.ProjectID). - And("project_board_id=?", c.ID). - GroupBy("issue_id"). - Cols("issue_id"). - Count() - if err != nil { - return 0 - } - return int(total) -} - func (c *Column) GetIssues(ctx context.Context) ([]*ProjectIssue, error) { issues := make([]*ProjectIssue, 0, 5) if err := db.GetEngine(ctx).Where("project_id=?", c.ProjectID). 
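Note on the hunk above: the per-column NumIssues(ctx) counting method is removed and replaced by a non-persisted Column.NumIssues field, so counts can be batch-loaded instead of queried column by column. A minimal sketch of how callers could fill that field in one grouped query follows; the helper name, result struct, and exact query are assumptions and are not part of this diff:

package project // hypothetical placement alongside models/project/column.go

import (
	"context"

	"code.gitea.io/gitea/models/db"
)

// loadColumnNumIssues fills the new Column.NumIssues field for all columns of a
// project with a single grouped query, replacing the removed per-column count.
func loadColumnNumIssues(ctx context.Context, projectID int64, columns []*Column) error {
	type issueCount struct {
		ProjectBoardID int64
		Cnt            int64
	}
	counts := make([]issueCount, 0, len(columns))
	if err := db.GetEngine(ctx).Table("project_issue").
		Select("project_board_id, COUNT(DISTINCT issue_id) AS cnt").
		Where("project_id=?", projectID).
		GroupBy("project_board_id").
		Find(&counts); err != nil {
		return err
	}
	byColumn := make(map[int64]int64, len(counts))
	for _, cnt := range counts {
		byColumn[cnt.ProjectBoardID] = cnt.Cnt
	}
	for _, col := range columns {
		col.NumIssues = byColumn[col.ID]
	}
	return nil
}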
@@ -159,7 +147,7 @@ func NewColumn(ctx context.Context, column *Column) error { return err } if res.ColumnCount >= maxProjectColumns { - return fmt.Errorf("NewBoard: maximum number of columns reached") + return errors.New("NewBoard: maximum number of columns reached") } column.Sorting = int8(util.Iif(res.ColumnCount > 0, res.MaxSorting+1, 0)) _, err := db.GetEngine(ctx).Insert(column) @@ -184,7 +172,7 @@ func deleteColumnByID(ctx context.Context, columnID int64) error { } if column.Default { - return fmt.Errorf("deleteColumnByID: cannot delete default column") + return errors.New("deleteColumnByID: cannot delete default column") } // move all issues to the default column @@ -192,7 +180,7 @@ func deleteColumnByID(ctx context.Context, columnID int64) error { if err != nil { return err } - defaultColumn, err := project.GetDefaultColumn(ctx) + defaultColumn, err := project.MustDefaultColumn(ctx) if err != nil { return err } @@ -257,8 +245,8 @@ func (p *Project) GetColumns(ctx context.Context) (ColumnList, error) { return columns, nil } -// GetDefaultColumn return default column and ensure only one exists -func (p *Project) GetDefaultColumn(ctx context.Context) (*Column, error) { +// getDefaultColumn return default column and ensure only one exists +func (p *Project) getDefaultColumn(ctx context.Context) (*Column, error) { var column Column has, err := db.GetEngine(ctx). Where("project_id=? AND `default` = ?", p.ID, true). @@ -270,6 +258,33 @@ func (p *Project) GetDefaultColumn(ctx context.Context) (*Column, error) { if has { return &column, nil } + return nil, ErrProjectColumnNotExist{ColumnID: 0} +} + +// MustDefaultColumn returns the default column for a project. +// If one exists, it is returned +// If none exists, the first column will be elevated to the default column of this project +func (p *Project) MustDefaultColumn(ctx context.Context) (*Column, error) { + c, err := p.getDefaultColumn(ctx) + if err != nil && !IsErrProjectColumnNotExist(err) { + return nil, err + } + if c != nil { + return c, nil + } + + var column Column + has, err := db.GetEngine(ctx).Where("project_id=?", p.ID).OrderBy("sorting, id").Get(&column) + if err != nil { + return nil, err + } + if has { + column.Default = true + if _, err := db.GetEngine(ctx).ID(column.ID).Cols("`default`").Update(&column); err != nil { + return nil, err + } + return &column, nil + } // create a default column if none is found column = Column{ @@ -321,6 +336,9 @@ func UpdateColumnSorting(ctx context.Context, cl ColumnList) error { func GetColumnsByIDs(ctx context.Context, projectID int64, columnsIDs []int64) (ColumnList, error) { columns := make([]*Column, 0, 5) + if len(columnsIDs) == 0 { + return columns, nil + } if err := db.GetEngine(ctx). Where("project_id =?", projectID). In("id", columnsIDs). 
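A short usage sketch of the renamed MustDefaultColumn helper, mirroring how deleteColumnByID above now resolves a fallback column; the wrapper function below is hypothetical, assumes it lives in the models/project package, and is not part of this diff:

package project // hypothetical caller inside models/project

import "context"

// moveIssuesToDefaultColumn shows the intended use of MustDefaultColumn:
// resolve (or create/elevate) the default column, then move issues into it.
func moveIssuesToDefaultColumn(ctx context.Context, p *Project, c *Column) error {
	defaultColumn, err := p.MustDefaultColumn(ctx)
	if err != nil {
		return err
	}
	return c.moveIssuesToAnotherColumn(ctx, defaultColumn)
}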
diff --git a/models/project/column_test.go b/models/project/column_test.go index 566667e45d..6a615090a5 100644 --- a/models/project/column_test.go +++ b/models/project/column_test.go @@ -20,19 +20,19 @@ func TestGetDefaultColumn(t *testing.T) { assert.NoError(t, err) // check if default column was added - column, err := projectWithoutDefault.GetDefaultColumn(db.DefaultContext) + column, err := projectWithoutDefault.MustDefaultColumn(db.DefaultContext) assert.NoError(t, err) assert.Equal(t, int64(5), column.ProjectID) - assert.Equal(t, "Uncategorized", column.Title) + assert.Equal(t, "Done", column.Title) projectWithMultipleDefaults, err := GetProjectByID(db.DefaultContext, 6) assert.NoError(t, err) // check if multiple defaults were removed - column, err = projectWithMultipleDefaults.GetDefaultColumn(db.DefaultContext) + column, err = projectWithMultipleDefaults.MustDefaultColumn(db.DefaultContext) assert.NoError(t, err) assert.Equal(t, int64(6), column.ProjectID) - assert.Equal(t, int64(9), column.ID) + assert.Equal(t, int64(9), column.ID) // there are 2 default columns in the test data, use the latest one // set 8 as default column assert.NoError(t, SetDefaultColumn(db.DefaultContext, column.ProjectID, 8)) @@ -97,9 +97,9 @@ func Test_MoveColumnsOnProject(t *testing.T) { columnsAfter, err := project1.GetColumns(db.DefaultContext) assert.NoError(t, err) assert.Len(t, columnsAfter, 3) - assert.EqualValues(t, columns[1].ID, columnsAfter[0].ID) - assert.EqualValues(t, columns[2].ID, columnsAfter[1].ID) - assert.EqualValues(t, columns[0].ID, columnsAfter[2].ID) + assert.Equal(t, columns[1].ID, columnsAfter[0].ID) + assert.Equal(t, columns[2].ID, columnsAfter[1].ID) + assert.Equal(t, columns[0].ID, columnsAfter[2].ID) } func Test_NewColumn(t *testing.T) { @@ -110,7 +110,7 @@ func Test_NewColumn(t *testing.T) { assert.NoError(t, err) assert.Len(t, columns, 3) - for i := 0; i < maxProjectColumns-3; i++ { + for i := range maxProjectColumns - 3 { err := NewColumn(db.DefaultContext, &Column{ Title: fmt.Sprintf("column-%d", i+4), ProjectID: project1.ID, diff --git a/models/project/issue.go b/models/project/issue.go index b4347a9c2b..47d1537ec7 100644 --- a/models/project/issue.go +++ b/models/project/issue.go @@ -5,10 +5,9 @@ package project import ( "context" - "fmt" + "errors" "code.gitea.io/gitea/models/db" - "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/util" ) @@ -34,51 +33,9 @@ func deleteProjectIssuesByProjectID(ctx context.Context, projectID int64) error return err } -// NumIssues return counter of all issues assigned to a project -func (p *Project) NumIssues(ctx context.Context) int { - c, err := db.GetEngine(ctx).Table("project_issue"). - Where("project_id=?", p.ID). - GroupBy("issue_id"). - Cols("issue_id"). - Count() - if err != nil { - log.Error("NumIssues: %v", err) - return 0 - } - return int(c) -} - -// NumClosedIssues return counter of closed issues assigned to a project -func (p *Project) NumClosedIssues(ctx context.Context) int { - c, err := db.GetEngine(ctx).Table("project_issue"). - Join("INNER", "issue", "project_issue.issue_id=issue.id"). - Where("project_issue.project_id=? AND issue.is_closed=?", p.ID, true). - Cols("issue_id"). - Count() - if err != nil { - log.Error("NumClosedIssues: %v", err) - return 0 - } - return int(c) -} - -// NumOpenIssues return counter of open issues assigned to a project -func (p *Project) NumOpenIssues(ctx context.Context) int { - c, err := db.GetEngine(ctx).Table("project_issue"). 
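Illustrative sketch (not part of the diff): the Test_NewColumn change above uses Go 1.22's range-over-int form. A minimal standalone demonstration:

package main

import "fmt"

func main() {
	// Since Go 1.22 a for-range over an integer n yields i = 0 .. n-1, which is
	// what the updated Test_NewColumn loop uses instead of a three-clause loop.
	n := 5
	for i := range n {
		fmt.Printf("column-%d\n", i+4)
	}
}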
- Join("INNER", "issue", "project_issue.issue_id=issue.id"). - Where("project_issue.project_id=? AND issue.is_closed=?", p.ID, false). - Cols("issue_id"). - Count() - if err != nil { - log.Error("NumOpenIssues: %v", err) - return 0 - } - return int(c) -} - func (c *Column) moveIssuesToAnotherColumn(ctx context.Context, newColumn *Column) error { if c.ProjectID != newColumn.ProjectID { - return fmt.Errorf("columns have to be in the same project") + return errors.New("columns have to be in the same project") } if c.ID == newColumn.ID { diff --git a/models/project/project.go b/models/project/project.go index edeb0b4742..f516466854 100644 --- a/models/project/project.go +++ b/models/project/project.go @@ -97,6 +97,9 @@ type Project struct { Type Type RenderedContent template.HTML `xorm:"-"` + NumOpenIssues int64 `xorm:"-"` + NumClosedIssues int64 `xorm:"-"` + NumIssues int64 `xorm:"-"` CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` @@ -126,11 +129,11 @@ func (p *Project) LoadRepo(ctx context.Context) (err error) { return err } -func ProjectLinkForOrg(org *user_model.User, projectID int64) string { //nolint +func ProjectLinkForOrg(org *user_model.User, projectID int64) string { //nolint:revive // export stutter return fmt.Sprintf("%s/-/projects/%d", org.HomeLink(), projectID) } -func ProjectLinkForRepo(repo *repo_model.Repository, projectID int64) string { //nolint +func ProjectLinkForRepo(repo *repo_model.Repository, projectID int64) string { //nolint:revive // export stutter return fmt.Sprintf("%s/projects/%d", repo.Link(), projectID) } @@ -244,6 +247,10 @@ func GetSearchOrderByBySortType(sortType string) db.SearchOrderBy { return db.SearchOrderByRecentUpdated case "leastupdate": return db.SearchOrderByLeastUpdated + case "alphabetically": + return "title ASC" + case "reversealphabetically": + return "title DESC" default: return db.SearchOrderByNewest } diff --git a/models/project/project_test.go b/models/project/project_test.go index dd421b4659..c2e924e8ae 100644 --- a/models/project/project_test.go +++ b/models/project/project_test.go @@ -113,10 +113,10 @@ func TestProjectsSort(t *testing.T) { OrderBy: GetSearchOrderByBySortType(tt.sortType), }) assert.NoError(t, err) - assert.EqualValues(t, int64(6), count) + assert.Equal(t, int64(6), count) if assert.Len(t, projects, 6) { for i := range projects { - assert.EqualValues(t, tt.wants[i], projects[i].ID) + assert.Equal(t, tt.wants[i], projects[i].ID) } } } diff --git a/models/pull/automerge.go b/models/pull/automerge.go index f69fcb60d1..3cafacc3a4 100644 --- a/models/pull/automerge.go +++ b/models/pull/automerge.go @@ -15,13 +15,14 @@ import ( // AutoMerge represents a pull request scheduled for merging when checks succeed type AutoMerge struct { - ID int64 `xorm:"pk autoincr"` - PullID int64 `xorm:"UNIQUE"` - DoerID int64 `xorm:"INDEX NOT NULL"` - Doer *user_model.User `xorm:"-"` - MergeStyle repo_model.MergeStyle `xorm:"varchar(30)"` - Message string `xorm:"LONGTEXT"` - CreatedUnix timeutil.TimeStamp `xorm:"created"` + ID int64 `xorm:"pk autoincr"` + PullID int64 `xorm:"UNIQUE"` + DoerID int64 `xorm:"INDEX NOT NULL"` + Doer *user_model.User `xorm:"-"` + MergeStyle repo_model.MergeStyle `xorm:"varchar(30)"` + Message string `xorm:"LONGTEXT"` + DeleteBranchAfterMerge bool + CreatedUnix timeutil.TimeStamp `xorm:"created"` } // TableName return database table name for xorm @@ -49,7 +50,7 @@ func IsErrAlreadyScheduledToAutoMerge(err error) bool { } // ScheduleAutoMerge schedules a pull 
request to be merged when all checks succeed -func ScheduleAutoMerge(ctx context.Context, doer *user_model.User, pullID int64, style repo_model.MergeStyle, message string) error { +func ScheduleAutoMerge(ctx context.Context, doer *user_model.User, pullID int64, style repo_model.MergeStyle, message string, deleteBranchAfterMerge bool) error { // Check if we already have a merge scheduled for that pull request if exists, _, err := GetScheduledMergeByPullID(ctx, pullID); err != nil { return err @@ -58,10 +59,11 @@ func ScheduleAutoMerge(ctx context.Context, doer *user_model.User, pullID int64, } _, err := db.GetEngine(ctx).Insert(&AutoMerge{ - DoerID: doer.ID, - PullID: pullID, - MergeStyle: style, - Message: message, + DoerID: doer.ID, + PullID: pullID, + MergeStyle: style, + Message: message, + DeleteBranchAfterMerge: deleteBranchAfterMerge, }) return err } diff --git a/models/pull/review_state.go b/models/pull/review_state.go index e46a22a49d..137af00eab 100644 --- a/models/pull/review_state.go +++ b/models/pull/review_state.go @@ -6,6 +6,7 @@ package pull import ( "context" "fmt" + "maps" "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/modules/log" @@ -100,9 +101,7 @@ func mergeFiles(oldFiles, newFiles map[string]ViewedState) map[string]ViewedStat return oldFiles } - for file, viewed := range newFiles { - oldFiles[file] = viewed - } + maps.Copy(oldFiles, newFiles) return oldFiles } diff --git a/models/renderhelper/commit_checker.go b/models/renderhelper/commit_checker.go index 4815643e67..407e45fb54 100644 --- a/models/renderhelper/commit_checker.go +++ b/models/renderhelper/commit_checker.go @@ -47,7 +47,7 @@ func (c *commitChecker) IsCommitIDExisting(commitID string) bool { c.gitRepo, c.gitRepoCloser = r, closer } - exist = c.gitRepo.IsReferenceExist(commitID) // Don't use IsObjectExist since it doesn't support short hashs with gogit edition. + exist = c.gitRepo.IsReferenceExist(commitID) // Don't use IsObjectExist since it doesn't support short hashes with gogit edition. 
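Illustrative sketch (not part of the diff): ScheduleAutoMerge gains a deleteBranchAfterMerge flag that is stored on the AutoMerge row. The wrapper below is hypothetical; MergeStyleMerge is assumed to be the plain merge style constant in models/repo.

// Illustrative sketch only; scheduleMerge is hypothetical.
package example

import (
	"context"

	pull_model "code.gitea.io/gitea/models/pull"
	repo_model "code.gitea.io/gitea/models/repo"
	user_model "code.gitea.io/gitea/models/user"
)

// scheduleMerge records, at scheduling time, whether the head branch should be
// deleted once the scheduled auto-merge eventually happens.
func scheduleMerge(ctx context.Context, doer *user_model.User, pullID int64) error {
	return pull_model.ScheduleAutoMerge(ctx, doer, pullID, repo_model.MergeStyleMerge,
		"merge when checks succeed", true /* deleteBranchAfterMerge */)
}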
c.commitCache[commitID] = exist return exist } diff --git a/models/renderhelper/repo_comment.go b/models/renderhelper/repo_comment.go index 6bd5e91ad1..ae0fbf0abd 100644 --- a/models/renderhelper/repo_comment.go +++ b/models/renderhelper/repo_comment.go @@ -28,14 +28,14 @@ func (r *RepoComment) IsCommitIDExisting(commitID string) bool { return r.commitChecker.IsCommitIDExisting(commitID) } -func (r *RepoComment) ResolveLink(link string, likeType markup.LinkType) (finalLink string) { - switch likeType { - case markup.LinkTypeApp: - finalLink = r.ctx.ResolveLinkApp(link) +func (r *RepoComment) ResolveLink(link, preferLinkType string) string { + linkType, link := markup.ParseRenderedLink(link, preferLinkType) + switch linkType { + case markup.LinkTypeRoot: + return r.ctx.ResolveLinkRoot(link) default: - finalLink = r.ctx.ResolveLinkRelative(r.repoLink, r.opts.CurrentRefPath, link) + return r.ctx.ResolveLinkRelative(r.repoLink, r.opts.CurrentRefPath, link) } - return finalLink } var _ markup.RenderHelper = (*RepoComment)(nil) @@ -44,30 +44,31 @@ type RepoCommentOptions struct { DeprecatedRepoName string // it is only a patch for the non-standard "markup" api DeprecatedOwnerName string // it is only a patch for the non-standard "markup" api CurrentRefPath string // eg: "branch/main" or "commit/11223344" + FootnoteContextID string // the extra context ID for footnotes, used to avoid conflicts with other footnotes in the same page } func NewRenderContextRepoComment(ctx context.Context, repo *repo_model.Repository, opts ...RepoCommentOptions) *markup.RenderContext { - helper := &RepoComment{ - repoLink: repo.Link(), - opts: util.OptionalArg(opts), - } + helper := &RepoComment{opts: util.OptionalArg(opts)} rctx := markup.NewRenderContext(ctx) helper.ctx = rctx + var metas map[string]string if repo != nil { helper.repoLink = repo.Link() helper.commitChecker = newCommitChecker(ctx, repo) - rctx = rctx.WithMetas(repo.ComposeMetas(ctx)) + metas = repo.ComposeCommentMetas(ctx) } else { - // this is almost dead code, only to pass the incorrect tests - helper.repoLink = fmt.Sprintf("%s/%s", helper.opts.DeprecatedOwnerName, helper.opts.DeprecatedRepoName) - rctx = rctx.WithMetas(map[string]string{ - "user": helper.opts.DeprecatedOwnerName, - "repo": helper.opts.DeprecatedRepoName, - - "markdownLineBreakStyle": "comment", - "markupAllowShortIssuePattern": "true", - }) + // repo can be nil when rendering a commit message in user's dashboard feedback whose repository has been deleted + metas = map[string]string{} + if helper.opts.DeprecatedOwnerName != "" { + // this is almost dead code, only to pass the incorrect tests + helper.repoLink = fmt.Sprintf("%s/%s", helper.opts.DeprecatedOwnerName, helper.opts.DeprecatedRepoName) + metas["user"] = helper.opts.DeprecatedOwnerName + metas["repo"] = helper.opts.DeprecatedRepoName + } + metas["markdownNewLineHardBreak"] = "true" + metas["markupAllowShortIssuePattern"] = "true" } - rctx = rctx.WithHelper(helper) + metas["footnoteContextId"] = helper.opts.FootnoteContextID + rctx = rctx.WithMetas(metas).WithHelper(helper) return rctx } diff --git a/models/renderhelper/repo_comment_test.go b/models/renderhelper/repo_comment_test.go index 01e20b9e02..3b13bff73c 100644 --- a/models/renderhelper/repo_comment_test.go +++ b/models/renderhelper/repo_comment_test.go @@ -4,7 +4,6 @@ package renderhelper import ( - "context" "testing" repo_model "code.gitea.io/gitea/models/repo" @@ -21,7 +20,7 @@ func TestRepoComment(t *testing.T) { repo1 := unittest.AssertExistsAndLoadBean(t, 
&repo_model.Repository{ID: 1}) t.Run("AutoLink", func(t *testing.T) { - rctx := NewRenderContextRepoComment(context.Background(), repo1).WithMarkupType(markdown.MarkupName) + rctx := NewRenderContextRepoComment(t.Context(), repo1).WithMarkupType(markdown.MarkupName) rendered, err := markup.RenderString(rctx, ` 65f1bf27bc3bf70f64657658635e66094edbcb4d #1 @@ -36,7 +35,7 @@ func TestRepoComment(t *testing.T) { }) t.Run("AbsoluteAndRelative", func(t *testing.T) { - rctx := NewRenderContextRepoComment(context.Background(), repo1).WithMarkupType(markdown.MarkupName) + rctx := NewRenderContextRepoComment(t.Context(), repo1).WithMarkupType(markdown.MarkupName) // It is Gitea's old behavior, the relative path is resolved to the repo path // It is different from GitHub, GitHub resolves relative links to current page's path @@ -56,7 +55,7 @@ func TestRepoComment(t *testing.T) { }) t.Run("WithCurrentRefPath", func(t *testing.T) { - rctx := NewRenderContextRepoComment(context.Background(), repo1, RepoCommentOptions{CurrentRefPath: "/commit/1234"}). + rctx := NewRenderContextRepoComment(t.Context(), repo1, RepoCommentOptions{CurrentRefPath: "/commit/1234"}). WithMarkupType(markdown.MarkupName) // the ref path is only used to render commit message: a commit message is rendered at the commit page with its commit ID path @@ -73,4 +72,11 @@ func TestRepoComment(t *testing.T) { <a href="/user2/repo1/commit/1234/image" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/commit/1234/image" alt="./image"/></a></p> `, rendered) }) + + t.Run("NoRepo", func(t *testing.T) { + rctx := NewRenderContextRepoComment(t.Context(), nil).WithMarkupType(markdown.MarkupName) + rendered, err := markup.RenderString(rctx, "any") + assert.NoError(t, err) + assert.Equal(t, "<p>any</p>\n", rendered) + }) } diff --git a/models/renderhelper/repo_file.go b/models/renderhelper/repo_file.go index 794828c617..e0375ed280 100644 --- a/models/renderhelper/repo_file.go +++ b/models/renderhelper/repo_file.go @@ -29,17 +29,17 @@ func (r *RepoFile) IsCommitIDExisting(commitID string) bool { return r.commitChecker.IsCommitIDExisting(commitID) } -func (r *RepoFile) ResolveLink(link string, likeType markup.LinkType) string { - finalLink := link - switch likeType { - case markup.LinkTypeApp: - finalLink = r.ctx.ResolveLinkApp(link) - case markup.LinkTypeDefault: - finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "src", r.opts.CurrentRefPath), r.opts.CurrentTreePath, link) +func (r *RepoFile) ResolveLink(link, preferLinkType string) (finalLink string) { + linkType, link := markup.ParseRenderedLink(link, preferLinkType) + switch linkType { + case markup.LinkTypeRoot: + finalLink = r.ctx.ResolveLinkRoot(link) case markup.LinkTypeRaw: finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "raw", r.opts.CurrentRefPath), r.opts.CurrentTreePath, link) case markup.LinkTypeMedia: finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "media", r.opts.CurrentRefPath), r.opts.CurrentTreePath, link) + default: + finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "src", r.opts.CurrentRefPath), r.opts.CurrentTreePath, link) } return finalLink } @@ -61,15 +61,13 @@ func NewRenderContextRepoFile(ctx context.Context, repo *repo_model.Repository, if repo != nil { helper.repoLink = repo.Link() helper.commitChecker = newCommitChecker(ctx, repo) - rctx = rctx.WithMetas(repo.ComposeDocumentMetas(ctx)) + rctx = rctx.WithMetas(repo.ComposeRepoFileMetas(ctx)) } else { // this is almost dead code, only to pass the incorrect tests 
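Illustrative test sketch (not part of the diff): it combines the two changes visible in this test file, t.Context() from Go 1.24 and the new FootnoteContextID option, and assumes the same setup as the surrounding file (unittest.PrepareTestEnv); the footnote context value is arbitrary.

// Illustrative test sketch; assumes the surrounding file's test setup.
package renderhelper_test

import (
	"testing"

	"code.gitea.io/gitea/models/renderhelper"
	"code.gitea.io/gitea/modules/markup"
	"code.gitea.io/gitea/modules/markup/markdown"

	"github.com/stretchr/testify/assert"
)

func TestCommentFootnoteContext(t *testing.T) {
	// t.Context() (Go 1.24) replaces context.Background() and is cancelled when
	// the test finishes; FootnoteContextID namespaces footnote anchors so two
	// comments rendered on one page do not collide.
	rctx := renderhelper.NewRenderContextRepoComment(t.Context(), nil, renderhelper.RepoCommentOptions{
		FootnoteContextID: "comment-123",
	}).WithMarkupType(markdown.MarkupName)
	rendered, err := markup.RenderString(rctx, "any")
	assert.NoError(t, err)
	assert.NotEmpty(t, rendered)
}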
helper.repoLink = fmt.Sprintf("%s/%s", helper.opts.DeprecatedOwnerName, helper.opts.DeprecatedRepoName) rctx = rctx.WithMetas(map[string]string{ "user": helper.opts.DeprecatedOwnerName, "repo": helper.opts.DeprecatedRepoName, - - "markdownLineBreakStyle": "document", }) } rctx = rctx.WithHelper(helper) diff --git a/models/renderhelper/repo_file_test.go b/models/renderhelper/repo_file_test.go index 959648b660..3b48efba3a 100644 --- a/models/renderhelper/repo_file_test.go +++ b/models/renderhelper/repo_file_test.go @@ -4,7 +4,6 @@ package renderhelper import ( - "context" "testing" repo_model "code.gitea.io/gitea/models/repo" @@ -22,7 +21,7 @@ func TestRepoFile(t *testing.T) { repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}) t.Run("AutoLink", func(t *testing.T) { - rctx := NewRenderContextRepoFile(context.Background(), repo1).WithMarkupType(markdown.MarkupName) + rctx := NewRenderContextRepoFile(t.Context(), repo1).WithMarkupType(markdown.MarkupName) rendered, err := markup.RenderString(rctx, ` 65f1bf27bc3bf70f64657658635e66094edbcb4d #1 @@ -37,7 +36,7 @@ func TestRepoFile(t *testing.T) { }) t.Run("AbsoluteAndRelative", func(t *testing.T) { - rctx := NewRenderContextRepoFile(context.Background(), repo1, RepoFileOptions{CurrentRefPath: "branch/main"}). + rctx := NewRenderContextRepoFile(t.Context(), repo1, RepoFileOptions{CurrentRefPath: "branch/main"}). WithMarkupType(markdown.MarkupName) rendered, err := markup.RenderString(rctx, ` [/test](/test) @@ -49,13 +48,13 @@ func TestRepoFile(t *testing.T) { assert.Equal(t, `<p><a href="/user2/repo1/src/branch/main/test" rel="nofollow">/test</a> <a href="/user2/repo1/src/branch/main/test" rel="nofollow">./test</a> -<a href="/user2/repo1/media/branch/main/image" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/media/branch/main/image" alt="/image"/></a> -<a href="/user2/repo1/media/branch/main/image" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/media/branch/main/image" alt="./image"/></a></p> +<a href="/user2/repo1/src/branch/main/image" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/media/branch/main/image" alt="/image"/></a> +<a href="/user2/repo1/src/branch/main/image" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/media/branch/main/image" alt="./image"/></a></p> `, rendered) }) t.Run("WithCurrentRefPath", func(t *testing.T) { - rctx := NewRenderContextRepoFile(context.Background(), repo1, RepoFileOptions{CurrentRefPath: "/commit/1234"}). + rctx := NewRenderContextRepoFile(t.Context(), repo1, RepoFileOptions{CurrentRefPath: "/commit/1234"}). WithMarkupType(markdown.MarkupName) rendered, err := markup.RenderString(rctx, ` [/test](/test) @@ -63,12 +62,12 @@ func TestRepoFile(t *testing.T) { `) assert.NoError(t, err) assert.Equal(t, `<p><a href="/user2/repo1/src/commit/1234/test" rel="nofollow">/test</a> -<a href="/user2/repo1/media/commit/1234/image" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/media/commit/1234/image" alt="/image"/></a></p> +<a href="/user2/repo1/src/commit/1234/image" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/media/commit/1234/image" alt="/image"/></a></p> `, rendered) }) t.Run("WithCurrentRefPathByTag", func(t *testing.T) { - rctx := NewRenderContextRepoFile(context.Background(), repo1, RepoFileOptions{ + rctx := NewRenderContextRepoFile(t.Context(), repo1, RepoFileOptions{ CurrentRefPath: "/commit/1234", CurrentTreePath: "my-dir", }). 
@@ -78,7 +77,7 @@ func TestRepoFile(t *testing.T) { <video src="LINK"> `) assert.NoError(t, err) - assert.Equal(t, `<a href="/user2/repo1/media/commit/1234/my-dir/LINK" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/media/commit/1234/my-dir/LINK"/></a> + assert.Equal(t, `<a href="/user2/repo1/src/commit/1234/my-dir/LINK" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/media/commit/1234/my-dir/LINK"/></a> <video src="/user2/repo1/media/commit/1234/my-dir/LINK"> </video>`, rendered) }) @@ -89,7 +88,7 @@ func TestRepoFileOrgMode(t *testing.T) { repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}) t.Run("Links", func(t *testing.T) { - rctx := NewRenderContextRepoFile(context.Background(), repo1, RepoFileOptions{ + rctx := NewRenderContextRepoFile(t.Context(), repo1, RepoFileOptions{ CurrentRefPath: "/commit/1234", CurrentTreePath: "my-dir", }).WithRelativePath("my-dir/a.org") @@ -101,12 +100,12 @@ func TestRepoFileOrgMode(t *testing.T) { assert.NoError(t, err) assert.Equal(t, `<p> <a href="https://google.com/" rel="nofollow">https://google.com/</a> -<a href="/user2/repo1/media/commit/1234/my-dir/ImageLink.svg" rel="nofollow">The Image Desc</a></p> +<a href="/user2/repo1/src/commit/1234/my-dir/ImageLink.svg" rel="nofollow">The Image Desc</a></p> `, rendered) }) t.Run("CodeHighlight", func(t *testing.T) { - rctx := NewRenderContextRepoFile(context.Background(), repo1, RepoFileOptions{}).WithRelativePath("my-dir/a.org") + rctx := NewRenderContextRepoFile(t.Context(), repo1, RepoFileOptions{}).WithRelativePath("my-dir/a.org") rendered, err := markup.RenderString(rctx, ` #+begin_src c diff --git a/models/renderhelper/repo_wiki.go b/models/renderhelper/repo_wiki.go index aa456bf6ce..b75f1b9701 100644 --- a/models/renderhelper/repo_wiki.go +++ b/models/renderhelper/repo_wiki.go @@ -30,18 +30,16 @@ func (r *RepoWiki) IsCommitIDExisting(commitID string) bool { return r.commitChecker.IsCommitIDExisting(commitID) } -func (r *RepoWiki) ResolveLink(link string, likeType markup.LinkType) string { - finalLink := link - switch likeType { - case markup.LinkTypeApp: - finalLink = r.ctx.ResolveLinkApp(link) - case markup.LinkTypeDefault: - finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "wiki", r.opts.currentRefPath), r.opts.currentTreePath, link) - case markup.LinkTypeMedia: +func (r *RepoWiki) ResolveLink(link, preferLinkType string) (finalLink string) { + linkType, link := markup.ParseRenderedLink(link, preferLinkType) + switch linkType { + case markup.LinkTypeRoot: + finalLink = r.ctx.ResolveLinkRoot(link) + case markup.LinkTypeMedia, markup.LinkTypeRaw: finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "wiki/raw", r.opts.currentRefPath), r.opts.currentTreePath, link) - case markup.LinkTypeRaw: // wiki doesn't use it + default: + finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "wiki", r.opts.currentRefPath), r.opts.currentTreePath, link) } - return finalLink } @@ -70,7 +68,6 @@ func NewRenderContextRepoWiki(ctx context.Context, repo *repo_model.Repository, "user": helper.opts.DeprecatedOwnerName, "repo": helper.opts.DeprecatedRepoName, - "markdownLineBreakStyle": "document", "markupAllowShortIssuePattern": "true", }) } diff --git a/models/renderhelper/repo_wiki_test.go b/models/renderhelper/repo_wiki_test.go index beab2570e7..4f6da541a5 100644 --- a/models/renderhelper/repo_wiki_test.go +++ b/models/renderhelper/repo_wiki_test.go @@ -4,7 +4,6 @@ package renderhelper import ( - "context" "testing" repo_model 
"code.gitea.io/gitea/models/repo" @@ -20,7 +19,7 @@ func TestRepoWiki(t *testing.T) { repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}) t.Run("AutoLink", func(t *testing.T) { - rctx := NewRenderContextRepoWiki(context.Background(), repo1).WithMarkupType(markdown.MarkupName) + rctx := NewRenderContextRepoWiki(t.Context(), repo1).WithMarkupType(markdown.MarkupName) rendered, err := markup.RenderString(rctx, ` 65f1bf27bc3bf70f64657658635e66094edbcb4d #1 @@ -35,7 +34,7 @@ func TestRepoWiki(t *testing.T) { }) t.Run("AbsoluteAndRelative", func(t *testing.T) { - rctx := NewRenderContextRepoWiki(context.Background(), repo1).WithMarkupType(markdown.MarkupName) + rctx := NewRenderContextRepoWiki(t.Context(), repo1).WithMarkupType(markdown.MarkupName) rendered, err := markup.RenderString(rctx, ` [/test](/test) [./test](./test) @@ -46,19 +45,19 @@ func TestRepoWiki(t *testing.T) { assert.Equal(t, `<p><a href="/user2/repo1/wiki/test" rel="nofollow">/test</a> <a href="/user2/repo1/wiki/test" rel="nofollow">./test</a> -<a href="/user2/repo1/wiki/raw/image" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/wiki/raw/image" alt="/image"/></a> -<a href="/user2/repo1/wiki/raw/image" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/wiki/raw/image" alt="./image"/></a></p> +<a href="/user2/repo1/wiki/image" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/wiki/raw/image" alt="/image"/></a> +<a href="/user2/repo1/wiki/image" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/wiki/raw/image" alt="./image"/></a></p> `, rendered) }) t.Run("PathInTag", func(t *testing.T) { - rctx := NewRenderContextRepoWiki(context.Background(), repo1).WithMarkupType(markdown.MarkupName) + rctx := NewRenderContextRepoWiki(t.Context(), repo1).WithMarkupType(markdown.MarkupName) rendered, err := markup.RenderString(rctx, ` <img src="LINK"> <video src="LINK"> `) assert.NoError(t, err) - assert.Equal(t, `<a href="/user2/repo1/wiki/raw/LINK" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/wiki/raw/LINK"/></a> + assert.Equal(t, `<a href="/user2/repo1/wiki/LINK" target="_blank" rel="nofollow noopener"><img src="/user2/repo1/wiki/raw/LINK"/></a> <video src="/user2/repo1/wiki/raw/LINK"> </video>`, rendered) }) diff --git a/models/renderhelper/simple_document.go b/models/renderhelper/simple_document.go index 91d888aa87..9b3dacaea3 100644 --- a/models/renderhelper/simple_document.go +++ b/models/renderhelper/simple_document.go @@ -15,8 +15,14 @@ type SimpleDocument struct { baseLink string } -func (r *SimpleDocument) ResolveLink(link string, likeType markup.LinkType) string { - return r.ctx.ResolveLinkRelative(r.baseLink, "", link) +func (r *SimpleDocument) ResolveLink(link, preferLinkType string) string { + linkType, link := markup.ParseRenderedLink(link, preferLinkType) + switch linkType { + case markup.LinkTypeRoot: + return r.ctx.ResolveLinkRoot(link) + default: + return r.ctx.ResolveLinkRelative(r.baseLink, "", link) + } } var _ markup.RenderHelper = (*SimpleDocument)(nil) diff --git a/models/renderhelper/simple_document_test.go b/models/renderhelper/simple_document_test.go index c0d5fd7429..890592860a 100644 --- a/models/renderhelper/simple_document_test.go +++ b/models/renderhelper/simple_document_test.go @@ -4,7 +4,6 @@ package renderhelper import ( - "context" "testing" "code.gitea.io/gitea/models/unittest" @@ -16,7 +15,7 @@ import ( func TestSimpleDocument(t *testing.T) { unittest.PrepareTestEnv(t) - rctx := 
NewRenderContextSimpleDocument(context.Background(), "/base").WithMarkupType(markdown.MarkupName) + rctx := NewRenderContextSimpleDocument(t.Context(), "/base").WithMarkupType(markdown.MarkupName) rendered, err := markup.RenderString(rctx, ` 65f1bf27bc3bf70f64657658635e66094edbcb4d #1 @@ -31,7 +30,7 @@ func TestSimpleDocument(t *testing.T) { assert.Equal(t, `<p>65f1bf27bc3bf70f64657658635e66094edbcb4d #1 -<a href="/base/user2" rel="nofollow">@user2</a></p> +<a href="/user2" rel="nofollow">@user2</a></p> <p><a href="/base/test" rel="nofollow">/test</a> <a href="/base/test" rel="nofollow">./test</a> <a href="/base/image" target="_blank" rel="nofollow noopener"><img src="/base/image" alt="/image"/></a> diff --git a/models/repo/archiver.go b/models/repo/archiver.go index 14ffa1d89b..d06e94e5ac 100644 --- a/models/repo/archiver.go +++ b/models/repo/archiver.go @@ -50,22 +50,17 @@ func (archiver *RepoArchiver) RelativePath() string { func repoArchiverForRelativePath(relativePath string) (*RepoArchiver, error) { parts := strings.SplitN(relativePath, "/", 3) if len(parts) != 3 { - return nil, util.SilentWrap{Message: fmt.Sprintf("invalid storage path: %s", relativePath), Err: util.ErrInvalidArgument} + return nil, util.NewInvalidArgumentErrorf("invalid storage path: must have 3 parts") } repoID, err := strconv.ParseInt(parts[0], 10, 64) if err != nil { - return nil, util.SilentWrap{Message: fmt.Sprintf("invalid storage path: %s", relativePath), Err: util.ErrInvalidArgument} + return nil, util.NewInvalidArgumentErrorf("invalid storage path: invalid repo id") } - nameExts := strings.SplitN(parts[2], ".", 2) - if len(nameExts) != 2 { - return nil, util.SilentWrap{Message: fmt.Sprintf("invalid storage path: %s", relativePath), Err: util.ErrInvalidArgument} + commitID, archiveType := git.SplitArchiveNameType(parts[2]) + if archiveType == git.ArchiveUnknown { + return nil, util.NewInvalidArgumentErrorf("invalid storage path: invalid archive type") } - - return &RepoArchiver{ - RepoID: repoID, - CommitID: parts[1] + nameExts[0], - Type: git.ToArchiveType(nameExts[1]), - }, nil + return &RepoArchiver{RepoID: repoID, CommitID: commitID, Type: archiveType}, nil } // GetRepoArchiver get an archiver diff --git a/models/repo/attachment.go b/models/repo/attachment.go index fa4f6c47e6..835bee5402 100644 --- a/models/repo/attachment.go +++ b/models/repo/attachment.go @@ -224,7 +224,7 @@ func DeleteAttachmentsByComment(ctx context.Context, commentID int64, remove boo // UpdateAttachmentByUUID Updates attachment via uuid func UpdateAttachmentByUUID(ctx context.Context, attach *Attachment, cols ...string) error { if attach.UUID == "" { - return fmt.Errorf("attachment uuid should be not blank") + return errors.New("attachment uuid should be not blank") } _, err := db.GetEngine(ctx).Where("uuid=?", attach.UUID).Cols(cols...).Update(attach) return err diff --git a/models/repo/avatar.go b/models/repo/avatar.go index ccfac12cad..eff64bd239 100644 --- a/models/repo/avatar.go +++ b/models/repo/avatar.go @@ -9,6 +9,7 @@ import ( "image/png" "io" "net/url" + "strconv" "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/modules/avatar" @@ -37,7 +38,7 @@ func (repo *Repository) RelAvatarLink(ctx context.Context) string { // generateRandomAvatar generates a random avatar for repository. 
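Illustrative sketch (not part of the diff): repoArchiverForRelativePath now delegates the "<commit-ish>.<ext>" parsing to git.SplitArchiveNameType. The snippet below only shows that helper's shape; the file name and function name are made up.

// Illustrative sketch only; the archive name is made up.
package example

import (
	"fmt"

	"code.gitea.io/gitea/modules/git"
)

func splitArchiveName() {
	// SplitArchiveNameType splits "<commit-ish>.<ext>" into the commit part and
	// an ArchiveType; unsupported extensions come back as git.ArchiveUnknown,
	// which repoArchiverForRelativePath turns into an invalid-argument error.
	commitID, archiveType := git.SplitArchiveNameType("4a1b2c3d.tar.gz")
	if archiveType == git.ArchiveUnknown {
		fmt.Println("unsupported archive extension")
		return
	}
	fmt.Println(commitID, archiveType)
}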
func generateRandomAvatar(ctx context.Context, repo *Repository) error { - idToString := fmt.Sprintf("%d", repo.ID) + idToString := strconv.FormatInt(repo.ID, 10) seed := idToString img, err := avatar.RandomImage([]byte(seed)) diff --git a/models/repo/collaboration_test.go b/models/repo/collaboration_test.go index 639050f5fd..7b07dbffdf 100644 --- a/models/repo/collaboration_test.go +++ b/models/repo/collaboration_test.go @@ -25,8 +25,8 @@ func TestRepository_GetCollaborators(t *testing.T) { assert.NoError(t, err) assert.Len(t, collaborators, int(expectedLen)) for _, collaborator := range collaborators { - assert.EqualValues(t, collaborator.User.ID, collaborator.Collaboration.UserID) - assert.EqualValues(t, repoID, collaborator.Collaboration.RepoID) + assert.Equal(t, collaborator.User.ID, collaborator.Collaboration.UserID) + assert.Equal(t, repoID, collaborator.Collaboration.RepoID) } } test(1) @@ -51,7 +51,7 @@ func TestRepository_GetCollaborators(t *testing.T) { assert.NoError(t, err) assert.Len(t, collaborators2, 1) - assert.NotEqualValues(t, collaborators1[0].ID, collaborators2[0].ID) + assert.NotEqual(t, collaborators1[0].ID, collaborators2[0].ID) } func TestRepository_IsCollaborator(t *testing.T) { @@ -76,10 +76,10 @@ func TestRepository_ChangeCollaborationAccessMode(t *testing.T) { assert.NoError(t, repo_model.ChangeCollaborationAccessMode(db.DefaultContext, repo, 4, perm.AccessModeAdmin)) collaboration := unittest.AssertExistsAndLoadBean(t, &repo_model.Collaboration{RepoID: repo.ID, UserID: 4}) - assert.EqualValues(t, perm.AccessModeAdmin, collaboration.Mode) + assert.Equal(t, perm.AccessModeAdmin, collaboration.Mode) access := unittest.AssertExistsAndLoadBean(t, &access_model.Access{UserID: 4, RepoID: repo.ID}) - assert.EqualValues(t, perm.AccessModeAdmin, access.Mode) + assert.Equal(t, perm.AccessModeAdmin, access.Mode) assert.NoError(t, repo_model.ChangeCollaborationAccessMode(db.DefaultContext, repo, 4, perm.AccessModeAdmin)) diff --git a/models/repo/license.go b/models/repo/license.go index 366b4598cc..9bcf0f7bc9 100644 --- a/models/repo/license.go +++ b/models/repo/license.go @@ -54,6 +54,7 @@ func UpdateRepoLicenses(ctx context.Context, repo *Repository, commitID string, for _, o := range oldLicenses { // Update already existing license if o.License == license { + o.CommitID = commitID if _, err := db.GetEngine(ctx).ID(o.ID).Cols("`commit_id`").Update(o); err != nil { return err } diff --git a/models/repo/org_repo.go b/models/repo/org_repo.go index 5f0af2d475..96f21ba2ac 100644 --- a/models/repo/org_repo.go +++ b/models/repo/org_repo.go @@ -47,10 +47,9 @@ func GetTeamRepositories(ctx context.Context, opts *SearchTeamRepoOptions) (Repo // AccessibleReposEnvironment operations involving the repositories that are // accessible to a particular user type AccessibleReposEnvironment interface { - CountRepos() (int64, error) - RepoIDs(page, pageSize int) ([]int64, error) - Repos(page, pageSize int) (RepositoryList, error) - MirrorRepos() (RepositoryList, error) + CountRepos(ctx context.Context) (int64, error) + RepoIDs(ctx context.Context) ([]int64, error) + MirrorRepos(ctx context.Context) (RepositoryList, error) AddKeyword(keyword string) SetSort(db.SearchOrderBy) } @@ -60,7 +59,6 @@ type accessibleReposEnv struct { user *user_model.User team *org_model.Team teamIDs []int64 - ctx context.Context keyword string orderBy db.SearchOrderBy } @@ -86,18 +84,16 @@ func AccessibleReposEnv(ctx context.Context, org *org_model.Organization, userID org: org, user: user, teamIDs: teamIDs, - 
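Illustrative sketch (not part of the diff): the collaboration test changes above replace assert.EqualValues with assert.Equal. A minimal standalone test showing the difference between the two testify helpers:

package example

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

// assert.EqualValues converts both operands to a common type before comparing,
// so it tolerates an int vs int64 mismatch; assert.Equal requires identical
// types. The tests above switch to the stricter form because both sides
// already share a type.
func TestEqualVersusEqualValues(t *testing.T) {
	var id int64 = 4
	assert.EqualValues(t, 4, id)  // passes: 4 (int) is converted to int64
	assert.Equal(t, int64(4), id) // passes, and would flag a type mismatch
}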
ctx: ctx, orderBy: db.SearchOrderByRecentUpdated, }, nil } // AccessibleTeamReposEnv an AccessibleReposEnvironment for the repositories in `org` // that are accessible to the specified team. -func AccessibleTeamReposEnv(ctx context.Context, org *org_model.Organization, team *org_model.Team) AccessibleReposEnvironment { +func AccessibleTeamReposEnv(org *org_model.Organization, team *org_model.Team) AccessibleReposEnvironment { return &accessibleReposEnv{ org: org, team: team, - ctx: ctx, orderBy: db.SearchOrderByRecentUpdated, } } @@ -123,8 +119,8 @@ func (env *accessibleReposEnv) cond() builder.Cond { return cond } -func (env *accessibleReposEnv) CountRepos() (int64, error) { - repoCount, err := db.GetEngine(env.ctx). +func (env *accessibleReposEnv) CountRepos(ctx context.Context) (int64, error) { + repoCount, err := db.GetEngine(ctx). Join("INNER", "team_repo", "`team_repo`.repo_id=`repository`.id"). Where(env.cond()). Distinct("`repository`.id"). @@ -135,43 +131,21 @@ func (env *accessibleReposEnv) CountRepos() (int64, error) { return repoCount, nil } -func (env *accessibleReposEnv) RepoIDs(page, pageSize int) ([]int64, error) { - if page <= 0 { - page = 1 - } - - repoIDs := make([]int64, 0, pageSize) - return repoIDs, db.GetEngine(env.ctx). +func (env *accessibleReposEnv) RepoIDs(ctx context.Context) ([]int64, error) { + var repoIDs []int64 + return repoIDs, db.GetEngine(ctx). Table("repository"). Join("INNER", "team_repo", "`team_repo`.repo_id=`repository`.id"). Where(env.cond()). - GroupBy("`repository`.id,`repository`."+strings.Fields(string(env.orderBy))[0]). + GroupBy("`repository`.id,`repository`." + strings.Fields(string(env.orderBy))[0]). OrderBy(string(env.orderBy)). - Limit(pageSize, (page-1)*pageSize). Cols("`repository`.id"). Find(&repoIDs) } -func (env *accessibleReposEnv) Repos(page, pageSize int) (RepositoryList, error) { - repoIDs, err := env.RepoIDs(page, pageSize) - if err != nil { - return nil, fmt.Errorf("GetUserRepositoryIDs: %w", err) - } - - repos := make([]*Repository, 0, len(repoIDs)) - if len(repoIDs) == 0 { - return repos, nil - } - - return repos, db.GetEngine(env.ctx). - In("`repository`.id", repoIDs). - OrderBy(string(env.orderBy)). - Find(&repos) -} - -func (env *accessibleReposEnv) MirrorRepoIDs() ([]int64, error) { +func (env *accessibleReposEnv) MirrorRepoIDs(ctx context.Context) ([]int64, error) { repoIDs := make([]int64, 0, 10) - return repoIDs, db.GetEngine(env.ctx). + return repoIDs, db.GetEngine(ctx). Table("repository"). Join("INNER", "team_repo", "`team_repo`.repo_id=`repository`.id AND `repository`.is_mirror=?", true). Where(env.cond()). @@ -181,8 +155,8 @@ func (env *accessibleReposEnv) MirrorRepoIDs() ([]int64, error) { Find(&repoIDs) } -func (env *accessibleReposEnv) MirrorRepos() (RepositoryList, error) { - repoIDs, err := env.MirrorRepoIDs() +func (env *accessibleReposEnv) MirrorRepos(ctx context.Context) (RepositoryList, error) { + repoIDs, err := env.MirrorRepoIDs(ctx) if err != nil { return nil, fmt.Errorf("MirrorRepoIDs: %w", err) } @@ -192,7 +166,7 @@ func (env *accessibleReposEnv) MirrorRepos() (RepositoryList, error) { return repos, nil } - return repos, db.GetEngine(env.ctx). + return repos, db.GetEngine(ctx). In("`repository`.id", repoIDs). 
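Illustrative sketch (not part of the diff): accessibleReposEnv no longer captures a context at construction time, so every query method now takes the caller's context. The helper below is hypothetical and assumes the usual Gitea import paths.

// Illustrative sketch only; countAccessibleRepos is hypothetical.
package example

import (
	"context"

	org_model "code.gitea.io/gitea/models/organization"
	repo_model "code.gitea.io/gitea/models/repo"
)

// countAccessibleRepos shows the refactored interface: the environment is
// built once, and each query method receives the caller's context explicitly.
func countAccessibleRepos(ctx context.Context, org *org_model.Organization, userID int64) (int64, error) {
	env, err := repo_model.AccessibleReposEnv(ctx, org, userID)
	if err != nil {
		return 0, err
	}
	return env.CountRepos(ctx)
}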
Find(&repos) } diff --git a/models/repo/pushmirror_test.go b/models/repo/pushmirror_test.go index e19749d93a..9fb7471147 100644 --- a/models/repo/pushmirror_test.go +++ b/models/repo/pushmirror_test.go @@ -39,8 +39,6 @@ func TestPushMirrorsIterate(t *testing.T) { Interval: 0, }) - time.Sleep(1 * time.Millisecond) - repo_model.PushMirrorsIterate(db.DefaultContext, 1, func(idx int, bean any) error { m, ok := bean.(*repo_model.PushMirror) assert.True(t, ok) diff --git a/models/repo/release.go b/models/repo/release.go index 1c2e4a48e3..59f4caf5aa 100644 --- a/models/repo/release.go +++ b/models/repo/release.go @@ -161,6 +161,11 @@ func UpdateRelease(ctx context.Context, rel *Release) error { return err } +func UpdateReleaseNumCommits(ctx context.Context, rel *Release) error { + _, err := db.GetEngine(ctx).ID(rel.ID).Cols("num_commits").Update(rel) + return err +} + // AddReleaseAttachments adds a release attachments func AddReleaseAttachments(ctx context.Context, releaseID int64, attachmentUUIDs []string) (err error) { // Check attachments @@ -175,7 +180,7 @@ func AddReleaseAttachments(ctx context.Context, releaseID int64, attachmentUUIDs } attachments[i].ReleaseID = releaseID // No assign value could be 0, so ignore AllCols(). - if _, err = db.GetEngine(ctx).ID(attachments[i].ID).Update(attachments[i]); err != nil { + if _, err = db.GetEngine(ctx).ID(attachments[i].ID).Cols("release_id").Update(attachments[i]); err != nil { return fmt.Errorf("update attachment [%d]: %w", attachments[i].ID, err) } } @@ -418,8 +423,8 @@ func UpdateReleasesMigrationsByType(ctx context.Context, gitServiceType structs. return err } -// PushUpdateDeleteTagsContext updates a number of delete tags with context -func PushUpdateDeleteTagsContext(ctx context.Context, repo *Repository, tags []string) error { +// PushUpdateDeleteTags updates a number of delete tags with context +func PushUpdateDeleteTags(ctx context.Context, repo *Repository, tags []string) error { if len(tags) == 0 { return nil } @@ -448,58 +453,6 @@ func PushUpdateDeleteTagsContext(ctx context.Context, repo *Repository, tags []s return nil } -// PushUpdateDeleteTag must be called for any push actions to delete tag -func PushUpdateDeleteTag(ctx context.Context, repo *Repository, tagName string) error { - rel, err := GetRelease(ctx, repo.ID, tagName) - if err != nil { - if IsErrReleaseNotExist(err) { - return nil - } - return fmt.Errorf("GetRelease: %w", err) - } - if rel.IsTag { - if _, err = db.DeleteByID[Release](ctx, rel.ID); err != nil { - return fmt.Errorf("Delete: %w", err) - } - } else { - rel.IsDraft = true - rel.NumCommits = 0 - rel.Sha1 = "" - if _, err = db.GetEngine(ctx).ID(rel.ID).AllCols().Update(rel); err != nil { - return fmt.Errorf("Update: %w", err) - } - } - - return nil -} - -// SaveOrUpdateTag must be called for any push actions to add tag -func SaveOrUpdateTag(ctx context.Context, repo *Repository, newRel *Release) error { - rel, err := GetRelease(ctx, repo.ID, newRel.TagName) - if err != nil && !IsErrReleaseNotExist(err) { - return fmt.Errorf("GetRelease: %w", err) - } - - if rel == nil { - rel = newRel - if _, err = db.GetEngine(ctx).Insert(rel); err != nil { - return fmt.Errorf("InsertOne: %w", err) - } - } else { - rel.Sha1 = newRel.Sha1 - rel.CreatedUnix = newRel.CreatedUnix - rel.NumCommits = newRel.NumCommits - rel.IsDraft = false - if rel.IsTag && newRel.PublisherID > 0 { - rel.PublisherID = newRel.PublisherID - } - if _, err = db.GetEngine(ctx).ID(rel.ID).AllCols().Update(rel); err != nil { - return fmt.Errorf("Update: %w", 
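Illustrative sketch (not part of the diff): release.go gains UpdateReleaseNumCommits, a narrow update that writes only the num_commits column. The wrapper below is hypothetical and assumes NumCommits is an int64 field on Release.

// Illustrative sketch only; refreshNumCommits is hypothetical.
package example

import (
	"context"

	repo_model "code.gitea.io/gitea/models/repo"
)

// refreshNumCommits uses the new narrow helper, which writes only the
// num_commits column instead of updating the whole release row.
func refreshNumCommits(ctx context.Context, rel *repo_model.Release, numCommits int64) error {
	rel.NumCommits = numCommits
	return repo_model.UpdateReleaseNumCommits(ctx, rel)
}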
err) - } - } - return nil -} - // RemapExternalUser ExternalUserRemappable interface func (r *Release) RemapExternalUser(externalName string, externalID, userID int64) error { r.OriginalAuthor = externalName @@ -558,3 +511,8 @@ func FindTagsByCommitIDs(ctx context.Context, repoID int64, commitIDs ...string) } return res, nil } + +func DeleteRepoReleases(ctx context.Context, repoID int64) error { + _, err := db.GetEngine(ctx).Where("repo_id = ?", repoID).Delete(new(Release)) + return err +} diff --git a/models/repo/repo.go b/models/repo/repo.go index 5ef4d470c3..34d1bf55f6 100644 --- a/models/repo/repo.go +++ b/models/repo/repo.go @@ -5,6 +5,7 @@ package repo import ( "context" + "errors" "fmt" "html/template" "maps" @@ -14,12 +15,14 @@ import ( "regexp" "strconv" "strings" + "sync" "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/git" + giturl "code.gitea.io/gitea/modules/git/url" "code.gitea.io/gitea/modules/httplib" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/markup" @@ -57,23 +60,42 @@ type ErrRepoIsArchived struct { } func (err ErrRepoIsArchived) Error() string { - return fmt.Sprintf("%s is archived", err.Repo.LogString()) + return err.Repo.LogString() + " is archived" } -var ( - validRepoNamePattern = regexp.MustCompile(`[-.\w]+`) - invalidRepoNamePattern = regexp.MustCompile(`[.]{2,}`) - reservedRepoNames = []string{".", "..", "-"} - reservedRepoPatterns = []string{"*.git", "*.wiki", "*.rss", "*.atom"} -) +type globalVarsStruct struct { + validRepoNamePattern *regexp.Regexp + invalidRepoNamePattern *regexp.Regexp + reservedRepoNames []string + reservedRepoNamePatterns []string +} + +var globalVars = sync.OnceValue(func() *globalVarsStruct { + return &globalVarsStruct{ + validRepoNamePattern: regexp.MustCompile(`^[-.\w]+$`), + invalidRepoNamePattern: regexp.MustCompile(`[.]{2,}`), + reservedRepoNames: []string{".", "..", "-"}, + reservedRepoNamePatterns: []string{"*.wiki", "*.git", "*.rss", "*.atom"}, + } +}) // IsUsableRepoName returns true when name is usable func IsUsableRepoName(name string) error { - if !validRepoNamePattern.MatchString(name) || invalidRepoNamePattern.MatchString(name) { + vars := globalVars() + if !vars.validRepoNamePattern.MatchString(name) || vars.invalidRepoNamePattern.MatchString(name) { // Note: usually this error is normally caught up earlier in the UI return db.ErrNameCharsNotAllowed{Name: name} } - return db.IsUsableName(reservedRepoNames, reservedRepoPatterns, name) + return db.IsUsableName(vars.reservedRepoNames, vars.reservedRepoNamePatterns, name) +} + +// IsValidSSHAccessRepoName is like IsUsableRepoName, but it allows "*.wiki" because wiki repo needs to be accessed in SSH code +func IsValidSSHAccessRepoName(name string) bool { + vars := globalVars() + if !vars.validRepoNamePattern.MatchString(name) || vars.invalidRepoNamePattern.MatchString(name) { + return false + } + return db.IsUsableName(vars.reservedRepoNames, vars.reservedRepoNamePatterns[1:], name) == nil } // TrustModelType defines the types of trust model for this repository @@ -203,12 +225,24 @@ func init() { db.RegisterModel(new(Repository)) } -func (repo *Repository) GetName() string { - return repo.Name +func RelativePath(ownerName, repoName string) string { + return strings.ToLower(ownerName) + "/" + strings.ToLower(repoName) + ".git" } -func (repo *Repository) GetOwnerName() string { - return repo.OwnerName +// RelativePath should be an unix 
style path like username/reponame.git +func (repo *Repository) RelativePath() string { + return RelativePath(repo.OwnerName, repo.Name) +} + +type StorageRepo string + +// RelativePath should be an unix style path like username/reponame.git +func (sr StorageRepo) RelativePath() string { + return string(sr) +} + +func (repo *Repository) WikiStorageRepo() StorageRepo { + return StorageRepo(strings.ToLower(repo.OwnerName) + "/" + strings.ToLower(repo.Name) + ".wiki.git") } // SanitizedOriginalURL returns a sanitized OriginalURL @@ -279,6 +313,8 @@ func (repo *Repository) IsBroken() bool { } // MarkAsBrokenEmpty marks the repo as broken and empty +// FIXME: the status "broken" and "is_empty" were abused, +// The code always set them together, no way to distinguish whether a repo is really "empty" or "broken" func (repo *Repository) MarkAsBrokenEmpty() { repo.Status = RepositoryBroken repo.IsEmpty = true @@ -399,32 +435,33 @@ func (repo *Repository) MustGetUnit(ctx context.Context, tp unit.Type) *RepoUnit return ru } - if tp == unit.TypeExternalWiki { + switch tp { + case unit.TypeExternalWiki: return &RepoUnit{ Type: tp, Config: new(ExternalWikiConfig), } - } else if tp == unit.TypeExternalTracker { + case unit.TypeExternalTracker: return &RepoUnit{ Type: tp, Config: new(ExternalTrackerConfig), } - } else if tp == unit.TypePullRequests { + case unit.TypePullRequests: return &RepoUnit{ Type: tp, Config: new(PullRequestsConfig), } - } else if tp == unit.TypeIssues { + case unit.TypeIssues: return &RepoUnit{ Type: tp, Config: new(IssuesConfig), } - } else if tp == unit.TypeActions { + case unit.TypeActions: return &RepoUnit{ Type: tp, Config: new(ActionsConfig), } - } else if tp == unit.TypeProjects { + case unit.TypeProjects: cfg := new(ProjectsConfig) cfg.ProjectsMode = ProjectsModeNone return &RepoUnit{ @@ -484,15 +521,15 @@ func (repo *Repository) composeCommonMetas(ctx context.Context) map[string]strin "repo": repo.Name, } - unit, err := repo.GetUnit(ctx, unit.TypeExternalTracker) + unitExternalTracker, err := repo.GetUnit(ctx, unit.TypeExternalTracker) if err == nil { - metas["format"] = unit.ExternalTrackerConfig().ExternalTrackerFormat - switch unit.ExternalTrackerConfig().ExternalTrackerStyle { + metas["format"] = unitExternalTracker.ExternalTrackerConfig().ExternalTrackerFormat + switch unitExternalTracker.ExternalTrackerConfig().ExternalTrackerStyle { case markup.IssueNameStyleAlphanumeric: metas["style"] = markup.IssueNameStyleAlphanumeric case markup.IssueNameStyleRegexp: metas["style"] = markup.IssueNameStyleRegexp - metas["regexp"] = unit.ExternalTrackerConfig().ExternalTrackerRegexpPattern + metas["regexp"] = unitExternalTracker.ExternalTrackerConfig().ExternalTrackerRegexpPattern default: metas["style"] = markup.IssueNameStyleNumeric } @@ -516,11 +553,11 @@ func (repo *Repository) composeCommonMetas(ctx context.Context) map[string]strin return repo.commonRenderingMetas } -// ComposeMetas composes a map of metas for properly rendering comments or comment-like contents (commit message) -func (repo *Repository) ComposeMetas(ctx context.Context) map[string]string { +// ComposeCommentMetas composes a map of metas for properly rendering comments or comment-like contents (commit message) +func (repo *Repository) ComposeCommentMetas(ctx context.Context) map[string]string { metas := maps.Clone(repo.composeCommonMetas(ctx)) - metas["markdownLineBreakStyle"] = "comment" - metas["markupAllowShortIssuePattern"] = "true" + metas["markdownNewLineHardBreak"] = 
strconv.FormatBool(setting.Markdown.RenderOptionsComment.NewLineHardBreak) + metas["markupAllowShortIssuePattern"] = strconv.FormatBool(setting.Markdown.RenderOptionsComment.ShortIssuePattern) return metas } @@ -528,16 +565,17 @@ func (repo *Repository) ComposeMetas(ctx context.Context) map[string]string { func (repo *Repository) ComposeWikiMetas(ctx context.Context) map[string]string { // does wiki need the "teams" and "org" from common metas? metas := maps.Clone(repo.composeCommonMetas(ctx)) - metas["markdownLineBreakStyle"] = "document" - metas["markupAllowShortIssuePattern"] = "true" + metas["markdownNewLineHardBreak"] = strconv.FormatBool(setting.Markdown.RenderOptionsWiki.NewLineHardBreak) + metas["markupAllowShortIssuePattern"] = strconv.FormatBool(setting.Markdown.RenderOptionsWiki.ShortIssuePattern) return metas } -// ComposeDocumentMetas composes a map of metas for properly rendering documents (repo files) -func (repo *Repository) ComposeDocumentMetas(ctx context.Context) map[string]string { +// ComposeRepoFileMetas composes a map of metas for properly rendering documents (repo files) +func (repo *Repository) ComposeRepoFileMetas(ctx context.Context) map[string]string { // does document(file) need the "teams" and "org" from common metas? metas := maps.Clone(repo.composeCommonMetas(ctx)) - metas["markdownLineBreakStyle"] = "document" + metas["markdownNewLineHardBreak"] = strconv.FormatBool(setting.Markdown.RenderOptionsRepoFile.NewLineHardBreak) + metas["markupAllowShortIssuePattern"] = strconv.FormatBool(setting.Markdown.RenderOptionsRepoFile.ShortIssuePattern) return metas } @@ -615,7 +653,7 @@ func (repo *Repository) AllowsPulls(ctx context.Context) bool { // CanEnableEditor returns true if repository meets the requirements of web editor. func (repo *Repository) CanEnableEditor() bool { - return !repo.IsMirror + return !repo.IsMirror && !repo.IsArchived } // DescriptionHTML does special handles to description and return HTML string. @@ -632,17 +670,31 @@ func (repo *Repository) DescriptionHTML(ctx context.Context) template.HTML { type CloneLink struct { SSH string HTTPS string + Tea string } -// ComposeHTTPSCloneURL returns HTTPS clone URL based on given owner and repository name. -func ComposeHTTPSCloneURL(owner, repo string) string { - return fmt.Sprintf("%s%s/%s.git", setting.AppURL, url.PathEscape(owner), url.PathEscape(repo)) +// ComposeHTTPSCloneURL returns HTTPS clone URL based on the given owner and repository name. +func ComposeHTTPSCloneURL(ctx context.Context, owner, repo string) string { + return fmt.Sprintf("%s%s/%s.git", httplib.GuessCurrentAppURL(ctx), url.PathEscape(owner), url.PathEscape(repo)) } -func ComposeSSHCloneURL(ownerName, repoName string) string { +// ComposeSSHCloneURL returns SSH clone URL based on the given owner and repository name. +func ComposeSSHCloneURL(doer *user_model.User, ownerName, repoName string) string { sshUser := setting.SSH.User sshDomain := setting.SSH.Domain + if sshUser == "(DOER_USERNAME)" { + // Some users use SSH reverse-proxy and need to use the current signed-in username as the SSH user + // to make the SSH reverse-proxy could prepare the user's public keys ahead. + // For most cases we have the correct "doer", then use it as the SSH user. + // If we can't get the doer, then use the built-in SSH user. 
+ if doer != nil { + sshUser = doer.Name + } else { + sshUser = setting.SSH.BuiltinServerUser + } + } + // non-standard port, it must use full URI if setting.SSH.Port != 22 { sshHost := net.JoinHostPort(sshDomain, strconv.Itoa(setting.SSH.Port)) @@ -660,21 +712,26 @@ func ComposeSSHCloneURL(ownerName, repoName string) string { return fmt.Sprintf("%s@%s:%s/%s.git", sshUser, sshHost, url.PathEscape(ownerName), url.PathEscape(repoName)) } -func (repo *Repository) cloneLink(isWiki bool) *CloneLink { - repoName := repo.Name - if isWiki { - repoName += ".wiki" - } +// ComposeTeaCloneCommand returns Tea CLI clone command based on the given owner and repository name. +func ComposeTeaCloneCommand(ctx context.Context, owner, repo string) string { + return fmt.Sprintf("tea clone %s/%s", url.PathEscape(owner), url.PathEscape(repo)) +} - cl := new(CloneLink) - cl.SSH = ComposeSSHCloneURL(repo.OwnerName, repoName) - cl.HTTPS = ComposeHTTPSCloneURL(repo.OwnerName, repoName) - return cl +func (repo *Repository) cloneLink(ctx context.Context, doer *user_model.User, repoPathName string) *CloneLink { + return &CloneLink{ + SSH: ComposeSSHCloneURL(doer, repo.OwnerName, repoPathName), + HTTPS: ComposeHTTPSCloneURL(ctx, repo.OwnerName, repoPathName), + Tea: ComposeTeaCloneCommand(ctx, repo.OwnerName, repoPathName), + } } // CloneLink returns clone URLs of repository. -func (repo *Repository) CloneLink() (cl *CloneLink) { - return repo.cloneLink(false) +func (repo *Repository) CloneLink(ctx context.Context, doer *user_model.User) (cl *CloneLink) { + return repo.cloneLink(ctx, doer, repo.Name) +} + +func (repo *Repository) CloneLinkGeneral(ctx context.Context) (cl *CloneLink) { + return repo.cloneLink(ctx, nil /* no doer, use a general git user */, repo.Name) } // GetOriginalURLHostname returns the hostname of a URL or the URL @@ -770,47 +827,25 @@ func GetRepositoryByName(ctx context.Context, ownerID int64, name string) (*Repo return &repo, err } -// getRepositoryURLPathSegments returns segments (owner, reponame) extracted from a url -func getRepositoryURLPathSegments(repoURL string) []string { - if strings.HasPrefix(repoURL, setting.AppURL) { - return strings.Split(strings.TrimPrefix(repoURL, setting.AppURL), "/") - } - - sshURLVariants := [4]string{ - setting.SSH.Domain + ":", - setting.SSH.User + "@" + setting.SSH.Domain + ":", - "git+ssh://" + setting.SSH.Domain + "/", - "git+ssh://" + setting.SSH.User + "@" + setting.SSH.Domain + "/", - } - - for _, sshURL := range sshURLVariants { - if strings.HasPrefix(repoURL, sshURL) { - return strings.Split(strings.TrimPrefix(repoURL, sshURL), "/") - } - } - - return nil -} - // GetRepositoryByURL returns the repository by given url func GetRepositoryByURL(ctx context.Context, repoURL string) (*Repository, error) { - // possible urls for git: - // https://my.domain/sub-path/<owner>/<repo>.git - // https://my.domain/sub-path/<owner>/<repo> - // git+ssh://user@my.domain/<owner>/<repo>.git - // git+ssh://user@my.domain/<owner>/<repo> - // user@my.domain:<owner>/<repo>.git - // user@my.domain:<owner>/<repo> - - pathSegments := getRepositoryURLPathSegments(repoURL) - - if len(pathSegments) != 2 { - return nil, fmt.Errorf("unknown or malformed repository URL") + ret, err := giturl.ParseRepositoryURL(ctx, repoURL) + if err != nil || ret.OwnerName == "" { + return nil, errors.New("unknown or malformed repository URL") } + return GetRepositoryByOwnerAndName(ctx, ret.OwnerName, ret.RepoName) +} - ownerName := pathSegments[0] - repoName := strings.TrimSuffix(pathSegments[1], 
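Illustrative sketch (not part of the diff): CloneLink now takes a context and a doer, and the struct carries a ready-made tea command alongside the SSH and HTTPS URLs. The helper below is hypothetical and only shows how a caller reads the result.

// Illustrative sketch only; cloneTargets is hypothetical.
package example

import (
	"context"

	repo_model "code.gitea.io/gitea/models/repo"
	user_model "code.gitea.io/gitea/models/user"
)

// cloneTargets shows the new CloneLink API: the context is used to guess the
// current app URL for HTTPS, the doer feeds the optional "(DOER_USERNAME)"
// SSH user, and a tea clone command is now included.
func cloneTargets(ctx context.Context, repo *repo_model.Repository, doer *user_model.User) (ssh, https, tea string) {
	cl := repo.CloneLink(ctx, doer)
	return cl.SSH, cl.HTTPS, cl.Tea
}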
".git") - return GetRepositoryByOwnerAndName(ctx, ownerName, repoName) +// GetRepositoryByURLRelax also accepts an SSH clone URL without user part +func GetRepositoryByURLRelax(ctx context.Context, repoURL string) (*Repository, error) { + if !strings.Contains(repoURL, "://") && !strings.Contains(repoURL, "@") { + // convert "example.com:owner/repo" to "@example.com:owner/repo" + p1, p2, p3 := strings.Index(repoURL, "."), strings.Index(repoURL, ":"), strings.Index(repoURL, "/") + if 0 < p1 && p1 < p2 && p2 < p3 { + repoURL = "@" + repoURL + } + } + return GetRepositoryByURL(ctx, repoURL) } // GetRepositoryByID returns the repository by given id if exists. @@ -828,6 +863,9 @@ func GetRepositoryByID(ctx context.Context, id int64) (*Repository, error) { // GetRepositoriesMapByIDs returns the repositories by given id slice. func GetRepositoriesMapByIDs(ctx context.Context, ids []int64) (map[int64]*Repository, error) { repos := make(map[int64]*Repository, len(ids)) + if len(ids) == 0 { + return repos, nil + } return repos, db.GetEngine(ctx).In("id", ids).Find(&repos) } diff --git a/models/repo/repo_list.go b/models/repo/repo_list.go index 9bed2e9197..f2cdd2f284 100644 --- a/models/repo/repo_list.go +++ b/models/repo/repo_list.go @@ -21,11 +21,6 @@ import ( "xorm.io/builder" ) -// FindReposMapByIDs find repos as map -func FindReposMapByIDs(ctx context.Context, repoIDs []int64, res map[int64]*Repository) error { - return db.GetEngine(ctx).In("id", repoIDs).Find(&res) -} - // RepositoryListDefaultPageSize is the default number of repositories // to load in memory when running administrative tasks on all (or almost // all) of them. @@ -364,7 +359,7 @@ func UserOrgPublicUnitRepoCond(userID, orgID int64) builder.Cond { } // SearchRepositoryCondition creates a query condition according search repository options -func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond { +func SearchRepositoryCondition(opts SearchRepoOptions) builder.Cond { cond := builder.NewCond() if opts.Private { @@ -454,7 +449,7 @@ func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond { if opts.Keyword != "" { // separate keyword subQueryCond := builder.NewCond() - for _, v := range strings.Split(opts.Keyword, ",") { + for v := range strings.SplitSeq(opts.Keyword, ",") { if opts.TopicOnly { subQueryCond = subQueryCond.Or(builder.Eq{"topic.name": strings.ToLower(v)}) } else { @@ -469,7 +464,7 @@ func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond { keywordCond := builder.In("id", subQuery) if !opts.TopicOnly { likes := builder.NewCond() - for _, v := range strings.Split(opts.Keyword, ",") { + for v := range strings.SplitSeq(opts.Keyword, ",") { likes = likes.Or(builder.Like{"lower_name", strings.ToLower(v)}) // If the string looks like "org/repo", match against that pattern too @@ -556,18 +551,18 @@ func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond { // SearchRepository returns repositories based on search options, // it returns results in given range and number of total results. 
-func SearchRepository(ctx context.Context, opts *SearchRepoOptions) (RepositoryList, int64, error) { +func SearchRepository(ctx context.Context, opts SearchRepoOptions) (RepositoryList, int64, error) { cond := SearchRepositoryCondition(opts) return SearchRepositoryByCondition(ctx, opts, cond, true) } // CountRepository counts repositories based on search options, -func CountRepository(ctx context.Context, opts *SearchRepoOptions) (int64, error) { +func CountRepository(ctx context.Context, opts SearchRepoOptions) (int64, error) { return db.GetEngine(ctx).Where(SearchRepositoryCondition(opts)).Count(new(Repository)) } // SearchRepositoryByCondition search repositories by condition -func SearchRepositoryByCondition(ctx context.Context, opts *SearchRepoOptions, cond builder.Cond, loadAttributes bool) (RepositoryList, int64, error) { +func SearchRepositoryByCondition(ctx context.Context, opts SearchRepoOptions, cond builder.Cond, loadAttributes bool) (RepositoryList, int64, error) { sess, count, err := searchRepositoryByCondition(ctx, opts, cond) if err != nil { return nil, 0, err @@ -595,23 +590,25 @@ func SearchRepositoryByCondition(ctx context.Context, opts *SearchRepoOptions, c return repos, count, nil } -func searchRepositoryByCondition(ctx context.Context, opts *SearchRepoOptions, cond builder.Cond) (db.Engine, int64, error) { - if opts.Page <= 0 { - opts.Page = 1 +func searchRepositoryByCondition(ctx context.Context, opts SearchRepoOptions, cond builder.Cond) (db.Engine, int64, error) { + page := opts.Page + if page <= 0 { + page = 1 } - if len(opts.OrderBy) == 0 { - opts.OrderBy = db.SearchOrderByAlphabetically + orderBy := opts.OrderBy + if len(orderBy) == 0 { + orderBy = db.SearchOrderByAlphabetically } args := make([]any, 0) if opts.PriorityOwnerID > 0 { - opts.OrderBy = db.SearchOrderBy(fmt.Sprintf("CASE WHEN owner_id = ? THEN 0 ELSE owner_id END, %s", opts.OrderBy)) + orderBy = db.SearchOrderBy(fmt.Sprintf("CASE WHEN owner_id = ? THEN 0 ELSE owner_id END, %s", orderBy)) args = append(args, opts.PriorityOwnerID) } else if strings.Count(opts.Keyword, "/") == 1 { // With "owner/repo" search times, prioritise results which match the owner field orgName := strings.Split(opts.Keyword, "/")[0] - opts.OrderBy = db.SearchOrderBy(fmt.Sprintf("CASE WHEN owner_name LIKE ? THEN 0 ELSE 1 END, %s", opts.OrderBy)) + orderBy = db.SearchOrderBy(fmt.Sprintf("CASE WHEN owner_name LIKE ? THEN 0 ELSE 1 END, %s", orderBy)) args = append(args, orgName) } @@ -628,9 +625,9 @@ func searchRepositoryByCondition(ctx context.Context, opts *SearchRepoOptions, c } } - sess = sess.Where(cond).OrderBy(opts.OrderBy.String(), args...) + sess = sess.Where(cond).OrderBy(orderBy.String(), args...) if opts.PageSize > 0 { - sess = sess.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize) + sess = sess.Limit(opts.PageSize, (page-1)*opts.PageSize) } return sess, count, nil } @@ -694,14 +691,14 @@ func AccessibleRepositoryCondition(user *user_model.User, unitType unit.Type) bu // SearchRepositoryByName takes keyword and part of repository name to search, // it returns results in given range and number of total results. 
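// A minimal usage sketch, assuming the by-value SearchRepoOptions signatures introduced in the
// surrounding hunks; the helper name, keyword and paging values are illustrative only:
func exampleSearchByValue(ctx context.Context) error {
	repos, count, err := SearchRepositoryByName(ctx, SearchRepoOptions{
		Keyword:     "big_test_",
		ListOptions: db.ListOptions{Page: 1, PageSize: 10},
		Private:     true,
	})
	if err != nil {
		return err
	}
	log.Info("matched %d repositories in total, %d on this page", count, len(repos))
	return nil
}

// The keyword handling in SearchRepositoryCondition above now ranges over strings.SplitSeq
// (Go 1.24+) instead of allocating an intermediate slice with strings.Split. A small sketch of
// that pattern; the LIKE column is taken from the hunks above, the function name is illustrative:
func exampleKeywordLikes(keyword string) builder.Cond {
	likes := builder.NewCond()
	for v := range strings.SplitSeq(keyword, ",") { // yields each comma-separated term lazily
		likes = likes.Or(builder.Like{"lower_name", strings.ToLower(v)})
	}
	return likes
}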
-func SearchRepositoryByName(ctx context.Context, opts *SearchRepoOptions) (RepositoryList, int64, error) { +func SearchRepositoryByName(ctx context.Context, opts SearchRepoOptions) (RepositoryList, int64, error) { opts.IncludeDescription = false return SearchRepository(ctx, opts) } // SearchRepositoryIDs takes keyword and part of repository name to search, // it returns results in given range and number of total results. -func SearchRepositoryIDs(ctx context.Context, opts *SearchRepoOptions) ([]int64, int64, error) { +func SearchRepositoryIDs(ctx context.Context, opts SearchRepoOptions) ([]int64, int64, error) { opts.IncludeDescription = false cond := SearchRepositoryCondition(opts) @@ -745,7 +742,7 @@ func FindUserCodeAccessibleOwnerRepoIDs(ctx context.Context, ownerID int64, user } // GetUserRepositories returns a list of repositories of given user. -func GetUserRepositories(ctx context.Context, opts *SearchRepoOptions) (RepositoryList, int64, error) { +func GetUserRepositories(ctx context.Context, opts SearchRepoOptions) (RepositoryList, int64, error) { if len(opts.OrderBy) == 0 { opts.OrderBy = "updated_unix DESC" } @@ -772,5 +769,5 @@ func GetUserRepositories(ctx context.Context, opts *SearchRepoOptions) (Reposito sess = sess.Where(cond).OrderBy(opts.OrderBy.String()) repos := make(RepositoryList, 0, opts.PageSize) - return repos, count, db.SetSessionPagination(sess, opts).Find(&repos) + return repos, count, db.SetSessionPagination(sess, &opts).Find(&repos) } diff --git a/models/repo/repo_list_test.go b/models/repo/repo_list_test.go index ca6007f6c7..7eb76416c2 100644 --- a/models/repo/repo_list_test.go +++ b/models/repo/repo_list_test.go @@ -17,162 +17,162 @@ import ( func getTestCases() []struct { name string - opts *repo_model.SearchRepoOptions + opts repo_model.SearchRepoOptions count int } { testCases := []struct { name string - opts *repo_model.SearchRepoOptions + opts repo_model.SearchRepoOptions count int }{ { name: "PublicRepositoriesByName", - opts: &repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{PageSize: 10}, Collaborate: optional.Some(false)}, + opts: repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{PageSize: 10}, Collaborate: optional.Some(false)}, count: 7, }, { name: "PublicAndPrivateRepositoriesByName", - opts: &repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{Page: 1, PageSize: 10}, Private: true, Collaborate: optional.Some(false)}, + opts: repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{Page: 1, PageSize: 10}, Private: true, Collaborate: optional.Some(false)}, count: 14, }, { name: "PublicAndPrivateRepositoriesByNameWithPagesizeLimitFirstPage", - opts: &repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{Page: 1, PageSize: 5}, Private: true, Collaborate: optional.Some(false)}, + opts: repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{Page: 1, PageSize: 5}, Private: true, Collaborate: optional.Some(false)}, count: 14, }, { name: "PublicAndPrivateRepositoriesByNameWithPagesizeLimitSecondPage", - opts: &repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{Page: 2, PageSize: 5}, Private: true, Collaborate: optional.Some(false)}, + opts: repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{Page: 2, PageSize: 5}, Private: true, Collaborate: optional.Some(false)}, count: 14, }, { name: 
"PublicAndPrivateRepositoriesByNameWithPagesizeLimitThirdPage", - opts: &repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{Page: 3, PageSize: 5}, Private: true, Collaborate: optional.Some(false)}, + opts: repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{Page: 3, PageSize: 5}, Private: true, Collaborate: optional.Some(false)}, count: 14, }, { name: "PublicAndPrivateRepositoriesByNameWithPagesizeLimitFourthPage", - opts: &repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{Page: 3, PageSize: 5}, Private: true, Collaborate: optional.Some(false)}, + opts: repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{Page: 3, PageSize: 5}, Private: true, Collaborate: optional.Some(false)}, count: 14, }, { name: "PublicRepositoriesOfUser", - opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, Collaborate: optional.Some(false)}, + opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, Collaborate: optional.Some(false)}, count: 2, }, { name: "PublicRepositoriesOfUser2", - opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 18, Collaborate: optional.Some(false)}, + opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 18, Collaborate: optional.Some(false)}, count: 0, }, { name: "PublicRepositoriesOfOrg3", - opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 20, Collaborate: optional.Some(false)}, + opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 20, Collaborate: optional.Some(false)}, count: 2, }, { name: "PublicAndPrivateRepositoriesOfUser", - opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, Private: true, Collaborate: optional.Some(false)}, + opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, Private: true, Collaborate: optional.Some(false)}, count: 4, }, { name: "PublicAndPrivateRepositoriesOfUser2", - opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 18, Private: true, Collaborate: optional.Some(false)}, + opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 18, Private: true, Collaborate: optional.Some(false)}, count: 0, }, { name: "PublicAndPrivateRepositoriesOfOrg3", - opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 20, Private: true, Collaborate: optional.Some(false)}, + opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 20, Private: true, Collaborate: optional.Some(false)}, count: 4, }, { name: "PublicRepositoriesOfUserIncludingCollaborative", - opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15}, + opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15}, count: 5, }, { name: "PublicRepositoriesOfUser2IncludingCollaborative", - opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 18}, + opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 18}, count: 1, }, { name: "PublicRepositoriesOfOrg3IncludingCollaborative", - opts: &repo_model.SearchRepoOptions{ListOptions: 
db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 20}, + opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 20}, count: 3, }, { name: "PublicAndPrivateRepositoriesOfUserIncludingCollaborative", - opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, Private: true}, + opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, Private: true}, count: 9, }, { name: "PublicAndPrivateRepositoriesOfUser2IncludingCollaborative", - opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 18, Private: true}, + opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 18, Private: true}, count: 4, }, { name: "PublicAndPrivateRepositoriesOfOrg3IncludingCollaborative", - opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 20, Private: true}, + opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 20, Private: true}, count: 7, }, { name: "PublicRepositoriesOfOrganization", - opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 17, Collaborate: optional.Some(false)}, + opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 17, Collaborate: optional.Some(false)}, count: 1, }, { name: "PublicAndPrivateRepositoriesOfOrganization", - opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 17, Private: true, Collaborate: optional.Some(false)}, + opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 17, Private: true, Collaborate: optional.Some(false)}, count: 2, }, { name: "AllPublic/PublicRepositoriesByName", - opts: &repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{PageSize: 10}, AllPublic: true, Collaborate: optional.Some(false)}, + opts: repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{PageSize: 10}, AllPublic: true, Collaborate: optional.Some(false)}, count: 7, }, { name: "AllPublic/PublicAndPrivateRepositoriesByName", - opts: &repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{Page: 1, PageSize: 10}, Private: true, AllPublic: true, Collaborate: optional.Some(false)}, + opts: repo_model.SearchRepoOptions{Keyword: "big_test_", ListOptions: db.ListOptions{Page: 1, PageSize: 10}, Private: true, AllPublic: true, Collaborate: optional.Some(false)}, count: 14, }, { name: "AllPublic/PublicRepositoriesOfUserIncludingCollaborative", - opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, AllPublic: true, Template: optional.Some(false)}, + opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, AllPublic: true, Template: optional.Some(false)}, count: 34, }, { name: "AllPublic/PublicAndPrivateRepositoriesOfUserIncludingCollaborative", - opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, Private: true, AllPublic: true, AllLimited: true, Template: optional.Some(false)}, + opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, Private: true, AllPublic: true, AllLimited: true, Template: optional.Some(false)}, count: 39, }, { name: 
"AllPublic/PublicAndPrivateRepositoriesOfUserIncludingCollaborativeByName", - opts: &repo_model.SearchRepoOptions{Keyword: "test", ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, Private: true, AllPublic: true}, + opts: repo_model.SearchRepoOptions{Keyword: "test", ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, Private: true, AllPublic: true}, count: 15, }, { name: "AllPublic/PublicAndPrivateRepositoriesOfUser2IncludingCollaborativeByName", - opts: &repo_model.SearchRepoOptions{Keyword: "test", ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 18, Private: true, AllPublic: true}, + opts: repo_model.SearchRepoOptions{Keyword: "test", ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 18, Private: true, AllPublic: true}, count: 13, }, { name: "AllPublic/PublicRepositoriesOfOrganization", - opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 17, AllPublic: true, Collaborate: optional.Some(false), Template: optional.Some(false)}, + opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 17, AllPublic: true, Collaborate: optional.Some(false), Template: optional.Some(false)}, count: 34, }, { name: "AllTemplates", - opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, Template: optional.Some(true)}, + opts: repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, Template: optional.Some(true)}, count: 2, }, { name: "OwnerSlashRepoSearch", - opts: &repo_model.SearchRepoOptions{Keyword: "user/repo2", ListOptions: db.ListOptions{Page: 1, PageSize: 10}, Private: true, OwnerID: 0}, + opts: repo_model.SearchRepoOptions{Keyword: "user/repo2", ListOptions: db.ListOptions{Page: 1, PageSize: 10}, Private: true, OwnerID: 0}, count: 2, }, { name: "OwnerSlashSearch", - opts: &repo_model.SearchRepoOptions{Keyword: "user20/", ListOptions: db.ListOptions{Page: 1, PageSize: 10}, Private: true, OwnerID: 0}, + opts: repo_model.SearchRepoOptions{Keyword: "user20/", ListOptions: db.ListOptions{Page: 1, PageSize: 10}, Private: true, OwnerID: 0}, count: 4, }, } @@ -184,7 +184,7 @@ func TestSearchRepository(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) // test search public repository on explore page - repos, count, err := repo_model.SearchRepositoryByName(db.DefaultContext, &repo_model.SearchRepoOptions{ + repos, count, err := repo_model.SearchRepositoryByName(db.DefaultContext, repo_model.SearchRepoOptions{ ListOptions: db.ListOptions{ Page: 1, PageSize: 10, @@ -199,7 +199,7 @@ func TestSearchRepository(t *testing.T) { } assert.Equal(t, int64(1), count) - repos, count, err = repo_model.SearchRepositoryByName(db.DefaultContext, &repo_model.SearchRepoOptions{ + repos, count, err = repo_model.SearchRepositoryByName(db.DefaultContext, repo_model.SearchRepoOptions{ ListOptions: db.ListOptions{ Page: 1, PageSize: 10, @@ -213,7 +213,7 @@ func TestSearchRepository(t *testing.T) { assert.Len(t, repos, 2) // test search private repository on explore page - repos, count, err = repo_model.SearchRepositoryByName(db.DefaultContext, &repo_model.SearchRepoOptions{ + repos, count, err = repo_model.SearchRepositoryByName(db.DefaultContext, repo_model.SearchRepoOptions{ ListOptions: db.ListOptions{ Page: 1, PageSize: 10, @@ -229,7 +229,7 @@ func TestSearchRepository(t *testing.T) { } assert.Equal(t, int64(1), count) - repos, count, err = repo_model.SearchRepositoryByName(db.DefaultContext, &repo_model.SearchRepoOptions{ 
+ repos, count, err = repo_model.SearchRepositoryByName(db.DefaultContext, repo_model.SearchRepoOptions{ ListOptions: db.ListOptions{ Page: 1, PageSize: 10, @@ -244,14 +244,14 @@ func TestSearchRepository(t *testing.T) { assert.Len(t, repos, 3) // Test non existing owner - repos, count, err = repo_model.SearchRepositoryByName(db.DefaultContext, &repo_model.SearchRepoOptions{OwnerID: unittest.NonexistentID}) + repos, count, err = repo_model.SearchRepositoryByName(db.DefaultContext, repo_model.SearchRepoOptions{OwnerID: unittest.NonexistentID}) assert.NoError(t, err) assert.Empty(t, repos) assert.Equal(t, int64(0), count) // Test search within description - repos, count, err = repo_model.SearchRepository(db.DefaultContext, &repo_model.SearchRepoOptions{ + repos, count, err = repo_model.SearchRepository(db.DefaultContext, repo_model.SearchRepoOptions{ ListOptions: db.ListOptions{ Page: 1, PageSize: 10, @@ -268,7 +268,7 @@ func TestSearchRepository(t *testing.T) { assert.Equal(t, int64(1), count) // Test NOT search within description - repos, count, err = repo_model.SearchRepository(db.DefaultContext, &repo_model.SearchRepoOptions{ + repos, count, err = repo_model.SearchRepository(db.DefaultContext, repo_model.SearchRepoOptions{ ListOptions: db.ListOptions{ Page: 1, PageSize: 10, @@ -374,22 +374,22 @@ func TestSearchRepositoryByTopicName(t *testing.T) { testCases := []struct { name string - opts *repo_model.SearchRepoOptions + opts repo_model.SearchRepoOptions count int }{ { name: "AllPublic/SearchPublicRepositoriesFromTopicAndName", - opts: &repo_model.SearchRepoOptions{OwnerID: 21, AllPublic: true, Keyword: "graphql"}, + opts: repo_model.SearchRepoOptions{OwnerID: 21, AllPublic: true, Keyword: "graphql"}, count: 2, }, { name: "AllPublic/OnlySearchPublicRepositoriesFromTopic", - opts: &repo_model.SearchRepoOptions{OwnerID: 21, AllPublic: true, Keyword: "graphql", TopicOnly: true}, + opts: repo_model.SearchRepoOptions{OwnerID: 21, AllPublic: true, Keyword: "graphql", TopicOnly: true}, count: 1, }, { name: "AllPublic/OnlySearchMultipleKeywordPublicRepositoriesFromTopic", - opts: &repo_model.SearchRepoOptions{OwnerID: 21, AllPublic: true, Keyword: "graphql,golang", TopicOnly: true}, + opts: repo_model.SearchRepoOptions{OwnerID: 21, AllPublic: true, Keyword: "graphql,golang", TopicOnly: true}, count: 2, }, } diff --git a/models/repo/repo_test.go b/models/repo/repo_test.go index 001f8ecd84..66abe864fc 100644 --- a/models/repo/repo_test.go +++ b/models/repo/repo_test.go @@ -16,6 +16,7 @@ import ( "code.gitea.io/gitea/modules/test" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) var ( @@ -85,7 +86,7 @@ func TestMetas(t *testing.T) { repo.Units = nil - metas := repo.ComposeMetas(db.DefaultContext) + metas := repo.ComposeCommentMetas(db.DefaultContext) assert.Equal(t, "testRepo", metas["repo"]) assert.Equal(t, "testOwner", metas["user"]) @@ -99,7 +100,7 @@ func TestMetas(t *testing.T) { testSuccess := func(expectedStyle string) { repo.Units = []*RepoUnit{&externalTracker} repo.commonRenderingMetas = nil - metas := repo.ComposeMetas(db.DefaultContext) + metas := repo.ComposeCommentMetas(db.DefaultContext) assert.Equal(t, expectedStyle, metas["style"]) assert.Equal(t, "testRepo", metas["repo"]) assert.Equal(t, "testOwner", metas["user"]) @@ -120,7 +121,7 @@ func TestMetas(t *testing.T) { repo, err := GetRepositoryByID(db.DefaultContext, 3) assert.NoError(t, err) - metas = repo.ComposeMetas(db.DefaultContext) + metas = repo.ComposeCommentMetas(db.DefaultContext) 
assert.Contains(t, metas, "org") assert.Contains(t, metas, "teams") assert.Equal(t, "org3", metas["org"]) @@ -132,60 +133,43 @@ func TestGetRepositoryByURL(t *testing.T) { t.Run("InvalidPath", func(t *testing.T) { repo, err := GetRepositoryByURL(db.DefaultContext, "something") - assert.Nil(t, repo) assert.Error(t, err) }) - t.Run("ValidHttpURL", func(t *testing.T) { - test := func(t *testing.T, url string) { - repo, err := GetRepositoryByURL(db.DefaultContext, url) - - assert.NotNil(t, repo) - assert.NoError(t, err) - - assert.Equal(t, int64(2), repo.ID) - assert.Equal(t, int64(2), repo.OwnerID) - } + testRepo2 := func(t *testing.T, url string) { + repo, err := GetRepositoryByURL(db.DefaultContext, url) + require.NoError(t, err) + assert.EqualValues(t, 2, repo.ID) + assert.EqualValues(t, 2, repo.OwnerID) + } - test(t, "https://try.gitea.io/user2/repo2") - test(t, "https://try.gitea.io/user2/repo2.git") + t.Run("ValidHttpURL", func(t *testing.T) { + testRepo2(t, "https://try.gitea.io/user2/repo2") + testRepo2(t, "https://try.gitea.io/user2/repo2.git") }) t.Run("ValidGitSshURL", func(t *testing.T) { - test := func(t *testing.T, url string) { - repo, err := GetRepositoryByURL(db.DefaultContext, url) - - assert.NotNil(t, repo) - assert.NoError(t, err) - - assert.Equal(t, int64(2), repo.ID) - assert.Equal(t, int64(2), repo.OwnerID) - } - - test(t, "git+ssh://sshuser@try.gitea.io/user2/repo2") - test(t, "git+ssh://sshuser@try.gitea.io/user2/repo2.git") + testRepo2(t, "git+ssh://sshuser@try.gitea.io/user2/repo2") + testRepo2(t, "git+ssh://sshuser@try.gitea.io/user2/repo2.git") - test(t, "git+ssh://try.gitea.io/user2/repo2") - test(t, "git+ssh://try.gitea.io/user2/repo2.git") + testRepo2(t, "git+ssh://try.gitea.io/user2/repo2") + testRepo2(t, "git+ssh://try.gitea.io/user2/repo2.git") }) t.Run("ValidImplicitSshURL", func(t *testing.T) { - test := func(t *testing.T, url string) { - repo, err := GetRepositoryByURL(db.DefaultContext, url) - - assert.NotNil(t, repo) - assert.NoError(t, err) + testRepo2(t, "sshuser@try.gitea.io:user2/repo2") + testRepo2(t, "sshuser@try.gitea.io:user2/repo2.git") + testRelax := func(t *testing.T, url string) { + repo, err := GetRepositoryByURLRelax(db.DefaultContext, url) + require.NoError(t, err) assert.Equal(t, int64(2), repo.ID) assert.Equal(t, int64(2), repo.OwnerID) } - - test(t, "sshuser@try.gitea.io:user2/repo2") - test(t, "sshuser@try.gitea.io:user2/repo2.git") - - test(t, "try.gitea.io:user2/repo2") - test(t, "try.gitea.io:user2/repo2.git") + // TODO: it doesn't seem to be common git ssh URL, should we really support this? 
+ testRelax(t, "try.gitea.io:user2/repo2") + testRelax(t, "try.gitea.io:user2/repo2.git") }) } @@ -199,23 +183,30 @@ func TestComposeSSHCloneURL(t *testing.T) { setting.SSH.Domain = "domain" setting.SSH.Port = 22 setting.Repository.UseCompatSSHURI = false - assert.Equal(t, "git@domain:user/repo.git", ComposeSSHCloneURL("user", "repo")) + assert.Equal(t, "git@domain:user/repo.git", ComposeSSHCloneURL(&user_model.User{Name: "doer"}, "user", "repo")) setting.Repository.UseCompatSSHURI = true - assert.Equal(t, "ssh://git@domain/user/repo.git", ComposeSSHCloneURL("user", "repo")) + assert.Equal(t, "ssh://git@domain/user/repo.git", ComposeSSHCloneURL(&user_model.User{Name: "doer"}, "user", "repo")) // test SSH_DOMAIN while use non-standard SSH port setting.SSH.Port = 123 setting.Repository.UseCompatSSHURI = false - assert.Equal(t, "ssh://git@domain:123/user/repo.git", ComposeSSHCloneURL("user", "repo")) + assert.Equal(t, "ssh://git@domain:123/user/repo.git", ComposeSSHCloneURL(nil, "user", "repo")) setting.Repository.UseCompatSSHURI = true - assert.Equal(t, "ssh://git@domain:123/user/repo.git", ComposeSSHCloneURL("user", "repo")) + assert.Equal(t, "ssh://git@domain:123/user/repo.git", ComposeSSHCloneURL(nil, "user", "repo")) // test IPv6 SSH_DOMAIN setting.Repository.UseCompatSSHURI = false setting.SSH.Domain = "::1" setting.SSH.Port = 22 - assert.Equal(t, "git@[::1]:user/repo.git", ComposeSSHCloneURL("user", "repo")) + assert.Equal(t, "git@[::1]:user/repo.git", ComposeSSHCloneURL(nil, "user", "repo")) setting.SSH.Port = 123 - assert.Equal(t, "ssh://git@[::1]:123/user/repo.git", ComposeSSHCloneURL("user", "repo")) + assert.Equal(t, "ssh://git@[::1]:123/user/repo.git", ComposeSSHCloneURL(nil, "user", "repo")) + + setting.SSH.User = "(DOER_USERNAME)" + setting.SSH.Domain = "domain" + setting.SSH.Port = 22 + assert.Equal(t, "doer@domain:user/repo.git", ComposeSSHCloneURL(&user_model.User{Name: "doer"}, "user", "repo")) + setting.SSH.Port = 123 + assert.Equal(t, "ssh://doer@domain:123/user/repo.git", ComposeSSHCloneURL(&user_model.User{Name: "doer"}, "user", "repo")) } func TestIsUsableRepoName(t *testing.T) { @@ -225,7 +216,23 @@ func TestIsUsableRepoName(t *testing.T) { assert.Error(t, IsUsableRepoName("-")) assert.Error(t, IsUsableRepoName("🌞")) + assert.Error(t, IsUsableRepoName("the/repo")) assert.Error(t, IsUsableRepoName("the..repo")) assert.Error(t, IsUsableRepoName("foo.wiki")) assert.Error(t, IsUsableRepoName("foo.git")) + assert.Error(t, IsUsableRepoName("foo.RSS")) +} + +func TestIsValidSSHAccessRepoName(t *testing.T) { + assert.True(t, IsValidSSHAccessRepoName("a")) + assert.True(t, IsValidSSHAccessRepoName("-1_.")) + assert.True(t, IsValidSSHAccessRepoName(".profile")) + assert.True(t, IsValidSSHAccessRepoName("foo.wiki")) + + assert.False(t, IsValidSSHAccessRepoName("-")) + assert.False(t, IsValidSSHAccessRepoName("🌞")) + assert.False(t, IsValidSSHAccessRepoName("the/repo")) + assert.False(t, IsValidSSHAccessRepoName("the..repo")) + assert.False(t, IsValidSSHAccessRepoName("foo.git")) + assert.False(t, IsValidSSHAccessRepoName("foo.RSS")) } diff --git a/models/repo/repo_unit.go b/models/repo/repo_unit.go index cb52c2c9e2..a5207bc22a 100644 --- a/models/repo/repo_unit.go +++ b/models/repo/repo_unit.go @@ -5,7 +5,6 @@ package repo import ( "context" - "fmt" "slices" "strings" @@ -33,7 +32,7 @@ func IsErrUnitTypeNotExist(err error) bool { } func (err ErrUnitTypeNotExist) Error() string { - return fmt.Sprintf("Unit type does not exist: %s", err.UT.LogString()) + return "Unit type does not 
exist: " + err.UT.LogString() } func (err ErrUnitTypeNotExist) Unwrap() error { @@ -42,12 +41,13 @@ func (err ErrUnitTypeNotExist) Unwrap() error { // RepoUnit describes all units of a repository type RepoUnit struct { //revive:disable-line:exported - ID int64 - RepoID int64 `xorm:"INDEX(s)"` - Type unit.Type `xorm:"INDEX(s)"` - Config convert.Conversion `xorm:"TEXT"` - CreatedUnix timeutil.TimeStamp `xorm:"INDEX CREATED"` - EveryoneAccessMode perm.AccessMode `xorm:"NOT NULL DEFAULT 0"` + ID int64 + RepoID int64 `xorm:"INDEX(s)"` + Type unit.Type `xorm:"INDEX(s)"` + Config convert.Conversion `xorm:"TEXT"` + CreatedUnix timeutil.TimeStamp `xorm:"INDEX CREATED"` + AnonymousAccessMode perm.AccessMode `xorm:"NOT NULL DEFAULT 0"` + EveryoneAccessMode perm.AccessMode `xorm:"NOT NULL DEFAULT 0"` } func init() { @@ -185,10 +185,8 @@ func (cfg *ActionsConfig) IsWorkflowDisabled(file string) bool { } func (cfg *ActionsConfig) DisableWorkflow(file string) { - for _, workflow := range cfg.DisabledWorkflows { - if file == workflow { - return - } + if slices.Contains(cfg.DisabledWorkflows, file) { + return } cfg.DisabledWorkflows = append(cfg.DisabledWorkflows, file) @@ -341,3 +339,9 @@ func UpdateRepoUnit(ctx context.Context, unit *RepoUnit) error { _, err := db.GetEngine(ctx).ID(unit.ID).Update(unit) return err } + +func UpdateRepoUnitPublicAccess(ctx context.Context, unit *RepoUnit) error { + _, err := db.GetEngine(ctx).Where("repo_id=? AND `type`=?", unit.RepoID, unit.Type). + Cols("anonymous_access_mode", "everyone_access_mode").Update(unit) + return err +} diff --git a/models/repo/repo_unit_test.go b/models/repo/repo_unit_test.go index a760594013..56dda5672d 100644 --- a/models/repo/repo_unit_test.go +++ b/models/repo/repo_unit_test.go @@ -12,19 +12,19 @@ import ( func TestActionsConfig(t *testing.T) { cfg := &ActionsConfig{} cfg.DisableWorkflow("test1.yaml") - assert.EqualValues(t, []string{"test1.yaml"}, cfg.DisabledWorkflows) + assert.Equal(t, []string{"test1.yaml"}, cfg.DisabledWorkflows) cfg.DisableWorkflow("test1.yaml") - assert.EqualValues(t, []string{"test1.yaml"}, cfg.DisabledWorkflows) + assert.Equal(t, []string{"test1.yaml"}, cfg.DisabledWorkflows) cfg.EnableWorkflow("test1.yaml") - assert.EqualValues(t, []string{}, cfg.DisabledWorkflows) + assert.Equal(t, []string{}, cfg.DisabledWorkflows) cfg.EnableWorkflow("test1.yaml") - assert.EqualValues(t, []string{}, cfg.DisabledWorkflows) + assert.Equal(t, []string{}, cfg.DisabledWorkflows) cfg.DisableWorkflow("test1.yaml") cfg.DisableWorkflow("test2.yaml") cfg.DisableWorkflow("test3.yaml") - assert.EqualValues(t, "test1.yaml,test2.yaml,test3.yaml", cfg.ToString()) + assert.Equal(t, "test1.yaml,test2.yaml,test3.yaml", cfg.ToString()) } diff --git a/models/repo/topic_test.go b/models/repo/topic_test.go index 1600896b6e..b6a7aed7b1 100644 --- a/models/repo/topic_test.go +++ b/models/repo/topic_test.go @@ -53,7 +53,7 @@ func TestAddTopic(t *testing.T) { totalNrOfTopics++ topic, err := repo_model.GetTopicByName(db.DefaultContext, "gitea") assert.NoError(t, err) - assert.EqualValues(t, 1, topic.RepoCount) + assert.Equal(t, 1, topic.RepoCount) topics, err = db.Find[repo_model.Topic](db.DefaultContext, &repo_model.FindTopicOptions{}) assert.NoError(t, err) diff --git a/models/repo/transfer.go b/models/repo/transfer.go index 43e15b33bc..3fb8cb69ab 100644 --- a/models/repo/transfer.go +++ b/models/repo/transfer.go @@ -61,13 +61,14 @@ func (err ErrRepoTransferInProgress) Unwrap() error { } // RepoTransfer is used to manage repository transfers -type 
RepoTransfer struct { //nolint +type RepoTransfer struct { //nolint:revive // export stutter ID int64 `xorm:"pk autoincr"` DoerID int64 Doer *user_model.User `xorm:"-"` RecipientID int64 Recipient *user_model.User `xorm:"-"` RepoID int64 + Repo *Repository `xorm:"-"` TeamIDs []int64 Teams []*organization.Team `xorm:"-"` @@ -79,48 +80,65 @@ func init() { db.RegisterModel(new(RepoTransfer)) } -// LoadAttributes fetches the transfer recipient from the database -func (r *RepoTransfer) LoadAttributes(ctx context.Context) error { +func (r *RepoTransfer) LoadRecipient(ctx context.Context) error { if r.Recipient == nil { u, err := user_model.GetUserByID(ctx, r.RecipientID) if err != nil { return err } - r.Recipient = u } - if r.Recipient.IsOrganization() && len(r.TeamIDs) != len(r.Teams) { - for _, v := range r.TeamIDs { - team, err := organization.GetTeamByID(ctx, v) - if err != nil { - return err - } + return nil +} - if team.OrgID != r.Recipient.ID { - return fmt.Errorf("team %d belongs not to org %d", v, r.Recipient.ID) - } +func (r *RepoTransfer) LoadRepo(ctx context.Context) error { + if r.Repo == nil { + repo, err := GetRepositoryByID(ctx, r.RepoID) + if err != nil { + return err + } + r.Repo = repo + } + + return nil +} + +// LoadAttributes fetches the transfer recipient from the database +func (r *RepoTransfer) LoadAttributes(ctx context.Context) error { + if err := r.LoadRecipient(ctx); err != nil { + return err + } + if r.Recipient.IsOrganization() && r.Teams == nil { + teamsMap, err := organization.GetTeamsByIDs(ctx, r.TeamIDs) + if err != nil { + return err + } + for _, team := range teamsMap { r.Teams = append(r.Teams, team) } } + if err := r.LoadRepo(ctx); err != nil { + return err + } + if r.Doer == nil { u, err := user_model.GetUserByID(ctx, r.DoerID) if err != nil { return err } - r.Doer = u } return nil } -// CanUserAcceptTransfer checks if the user has the rights to accept/decline a repo transfer. +// CanUserAcceptOrRejectTransfer checks if the user has the rights to accept/decline a repo transfer. 
// For user, it checks if it's himself // For organizations, it checks if the user is able to create repos -func (r *RepoTransfer) CanUserAcceptTransfer(ctx context.Context, u *user_model.User) bool { +func (r *RepoTransfer) CanUserAcceptOrRejectTransfer(ctx context.Context, u *user_model.User) bool { if err := r.LoadAttributes(ctx); err != nil { log.Error("LoadAttributes: %v", err) return false @@ -166,6 +184,10 @@ func GetPendingRepositoryTransfers(ctx context.Context, opts *PendingRepositoryT Find(&transfers) } +func IsRepositoryTransferExist(ctx context.Context, repoID int64) (bool, error) { + return db.GetEngine(ctx).Where("repo_id = ?", repoID).Exist(new(RepoTransfer)) +} + // GetPendingRepositoryTransfer fetches the most recent and ongoing transfer // process for the repository func GetPendingRepositoryTransfer(ctx context.Context, repo *Repository) (*RepoTransfer, error) { @@ -206,13 +228,28 @@ func CreatePendingRepositoryTransfer(ctx context.Context, doer, newOwner *user_m return err } + if _, err := user_model.GetUserByID(ctx, newOwner.ID); err != nil { + return err + } + // Make sure repo is ready to transfer if err := TestRepositoryReadyForTransfer(repo.Status); err != nil { return err } + exist, err := IsRepositoryTransferExist(ctx, repo.ID) + if err != nil { + return err + } + if exist { + return ErrRepoTransferInProgress{ + Uname: repo.Owner.LowerName, + Name: repo.Name, + } + } + repo.Status = RepositoryPendingTransfer - if err := UpdateRepositoryCols(ctx, repo, "status"); err != nil { + if err := UpdateRepositoryColsNoAutoTime(ctx, repo, "status"); err != nil { return err } diff --git a/models/repo/update.go b/models/repo/update.go index e7ca224028..64065f11c4 100644 --- a/models/repo/update.go +++ b/models/repo/update.go @@ -25,7 +25,7 @@ func UpdateRepositoryOwnerNames(ctx context.Context, ownerID int64, ownerName st } defer committer.Close() - if _, err := db.GetEngine(ctx).Where("owner_id = ?", ownerID).Cols("owner_name").Update(&Repository{ + if _, err := db.GetEngine(ctx).Where("owner_id = ?", ownerID).Cols("owner_name").NoAutoTime().Update(&Repository{ OwnerName: ownerName, }); err != nil { return err @@ -40,9 +40,15 @@ func UpdateRepositoryUpdatedTime(ctx context.Context, repoID int64, updateTime t return err } -// UpdateRepositoryCols updates repository's columns -func UpdateRepositoryCols(ctx context.Context, repo *Repository, cols ...string) error { - _, err := db.GetEngine(ctx).ID(repo.ID).Cols(cols...).Update(repo) +// UpdateRepositoryColsWithAutoTime updates repository's columns and the timestamp fields automatically +func UpdateRepositoryColsWithAutoTime(ctx context.Context, repo *Repository, colName string, moreColNames ...string) error { + _, err := db.GetEngine(ctx).ID(repo.ID).Cols(append([]string{colName}, moreColNames...)...).Update(repo) + return err +} + +// UpdateRepositoryColsNoAutoTime updates repository's columns, doesn't change timestamp field automatically +func UpdateRepositoryColsNoAutoTime(ctx context.Context, repo *Repository, colName string, moreColNames ...string) error { + _, err := db.GetEngine(ctx).ID(repo.ID).Cols(append([]string{colName}, moreColNames...)...).NoAutoTime().Update(repo) return err } @@ -105,31 +111,31 @@ func (err ErrRepoFilesAlreadyExist) Unwrap() error { return util.ErrAlreadyExist } -// CheckCreateRepository check if could created a repository -func CheckCreateRepository(ctx context.Context, doer, u *user_model.User, name string, overwriteOrAdopt bool) error { - if !doer.CanCreateRepo() { - return 
ErrReachLimitOfRepo{u.MaxRepoCreation} +// CheckCreateRepository check if doer could create a repository in new owner +func CheckCreateRepository(ctx context.Context, doer, owner *user_model.User, name string, overwriteOrAdopt bool) error { + if !doer.CanCreateRepoIn(owner) { + return ErrReachLimitOfRepo{owner.MaxRepoCreation} } if err := IsUsableRepoName(name); err != nil { return err } - has, err := IsRepositoryModelOrDirExist(ctx, u, name) + has, err := IsRepositoryModelOrDirExist(ctx, owner, name) if err != nil { return fmt.Errorf("IsRepositoryExist: %w", err) } else if has { - return ErrRepoAlreadyExist{u.Name, name} + return ErrRepoAlreadyExist{owner.Name, name} } - repoPath := RepoPath(u.Name, name) + repoPath := RepoPath(owner.Name, name) isExist, err := util.IsExist(repoPath) if err != nil { log.Error("Unable to check if %s exists. Error: %v", repoPath, err) return err } if !overwriteOrAdopt && isExist { - return ErrRepoFilesAlreadyExist{u.Name, name} + return ErrRepoFilesAlreadyExist{owner.Name, name} } return nil } diff --git a/models/repo/upload.go b/models/repo/upload.go index 18834f6b83..20a8fa26fe 100644 --- a/models/repo/upload.go +++ b/models/repo/upload.go @@ -10,7 +10,7 @@ import ( "io" "mime/multipart" "os" - "path" + "path/filepath" "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/modules/log" @@ -51,14 +51,10 @@ func init() { db.RegisterModel(new(Upload)) } -// UploadLocalPath returns where uploads is stored in local file system based on given UUID. -func UploadLocalPath(uuid string) string { - return path.Join(setting.Repository.Upload.TempPath, uuid[0:1], uuid[1:2], uuid) -} - -// LocalPath returns where uploads are temporarily stored in local file system. +// LocalPath returns where uploads are temporarily stored in local file system based on given UUID. func (upload *Upload) LocalPath() string { - return UploadLocalPath(upload.UUID) + uuid := upload.UUID + return setting.AppDataTempDir("repo-uploads").JoinPath(uuid[0:1], uuid[1:2], uuid) } // NewUpload creates a new upload object. 
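// Upload.LocalPath above now resolves against setting.AppDataTempDir("repo-uploads") and shards
// by the first two UUID characters. A stdlib-only sketch of the same fan-out, with a hypothetical
// base directory standing in for the configured temp dir:
func exampleUploadPath(base, uuid string) string {
	// uuid "abcdef..." under base "/data/tmp/repo-uploads" -> "/data/tmp/repo-uploads/a/b/abcdef..."
	return filepath.Join(base, uuid[0:1], uuid[1:2], uuid)
}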
@@ -69,7 +65,7 @@ func NewUpload(ctx context.Context, name string, buf []byte, file multipart.File } localPath := upload.LocalPath() - if err = os.MkdirAll(path.Dir(localPath), os.ModePerm); err != nil { + if err = os.MkdirAll(filepath.Dir(localPath), os.ModePerm); err != nil { return nil, fmt.Errorf("MkdirAll: %w", err) } @@ -128,7 +124,7 @@ func DeleteUploads(ctx context.Context, uploads ...*Upload) (err error) { defer committer.Close() ids := make([]int64, len(uploads)) - for i := 0; i < len(uploads); i++ { + for i := range uploads { ids[i] = uploads[i].ID } if err = db.DeleteByIDs[Upload](ctx, ids...); err != nil { diff --git a/models/repo/user_repo.go b/models/repo/user_repo.go index a9b1360df1..232087d865 100644 --- a/models/repo/user_repo.go +++ b/models/repo/user_repo.go @@ -5,6 +5,7 @@ package repo import ( "context" + "strings" "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/perm" @@ -149,9 +150,9 @@ func GetRepoAssignees(ctx context.Context, repo *Repository) (_ []*user_model.Us // If isShowFullName is set to true, also include full name prefix search func GetIssuePostersWithSearch(ctx context.Context, repo *Repository, isPull bool, search string, isShowFullName bool) ([]*user_model.User, error) { users := make([]*user_model.User, 0, 30) - var prefixCond builder.Cond = builder.Like{"name", search + "%"} + var prefixCond builder.Cond = builder.Like{"lower_name", strings.ToLower(search) + "%"} if isShowFullName { - prefixCond = prefixCond.Or(builder.Like{"full_name", "%" + search + "%"}) + prefixCond = prefixCond.Or(db.BuildCaseInsensitiveLike("full_name", "%"+search+"%")) } cond := builder.In("`user`.id", diff --git a/models/repo/user_repo_test.go b/models/repo/user_repo_test.go index 44ebe5f214..50c970344c 100644 --- a/models/repo/user_repo_test.go +++ b/models/repo/user_repo_test.go @@ -12,6 +12,7 @@ import ( user_model "code.gitea.io/gitea/models/user" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestRepoAssignees(t *testing.T) { @@ -38,3 +39,19 @@ func TestRepoAssignees(t *testing.T) { assert.NotContains(t, []int64{users[0].ID, users[1].ID, users[2].ID}, 15) } } + +func TestGetIssuePostersWithSearch(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + repo2 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2}) + + users, err := repo_model.GetIssuePostersWithSearch(db.DefaultContext, repo2, false, "USER", false /* full name */) + require.NoError(t, err) + require.Len(t, users, 1) + assert.Equal(t, "user2", users[0].Name) + + users, err = repo_model.GetIssuePostersWithSearch(db.DefaultContext, repo2, false, "TW%O", true /* full name */) + require.NoError(t, err) + require.Len(t, users, 1) + assert.Equal(t, "user2", users[0].Name) +} diff --git a/models/repo/watch_test.go b/models/repo/watch_test.go index c39ef607e8..7ed72386c9 100644 --- a/models/repo/watch_test.go +++ b/models/repo/watch_test.go @@ -36,7 +36,7 @@ func TestGetWatchers(t *testing.T) { // One watchers are inactive, thus minus 1 assert.Len(t, watches, repo.NumWatches-1) for _, watch := range watches { - assert.EqualValues(t, repo.ID, watch.RepoID) + assert.Equal(t, repo.ID, watch.RepoID) } watches, err = repo_model.GetWatchers(db.DefaultContext, unittest.NonexistentID) diff --git a/models/repo/wiki.go b/models/repo/wiki.go index b378666a20..832e15ae0d 100644 --- a/models/repo/wiki.go +++ b/models/repo/wiki.go @@ -5,6 +5,7 @@ package repo import ( + "context" "fmt" "path/filepath" "strings" @@ -45,7 +46,7 @@ func 
IsErrWikiReservedName(err error) bool { } func (err ErrWikiReservedName) Error() string { - return fmt.Sprintf("wiki title is reserved: %s", err.Title) + return "wiki title is reserved: " + err.Title } func (err ErrWikiReservedName) Unwrap() error { @@ -64,7 +65,7 @@ func IsErrWikiInvalidFileName(err error) bool { } func (err ErrWikiInvalidFileName) Error() string { - return fmt.Sprintf("Invalid wiki filename: %s", err.FileName) + return "Invalid wiki filename: " + err.FileName } func (err ErrWikiInvalidFileName) Unwrap() error { @@ -72,8 +73,8 @@ func (err ErrWikiInvalidFileName) Unwrap() error { } // WikiCloneLink returns clone URLs of repository wiki. -func (repo *Repository) WikiCloneLink() *CloneLink { - return repo.cloneLink(true) +func (repo *Repository) WikiCloneLink(ctx context.Context, doer *user_model.User) *CloneLink { + return repo.cloneLink(ctx, doer, repo.Name+".wiki") } // WikiPath returns wiki data path by given user and repository name. diff --git a/models/repo/wiki_test.go b/models/repo/wiki_test.go index 629986f741..103420a392 100644 --- a/models/repo/wiki_test.go +++ b/models/repo/wiki_test.go @@ -18,7 +18,7 @@ func TestRepository_WikiCloneLink(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}) - cloneLink := repo.WikiCloneLink() + cloneLink := repo.WikiCloneLink(t.Context(), nil) assert.Equal(t, "ssh://sshuser@try.gitea.io:3000/user2/repo1.wiki.git", cloneLink.SSH) assert.Equal(t, "https://try.gitea.io/user2/repo1.wiki.git", cloneLink.HTTPS) } diff --git a/models/repo_test.go b/models/repo_test.go index bcf62237f0..b6c53fd197 100644 --- a/models/repo_test.go +++ b/models/repo_test.go @@ -29,10 +29,10 @@ func Test_repoStatsCorrectIssueNumComments(t *testing.T) { issue2 := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 2}) assert.NotNil(t, issue2) - assert.EqualValues(t, 0, issue2.NumComments) // the fixture data is wrong, but we don't fix it here + assert.Equal(t, 0, issue2.NumComments) // the fixture data is wrong, but we don't fix it here assert.NoError(t, repoStatsCorrectIssueNumComments(db.DefaultContext, 2)) // reload the issue issue2 = unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 2}) - assert.EqualValues(t, 1, issue2.NumComments) + assert.Equal(t, 1, issue2.NumComments) } diff --git a/models/secret/secret.go b/models/secret/secret.go index ce0ad65a79..10a0287dfd 100644 --- a/models/secret/secret.go +++ b/models/secret/secret.go @@ -40,9 +40,15 @@ type Secret struct { RepoID int64 `xorm:"INDEX UNIQUE(owner_repo_name) NOT NULL DEFAULT 0"` Name string `xorm:"UNIQUE(owner_repo_name) NOT NULL"` Data string `xorm:"LONGTEXT"` // encrypted data + Description string `xorm:"TEXT"` CreatedUnix timeutil.TimeStamp `xorm:"created NOT NULL"` } +const ( + SecretDataMaxLength = 65536 + SecretDescriptionMaxLength = 4096 +) + // ErrSecretNotFound represents a "secret not found" error. 
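// The Secret model above gains a Description column with explicit length limits, and the
// insert/update paths further below enforce them. A rough sketch of that validation step,
// reusing the limit constants and util helpers shown in this diff (the function name is
// illustrative):
func validateSecretInput(data, description string) (string, error) {
	if len(data) > SecretDataMaxLength {
		return "", util.NewInvalidArgumentErrorf("data too long")
	}
	// over-long descriptions are truncated rather than rejected
	return util.TruncateRunes(description, SecretDescriptionMaxLength), nil
}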
type ErrSecretNotFound struct { Name string @@ -57,7 +63,7 @@ func (err ErrSecretNotFound) Unwrap() error { } // InsertEncryptedSecret Creates, encrypts, and validates a new secret with yet unencrypted data and insert into database -func InsertEncryptedSecret(ctx context.Context, ownerID, repoID int64, name, data string) (*Secret, error) { +func InsertEncryptedSecret(ctx context.Context, ownerID, repoID int64, name, data, description string) (*Secret, error) { if ownerID != 0 && repoID != 0 { // It's trying to create a secret that belongs to a repository, but OwnerID has been set accidentally. // Remove OwnerID to avoid confusion; it's not worth returning an error here. @@ -67,15 +73,23 @@ func InsertEncryptedSecret(ctx context.Context, ownerID, repoID int64, name, dat return nil, fmt.Errorf("%w: ownerID and repoID cannot be both zero, global secrets are not supported", util.ErrInvalidArgument) } + if len(data) > SecretDataMaxLength { + return nil, util.NewInvalidArgumentErrorf("data too long") + } + + description = util.TruncateRunes(description, SecretDescriptionMaxLength) + encrypted, err := secret_module.EncryptSecret(setting.SecretKey, data) if err != nil { return nil, err } + secret := &Secret{ - OwnerID: ownerID, - RepoID: repoID, - Name: strings.ToUpper(name), - Data: encrypted, + OwnerID: ownerID, + RepoID: repoID, + Name: strings.ToUpper(name), + Data: encrypted, + Description: description, } return secret, db.Insert(ctx, secret) } @@ -114,16 +128,23 @@ func (opts FindSecretsOptions) ToConds() builder.Cond { } // UpdateSecret changes org or user reop secret. -func UpdateSecret(ctx context.Context, secretID int64, data string) error { +func UpdateSecret(ctx context.Context, secretID int64, data, description string) error { + if len(data) > SecretDataMaxLength { + return util.NewInvalidArgumentErrorf("data too long") + } + + description = util.TruncateRunes(description, SecretDescriptionMaxLength) + encrypted, err := secret_module.EncryptSecret(setting.SecretKey, data) if err != nil { return err } s := &Secret{ - Data: encrypted, + Data: encrypted, + Description: description, } - affected, err := db.GetEngine(ctx).ID(secretID).Cols("data").Update(s) + affected, err := db.GetEngine(ctx).ID(secretID).Cols("data", "description").Update(s) if affected != 1 { return ErrSecretNotFound{} } @@ -165,3 +186,17 @@ func GetSecretsOfTask(ctx context.Context, task *actions_model.ActionTask) (map[ return secrets, nil } + +func CountWrongRepoLevelSecrets(ctx context.Context) (int64, error) { + var result int64 + _, err := db.GetEngine(ctx).SQL("SELECT count(`id`) FROM `secret` WHERE `repo_id` > 0 AND `owner_id` > 0").Get(&result) + return result, err +} + +func UpdateWrongRepoLevelSecrets(ctx context.Context) (int64, error) { + result, err := db.GetEngine(ctx).Exec("UPDATE `secret` SET `owner_id` = 0 WHERE `repo_id` > 0 AND `owner_id` > 0") + if err != nil { + return 0, err + } + return result.RowsAffected() +} diff --git a/models/system/notice_test.go b/models/system/notice_test.go index 599b2fb65c..9fc9e6cce1 100644 --- a/models/system/notice_test.go +++ b/models/system/notice_test.go @@ -45,8 +45,6 @@ func TestCreateRepositoryNotice(t *testing.T) { unittest.AssertExistsAndLoadBean(t, noticeBean) } -// TODO TestRemoveAllWithNotice - func TestCountNotices(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) assert.Equal(t, int64(3), system.CountNotices(db.DefaultContext)) diff --git a/models/system/setting_test.go b/models/system/setting_test.go index 8f04412fb4..7e7e0c8fca 100644 
--- a/models/system/setting_test.go +++ b/models/system/setting_test.go @@ -21,24 +21,24 @@ func TestSettings(t *testing.T) { rev, settings, err := system.GetAllSettings(db.DefaultContext) assert.NoError(t, err) - assert.EqualValues(t, 1, rev) + assert.Equal(t, 1, rev) assert.Len(t, settings, 1) // there is only one "revision" key err = system.SetSettings(db.DefaultContext, map[string]string{keyName: "true"}) assert.NoError(t, err) rev, settings, err = system.GetAllSettings(db.DefaultContext) assert.NoError(t, err) - assert.EqualValues(t, 2, rev) + assert.Equal(t, 2, rev) assert.Len(t, settings, 2) - assert.EqualValues(t, "true", settings[keyName]) + assert.Equal(t, "true", settings[keyName]) err = system.SetSettings(db.DefaultContext, map[string]string{keyName: "false"}) assert.NoError(t, err) rev, settings, err = system.GetAllSettings(db.DefaultContext) assert.NoError(t, err) - assert.EqualValues(t, 3, rev) + assert.Equal(t, 3, rev) assert.Len(t, settings, 2) - assert.EqualValues(t, "false", settings[keyName]) + assert.Equal(t, "false", settings[keyName]) // setting the same value should not trigger DuplicateKey error, and the "version" should be increased err = system.SetSettings(db.DefaultContext, map[string]string{keyName: "false"}) @@ -47,5 +47,5 @@ func TestSettings(t *testing.T) { rev, settings, err = system.GetAllSettings(db.DefaultContext) assert.NoError(t, err) assert.Len(t, settings, 2) - assert.EqualValues(t, 4, rev) + assert.Equal(t, 4, rev) } diff --git a/models/unit/unit.go b/models/unit/unit.go index c816fc6c68..c0560678ca 100644 --- a/models/unit/unit.go +++ b/models/unit/unit.go @@ -6,6 +6,7 @@ package unit import ( "errors" "fmt" + "slices" "strings" "sync/atomic" @@ -20,17 +21,21 @@ type Type int // Enumerate all the unit types const ( - TypeInvalid Type = iota // 0 invalid - TypeCode // 1 code - TypeIssues // 2 issues - TypePullRequests // 3 PRs - TypeReleases // 4 Releases - TypeWiki // 5 Wiki - TypeExternalWiki // 6 ExternalWiki - TypeExternalTracker // 7 ExternalTracker - TypeProjects // 8 Projects - TypePackages // 9 Packages - TypeActions // 10 Actions + TypeInvalid Type = iota // 0 invalid + + TypeCode // 1 code + TypeIssues // 2 issues + TypePullRequests // 3 PRs + TypeReleases // 4 Releases + TypeWiki // 5 Wiki + TypeExternalWiki // 6 ExternalWiki + TypeExternalTracker // 7 ExternalTracker + TypeProjects // 8 Projects + TypePackages // 9 Packages + TypeActions // 10 Actions + + // FIXME: TEAM-UNIT-PERMISSION: the team unit "admin" permission's design is not right, when a new unit is added in the future, + // admin team won't inherit the correct admin permission for the new unit, need to have a complete fix before adding any new unit. 
) // Value returns integer value for unit type (used by template) @@ -200,22 +205,12 @@ func LoadUnitConfig() error { // UnitGlobalDisabled checks if unit type is global disabled func (u Type) UnitGlobalDisabled() bool { - for _, ud := range DisabledRepoUnitsGet() { - if u == ud { - return true - } - } - return false + return slices.Contains(DisabledRepoUnitsGet(), u) } // CanBeDefault checks if the unit type can be a default repo unit func (u *Type) CanBeDefault() bool { - for _, nadU := range NotAllowedDefaultRepoUnits { - if *u == nadU { - return false - } - } - return true + return !slices.Contains(NotAllowedDefaultRepoUnits, *u) } // Unit is a section of one repository @@ -380,20 +375,3 @@ func AllUnitKeyNames() []string { } return res } - -// MinUnitAccessMode returns the minial permission of the permission map -func MinUnitAccessMode(unitsMap map[Type]perm.AccessMode) perm.AccessMode { - res := perm.AccessModeNone - for t, mode := range unitsMap { - // Don't allow `TypeExternal{Tracker,Wiki}` to influence this as they can only be set to READ perms. - if t == TypeExternalTracker || t == TypeExternalWiki { - continue - } - - // get the minial permission great than AccessModeNone except all are AccessModeNone - if mode > perm.AccessModeNone && (res == perm.AccessModeNone || mode < res) { - res = mode - } - } - return res -} diff --git a/models/unittest/consistency.go b/models/unittest/consistency.go index 71839001be..364afb5c52 100644 --- a/models/unittest/consistency.go +++ b/models/unittest/consistency.go @@ -11,6 +11,7 @@ import ( "code.gitea.io/gitea/models/db" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "xorm.io/builder" ) @@ -24,7 +25,7 @@ const ( var consistencyCheckMap = make(map[string]func(t assert.TestingT, bean any)) // CheckConsistencyFor test that all matching database entries are consistent -func CheckConsistencyFor(t assert.TestingT, beansToCheck ...any) { +func CheckConsistencyFor(t require.TestingT, beansToCheck ...any) { for _, bean := range beansToCheck { sliceType := reflect.SliceOf(reflect.TypeOf(bean)) sliceValue := reflect.MakeSlice(sliceType, 0, 10) @@ -42,13 +43,11 @@ func CheckConsistencyFor(t assert.TestingT, beansToCheck ...any) { } } -func checkForConsistency(t assert.TestingT, bean any) { +func checkForConsistency(t require.TestingT, bean any) { tb, err := db.TableInfo(bean) assert.NoError(t, err) f := consistencyCheckMap[tb.Name] - if f == nil { - assert.FailNow(t, "unknown bean type: %#v", bean) - } + require.NotNil(t, f, "unknown bean type: %#v", bean) f(t, bean) } @@ -71,8 +70,8 @@ func init() { AssertCountByCond(t, "follow", builder.Eq{"user_id": user.int("ID")}, user.int("NumFollowing")) AssertCountByCond(t, "follow", builder.Eq{"follow_id": user.int("ID")}, user.int("NumFollowers")) if user.int("Type") != modelsUserTypeOrganization { - assert.EqualValues(t, 0, user.int("NumMembers"), "Unexpected number of members for user id: %d", user.int("ID")) - assert.EqualValues(t, 0, user.int("NumTeams"), "Unexpected number of teams for user id: %d", user.int("ID")) + assert.Equal(t, 0, user.int("NumMembers"), "Unexpected number of members for user id: %d", user.int("ID")) + assert.Equal(t, 0, user.int("NumTeams"), "Unexpected number of teams for user id: %d", user.int("ID")) } } @@ -119,7 +118,7 @@ func init() { assert.EqualValues(t, issue.int("NumComments"), actual, "Unexpected number of comments for issue id: %d", issue.int("ID")) if issue.bool("IsPull") { prRow := AssertExistsAndLoadMap(t, "pull_request", 
builder.Eq{"issue_id": issue.int("ID")}) - assert.EqualValues(t, parseInt(prRow["index"]), issue.int("Index"), "Unexpected index for issue id: %d", issue.int("ID")) + assert.Equal(t, parseInt(prRow["index"]), issue.int("Index"), "Unexpected index for issue id: %d", issue.int("ID")) } } @@ -127,7 +126,7 @@ func init() { pr := reflectionWrap(bean) issueRow := AssertExistsAndLoadMap(t, "issue", builder.Eq{"id": pr.int("IssueID")}) assert.True(t, parseBool(issueRow["is_pull"])) - assert.EqualValues(t, parseInt(issueRow["index"]), pr.int("Index"), "Unexpected index for pull request id: %d", pr.int("ID")) + assert.Equal(t, parseInt(issueRow["index"]), pr.int("Index"), "Unexpected index for pull request id: %d", pr.int("ID")) } checkForMilestoneConsistency := func(t assert.TestingT, bean any) { diff --git a/models/unittest/fixtures_loader.go b/models/unittest/fixtures_loader.go index 14686caf63..0560da8349 100644 --- a/models/unittest/fixtures_loader.go +++ b/models/unittest/fixtures_loader.go @@ -12,13 +12,17 @@ import ( "slices" "strings" + "code.gitea.io/gitea/models/db" + "gopkg.in/yaml.v3" "xorm.io/xorm" "xorm.io/xorm/schemas" ) -type fixtureItem struct { - tableName string +type FixtureItem struct { + fileFullPath string + tableName string + tableNameQuoted string sqlInserts []string sqlInsertArgs [][]any @@ -27,10 +31,11 @@ type fixtureItem struct { } type fixturesLoaderInternal struct { + xormEngine *xorm.Engine + xormTableNames map[string]bool db *sql.DB dbType schemas.DBType - files []string - fixtures map[string]*fixtureItem + fixtures map[string]*FixtureItem quoteObject func(string) string paramPlaceholder func(idx int) string } @@ -59,29 +64,27 @@ func (f *fixturesLoaderInternal) preprocessFixtureRow(row []map[string]any) (err return nil } -func (f *fixturesLoaderInternal) prepareFixtureItem(file string) (_ *fixtureItem, err error) { - fixture := &fixtureItem{} - fixture.tableName, _, _ = strings.Cut(filepath.Base(file), ".") +func (f *fixturesLoaderInternal) prepareFixtureItem(fixture *FixtureItem) (err error) { fixture.tableNameQuoted = f.quoteObject(fixture.tableName) if f.dbType == schemas.MSSQL { fixture.mssqlHasIdentityColumn, err = f.mssqlTableHasIdentityColumn(f.db, fixture.tableName) if err != nil { - return nil, err + return err } } - data, err := os.ReadFile(file) + data, err := os.ReadFile(fixture.fileFullPath) if err != nil { - return nil, fmt.Errorf("failed to read file %q: %w", file, err) + return fmt.Errorf("failed to read file %q: %w", fixture.fileFullPath, err) } var rows []map[string]any if err = yaml.Unmarshal(data, &rows); err != nil { - return nil, fmt.Errorf("failed to unmarshal yaml data from %q: %w", file, err) + return fmt.Errorf("failed to unmarshal yaml data from %q: %w", fixture.fileFullPath, err) } if err = f.preprocessFixtureRow(rows); err != nil { - return nil, fmt.Errorf("failed to preprocess fixture rows from %q: %w", file, err) + return fmt.Errorf("failed to preprocess fixture rows from %q: %w", fixture.fileFullPath, err) } var sqlBuf []byte @@ -107,19 +110,17 @@ func (f *fixturesLoaderInternal) prepareFixtureItem(file string) (_ *fixtureItem sqlBuf = sqlBuf[:0] sqlArguments = sqlArguments[:0] } - return fixture, nil + return nil } -func (f *fixturesLoaderInternal) loadFixtures(tx *sql.Tx, file string) (err error) { - fixture := f.fixtures[file] - if fixture == nil { - if fixture, err = f.prepareFixtureItem(file); err != nil { +func (f *fixturesLoaderInternal) loadFixtures(tx *sql.Tx, fixture *FixtureItem) (err error) { + if fixture.tableNameQuoted 
== "" { + if err = f.prepareFixtureItem(fixture); err != nil { return err } - f.fixtures[file] = fixture } - _, err = tx.Exec(fmt.Sprintf("DELETE FROM %s", fixture.tableNameQuoted)) // sqlite3 doesn't support truncate + _, err = tx.Exec("DELETE FROM " + fixture.tableNameQuoted) // sqlite3 doesn't support truncate if err != nil { return err } @@ -147,15 +148,26 @@ func (f *fixturesLoaderInternal) Load() error { } defer func() { _ = tx.Rollback() }() - for _, file := range f.files { - if err := f.loadFixtures(tx, file); err != nil { - return fmt.Errorf("failed to load fixtures from %s: %w", file, err) + for _, fixture := range f.fixtures { + if !f.xormTableNames[fixture.tableName] { + continue + } + if err := f.loadFixtures(tx, fixture); err != nil { + return fmt.Errorf("failed to load fixtures from %s: %w", fixture.fileFullPath, err) } } - return tx.Commit() + if err = tx.Commit(); err != nil { + return err + } + for xormTableName := range f.xormTableNames { + if f.fixtures[xormTableName] == nil { + _, _ = f.xormEngine.Exec("DELETE FROM `" + xormTableName + "`") + } + } + return nil } -func FixturesFileFullPaths(dir string, files []string) ([]string, error) { +func FixturesFileFullPaths(dir string, files []string) (map[string]*FixtureItem, error) { if files != nil && len(files) == 0 { return nil, nil // load nothing } @@ -169,20 +181,25 @@ func FixturesFileFullPaths(dir string, files []string) ([]string, error) { files = append(files, e.Name()) } } - for i, file := range files { - if !filepath.IsAbs(file) { - files[i] = filepath.Join(dir, file) + fixtureItems := map[string]*FixtureItem{} + for _, file := range files { + fileFillPath := file + if !filepath.IsAbs(fileFillPath) { + fileFillPath = filepath.Join(dir, file) } + tableName, _, _ := strings.Cut(filepath.Base(file), ".") + fixtureItems[tableName] = &FixtureItem{fileFullPath: fileFillPath, tableName: tableName} } - return files, nil + return fixtureItems, nil } func NewFixturesLoader(x *xorm.Engine, opts FixturesOptions) (FixturesLoader, error) { - files, err := FixturesFileFullPaths(opts.Dir, opts.Files) + fixtureItems, err := FixturesFileFullPaths(opts.Dir, opts.Files) if err != nil { return nil, fmt.Errorf("failed to get fixtures files: %w", err) } - f := &fixturesLoaderInternal{db: x.DB().DB, dbType: x.Dialect().URI().DBType, files: files, fixtures: map[string]*fixtureItem{}} + + f := &fixturesLoaderInternal{xormEngine: x, db: x.DB().DB, dbType: x.Dialect().URI().DBType, fixtures: fixtureItems} switch f.dbType { case schemas.SQLITE: f.quoteObject = func(s string) string { return fmt.Sprintf(`"%s"`, s) } @@ -197,5 +214,12 @@ func NewFixturesLoader(x *xorm.Engine, opts FixturesOptions) (FixturesLoader, er f.quoteObject = func(s string) string { return fmt.Sprintf("[%s]", s) } f.paramPlaceholder = func(idx int) string { return "?" 
} } + + xormBeans, _ := db.NamesToBean() + f.xormTableNames = map[string]bool{} + for _, bean := range xormBeans { + f.xormTableNames[db.TableName(bean)] = true + } + return f, nil } diff --git a/models/unittest/fixtures_test.go b/models/unittest/fixtures_test.go index a4c55f4e55..8a4c5f1793 100644 --- a/models/unittest/fixtures_test.go +++ b/models/unittest/fixtures_test.go @@ -99,7 +99,7 @@ func BenchmarkFixturesLoader(b *testing.B) { // BenchmarkFixturesLoader/Internal // BenchmarkFixturesLoader/Internal-12 1746 670457 ns/op b.Run("Internal", func(b *testing.B) { - for i := 0; i < b.N; i++ { + for b.Loop() { require.NoError(b, loaderInternal.Load()) } }) @@ -107,7 +107,7 @@ func BenchmarkFixturesLoader(b *testing.B) { if loaderVendor == nil { b.Skip() } - for i := 0; i < b.N; i++ { + for b.Loop() { require.NoError(b, loaderVendor.Load()) } }) diff --git a/models/unittest/fscopy.go b/models/unittest/fscopy.go index 690089bbc5..98b01815bd 100644 --- a/models/unittest/fscopy.go +++ b/models/unittest/fscopy.go @@ -11,35 +11,13 @@ import ( "code.gitea.io/gitea/modules/util" ) -// Copy copies file from source to target path. -func Copy(src, dest string) error { - // Gather file information to set back later. - si, err := os.Lstat(src) - if err != nil { - return err - } - - // Handle symbolic link. - if si.Mode()&os.ModeSymlink != 0 { - target, err := os.Readlink(src) - if err != nil { - return err - } - // NOTE: os.Chmod and os.Chtimes don't recognize symbolic link, - // which will lead "no such file or directory" error. - return os.Symlink(target, dest) - } - - return util.CopyFile(src, dest) -} - -// Sync synchronizes the two files. This is skipped if both files +// SyncFile synchronizes the two files. This is skipped if both files // exist and the size, modtime, and mode match. -func Sync(srcPath, destPath string) error { +func SyncFile(srcPath, destPath string) error { dest, err := os.Stat(destPath) if err != nil { if os.IsNotExist(err) { - return Copy(srcPath, destPath) + return util.CopyFile(srcPath, destPath) } return err } @@ -50,12 +28,12 @@ func Sync(srcPath, destPath string) error { } if src.Size() == dest.Size() && - src.ModTime() == dest.ModTime() && + src.ModTime().Equal(dest.ModTime()) && src.Mode() == dest.Mode() { return nil } - return Copy(srcPath, destPath) + return util.CopyFile(srcPath, destPath) } // SyncDirs synchronizes files recursively from source to target directory. 
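// A minimal standalone sketch of why SyncFile above now compares modification times
// with time.Time.Equal instead of "==": comparing time.Time values with "==" also
// compares the monotonic clock reading and the Location, so two values describing
// the same instant can still compare unequal. Illustrative only; assumes nothing
// beyond the standard library.

package main

import (
	"fmt"
	"time"
)

func main() {
	now := time.Now()        // carries a monotonic clock reading
	stripped := now.Round(0) // Round(0) drops the monotonic reading, same instant
	fmt.Println(stripped == now)     // false: internal representations differ
	fmt.Println(stripped.Equal(now)) // true: Equal compares the instant only
}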
@@ -66,6 +44,10 @@ func SyncDirs(srcPath, destPath string) error { return err } + // the keep file is used to keep the directory in a git repository, it doesn't need to be synced + // and go-git doesn't work with the ".keep" file (it would report errors like "ref is empty") + const keepFile = ".keep" + // find and delete all untracked files destFiles, err := util.ListDirRecursively(destPath, &util.ListDirOptions{IncludeDir: true}) if err != nil { @@ -73,16 +55,20 @@ func SyncDirs(srcPath, destPath string) error { } for _, destFile := range destFiles { destFilePath := filepath.Join(destPath, destFile) + shouldRemove := filepath.Base(destFilePath) == keepFile if _, err = os.Stat(filepath.Join(srcPath, destFile)); err != nil { if os.IsNotExist(err) { - // if src file does not exist, remove dest file - if err = os.RemoveAll(destFilePath); err != nil { - return err - } + shouldRemove = true } else { return err } } + // if src file does not exist, remove dest file + if shouldRemove { + if err = os.RemoveAll(destFilePath); err != nil { + return err + } + } } // sync src files to dest @@ -95,8 +81,8 @@ func SyncDirs(srcPath, destPath string) error { // util.ListDirRecursively appends a slash to the directory name if strings.HasSuffix(srcFile, "/") { err = os.MkdirAll(destFilePath, os.ModePerm) - } else { - err = Sync(filepath.Join(srcPath, srcFile), destFilePath) + } else if filepath.Base(destFilePath) != keepFile { + err = SyncFile(filepath.Join(srcPath, srcFile), destFilePath) } if err != nil { return err diff --git a/models/unittest/testdb.go b/models/unittest/testdb.go index 0dcff166cc..cb60cf5f85 100644 --- a/models/unittest/testdb.go +++ b/models/unittest/testdb.go @@ -20,6 +20,7 @@ import ( "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/setting/config" "code.gitea.io/gitea/modules/storage" + "code.gitea.io/gitea/modules/tempdir" "code.gitea.io/gitea/modules/test" "code.gitea.io/gitea/modules/util" @@ -35,8 +36,8 @@ func fatalTestError(fmtStr string, args ...any) { os.Exit(1) } -// InitSettings initializes config provider and load common settings for tests -func InitSettings() { +// InitSettingsForTesting initializes config provider and load common settings for tests +func InitSettingsForTesting() { setting.IsInTesting = true log.OsExiter = func(code int) { if code != 0 { @@ -75,7 +76,7 @@ func MainTest(m *testing.M, testOptsArg ...*TestOptions) { testOpts := util.OptionalArg(testOptsArg, &TestOptions{}) giteaRoot = test.SetupGiteaRoot() setting.CustomPath = filepath.Join(giteaRoot, "custom") - InitSettings() + InitSettingsForTesting() fixturesOpts := FixturesOptions{Dir: filepath.Join(giteaRoot, "models", "fixtures"), Files: testOpts.FixtureFiles} if err := CreateTestEngine(fixturesOpts); err != nil { @@ -84,6 +85,7 @@ func MainTest(m *testing.M, testOptsArg ...*TestOptions) { setting.IsInTesting = true setting.AppURL = "https://try.gitea.io/" + setting.Domain = "try.gitea.io" setting.RunUser = "runuser" setting.SSH.User = "sshuser" setting.SSH.BuiltinServerUser = "builtinuser" @@ -91,15 +93,19 @@ func MainTest(m *testing.M, testOptsArg ...*TestOptions) { setting.SSH.Domain = "try.gitea.io" setting.Database.Type = "sqlite3" setting.Repository.DefaultBranch = "master" // many test code still assume that default branch is called "master" - repoRootPath, err := os.MkdirTemp(os.TempDir(), "repos") + repoRootPath, cleanup1, err := tempdir.OsTempDir("gitea-test").MkdirTempRandom("repos") if err != nil { fatalTestError("TempDir: %v\n", err) } + defer cleanup1() + 
setting.RepoRootPath = repoRootPath - appDataPath, err := os.MkdirTemp(os.TempDir(), "appdata") + appDataPath, cleanup2, err := tempdir.OsTempDir("gitea-test").MkdirTempRandom("appdata") if err != nil { fatalTestError("TempDir: %v\n", err) } + defer cleanup2() + setting.AppDataPath = appDataPath setting.AppWorkPath = giteaRoot setting.StaticRootPath = giteaRoot @@ -152,13 +158,6 @@ func MainTest(m *testing.M, testOptsArg ...*TestOptions) { fatalTestError("tear down failed: %v\n", err) } } - - if err = util.RemoveAll(repoRootPath); err != nil { - fatalTestError("util.RemoveAll: %v\n", err) - } - if err = util.RemoveAll(appDataPath); err != nil { - fatalTestError("util.RemoveAll: %v\n", err) - } os.Exit(exitStatus) } diff --git a/models/unittest/unit_tests.go b/models/unittest/unit_tests.go index 1c5595aef8..4a4cec40ae 100644 --- a/models/unittest/unit_tests.go +++ b/models/unittest/unit_tests.go @@ -153,9 +153,9 @@ func DumpQueryResult(t require.TestingT, sqlOrBean any, sqlArgs ...any) { goDB := x.DB().DB sql, ok := sqlOrBean.(string) if !ok { - sql = fmt.Sprintf("SELECT * FROM %s", db.TableName(sqlOrBean)) + sql = "SELECT * FROM " + db.TableName(sqlOrBean) } else if !strings.Contains(sql, " ") { - sql = fmt.Sprintf("SELECT * FROM %s", sql) + sql = "SELECT * FROM " + sql } rows, err := goDB.Query(sql, sqlArgs...) require.NoError(t, err) diff --git a/models/user/avatar.go b/models/user/avatar.go index 5453c78fc6..542bd93b98 100644 --- a/models/user/avatar.go +++ b/models/user/avatar.go @@ -5,7 +5,6 @@ package user import ( "context" - "crypto/md5" "fmt" "image/png" "io" @@ -38,27 +37,32 @@ func GenerateRandomAvatar(ctx context.Context, u *User) error { u.Avatar = avatars.HashEmail(seed) - // Don't share the images so that we can delete them easily - if err := storage.SaveFrom(storage.Avatars, u.CustomAvatarRelativePath(), func(w io.Writer) error { - if err := png.Encode(w, img); err != nil { - log.Error("Encode: %v", err) + _, err = storage.Avatars.Stat(u.CustomAvatarRelativePath()) + if err != nil { + // If unable to Stat the avatar file (usually it means non-existing), then try to save a new one + // Don't share the images so that we can delete them easily + if err := storage.SaveFrom(storage.Avatars, u.CustomAvatarRelativePath(), func(w io.Writer) error { + if err := png.Encode(w, img); err != nil { + log.Error("Encode: %v", err) + } + return nil + }); err != nil { + return fmt.Errorf("failed to save avatar %s: %w", u.CustomAvatarRelativePath(), err) } - return err - }); err != nil { - return fmt.Errorf("Failed to create dir %s: %w", u.CustomAvatarRelativePath(), err) } if _, err := db.GetEngine(ctx).ID(u.ID).Cols("avatar").Update(u); err != nil { return err } - log.Info("New random avatar created: %d", u.ID) return nil } // AvatarLinkWithSize returns a link to the user's avatar with size. 
size <= 0 means default size func (u *User) AvatarLinkWithSize(ctx context.Context, size int) string { - if u.IsGhost() { + // ghost user was deleted, Gitea actions is a bot user, 0 means the user should be a virtual user + // which comes from git configure information + if u.IsGhost() || u.IsGiteaActions() || u.ID <= 0 { return avatars.DefaultAvatarLink() } @@ -101,7 +105,7 @@ func (u *User) IsUploadAvatarChanged(data []byte) bool { if !u.UseCustomAvatar || len(u.Avatar) == 0 { return true } - avatarID := fmt.Sprintf("%x", md5.Sum([]byte(fmt.Sprintf("%d-%x", u.ID, md5.Sum(data))))) + avatarID := avatar.HashAvatar(u.ID, data) return u.Avatar != avatarID } diff --git a/models/user/avatar_test.go b/models/user/avatar_test.go index 1078875ee1..941068957c 100644 --- a/models/user/avatar_test.go +++ b/models/user/avatar_test.go @@ -4,13 +4,18 @@ package user import ( + "io" + "strings" "testing" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/unittest" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/storage" "code.gitea.io/gitea/modules/test" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestUserAvatarLink(t *testing.T) { @@ -26,3 +31,37 @@ func TestUserAvatarLink(t *testing.T) { link = u.AvatarLink(db.DefaultContext) assert.Equal(t, "https://localhost/sub-path/avatars/avatar.png", link) } + +func TestUserAvatarGenerate(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + var err error + tmpDir := t.TempDir() + storage.Avatars, err = storage.NewLocalStorage(t.Context(), &setting.Storage{Path: tmpDir}) + require.NoError(t, err) + + u := unittest.AssertExistsAndLoadBean(t, &User{ID: 2}) + + // there was no avatar, generate a new one + assert.Empty(t, u.Avatar) + err = GenerateRandomAvatar(db.DefaultContext, u) + require.NoError(t, err) + assert.NotEmpty(t, u.Avatar) + + // make sure the generated one exists + oldAvatarPath := u.CustomAvatarRelativePath() + _, err = storage.Avatars.Stat(u.CustomAvatarRelativePath()) + require.NoError(t, err) + // and try to change its content + _, err = storage.Avatars.Save(u.CustomAvatarRelativePath(), strings.NewReader("abcd"), 4) + require.NoError(t, err) + + // try to generate again + err = GenerateRandomAvatar(db.DefaultContext, u) + require.NoError(t, err) + assert.Equal(t, oldAvatarPath, u.CustomAvatarRelativePath()) + f, err := storage.Avatars.Open(u.CustomAvatarRelativePath()) + require.NoError(t, err) + defer f.Close() + content, _ := io.ReadAll(f) + assert.Equal(t, "abcd", string(content)) +} diff --git a/models/user/badge.go b/models/user/badge.go index 3ff3530a36..e475ceb748 100644 --- a/models/user/badge.go +++ b/models/user/badge.go @@ -19,7 +19,7 @@ type Badge struct { } // UserBadge represents a user badge -type UserBadge struct { //nolint:revive +type UserBadge struct { //nolint:revive // export stutter ID int64 `xorm:"pk autoincr"` BadgeID int64 UserID int64 `xorm:"INDEX"` diff --git a/models/user/email_address.go b/models/user/email_address.go index 74ba5f617a..2ba6a56450 100644 --- a/models/user/email_address.go +++ b/models/user/email_address.go @@ -8,7 +8,6 @@ import ( "context" "fmt" "net/mail" - "regexp" "strings" "time" @@ -153,8 +152,6 @@ func UpdateEmailAddress(ctx context.Context, email *EmailAddress) error { return err } -var emailRegexp = regexp.MustCompile("^[a-zA-Z0-9.!#$%&'*+-/=?^_`{|}~]*@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$") - // ValidateEmail check if email is a valid & allowed 
address func ValidateEmail(email string) error { if err := validateEmailBasic(email); err != nil { @@ -514,7 +511,7 @@ func validateEmailBasic(email string) error { return ErrEmailInvalid{email} } - if !emailRegexp.MatchString(email) { + if !globalVars().emailRegexp.MatchString(email) { return ErrEmailCharIsNotSupported{email} } @@ -545,3 +542,13 @@ func IsEmailDomainAllowed(email string) bool { return validation.IsEmailDomainListed(setting.Service.EmailDomainAllowList, email) } + +func GetActivatedEmailAddresses(ctx context.Context, uid int64) ([]string, error) { + emails := make([]string, 0, 2) + if err := db.GetEngine(ctx).Table("email_address").Select("email"). + Where("uid=? AND is_activated=?", uid, true).Asc("id"). + Find(&emails); err != nil { + return nil, err + } + return emails, nil +} diff --git a/models/user/email_address_test.go b/models/user/email_address_test.go index d72d873de2..c0666246b0 100644 --- a/models/user/email_address_test.go +++ b/models/user/email_address_test.go @@ -4,6 +4,7 @@ package user_test import ( + "slices" "testing" "code.gitea.io/gitea/models/db" @@ -100,12 +101,7 @@ func TestListEmails(t *testing.T) { assert.Greater(t, count, int64(5)) contains := func(match func(s *user_model.SearchEmailResult) bool) bool { - for _, v := range emails { - if match(v) { - return true - } - } - return false + return slices.ContainsFunc(emails, match) } assert.True(t, contains(func(s *user_model.SearchEmailResult) bool { return s.UID == 18 })) @@ -205,7 +201,7 @@ func TestEmailAddressValidate(t *testing.T) { } for kase, err := range kases { t.Run(kase, func(t *testing.T) { - assert.EqualValues(t, err, user_model.ValidateEmail(kase)) + assert.Equal(t, err, user_model.ValidateEmail(kase)) }) } } diff --git a/models/user/must_change_password.go b/models/user/must_change_password.go index 7eab08de89..686847c7d7 100644 --- a/models/user/must_change_password.go +++ b/models/user/must_change_password.go @@ -34,7 +34,7 @@ func SetMustChangePassword(ctx context.Context, all, mustChangePassword bool, in if !all { include = sliceTrimSpaceDropEmpty(include) if len(include) == 0 { - return 0, util.NewSilentWrapErrorf(util.ErrInvalidArgument, "no users to include provided") + return 0, util.ErrorWrap(util.ErrInvalidArgument, "no users to include provided") } cond = cond.And(builder.In("lower_name", include)) diff --git a/models/user/openid.go b/models/user/openid.go index ee4ecabae0..420c67ca18 100644 --- a/models/user/openid.go +++ b/models/user/openid.go @@ -11,9 +11,6 @@ import ( "code.gitea.io/gitea/modules/util" ) -// ErrOpenIDNotExist openid is not known -var ErrOpenIDNotExist = util.NewNotExistErrorf("OpenID is unknown") - // UserOpenID is the list of all OpenID identities of a user. 
// Since this is a middle table, name it OpenID is not suitable, so we ignore the lint here type UserOpenID struct { //revive:disable-line:exported @@ -99,7 +96,7 @@ func DeleteUserOpenID(ctx context.Context, openid *UserOpenID) (err error) { if err != nil { return err } else if deleted != 1 { - return ErrOpenIDNotExist + return util.NewNotExistErrorf("OpenID is unknown") } return nil } diff --git a/models/user/openid_test.go b/models/user/openid_test.go index 27e6edd1e0..708af9e653 100644 --- a/models/user/openid_test.go +++ b/models/user/openid_test.go @@ -11,6 +11,7 @@ import ( user_model "code.gitea.io/gitea/models/user" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestGetUserOpenIDs(t *testing.T) { @@ -34,30 +35,23 @@ func TestGetUserOpenIDs(t *testing.T) { func TestToggleUserOpenIDVisibility(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) oids, err := user_model.GetUserOpenIDs(db.DefaultContext, int64(2)) - if !assert.NoError(t, err) || !assert.Len(t, oids, 1) { - return - } + require.NoError(t, err) + require.Len(t, oids, 1) assert.True(t, oids[0].Show) err = user_model.ToggleUserOpenIDVisibility(db.DefaultContext, oids[0].ID) - if !assert.NoError(t, err) { - return - } + require.NoError(t, err) oids, err = user_model.GetUserOpenIDs(db.DefaultContext, int64(2)) - if !assert.NoError(t, err) || !assert.Len(t, oids, 1) { - return - } + require.NoError(t, err) + require.Len(t, oids, 1) + assert.False(t, oids[0].Show) err = user_model.ToggleUserOpenIDVisibility(db.DefaultContext, oids[0].ID) - if !assert.NoError(t, err) { - return - } + require.NoError(t, err) oids, err = user_model.GetUserOpenIDs(db.DefaultContext, int64(2)) - if !assert.NoError(t, err) { - return - } + require.NoError(t, err) if assert.Len(t, oids, 1) { assert.True(t, oids[0].Show) } diff --git a/models/user/search.go b/models/user/search.go index 85915f4020..cfd0d011bc 100644 --- a/models/user/search.go +++ b/models/user/search.go @@ -45,13 +45,14 @@ func (opts *SearchUserOptions) toSearchQueryBase(ctx context.Context) *xorm.Sess var cond builder.Cond cond = builder.Eq{"type": opts.Type} if opts.IncludeReserved { - if opts.Type == UserTypeIndividual { + switch opts.Type { + case UserTypeIndividual: cond = cond.Or(builder.Eq{"type": UserTypeUserReserved}).Or( builder.Eq{"type": UserTypeBot}, ).Or( builder.Eq{"type": UserTypeRemoteUser}, ) - } else if opts.Type == UserTypeOrganization { + case UserTypeOrganization: cond = cond.Or(builder.Eq{"type": UserTypeOrganizationReserved}) } } @@ -136,7 +137,7 @@ func (opts *SearchUserOptions) toSearchQueryBase(ctx context.Context) *xorm.Sess // SearchUsers takes options i.e. keyword and part of user name to search, // it returns results in given range and number of total results. 
-func SearchUsers(ctx context.Context, opts *SearchUserOptions) (users []*User, _ int64, _ error) { +func SearchUsers(ctx context.Context, opts SearchUserOptions) (users []*User, _ int64, _ error) { sessCount := opts.toSearchQueryBase(ctx) defer sessCount.Close() count, err := sessCount.Count(new(User)) @@ -151,7 +152,7 @@ func SearchUsers(ctx context.Context, opts *SearchUserOptions) (users []*User, _ sessQuery := opts.toSearchQueryBase(ctx).OrderBy(opts.OrderBy.String()) defer sessQuery.Close() if opts.Page > 0 { - sessQuery = db.SetSessionPagination(sessQuery, opts) + sessQuery = db.SetSessionPagination(sessQuery, &opts) } // the sql may contain JOIN, so we must only select User related columns diff --git a/models/user/setting.go b/models/user/setting.go index b4af0e5ccd..c65afae76c 100644 --- a/models/user/setting.go +++ b/models/user/setting.go @@ -5,6 +5,7 @@ package user import ( "context" + "errors" "fmt" "strings" @@ -114,10 +115,10 @@ func GetUserAllSettings(ctx context.Context, uid int64) (map[string]*Setting, er func validateUserSettingKey(key string) error { if len(key) == 0 { - return fmt.Errorf("setting key must be set") + return errors.New("setting key must be set") } if strings.ToLower(key) != key { - return fmt.Errorf("setting key should be lowercase") + return errors.New("setting key should be lowercase") } return nil } diff --git a/models/user/setting_keys.go b/models/user/setting_keys.go index 3149aae18b..2c2ed078be 100644 --- a/models/user/setting_keys.go +++ b/models/user/setting_keys.go @@ -10,6 +10,7 @@ const ( SettingsKeyDiffWhitespaceBehavior = "diff.whitespace_behaviour" // SettingsKeyShowOutdatedComments is the setting key wether or not to show outdated comments in PRs SettingsKeyShowOutdatedComments = "comment_code.show_outdated" + // UserActivityPubPrivPem is user's private key UserActivityPubPrivPem = "activitypub.priv_pem" // UserActivityPubPubPem is user's public key @@ -18,4 +19,6 @@ const ( SignupIP = "signup.ip" // SignupUserAgent is the user agent that the user signed up with SignupUserAgent = "signup.user_agent" + + SettingsKeyCodeViewShowFileTree = "code_view.show_file_tree" ) diff --git a/models/user/setting_test.go b/models/user/setting_test.go index c607d9fd00..3c199013f3 100644 --- a/models/user/setting_test.go +++ b/models/user/setting_test.go @@ -30,15 +30,15 @@ func TestSettings(t *testing.T) { settings, err := user_model.GetSettings(db.DefaultContext, 99, []string{keyName}) assert.NoError(t, err) assert.Len(t, settings, 1) - assert.EqualValues(t, newSetting.SettingValue, settings[keyName].SettingValue) + assert.Equal(t, newSetting.SettingValue, settings[keyName].SettingValue) settingValue, err := user_model.GetUserSetting(db.DefaultContext, 99, keyName) assert.NoError(t, err) - assert.EqualValues(t, newSetting.SettingValue, settingValue) + assert.Equal(t, newSetting.SettingValue, settingValue) settingValue, err = user_model.GetUserSetting(db.DefaultContext, 99, "no_such") assert.NoError(t, err) - assert.EqualValues(t, "", settingValue) + assert.Empty(t, settingValue) // updated setting updatedSetting := &user_model.Setting{UserID: 99, SettingKey: keyName, SettingValue: "Updated"} @@ -49,7 +49,7 @@ func TestSettings(t *testing.T) { settings, err = user_model.GetUserAllSettings(db.DefaultContext, 99) assert.NoError(t, err) assert.Len(t, settings, 1) - assert.EqualValues(t, updatedSetting.SettingValue, settings[updatedSetting.SettingKey].SettingValue) + assert.Equal(t, updatedSetting.SettingValue, 
settings[updatedSetting.SettingKey].SettingValue) // delete setting err = user_model.DeleteUserSetting(db.DefaultContext, 99, keyName) diff --git a/models/user/user.go b/models/user/user.go index 19879fbcc7..7c871bf575 100644 --- a/models/user/user.go +++ b/models/user/user.go @@ -14,6 +14,7 @@ import ( "path/filepath" "regexp" "strings" + "sync" "time" "unicode" @@ -213,7 +214,7 @@ func (u *User) GetPlaceholderEmail() string { return fmt.Sprintf("%s@%s", u.LowerName, setting.Service.NoReplyAddress) } -// GetEmail returns an noreply email, if the user has set to keep his +// GetEmail returns a noreply email, if the user has set to keep his // email address private, otherwise the primary email address. func (u *User) GetEmail() string { if u.KeepEmailPrivate { @@ -246,19 +247,20 @@ func (u *User) MaxCreationLimit() int { return u.MaxRepoCreation } -// CanCreateRepo returns if user login can create a repository -// NOTE: functions calling this assume a failure due to repository count limit; if new checks are added, those functions should be revised -func (u *User) CanCreateRepo() bool { +// CanCreateRepoIn checks whether the doer(u) can create a repository in the owner +// NOTE: functions calling this assume a failure due to repository count limit; it ONLY checks the repo number LIMIT, if new checks are added, those functions should be revised +func (u *User) CanCreateRepoIn(owner *User) bool { if u.IsAdmin { return true } - if u.MaxRepoCreation <= -1 { - if setting.Repository.MaxCreationLimit <= -1 { + const noLimit = -1 + if owner.MaxRepoCreation == noLimit { + if setting.Repository.MaxCreationLimit == noLimit { return true } - return u.NumRepos < setting.Repository.MaxCreationLimit + return owner.NumRepos < setting.Repository.MaxCreationLimit } - return u.NumRepos < u.MaxRepoCreation + return owner.NumRepos < owner.MaxRepoCreation } // CanCreateOrganization returns true if user can create organisation. @@ -271,13 +273,12 @@ func (u *User) CanEditGitHook() bool { return !setting.DisableGitHooks && (u.IsAdmin || u.AllowGitHook) } -// CanForkRepo returns if user login can fork a repository -// It checks especially that the user can create repos, and potentially more -func (u *User) CanForkRepo() bool { +// CanForkRepoIn ONLY checks repository count limit +func (u *User) CanForkRepoIn(owner *User) bool { if setting.Repository.AllowForkWithoutMaximumLimit { return true } - return u.CanCreateRepo() + return u.CanCreateRepoIn(owner) } // CanImportLocal returns true if user can migrate repository by local path. @@ -384,11 +385,12 @@ func (u *User) ValidatePassword(passwd string) bool { } // IsPasswordSet checks if the password is set or left empty +// TODO: It's better to clarify the "password" behavior for different types (individual, bot) func (u *User) IsPasswordSet() bool { - return len(u.Passwd) != 0 + return u.Passwd != "" } -// IsOrganization returns true if user is actually a organization. +// IsOrganization returns true if user is actually an organization. 
func (u *User) IsOrganization() bool { return u.Type == UserTypeOrganization } @@ -398,13 +400,14 @@ func (u *User) IsIndividual() bool { return u.Type == UserTypeIndividual } -func (u *User) IsUser() bool { - return u.Type == UserTypeIndividual || u.Type == UserTypeBot +// IsTypeBot returns whether the user is of type bot +func (u *User) IsTypeBot() bool { + return u.Type == UserTypeBot } -// IsBot returns whether or not the user is of type bot -func (u *User) IsBot() bool { - return u.Type == UserTypeBot +// IsTokenAccessAllowed returns whether the user is an individual or a bot (which allows for token access) +func (u *User) IsTokenAccessAllowed() bool { + return u.Type == UserTypeIndividual || u.Type == UserTypeBot } // DisplayName returns full name if it's not empty, @@ -417,19 +420,9 @@ func (u *User) DisplayName() string { return u.Name } -var emailToReplacer = strings.NewReplacer( - "\n", "", - "\r", "", - "<", "", - ">", "", - ",", "", - ":", "", - ";", "", -) - // EmailTo returns a string suitable to be put into a e-mail `To:` header. func (u *User) EmailTo() string { - sanitizedDisplayName := emailToReplacer.Replace(u.DisplayName()) + sanitizedDisplayName := globalVars().emailToReplacer.Replace(u.DisplayName()) // should be an edge case but nice to have if sanitizedDisplayName == u.Email { @@ -502,10 +495,10 @@ func (u *User) IsMailable() bool { return u.IsActive } -// IsUserExist checks if given user name exist, -// the user name should be noncased unique. +// IsUserExist checks if given username exist, +// the username should be non-cased unique. // If uid is presented, then check will rule out that one, -// it is used when update a user name in settings page. +// it is used when update a username in settings page. func IsUserExist(ctx context.Context, uid int64, name string) (bool, error) { if len(name) == 0 { return false, nil @@ -515,7 +508,7 @@ func IsUserExist(ctx context.Context, uid int64, name string) (bool, error) { Get(&User{LowerName: strings.ToLower(name)}) } -// Note: As of the beginning of 2022, it is recommended to use at least +// SaltByteLength as of the beginning of 2022, it is recommended to use at least // 64 bits of salt, but NIST is already recommending to use to 128 bits. // (16 bytes = 16 * 8 = 128 bits) const SaltByteLength = 16 @@ -526,28 +519,58 @@ func GetUserSalt() (string, error) { if err != nil { return "", err } - // Returns a 32 bytes long string. + // Returns a 32-byte long string. 
return hex.EncodeToString(rBytes), nil } -// Note: The set of characters here can safely expand without a breaking change, -// but characters removed from this set can cause user account linking to break -var ( - customCharsReplacement = strings.NewReplacer("Æ", "AE") - removeCharsRE = regexp.MustCompile("['`´]") - transformDiacritics = transform.Chain(norm.NFD, runes.Remove(runes.In(unicode.Mn)), norm.NFC) - replaceCharsHyphenRE = regexp.MustCompile(`[\s~+]`) -) +type globalVarsStruct struct { + customCharsReplacement *strings.Replacer + removeCharsRE *regexp.Regexp + transformDiacritics transform.Transformer + replaceCharsHyphenRE *regexp.Regexp + emailToReplacer *strings.Replacer + emailRegexp *regexp.Regexp + systemUserNewFuncs map[int64]func() *User +} + +var globalVars = sync.OnceValue(func() *globalVarsStruct { + return &globalVarsStruct{ + // Note: The set of characters here can safely expand without a breaking change, + // but characters removed from this set can cause user account linking to break + customCharsReplacement: strings.NewReplacer("Æ", "AE"), + + removeCharsRE: regexp.MustCompile("['`´]"), + transformDiacritics: transform.Chain(norm.NFD, runes.Remove(runes.In(unicode.Mn)), norm.NFC), + replaceCharsHyphenRE: regexp.MustCompile(`[\s~+]`), + + emailToReplacer: strings.NewReplacer( + "\n", "", + "\r", "", + "<", "", + ">", "", + ",", "", + ":", "", + ";", "", + ), + emailRegexp: regexp.MustCompile("^[a-zA-Z0-9.!#$%&'*+-/=?^_`{|}~]*@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$"), + + systemUserNewFuncs: map[int64]func() *User{ + GhostUserID: NewGhostUser, + ActionsUserID: NewActionsUser, + }, + } +}) // NormalizeUserName only takes the name part if it is an email address, transforms it diacritics to ASCII characters. // It returns a string with the single-quotes removed, and any other non-supported username characters are replaced with a `-` character func NormalizeUserName(s string) (string, error) { + vars := globalVars() s, _, _ = strings.Cut(s, "@") - strDiacriticsRemoved, n, err := transform.String(transformDiacritics, customCharsReplacement.Replace(s)) + strDiacriticsRemoved, n, err := transform.String(vars.transformDiacritics, vars.customCharsReplacement.Replace(s)) if err != nil { return "", fmt.Errorf("failed to normalize the string of provided username %q at position %d", s, n) } - return replaceCharsHyphenRE.ReplaceAllLiteralString(removeCharsRE.ReplaceAllLiteralString(strDiacriticsRemoved, ""), "-"), nil + return vars.replaceCharsHyphenRE.ReplaceAllLiteralString(vars.removeCharsRE.ReplaceAllLiteralString(strDiacriticsRemoved, ""), "-"), nil } var ( @@ -805,6 +828,21 @@ func IsLastAdminUser(ctx context.Context, user *User) bool { type CountUserFilter struct { LastLoginSince *int64 IsAdmin optional.Option[bool] + IsActive optional.Option[bool] +} + +// HasUsers checks whether there are any users in the database, or only one user exists. +func HasUsers(ctx context.Context) (ret struct { + HasAnyUser, HasOnlyOneUser bool +}, err error, +) { + res, err := db.GetEngine(ctx).Table(&User{}).Cols("id").Limit(2).Query() + if err != nil { + return ret, fmt.Errorf("error checking user existence: %w", err) + } + ret.HasAnyUser = len(res) != 0 + ret.HasOnlyOneUser = len(res) == 1 + return ret, nil } // CountUsers returns number of users. 
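// A minimal usage sketch for the new HasUsers helper above. The wrapper function,
// its name and the "first sign-up becomes admin" interpretation are assumptions for
// illustration only; only the HasUsers call itself comes from this patch.

package example

import (
	"context"

	user_model "code.gitea.io/gitea/models/user"
)

// needsFirstAdmin reports whether no account exists yet, which a caller might use
// to decide whether the next registered user should be granted admin rights.
func needsFirstAdmin(ctx context.Context) (bool, error) {
	ret, err := user_model.HasUsers(ctx)
	if err != nil {
		return false, err
	}
	// ret.HasOnlyOneUser is also available for "exactly one user" checks.
	return !ret.HasAnyUser, nil
}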
@@ -825,6 +863,10 @@ func countUsers(ctx context.Context, opts *CountUserFilter) int64 { if opts.IsAdmin.Has() { cond = cond.And(builder.Eq{"is_admin": opts.IsAdmin.Value()}) } + + if opts.IsActive.Has() { + cond = cond.And(builder.Eq{"is_active": opts.IsActive.Value()}) + } } count, err := sess.Where(cond).Count(new(User)) @@ -963,30 +1005,28 @@ func GetUserByIDs(ctx context.Context, ids []int64) ([]*User, error) { return users, err } -// GetPossibleUserByID returns the user if id > 0 or return system usrs if id < 0 +// GetPossibleUserByID returns the user if id > 0 or returns system user if id < 0 func GetPossibleUserByID(ctx context.Context, id int64) (*User, error) { - switch id { - case GhostUserID: - return NewGhostUser(), nil - case ActionsUserID: - return NewActionsUser(), nil - case 0: + if id < 0 { + if newFunc, ok := globalVars().systemUserNewFuncs[id]; ok { + return newFunc(), nil + } + return nil, ErrUserNotExist{UID: id} + } else if id == 0 { return nil, ErrUserNotExist{} - default: - return GetUserByID(ctx, id) } + return GetUserByID(ctx, id) } -// GetPossibleUserByIDs returns the users if id > 0 or return system users if id < 0 +// GetPossibleUserByIDs returns the users if id > 0 or returns system users if id < 0 func GetPossibleUserByIDs(ctx context.Context, ids []int64) ([]*User, error) { uniqueIDs := container.SetOf(ids...) users := make([]*User, 0, len(ids)) _ = uniqueIDs.Remove(0) - if uniqueIDs.Remove(GhostUserID) { - users = append(users, NewGhostUser()) - } - if uniqueIDs.Remove(ActionsUserID) { - users = append(users, NewActionsUser()) + for systemUID, newFunc := range globalVars().systemUserNewFuncs { + if uniqueIDs.Remove(systemUID) { + users = append(users, newFunc()) + } } res, err := GetUserByIDs(ctx, uniqueIDs.Values()) if err != nil { @@ -996,7 +1036,7 @@ func GetPossibleUserByIDs(ctx context.Context, ids []int64) ([]*User, error) { return users, nil } -// GetUserByNameCtx returns user by given name. +// GetUserByName returns user by given name. func GetUserByName(ctx context.Context, name string) (*User, error) { if len(name) == 0 { return nil, ErrUserNotExist{Name: name} @@ -1027,8 +1067,8 @@ func GetUserEmailsByNames(ctx context.Context, names []string) []string { return mails } -// GetMaileableUsersByIDs gets users from ids, but only if they can receive mails -func GetMaileableUsersByIDs(ctx context.Context, ids []int64, isMention bool) ([]*User, error) { +// GetMailableUsersByIDs gets users from ids, but only if they can receive mails +func GetMailableUsersByIDs(ctx context.Context, ids []int64, isMention bool) ([]*User, error) { if len(ids) == 0 { return nil, nil } @@ -1053,17 +1093,6 @@ func GetMaileableUsersByIDs(ctx context.Context, ids []int64, isMention bool) ([ Find(&ous) } -// GetUserNamesByIDs returns usernames for all resolved users from a list of Ids. -func GetUserNamesByIDs(ctx context.Context, ids []int64) ([]string, error) { - unames := make([]string, 0, len(ids)) - err := db.GetEngine(ctx).In("id", ids). - Table("user"). - Asc("name"). - Cols("name"). - Find(&unames) - return unames, err -} - // GetUserNameByID returns username for the id func GetUserNameByID(ctx context.Context, id int64) (string, error) { var name string @@ -1119,28 +1148,96 @@ func ValidateCommitWithEmail(ctx context.Context, c *git.Commit) *User { } // ValidateCommitsWithEmails checks if authors' e-mails of commits are corresponding to users. 
-func ValidateCommitsWithEmails(ctx context.Context, oldCommits []*git.Commit) []*UserCommit { +func ValidateCommitsWithEmails(ctx context.Context, oldCommits []*git.Commit) ([]*UserCommit, error) { var ( - emails = make(map[string]*User) newCommits = make([]*UserCommit, 0, len(oldCommits)) + emailSet = make(container.Set[string]) ) for _, c := range oldCommits { - var u *User if c.Author != nil { - if v, ok := emails[c.Author.Email]; !ok { - u, _ = GetUserByEmail(ctx, c.Author.Email) - emails[c.Author.Email] = u - } else { - u = v - } + emailSet.Add(c.Author.Email) } + } + + emailUserMap, err := GetUsersByEmails(ctx, emailSet.Values()) + if err != nil { + return nil, err + } + for _, c := range oldCommits { + user := emailUserMap.GetByEmail(c.Author.Email) // FIXME: why ValidateCommitsWithEmails uses "Author", but ParseCommitsWithSignature uses "Committer"? + if user == nil { + user = &User{ + Name: c.Author.Name, + Email: c.Author.Email, + } + } newCommits = append(newCommits, &UserCommit{ - User: u, + User: user, Commit: c, }) } - return newCommits + return newCommits, nil +} + +type EmailUserMap struct { + m map[string]*User +} + +func (eum *EmailUserMap) GetByEmail(email string) *User { + return eum.m[strings.ToLower(email)] +} + +func GetUsersByEmails(ctx context.Context, emails []string) (*EmailUserMap, error) { + if len(emails) == 0 { + return nil, nil + } + + needCheckEmails := make(container.Set[string]) + needCheckUserNames := make(container.Set[string]) + for _, email := range emails { + if strings.HasSuffix(email, "@"+setting.Service.NoReplyAddress) { + username := strings.TrimSuffix(email, "@"+setting.Service.NoReplyAddress) + needCheckUserNames.Add(strings.ToLower(username)) + } else { + needCheckEmails.Add(strings.ToLower(email)) + } + } + + emailAddresses := make([]*EmailAddress, 0, len(needCheckEmails)) + if err := db.GetEngine(ctx).In("lower_email", needCheckEmails.Values()). + And("is_activated=?", true). + Find(&emailAddresses); err != nil { + return nil, err + } + userIDs := make(container.Set[int64]) + for _, email := range emailAddresses { + userIDs.Add(email.UID) + } + results := make(map[string]*User, len(emails)) + + if len(userIDs) > 0 { + users, err := GetUsersMapByIDs(ctx, userIDs.Values()) + if err != nil { + return nil, err + } + + for _, email := range emailAddresses { + user := users[email.UID] + if user != nil { + results[email.LowerEmail] = user + } + } + } + + users := make(map[int64]*User, len(needCheckUserNames)) + if err := db.GetEngine(ctx).In("lower_name", needCheckUserNames.Values()).Find(&users); err != nil { + return nil, err + } + for _, user := range users { + results[strings.ToLower(user.GetPlaceholderEmail())] = user + } + return &EmailUserMap{results}, nil } // GetUserByEmail returns the user object by given e-mail if exists. 
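// A minimal sketch of batch-resolving author e-mails with the new
// GetUsersByEmails / EmailUserMap API introduced above. The surrounding function and
// its name are assumptions for illustration; unknown addresses resolve to nil here,
// whereas ValidateCommitsWithEmails above substitutes a placeholder User instead.

package example

import (
	"context"

	user_model "code.gitea.io/gitea/models/user"
)

// resolveAuthors maps each e-mail to the matching user, or nil when no activated
// address (or no-reply placeholder address) belongs to it.
func resolveAuthors(ctx context.Context, emails []string) (map[string]*user_model.User, error) {
	if len(emails) == 0 {
		return nil, nil // GetUsersByEmails also returns nil for an empty input
	}
	emailUserMap, err := user_model.GetUsersByEmails(ctx, emails)
	if err != nil {
		return nil, err
	}
	resolved := make(map[string]*user_model.User, len(emails))
	for _, email := range emails {
		resolved[email] = emailUserMap.GetByEmail(email)
	}
	return resolved, nil
}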
@@ -1161,8 +1258,8 @@ func GetUserByEmail(ctx context.Context, email string) (*User, error) { } // Finally, if email address is the protected email address: - if strings.HasSuffix(email, fmt.Sprintf("@%s", setting.Service.NoReplyAddress)) { - username := strings.TrimSuffix(email, fmt.Sprintf("@%s", setting.Service.NoReplyAddress)) + if strings.HasSuffix(email, "@"+setting.Service.NoReplyAddress) { + username := strings.TrimSuffix(email, "@"+setting.Service.NoReplyAddress) user := &User{} has, err := db.GetEngine(ctx).Where("lower_name=?", username).Get(user) if err != nil { diff --git a/models/user/user_list.go b/models/user/user_list.go new file mode 100644 index 0000000000..1b6a27dd86 --- /dev/null +++ b/models/user/user_list.go @@ -0,0 +1,48 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package user + +import ( + "context" + + "code.gitea.io/gitea/models/db" +) + +func GetUsersMapByIDs(ctx context.Context, userIDs []int64) (map[int64]*User, error) { + userMaps := make(map[int64]*User, len(userIDs)) + if len(userIDs) == 0 { + return userMaps, nil + } + + left := len(userIDs) + for left > 0 { + limit := min(left, db.DefaultMaxInSize) + err := db.GetEngine(ctx). + In("id", userIDs[:limit]). + Find(&userMaps) + if err != nil { + return nil, err + } + left -= limit + userIDs = userIDs[limit:] + } + return userMaps, nil +} + +func GetPossibleUserFromMap(userID int64, usererMaps map[int64]*User) *User { + switch userID { + case GhostUserID: + return NewGhostUser() + case ActionsUserID: + return NewActionsUser() + case 0: + return nil + default: + user, ok := usererMaps[userID] + if !ok { + return NewGhostUser() + } + return user + } +} diff --git a/models/user/user_system.go b/models/user/user_system.go index 612cdb2cae..e07274d291 100644 --- a/models/user/user_system.go +++ b/models/user/user_system.go @@ -10,9 +10,8 @@ import ( ) const ( - GhostUserID = -1 - GhostUserName = "Ghost" - GhostUserLowerName = "ghost" + GhostUserID int64 = -1 + GhostUserName = "Ghost" ) // NewGhostUser creates and returns a fake user for someone has deleted their account. @@ -20,10 +19,14 @@ func NewGhostUser() *User { return &User{ ID: GhostUserID, Name: GhostUserName, - LowerName: GhostUserLowerName, + LowerName: strings.ToLower(GhostUserName), } } +func IsGhostUserName(name string) bool { + return strings.EqualFold(name, GhostUserName) +} + // IsGhost check if user is fake user for a deleted account func (u *User) IsGhost() bool { if u == nil { @@ -32,22 +35,16 @@ func (u *User) IsGhost() bool { return u.ID == GhostUserID && u.Name == GhostUserName } -// NewReplaceUser creates and returns a fake user for external user -func NewReplaceUser(name string) *User { - return &User{ - ID: 0, - Name: name, - LowerName: strings.ToLower(name), - } -} - const ( - ActionsUserID = -2 - ActionsUserName = "gitea-actions" - ActionsFullName = "Gitea Actions" - ActionsEmail = "teabot@gitea.io" + ActionsUserID int64 = -2 + ActionsUserName = "gitea-actions" + ActionsUserEmail = "teabot@gitea.io" ) +func IsGiteaActionsUserName(name string) bool { + return strings.EqualFold(name, ActionsUserName) +} + // NewActionsUser creates and returns a fake user for running the actions. 
func NewActionsUser() *User { return &User{ @@ -55,16 +52,26 @@ func NewActionsUser() *User { Name: ActionsUserName, LowerName: ActionsUserName, IsActive: true, - FullName: ActionsFullName, - Email: ActionsEmail, + FullName: "Gitea Actions", + Email: ActionsUserEmail, KeepEmailPrivate: true, LoginName: ActionsUserName, - Type: UserTypeIndividual, + Type: UserTypeBot, AllowCreateOrganization: true, Visibility: structs.VisibleTypePublic, } } -func (u *User) IsActions() bool { +func (u *User) IsGiteaActions() bool { return u != nil && u.ID == ActionsUserID } + +func GetSystemUserByName(name string) *User { + if IsGhostUserName(name) { + return NewGhostUser() + } + if IsGiteaActionsUserName(name) { + return NewActionsUser() + } + return nil +} diff --git a/models/user/user_system_test.go b/models/user/user_system_test.go new file mode 100644 index 0000000000..97768b509b --- /dev/null +++ b/models/user/user_system_test.go @@ -0,0 +1,32 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package user + +import ( + "testing" + + "code.gitea.io/gitea/models/db" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestSystemUser(t *testing.T) { + u, err := GetPossibleUserByID(db.DefaultContext, -1) + require.NoError(t, err) + assert.Equal(t, "Ghost", u.Name) + assert.Equal(t, "ghost", u.LowerName) + assert.True(t, u.IsGhost()) + assert.True(t, IsGhostUserName("gHost")) + + u, err = GetPossibleUserByID(db.DefaultContext, -2) + require.NoError(t, err) + assert.Equal(t, "gitea-actions", u.Name) + assert.Equal(t, "gitea-actions", u.LowerName) + assert.True(t, u.IsGiteaActions()) + assert.True(t, IsGiteaActionsUserName("Gitea-actionS")) + + _, err = GetPossibleUserByID(db.DefaultContext, -3) + require.Error(t, err) +} diff --git a/models/user/user_test.go b/models/user/user_test.go index 7ebc64f69e..a2597ba3f5 100644 --- a/models/user/user_test.go +++ b/models/user/user_test.go @@ -4,7 +4,6 @@ package user_test import ( - "context" "crypto/rand" "fmt" "strings" @@ -20,11 +19,28 @@ import ( "code.gitea.io/gitea/modules/optional" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/test" "code.gitea.io/gitea/modules/timeutil" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) +func TestIsUsableUsername(t *testing.T) { + assert.NoError(t, user_model.IsUsableUsername("a")) + assert.NoError(t, user_model.IsUsableUsername("foo.wiki")) + assert.NoError(t, user_model.IsUsableUsername("foo.git")) + + assert.Error(t, user_model.IsUsableUsername("a--b")) + assert.Error(t, user_model.IsUsableUsername("-1_.")) + assert.Error(t, user_model.IsUsableUsername(".profile")) + assert.Error(t, user_model.IsUsableUsername("-")) + assert.Error(t, user_model.IsUsableUsername("🌞")) + assert.Error(t, user_model.IsUsableUsername("the..repo")) + assert.Error(t, user_model.IsUsableUsername("foo.RSS")) + assert.Error(t, user_model.IsUsableUsername("foo.PnG")) +} + func TestOAuth2Application_LoadUser(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) app := unittest.AssertExistsAndLoadBean(t, &auth.OAuth2Application{ID: 1}) @@ -33,14 +49,43 @@ func TestOAuth2Application_LoadUser(t *testing.T) { assert.NotNil(t, user) } -func TestGetUserEmailsByNames(t *testing.T) { +func TestUserEmails(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) - - // ignore none active user email - assert.ElementsMatch(t, []string{"user8@example.com"}, 
user_model.GetUserEmailsByNames(db.DefaultContext, []string{"user8", "user9"})) - assert.ElementsMatch(t, []string{"user8@example.com", "user5@example.com"}, user_model.GetUserEmailsByNames(db.DefaultContext, []string{"user8", "user5"})) - - assert.ElementsMatch(t, []string{"user8@example.com"}, user_model.GetUserEmailsByNames(db.DefaultContext, []string{"user8", "org7"})) + t.Run("GetUserEmailsByNames", func(t *testing.T) { + // ignore none active user email + assert.ElementsMatch(t, []string{"user8@example.com"}, user_model.GetUserEmailsByNames(db.DefaultContext, []string{"user8", "user9"})) + assert.ElementsMatch(t, []string{"user8@example.com", "user5@example.com"}, user_model.GetUserEmailsByNames(db.DefaultContext, []string{"user8", "user5"})) + assert.ElementsMatch(t, []string{"user8@example.com"}, user_model.GetUserEmailsByNames(db.DefaultContext, []string{"user8", "org7"})) + }) + t.Run("GetUsersByEmails", func(t *testing.T) { + defer test.MockVariableValue(&setting.Service.NoReplyAddress, "NoReply.gitea.internal")() + testGetUserByEmail := func(t *testing.T, email string, uid int64) { + m, err := user_model.GetUsersByEmails(db.DefaultContext, []string{email}) + require.NoError(t, err) + user := m.GetByEmail(email) + if uid == 0 { + require.Nil(t, user) + return + } + require.NotNil(t, user) + assert.Equal(t, uid, user.ID) + } + cases := []struct { + Email string + UID int64 + }{ + {"UseR1@example.com", 1}, + {"user1-2@example.COM", 1}, + {"USER2@" + setting.Service.NoReplyAddress, 2}, + {"user4@example.com", 4}, + {"no-such", 0}, + } + for _, c := range cases { + t.Run(c.Email, func(t *testing.T) { + testGetUserByEmail(t, c.Email, c.UID) + }) + } + }) } func TestCanCreateOrganization(t *testing.T) { @@ -63,73 +108,73 @@ func TestCanCreateOrganization(t *testing.T) { func TestSearchUsers(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) - testSuccess := func(opts *user_model.SearchUserOptions, expectedUserOrOrgIDs []int64) { + testSuccess := func(opts user_model.SearchUserOptions, expectedUserOrOrgIDs []int64) { users, _, err := user_model.SearchUsers(db.DefaultContext, opts) assert.NoError(t, err) cassText := fmt.Sprintf("ids: %v, opts: %v", expectedUserOrOrgIDs, opts) if assert.Len(t, users, len(expectedUserOrOrgIDs), "case: %s", cassText) { for i, expectedID := range expectedUserOrOrgIDs { - assert.EqualValues(t, expectedID, users[i].ID, "case: %s", cassText) + assert.Equal(t, expectedID, users[i].ID, "case: %s", cassText) } } } // test orgs - testOrgSuccess := func(opts *user_model.SearchUserOptions, expectedOrgIDs []int64) { + testOrgSuccess := func(opts user_model.SearchUserOptions, expectedOrgIDs []int64) { opts.Type = user_model.UserTypeOrganization testSuccess(opts, expectedOrgIDs) } - testOrgSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1, PageSize: 2}}, + testOrgSuccess(user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1, PageSize: 2}}, []int64{3, 6}) - testOrgSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 2, PageSize: 2}}, + testOrgSuccess(user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 2, PageSize: 2}}, []int64{7, 17}) - testOrgSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 3, PageSize: 2}}, + testOrgSuccess(user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 3, PageSize: 2}}, []int64{19, 25}) - 
testOrgSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 4, PageSize: 2}}, + testOrgSuccess(user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 4, PageSize: 2}}, []int64{26, 41}) - testOrgSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 5, PageSize: 2}}, + testOrgSuccess(user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 5, PageSize: 2}}, []int64{42}) - testOrgSuccess(&user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 6, PageSize: 2}}, + testOrgSuccess(user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 6, PageSize: 2}}, []int64{}) // test users - testUserSuccess := func(opts *user_model.SearchUserOptions, expectedUserIDs []int64) { + testUserSuccess := func(opts user_model.SearchUserOptions, expectedUserIDs []int64) { opts.Type = user_model.UserTypeIndividual testSuccess(opts, expectedUserIDs) } - testUserSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1}}, + testUserSuccess(user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1}}, []int64{1, 2, 4, 5, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 20, 21, 24, 27, 28, 29, 30, 32, 34, 37, 38, 39, 40}) - testUserSuccess(&user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsActive: optional.Some(false)}, + testUserSuccess(user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsActive: optional.Some(false)}, []int64{9}) - testUserSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1}, IsActive: optional.Some(true)}, + testUserSuccess(user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1}, IsActive: optional.Some(true)}, []int64{1, 2, 4, 5, 8, 10, 11, 12, 13, 14, 15, 16, 18, 20, 21, 24, 27, 28, 29, 30, 32, 34, 37, 38, 39, 40}) - testUserSuccess(&user_model.SearchUserOptions{Keyword: "user1", OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1}, IsActive: optional.Some(true)}, + testUserSuccess(user_model.SearchUserOptions{Keyword: "user1", OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1}, IsActive: optional.Some(true)}, []int64{1, 10, 11, 12, 13, 14, 15, 16, 18}) // order by name asc default - testUserSuccess(&user_model.SearchUserOptions{Keyword: "user1", ListOptions: db.ListOptions{Page: 1}, IsActive: optional.Some(true)}, + testUserSuccess(user_model.SearchUserOptions{Keyword: "user1", ListOptions: db.ListOptions{Page: 1}, IsActive: optional.Some(true)}, []int64{1, 10, 11, 12, 13, 14, 15, 16, 18}) - testUserSuccess(&user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsAdmin: optional.Some(true)}, + testUserSuccess(user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsAdmin: optional.Some(true)}, []int64{1}) - testUserSuccess(&user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsRestricted: optional.Some(true)}, + testUserSuccess(user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsRestricted: optional.Some(true)}, []int64{29}) - testUserSuccess(&user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsProhibitLogin: optional.Some(true)}, + testUserSuccess(user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsProhibitLogin: optional.Some(true)}, []int64{37}) - testUserSuccess(&user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsTwoFactorEnabled: optional.Some(true)}, + 
testUserSuccess(user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsTwoFactorEnabled: optional.Some(true)}, []int64{24}) } @@ -159,9 +204,9 @@ func TestHashPasswordDeterministic(t *testing.T) { b := make([]byte, 16) u := &user_model.User{} algos := hash.RecommendedHashAlgorithms - for j := 0; j < len(algos); j++ { + for j := range algos { u.PasswdHashAlgo = algos[j] - for i := 0; i < 50; i++ { + for range 50 { // generate a random password rand.Read(b) pass := string(b) @@ -186,7 +231,7 @@ func BenchmarkHashPassword(b *testing.B) { pass := "password1337" u := &user_model.User{Passwd: pass} b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { u.SetPassword(pass) } } @@ -264,7 +309,7 @@ func TestCreateUserCustomTimestamps(t *testing.T) { err := user_model.CreateUser(db.DefaultContext, user, &user_model.Meta{}) assert.NoError(t, err) - fetched, err := user_model.GetUserByID(context.Background(), user.ID) + fetched, err := user_model.GetUserByID(t.Context(), user.ID) assert.NoError(t, err) assert.Equal(t, creationTimestamp, fetched.CreatedUnix) assert.Equal(t, creationTimestamp, fetched.UpdatedUnix) @@ -291,7 +336,7 @@ func TestCreateUserWithoutCustomTimestamps(t *testing.T) { timestampEnd := time.Now().Unix() - fetched, err := user_model.GetUserByID(context.Background(), user.ID) + fetched, err := user_model.GetUserByID(t.Context(), user.ID) assert.NoError(t, err) assert.LessOrEqual(t, timestampStart, fetched.CreatedUnix) @@ -318,14 +363,14 @@ func TestGetUserIDsByNames(t *testing.T) { func TestGetMaileableUsersByIDs(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) - results, err := user_model.GetMaileableUsersByIDs(db.DefaultContext, []int64{1, 4}, false) + results, err := user_model.GetMailableUsersByIDs(db.DefaultContext, []int64{1, 4}, false) assert.NoError(t, err) assert.Len(t, results, 1) if len(results) > 1 { assert.Equal(t, 1, results[0].ID) } - results, err = user_model.GetMaileableUsersByIDs(db.DefaultContext, []int64{1, 4}, true) + results, err = user_model.GetMailableUsersByIDs(db.DefaultContext, []int64{1, 4}, true) assert.NoError(t, err) assert.Len(t, results, 2) if len(results) > 2 { @@ -488,18 +533,15 @@ func TestIsUserVisibleToViewer(t *testing.T) { } func Test_ValidateUser(t *testing.T) { - oldSetting := setting.Service.AllowedUserVisibilityModesSlice - defer func() { - setting.Service.AllowedUserVisibilityModesSlice = oldSetting - }() - setting.Service.AllowedUserVisibilityModesSlice = []bool{true, false, true} + defer test.MockVariableValue(&setting.Service.AllowedUserVisibilityModesSlice, []bool{true, false, true})() + kases := map[*user_model.User]bool{ {ID: 1, Visibility: structs.VisibleTypePublic}: true, {ID: 2, Visibility: structs.VisibleTypeLimited}: false, {ID: 2, Visibility: structs.VisibleTypePrivate}: true, } for kase, expected := range kases { - assert.EqualValues(t, expected, nil == user_model.ValidateUser(kase), "case: %+v", kase) + assert.Equal(t, expected, nil == user_model.ValidateUser(kase), "case: %+v", kase) } } @@ -523,7 +565,7 @@ func Test_NormalizeUserFromEmail(t *testing.T) { for _, testCase := range testCases { normalizedName, err := user_model.NormalizeUserName(testCase.Input) assert.NoError(t, err) - assert.EqualValues(t, testCase.Expected, normalizedName) + assert.Equal(t, testCase.Expected, normalizedName) if testCase.IsNormalizedValid { assert.NoError(t, user_model.IsUsableUsername(normalizedName)) } else { @@ -550,7 +592,7 @@ func TestEmailTo(t *testing.T) { for _, testCase := range testCases { 
 		t.Run(testCase.result, func(t *testing.T) {
 			testUser := &user_model.User{FullName: testCase.fullName, Email: testCase.mail}
-			assert.EqualValues(t, testCase.result, testUser.EmailTo())
+			assert.Equal(t, testCase.result, testUser.EmailTo())
 		})
 	}
 }
@@ -561,12 +603,7 @@ func TestDisabledUserFeatures(t *testing.T) {
 	testValues := container.SetOf(setting.UserFeatureDeletion,
 		setting.UserFeatureManageSSHKeys,
 		setting.UserFeatureManageGPGKeys)
-
-	oldSetting := setting.Admin.ExternalUserDisableFeatures
-	defer func() {
-		setting.Admin.ExternalUserDisableFeatures = oldSetting
-	}()
-	setting.Admin.ExternalUserDisableFeatures = testValues
+	defer test.MockVariableValue(&setting.Admin.ExternalUserDisableFeatures, testValues)()
 
 	user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
 
@@ -602,3 +639,37 @@ func TestGetInactiveUsers(t *testing.T) {
 	assert.NoError(t, err)
 	assert.Empty(t, users)
 }
+
+func TestCanCreateRepo(t *testing.T) {
+	defer test.MockVariableValue(&setting.Repository.MaxCreationLimit)()
+	const noLimit = -1
+	doerNormal := &user_model.User{}
+	doerAdmin := &user_model.User{IsAdmin: true}
+	t.Run("NoGlobalLimit", func(t *testing.T) {
+		setting.Repository.MaxCreationLimit = noLimit
+
+		assert.True(t, doerNormal.CanCreateRepoIn(&user_model.User{NumRepos: 10, MaxRepoCreation: noLimit}))
+		assert.False(t, doerNormal.CanCreateRepoIn(&user_model.User{NumRepos: 10, MaxRepoCreation: 0}))
+		assert.True(t, doerNormal.CanCreateRepoIn(&user_model.User{NumRepos: 10, MaxRepoCreation: 100}))
+
+		assert.True(t, doerAdmin.CanCreateRepoIn(&user_model.User{NumRepos: 10, MaxRepoCreation: noLimit}))
+		assert.True(t, doerAdmin.CanCreateRepoIn(&user_model.User{NumRepos: 10, MaxRepoCreation: 0}))
+		assert.True(t, doerAdmin.CanCreateRepoIn(&user_model.User{NumRepos: 10, MaxRepoCreation: 100}))
+	})
+
+	t.Run("GlobalLimit50", func(t *testing.T) {
+		setting.Repository.MaxCreationLimit = 50
+
+		assert.True(t, doerNormal.CanCreateRepoIn(&user_model.User{NumRepos: 10, MaxRepoCreation: noLimit}))
+		assert.False(t, doerNormal.CanCreateRepoIn(&user_model.User{NumRepos: 60, MaxRepoCreation: noLimit})) // limited by global limit
+		assert.False(t, doerNormal.CanCreateRepoIn(&user_model.User{NumRepos: 10, MaxRepoCreation: 0}))
+		assert.True(t, doerNormal.CanCreateRepoIn(&user_model.User{NumRepos: 10, MaxRepoCreation: 100}))
+		assert.True(t, doerNormal.CanCreateRepoIn(&user_model.User{NumRepos: 60, MaxRepoCreation: 100}))
+
+		assert.True(t, doerAdmin.CanCreateRepoIn(&user_model.User{NumRepos: 10, MaxRepoCreation: noLimit}))
+		assert.True(t, doerAdmin.CanCreateRepoIn(&user_model.User{NumRepos: 60, MaxRepoCreation: noLimit}))
+		assert.True(t, doerAdmin.CanCreateRepoIn(&user_model.User{NumRepos: 10, MaxRepoCreation: 0}))
+		assert.True(t, doerAdmin.CanCreateRepoIn(&user_model.User{NumRepos: 10, MaxRepoCreation: 100}))
+		assert.True(t, doerAdmin.CanCreateRepoIn(&user_model.User{NumRepos: 60, MaxRepoCreation: 100}))
+	})
+}
diff --git a/models/webhook/hooktask.go b/models/webhook/hooktask.go
index ff3fdbadb2..96ec11e43f 100644
--- a/models/webhook/hooktask.go
+++ b/models/webhook/hooktask.go
@@ -198,7 +198,8 @@ func MarkTaskDelivered(ctx context.Context, task *HookTask) (bool, error) {
 func CleanupHookTaskTable(ctx context.Context, cleanupType HookTaskCleanupType, olderThan time.Duration, numberToKeep int) error {
 	log.Trace("Doing: CleanupHookTaskTable")
 
-	if cleanupType == OlderThan {
+	switch cleanupType {
+	case OlderThan:
 		deleteOlderThan := time.Now().Add(-olderThan).UnixNano()
 		deletes, err := db.GetEngine(ctx).
 			Where("is_delivered = ? and delivered < ?", true, deleteOlderThan).
@@ -207,7 +208,7 @@ func CleanupHookTaskTable(ctx context.Context, cleanupType HookTaskCleanupType,
 			return err
 		}
 		log.Trace("Deleted %d rows from hook_task", deletes)
-	} else if cleanupType == PerWebhook {
+	case PerWebhook:
 		hookIDs := make([]int64, 0, 10)
 		err := db.GetEngine(ctx).
 			Table("webhook").
diff --git a/models/webhook/webhook.go b/models/webhook/webhook.go
index 894357e36a..b234d9ffee 100644
--- a/models/webhook/webhook.go
+++ b/models/webhook/webhook.go
@@ -167,186 +167,39 @@ func (w *Webhook) UpdateEvent() error {
 	return err
 }
 
-// HasCreateEvent returns true if hook enabled create event.
-func (w *Webhook) HasCreateEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.Create)
-}
-
-// HasDeleteEvent returns true if hook enabled delete event.
-func (w *Webhook) HasDeleteEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.Delete)
-}
-
-// HasForkEvent returns true if hook enabled fork event.
-func (w *Webhook) HasForkEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.Fork)
-}
-
-// HasIssuesEvent returns true if hook enabled issues event.
-func (w *Webhook) HasIssuesEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.Issues)
-}
-
-// HasIssuesAssignEvent returns true if hook enabled issues assign event.
-func (w *Webhook) HasIssuesAssignEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.IssueAssign)
-}
-
-// HasIssuesLabelEvent returns true if hook enabled issues label event.
-func (w *Webhook) HasIssuesLabelEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.IssueLabel)
-}
-
-// HasIssuesMilestoneEvent returns true if hook enabled issues milestone event.
-func (w *Webhook) HasIssuesMilestoneEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.IssueMilestone)
-}
-
-// HasIssueCommentEvent returns true if hook enabled issue_comment event.
-func (w *Webhook) HasIssueCommentEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.IssueComment)
-}
-
-// HasPushEvent returns true if hook enabled push event.
-func (w *Webhook) HasPushEvent() bool {
-	return w.PushOnly || w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.Push)
-}
-
-// HasPullRequestEvent returns true if hook enabled pull request event.
-func (w *Webhook) HasPullRequestEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.PullRequest)
-}
-
-// HasPullRequestAssignEvent returns true if hook enabled pull request assign event.
-func (w *Webhook) HasPullRequestAssignEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.PullRequestAssign)
-}
-
-// HasPullRequestLabelEvent returns true if hook enabled pull request label event.
-func (w *Webhook) HasPullRequestLabelEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.PullRequestLabel)
-}
-
-// HasPullRequestMilestoneEvent returns true if hook enabled pull request milestone event.
-func (w *Webhook) HasPullRequestMilestoneEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.PullRequestMilestone)
-}
-
-// HasPullRequestCommentEvent returns true if hook enabled pull_request_comment event.
-func (w *Webhook) HasPullRequestCommentEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.PullRequestComment)
-}
-
-// HasPullRequestApprovedEvent returns true if hook enabled pull request review event.
-func (w *Webhook) HasPullRequestApprovedEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.PullRequestReview)
-}
-
-// HasPullRequestRejectedEvent returns true if hook enabled pull request review event.
-func (w *Webhook) HasPullRequestRejectedEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.PullRequestReview)
-}
-
-// HasPullRequestReviewCommentEvent returns true if hook enabled pull request review event.
-func (w *Webhook) HasPullRequestReviewCommentEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.PullRequestReview)
-}
-
-// HasPullRequestSyncEvent returns true if hook enabled pull request sync event.
-func (w *Webhook) HasPullRequestSyncEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.PullRequestSync)
-}
-
-// HasWikiEvent returns true if hook enabled wiki event.
-func (w *Webhook) HasWikiEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvent.Wiki)
-}
-
-// HasReleaseEvent returns if hook enabled release event.
-func (w *Webhook) HasReleaseEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.Release)
-}
-
-// HasRepositoryEvent returns if hook enabled repository event.
-func (w *Webhook) HasRepositoryEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.Repository)
-}
-
-// HasPackageEvent returns if hook enabled package event.
-func (w *Webhook) HasPackageEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.Package)
-}
-
-// HasPullRequestReviewRequestEvent returns true if hook enabled pull request review request event.
-func (w *Webhook) HasPullRequestReviewRequestEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.PullRequestReviewRequest)
-}
-
-// EventCheckers returns event checkers
-func (w *Webhook) EventCheckers() []struct {
-	Has  func() bool
-	Type webhook_module.HookEventType
-} {
-	return []struct {
-		Has  func() bool
-		Type webhook_module.HookEventType
-	}{
-		{w.HasCreateEvent, webhook_module.HookEventCreate},
-		{w.HasDeleteEvent, webhook_module.HookEventDelete},
-		{w.HasForkEvent, webhook_module.HookEventFork},
-		{w.HasPushEvent, webhook_module.HookEventPush},
-		{w.HasIssuesEvent, webhook_module.HookEventIssues},
-		{w.HasIssuesAssignEvent, webhook_module.HookEventIssueAssign},
-		{w.HasIssuesLabelEvent, webhook_module.HookEventIssueLabel},
-		{w.HasIssuesMilestoneEvent, webhook_module.HookEventIssueMilestone},
-		{w.HasIssueCommentEvent, webhook_module.HookEventIssueComment},
-		{w.HasPullRequestEvent, webhook_module.HookEventPullRequest},
-		{w.HasPullRequestAssignEvent, webhook_module.HookEventPullRequestAssign},
-		{w.HasPullRequestLabelEvent, webhook_module.HookEventPullRequestLabel},
-		{w.HasPullRequestMilestoneEvent, webhook_module.HookEventPullRequestMilestone},
-		{w.HasPullRequestCommentEvent, webhook_module.HookEventPullRequestComment},
-		{w.HasPullRequestApprovedEvent, webhook_module.HookEventPullRequestReviewApproved},
-		{w.HasPullRequestRejectedEvent, webhook_module.HookEventPullRequestReviewRejected},
-		{w.HasPullRequestCommentEvent, webhook_module.HookEventPullRequestReviewComment},
-		{w.HasPullRequestSyncEvent, webhook_module.HookEventPullRequestSync},
-		{w.HasWikiEvent, webhook_module.HookEventWiki},
-		{w.HasRepositoryEvent, webhook_module.HookEventRepository},
-		{w.HasReleaseEvent, webhook_module.HookEventRelease},
-		{w.HasPackageEvent, webhook_module.HookEventPackage},
-		{w.HasPullRequestReviewRequestEvent, webhook_module.HookEventPullRequestReviewRequest},
+func (w *Webhook) HasEvent(evt webhook_module.HookEventType) bool {
+	if w.SendEverything {
+		return true
 	}
+	if w.PushOnly {
+		return evt == webhook_module.HookEventPush
+	}
+	checkEvt := evt
+	switch evt {
+	case webhook_module.HookEventPullRequestReviewApproved, webhook_module.HookEventPullRequestReviewRejected, webhook_module.HookEventPullRequestReviewComment:
+		checkEvt = webhook_module.HookEventPullRequestReview
+	}
+	return w.HookEvents[checkEvt]
 }
 
 // EventsArray returns an array of hook events
 func (w *Webhook) EventsArray() []string {
-	events := make([]string, 0, 7)
+	if w.SendEverything {
+		events := make([]string, 0, len(webhook_module.AllEvents()))
+		for _, evt := range webhook_module.AllEvents() {
+			events = append(events, string(evt))
+		}
+		return events
+	}
+
+	if w.PushOnly {
+		return []string{string(webhook_module.HookEventPush)}
+	}
 
-	for _, c := range w.EventCheckers() {
-		if c.Has() {
-			events = append(events, string(c.Type))
+	events := make([]string, 0, len(w.HookEvents))
+	for event, enabled := range w.HookEvents {
+		if enabled {
+			events = append(events, string(event))
 		}
 	}
 	return events
@@ -387,7 +240,7 @@ func CreateWebhooks(ctx context.Context, ws []*Webhook) error {
 	if len(ws) == 0 {
 		return nil
 	}
-	for i := 0; i < len(ws); i++ {
+	for i := range ws {
 		ws[i].Type = strings.TrimSpace(ws[i].Type)
 	}
 	return db.Insert(ctx, ws)
diff --git a/models/webhook/webhook_system.go b/models/webhook/webhook_system.go
index a2a9ee321a..58d9d4a5c1 100644
--- a/models/webhook/webhook_system.go
+++ b/models/webhook/webhook_system.go
@@ -11,6 +11,19 @@ import (
 	"code.gitea.io/gitea/modules/optional"
 )
 
+// GetSystemOrDefaultWebhooks returns webhooks by given argument or all if argument is missing.
+func GetSystemOrDefaultWebhooks(ctx context.Context, isSystemWebhook optional.Option[bool]) ([]*Webhook, error) {
+	webhooks := make([]*Webhook, 0, 5)
+	if !isSystemWebhook.Has() {
+		return webhooks, db.GetEngine(ctx).Where("repo_id=? AND owner_id=?", 0, 0).
+			Find(&webhooks)
+	}
+
+	return webhooks, db.GetEngine(ctx).
+		Where("repo_id=? AND owner_id=? AND is_system_webhook=?", 0, 0, isSystemWebhook.Value()).
+		Find(&webhooks)
+}
+
 // GetDefaultWebhooks returns all admin-default webhooks.
 func GetDefaultWebhooks(ctx context.Context) ([]*Webhook, error) {
 	webhooks := make([]*Webhook, 0, 5)
diff --git a/models/webhook/webhook_system_test.go b/models/webhook/webhook_system_test.go
new file mode 100644
index 0000000000..96157ed9c9
--- /dev/null
+++ b/models/webhook/webhook_system_test.go
@@ -0,0 +1,37 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+	"testing"
+
+	"code.gitea.io/gitea/models/db"
+	"code.gitea.io/gitea/models/unittest"
+	"code.gitea.io/gitea/modules/optional"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestGetSystemOrDefaultWebhooks(t *testing.T) {
+	assert.NoError(t, unittest.PrepareTestDatabase())
+
+	hooks, err := GetSystemOrDefaultWebhooks(db.DefaultContext, optional.None[bool]())
+	assert.NoError(t, err)
+	if assert.Len(t, hooks, 2) {
+		assert.Equal(t, int64(5), hooks[0].ID)
+		assert.Equal(t, int64(6), hooks[1].ID)
+	}
+
+	hooks, err = GetSystemOrDefaultWebhooks(db.DefaultContext, optional.Some(true))
+	assert.NoError(t, err)
+	if assert.Len(t, hooks, 1) {
+		assert.Equal(t, int64(5), hooks[0].ID)
+	}
+
+	hooks, err = GetSystemOrDefaultWebhooks(db.DefaultContext, optional.Some(false))
+	assert.NoError(t, err)
+	if assert.Len(t, hooks, 1) {
+		assert.Equal(t, int64(6), hooks[0].ID)
+	}
+}
diff --git a/models/webhook/webhook_test.go b/models/webhook/webhook_test.go
index c6c3f40d46..edad8fc996 100644
--- a/models/webhook/webhook_test.go
+++ b/models/webhook/webhook_test.go
@@ -4,7 +4,6 @@
 package webhook
 
 import (
-	"context"
 	"testing"
 	"time"
 
@@ -54,9 +53,9 @@ func TestWebhook_UpdateEvent(t *testing.T) {
 		SendEverything: false,
 		ChooseEvents:   false,
 		HookEvents: webhook_module.HookEvents{
-			Create:      false,
-			Push:        true,
-			PullRequest: false,
+			webhook_module.HookEventCreate:      false,
+			webhook_module.HookEventPush:        true,
+			webhook_module.HookEventPullRequest: false,
 		},
 	}
 	webhook.HookEvent = hookEvent
@@ -73,8 +72,8 @@ func TestWebhook_EventsArray(t *testing.T) {
 			"issues", "issue_assign", "issue_label", "issue_milestone", "issue_comment",
 			"pull_request", "pull_request_assign", "pull_request_label", "pull_request_milestone",
 			"pull_request_comment", "pull_request_review_approved", "pull_request_review_rejected",
-			"pull_request_review_comment", "pull_request_sync", "wiki", "repository", "release",
-			"package", "pull_request_review_request",
+			"pull_request_review_comment", "pull_request_sync", "pull_request_review_request", "wiki", "repository", "release",
+			"package", "status", "workflow_run", "workflow_job",
 		},
 		(&Webhook{
 			HookEvent: &webhook_module.HookEvent{SendEverything: true},
@@ -91,7 +90,7 @@ func TestWebhook_EventsArray(t *testing.T) {
 func TestCreateWebhook(t *testing.T) {
 	hook := &Webhook{
 		RepoID:      3,
-		URL:         "www.example.com/unit_test",
+		URL:         "https://www.example.com/unit_test",
 		ContentType: ContentTypeJSON,
 		Events:      `{"push_only":false,"send_everything":false,"choose_events":false,"events":{"create":false,"push":true,"pull_request":true}}`,
 	}
@@ -245,7 +244,7 @@ func TestCleanupHookTaskTable_PerWebhook_DeletesDelivered(t *testing.T) {
 	assert.NoError(t, err)
 	unittest.AssertExistsAndLoadBean(t, hookTask)
 
-	assert.NoError(t, CleanupHookTaskTable(context.Background(), PerWebhook, 168*time.Hour, 0))
+	assert.NoError(t, CleanupHookTaskTable(t.Context(), PerWebhook, 168*time.Hour, 0))
 
 	unittest.AssertNotExistsBean(t, hookTask)
 }
@@ -261,7 +260,7 @@ func TestCleanupHookTaskTable_PerWebhook_LeavesUndelivered(t *testing.T) {
 	assert.NoError(t, err)
 	unittest.AssertExistsAndLoadBean(t, hookTask)
 
-	assert.NoError(t, CleanupHookTaskTable(context.Background(), PerWebhook, 168*time.Hour, 0))
+	assert.NoError(t, CleanupHookTaskTable(t.Context(), PerWebhook, 168*time.Hour, 0))
 
 	unittest.AssertExistsAndLoadBean(t, hookTask)
 }
@@ -278,7 +277,7 @@ func TestCleanupHookTaskTable_PerWebhook_LeavesMostRecentTask(t *testing.T) {
 	assert.NoError(t, err)
 	unittest.AssertExistsAndLoadBean(t, hookTask)
 
-	assert.NoError(t, CleanupHookTaskTable(context.Background(), PerWebhook, 168*time.Hour, 1))
+	assert.NoError(t, CleanupHookTaskTable(t.Context(), PerWebhook, 168*time.Hour, 1))
 
 	unittest.AssertExistsAndLoadBean(t, hookTask)
 }
@@ -295,7 +294,7 @@ func TestCleanupHookTaskTable_OlderThan_DeletesDelivered(t *testing.T) {
 	assert.NoError(t, err)
 	unittest.AssertExistsAndLoadBean(t, hookTask)
 
-	assert.NoError(t, CleanupHookTaskTable(context.Background(), OlderThan, 168*time.Hour, 0))
+	assert.NoError(t, CleanupHookTaskTable(t.Context(), OlderThan, 168*time.Hour, 0))
 
 	unittest.AssertNotExistsBean(t, hookTask)
 }
@@ -311,7 +310,7 @@ func TestCleanupHookTaskTable_OlderThan_LeavesUndelivered(t *testing.T) {
 	assert.NoError(t, err)
 	unittest.AssertExistsAndLoadBean(t, hookTask)
 
-	assert.NoError(t, CleanupHookTaskTable(context.Background(), OlderThan, 168*time.Hour, 0))
+	assert.NoError(t, CleanupHookTaskTable(t.Context(), OlderThan, 168*time.Hour, 0))
 
 	unittest.AssertExistsAndLoadBean(t, hookTask)
 }
@@ -328,6 +327,6 @@ func TestCleanupHookTaskTable_OlderThan_LeavesTaskEarlierThanAgeToDelete(t *test
 	assert.NoError(t, err)
 	unittest.AssertExistsAndLoadBean(t, hookTask)
 
-	assert.NoError(t, CleanupHookTaskTable(context.Background(), OlderThan, 168*time.Hour, 0))
+	assert.NoError(t, CleanupHookTaskTable(t.Context(), OlderThan, 168*time.Hour, 0))
 	unittest.AssertExistsAndLoadBean(t, hookTask)
 }
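
Editor's note, not part of the diff: the models/webhook/webhook.go change above replaces the long list of Has*Event helpers and the EventCheckers table with a single map-backed check that consults only SendEverything, PushOnly and a per-event flag. The standalone sketch below restates that logic so it can be read in isolation; HookEventType, HookEvents and the event constants here are simplified stand-ins, not the real webhook_module definitions.

package main

import "fmt"

// Stand-in names for the webhook_module identifiers; only the shape of the
// check matters for this sketch.
type HookEventType string

const (
	HookEventPush                      HookEventType = "push"
	HookEventPullRequestReview         HookEventType = "pull_request_review"
	HookEventPullRequestReviewApproved HookEventType = "pull_request_review_approved"
	HookEventPullRequestReviewRejected HookEventType = "pull_request_review_rejected"
	HookEventPullRequestReviewComment  HookEventType = "pull_request_review_comment"
)

// HookEvents mirrors the map form the refactor switches to: one enabled flag
// per event type instead of one boolean struct field per event.
type HookEvents map[HookEventType]bool

type webhookSketch struct {
	SendEverything bool
	PushOnly       bool
	HookEvents     HookEvents
}

// hasEvent follows the same order of checks as the new Webhook.HasEvent:
// send-everything always matches, push-only hooks match only push, and the
// three review sub-events fold onto the single pull_request_review flag
// before the map lookup.
func (w *webhookSketch) hasEvent(evt HookEventType) bool {
	if w.SendEverything {
		return true
	}
	if w.PushOnly {
		return evt == HookEventPush
	}
	checkEvt := evt
	switch evt {
	case HookEventPullRequestReviewApproved, HookEventPullRequestReviewRejected, HookEventPullRequestReviewComment:
		checkEvt = HookEventPullRequestReview
	}
	return w.HookEvents[checkEvt]
}

func main() {
	w := &webhookSketch{HookEvents: HookEvents{HookEventPullRequestReview: true}}
	fmt.Println(w.hasEvent(HookEventPullRequestReviewApproved)) // true: mapped to pull_request_review
	fmt.Println(w.hasEvent(HookEventPush))                      // false: push is not enabled on this hook
}

Under this scheme a hook configured for pull_request_review answers true for all three review sub-events, which is consistent with the EventsArray expectations updated in models/webhook/webhook_test.go above.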