Diffstat (limited to 'services')
-rw-r--r--  services/actions/auth_test.go | 4
-rw-r--r--  services/actions/cleanup.go | 38
-rw-r--r--  services/actions/clear_tasks.go | 35
-rw-r--r--  services/actions/context_test.go | 3
-rw-r--r--  services/actions/job_emitter.go | 7
-rw-r--r--  services/actions/notifier.go | 4
-rw-r--r--  services/actions/notifier_helper.go | 10
-rw-r--r--  services/actions/schedule_tasks.go | 14
-rw-r--r--  services/actions/task.go | 131
-rw-r--r--  services/actions/variables.go | 19
-rw-r--r--  services/actions/workflow.go | 285
-rw-r--r--  services/agit/agit.go | 9
-rw-r--r--  services/asymkey/commit.go | 363
-rw-r--r--  services/asymkey/sign.go | 22
-rw-r--r--  services/attachment/attachment_test.go | 2
-rw-r--r--  services/auth/auth.go | 2
-rw-r--r--  services/auth/oauth2_test.go | 7
-rw-r--r--  services/auth/source/oauth2/source_sync_test.go | 21
-rw-r--r--  services/auth/sspi.go | 2
-rw-r--r--  services/automerge/automerge.go | 6
-rw-r--r--  services/context/access_log.go | 2
-rw-r--r--  services/context/access_log_test.go | 2
-rw-r--r--  services/context/api.go | 86
-rw-r--r--  services/context/api_test.go | 2
-rw-r--r--  services/context/base.go | 8
-rw-r--r--  services/context/context.go | 26
-rw-r--r--  services/context/context_response.go | 6
-rw-r--r--  services/context/org.go | 332
-rw-r--r--  services/context/package.go | 24
-rw-r--r--  services/context/pagination.go | 6
-rw-r--r--  services/context/permission.go | 12
-rw-r--r--  services/context/repo.go | 30
-rw-r--r--  services/context/upload/upload.go | 7
-rw-r--r--  services/context/user.go | 20
-rw-r--r--  services/contexttest/context_tests.go | 27
-rw-r--r--  services/convert/convert.go | 25
-rw-r--r--  services/convert/git_commit.go | 10
-rw-r--r--  services/convert/git_commit_test.go | 2
-rw-r--r--  services/convert/issue.go | 9
-rw-r--r--  services/convert/pull.go | 127
-rw-r--r--  services/convert/pull_review_test.go | 4
-rw-r--r--  services/convert/pull_test.go | 2
-rw-r--r--  services/convert/release_test.go | 4
-rw-r--r--  services/convert/repository.go | 3
-rw-r--r--  services/convert/user_test.go | 4
-rw-r--r--  services/convert/utils_test.go | 8
-rw-r--r--  services/cron/tasks_basic.go | 2
-rw-r--r--  services/doctor/dbconsistency.go | 19
-rw-r--r--  services/doctor/dbversion.go | 3
-rw-r--r--  services/doctor/doctor.go | 8
-rw-r--r--  services/doctor/fix16961_test.go | 10
-rw-r--r--  services/doctor/heads.go | 6
-rw-r--r--  services/doctor/mergebase.go | 10
-rw-r--r--  services/doctor/misc.go | 17
-rw-r--r--  services/doctor/storage.go | 2
-rw-r--r--  services/feed/feed.go | 146
-rw-r--r--  services/feed/feed_test.go | 41
-rw-r--r--  services/feed/notifier.go | 38
-rw-r--r--  services/forms/repo_form.go | 22
-rw-r--r--  services/forms/user_form.go | 21
-rw-r--r--  services/forms/user_form_test.go | 27
-rw-r--r--  services/git/commit.go | 95
-rw-r--r--  services/gitdiff/git_diff_tree.go | 249
-rw-r--r--  services/gitdiff/git_diff_tree_test.go | 427
-rw-r--r--  services/gitdiff/gitdiff.go | 509
-rw-r--r--  services/gitdiff/gitdiff_test.go | 49
-rw-r--r--  services/gitdiff/highlightdiff.go | 99
-rw-r--r--  services/gitdiff/highlightdiff_test.go | 151
-rw-r--r--  services/gitdiff/submodule_test.go | 3
-rw-r--r--  services/gitdiff/testdata/academic-module/HEAD | 1
-rw-r--r--  services/gitdiff/testdata/academic-module/config | 10
-rw-r--r--  services/gitdiff/testdata/academic-module/index | bin 46960 -> 0 bytes
-rw-r--r--  services/gitdiff/testdata/academic-module/logs/HEAD | 1
-rw-r--r--  services/gitdiff/testdata/academic-module/logs/refs/heads/master | 1
-rw-r--r--  services/gitdiff/testdata/academic-module/logs/refs/remotes/origin/HEAD | 1
-rw-r--r--  services/gitdiff/testdata/academic-module/objects/pack/pack-597efbc3613c7ba790e33b178fd9fc1fe17b4245.idx | bin 65332 -> 0 bytes
-rw-r--r--  services/gitdiff/testdata/academic-module/objects/pack/pack-597efbc3613c7ba790e33b178fd9fc1fe17b4245.pack | bin 1167905 -> 0 bytes
-rw-r--r--  services/gitdiff/testdata/academic-module/packed-refs | 2
-rw-r--r--  services/gitdiff/testdata/academic-module/refs/heads/master | 1
-rw-r--r--  services/gitdiff/testdata/academic-module/refs/remotes/origin/HEAD | 1
-rw-r--r--  services/issue/comments.go | 40
-rw-r--r--  services/issue/issue.go | 14
-rw-r--r--  services/issue/issue_test.go | 4
-rw-r--r--  services/issue/pull.go | 42
-rw-r--r--  services/issue/suggestion.go | 73
-rw-r--r--  services/issue/suggestion_test.go | 57
-rw-r--r--  services/lfs/server.go | 11
-rw-r--r--  services/mailer/mail.go | 548
-rw-r--r--  services/mailer/mail_comment.go | 10
-rw-r--r--  services/mailer/mail_issue.go | 109
-rw-r--r--  services/mailer/mail_issue_common.go | 336
-rw-r--r--  services/mailer/mail_release.go | 9
-rw-r--r--  services/mailer/mail_repo.go | 3
-rw-r--r--  services/mailer/mail_team_invite.go | 4
-rw-r--r--  services/mailer/mail_test.go | 183
-rw-r--r--  services/mailer/mail_user.go | 161
-rw-r--r--  services/mailer/notify.go | 21
-rw-r--r--  services/markup/renderhelper.go | 4
-rw-r--r--  services/markup/renderhelper_codepreview.go | 4
-rw-r--r--  services/markup/renderhelper_issueicontitle.go | 4
-rw-r--r--  services/markup/renderhelper_mention_test.go | 9
-rw-r--r--  services/migrations/codebase.go | 50
-rw-r--r--  services/migrations/codebase_test.go | 19
-rw-r--r--  services/migrations/codecommit.go | 27
-rw-r--r--  services/migrations/dump.go | 48
-rw-r--r--  services/migrations/git.go | 8
-rw-r--r--  services/migrations/gitea_downloader.go | 35
-rw-r--r--  services/migrations/gitea_downloader_test.go | 31
-rw-r--r--  services/migrations/gitea_uploader.go | 116
-rw-r--r--  services/migrations/gitea_uploader_test.go | 51
-rw-r--r--  services/migrations/github.go | 133
-rw-r--r--  services/migrations/github_test.go | 26
-rw-r--r--  services/migrations/gitlab.go | 70
-rw-r--r--  services/migrations/gitlab_test.go | 36
-rw-r--r--  services/migrations/gogs.go | 83
-rw-r--r--  services/migrations/gogs_test.go | 17
-rw-r--r--  services/migrations/migrate.go | 58
-rw-r--r--  services/migrations/onedev.go | 52
-rw-r--r--  services/migrations/onedev_test.go | 18
-rw-r--r--  services/migrations/restore.go | 27
-rw-r--r--  services/mirror/mirror_pull.go | 95
-rw-r--r--  services/mirror/mirror_pull_test.go | 94
-rw-r--r--  services/mirror/mirror_push.go | 19
-rw-r--r--  services/mirror/mirror_test.go | 46
-rw-r--r--  services/notify/notifier.go | 3
-rw-r--r--  services/notify/notify.go | 7
-rw-r--r--  services/notify/null.go | 4
-rw-r--r--  services/org/team_test.go | 2
-rw-r--r--  services/org/user.go | 2
-rw-r--r--  services/org/user_test.go | 2
-rw-r--r--  services/packages/arch/repository.go | 38
-rw-r--r--  services/packages/arch/vercmp.go | 113
-rw-r--r--  services/packages/arch/vercmp_test.go | 27
-rw-r--r--  services/packages/cargo/index.go | 30
-rw-r--r--  services/packages/package_update.go | 79
-rw-r--r--  services/projects/issue.go | 152
-rw-r--r--  services/projects/issue_test.go | 210
-rw-r--r--  services/projects/main_test.go | 17
-rw-r--r--  services/pull/check.go | 10
-rw-r--r--  services/pull/check_test.go | 5
-rw-r--r--  services/pull/commit_status.go | 5
-rw-r--r--  services/pull/merge.go | 20
-rw-r--r--  services/pull/merge_ff_only.go | 2
-rw-r--r--  services/pull/merge_merge.go | 2
-rw-r--r--  services/pull/merge_prepare.go | 22
-rw-r--r--  services/pull/merge_rebase.go | 12
-rw-r--r--  services/pull/merge_squash.go | 6
-rw-r--r--  services/pull/patch.go | 24
-rw-r--r--  services/pull/patch_unmerged.go | 4
-rw-r--r--  services/pull/protected_branch.go | 49
-rw-r--r--  services/pull/pull.go | 86
-rw-r--r--  services/pull/temp_repo.go | 23
-rw-r--r--  services/pull/update.go | 20
-rw-r--r--  services/pull/update_rebase.go | 6
-rw-r--r--  services/release/release.go | 16
-rw-r--r--  services/release/release_test.go | 12
-rw-r--r--  services/repository/adopt.go | 39
-rw-r--r--  services/repository/adopt_test.go | 2
-rw-r--r--  services/repository/archiver/archiver_test.go | 6
-rw-r--r--  services/repository/avatar_test.go | 2
-rw-r--r--  services/repository/branch.go | 13
-rw-r--r--  services/repository/check.go | 4
-rw-r--r--  services/repository/contributors_graph.go | 4
-rw-r--r--  services/repository/contributors_graph_test.go | 6
-rw-r--r--  services/repository/create.go | 49
-rw-r--r--  services/repository/delete.go | 17
-rw-r--r--  services/repository/files/cherry_pick.go | 14
-rw-r--r--  services/repository/files/commit.go | 4
-rw-r--r--  services/repository/files/content_test.go | 4
-rw-r--r--  services/repository/files/diff.go | 12
-rw-r--r--  services/repository/files/diff_test.go | 9
-rw-r--r--  services/repository/files/file_test.go | 6
-rw-r--r--  services/repository/files/patch.go | 16
-rw-r--r--  services/repository/files/temp_repo.go | 88
-rw-r--r--  services/repository/files/tree.go | 99
-rw-r--r--  services/repository/files/tree_test.go | 51
-rw-r--r--  services/repository/files/update.go | 28
-rw-r--r--  services/repository/files/upload.go | 24
-rw-r--r--  services/repository/fork.go | 19
-rw-r--r--  services/repository/generate.go | 17
-rw-r--r--  services/repository/gitgraph/graph.go | 116
-rw-r--r--  services/repository/gitgraph/graph_models.go | 266
-rw-r--r--  services/repository/gitgraph/graph_test.go | 712
-rw-r--r--  services/repository/gitgraph/parser.go | 336
-rw-r--r--  services/repository/hooks.go | 5
-rw-r--r--  services/repository/init.go | 12
-rw-r--r--  services/repository/lfs_test.go | 3
-rw-r--r--  services/repository/license.go | 49
-rw-r--r--  services/repository/license_test.go | 10
-rw-r--r--  services/repository/migrate.go | 34
-rw-r--r--  services/repository/push.go | 223
-rw-r--r--  services/repository/repository.go | 7
-rw-r--r--  services/repository/setting.go | 3
-rw-r--r--  services/repository/transfer.go | 7
-rw-r--r--  services/secrets/secrets.go | 6
-rw-r--r--  services/user/user_test.go | 2
-rw-r--r--  services/versioned_migration/migration.go | 24
-rw-r--r--  services/webhook/deliver.go | 23
-rw-r--r--  services/webhook/deliver_test.go | 11
-rw-r--r--  services/webhook/dingtalk.go | 12
-rw-r--r--  services/webhook/dingtalk_test.go | 3
-rw-r--r--  services/webhook/discord.go | 12
-rw-r--r--  services/webhook/discord_test.go | 3
-rw-r--r--  services/webhook/feishu.go | 12
-rw-r--r--  services/webhook/feishu_test.go | 3
-rw-r--r--  services/webhook/general.go | 63
-rw-r--r--  services/webhook/matrix.go | 16
-rw-r--r--  services/webhook/matrix_test.go | 3
-rw-r--r--  services/webhook/msteams.go | 28
-rw-r--r--  services/webhook/msteams_test.go | 7
-rw-r--r--  services/webhook/notifier.go | 128
-rw-r--r--  services/webhook/packagist.go | 8
-rw-r--r--  services/webhook/packagist_test.go | 7
-rw-r--r--  services/webhook/payloader.go | 8
-rw-r--r--  services/webhook/slack.go | 12
-rw-r--r--  services/webhook/slack_test.go | 3
-rw-r--r--  services/webhook/telegram.go | 12
-rw-r--r--  services/webhook/telegram_test.go | 3
-rw-r--r--  services/webhook/wechatwork.go | 12
-rw-r--r--  services/webtheme/webtheme.go | 136
-rw-r--r--  services/webtheme/webtheme_test.go | 37
-rw-r--r--  services/wiki/wiki.go | 10
-rw-r--r--  services/wiki/wiki_test.go | 30
223 files changed, 7759 insertions(+), 2750 deletions(-)
diff --git a/services/actions/auth_test.go b/services/actions/auth_test.go
index 85e7409105..38d0ba7f82 100644
--- a/services/actions/auth_test.go
+++ b/services/actions/auth_test.go
@@ -18,7 +18,7 @@ func TestCreateAuthorizationToken(t *testing.T) {
var taskID int64 = 23
token, err := CreateAuthorizationToken(taskID, 1, 2)
assert.NoError(t, err)
- assert.NotEqual(t, "", token)
+ assert.NotEmpty(t, token)
claims := jwt.MapClaims{}
_, err = jwt.ParseWithClaims(token, claims, func(t *jwt.Token) (any, error) {
return setting.GetGeneralTokenSigningSecret(), nil
@@ -44,7 +44,7 @@ func TestParseAuthorizationToken(t *testing.T) {
var taskID int64 = 23
token, err := CreateAuthorizationToken(taskID, 1, 2)
assert.NoError(t, err)
- assert.NotEqual(t, "", token)
+ assert.NotEmpty(t, token)
headers := http.Header{}
headers.Set("Authorization", "Bearer "+token)
rTaskID, err := ParseAuthorizationToken(&http.Request{
diff --git a/services/actions/cleanup.go b/services/actions/cleanup.go
index 1223ebcab6..23d6e3a49d 100644
--- a/services/actions/cleanup.go
+++ b/services/actions/cleanup.go
@@ -9,14 +9,17 @@ import (
"time"
actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
actions_module "code.gitea.io/gitea/modules/actions"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/modules/timeutil"
+
+ "xorm.io/builder"
)
-// Cleanup removes expired actions logs, data and artifacts
+// Cleanup removes expired actions logs, data, artifacts and used ephemeral runners
func Cleanup(ctx context.Context) error {
// clean up expired artifacts
if err := CleanupArtifacts(ctx); err != nil {
@@ -28,6 +31,11 @@ func Cleanup(ctx context.Context) error {
return fmt.Errorf("cleanup logs: %w", err)
}
+ // clean up old ephemeral runners
+ if err := CleanupEphemeralRunners(ctx); err != nil {
+ return fmt.Errorf("cleanup old ephemeral runners: %w", err)
+ }
+
return nil
}
@@ -52,9 +60,9 @@ func cleanExpiredArtifacts(taskCtx context.Context) error {
}
if err := storage.ActionsArtifacts.Delete(artifact.StoragePath); err != nil {
log.Error("Cannot delete artifact %d: %v", artifact.ID, err)
- continue
+ // go on
}
- log.Info("Artifact %d set expired", artifact.ID)
+ log.Info("Artifact %d is deleted (due to expiration)", artifact.ID)
}
return nil
}
@@ -76,9 +84,9 @@ func cleanNeedDeleteArtifacts(taskCtx context.Context) error {
}
if err := storage.ActionsArtifacts.Delete(artifact.StoragePath); err != nil {
log.Error("Cannot delete artifact %d: %v", artifact.ID, err)
- continue
+ // go on
}
- log.Info("Artifact %d set deleted", artifact.ID)
+ log.Info("Artifact %d is deleted (due to pending deletion)", artifact.ID)
}
if len(artifacts) < deleteArtifactBatchSize {
log.Debug("No more artifacts pending deletion")
@@ -103,8 +111,7 @@ func CleanupLogs(ctx context.Context) error {
for _, task := range tasks {
if err := actions_module.RemoveLogs(ctx, task.LogInStorage, task.LogFilename); err != nil {
log.Error("Failed to remove log %s (in storage %v) of task %v: %v", task.LogFilename, task.LogInStorage, task.ID, err)
- // do not return error here, continue to next task
- continue
+ // do not return error here, go on
}
task.LogIndexes = nil // clear log indexes since it's a heavy field
task.LogExpired = true
@@ -124,3 +131,20 @@ func CleanupLogs(ctx context.Context) error {
log.Info("Removed %d logs", count)
return nil
}
+
+// CleanupEphemeralRunners removes used ephemeral runners which are no longer able to process jobs
+func CleanupEphemeralRunners(ctx context.Context) error {
+ subQuery := builder.Select("`action_runner`.id").
+ From(builder.Select("*").From("`action_runner`"), "`action_runner`"). // mysql needs this redundant subquery
+ Join("INNER", "`action_task`", "`action_task`.`runner_id` = `action_runner`.`id`").
+ Where(builder.Eq{"`action_runner`.`ephemeral`": true}).
+ And(builder.NotIn("`action_task`.`status`", actions_model.StatusWaiting, actions_model.StatusRunning, actions_model.StatusBlocked))
+ b := builder.Delete(builder.In("id", subQuery)).From("`action_runner`")
+ res, err := db.GetEngine(ctx).Exec(b)
+ if err != nil {
+ return fmt.Errorf("find runners: %w", err)
+ }
+ affected, _ := res.RowsAffected()
+ log.Info("Removed %d runners", affected)
+ return nil
+}
diff --git a/services/actions/clear_tasks.go b/services/actions/clear_tasks.go
index 67373782d5..2aeb0e8c96 100644
--- a/services/actions/clear_tasks.go
+++ b/services/actions/clear_tasks.go
@@ -10,10 +10,13 @@ import (
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/actions"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ notify_service "code.gitea.io/gitea/services/notify"
)
// StopZombieTasks stops the task which have running status, but haven't been updated for a long time
@@ -32,6 +35,28 @@ func StopEndlessTasks(ctx context.Context) error {
})
}
+func notifyWorkflowJobStatusUpdate(ctx context.Context, jobs []*actions_model.ActionRunJob) {
+ if len(jobs) > 0 {
+ CreateCommitStatus(ctx, jobs...)
+ for _, job := range jobs {
+ _ = job.LoadAttributes(ctx)
+ notify_service.WorkflowJobStatusUpdate(ctx, job.Run.Repo, job.Run.TriggerUser, job, nil)
+ }
+ }
+}
+
+func CancelPreviousJobs(ctx context.Context, repoID int64, ref, workflowID string, event webhook_module.HookEventType) error {
+ jobs, err := actions_model.CancelPreviousJobs(ctx, repoID, ref, workflowID, event)
+ notifyWorkflowJobStatusUpdate(ctx, jobs)
+ return err
+}
+
+func CleanRepoScheduleTasks(ctx context.Context, repo *repo_model.Repository) error {
+ jobs, err := actions_model.CleanRepoScheduleTasks(ctx, repo)
+ notifyWorkflowJobStatusUpdate(ctx, jobs)
+ return err
+}
+
func stopTasks(ctx context.Context, opts actions_model.FindTaskOptions) error {
tasks, err := db.Find[actions_model.ActionTask](ctx, opts)
if err != nil {
@@ -67,7 +92,7 @@ func stopTasks(ctx context.Context, opts actions_model.FindTaskOptions) error {
remove()
}
- CreateCommitStatus(ctx, jobs...)
+ notifyWorkflowJobStatusUpdate(ctx, jobs)
return nil
}
@@ -87,14 +112,20 @@ func CancelAbandonedJobs(ctx context.Context) error {
for _, job := range jobs {
job.Status = actions_model.StatusCancelled
job.Stopped = now
+ updated := false
if err := db.WithTx(ctx, func(ctx context.Context) error {
- _, err := actions_model.UpdateRunJob(ctx, job, nil, "status", "stopped")
+ n, err := actions_model.UpdateRunJob(ctx, job, nil, "status", "stopped")
+ updated = err == nil && n > 0
return err
}); err != nil {
log.Warn("cancel abandoned job %v: %v", job.ID, err)
// go on
}
CreateCommitStatus(ctx, job)
+ if updated {
+ _ = job.LoadAttributes(ctx)
+ notify_service.WorkflowJobStatusUpdate(ctx, job.Run.Repo, job.Run.TriggerUser, job, nil)
+ }
}
return nil
diff --git a/services/actions/context_test.go b/services/actions/context_test.go
index 6ed094b289..74ef694021 100644
--- a/services/actions/context_test.go
+++ b/services/actions/context_test.go
@@ -4,7 +4,6 @@
package actions
import (
- "context"
"testing"
actions_model "code.gitea.io/gitea/models/actions"
@@ -19,7 +18,7 @@ func TestFindTaskNeeds(t *testing.T) {
task := unittest.AssertExistsAndLoadBean(t, &actions_model.ActionTask{ID: 51})
job := unittest.AssertExistsAndLoadBean(t, &actions_model.ActionRunJob{ID: task.JobID})
- ret, err := FindTaskNeeds(context.Background(), job)
+ ret, err := FindTaskNeeds(t.Context(), job)
assert.NoError(t, err)
assert.Len(t, ret, 1)
assert.Contains(t, ret, "job1")
diff --git a/services/actions/job_emitter.go b/services/actions/job_emitter.go
index 1f859fcf70..c11bb5875f 100644
--- a/services/actions/job_emitter.go
+++ b/services/actions/job_emitter.go
@@ -12,6 +12,7 @@ import (
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/queue"
+ notify_service "code.gitea.io/gitea/services/notify"
"github.com/nektos/act/pkg/jobparser"
"xorm.io/builder"
@@ -49,6 +50,7 @@ func checkJobsOfRun(ctx context.Context, runID int64) error {
if err != nil {
return err
}
+ var updatedjobs []*actions_model.ActionRunJob
if err := db.WithTx(ctx, func(ctx context.Context) error {
idToJobs := make(map[string][]*actions_model.ActionRunJob, len(jobs))
for _, job := range jobs {
@@ -64,6 +66,7 @@ func checkJobsOfRun(ctx context.Context, runID int64) error {
} else if n != 1 {
return fmt.Errorf("no affected for updating blocked job %v", job.ID)
}
+ updatedjobs = append(updatedjobs, job)
}
}
return nil
@@ -71,6 +74,10 @@ func checkJobsOfRun(ctx context.Context, runID int64) error {
return err
}
CreateCommitStatus(ctx, jobs...)
+ for _, job := range updatedjobs {
+ _ = job.LoadAttributes(ctx)
+ notify_service.WorkflowJobStatusUpdate(ctx, job.Run.Repo, job.Run.TriggerUser, job, nil)
+ }
return nil
}
diff --git a/services/actions/notifier.go b/services/actions/notifier.go
index 1a23b4e0c5..831cde3523 100644
--- a/services/actions/notifier.go
+++ b/services/actions/notifier.go
@@ -532,7 +532,7 @@ func (n *actionsNotifier) PushCommits(ctx context.Context, pusher *user_model.Us
ctx = withMethod(ctx, "PushCommits")
apiPusher := convert.ToUser(ctx, pusher, nil)
- apiCommits, apiHeadCommit, err := commits.ToAPIPayloadCommits(ctx, repo.RepoPath(), repo.HTMLURL())
+ apiCommits, apiHeadCommit, err := commits.ToAPIPayloadCommits(ctx, repo)
if err != nil {
log.Error("commits.ToAPIPayloadCommits failed: %v", err)
return
@@ -593,7 +593,7 @@ func (n *actionsNotifier) SyncPushCommits(ctx context.Context, pusher *user_mode
ctx = withMethod(ctx, "SyncPushCommits")
apiPusher := convert.ToUser(ctx, pusher, nil)
- apiCommits, apiHeadCommit, err := commits.ToAPIPayloadCommits(ctx, repo.RepoPath(), repo.HTMLURL())
+ apiCommits, apiHeadCommit, err := commits.ToAPIPayloadCommits(ctx, repo)
if err != nil {
log.Error("commits.ToAPIPayloadCommits failed: %v", err)
return
diff --git a/services/actions/notifier_helper.go b/services/actions/notifier_helper.go
index 2d8885dc32..d179134798 100644
--- a/services/actions/notifier_helper.go
+++ b/services/actions/notifier_helper.go
@@ -27,6 +27,7 @@ import (
api "code.gitea.io/gitea/modules/structs"
webhook_module "code.gitea.io/gitea/modules/webhook"
"code.gitea.io/gitea/services/convert"
+ notify_service "code.gitea.io/gitea/services/notify"
"github.com/nektos/act/pkg/jobparser"
"github.com/nektos/act/pkg/model"
@@ -136,7 +137,7 @@ func notify(ctx context.Context, input *notifyInput) error {
return nil
}
if unit_model.TypeActions.UnitGlobalDisabled() {
- if err := actions_model.CleanRepoScheduleTasks(ctx, input.Repo); err != nil {
+ if err := CleanRepoScheduleTasks(ctx, input.Repo); err != nil {
log.Error("CleanRepoScheduleTasks: %v", err)
}
return nil
@@ -341,7 +342,7 @@ func handleWorkflows(
// cancel running jobs if the event is push or pull_request_sync
if run.Event == webhook_module.HookEventPush ||
run.Event == webhook_module.HookEventPullRequestSync {
- if err := actions_model.CancelPreviousJobs(
+ if err := CancelPreviousJobs(
ctx,
run.RepoID,
run.Ref,
@@ -363,6 +364,9 @@ func handleWorkflows(
continue
}
CreateCommitStatus(ctx, alljobs...)
+ for _, job := range alljobs {
+ notify_service.WorkflowJobStatusUpdate(ctx, input.Repo, input.Doer, job, nil)
+ }
}
return nil
}
@@ -472,7 +476,7 @@ func handleSchedules(
log.Error("CountSchedules: %v", err)
return err
} else if count > 0 {
- if err := actions_model.CleanRepoScheduleTasks(ctx, input.Repo); err != nil {
+ if err := CleanRepoScheduleTasks(ctx, input.Repo); err != nil {
log.Error("CleanRepoScheduleTasks: %v", err)
}
}
diff --git a/services/actions/schedule_tasks.go b/services/actions/schedule_tasks.go
index 18f3324fd2..a30b166063 100644
--- a/services/actions/schedule_tasks.go
+++ b/services/actions/schedule_tasks.go
@@ -15,6 +15,7 @@ import (
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/timeutil"
webhook_module "code.gitea.io/gitea/modules/webhook"
+ notify_service "code.gitea.io/gitea/services/notify"
"github.com/nektos/act/pkg/jobparser"
)
@@ -55,7 +56,7 @@ func startTasks(ctx context.Context) error {
// cancel running jobs if the event is push
if row.Schedule.Event == webhook_module.HookEventPush {
// cancel running jobs of the same workflow
- if err := actions_model.CancelPreviousJobs(
+ if err := CancelPreviousJobs(
ctx,
row.RepoID,
row.Schedule.Ref,
@@ -148,6 +149,17 @@ func CreateScheduleTask(ctx context.Context, cron *actions_model.ActionSchedule)
if err := actions_model.InsertRun(ctx, run, workflows); err != nil {
return err
}
+ allJobs, err := db.Find[actions_model.ActionRunJob](ctx, actions_model.FindRunJobOptions{RunID: run.ID})
+ if err != nil {
+ log.Error("FindRunJobs: %v", err)
+ }
+ err = run.LoadAttributes(ctx)
+ if err != nil {
+ log.Error("LoadAttributes: %v", err)
+ }
+ for _, job := range allJobs {
+ notify_service.WorkflowJobStatusUpdate(ctx, run.Repo, run.TriggerUser, job, nil)
+ }
// Return nil if no errors occurred
return nil
diff --git a/services/actions/task.go b/services/actions/task.go
new file mode 100644
index 0000000000..9c8198206a
--- /dev/null
+++ b/services/actions/task.go
@@ -0,0 +1,131 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "context"
+ "fmt"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ secret_model "code.gitea.io/gitea/models/secret"
+ notify_service "code.gitea.io/gitea/services/notify"
+
+ runnerv1 "code.gitea.io/actions-proto-go/runner/v1"
+ "google.golang.org/protobuf/types/known/structpb"
+)
+
+func PickTask(ctx context.Context, runner *actions_model.ActionRunner) (*runnerv1.Task, bool, error) {
+ var (
+ task *runnerv1.Task
+ job *actions_model.ActionRunJob
+ actionTask *actions_model.ActionTask
+ )
+
+ if runner.Ephemeral {
+ var task actions_model.ActionTask
+ has, err := db.GetEngine(ctx).Where("runner_id = ?", runner.ID).Get(&task)
+ // Let the runner retry the request, do not allow to proceed
+ if err != nil {
+ return nil, false, err
+ }
+ if has {
+ if task.Status == actions_model.StatusWaiting || task.Status == actions_model.StatusRunning || task.Status == actions_model.StatusBlocked {
+ return nil, false, nil
+ }
+ // task has been finished, remove it
+ _, err = db.DeleteByID[actions_model.ActionRunner](ctx, runner.ID)
+ if err != nil {
+ return nil, false, err
+ }
+ return nil, false, fmt.Errorf("runner has been removed")
+ }
+ }
+
+ if err := db.WithTx(ctx, func(ctx context.Context) error {
+ t, ok, err := actions_model.CreateTaskForRunner(ctx, runner)
+ if err != nil {
+ return fmt.Errorf("CreateTaskForRunner: %w", err)
+ }
+ if !ok {
+ return nil
+ }
+
+ if err := t.LoadAttributes(ctx); err != nil {
+ return fmt.Errorf("task LoadAttributes: %w", err)
+ }
+ job = t.Job
+ actionTask = t
+
+ secrets, err := secret_model.GetSecretsOfTask(ctx, t)
+ if err != nil {
+ return fmt.Errorf("GetSecretsOfTask: %w", err)
+ }
+
+ vars, err := actions_model.GetVariablesOfRun(ctx, t.Job.Run)
+ if err != nil {
+ return fmt.Errorf("GetVariablesOfRun: %w", err)
+ }
+
+ needs, err := findTaskNeeds(ctx, job)
+ if err != nil {
+ return fmt.Errorf("findTaskNeeds: %w", err)
+ }
+
+ taskContext, err := generateTaskContext(t)
+ if err != nil {
+ return fmt.Errorf("generateTaskContext: %w", err)
+ }
+
+ task = &runnerv1.Task{
+ Id: t.ID,
+ WorkflowPayload: t.Job.WorkflowPayload,
+ Context: taskContext,
+ Secrets: secrets,
+ Vars: vars,
+ Needs: needs,
+ }
+
+ return nil
+ }); err != nil {
+ return nil, false, err
+ }
+
+ if task == nil {
+ return nil, false, nil
+ }
+
+ CreateCommitStatus(ctx, job)
+ notify_service.WorkflowJobStatusUpdate(ctx, job.Run.Repo, job.Run.TriggerUser, job, actionTask)
+
+ return task, true, nil
+}
+
+func generateTaskContext(t *actions_model.ActionTask) (*structpb.Struct, error) {
+ giteaRuntimeToken, err := CreateAuthorizationToken(t.ID, t.Job.RunID, t.JobID)
+ if err != nil {
+ return nil, err
+ }
+
+ gitCtx := GenerateGiteaContext(t.Job.Run, t.Job)
+ gitCtx["token"] = t.Token
+ gitCtx["gitea_runtime_token"] = giteaRuntimeToken
+
+ return structpb.NewStruct(gitCtx)
+}
+
+func findTaskNeeds(ctx context.Context, taskJob *actions_model.ActionRunJob) (map[string]*runnerv1.TaskNeed, error) {
+ taskNeeds, err := FindTaskNeeds(ctx, taskJob)
+ if err != nil {
+ return nil, err
+ }
+ ret := make(map[string]*runnerv1.TaskNeed, len(taskNeeds))
+ for jobID, taskNeed := range taskNeeds {
+ ret[jobID] = &runnerv1.TaskNeed{
+ Outputs: taskNeed.Outputs,
+ Result: runnerv1.Result(taskNeed.Result),
+ }
+ }
+ return ret, nil
+}
diff --git a/services/actions/variables.go b/services/actions/variables.go
index 8dde9c4af5..2603f1d461 100644
--- a/services/actions/variables.go
+++ b/services/actions/variables.go
@@ -6,7 +6,6 @@ package actions
import (
"context"
"regexp"
- "strings"
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/modules/log"
@@ -14,7 +13,7 @@ import (
secret_service "code.gitea.io/gitea/services/secrets"
)
-func CreateVariable(ctx context.Context, ownerID, repoID int64, name, data string) (*actions_model.ActionVariable, error) {
+func CreateVariable(ctx context.Context, ownerID, repoID int64, name, data, description string) (*actions_model.ActionVariable, error) {
if err := secret_service.ValidateName(name); err != nil {
return nil, err
}
@@ -23,7 +22,7 @@ func CreateVariable(ctx context.Context, ownerID, repoID int64, name, data strin
return nil, err
}
- v, err := actions_model.InsertVariable(ctx, ownerID, repoID, name, util.ReserveLineBreakForTextarea(data))
+ v, err := actions_model.InsertVariable(ctx, ownerID, repoID, name, util.ReserveLineBreakForTextarea(data), description)
if err != nil {
return nil, err
}
@@ -31,20 +30,18 @@ func CreateVariable(ctx context.Context, ownerID, repoID int64, name, data strin
return v, nil
}
-func UpdateVariable(ctx context.Context, variableID int64, name, data string) (bool, error) {
- if err := secret_service.ValidateName(name); err != nil {
+func UpdateVariableNameData(ctx context.Context, variable *actions_model.ActionVariable) (bool, error) {
+ if err := secret_service.ValidateName(variable.Name); err != nil {
return false, err
}
- if err := envNameCIRegexMatch(name); err != nil {
+ if err := envNameCIRegexMatch(variable.Name); err != nil {
return false, err
}
- return actions_model.UpdateVariable(ctx, &actions_model.ActionVariable{
- ID: variableID,
- Name: strings.ToUpper(name),
- Data: util.ReserveLineBreakForTextarea(data),
- })
+ variable.Data = util.ReserveLineBreakForTextarea(variable.Data)
+
+ return actions_model.UpdateVariableCols(ctx, variable, "name", "data", "description")
}
func DeleteVariableByID(ctx context.Context, variableID int64) error {
diff --git a/services/actions/workflow.go b/services/actions/workflow.go
new file mode 100644
index 0000000000..dc8a1dd349
--- /dev/null
+++ b/services/actions/workflow.go
@@ -0,0 +1,285 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "fmt"
+ "net/http"
+ "net/url"
+ "path"
+ "strings"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/actions"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/reqctx"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/convert"
+ notify_service "code.gitea.io/gitea/services/notify"
+
+ "github.com/nektos/act/pkg/jobparser"
+ "github.com/nektos/act/pkg/model"
+)
+
+func getActionWorkflowPath(commit *git.Commit) string {
+ paths := []string{".gitea/workflows", ".github/workflows"}
+ for _, treePath := range paths {
+ if _, err := commit.SubTree(treePath); err == nil {
+ return treePath
+ }
+ }
+ return ""
+}
+
+func getActionWorkflowEntry(ctx *context.APIContext, commit *git.Commit, folder string, entry *git.TreeEntry) *api.ActionWorkflow {
+ cfgUnit := ctx.Repo.Repository.MustGetUnit(ctx, unit.TypeActions)
+ cfg := cfgUnit.ActionsConfig()
+
+ defaultBranch, _ := commit.GetBranchName()
+
+ workflowURL := fmt.Sprintf("%s/actions/workflows/%s", ctx.Repo.Repository.APIURL(), url.PathEscape(entry.Name()))
+ workflowRepoURL := fmt.Sprintf("%s/src/branch/%s/%s/%s", ctx.Repo.Repository.HTMLURL(ctx), util.PathEscapeSegments(defaultBranch), util.PathEscapeSegments(folder), url.PathEscape(entry.Name()))
+ badgeURL := fmt.Sprintf("%s/actions/workflows/%s/badge.svg?branch=%s", ctx.Repo.Repository.HTMLURL(ctx), url.PathEscape(entry.Name()), url.QueryEscape(ctx.Repo.Repository.DefaultBranch))
+
+ // See https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#get-a-workflow
+ // State types:
+ // - active
+ // - deleted
+ // - disabled_fork
+ // - disabled_inactivity
+ // - disabled_manually
+ state := "active"
+ if cfg.IsWorkflowDisabled(entry.Name()) {
+ state = "disabled_manually"
+ }
+
+ // The CreatedAt and UpdatedAt fields currently reflect the timestamp of the latest commit, which can later be refined
+ // by retrieving the first and last commits for the file history. The first commit would indicate the creation date,
+ // while the last commit would represent the modification date. The DeletedAt could be determined by identifying
+ // the last commit where the file existed. However, this implementation has not been done here yet, as it would likely
+ // cause a significant performance degradation.
+ createdAt := commit.Author.When
+ updatedAt := commit.Author.When
+
+ return &api.ActionWorkflow{
+ ID: entry.Name(),
+ Name: entry.Name(),
+ Path: path.Join(folder, entry.Name()),
+ State: state,
+ CreatedAt: createdAt,
+ UpdatedAt: updatedAt,
+ URL: workflowURL,
+ HTMLURL: workflowRepoURL,
+ BadgeURL: badgeURL,
+ }
+}
+
+func EnableOrDisableWorkflow(ctx *context.APIContext, workflowID string, isEnable bool) error {
+ workflow, err := GetActionWorkflow(ctx, workflowID)
+ if err != nil {
+ return err
+ }
+
+ cfgUnit := ctx.Repo.Repository.MustGetUnit(ctx, unit.TypeActions)
+ cfg := cfgUnit.ActionsConfig()
+
+ if isEnable {
+ cfg.EnableWorkflow(workflow.ID)
+ } else {
+ cfg.DisableWorkflow(workflow.ID)
+ }
+
+ return repo_model.UpdateRepoUnit(ctx, cfgUnit)
+}
+
+func ListActionWorkflows(ctx *context.APIContext) ([]*api.ActionWorkflow, error) {
+ defaultBranchCommit, err := ctx.Repo.GitRepo.GetBranchCommit(ctx.Repo.Repository.DefaultBranch)
+ if err != nil {
+ ctx.APIErrorInternal(err)
+ return nil, err
+ }
+
+ entries, err := actions.ListWorkflows(defaultBranchCommit)
+ if err != nil {
+ ctx.APIError(http.StatusNotFound, err.Error())
+ return nil, err
+ }
+
+ folder := getActionWorkflowPath(defaultBranchCommit)
+
+ workflows := make([]*api.ActionWorkflow, len(entries))
+ for i, entry := range entries {
+ workflows[i] = getActionWorkflowEntry(ctx, defaultBranchCommit, folder, entry)
+ }
+
+ return workflows, nil
+}
+
+func GetActionWorkflow(ctx *context.APIContext, workflowID string) (*api.ActionWorkflow, error) {
+ entries, err := ListActionWorkflows(ctx)
+ if err != nil {
+ return nil, err
+ }
+
+ for _, entry := range entries {
+ if entry.Name == workflowID {
+ return entry, nil
+ }
+ }
+
+ return nil, util.NewNotExistErrorf("workflow %q not found", workflowID)
+}
+
+func DispatchActionWorkflow(ctx reqctx.RequestContext, doer *user_model.User, repo *repo_model.Repository, gitRepo *git.Repository, workflowID, ref string, processInputs func(model *model.WorkflowDispatch, inputs map[string]any) error) error {
+ if workflowID == "" {
+ return util.ErrorWrapLocale(
+ util.NewNotExistErrorf("workflowID is empty"),
+ "actions.workflow.not_found", workflowID,
+ )
+ }
+
+ if ref == "" {
+ return util.ErrorWrapLocale(
+ util.NewNotExistErrorf("ref is empty"),
+ "form.target_ref_not_exist", ref,
+ )
+ }
+
+ // can not rerun job when workflow is disabled
+ cfgUnit := repo.MustGetUnit(ctx, unit.TypeActions)
+ cfg := cfgUnit.ActionsConfig()
+ if cfg.IsWorkflowDisabled(workflowID) {
+ return util.ErrorWrapLocale(
+ util.NewPermissionDeniedErrorf("workflow is disabled"),
+ "actions.workflow.disabled",
+ )
+ }
+
+ // get target commit of run from specified ref
+ refName := git.RefName(ref)
+ var runTargetCommit *git.Commit
+ var err error
+ if refName.IsTag() {
+ runTargetCommit, err = gitRepo.GetTagCommit(refName.TagName())
+ } else if refName.IsBranch() {
+ runTargetCommit, err = gitRepo.GetBranchCommit(refName.BranchName())
+ } else {
+ refName = git.RefNameFromBranch(ref)
+ runTargetCommit, err = gitRepo.GetBranchCommit(ref)
+ }
+ if err != nil {
+ return util.ErrorWrapLocale(
+ util.NewNotExistErrorf("ref %q doesn't exist", ref),
+ "form.target_ref_not_exist", ref,
+ )
+ }
+
+ // get workflow entry from runTargetCommit
+ entries, err := actions.ListWorkflows(runTargetCommit)
+ if err != nil {
+ return err
+ }
+
+ // find workflow from commit
+ var workflows []*jobparser.SingleWorkflow
+ for _, entry := range entries {
+ if entry.Name() != workflowID {
+ continue
+ }
+
+ content, err := actions.GetContentFromEntry(entry)
+ if err != nil {
+ return err
+ }
+ workflows, err = jobparser.Parse(content)
+ if err != nil {
+ return err
+ }
+ break
+ }
+
+ if len(workflows) == 0 {
+ return util.ErrorWrapLocale(
+ util.NewNotExistErrorf("workflow %q doesn't exist", workflowID),
+ "actions.workflow.not_found", workflowID,
+ )
+ }
+
+ // get inputs from post
+ workflow := &model.Workflow{
+ RawOn: workflows[0].RawOn,
+ }
+ inputsWithDefaults := make(map[string]any)
+ if workflowDispatch := workflow.WorkflowDispatchConfig(); workflowDispatch != nil {
+ if err = processInputs(workflowDispatch, inputsWithDefaults); err != nil {
+ return err
+ }
+ }
+
+ // ctx.Req.PostForm -> WorkflowDispatchPayload.Inputs -> ActionRun.EventPayload -> runner: ghc.Event
+ // https://docs.github.com/en/actions/learn-github-actions/contexts#github-context
+ // https://docs.github.com/en/webhooks/webhook-events-and-payloads#workflow_dispatch
+ workflowDispatchPayload := &api.WorkflowDispatchPayload{
+ Workflow: workflowID,
+ Ref: ref,
+ Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeNone}),
+ Inputs: inputsWithDefaults,
+ Sender: convert.ToUserWithAccessMode(ctx, doer, perm.AccessModeNone),
+ }
+ var eventPayload []byte
+ if eventPayload, err = workflowDispatchPayload.JSONPayload(); err != nil {
+ return fmt.Errorf("JSONPayload: %w", err)
+ }
+
+ run := &actions_model.ActionRun{
+ Title: strings.SplitN(runTargetCommit.CommitMessage, "\n", 2)[0],
+ RepoID: repo.ID,
+ OwnerID: repo.OwnerID,
+ WorkflowID: workflowID,
+ TriggerUserID: doer.ID,
+ Ref: string(refName),
+ CommitSHA: runTargetCommit.ID.String(),
+ IsForkPullRequest: false,
+ Event: "workflow_dispatch",
+ TriggerEvent: "workflow_dispatch",
+ EventPayload: string(eventPayload),
+ Status: actions_model.StatusWaiting,
+ }
+
+ // cancel running jobs of the same workflow
+ if err := CancelPreviousJobs(
+ ctx,
+ run.RepoID,
+ run.Ref,
+ run.WorkflowID,
+ run.Event,
+ ); err != nil {
+ log.Error("CancelRunningJobs: %v", err)
+ }
+
+ // Insert the action run and its associated jobs into the database
+ if err := actions_model.InsertRun(ctx, run, workflows); err != nil {
+ return fmt.Errorf("InsertRun: %w", err)
+ }
+
+ allJobs, err := db.Find[actions_model.ActionRunJob](ctx, actions_model.FindRunJobOptions{RunID: run.ID})
+ if err != nil {
+ log.Error("FindRunJobs: %v", err)
+ }
+ CreateCommitStatus(ctx, allJobs...)
+ for _, job := range allJobs {
+ notify_service.WorkflowJobStatusUpdate(ctx, repo, doer, job, nil)
+ }
+
+ return nil
+}
diff --git a/services/agit/agit.go b/services/agit/agit.go
index 897e825012..1e6ce93312 100644
--- a/services/agit/agit.go
+++ b/services/agit/agit.go
@@ -13,6 +13,7 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/private"
"code.gitea.io/gitea/modules/setting"
@@ -56,10 +57,10 @@ func ProcReceive(ctx context.Context, repo *repo_model.Repository, gitRepo *git.
baseBranchName := opts.RefFullNames[i].ForBranchName()
currentTopicBranch := ""
- if !gitRepo.IsBranchExist(baseBranchName) {
+ if !gitrepo.IsBranchExist(ctx, repo, baseBranchName) {
// try match refs/for/<target-branch>/<topic-branch>
for p, v := range baseBranchName {
- if v == '/' && gitRepo.IsBranchExist(baseBranchName[:p]) && p != len(baseBranchName)-1 {
+ if v == '/' && gitrepo.IsBranchExist(ctx, repo, baseBranchName[:p]) && p != len(baseBranchName)-1 {
currentTopicBranch = baseBranchName[p+1:]
baseBranchName = baseBranchName[:p]
break
@@ -182,9 +183,9 @@ func ProcReceive(ctx context.Context, repo *repo_model.Repository, gitRepo *git.
}
if !forcePush.Value() {
- output, _, err := git.NewCommand(ctx, "rev-list", "--max-count=1").
+ output, _, err := git.NewCommand("rev-list", "--max-count=1").
AddDynamicArguments(oldCommitID, "^"+opts.NewCommitIDs[i]).
- RunStdString(&git.RunOpts{Dir: repo.RepoPath(), Env: os.Environ()})
+ RunStdString(ctx, &git.RunOpts{Dir: repo.RepoPath(), Env: os.Environ()})
if err != nil {
return nil, fmt.Errorf("failed to detect force push: %w", err)
} else if len(output) > 0 {
diff --git a/services/asymkey/commit.go b/services/asymkey/commit.go
new file mode 100644
index 0000000000..df29133972
--- /dev/null
+++ b/services/asymkey/commit.go
@@ -0,0 +1,363 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package asymkey
+
+import (
+ "context"
+ "strings"
+
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/ProtonMail/go-crypto/openpgp/packet"
+)
+
+// ParseCommitWithSignature check if signature is good against keystore.
+func ParseCommitWithSignature(ctx context.Context, c *git.Commit) *asymkey_model.CommitVerification {
+ var committer *user_model.User
+ if c.Committer != nil {
+ var err error
+ // Find Committer account
+ committer, err = user_model.GetUserByEmail(ctx, c.Committer.Email) // This finds the user by primary email or activated email so commit will not be valid if email is not
+ if err != nil { // Skipping not user for committer
+ committer = &user_model.User{
+ Name: c.Committer.Name,
+ Email: c.Committer.Email,
+ }
+ // We can expect this to often be an ErrUserNotExist. in the case
+ // it is not, however, it is important to log it.
+ if !user_model.IsErrUserNotExist(err) {
+ log.Error("GetUserByEmail: %v", err)
+ return &asymkey_model.CommitVerification{
+ CommittingUser: committer,
+ Verified: false,
+ Reason: "gpg.error.no_committer_account",
+ }
+ }
+ }
+ }
+
+ return ParseCommitWithSignatureCommitter(ctx, c, committer)
+}
+
+func ParseCommitWithSignatureCommitter(ctx context.Context, c *git.Commit, committer *user_model.User) *asymkey_model.CommitVerification {
+ // If no signature just report the committer
+ if c.Signature == nil {
+ return &asymkey_model.CommitVerification{
+ CommittingUser: committer,
+ Verified: false, // Default value
+ Reason: "gpg.error.not_signed_commit", // Default value
+ }
+ }
+
+ // If this a SSH signature handle it differently
+ if strings.HasPrefix(c.Signature.Signature, "-----BEGIN SSH SIGNATURE-----") {
+ return asymkey_model.ParseCommitWithSSHSignature(ctx, c, committer)
+ }
+
+ // Parsing signature
+ sig, err := asymkey_model.ExtractSignature(c.Signature.Signature)
+ if err != nil { // Skipping failed to extract sign
+ log.Error("SignatureRead err: %v", err)
+ return &asymkey_model.CommitVerification{
+ CommittingUser: committer,
+ Verified: false,
+ Reason: "gpg.error.extract_sign",
+ }
+ }
+
+ keyID := asymkey_model.TryGetKeyIDFromSignature(sig)
+ defaultReason := asymkey_model.NoKeyFound
+
+ // First check if the sig has a keyID and if so just look at that
+ if commitVerification := HashAndVerifyForKeyID(
+ ctx,
+ sig,
+ c.Signature.Payload,
+ committer,
+ keyID,
+ setting.AppName,
+ ""); commitVerification != nil {
+ if commitVerification.Reason == asymkey_model.BadSignature {
+ defaultReason = asymkey_model.BadSignature
+ } else {
+ return commitVerification
+ }
+ }
+
+ // Now try to associate the signature with the committer, if present
+ if committer.ID != 0 {
+ keys, err := db.Find[asymkey_model.GPGKey](ctx, asymkey_model.FindGPGKeyOptions{
+ OwnerID: committer.ID,
+ })
+ if err != nil { // Skipping failed to get gpg keys of user
+ log.Error("ListGPGKeys: %v", err)
+ return &asymkey_model.CommitVerification{
+ CommittingUser: committer,
+ Verified: false,
+ Reason: "gpg.error.failed_retrieval_gpg_keys",
+ }
+ }
+
+ if err := asymkey_model.GPGKeyList(keys).LoadSubKeys(ctx); err != nil {
+ log.Error("LoadSubKeys: %v", err)
+ return &asymkey_model.CommitVerification{
+ CommittingUser: committer,
+ Verified: false,
+ Reason: "gpg.error.failed_retrieval_gpg_keys",
+ }
+ }
+
+ committerEmailAddresses, _ := user_model.GetEmailAddresses(ctx, committer.ID)
+ activated := false
+ for _, e := range committerEmailAddresses {
+ if e.IsActivated && strings.EqualFold(e.Email, c.Committer.Email) {
+ activated = true
+ break
+ }
+ }
+
+ for _, k := range keys {
+ // Pre-check (& optimization) that emails attached to key can be attached to the committer email and can validate
+ canValidate := false
+ email := ""
+ if k.Verified && activated {
+ canValidate = true
+ email = c.Committer.Email
+ }
+ if !canValidate {
+ for _, e := range k.Emails {
+ if e.IsActivated && strings.EqualFold(e.Email, c.Committer.Email) {
+ canValidate = true
+ email = e.Email
+ break
+ }
+ }
+ }
+ if !canValidate {
+ continue // Skip this key
+ }
+
+ commitVerification := asymkey_model.HashAndVerifyWithSubKeysCommitVerification(sig, c.Signature.Payload, k, committer, committer, email)
+ if commitVerification != nil {
+ return commitVerification
+ }
+ }
+ }
+
+ if setting.Repository.Signing.SigningKey != "" && setting.Repository.Signing.SigningKey != "default" && setting.Repository.Signing.SigningKey != "none" {
+ // OK we should try the default key
+ gpgSettings := git.GPGSettings{
+ Sign: true,
+ KeyID: setting.Repository.Signing.SigningKey,
+ Name: setting.Repository.Signing.SigningName,
+ Email: setting.Repository.Signing.SigningEmail,
+ }
+ if err := gpgSettings.LoadPublicKeyContent(); err != nil {
+ log.Error("Error getting default signing key: %s %v", gpgSettings.KeyID, err)
+ } else if commitVerification := VerifyWithGPGSettings(ctx, &gpgSettings, sig, c.Signature.Payload, committer, keyID); commitVerification != nil {
+ if commitVerification.Reason == asymkey_model.BadSignature {
+ defaultReason = asymkey_model.BadSignature
+ } else {
+ return commitVerification
+ }
+ }
+ }
+
+ defaultGPGSettings, err := c.GetRepositoryDefaultPublicGPGKey(false)
+ if err != nil {
+ log.Error("Error getting default public gpg key: %v", err)
+ } else if defaultGPGSettings == nil {
+ log.Warn("Unable to get defaultGPGSettings for unattached commit: %s", c.ID.String())
+ } else if defaultGPGSettings.Sign {
+ if commitVerification := VerifyWithGPGSettings(ctx, defaultGPGSettings, sig, c.Signature.Payload, committer, keyID); commitVerification != nil {
+ if commitVerification.Reason == asymkey_model.BadSignature {
+ defaultReason = asymkey_model.BadSignature
+ } else {
+ return commitVerification
+ }
+ }
+ }
+
+ return &asymkey_model.CommitVerification{ // Default at this stage
+ CommittingUser: committer,
+ Verified: false,
+ Warning: defaultReason != asymkey_model.NoKeyFound,
+ Reason: defaultReason,
+ SigningKey: &asymkey_model.GPGKey{
+ KeyID: keyID,
+ },
+ }
+}
+
+func checkKeyEmails(ctx context.Context, email string, keys ...*asymkey_model.GPGKey) (bool, string) {
+ uid := int64(0)
+ var userEmails []*user_model.EmailAddress
+ var user *user_model.User
+ for _, key := range keys {
+ for _, e := range key.Emails {
+ if e.IsActivated && (email == "" || strings.EqualFold(e.Email, email)) {
+ return true, e.Email
+ }
+ }
+ if key.Verified && key.OwnerID != 0 {
+ if uid != key.OwnerID {
+ userEmails, _ = user_model.GetEmailAddresses(ctx, key.OwnerID)
+ uid = key.OwnerID
+ user = &user_model.User{ID: uid}
+ _, _ = user_model.GetUser(ctx, user)
+ }
+ for _, e := range userEmails {
+ if e.IsActivated && (email == "" || strings.EqualFold(e.Email, email)) {
+ return true, e.Email
+ }
+ }
+ if user.KeepEmailPrivate && strings.EqualFold(email, user.GetEmail()) {
+ return true, user.GetEmail()
+ }
+ }
+ }
+ return false, email
+}
+
+func HashAndVerifyForKeyID(ctx context.Context, sig *packet.Signature, payload string, committer *user_model.User, keyID, name, email string) *asymkey_model.CommitVerification {
+ if keyID == "" {
+ return nil
+ }
+ keys, err := db.Find[asymkey_model.GPGKey](ctx, asymkey_model.FindGPGKeyOptions{
+ KeyID: keyID,
+ IncludeSubKeys: true,
+ })
+ if err != nil {
+ log.Error("GetGPGKeysByKeyID: %v", err)
+ return &asymkey_model.CommitVerification{
+ CommittingUser: committer,
+ Verified: false,
+ Reason: "gpg.error.failed_retrieval_gpg_keys",
+ }
+ }
+ if len(keys) == 0 {
+ return nil
+ }
+ for _, key := range keys {
+ var primaryKeys []*asymkey_model.GPGKey
+ if key.PrimaryKeyID != "" {
+ primaryKeys, err = db.Find[asymkey_model.GPGKey](ctx, asymkey_model.FindGPGKeyOptions{
+ KeyID: key.PrimaryKeyID,
+ IncludeSubKeys: true,
+ })
+ if err != nil {
+ log.Error("GetGPGKeysByKeyID: %v", err)
+ return &asymkey_model.CommitVerification{
+ CommittingUser: committer,
+ Verified: false,
+ Reason: "gpg.error.failed_retrieval_gpg_keys",
+ }
+ }
+ }
+
+ activated, email := checkKeyEmails(ctx, email, append([]*asymkey_model.GPGKey{key}, primaryKeys...)...)
+ if !activated {
+ continue
+ }
+
+ signer := &user_model.User{
+ Name: name,
+ Email: email,
+ }
+ if key.OwnerID != 0 {
+ owner, err := user_model.GetUserByID(ctx, key.OwnerID)
+ if err == nil {
+ signer = owner
+ } else if !user_model.IsErrUserNotExist(err) {
+ log.Error("Failed to user_model.GetUserByID: %d for key ID: %d (%s) %v", key.OwnerID, key.ID, key.KeyID, err)
+ return &asymkey_model.CommitVerification{
+ CommittingUser: committer,
+ Verified: false,
+ Reason: "gpg.error.no_committer_account",
+ }
+ }
+ }
+ commitVerification := asymkey_model.HashAndVerifyWithSubKeysCommitVerification(sig, payload, key, committer, signer, email)
+ if commitVerification != nil {
+ return commitVerification
+ }
+ }
+ // This is a bad situation ... We have a key id that is in our database but the signature doesn't match.
+ return &asymkey_model.CommitVerification{
+ CommittingUser: committer,
+ Verified: false,
+ Warning: true,
+ Reason: asymkey_model.BadSignature,
+ }
+}
+
+func VerifyWithGPGSettings(ctx context.Context, gpgSettings *git.GPGSettings, sig *packet.Signature, payload string, committer *user_model.User, keyID string) *asymkey_model.CommitVerification {
+ // First try to find the key in the db
+ if commitVerification := HashAndVerifyForKeyID(ctx, sig, payload, committer, gpgSettings.KeyID, gpgSettings.Name, gpgSettings.Email); commitVerification != nil {
+ return commitVerification
+ }
+
+ // Otherwise we have to parse the key
+ ekeys, err := asymkey_model.CheckArmoredGPGKeyString(gpgSettings.PublicKeyContent)
+ if err != nil {
+ log.Error("Unable to get default signing key: %v", err)
+ return &asymkey_model.CommitVerification{
+ CommittingUser: committer,
+ Verified: false,
+ Reason: "gpg.error.generate_hash",
+ }
+ }
+ for _, ekey := range ekeys {
+ pubkey := ekey.PrimaryKey
+ content, err := asymkey_model.Base64EncPubKey(pubkey)
+ if err != nil {
+ return &asymkey_model.CommitVerification{
+ CommittingUser: committer,
+ Verified: false,
+ Reason: "gpg.error.generate_hash",
+ }
+ }
+ k := &asymkey_model.GPGKey{
+ Content: content,
+ CanSign: pubkey.CanSign(),
+ KeyID: pubkey.KeyIdString(),
+ }
+ for _, subKey := range ekey.Subkeys {
+ content, err := asymkey_model.Base64EncPubKey(subKey.PublicKey)
+ if err != nil {
+ return &asymkey_model.CommitVerification{
+ CommittingUser: committer,
+ Verified: false,
+ Reason: "gpg.error.generate_hash",
+ }
+ }
+ k.SubsKey = append(k.SubsKey, &asymkey_model.GPGKey{
+ Content: content,
+ CanSign: subKey.PublicKey.CanSign(),
+ KeyID: subKey.PublicKey.KeyIdString(),
+ })
+ }
+ if commitVerification := asymkey_model.HashAndVerifyWithSubKeysCommitVerification(sig, payload, k, committer, &user_model.User{
+ Name: gpgSettings.Name,
+ Email: gpgSettings.Email,
+ }, gpgSettings.Email); commitVerification != nil {
+ return commitVerification
+ }
+ if keyID == k.KeyID {
+ // This is a bad situation ... We have a key id that matches our default key but the signature doesn't match.
+ return &asymkey_model.CommitVerification{
+ CommittingUser: committer,
+ Verified: false,
+ Warning: true,
+ Reason: asymkey_model.BadSignature,
+ }
+ }
+ }
+ return nil
+}
diff --git a/services/asymkey/sign.go b/services/asymkey/sign.go
index 2f5d76a293..2216bca54a 100644
--- a/services/asymkey/sign.go
+++ b/services/asymkey/sign.go
@@ -92,15 +92,15 @@ func SigningKey(ctx context.Context, repoPath string) (string, *git.Signature) {
if setting.Repository.Signing.SigningKey == "default" || setting.Repository.Signing.SigningKey == "" {
// Can ignore the error here as it means that commit.gpgsign is not set
- value, _, _ := git.NewCommand(ctx, "config", "--get", "commit.gpgsign").RunStdString(&git.RunOpts{Dir: repoPath})
+ value, _, _ := git.NewCommand("config", "--get", "commit.gpgsign").RunStdString(ctx, &git.RunOpts{Dir: repoPath})
sign, valid := git.ParseBool(strings.TrimSpace(value))
if !sign || !valid {
return "", nil
}
- signingKey, _, _ := git.NewCommand(ctx, "config", "--get", "user.signingkey").RunStdString(&git.RunOpts{Dir: repoPath})
- signingName, _, _ := git.NewCommand(ctx, "config", "--get", "user.name").RunStdString(&git.RunOpts{Dir: repoPath})
- signingEmail, _, _ := git.NewCommand(ctx, "config", "--get", "user.email").RunStdString(&git.RunOpts{Dir: repoPath})
+ signingKey, _, _ := git.NewCommand("config", "--get", "user.signingkey").RunStdString(ctx, &git.RunOpts{Dir: repoPath})
+ signingName, _, _ := git.NewCommand("config", "--get", "user.name").RunStdString(ctx, &git.RunOpts{Dir: repoPath})
+ signingEmail, _, _ := git.NewCommand("config", "--get", "user.email").RunStdString(ctx, &git.RunOpts{Dir: repoPath})
return strings.TrimSpace(signingKey), &git.Signature{
Name: strings.TrimSpace(signingName),
Email: strings.TrimSpace(signingEmail),
@@ -204,7 +204,7 @@ Loop:
return false, "", nil, &ErrWontSign{twofa}
}
case parentSigned:
- gitRepo, err := gitrepo.OpenWikiRepository(ctx, repo)
+ gitRepo, err := gitrepo.OpenRepository(ctx, repo.WikiStorageRepo())
if err != nil {
return false, "", nil, err
}
@@ -216,7 +216,7 @@ Loop:
if commit.Signature == nil {
return false, "", nil, &ErrWontSign{parentSigned}
}
- verification := asymkey_model.ParseCommitWithSignature(ctx, commit)
+ verification := ParseCommitWithSignature(ctx, commit)
if !verification.Verified {
return false, "", nil, &ErrWontSign{parentSigned}
}
@@ -272,7 +272,7 @@ Loop:
if commit.Signature == nil {
return false, "", nil, &ErrWontSign{parentSigned}
}
- verification := asymkey_model.ParseCommitWithSignature(ctx, commit)
+ verification := ParseCommitWithSignature(ctx, commit)
if !verification.Verified {
return false, "", nil, &ErrWontSign{parentSigned}
}
@@ -347,7 +347,7 @@ Loop:
if err != nil {
return false, "", nil, err
}
- verification := asymkey_model.ParseCommitWithSignature(ctx, commit)
+ verification := ParseCommitWithSignature(ctx, commit)
if !verification.Verified {
return false, "", nil, &ErrWontSign{baseSigned}
}
@@ -363,7 +363,7 @@ Loop:
if err != nil {
return false, "", nil, err
}
- verification := asymkey_model.ParseCommitWithSignature(ctx, commit)
+ verification := ParseCommitWithSignature(ctx, commit)
if !verification.Verified {
return false, "", nil, &ErrWontSign{headSigned}
}
@@ -379,7 +379,7 @@ Loop:
if err != nil {
return false, "", nil, err
}
- verification := asymkey_model.ParseCommitWithSignature(ctx, commit)
+ verification := ParseCommitWithSignature(ctx, commit)
if !verification.Verified {
return false, "", nil, &ErrWontSign{commitsSigned}
}
@@ -393,7 +393,7 @@ Loop:
return false, "", nil, err
}
for _, commit := range commitList {
- verification := asymkey_model.ParseCommitWithSignature(ctx, commit)
+ verification := ParseCommitWithSignature(ctx, commit)
if !verification.Verified {
return false, "", nil, &ErrWontSign{commitsSigned}
}
diff --git a/services/attachment/attachment_test.go b/services/attachment/attachment_test.go
index 142bcfe629..65475836be 100644
--- a/services/attachment/attachment_test.go
+++ b/services/attachment/attachment_test.go
@@ -41,6 +41,6 @@ func TestUploadAttachment(t *testing.T) {
attachment, err := repo_model.GetAttachmentByUUID(db.DefaultContext, attach.UUID)
assert.NoError(t, err)
- assert.EqualValues(t, user.ID, attachment.UploaderID)
+ assert.Equal(t, user.ID, attachment.UploaderID)
assert.Equal(t, int64(0), attachment.DownloadCount)
}
diff --git a/services/auth/auth.go b/services/auth/auth.go
index 7deca9bc3d..f7deeb4c50 100644
--- a/services/auth/auth.go
+++ b/services/auth/auth.go
@@ -149,7 +149,7 @@ func handleSignIn(resp http.ResponseWriter, req *http.Request, sess SessionStore
middleware.SetLocaleCookie(resp, user.Language, 0)
// force to generate a new CSRF token
- if ctx := gitea_context.GetWebContext(req); ctx != nil {
+ if ctx := gitea_context.GetWebContext(req.Context()); ctx != nil {
ctx.Csrf.PrepareForSessionUser(ctx)
}
}
diff --git a/services/auth/oauth2_test.go b/services/auth/oauth2_test.go
index 0d9e793cf3..f003742a94 100644
--- a/services/auth/oauth2_test.go
+++ b/services/auth/oauth2_test.go
@@ -4,7 +4,6 @@
package auth
import (
- "context"
"testing"
"code.gitea.io/gitea/models/unittest"
@@ -26,8 +25,8 @@ func TestUserIDFromToken(t *testing.T) {
ds := make(reqctx.ContextData)
o := OAuth2{}
- uid := o.userIDFromToken(context.Background(), token, ds)
- assert.Equal(t, int64(user_model.ActionsUserID), uid)
+ uid := o.userIDFromToken(t.Context(), token, ds)
+ assert.Equal(t, user_model.ActionsUserID, uid)
assert.Equal(t, true, ds["IsActionsToken"])
assert.Equal(t, ds["ActionsTaskID"], int64(RunningTaskID))
})
@@ -48,7 +47,7 @@ func TestCheckTaskIsRunning(t *testing.T) {
for name := range cases {
c := cases[name]
t.Run(name, func(t *testing.T) {
- actual := CheckTaskIsRunning(context.Background(), c.TaskID)
+ actual := CheckTaskIsRunning(t.Context(), c.TaskID)
assert.Equal(t, c.Expected, actual)
})
}
diff --git a/services/auth/source/oauth2/source_sync_test.go b/services/auth/source/oauth2/source_sync_test.go
index 893ed62502..08d841cc90 100644
--- a/services/auth/source/oauth2/source_sync_test.go
+++ b/services/auth/source/oauth2/source_sync_test.go
@@ -4,7 +4,6 @@
package oauth2
import (
- "context"
"testing"
"code.gitea.io/gitea/models/auth"
@@ -36,7 +35,7 @@ func TestSource(t *testing.T) {
Email: "external@example.com",
}
- err := user_model.CreateUser(context.Background(), user, &user_model.Meta{}, &user_model.CreateUserOverwriteOptions{})
+ err := user_model.CreateUser(t.Context(), user, &user_model.Meta{}, &user_model.CreateUserOverwriteOptions{})
assert.NoError(t, err)
e := &user_model.ExternalLoginUser{
@@ -45,7 +44,7 @@ func TestSource(t *testing.T) {
LoginSourceID: user.LoginSource,
RefreshToken: "valid",
}
- err = user_model.LinkExternalToUser(context.Background(), user, e)
+ err = user_model.LinkExternalToUser(t.Context(), user, e)
assert.NoError(t, err)
provider, err := createProvider(source.authSource.Name, source)
@@ -53,7 +52,7 @@ func TestSource(t *testing.T) {
t.Run("refresh", func(t *testing.T) {
t.Run("valid", func(t *testing.T) {
- err := source.refresh(context.Background(), provider, e)
+ err := source.refresh(t.Context(), provider, e)
assert.NoError(t, err)
e := &user_model.ExternalLoginUser{
@@ -61,19 +60,19 @@ func TestSource(t *testing.T) {
LoginSourceID: e.LoginSourceID,
}
- ok, err := user_model.GetExternalLogin(context.Background(), e)
+ ok, err := user_model.GetExternalLogin(t.Context(), e)
assert.NoError(t, err)
assert.True(t, ok)
assert.Equal(t, "refresh", e.RefreshToken)
assert.Equal(t, "token", e.AccessToken)
- u, err := user_model.GetUserByID(context.Background(), user.ID)
+ u, err := user_model.GetUserByID(t.Context(), user.ID)
assert.NoError(t, err)
assert.True(t, u.IsActive)
})
t.Run("expired", func(t *testing.T) {
- err := source.refresh(context.Background(), provider, &user_model.ExternalLoginUser{
+ err := source.refresh(t.Context(), provider, &user_model.ExternalLoginUser{
ExternalID: "external",
UserID: user.ID,
LoginSourceID: user.LoginSource,
@@ -86,13 +85,13 @@ func TestSource(t *testing.T) {
LoginSourceID: e.LoginSourceID,
}
- ok, err := user_model.GetExternalLogin(context.Background(), e)
+ ok, err := user_model.GetExternalLogin(t.Context(), e)
assert.NoError(t, err)
assert.True(t, ok)
- assert.Equal(t, "", e.RefreshToken)
- assert.Equal(t, "", e.AccessToken)
+ assert.Empty(t, e.RefreshToken)
+ assert.Empty(t, e.AccessToken)
- u, err := user_model.GetUserByID(context.Background(), user.ID)
+ u, err := user_model.GetUserByID(t.Context(), user.ID)
assert.NoError(t, err)
assert.False(t, u.IsActive)
})
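These test hunks swap context.Background() for t.Context(), the Go 1.24 testing helper that returns a per-test context cancelled when the test finishes. A minimal sketch, with doSomething standing in for the code under test:

func TestDoSomething(t *testing.T) {
    ctx := t.Context() // cancelled automatically when this test (or subtest) ends
    if err := doSomething(ctx); err != nil {
        t.Fatal(err)
    }
}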
diff --git a/services/auth/sspi.go b/services/auth/sspi.go
index 3882740ae3..8cb39886c4 100644
--- a/services/auth/sspi.go
+++ b/services/auth/sspi.go
@@ -88,7 +88,7 @@ func (s *SSPI) Verify(req *http.Request, w http.ResponseWriter, store DataStore,
store.GetData()["EnableSSPI"] = true
// in this case, the Verify function is called in Gitea's web context
// FIXME: it doesn't look good to render the page here, why not redirect?
- gitea_context.GetWebContext(req).HTML(http.StatusUnauthorized, tplSignIn)
+ gitea_context.GetWebContext(req.Context()).HTML(http.StatusUnauthorized, tplSignIn)
return nil, err
}
if outToken != "" {
diff --git a/services/automerge/automerge.go b/services/automerge/automerge.go
index bdb0493ae8..62d560ff94 100644
--- a/services/automerge/automerge.go
+++ b/services/automerge/automerge.go
@@ -248,13 +248,13 @@ func handlePullRequestAutoMerge(pullID int64, sha string) {
switch pr.Flow {
case issues_model.PullRequestFlowGithub:
- headBranchExist := headGitRepo.IsBranchExist(pr.HeadBranch)
- if pr.HeadRepo == nil || !headBranchExist {
+ headBranchExist := pr.HeadRepo != nil && gitrepo.IsBranchExist(ctx, pr.HeadRepo, pr.HeadBranch)
+ if !headBranchExist {
log.Warn("Head branch of auto merge %-v does not exist [HeadRepoID: %d, Branch: %s]", pr, pr.HeadRepoID, pr.HeadBranch)
return
}
case issues_model.PullRequestFlowAGit:
- headBranchExist := git.IsReferenceExist(ctx, baseGitRepo.Path, pr.GetGitRefName())
+ headBranchExist := gitrepo.IsReferenceExist(ctx, pr.BaseRepo, pr.GetGitRefName())
if !headBranchExist {
log.Warn("Head branch of auto merge %-v does not exist [HeadRepoID: %d, Branch(Agit): %s]", pr, pr.HeadRepoID, pr.HeadBranch)
return
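The automerge check now goes through the gitrepo helpers, which take the repository model instead of an already-opened *git.Repository. A minimal sketch of the same existence checks, reusing the package names imported above:

func headExists(ctx context.Context, pr *issues_model.PullRequest) bool {
    switch pr.Flow {
    case issues_model.PullRequestFlowGithub:
        // the branch lives in the head repository, which may already be gone
        return pr.HeadRepo != nil && gitrepo.IsBranchExist(ctx, pr.HeadRepo, pr.HeadBranch)
    case issues_model.PullRequestFlowAGit:
        // the AGit flow keeps the head as a ref inside the base repository
        return gitrepo.IsReferenceExist(ctx, pr.BaseRepo, pr.GetGitRefName())
    }
    return false
}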
diff --git a/services/context/access_log.go b/services/context/access_log.go
index 001d93a362..925e4a3056 100644
--- a/services/context/access_log.go
+++ b/services/context/access_log.go
@@ -92,7 +92,7 @@ func (lr *accessLogRecorder) record(start time.Time, respWriter ResponseWriter,
log.Error("Could not execute access logger template: %v", err.Error())
}
- lr.logger.Log(1, log.INFO, "%s", buf.String())
+ lr.logger.Log(1, &log.Event{Level: log.INFO}, "%s", buf.String())
}
func newAccessLogRecorder() *accessLogRecorder {
diff --git a/services/context/access_log_test.go b/services/context/access_log_test.go
index bd3e47e0cc..c40ef9acd1 100644
--- a/services/context/access_log_test.go
+++ b/services/context/access_log_test.go
@@ -20,7 +20,7 @@ type testAccessLoggerMock struct {
logs []string
}
-func (t *testAccessLoggerMock) Log(skip int, level log.Level, format string, v ...any) {
+func (t *testAccessLoggerMock) Log(skip int, event *log.Event, format string, v ...any) {
t.logs = append(t.logs, fmt.Sprintf(format, v...))
}
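The logger contract changes so the level travels inside a *log.Event rather than as a bare log.Level. A sketch of an implementer and a call site, mirroring the mock and the access-log recorder above (recordingLogger is illustrative only):

type recordingLogger struct {
    lines []string
}

func (l *recordingLogger) Log(skip int, event *log.Event, format string, v ...any) {
    // skip and event.Level are available if needed; this mock only records the message
    l.lines = append(l.lines, fmt.Sprintf(format, v...))
}

// call site, as in accessLogRecorder.record above:
// lr.logger.Log(1, &log.Event{Level: log.INFO}, "%s", buf.String())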
diff --git a/services/context/api.go b/services/context/api.go
index bdeff0af63..10fad419ba 100644
--- a/services/context/api.go
+++ b/services/context/api.go
@@ -5,6 +5,7 @@
package context
import (
+ "errors"
"fmt"
"net/http"
"net/url"
@@ -17,11 +18,15 @@ import (
"code.gitea.io/gitea/modules/httpcache"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/modules/web"
web_types "code.gitea.io/gitea/modules/web/types"
)
// APIContext is a specific context for API service
+// ATTENTION: This struct should never be manually constructed in routes/services,
+// it has many internal details which should be carefully prepared by the framework.
+// If it is abused, it can cause strange bugs such as panics or resource leaks.
type APIContext struct {
*Base
@@ -103,14 +108,28 @@ type APIRepoArchivedError struct {
APIError
}
-// ServerError responds with error message, status is 500
-func (ctx *APIContext) ServerError(title string, err error) {
- ctx.Error(http.StatusInternalServerError, title, err)
+// APIErrorInternal responds with error message, status is 500
+func (ctx *APIContext) APIErrorInternal(err error) {
+ ctx.apiErrorInternal(1, err)
}
-// Error responds with an error message to client with given obj as the message.
+func (ctx *APIContext) apiErrorInternal(skip int, err error) {
+ log.ErrorWithSkip(skip+1, "InternalServerError: %v", err)
+
+ var message string
+ if !setting.IsProd || (ctx.Doer != nil && ctx.Doer.IsAdmin) {
+ message = err.Error()
+ }
+
+ ctx.JSON(http.StatusInternalServerError, APIError{
+ Message: message,
+ URL: setting.API.SwaggerURL,
+ })
+}
+
+// APIError responds with an error message to client with given obj as the message.
// If status is 500, also it prints error to log.
-func (ctx *APIContext) Error(status int, title string, obj any) {
+func (ctx *APIContext) APIError(status int, obj any) {
var message string
if err, ok := obj.(error); ok {
message = err.Error()
@@ -119,7 +138,7 @@ func (ctx *APIContext) Error(status int, title string, obj any) {
}
if status == http.StatusInternalServerError {
- log.ErrorWithSkip(1, "%s: %s", title, message)
+ log.ErrorWithSkip(1, "APIError: %s", message)
if setting.IsProd && !(ctx.Doer != nil && ctx.Doer.IsAdmin) {
message = ""
@@ -132,22 +151,6 @@ func (ctx *APIContext) Error(status int, title string, obj any) {
})
}
-// InternalServerError responds with an error message to the client with the error as a message
-// and the file and line of the caller.
-func (ctx *APIContext) InternalServerError(err error) {
- log.ErrorWithSkip(1, "InternalServerError: %v", err)
-
- var message string
- if !setting.IsProd || (ctx.Doer != nil && ctx.Doer.IsAdmin) {
- message = err.Error()
- }
-
- ctx.JSON(http.StatusInternalServerError, APIError{
- Message: message,
- URL: setting.API.SwaggerURL,
- })
-}
-
type apiContextKeyType struct{}
var apiContextKey = apiContextKeyType{}
@@ -207,7 +210,7 @@ func (ctx *APIContext) SetLinkHeader(total, pageSize int) {
}
}
-// APIContexter returns apicontext as middleware
+// APIContexter returns APIContext middleware
func APIContexter() func(http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
@@ -224,12 +227,12 @@ func APIContexter() func(http.Handler) http.Handler {
// If request sends files, parse them here otherwise the Query() can't be parsed and the CsrfToken will be invalid.
if ctx.Req.Method == "POST" && strings.Contains(ctx.Req.Header.Get("Content-Type"), "multipart/form-data") {
if err := ctx.Req.ParseMultipartForm(setting.Attachment.MaxSize << 20); err != nil && !strings.Contains(err.Error(), "EOF") { // 32MB max size
- ctx.InternalServerError(err)
+ ctx.APIErrorInternal(err)
return
}
}
- httpcache.SetCacheControlInHeader(ctx.Resp.Header(), 0, "no-transform")
+ httpcache.SetCacheControlInHeader(ctx.Resp.Header(), &httpcache.CacheControlOptions{NoTransform: true})
ctx.Resp.Header().Set(`X-Frame-Options`, setting.CORSConfig.XFrameOptions)
next.ServeHTTP(ctx.Resp, ctx.Req)
@@ -237,9 +240,9 @@ func APIContexter() func(http.Handler) http.Handler {
}
}
-// NotFound handles 404s for APIContext
+// APIErrorNotFound handles 404s for APIContext
// String will replace message, errors will be added to a slice
-func (ctx *APIContext) NotFound(objs ...any) {
+func (ctx *APIContext) APIErrorNotFound(objs ...any) {
message := ctx.Locale.TrString("error.not_found")
var errors []string
for _, obj := range objs {
@@ -276,7 +279,7 @@ func ReferencesGitRepo(allowEmpty ...bool) func(ctx *APIContext) {
var err error
ctx.Repo.GitRepo, err = gitrepo.RepositoryFromRequestContextOrOpen(ctx, ctx.Repo.Repository)
if err != nil {
- ctx.Error(http.StatusInternalServerError, fmt.Sprintf("Open Repository %v failed", ctx.Repo.Repository.FullName()), err)
+ ctx.APIErrorInternal(err)
return
}
}
@@ -288,25 +291,30 @@ func RepoRefForAPI(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
ctx := GetAPIContext(req)
+ if ctx.Repo.Repository.IsEmpty {
+ ctx.APIErrorNotFound("repository is empty")
+ return
+ }
+
if ctx.Repo.GitRepo == nil {
- ctx.InternalServerError(fmt.Errorf("no open git repo"))
+ ctx.APIErrorInternal(fmt.Errorf("no open git repo"))
return
}
refName, _, _ := getRefNameLegacy(ctx.Base, ctx.Repo, ctx.PathParam("*"), ctx.FormTrim("ref"))
var err error
- if ctx.Repo.GitRepo.IsBranchExist(refName) {
+ if gitrepo.IsBranchExist(ctx, ctx.Repo.Repository, refName) {
ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetBranchCommit(refName)
if err != nil {
- ctx.InternalServerError(err)
+ ctx.APIErrorInternal(err)
return
}
ctx.Repo.CommitID = ctx.Repo.Commit.ID.String()
- } else if ctx.Repo.GitRepo.IsTagExist(refName) {
+ } else if gitrepo.IsTagExist(ctx, ctx.Repo.Repository, refName) {
ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetTagCommit(refName)
if err != nil {
- ctx.InternalServerError(err)
+ ctx.APIErrorInternal(err)
return
}
ctx.Repo.CommitID = ctx.Repo.Commit.ID.String()
@@ -314,11 +322,11 @@ func RepoRefForAPI(next http.Handler) http.Handler {
ctx.Repo.CommitID = refName
ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetCommit(refName)
if err != nil {
- ctx.NotFound("GetCommit", err)
+ ctx.APIErrorNotFound("GetCommit", err)
return
}
} else {
- ctx.NotFound(fmt.Errorf("not exist: '%s'", ctx.PathParam("*")))
+ ctx.APIErrorNotFound(fmt.Errorf("not exist: '%s'", ctx.PathParam("*")))
return
}
@@ -347,12 +355,12 @@ func (ctx *APIContext) GetErrMsg() string {
// NotFoundOrServerError use error check function to determine if the error
// is about not found. It responds with 404 status code for not found error,
// or error context description for logging purpose of 500 server error.
-func (ctx *APIContext) NotFoundOrServerError(logMsg string, errCheck func(error) bool, logErr error) {
- if errCheck(logErr) {
+func (ctx *APIContext) NotFoundOrServerError(err error) {
+ if errors.Is(err, util.ErrNotExist) {
ctx.JSON(http.StatusNotFound, nil)
return
}
- ctx.Error(http.StatusInternalServerError, "NotFoundOrServerError", logMsg)
+ ctx.APIErrorInternal(err)
}
// IsUserSiteAdmin returns true if current user is a site admin
@@ -365,7 +373,7 @@ func (ctx *APIContext) IsUserRepoAdmin() bool {
return ctx.Repo.IsAdmin()
}
-// IsUserRepoWriter returns true if current user has write privilege in current repo
+// IsUserRepoWriter returns true if current user has "write" privilege in current repo
func (ctx *APIContext) IsUserRepoWriter(unitTypes []unit.Type) bool {
for _, unitType := range unitTypes {
if ctx.Repo.CanWrite(unitType) {
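Taken together, these api.go hunks rename the error helpers: ServerError/InternalServerError become APIErrorInternal, Error becomes APIError, NotFound becomes APIErrorNotFound, and NotFoundOrServerError now takes only the error and checks util.ErrNotExist itself. A sketch of a handler using the new names; loadThing is a placeholder lookup, not a Gitea function:

func getThing(ctx *context.APIContext) {
    thing, err := loadThing(ctx)
    if err != nil {
        // ctx.NotFoundOrServerError(err) would collapse the two branches below
        if errors.Is(err, util.ErrNotExist) {
            ctx.APIErrorNotFound(err) // 404 with the localized "not found" message
        } else {
            ctx.APIErrorInternal(err) // 500; detail hidden from non-admins in production
        }
        return
    }
    if thing == nil {
        ctx.APIError(http.StatusUnprocessableEntity, "thing has no content")
        return
    }
    ctx.JSON(http.StatusOK, thing)
}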
diff --git a/services/context/api_test.go b/services/context/api_test.go
index 911a49949e..87d74004db 100644
--- a/services/context/api_test.go
+++ b/services/context/api_test.go
@@ -45,6 +45,6 @@ func TestGenAPILinks(t *testing.T) {
links := genAPILinks(u, 100, 20, curPage)
- assert.EqualValues(t, links, response)
+ assert.Equal(t, links, response)
}
}
diff --git a/services/context/base.go b/services/context/base.go
index 5db84f42a5..3701668bf6 100644
--- a/services/context/base.go
+++ b/services/context/base.go
@@ -23,6 +23,10 @@ type BaseContextKeyType struct{}
var BaseContextKey BaseContextKeyType
+// Base is the base context for all web handlers
+// ATTENTION: This struct should never be manually constructed in routes/services,
+// it has many internal details which should be carefully prepared by the framework.
+// If it is abused, it can cause strange bugs such as panics or resource leaks.
type Base struct {
reqctx.RequestContext
@@ -77,8 +81,8 @@ func (b *Base) RespHeader() http.Header {
return b.Resp.Header()
}
-// Error returned an error to web browser
-func (b *Base) Error(status int, contents ...string) {
+// HTTPError returns an error to the web browser
+func (b *Base) HTTPError(status int, contents ...string) {
v := http.StatusText(status)
if len(contents) > 0 {
v = contents[0]
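Base.Error is renamed to Base.HTTPError with unchanged behavior: the body defaults to the status text unless an explicit string is passed. A minimal sketch (the guard itself is illustrative):

func requireSigned(b *context.Base, isSigned bool) {
    if !isSigned {
        b.HTTPError(http.StatusForbidden, "sign in required") // omit the string to fall back to "Forbidden"
    }
}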
diff --git a/services/context/context.go b/services/context/context.go
index 5b16f9be98..79bc5da920 100644
--- a/services/context/context.go
+++ b/services/context/context.go
@@ -34,7 +34,10 @@ type Render interface {
HTML(w io.Writer, status int, name templates.TplName, data any, templateCtx context.Context) error
}
-// Context represents context of a request.
+// Context represents context of a web request.
+// ATTENTION: This struct should never be manually constructed in routes/services,
+// it has many internal details which should be carefully prepared by the framework.
+// If it is abused, it can cause strange bugs such as panics or resource leaks.
type Context struct {
*Base
@@ -76,9 +79,9 @@ type webContextKeyType struct{}
var WebContextKey = webContextKeyType{}
-func GetWebContext(req *http.Request) *Context {
- ctx, _ := req.Context().Value(WebContextKey).(*Context)
- return ctx
+func GetWebContext(ctx context.Context) *Context {
+ webCtx, _ := ctx.Value(WebContextKey).(*Context)
+ return webCtx
}
// ValidateContext is a special context for form validation middleware. It may be different from other contexts.
@@ -132,6 +135,7 @@ func NewWebContext(base *Base, render Render, session session.Store) *Context {
}
ctx.TemplateContext = NewTemplateContextForWeb(ctx)
ctx.Flash = &middleware.Flash{DataStore: ctx, Values: url.Values{}}
+ ctx.SetContextValue(WebContextKey, ctx)
return ctx
}
@@ -162,7 +166,6 @@ func Contexter() func(next http.Handler) http.Handler {
ctx.PageData = map[string]any{}
ctx.Data["PageData"] = ctx.PageData
- ctx.Base.SetContextValue(WebContextKey, ctx)
ctx.Csrf = NewCSRFProtector(csrfOpts)
// get the last flash message from cookie
@@ -188,7 +191,7 @@ func Contexter() func(next http.Handler) http.Handler {
}
}
- httpcache.SetCacheControlInHeader(ctx.Resp.Header(), 0, "no-transform")
+ httpcache.SetCacheControlInHeader(ctx.Resp.Header(), &httpcache.CacheControlOptions{NoTransform: true})
ctx.Resp.Header().Set(`X-Frame-Options`, setting.CORSConfig.XFrameOptions)
ctx.Data["SystemConfig"] = setting.Config()
@@ -210,13 +213,16 @@ func Contexter() func(next http.Handler) http.Handler {
// Attention: this function changes ctx.Data and ctx.Flash
// If HasError is called, then before Redirect, the error message should be stored by ctx.Flash.Error(ctx.GetErrMsg()) again.
func (ctx *Context) HasError() bool {
- hasErr, ok := ctx.Data["HasError"]
- if !ok {
+ hasErr, _ := ctx.Data["HasError"].(bool)
+ hasErr = hasErr || ctx.Flash.ErrorMsg != ""
+ if !hasErr {
return false
}
- ctx.Flash.ErrorMsg = ctx.GetErrMsg()
+ if ctx.Flash.ErrorMsg == "" {
+ ctx.Flash.ErrorMsg = ctx.GetErrMsg()
+ }
ctx.Data["Flash"] = ctx.Flash
- return hasErr.(bool)
+ return hasErr
}
// GetErrMsg returns error message in form validation.
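GetWebContext now takes a context.Context, and NewWebContext stores the web context into itself, so Contexter no longer has to call SetContextValue. Call sites pass req.Context() instead of the request, as in the auth.go and sspi.go hunks earlier. A minimal sketch:

func afterSignIn(resp http.ResponseWriter, req *http.Request) {
    if webCtx := gitea_context.GetWebContext(req.Context()); webCtx != nil {
        // regenerate the CSRF token for the freshly signed-in session user
        webCtx.Csrf.PrepareForSessionUser(webCtx)
    }
}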
diff --git a/services/context/context_response.go b/services/context/context_response.go
index c7044791eb..61b432395a 100644
--- a/services/context/context_response.go
+++ b/services/context/context_response.go
@@ -28,7 +28,7 @@ import (
func RedirectToUser(ctx *Base, userName string, redirectUserID int64) {
user, err := user_model.GetUserByID(ctx, redirectUserID)
if err != nil {
- ctx.Error(http.StatusInternalServerError, "unable to get user")
+ ctx.HTTPError(http.StatusInternalServerError, "unable to get user")
return
}
@@ -122,8 +122,8 @@ func (ctx *Context) RenderWithErr(msg any, tpl templates.TplName, form any) {
}
// NotFound displays a 404 (Not Found) page and prints the given error, if any.
-func (ctx *Context) NotFound(logMsg string, logErr error) {
- ctx.notFoundInternal(logMsg, logErr)
+func (ctx *Context) NotFound(logErr error) {
+ ctx.notFoundInternal("", logErr)
}
func (ctx *Context) notFoundInternal(logMsg string, logErr error) {
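Context.NotFound drops the log-message string and takes only the error (which may be nil). A minimal sketch, modeled on the RepoMustNotBeArchived hunk later in this diff:

func mustNotBeArchived(ctx *context.Context) {
    if ctx.Repo.Repository.IsArchived {
        ctx.NotFound(errors.New(ctx.Locale.TrString("repo.archive.title")))
        return
    }
    // a bare 404 with nothing extra to log would be: ctx.NotFound(nil)
}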
diff --git a/services/context/org.go b/services/context/org.go
index be87cef7a3..992a48afa0 100644
--- a/services/context/org.go
+++ b/services/context/org.go
@@ -51,7 +51,7 @@ func GetOrganizationByParams(ctx *Context) {
if err == nil {
RedirectToUser(ctx.Base, orgName, redirectUserID)
} else if user_model.IsErrUserRedirectNotExist(err) {
- ctx.NotFound("GetUserByName", err)
+ ctx.NotFound(err)
} else {
ctx.ServerError("LookupUserRedirect", err)
}
@@ -62,215 +62,193 @@ func GetOrganizationByParams(ctx *Context) {
}
}
-// HandleOrgAssignment handles organization assignment
-func HandleOrgAssignment(ctx *Context, args ...bool) {
- var (
- requireMember bool
- requireOwner bool
- requireTeamMember bool
- requireTeamAdmin bool
- )
- if len(args) >= 1 {
- requireMember = args[0]
- }
- if len(args) >= 2 {
- requireOwner = args[1]
- }
- if len(args) >= 3 {
- requireTeamMember = args[2]
- }
- if len(args) >= 4 {
- requireTeamAdmin = args[3]
- }
-
- var err error
+type OrgAssignmentOptions struct {
+ RequireMember bool
+ RequireOwner bool
+ RequireTeamMember bool
+ RequireTeamAdmin bool
+}
- if ctx.ContextUser == nil {
- // if Organization is not defined, get it from params
- if ctx.Org.Organization == nil {
- GetOrganizationByParams(ctx)
- if ctx.Written() {
- return
+// OrgAssignment returns a middleware to handle organization assignment
+func OrgAssignment(opts OrgAssignmentOptions) func(ctx *Context) {
+ return func(ctx *Context) {
+ var err error
+ if ctx.ContextUser == nil {
+ // if Organization is not defined, get it from params
+ if ctx.Org.Organization == nil {
+ GetOrganizationByParams(ctx)
+ if ctx.Written() {
+ return
+ }
}
+ } else if ctx.ContextUser.IsOrganization() {
+ ctx.Org.Organization = (*organization.Organization)(ctx.ContextUser)
+ } else {
+ // ContextUser is an individual User
+ return
}
- } else if ctx.ContextUser.IsOrganization() {
- if ctx.Org == nil {
- ctx.Org = &Organization{}
- }
- ctx.Org.Organization = (*organization.Organization)(ctx.ContextUser)
- } else {
- // ContextUser is an individual User
- return
- }
- org := ctx.Org.Organization
+ org := ctx.Org.Organization
- // Handle Visibility
- if org.Visibility != structs.VisibleTypePublic && !ctx.IsSigned {
- // We must be signed in to see limited or private organizations
- ctx.NotFound("OrgAssignment", err)
- return
- }
-
- if org.Visibility == structs.VisibleTypePrivate {
- requireMember = true
- } else if ctx.IsSigned && ctx.Doer.IsRestricted {
- requireMember = true
- }
-
- ctx.ContextUser = org.AsUser()
- ctx.Data["Org"] = org
-
- // Admin has super access.
- if ctx.IsSigned && ctx.Doer.IsAdmin {
- ctx.Org.IsOwner = true
- ctx.Org.IsMember = true
- ctx.Org.IsTeamMember = true
- ctx.Org.IsTeamAdmin = true
- ctx.Org.CanCreateOrgRepo = true
- } else if ctx.IsSigned {
- ctx.Org.IsOwner, err = org.IsOwnedBy(ctx, ctx.Doer.ID)
- if err != nil {
- ctx.ServerError("IsOwnedBy", err)
+ // Handle Visibility
+ if org.Visibility != structs.VisibleTypePublic && !ctx.IsSigned {
+ // We must be signed in to see limited or private organizations
+ ctx.NotFound(err)
return
}
- if ctx.Org.IsOwner {
+ if org.Visibility == structs.VisibleTypePrivate {
+ opts.RequireMember = true
+ } else if ctx.IsSigned && ctx.Doer.IsRestricted {
+ opts.RequireMember = true
+ }
+
+ ctx.ContextUser = org.AsUser()
+ ctx.Data["Org"] = org
+
+ // Admin has super access.
+ if ctx.IsSigned && ctx.Doer.IsAdmin {
+ ctx.Org.IsOwner = true
ctx.Org.IsMember = true
ctx.Org.IsTeamMember = true
ctx.Org.IsTeamAdmin = true
ctx.Org.CanCreateOrgRepo = true
- } else {
- ctx.Org.IsMember, err = org.IsOrgMember(ctx, ctx.Doer.ID)
+ } else if ctx.IsSigned {
+ ctx.Org.IsOwner, err = org.IsOwnedBy(ctx, ctx.Doer.ID)
if err != nil {
- ctx.ServerError("IsOrgMember", err)
+ ctx.ServerError("IsOwnedBy", err)
return
}
- ctx.Org.CanCreateOrgRepo, err = org.CanCreateOrgRepo(ctx, ctx.Doer.ID)
- if err != nil {
- ctx.ServerError("CanCreateOrgRepo", err)
- return
+
+ if ctx.Org.IsOwner {
+ ctx.Org.IsMember = true
+ ctx.Org.IsTeamMember = true
+ ctx.Org.IsTeamAdmin = true
+ ctx.Org.CanCreateOrgRepo = true
+ } else {
+ ctx.Org.IsMember, err = org.IsOrgMember(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("IsOrgMember", err)
+ return
+ }
+ ctx.Org.CanCreateOrgRepo, err = org.CanCreateOrgRepo(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("CanCreateOrgRepo", err)
+ return
+ }
}
+ } else {
+ // Fake data.
+ ctx.Data["SignedUser"] = &user_model.User{}
}
- } else {
- // Fake data.
- ctx.Data["SignedUser"] = &user_model.User{}
- }
- if (requireMember && !ctx.Org.IsMember) ||
- (requireOwner && !ctx.Org.IsOwner) {
- ctx.NotFound("OrgAssignment", err)
- return
- }
- ctx.Data["IsOrganizationOwner"] = ctx.Org.IsOwner
- ctx.Data["IsOrganizationMember"] = ctx.Org.IsMember
- ctx.Data["IsPackageEnabled"] = setting.Packages.Enabled
- ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled
- ctx.Data["IsPublicMember"] = func(uid int64) bool {
- is, _ := organization.IsPublicMembership(ctx, ctx.Org.Organization.ID, uid)
- return is
- }
- ctx.Data["CanCreateOrgRepo"] = ctx.Org.CanCreateOrgRepo
+ if (opts.RequireMember && !ctx.Org.IsMember) || (opts.RequireOwner && !ctx.Org.IsOwner) {
+ ctx.NotFound(err)
+ return
+ }
+ ctx.Data["IsOrganizationOwner"] = ctx.Org.IsOwner
+ ctx.Data["IsOrganizationMember"] = ctx.Org.IsMember
+ ctx.Data["IsPackageEnabled"] = setting.Packages.Enabled
+ ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled
+ ctx.Data["IsPublicMember"] = func(uid int64) bool {
+ is, _ := organization.IsPublicMembership(ctx, ctx.Org.Organization.ID, uid)
+ return is
+ }
+ ctx.Data["CanCreateOrgRepo"] = ctx.Org.CanCreateOrgRepo
- ctx.Org.OrgLink = org.AsUser().OrganisationLink()
- ctx.Data["OrgLink"] = ctx.Org.OrgLink
+ ctx.Org.OrgLink = org.AsUser().OrganisationLink()
+ ctx.Data["OrgLink"] = ctx.Org.OrgLink
- // Member
- opts := &organization.FindOrgMembersOpts{
- Doer: ctx.Doer,
- OrgID: org.ID,
- IsDoerMember: ctx.Org.IsMember,
- }
- ctx.Data["NumMembers"], err = organization.CountOrgMembers(ctx, opts)
- if err != nil {
- ctx.ServerError("CountOrgMembers", err)
- return
- }
+ // Member
+ findMembersOpts := &organization.FindOrgMembersOpts{
+ Doer: ctx.Doer,
+ OrgID: org.ID,
+ IsDoerMember: ctx.Org.IsMember,
+ }
+ ctx.Data["NumMembers"], err = organization.CountOrgMembers(ctx, findMembersOpts)
+ if err != nil {
+ ctx.ServerError("CountOrgMembers", err)
+ return
+ }
- // Team.
- if ctx.Org.IsMember {
- shouldSeeAllTeams := false
- if ctx.Org.IsOwner {
- shouldSeeAllTeams = true
- } else {
- teams, err := org.GetUserTeams(ctx, ctx.Doer.ID)
- if err != nil {
- ctx.ServerError("GetUserTeams", err)
- return
+ // Team.
+ if ctx.Org.IsMember {
+ shouldSeeAllTeams := false
+ if ctx.Org.IsOwner {
+ shouldSeeAllTeams = true
+ } else {
+ teams, err := org.GetUserTeams(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("GetUserTeams", err)
+ return
+ }
+ for _, team := range teams {
+ if team.IncludesAllRepositories && team.AccessMode >= perm.AccessModeAdmin {
+ shouldSeeAllTeams = true
+ break
+ }
+ }
}
- for _, team := range teams {
- if team.IncludesAllRepositories && team.AccessMode >= perm.AccessModeAdmin {
- shouldSeeAllTeams = true
- break
+ if shouldSeeAllTeams {
+ ctx.Org.Teams, err = org.LoadTeams(ctx)
+ if err != nil {
+ ctx.ServerError("LoadTeams", err)
+ return
+ }
+ } else {
+ ctx.Org.Teams, err = org.GetUserTeams(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("GetUserTeams", err)
+ return
}
}
+ ctx.Data["NumTeams"] = len(ctx.Org.Teams)
}
- if shouldSeeAllTeams {
- ctx.Org.Teams, err = org.LoadTeams(ctx)
- if err != nil {
- ctx.ServerError("LoadTeams", err)
- return
+
+ teamName := ctx.PathParam("team")
+ if len(teamName) > 0 {
+ teamExists := false
+ for _, team := range ctx.Org.Teams {
+ if team.LowerName == strings.ToLower(teamName) {
+ teamExists = true
+ ctx.Org.Team = team
+ ctx.Org.IsTeamMember = true
+ ctx.Data["Team"] = ctx.Org.Team
+ break
+ }
}
- } else {
- ctx.Org.Teams, err = org.GetUserTeams(ctx, ctx.Doer.ID)
- if err != nil {
- ctx.ServerError("GetUserTeams", err)
+
+ if !teamExists {
+ ctx.NotFound(err)
return
}
- }
- ctx.Data["NumTeams"] = len(ctx.Org.Teams)
- }
- teamName := ctx.PathParam("team")
- if len(teamName) > 0 {
- teamExists := false
- for _, team := range ctx.Org.Teams {
- if team.LowerName == strings.ToLower(teamName) {
- teamExists = true
- ctx.Org.Team = team
- ctx.Org.IsTeamMember = true
- ctx.Data["Team"] = ctx.Org.Team
- break
+ ctx.Data["IsTeamMember"] = ctx.Org.IsTeamMember
+ if opts.RequireTeamMember && !ctx.Org.IsTeamMember {
+ ctx.NotFound(err)
+ return
}
- }
-
- if !teamExists {
- ctx.NotFound("OrgAssignment", err)
- return
- }
- ctx.Data["IsTeamMember"] = ctx.Org.IsTeamMember
- if requireTeamMember && !ctx.Org.IsTeamMember {
- ctx.NotFound("OrgAssignment", err)
- return
+ ctx.Org.IsTeamAdmin = ctx.Org.Team.IsOwnerTeam() || ctx.Org.Team.AccessMode >= perm.AccessModeAdmin
+ ctx.Data["IsTeamAdmin"] = ctx.Org.IsTeamAdmin
+ if opts.RequireTeamAdmin && !ctx.Org.IsTeamAdmin {
+ ctx.NotFound(err)
+ return
+ }
}
+ ctx.Data["ContextUser"] = ctx.ContextUser
- ctx.Org.IsTeamAdmin = ctx.Org.Team.IsOwnerTeam() || ctx.Org.Team.AccessMode >= perm.AccessModeAdmin
- ctx.Data["IsTeamAdmin"] = ctx.Org.IsTeamAdmin
- if requireTeamAdmin && !ctx.Org.IsTeamAdmin {
- ctx.NotFound("OrgAssignment", err)
- return
- }
- }
- ctx.Data["ContextUser"] = ctx.ContextUser
+ ctx.Data["CanReadProjects"] = ctx.Org.CanReadUnit(ctx, unit.TypeProjects)
+ ctx.Data["CanReadPackages"] = ctx.Org.CanReadUnit(ctx, unit.TypePackages)
+ ctx.Data["CanReadCode"] = ctx.Org.CanReadUnit(ctx, unit.TypeCode)
- ctx.Data["CanReadProjects"] = ctx.Org.CanReadUnit(ctx, unit.TypeProjects)
- ctx.Data["CanReadPackages"] = ctx.Org.CanReadUnit(ctx, unit.TypePackages)
- ctx.Data["CanReadCode"] = ctx.Org.CanReadUnit(ctx, unit.TypeCode)
-
- ctx.Data["IsFollowing"] = ctx.Doer != nil && user_model.IsFollowing(ctx, ctx.Doer.ID, ctx.ContextUser.ID)
- if len(ctx.ContextUser.Description) != 0 {
- content, err := markdown.RenderString(markup.NewRenderContext(ctx), ctx.ContextUser.Description)
- if err != nil {
- ctx.ServerError("RenderString", err)
- return
+ ctx.Data["IsFollowing"] = ctx.Doer != nil && user_model.IsFollowing(ctx, ctx.Doer.ID, ctx.ContextUser.ID)
+ if len(ctx.ContextUser.Description) != 0 {
+ content, err := markdown.RenderString(markup.NewRenderContext(ctx), ctx.ContextUser.Description)
+ if err != nil {
+ ctx.ServerError("RenderString", err)
+ return
+ }
+ ctx.Data["RenderedDescription"] = content
}
- ctx.Data["RenderedDescription"] = content
- }
-}
-
-// OrgAssignment returns a middleware to handle organization assignment
-func OrgAssignment(args ...bool) func(ctx *Context) {
- return func(ctx *Context) {
- HandleOrgAssignment(ctx, args...)
}
}
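The positional-bool arguments are replaced by OrgAssignmentOptions, so call sites spell out which requirement they enforce. A hypothetical route registration; the group path and the router variable m are illustrative, not taken from this diff:

m.Group("/org/{org}", func() {
    // ... organization routes ...
}, context.OrgAssignment(context.OrgAssignmentOptions{RequireMember: true}))

// owner-only or team-scoped areas pass the corresponding flags instead:
// context.OrgAssignment(context.OrgAssignmentOptions{RequireOwner: true})
// context.OrgAssignment(context.OrgAssignmentOptions{RequireTeamMember: true, RequireTeamAdmin: true})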
diff --git a/services/context/package.go b/services/context/package.go
index e98e01acbb..8b722932b1 100644
--- a/services/context/package.go
+++ b/services/context/package.go
@@ -33,15 +33,15 @@ type packageAssignmentCtx struct {
// PackageAssignment returns a middleware to handle Context.Package assignment
func PackageAssignment() func(ctx *Context) {
return func(ctx *Context) {
- errorFn := func(status int, title string, obj any) {
+ errorFn := func(status int, obj any) {
err, ok := obj.(error)
if !ok {
err = fmt.Errorf("%s", obj)
}
if status == http.StatusNotFound {
- ctx.NotFound(title, err)
+ ctx.NotFound(err)
} else {
- ctx.ServerError(title, err)
+ ctx.ServerError("PackageAssignment", err)
}
}
paCtx := &packageAssignmentCtx{Base: ctx.Base, Doer: ctx.Doer, ContextUser: ctx.ContextUser}
@@ -53,18 +53,18 @@ func PackageAssignment() func(ctx *Context) {
func PackageAssignmentAPI() func(ctx *APIContext) {
return func(ctx *APIContext) {
paCtx := &packageAssignmentCtx{Base: ctx.Base, Doer: ctx.Doer, ContextUser: ctx.ContextUser}
- ctx.Package = packageAssignment(paCtx, ctx.Error)
+ ctx.Package = packageAssignment(paCtx, ctx.APIError)
}
}
-func packageAssignment(ctx *packageAssignmentCtx, errCb func(int, string, any)) *Package {
+func packageAssignment(ctx *packageAssignmentCtx, errCb func(int, any)) *Package {
pkg := &Package{
Owner: ctx.ContextUser,
}
var err error
pkg.AccessMode, err = determineAccessMode(ctx.Base, pkg, ctx.Doer)
if err != nil {
- errCb(http.StatusInternalServerError, "determineAccessMode", err)
+ errCb(http.StatusInternalServerError, fmt.Errorf("determineAccessMode: %w", err))
return pkg
}
@@ -75,16 +75,16 @@ func packageAssignment(ctx *packageAssignmentCtx, errCb func(int, string, any))
pv, err := packages_model.GetVersionByNameAndVersion(ctx, pkg.Owner.ID, packages_model.Type(packageType), name, version)
if err != nil {
if err == packages_model.ErrPackageNotExist {
- errCb(http.StatusNotFound, "GetVersionByNameAndVersion", err)
+ errCb(http.StatusNotFound, fmt.Errorf("GetVersionByNameAndVersion: %w", err))
} else {
- errCb(http.StatusInternalServerError, "GetVersionByNameAndVersion", err)
+ errCb(http.StatusInternalServerError, fmt.Errorf("GetVersionByNameAndVersion: %w", err))
}
return pkg
}
pkg.Descriptor, err = packages_model.GetPackageDescriptor(ctx, pv)
if err != nil {
- errCb(http.StatusInternalServerError, "GetPackageDescriptor", err)
+ errCb(http.StatusInternalServerError, fmt.Errorf("GetPackageDescriptor: %w", err))
return pkg
}
}
@@ -93,7 +93,7 @@ func packageAssignment(ctx *packageAssignmentCtx, errCb func(int, string, any))
}
func determineAccessMode(ctx *Base, pkg *Package, doer *user_model.User) (perm.AccessMode, error) {
- if setting.Service.RequireSignInView && (doer == nil || doer.IsGhost()) {
+ if setting.Service.RequireSignInViewStrict && (doer == nil || doer.IsGhost()) {
return perm.AccessModeNone, nil
}
@@ -154,9 +154,9 @@ func PackageContexter() func(next http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) {
base := NewBaseContext(resp, req)
- // it is still needed when rendering 500 page in a package handler
+ // FIXME: the web Context is still needed when rendering the 500 page in a package handler
+ // It should be refactored to use the new error-handling mechanisms
ctx := NewWebContext(base, renderer, nil)
- ctx.SetContextValue(WebContextKey, ctx)
next.ServeHTTP(ctx.Resp, ctx.Req)
})
}
diff --git a/services/context/pagination.go b/services/context/pagination.go
index d33dd217d0..25a9298e01 100644
--- a/services/context/pagination.go
+++ b/services/context/pagination.go
@@ -21,12 +21,18 @@ type Pagination struct {
// NewPagination creates a new instance of the Pagination struct.
// "pagingNum" is "page size" or "limit", "current" is "page"
+// total=-1 means only showing prev/next
func NewPagination(total, pagingNum, current, numPages int) *Pagination {
p := &Pagination{}
p.Paginater = paginator.New(total, pagingNum, current, numPages)
return p
}
+func (p *Pagination) WithCurRows(n int) *Pagination {
+ p.Paginater.SetCurRows(n)
+ return p
+}
+
func (p *Pagination) AddParamFromRequest(req *http.Request) {
for key, values := range req.URL.Query() {
if key == "page" || len(values) == 0 || (len(values) == 1 && values[0] == "") {
diff --git a/services/context/permission.go b/services/context/permission.go
index 0d69ccc4a4..7055f798da 100644
--- a/services/context/permission.go
+++ b/services/context/permission.go
@@ -15,7 +15,7 @@ import (
func RequireRepoAdmin() func(ctx *Context) {
return func(ctx *Context) {
if !ctx.IsSigned || !ctx.Repo.IsAdmin() {
- ctx.NotFound("RequireRepoAdmin denies the request", nil)
+ ctx.NotFound(nil)
return
}
}
@@ -25,7 +25,7 @@ func RequireRepoAdmin() func(ctx *Context) {
func CanWriteToBranch() func(ctx *Context) {
return func(ctx *Context) {
if !ctx.Repo.CanWriteToBranch(ctx, ctx.Doer, ctx.Repo.BranchName) {
- ctx.NotFound("CanWriteToBranch denies permission", nil)
+ ctx.NotFound(nil)
return
}
}
@@ -39,7 +39,7 @@ func RequireUnitWriter(unitTypes ...unit.Type) func(ctx *Context) {
return
}
}
- ctx.NotFound("RequireUnitWriter denies the request", nil)
+ ctx.NotFound(nil)
}
}
@@ -54,7 +54,7 @@ func RequireUnitReader(unitTypes ...unit.Type) func(ctx *Context) {
return
}
}
- ctx.NotFound("RequireUnitReader denies the request", nil)
+ ctx.NotFound(nil)
}
}
@@ -78,7 +78,7 @@ func CheckRepoScopedToken(ctx *Context, repo *repo_model.Repository, level auth_
}
if publicOnly && repo.IsPrivate {
- ctx.Error(http.StatusForbidden)
+ ctx.HTTPError(http.StatusForbidden)
return
}
@@ -89,7 +89,7 @@ func CheckRepoScopedToken(ctx *Context, repo *repo_model.Repository, level auth_
}
if !scopeMatched {
- ctx.Error(http.StatusForbidden)
+ ctx.HTTPError(http.StatusForbidden)
return
}
}
diff --git a/services/context/repo.go b/services/context/repo.go
index 1cb35b9b83..7d0b44c42f 100644
--- a/services/context/repo.go
+++ b/services/context/repo.go
@@ -89,7 +89,7 @@ func (r *Repository) GetObjectFormat() git.ObjectFormat {
func RepoMustNotBeArchived() func(ctx *Context) {
return func(ctx *Context) {
if ctx.Repo.Repository.IsArchived {
- ctx.NotFound("IsArchived", errors.New(ctx.Locale.TrString("repo.archive.title")))
+ ctx.NotFound(errors.New(ctx.Locale.TrString("repo.archive.title")))
}
}
}
@@ -315,7 +315,7 @@ func RedirectToRepo(ctx *Base, redirectRepoID int64) {
repo, err := repo_model.GetRepositoryByID(ctx, redirectRepoID)
if err != nil {
log.Error("GetRepositoryByID: %v", err)
- ctx.Error(http.StatusInternalServerError, "GetRepositoryByID")
+ ctx.HTTPError(http.StatusInternalServerError, "GetRepositoryByID")
return
}
@@ -328,7 +328,9 @@ func RedirectToRepo(ctx *Base, redirectRepoID int64) {
if ctx.Req.URL.RawQuery != "" {
redirectPath += "?" + ctx.Req.URL.RawQuery
}
- ctx.Redirect(path.Join(setting.AppSubURL, redirectPath), http.StatusTemporaryRedirect)
+ // By default the Git client needs a 301 redirect to follow the new location
+ // It's not documented in the git documentation, but it's how the git client behaves
+ ctx.Redirect(path.Join(setting.AppSubURL, redirectPath), http.StatusMovedPermanently)
}
func repoAssignment(ctx *Context, repo *repo_model.Repository) {
@@ -344,12 +346,12 @@ func repoAssignment(ctx *Context, repo *repo_model.Repository) {
return
}
- if !ctx.Repo.Permission.HasAnyUnitAccessOrEveryoneAccess() && !canWriteAsMaintainer(ctx) {
+ if !ctx.Repo.Permission.HasAnyUnitAccessOrPublicAccess() && !canWriteAsMaintainer(ctx) {
if ctx.FormString("go-get") == "1" {
EarlyResponseForGoGetMeta(ctx)
return
}
- ctx.NotFound("no access right", nil)
+ ctx.NotFound(nil)
return
}
ctx.Data["Permission"] = &ctx.Repo.Permission
@@ -402,7 +404,7 @@ func RepoAssignment(ctx *Context) {
if redirectUserID, err := user_model.LookupUserRedirect(ctx, userName); err == nil {
RedirectToUser(ctx.Base, userName, redirectUserID)
} else if user_model.IsErrUserRedirectNotExist(err) {
- ctx.NotFound("GetUserByName", nil)
+ ctx.NotFound(nil)
} else {
ctx.ServerError("LookupUserRedirect", err)
}
@@ -447,7 +449,7 @@ func RepoAssignment(ctx *Context) {
EarlyResponseForGoGetMeta(ctx)
return
}
- ctx.NotFound("GetRepositoryByName", nil)
+ ctx.NotFound(nil)
} else {
ctx.ServerError("LookupRepoRedirect", err)
}
@@ -814,7 +816,7 @@ func RepoRefByType(detectRefType git.RefType) func(*Context) {
reqPath := ctx.PathParam("*")
if reqPath == "" {
refShortName = ctx.Repo.Repository.DefaultBranch
- if !ctx.Repo.GitRepo.IsBranchExist(refShortName) {
+ if !gitrepo.IsBranchExist(ctx, ctx.Repo.Repository, refShortName) {
brs, _, err := ctx.Repo.GitRepo.GetBranches(0, 1)
if err == nil && len(brs) != 0 {
refShortName = brs[0].Name
@@ -854,7 +856,7 @@ func RepoRefByType(detectRefType git.RefType) func(*Context) {
return
}
- if refType == git.RefTypeBranch && ctx.Repo.GitRepo.IsBranchExist(refShortName) {
+ if refType == git.RefTypeBranch && gitrepo.IsBranchExist(ctx, ctx.Repo.Repository, refShortName) {
ctx.Repo.BranchName = refShortName
ctx.Repo.RefFullName = git.RefNameFromBranch(refShortName)
@@ -864,13 +866,13 @@ func RepoRefByType(detectRefType git.RefType) func(*Context) {
return
}
ctx.Repo.CommitID = ctx.Repo.Commit.ID.String()
- } else if refType == git.RefTypeTag && ctx.Repo.GitRepo.IsTagExist(refShortName) {
+ } else if refType == git.RefTypeTag && gitrepo.IsTagExist(ctx, ctx.Repo.Repository, refShortName) {
ctx.Repo.RefFullName = git.RefNameFromTag(refShortName)
ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetTagCommit(refShortName)
if err != nil {
if git.IsErrNotExist(err) {
- ctx.NotFound("GetTagCommit", err)
+ ctx.NotFound(err)
return
}
ctx.ServerError("GetTagCommit", err)
@@ -883,7 +885,7 @@ func RepoRefByType(detectRefType git.RefType) func(*Context) {
ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetCommit(refShortName)
if err != nil {
- ctx.NotFound("GetCommit", err)
+ ctx.NotFound(err)
return
}
// If short commit ID add canonical link header
@@ -892,7 +894,7 @@ func RepoRefByType(detectRefType git.RefType) func(*Context) {
ctx.RespHeader().Set("Link", fmt.Sprintf(`<%s>; rel="canonical"`, canonicalURL))
}
} else {
- ctx.NotFound("RepoRef invalid repo", fmt.Errorf("branch or tag not exist: %s", refShortName))
+ ctx.NotFound(fmt.Errorf("branch or tag not exist: %s", refShortName))
return
}
@@ -945,7 +947,7 @@ func RepoRefByType(detectRefType git.RefType) func(*Context) {
func GitHookService() func(ctx *Context) {
return func(ctx *Context) {
if !ctx.Doer.CanEditGitHook() {
- ctx.NotFound("GitHookService", nil)
+ ctx.NotFound(nil)
return
}
}
diff --git a/services/context/upload/upload.go b/services/context/upload/upload.go
index da4370a433..12aa485aa7 100644
--- a/services/context/upload/upload.go
+++ b/services/context/upload/upload.go
@@ -87,14 +87,15 @@ func Verify(buf []byte, fileName, allowedTypesStr string) error {
// AddUploadContext renders template values for dropzone
func AddUploadContext(ctx *context.Context, uploadType string) {
- if uploadType == "release" {
+ switch uploadType {
+ case "release":
ctx.Data["UploadUrl"] = ctx.Repo.RepoLink + "/releases/attachments"
ctx.Data["UploadRemoveUrl"] = ctx.Repo.RepoLink + "/releases/attachments/remove"
ctx.Data["UploadLinkUrl"] = ctx.Repo.RepoLink + "/releases/attachments"
ctx.Data["UploadAccepts"] = strings.ReplaceAll(setting.Repository.Release.AllowedTypes, "|", ",")
ctx.Data["UploadMaxFiles"] = setting.Attachment.MaxFiles
ctx.Data["UploadMaxSize"] = setting.Attachment.MaxSize
- } else if uploadType == "comment" {
+ case "comment":
ctx.Data["UploadUrl"] = ctx.Repo.RepoLink + "/issues/attachments"
ctx.Data["UploadRemoveUrl"] = ctx.Repo.RepoLink + "/issues/attachments/remove"
if len(ctx.PathParam("index")) > 0 {
@@ -105,7 +106,7 @@ func AddUploadContext(ctx *context.Context, uploadType string) {
ctx.Data["UploadAccepts"] = strings.ReplaceAll(setting.Attachment.AllowedTypes, "|", ",")
ctx.Data["UploadMaxFiles"] = setting.Attachment.MaxFiles
ctx.Data["UploadMaxSize"] = setting.Attachment.MaxSize
- } else if uploadType == "repo" {
+ case "repo":
ctx.Data["UploadUrl"] = ctx.Repo.RepoLink + "/upload-file"
ctx.Data["UploadRemoveUrl"] = ctx.Repo.RepoLink + "/upload-remove"
ctx.Data["UploadLinkUrl"] = ctx.Repo.RepoLink + "/upload-file"
diff --git a/services/context/user.go b/services/context/user.go
index dbc35e198d..c09ded8339 100644
--- a/services/context/user.go
+++ b/services/context/user.go
@@ -14,15 +14,15 @@ import (
// UserAssignmentWeb returns a middleware to handle context-user assignment for web routes
func UserAssignmentWeb() func(ctx *Context) {
return func(ctx *Context) {
- errorFn := func(status int, title string, obj any) {
+ errorFn := func(status int, obj any) {
err, ok := obj.(error)
if !ok {
err = fmt.Errorf("%s", obj)
}
if status == http.StatusNotFound {
- ctx.NotFound(title, err)
+ ctx.NotFound(err)
} else {
- ctx.ServerError(title, err)
+ ctx.ServerError("UserAssignmentWeb", err)
}
}
ctx.ContextUser = userAssignment(ctx.Base, ctx.Doer, errorFn)
@@ -42,9 +42,9 @@ func UserIDAssignmentAPI() func(ctx *APIContext) {
ctx.ContextUser, err = user_model.GetUserByID(ctx, userID)
if err != nil {
if user_model.IsErrUserNotExist(err) {
- ctx.Error(http.StatusNotFound, "GetUserByID", err)
+ ctx.APIError(http.StatusNotFound, err)
} else {
- ctx.Error(http.StatusInternalServerError, "GetUserByID", err)
+ ctx.APIErrorInternal(err)
}
}
}
@@ -54,11 +54,11 @@ func UserIDAssignmentAPI() func(ctx *APIContext) {
// UserAssignmentAPI returns a middleware to handle context-user assignment for api routes
func UserAssignmentAPI() func(ctx *APIContext) {
return func(ctx *APIContext) {
- ctx.ContextUser = userAssignment(ctx.Base, ctx.Doer, ctx.Error)
+ ctx.ContextUser = userAssignment(ctx.Base, ctx.Doer, ctx.APIError)
}
}
-func userAssignment(ctx *Base, doer *user_model.User, errCb func(int, string, any)) (contextUser *user_model.User) {
+func userAssignment(ctx *Base, doer *user_model.User, errCb func(int, any)) (contextUser *user_model.User) {
username := ctx.PathParam("username")
if doer != nil && doer.LowerName == strings.ToLower(username) {
@@ -71,12 +71,12 @@ func userAssignment(ctx *Base, doer *user_model.User, errCb func(int, string, an
if redirectUserID, err := user_model.LookupUserRedirect(ctx, username); err == nil {
RedirectToUser(ctx, username, redirectUserID)
} else if user_model.IsErrUserRedirectNotExist(err) {
- errCb(http.StatusNotFound, "GetUserByName", err)
+ errCb(http.StatusNotFound, err)
} else {
- errCb(http.StatusInternalServerError, "LookupUserRedirect", err)
+ errCb(http.StatusInternalServerError, fmt.Errorf("LookupUserRedirect: %w", err))
}
} else {
- errCb(http.StatusInternalServerError, "GetUserByName", err)
+ errCb(http.StatusInternalServerError, fmt.Errorf("GetUserByName: %w", err))
}
}
}
diff --git a/services/contexttest/context_tests.go b/services/contexttest/context_tests.go
index b0f71cad20..c895de3569 100644
--- a/services/contexttest/context_tests.go
+++ b/services/contexttest/context_tests.go
@@ -20,6 +20,7 @@ import (
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/cache"
+ git_module "code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/reqctx"
"code.gitea.io/gitea/modules/session"
@@ -30,6 +31,7 @@ import (
"github.com/go-chi/chi/v5"
"github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
)
func mockRequest(t *testing.T, reqPath string) *http.Request {
@@ -67,7 +69,6 @@ func MockContext(t *testing.T, reqPath string, opts ...MockContextOption) (*cont
chiCtx := chi.NewRouteContext()
ctx := context.NewWebContext(base, opt.Render, nil)
- ctx.SetContextValue(context.WebContextKey, ctx)
ctx.SetContextValue(chi.RouteCtxKey, chiCtx)
if opt.SessionStore != nil {
ctx.SetContextValue(session.MockStoreContextKey, opt.SessionStore)
@@ -86,7 +87,7 @@ func MockAPIContext(t *testing.T, reqPath string) (*context.APIContext, *httptes
base := context.NewBaseContext(resp, req)
base.Data = middleware.GetContextData(req.Context())
base.Locale = &translation.MockLocale{}
- ctx := &context.APIContext{Base: base}
+ ctx := &context.APIContext{Base: base, Repo: &context.Repository{}}
chiCtx := chi.NewRouteContext()
ctx.SetContextValue(chi.RouteCtxKey, chiCtx)
return ctx, resp
@@ -107,13 +108,13 @@ func MockPrivateContext(t *testing.T, reqPath string) (*context.PrivateContext,
// LoadRepo load a repo into a test context.
func LoadRepo(t *testing.T, ctx gocontext.Context, repoID int64) {
var doer *user_model.User
- repo := &context.Repository{}
+ var repo *context.Repository
switch ctx := ctx.(type) {
case *context.Context:
- ctx.Repo = repo
+ repo = ctx.Repo
doer = ctx.Doer
case *context.APIContext:
- ctx.Repo = repo
+ repo = ctx.Repo
doer = ctx.Doer
default:
assert.FailNow(t, "context is not *context.Context or *context.APIContext")
@@ -141,15 +142,17 @@ func LoadRepoCommit(t *testing.T, ctx gocontext.Context) {
}
gitRepo, err := gitrepo.OpenRepository(ctx, repo.Repository)
- assert.NoError(t, err)
+ require.NoError(t, err)
defer gitRepo.Close()
- branch, err := gitRepo.GetHEADBranch()
- assert.NoError(t, err)
- assert.NotNil(t, branch)
- if branch != nil {
- repo.Commit, err = gitRepo.GetBranchCommit(branch.Name)
- assert.NoError(t, err)
+
+ if repo.RefFullName == "" {
+ repo.RefFullName = git_module.RefNameFromBranch(repo.Repository.DefaultBranch)
+ }
+ if repo.RefFullName.IsPull() {
+ repo.BranchName = repo.RefFullName.ShortName()
}
+ repo.Commit, err = gitRepo.GetCommit(repo.RefFullName.String())
+ require.NoError(t, err)
}
// LoadUser load a user into a test context
diff --git a/services/convert/convert.go b/services/convert/convert.go
index c8cad2a2ad..ac2680766c 100644
--- a/services/convert/convert.go
+++ b/services/convert/convert.go
@@ -28,6 +28,7 @@ import (
"code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/util"
+ asymkey_service "code.gitea.io/gitea/services/asymkey"
"code.gitea.io/gitea/services/gitdiff"
)
@@ -229,9 +230,31 @@ func ToActionTask(ctx context.Context, t *actions_model.ActionTask) (*api.Action
}, nil
}
+// ToActionArtifact converts an actions_model.ActionArtifact to an api.ActionArtifact
+func ToActionArtifact(repo *repo_model.Repository, art *actions_model.ActionArtifact) (*api.ActionArtifact, error) {
+ url := fmt.Sprintf("%s/actions/artifacts/%d", repo.APIURL(), art.ID)
+
+ return &api.ActionArtifact{
+ ID: art.ID,
+ Name: art.ArtifactName,
+ SizeInBytes: art.FileSize,
+ Expired: art.Status == actions_model.ArtifactStatusExpired,
+ URL: url,
+ ArchiveDownloadURL: url + "/zip",
+ CreatedAt: art.CreatedUnix.AsLocalTime(),
+ UpdatedAt: art.UpdatedUnix.AsLocalTime(),
+ ExpiresAt: art.ExpiredUnix.AsLocalTime(),
+ WorkflowRun: &api.ActionWorkflowRun{
+ ID: art.RunID,
+ RepositoryID: art.RepoID,
+ HeadSha: art.CommitSHA,
+ },
+ }, nil
+}
+
// ToVerification convert a git.Commit.Signature to an api.PayloadCommitVerification
func ToVerification(ctx context.Context, c *git.Commit) *api.PayloadCommitVerification {
- verif := asymkey_model.ParseCommitWithSignature(ctx, c)
+ verif := asymkey_service.ParseCommitWithSignature(ctx, c)
commitVerification := &api.PayloadCommitVerification{
Verified: verif.Verified,
Reason: verif.Reason,
diff --git a/services/convert/git_commit.go b/services/convert/git_commit.go
index e0efcddbcb..3ec81b52ee 100644
--- a/services/convert/git_commit.go
+++ b/services/convert/git_commit.go
@@ -210,17 +210,15 @@ func ToCommit(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Rep
// Get diff stats for commit
if opts.Stat {
- diff, err := gitdiff.GetDiff(ctx, gitRepo, &gitdiff.DiffOptions{
- AfterCommitID: commit.ID.String(),
- })
+ diffShortStat, err := gitdiff.GetDiffShortStat(gitRepo, "", commit.ID.String())
if err != nil {
return nil, err
}
res.Stats = &api.CommitStats{
- Total: diff.TotalAddition + diff.TotalDeletion,
- Additions: diff.TotalAddition,
- Deletions: diff.TotalDeletion,
+ Total: diffShortStat.TotalAddition + diffShortStat.TotalDeletion,
+ Additions: diffShortStat.TotalAddition,
+ Deletions: diffShortStat.TotalDeletion,
}
}
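The commit stats now come from gitdiff.GetDiffShortStat, which returns a struct instead of the old multi-value gitRepo.GetDiffShortStat. A sketch extracted from the ToCommit change above; the empty base argument mirrors that call:

func commitStats(gitRepo *git.Repository, commit *git.Commit) (*api.CommitStats, error) {
    stat, err := gitdiff.GetDiffShortStat(gitRepo, "", commit.ID.String())
    if err != nil {
        return nil, err
    }
    return &api.CommitStats{
        Total:     stat.TotalAddition + stat.TotalDeletion,
        Additions: stat.TotalAddition,
        Deletions: stat.TotalDeletion,
    }, nil
}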
diff --git a/services/convert/git_commit_test.go b/services/convert/git_commit_test.go
index 73cb5e8c71..ad1cc0eca3 100644
--- a/services/convert/git_commit_test.go
+++ b/services/convert/git_commit_test.go
@@ -33,7 +33,7 @@ func TestToCommitMeta(t *testing.T) {
commitMeta := ToCommitMeta(headRepo, tag)
assert.NotNil(t, commitMeta)
- assert.EqualValues(t, &api.CommitMeta{
+ assert.Equal(t, &api.CommitMeta{
SHA: sha1.EmptyObjectID().String(),
URL: util.URLJoin(headRepo.APIURL(), "git/commits", sha1.EmptyObjectID().String()),
Created: time.Unix(0, 0),
diff --git a/services/convert/issue.go b/services/convert/issue.go
index 37935accca..7f386e6293 100644
--- a/services/convert/issue.go
+++ b/services/convert/issue.go
@@ -41,6 +41,9 @@ func toIssue(ctx context.Context, doer *user_model.User, issue *issues_model.Iss
if err := issue.LoadAttachments(ctx); err != nil {
return &api.Issue{}
}
+ if err := issue.LoadPinOrder(ctx); err != nil {
+ return &api.Issue{}
+ }
apiIssue := &api.Issue{
ID: issue.ID,
@@ -55,7 +58,7 @@ func toIssue(ctx context.Context, doer *user_model.User, issue *issues_model.Iss
Comments: issue.NumComments,
Created: issue.CreatedUnix.AsTime(),
Updated: issue.UpdatedUnix.AsTime(),
- PinOrder: issue.PinOrder,
+ PinOrder: util.Iif(issue.PinOrder == -1, 0, issue.PinOrder), // -1 means loaded with no pin order
}
if issue.Repo != nil {
@@ -67,7 +70,7 @@ func toIssue(ctx context.Context, doer *user_model.User, issue *issues_model.Iss
if err := issue.LoadLabels(ctx); err != nil {
return &api.Issue{}
}
- apiIssue.Labels = ToLabelList(issue.Labels, issue.Repo, issue.Repo.Owner)
+ apiIssue.Labels = util.SliceNilAsEmpty(ToLabelList(issue.Labels, issue.Repo, issue.Repo.Owner))
apiIssue.Repo = &api.RepositoryMeta{
ID: issue.Repo.ID,
Name: issue.Repo.Name,
@@ -122,6 +125,7 @@ func toIssue(ctx context.Context, doer *user_model.User, issue *issues_model.Iss
// ToIssueList converts an IssueList to API format
func ToIssueList(ctx context.Context, doer *user_model.User, il issues_model.IssueList) []*api.Issue {
result := make([]*api.Issue, len(il))
+ _ = il.LoadPinOrder(ctx)
for i := range il {
result[i] = ToIssue(ctx, doer, il[i])
}
@@ -131,6 +135,7 @@ func ToIssueList(ctx context.Context, doer *user_model.User, il issues_model.Iss
// ToAPIIssueList converts an IssueList to API format
func ToAPIIssueList(ctx context.Context, doer *user_model.User, il issues_model.IssueList) []*api.Issue {
result := make([]*api.Issue, len(il))
+ _ = il.LoadPinOrder(ctx)
for i := range il {
result[i] = ToAPIIssue(ctx, doer, il[i])
}
diff --git a/services/convert/pull.go b/services/convert/pull.go
index a1ab7eeb8e..c22b5282c8 100644
--- a/services/convert/pull.go
+++ b/services/convert/pull.go
@@ -17,7 +17,10 @@ import (
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/gitdiff"
)
// ToAPIPullRequest assumes following fields have been assigned with valid values:
@@ -77,7 +80,7 @@ func ToAPIPullRequest(ctx context.Context, pr *issues_model.PullRequest, doer *u
Labels: apiIssue.Labels,
Milestone: apiIssue.Milestone,
Assignee: apiIssue.Assignee,
- Assignees: apiIssue.Assignees,
+ Assignees: util.SliceNilAsEmpty(apiIssue.Assignees),
State: apiIssue.State,
Draft: pr.IsWorkInProgress(ctx),
IsLocked: apiIssue.IsLocked,
@@ -92,7 +95,11 @@ func ToAPIPullRequest(ctx context.Context, pr *issues_model.PullRequest, doer *u
Deadline: apiIssue.Deadline,
Created: pr.Issue.CreatedUnix.AsTimePtr(),
Updated: pr.Issue.UpdatedUnix.AsTimePtr(),
- PinOrder: apiIssue.PinOrder,
+ PinOrder: util.Iif(apiIssue.PinOrder == -1, 0, apiIssue.PinOrder),
+
+ // output "[]" rather than null to align with GitHub's output
+ RequestedReviewers: []*api.User{},
+ RequestedReviewersTeams: []*api.Team{},
AllowMaintainerEdit: pr.AllowMaintainerEdit,
@@ -234,9 +241,13 @@ func ToAPIPullRequest(ctx context.Context, pr *issues_model.PullRequest, doer *u
// Calculate diff
startCommitID = pr.MergeBase
- apiPullRequest.ChangedFiles, apiPullRequest.Additions, apiPullRequest.Deletions, err = gitRepo.GetDiffShortStat(startCommitID, endCommitID)
+ diffShortStats, err := gitdiff.GetDiffShortStat(gitRepo, startCommitID, endCommitID)
if err != nil {
log.Error("GetDiffShortStat: %v", err)
+ } else {
+ apiPullRequest.ChangedFiles = &diffShortStats.NumFiles
+ apiPullRequest.Additions = &diffShortStats.TotalAddition
+ apiPullRequest.Deletions = &diffShortStats.TotalDeletion
}
}
@@ -299,6 +310,9 @@ func ToAPIPullRequests(ctx context.Context, baseRepo *repo_model.Repository, prs
if err := issueList.LoadAssignees(ctx); err != nil {
return nil, err
}
+ if err = issueList.LoadPinOrder(ctx); err != nil {
+ return nil, err
+ }
reviews, err := prs.LoadReviews(ctx)
if err != nil {
@@ -363,7 +377,7 @@ func ToAPIPullRequests(ctx context.Context, baseRepo *repo_model.Repository, prs
Deadline: apiIssue.Deadline,
Created: pr.Issue.CreatedUnix.AsTimePtr(),
Updated: pr.Issue.UpdatedUnix.AsTimePtr(),
- PinOrder: apiIssue.PinOrder,
+ PinOrder: util.Iif(apiIssue.PinOrder == -1, 0, apiIssue.PinOrder),
AllowMaintainerEdit: pr.AllowMaintainerEdit,
@@ -375,7 +389,7 @@ func ToAPIPullRequests(ctx context.Context, baseRepo *repo_model.Repository, prs
},
Head: &api.PRBranchInfo{
Name: pr.HeadBranch,
- Ref: fmt.Sprintf("%s%d/head", git.PullPrefix, pr.Index),
+ Ref: pr.GetGitRefName(),
RepoID: -1,
},
}
@@ -408,88 +422,43 @@ func ToAPIPullRequests(ctx context.Context, baseRepo *repo_model.Repository, prs
return nil, err
}
}
-
if baseBranch != nil {
apiPullRequest.Base.Sha = baseBranch.CommitID
}
- if pr.Flow == issues_model.PullRequestFlowAGit {
- apiPullRequest.Head.Sha, err = gitRepo.GetRefCommitID(pr.GetGitRefName())
+ // pull request head branch: both the head repository and the head branch may no longer exist
+ if pr.HeadRepo != nil {
+ apiPullRequest.Head.RepoID = pr.HeadRepo.ID
+ exist, err := git_model.IsBranchExist(ctx, pr.HeadRepo.ID, pr.HeadBranch)
if err != nil {
- log.Error("GetRefCommitID[%s]: %v", pr.GetGitRefName(), err)
+ log.Error("IsBranchExist[%d]: %v", pr.HeadRepo.ID, err)
return nil, err
}
- apiPullRequest.Head.RepoID = pr.BaseRepoID
- apiPullRequest.Head.Repository = apiPullRequest.Base.Repository
- apiPullRequest.Head.Name = ""
- }
-
- var headGitRepo *git.Repository
- if pr.HeadRepo != nil && pr.Flow == issues_model.PullRequestFlowGithub {
- if pr.HeadRepoID == pr.BaseRepoID {
- apiPullRequest.Head.RepoID = pr.HeadRepo.ID
- apiPullRequest.Head.Repository = apiRepo
- headGitRepo = gitRepo
- } else {
- p, err := access_model.GetUserRepoPermission(ctx, pr.HeadRepo, doer)
- if err != nil {
- log.Error("GetUserRepoPermission[%d]: %v", pr.HeadRepoID, err)
- p.AccessMode = perm.AccessModeNone
- }
-
- apiPullRequest.Head.RepoID = pr.HeadRepo.ID
- apiPullRequest.Head.Repository = ToRepo(ctx, pr.HeadRepo, p)
-
- headGitRepo, err = gitrepo.OpenRepository(ctx, pr.HeadRepo)
- if err != nil {
- log.Error("OpenRepository[%s]: %v", pr.HeadRepo.RepoPath(), err)
- return nil, err
- }
- defer headGitRepo.Close()
- }
-
- headBranch, err := headGitRepo.GetBranch(pr.HeadBranch)
- if err != nil && !git.IsErrBranchNotExist(err) {
- log.Error("GetBranch[%s]: %v", pr.HeadBranch, err)
- return nil, err
- }
-
- // Outer scope variables to be used in diff calculation
- var (
- startCommitID string
- endCommitID string
- )
-
- if git.IsErrBranchNotExist(err) {
- headCommitID, err := headGitRepo.GetRefCommitID(apiPullRequest.Head.Ref)
- if err != nil && !git.IsErrNotExist(err) {
- log.Error("GetCommit[%s]: %v", pr.HeadBranch, err)
- return nil, err
- }
- if err == nil {
- apiPullRequest.Head.Sha = headCommitID
- endCommitID = headCommitID
- }
- } else {
- commit, err := headBranch.GetCommit()
- if err != nil && !git.IsErrNotExist(err) {
- log.Error("GetCommit[%s]: %v", headBranch.Name, err)
- return nil, err
- }
- if err == nil {
- apiPullRequest.Head.Ref = pr.HeadBranch
- apiPullRequest.Head.Sha = commit.ID.String()
- endCommitID = commit.ID.String()
- }
+ if exist {
+ apiPullRequest.Head.Ref = pr.HeadBranch
}
+ }
+ if apiPullRequest.Head.Ref == "" {
+ apiPullRequest.Head.Ref = pr.GetGitRefName()
+ }
- // Calculate diff
- startCommitID = pr.MergeBase
-
- apiPullRequest.ChangedFiles, apiPullRequest.Additions, apiPullRequest.Deletions, err = gitRepo.GetDiffShortStat(startCommitID, endCommitID)
+ if pr.HeadRepoID == pr.BaseRepoID {
+ apiPullRequest.Head.Repository = apiPullRequest.Base.Repository
+ } else {
+ p, err := access_model.GetUserRepoPermission(ctx, pr.HeadRepo, doer)
if err != nil {
- log.Error("GetDiffShortStat: %v", err)
+ log.Error("GetUserRepoPermission[%d]: %v", pr.HeadRepoID, err)
+ p.AccessMode = perm.AccessModeNone
}
+ apiPullRequest.Head.Repository = ToRepo(ctx, pr.HeadRepo, p)
+ }
+
+ if pr.Flow == issues_model.PullRequestFlowAGit {
+ apiPullRequest.Head.Name = ""
+ }
+ apiPullRequest.Head.Sha, err = gitRepo.GetRefCommitID(pr.GetGitRefName())
+ if err != nil {
+ log.Error("GetRefCommitID[%s]: %v", pr.GetGitRefName(), err)
}
if len(apiPullRequest.Head.Sha) == 0 && len(apiPullRequest.Head.Ref) != 0 {
@@ -510,6 +479,12 @@ func ToAPIPullRequests(ctx context.Context, baseRepo *repo_model.Repository, prs
apiPullRequest.MergedBy = ToUser(ctx, pr.Merger, nil)
}
+ // Do not provide "ChangedFiles/Additions/Deletions" for the PR list, because the "diff" is quite slow
+ // If callers are interested in these values, they should do a separate request to get the PR details
+ if apiPullRequest.ChangedFiles != nil || apiPullRequest.Additions != nil || apiPullRequest.Deletions != nil {
+ setting.PanicInDevOrTesting("ChangedFiles/Additions/Deletions should not be set in PR list")
+ }
+
apiPullRequests = append(apiPullRequests, apiPullRequest)
}
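With the diff counters no longer populated for the PR list, a caller that needs them is expected to compute them for a single PR via gitdiff.GetDiffShortStat, as the single-PR path above does. A minimal sketch under that assumption; the fetchPRDiffStat helper name and package alias are illustrative only:

package convert_example

import (
	"code.gitea.io/gitea/modules/git"
	"code.gitea.io/gitea/services/gitdiff"
)

// fetchPRDiffStat mirrors the single-PR path: it asks gitdiff for the short stat
// between the merge base and the head commit and returns the three counters.
// (Hypothetical helper, not part of the patch.)
func fetchPRDiffStat(gitRepo *git.Repository, mergeBase, headCommitID string) (files, additions, deletions int, err error) {
	stat, err := gitdiff.GetDiffShortStat(gitRepo, mergeBase, headCommitID)
	if err != nil {
		return 0, 0, 0, err
	}
	return stat.NumFiles, stat.TotalAddition, stat.TotalDeletion, nil
}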
diff --git a/services/convert/pull_review_test.go b/services/convert/pull_review_test.go
index a1296fafd4..d0a077ab24 100644
--- a/services/convert/pull_review_test.go
+++ b/services/convert/pull_review_test.go
@@ -19,8 +19,8 @@ func Test_ToPullReview(t *testing.T) {
reviewer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
review := unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: 6})
- assert.EqualValues(t, reviewer.ID, review.ReviewerID)
- assert.EqualValues(t, issues_model.ReviewTypePending, review.Type)
+ assert.Equal(t, reviewer.ID, review.ReviewerID)
+ assert.Equal(t, issues_model.ReviewTypePending, review.Type)
reviewList := []*issues_model.Review{review}
diff --git a/services/convert/pull_test.go b/services/convert/pull_test.go
index e069fa4a68..cd86283c8a 100644
--- a/services/convert/pull_test.go
+++ b/services/convert/pull_test.go
@@ -27,7 +27,7 @@ func TestPullRequest_APIFormat(t *testing.T) {
assert.NoError(t, pr.LoadIssue(db.DefaultContext))
apiPullRequest := ToAPIPullRequest(git.DefaultContext, pr, nil)
assert.NotNil(t, apiPullRequest)
- assert.EqualValues(t, &structs.PRBranchInfo{
+ assert.Equal(t, &structs.PRBranchInfo{
Name: "branch1",
Ref: "refs/pull/2/head",
Sha: "4a357436d925b5c974181ff12a994538ddc5a269",
diff --git a/services/convert/release_test.go b/services/convert/release_test.go
index 201b27e16d..bb618c9ca3 100644
--- a/services/convert/release_test.go
+++ b/services/convert/release_test.go
@@ -23,6 +23,6 @@ func TestRelease_ToRelease(t *testing.T) {
apiRelease := ToAPIRelease(db.DefaultContext, repo1, release1)
assert.NotNil(t, apiRelease)
assert.EqualValues(t, 1, apiRelease.ID)
- assert.EqualValues(t, "https://try.gitea.io/api/v1/repos/user2/repo1/releases/1", apiRelease.URL)
- assert.EqualValues(t, "https://try.gitea.io/api/v1/repos/user2/repo1/releases/1/assets", apiRelease.UploadURL)
+ assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/releases/1", apiRelease.URL)
+ assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/releases/1/assets", apiRelease.UploadURL)
}
diff --git a/services/convert/repository.go b/services/convert/repository.go
index 632b6392d5..7dfdfd2179 100644
--- a/services/convert/repository.go
+++ b/services/convert/repository.go
@@ -14,6 +14,7 @@ import (
unit_model "code.gitea.io/gitea/models/unit"
"code.gitea.io/gitea/modules/log"
api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
)
// ToRepo converts a Repository to api.Repository
@@ -242,7 +243,7 @@ func innerToRepo(ctx context.Context, repo *repo_model.Repository, permissionInR
MirrorInterval: mirrorInterval,
MirrorUpdated: mirrorUpdated,
RepoTransfer: transfer,
- Topics: repo.Topics,
+ Topics: util.SliceNilAsEmpty(repo.Topics),
ObjectFormatName: repo.ObjectFormatName,
Licenses: repoLicenses.StringList(),
}
diff --git a/services/convert/user_test.go b/services/convert/user_test.go
index 4b1effc7aa..199d500732 100644
--- a/services/convert/user_test.go
+++ b/services/convert/user_test.go
@@ -30,11 +30,11 @@ func TestUser_ToUser(t *testing.T) {
apiUser = toUser(db.DefaultContext, user1, false, false)
assert.False(t, apiUser.IsAdmin)
- assert.EqualValues(t, api.VisibleTypePublic.String(), apiUser.Visibility)
+ assert.Equal(t, api.VisibleTypePublic.String(), apiUser.Visibility)
user31 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 31, IsAdmin: false, Visibility: api.VisibleTypePrivate})
apiUser = toUser(db.DefaultContext, user31, true, true)
assert.False(t, apiUser.IsAdmin)
- assert.EqualValues(t, api.VisibleTypePrivate.String(), apiUser.Visibility)
+ assert.Equal(t, api.VisibleTypePrivate.String(), apiUser.Visibility)
}
diff --git a/services/convert/utils_test.go b/services/convert/utils_test.go
index a8363ec6bd..7965624e2b 100644
--- a/services/convert/utils_test.go
+++ b/services/convert/utils_test.go
@@ -10,10 +10,10 @@ import (
)
func TestToCorrectPageSize(t *testing.T) {
- assert.EqualValues(t, 30, ToCorrectPageSize(0))
- assert.EqualValues(t, 30, ToCorrectPageSize(-10))
- assert.EqualValues(t, 20, ToCorrectPageSize(20))
- assert.EqualValues(t, 50, ToCorrectPageSize(100))
+ assert.Equal(t, 30, ToCorrectPageSize(0))
+ assert.Equal(t, 30, ToCorrectPageSize(-10))
+ assert.Equal(t, 20, ToCorrectPageSize(20))
+ assert.Equal(t, 50, ToCorrectPageSize(100))
}
func TestToGitServiceType(t *testing.T) {
diff --git a/services/cron/tasks_basic.go b/services/cron/tasks_basic.go
index fb5938745e..841981787d 100644
--- a/services/cron/tasks_basic.go
+++ b/services/cron/tasks_basic.go
@@ -54,7 +54,7 @@ func registerRepoHealthCheck() {
RunAtStart: false,
Schedule: "@midnight",
},
- Timeout: 60 * time.Second,
+ Timeout: time.Duration(setting.Git.Timeout.Default) * time.Second,
Args: []string{},
}, func(ctx context.Context, _ *user_model.User, config Config) error {
rhcConfig := config.(*RepoHealthCheckConfig)
diff --git a/services/doctor/dbconsistency.go b/services/doctor/dbconsistency.go
index 7cb7445148..62326ed07c 100644
--- a/services/doctor/dbconsistency.go
+++ b/services/doctor/dbconsistency.go
@@ -12,6 +12,7 @@ import (
issues_model "code.gitea.io/gitea/models/issues"
"code.gitea.io/gitea/models/migrations"
repo_model "code.gitea.io/gitea/models/repo"
+ secret_model "code.gitea.io/gitea/models/secret"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
)
@@ -164,6 +165,24 @@ func prepareDBConsistencyChecks() []consistencyCheck {
Fixer: repo_model.DeleteOrphanedTopics,
FixedMessage: "Removed",
},
+ {
+ Name: "Repository level Runners with non-zero owner_id",
+ Counter: actions_model.CountWrongRepoLevelRunners,
+ Fixer: actions_model.UpdateWrongRepoLevelRunners,
+ FixedMessage: "Corrected",
+ },
+ {
+ Name: "Repository level Variables with non-zero owner_id",
+ Counter: actions_model.CountWrongRepoLevelVariables,
+ Fixer: actions_model.UpdateWrongRepoLevelVariables,
+ FixedMessage: "Corrected",
+ },
+ {
+ Name: "Repository level Secrets with non-zero owner_id",
+ Counter: secret_model.CountWrongRepoLevelSecrets,
+ Fixer: secret_model.UpdateWrongRepoLevelSecrets,
+ FixedMessage: "Corrected",
+ },
}
// TODO: function to recalc all counters
diff --git a/services/doctor/dbversion.go b/services/doctor/dbversion.go
index 2a102b2194..34279a45e7 100644
--- a/services/doctor/dbversion.go
+++ b/services/doctor/dbversion.go
@@ -9,6 +9,7 @@ import (
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/migrations"
"code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/services/versioned_migration"
)
func checkDBVersion(ctx context.Context, logger log.Logger, autofix bool) error {
@@ -21,7 +22,7 @@ func checkDBVersion(ctx context.Context, logger log.Logger, autofix bool) error
logger.Warn("Got Error: %v during ensure up to date", err)
logger.Warn("Attempting to migrate to the latest DB version to fix this.")
- err = db.InitEngineWithMigration(ctx, migrations.Migrate)
+ err = db.InitEngineWithMigration(ctx, versioned_migration.Migrate)
if err != nil {
logger.Critical("Error: %v during migration", err)
}
diff --git a/services/doctor/doctor.go b/services/doctor/doctor.go
index a4eb5e16b9..c6810a5fa0 100644
--- a/services/doctor/doctor.go
+++ b/services/doctor/doctor.go
@@ -48,7 +48,7 @@ type doctorCheckLogger struct {
var _ log.BaseLogger = (*doctorCheckLogger)(nil)
-func (d *doctorCheckLogger) Log(skip int, level log.Level, format string, v ...any) {
+func (d *doctorCheckLogger) Log(skip int, event *log.Event, format string, v ...any) {
_, _ = fmt.Fprintf(os.Stdout, format+"\n", v...)
}
@@ -62,11 +62,11 @@ type doctorCheckStepLogger struct {
var _ log.BaseLogger = (*doctorCheckStepLogger)(nil)
-func (d *doctorCheckStepLogger) Log(skip int, level log.Level, format string, v ...any) {
- levelChar := fmt.Sprintf("[%s]", strings.ToUpper(level.String()[0:1]))
+func (d *doctorCheckStepLogger) Log(skip int, event *log.Event, format string, v ...any) {
+ levelChar := fmt.Sprintf("[%s]", strings.ToUpper(event.Level.String()[0:1]))
var levelArg any = levelChar
if d.colorize {
- levelArg = log.NewColoredValue(levelChar, level.ColorAttributes()...)
+ levelArg = log.NewColoredValue(levelChar, event.Level.ColorAttributes()...)
}
args := append([]any{levelArg}, v...)
_, _ = fmt.Fprintf(os.Stdout, " - %s "+format+"\n", args...)
diff --git a/services/doctor/fix16961_test.go b/services/doctor/fix16961_test.go
index 498ed9c8d5..11a128620c 100644
--- a/services/doctor/fix16961_test.go
+++ b/services/doctor/fix16961_test.go
@@ -19,12 +19,6 @@ func Test_fixUnitConfig_16961(t *testing.T) {
wantErr bool
}{
{
- name: "empty",
- bs: "",
- wantFixed: true,
- wantErr: false,
- },
- {
name: "normal: {}",
bs: "{}",
wantFixed: false,
@@ -221,7 +215,7 @@ func Test_fixPullRequestsConfig_16961(t *testing.T) {
if gotFixed != tt.wantFixed {
t.Errorf("fixPullRequestsConfig_16961() = %v, want %v", gotFixed, tt.wantFixed)
}
- assert.EqualValues(t, &tt.expected, cfg)
+ assert.Equal(t, &tt.expected, cfg)
})
}
}
@@ -265,7 +259,7 @@ func Test_fixIssuesConfig_16961(t *testing.T) {
if gotFixed != tt.wantFixed {
t.Errorf("fixIssuesConfig_16961() = %v, want %v", gotFixed, tt.wantFixed)
}
- assert.EqualValues(t, &tt.expected, cfg)
+ assert.Equal(t, &tt.expected, cfg)
})
}
}
diff --git a/services/doctor/heads.go b/services/doctor/heads.go
index 41fca01d57..bbfd40da5e 100644
--- a/services/doctor/heads.go
+++ b/services/doctor/heads.go
@@ -18,9 +18,9 @@ func synchronizeRepoHeads(ctx context.Context, logger log.Logger, autofix bool)
numReposUpdated := 0
err := iterateRepositories(ctx, func(repo *repo_model.Repository) error {
numRepos++
- _, _, defaultBranchErr := git.NewCommand(ctx, "rev-parse").AddDashesAndList(repo.DefaultBranch).RunStdString(&git.RunOpts{Dir: repo.RepoPath()})
+ _, _, defaultBranchErr := git.NewCommand("rev-parse").AddDashesAndList(repo.DefaultBranch).RunStdString(ctx, &git.RunOpts{Dir: repo.RepoPath()})
- head, _, headErr := git.NewCommand(ctx, "symbolic-ref", "--short", "HEAD").RunStdString(&git.RunOpts{Dir: repo.RepoPath()})
+ head, _, headErr := git.NewCommand("symbolic-ref", "--short", "HEAD").RunStdString(ctx, &git.RunOpts{Dir: repo.RepoPath()})
// what we expect: default branch is valid, and HEAD points to it
if headErr == nil && defaultBranchErr == nil && head == repo.DefaultBranch {
@@ -46,7 +46,7 @@ func synchronizeRepoHeads(ctx context.Context, logger log.Logger, autofix bool)
}
// otherwise, let's try fixing HEAD
- err := git.NewCommand(ctx, "symbolic-ref").AddDashesAndList("HEAD", git.BranchPrefix+repo.DefaultBranch).Run(&git.RunOpts{Dir: repo.RepoPath()})
+ err := git.NewCommand("symbolic-ref").AddDashesAndList("HEAD", git.BranchPrefix+repo.DefaultBranch).Run(ctx, &git.RunOpts{Dir: repo.RepoPath()})
if err != nil {
logger.Warn("Failed to fix HEAD for %s/%s: %v", repo.OwnerName, repo.Name, err)
return nil
diff --git a/services/doctor/mergebase.go b/services/doctor/mergebase.go
index de460c4190..482bcd0a46 100644
--- a/services/doctor/mergebase.go
+++ b/services/doctor/mergebase.go
@@ -42,17 +42,17 @@ func checkPRMergeBase(ctx context.Context, logger log.Logger, autofix bool) erro
if !pr.HasMerged {
var err error
- pr.MergeBase, _, err = git.NewCommand(ctx, "merge-base").AddDashesAndList(pr.BaseBranch, pr.GetGitRefName()).RunStdString(&git.RunOpts{Dir: repoPath})
+ pr.MergeBase, _, err = git.NewCommand("merge-base").AddDashesAndList(pr.BaseBranch, pr.GetGitRefName()).RunStdString(ctx, &git.RunOpts{Dir: repoPath})
if err != nil {
var err2 error
- pr.MergeBase, _, err2 = git.NewCommand(ctx, "rev-parse").AddDynamicArguments(git.BranchPrefix + pr.BaseBranch).RunStdString(&git.RunOpts{Dir: repoPath})
+ pr.MergeBase, _, err2 = git.NewCommand("rev-parse").AddDynamicArguments(git.BranchPrefix+pr.BaseBranch).RunStdString(ctx, &git.RunOpts{Dir: repoPath})
if err2 != nil {
logger.Warn("Unable to get merge base for PR ID %d, #%d onto %s in %s/%s. Error: %v & %v", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err, err2)
return nil
}
}
} else {
- parentsString, _, err := git.NewCommand(ctx, "rev-list", "--parents", "-n", "1").AddDynamicArguments(pr.MergedCommitID).RunStdString(&git.RunOpts{Dir: repoPath})
+ parentsString, _, err := git.NewCommand("rev-list", "--parents", "-n", "1").AddDynamicArguments(pr.MergedCommitID).RunStdString(ctx, &git.RunOpts{Dir: repoPath})
if err != nil {
logger.Warn("Unable to get parents for merged PR ID %d, #%d onto %s in %s/%s. Error: %v", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err)
return nil
@@ -64,8 +64,8 @@ func checkPRMergeBase(ctx context.Context, logger log.Logger, autofix bool) erro
refs := append([]string{}, parents[1:]...)
refs = append(refs, pr.GetGitRefName())
- cmd := git.NewCommand(ctx, "merge-base").AddDashesAndList(refs...)
- pr.MergeBase, _, err = cmd.RunStdString(&git.RunOpts{Dir: repoPath})
+ cmd := git.NewCommand("merge-base").AddDashesAndList(refs...)
+ pr.MergeBase, _, err = cmd.RunStdString(ctx, &git.RunOpts{Dir: repoPath})
if err != nil {
logger.Warn("Unable to get merge base for merged PR ID %d, #%d onto %s in %s/%s. Error: %v", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err)
return nil
diff --git a/services/doctor/misc.go b/services/doctor/misc.go
index 9300c3a25c..1269d088c3 100644
--- a/services/doctor/misc.go
+++ b/services/doctor/misc.go
@@ -8,7 +8,7 @@ import (
"fmt"
"os"
"os/exec"
- "path"
+ "path/filepath"
"strings"
"code.gitea.io/gitea/models"
@@ -18,7 +18,6 @@ import (
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/log"
- "code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/util"
@@ -50,14 +49,14 @@ func checkScriptType(ctx context.Context, logger log.Logger, autofix bool) error
func checkHooks(ctx context.Context, logger log.Logger, autofix bool) error {
if err := iterateRepositories(ctx, func(repo *repo_model.Repository) error {
- results, err := repository.CheckDelegateHooks(repo.RepoPath())
+ results, err := gitrepo.CheckDelegateHooks(ctx, repo)
if err != nil {
logger.Critical("Unable to check delegate hooks for repo %-v. ERROR: %v", repo, err)
return fmt.Errorf("Unable to check delegate hooks for repo %-v. ERROR: %w", repo, err)
}
if len(results) > 0 && autofix {
logger.Warn("Regenerated hooks for %s", repo.FullName())
- if err := repository.CreateDelegateHooks(repo.RepoPath()); err != nil {
+ if err := gitrepo.CreateDelegateHooks(ctx, repo); err != nil {
logger.Critical("Unable to recreate delegate hooks for %-v. ERROR: %v", repo, err)
return fmt.Errorf("Unable to recreate delegate hooks for %-v. ERROR: %w", repo, err)
}
@@ -99,11 +98,11 @@ func checkEnablePushOptions(ctx context.Context, logger log.Logger, autofix bool
defer r.Close()
if autofix {
- _, _, err := git.NewCommand(ctx, "config", "receive.advertisePushOptions", "true").RunStdString(&git.RunOpts{Dir: r.Path})
+ _, _, err := git.NewCommand("config", "receive.advertisePushOptions", "true").RunStdString(ctx, &git.RunOpts{Dir: r.Path})
return err
}
- value, _, err := git.NewCommand(ctx, "config", "receive.advertisePushOptions").RunStdString(&git.RunOpts{Dir: r.Path})
+ value, _, err := git.NewCommand("config", "receive.advertisePushOptions").RunStdString(ctx, &git.RunOpts{Dir: r.Path})
if err != nil {
return err
}
@@ -149,7 +148,7 @@ func checkDaemonExport(ctx context.Context, logger log.Logger, autofix bool) err
}
// Create/Remove git-daemon-export-ok for git-daemon...
- daemonExportFile := path.Join(repo.RepoPath(), `git-daemon-export-ok`)
+ daemonExportFile := filepath.Join(repo.RepoPath(), `git-daemon-export-ok`)
isExist, err := util.IsExist(daemonExportFile)
if err != nil {
log.Error("Unable to check if %s exists. Error: %v", daemonExportFile, err)
@@ -197,7 +196,7 @@ func checkCommitGraph(ctx context.Context, logger log.Logger, autofix bool) erro
commitGraphExists := func() (bool, error) {
// Check commit-graph exists
- commitGraphFile := path.Join(repo.RepoPath(), `objects/info/commit-graph`)
+ commitGraphFile := filepath.Join(repo.RepoPath(), `objects/info/commit-graph`)
isExist, err := util.IsExist(commitGraphFile)
if err != nil {
logger.Error("Unable to check if %s exists. Error: %v", commitGraphFile, err)
@@ -205,7 +204,7 @@ func checkCommitGraph(ctx context.Context, logger log.Logger, autofix bool) erro
}
if !isExist {
- commitGraphsDir := path.Join(repo.RepoPath(), `objects/info/commit-graphs`)
+ commitGraphsDir := filepath.Join(repo.RepoPath(), `objects/info/commit-graphs`)
isExist, err = util.IsExist(commitGraphsDir)
if err != nil {
logger.Error("Unable to check if %s exists. Error: %v", commitGraphsDir, err)
diff --git a/services/doctor/storage.go b/services/doctor/storage.go
index 3f3b562c37..77fc6d65df 100644
--- a/services/doctor/storage.go
+++ b/services/doctor/storage.go
@@ -121,7 +121,7 @@ func checkStorage(opts *checkStorageOptions) func(ctx context.Context, logger lo
storer: storage.LFS,
isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) {
// The oid of an LFS stored object is the name but with all the path.Separators removed
- oid := strings.ReplaceAll(path, "/", "")
+ oid := strings.ReplaceAll(strings.ReplaceAll(path, "\\", ""), "/", "")
exists, err := git.ExistsLFSObject(ctx, oid)
return !exists, err
},
diff --git a/services/feed/feed.go b/services/feed/feed.go
index 93bf875fd0..214e9b5765 100644
--- a/services/feed/feed.go
+++ b/services/feed/feed.go
@@ -5,11 +5,157 @@ package feed
import (
"context"
+ "fmt"
activities_model "code.gitea.io/gitea/models/activities"
+ "code.gitea.io/gitea/models/db"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/cache"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
)
+func userFeedCacheKey(userID int64) string {
+ return fmt.Sprintf("user_feed_%d", userID)
+}
+
+func GetFeedsForDashboard(ctx context.Context, opts activities_model.GetFeedsOptions) (activities_model.ActionList, int, error) {
+ opts.DontCount = opts.RequestedTeam == nil && opts.Date == ""
+ results, cnt, err := activities_model.GetFeeds(ctx, opts)
+ return results, util.Iif(opts.DontCount, -1, int(cnt)), err
+}
+
// GetFeeds returns actions according to the provided options
func GetFeeds(ctx context.Context, opts activities_model.GetFeedsOptions) (activities_model.ActionList, int64, error) {
return activities_model.GetFeeds(ctx, opts)
}
+
+// notifyWatchers creates a batch of actions, one per watcher.
+// It may insert multiple copies of a repository action, like this:
+// * Original action: UserID=1 (the real actor), ActUserID=1
+// * Organization action: UserID=100 (the repo's org), ActUserID=1
+// * Watcher action: UserID=20 (a user who is watching a repo), ActUserID=1
+func notifyWatchers(ctx context.Context, act *activities_model.Action, watchers []*repo_model.Watch, permCode, permIssue, permPR []bool) error {
+ // Add feed for actioner.
+ act.UserID = act.ActUserID
+ if err := db.Insert(ctx, act); err != nil {
+ return fmt.Errorf("insert new actioner: %w", err)
+ }
+
+ // Add feed for organization
+ if act.Repo.Owner.IsOrganization() && act.ActUserID != act.Repo.Owner.ID {
+ act.ID = 0
+ act.UserID = act.Repo.Owner.ID
+ if err := db.Insert(ctx, act); err != nil {
+ return fmt.Errorf("insert new actioner: %w", err)
+ }
+ }
+
+ for i, watcher := range watchers {
+ if act.ActUserID == watcher.UserID {
+ continue
+ }
+ act.ID = 0
+ act.UserID = watcher.UserID
+ act.Repo.Units = nil
+
+ switch act.OpType {
+ case activities_model.ActionCommitRepo, activities_model.ActionPushTag, activities_model.ActionDeleteTag, activities_model.ActionPublishRelease, activities_model.ActionDeleteBranch:
+ if !permCode[i] {
+ continue
+ }
+ case activities_model.ActionCreateIssue, activities_model.ActionCommentIssue, activities_model.ActionCloseIssue, activities_model.ActionReopenIssue:
+ if !permIssue[i] {
+ continue
+ }
+ case activities_model.ActionCreatePullRequest, activities_model.ActionCommentPull, activities_model.ActionMergePullRequest, activities_model.ActionClosePullRequest, activities_model.ActionReopenPullRequest, activities_model.ActionAutoMergePullRequest:
+ if !permPR[i] {
+ continue
+ }
+ }
+
+ if err := db.Insert(ctx, act); err != nil {
+ return fmt.Errorf("insert new action: %w", err)
+ }
+
+ total, err := activities_model.CountUserFeeds(ctx, act.UserID)
+ if err != nil {
+ return fmt.Errorf("count user feeds: %w", err)
+ }
+
+ _ = cache.GetCache().Put(userFeedCacheKey(act.UserID), fmt.Sprintf("%d", total), setting.CacheService.TTLSeconds())
+ }
+
+ return nil
+}
+
+// NotifyWatchers creates a batch of actions for every watcher of the actions' repository.
+func NotifyWatchers(ctx context.Context, acts ...*activities_model.Action) error {
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ if len(acts) == 0 {
+ return nil
+ }
+
+ repoID := acts[0].RepoID
+ if repoID == 0 {
+ setting.PanicInDevOrTesting("action should belong to a repo")
+ return nil
+ }
+ if err := acts[0].LoadRepo(ctx); err != nil {
+ return err
+ }
+ repo := acts[0].Repo
+ if err := repo.LoadOwner(ctx); err != nil {
+ return err
+ }
+
+ actUserID := acts[0].ActUserID
+
+ // Add feeds for the actor and all watchers.
+ watchers, err := repo_model.GetWatchers(ctx, repoID)
+ if err != nil {
+ return fmt.Errorf("get watchers: %w", err)
+ }
+
+ permCode := make([]bool, len(watchers))
+ permIssue := make([]bool, len(watchers))
+ permPR := make([]bool, len(watchers))
+ for i, watcher := range watchers {
+ user, err := user_model.GetUserByID(ctx, watcher.UserID)
+ if err != nil {
+ permCode[i] = false
+ permIssue[i] = false
+ permPR[i] = false
+ continue
+ }
+ perm, err := access_model.GetUserRepoPermission(ctx, repo, user)
+ if err != nil {
+ permCode[i] = false
+ permIssue[i] = false
+ permPR[i] = false
+ continue
+ }
+ permCode[i] = perm.CanRead(unit.TypeCode)
+ permIssue[i] = perm.CanRead(unit.TypeIssues)
+ permPR[i] = perm.CanRead(unit.TypePullRequests)
+ }
+
+ for _, act := range acts {
+ if act.RepoID != repoID {
+ setting.PanicInDevOrTesting("action should belong to the same repo, expected[%d], got[%d] ", repoID, act.RepoID)
+ }
+ if act.ActUserID != actUserID {
+ setting.PanicInDevOrTesting("action should have the same actor, expected[%d], got[%d] ", actUserID, act.ActUserID)
+ }
+
+ act.Repo = repo
+ if err := notifyWatchers(ctx, act, watchers, permCode, permIssue, permPR); err != nil {
+ return err
+ }
+ }
+ return nil
+ })
+}
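Callers now go through the feed service instead of activities_model.NotifyWatchers. A minimal usage sketch, reusing the fixture-style values from the test below; the package alias and recordStar helper are illustrative only:

package feed_example

import (
	"context"

	activities_model "code.gitea.io/gitea/models/activities"
	feed_service "code.gitea.io/gitea/services/feed"
)

// recordStar inserts a "star" action for repo 1 performed by user 8; the feed
// service fans it out to the actor, the owning org (if any) and every watcher
// that can read the relevant unit. (Hypothetical caller, not part of the patch.)
func recordStar(ctx context.Context) error {
	return feed_service.NotifyWatchers(ctx, &activities_model.Action{
		ActUserID: 8,
		RepoID:    1,
		OpType:    activities_model.ActionStarRepo,
	})
}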
diff --git a/services/feed/feed_test.go b/services/feed/feed_test.go
index 1e4d029e18..a3492938c8 100644
--- a/services/feed/feed_test.go
+++ b/services/feed/feed_test.go
@@ -30,7 +30,7 @@ func TestGetFeeds(t *testing.T) {
assert.NoError(t, err)
if assert.Len(t, actions, 1) {
assert.EqualValues(t, 1, actions[0].ID)
- assert.EqualValues(t, user.ID, actions[0].UserID)
+ assert.Equal(t, user.ID, actions[0].UserID)
}
assert.Equal(t, int64(1), count)
@@ -107,7 +107,7 @@ func TestGetFeeds2(t *testing.T) {
assert.Len(t, actions, 1)
if assert.Len(t, actions, 1) {
assert.EqualValues(t, 2, actions[0].ID)
- assert.EqualValues(t, org.ID, actions[0].UserID)
+ assert.Equal(t, org.ID, actions[0].UserID)
}
assert.Equal(t, int64(1), count)
@@ -163,3 +163,40 @@ func TestRepoActions(t *testing.T) {
assert.NoError(t, err)
assert.Len(t, actions, 1)
}
+
+func TestNotifyWatchers(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+
+ action := &activities_model.Action{
+ ActUserID: 8,
+ RepoID: 1,
+ OpType: activities_model.ActionStarRepo,
+ }
+ assert.NoError(t, NotifyWatchers(db.DefaultContext, action))
+
+ // One watcher is inactive, so actions are only created for users 8, 1, 4 and 11
+ unittest.AssertExistsAndLoadBean(t, &activities_model.Action{
+ ActUserID: action.ActUserID,
+ UserID: 8,
+ RepoID: action.RepoID,
+ OpType: action.OpType,
+ })
+ unittest.AssertExistsAndLoadBean(t, &activities_model.Action{
+ ActUserID: action.ActUserID,
+ UserID: 1,
+ RepoID: action.RepoID,
+ OpType: action.OpType,
+ })
+ unittest.AssertExistsAndLoadBean(t, &activities_model.Action{
+ ActUserID: action.ActUserID,
+ UserID: 4,
+ RepoID: action.RepoID,
+ OpType: action.OpType,
+ })
+ unittest.AssertExistsAndLoadBean(t, &activities_model.Action{
+ ActUserID: action.ActUserID,
+ UserID: 11,
+ RepoID: action.RepoID,
+ OpType: action.OpType,
+ })
+}
diff --git a/services/feed/notifier.go b/services/feed/notifier.go
index 3aaf885c9a..64aeccdfd2 100644
--- a/services/feed/notifier.go
+++ b/services/feed/notifier.go
@@ -49,7 +49,7 @@ func (a *actionNotifier) NewIssue(ctx context.Context, issue *issues_model.Issue
}
repo := issue.Repo
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := NotifyWatchers(ctx, &activities_model.Action{
ActUserID: issue.Poster.ID,
ActUser: issue.Poster,
OpType: activities_model.ActionCreateIssue,
@@ -90,7 +90,7 @@ func (a *actionNotifier) IssueChangeStatus(ctx context.Context, doer *user_model
}
// Notify watchers for whatever action comes in, ignore if no action type.
- if err := activities_model.NotifyWatchers(ctx, act); err != nil {
+ if err := NotifyWatchers(ctx, act); err != nil {
log.Error("NotifyWatchers: %v", err)
}
}
@@ -126,7 +126,7 @@ func (a *actionNotifier) CreateIssueComment(ctx context.Context, doer *user_mode
}
// Notify watchers for whatever action comes in, ignore if no action type.
- if err := activities_model.NotifyWatchers(ctx, act); err != nil {
+ if err := NotifyWatchers(ctx, act); err != nil {
log.Error("NotifyWatchers: %v", err)
}
}
@@ -145,7 +145,7 @@ func (a *actionNotifier) NewPullRequest(ctx context.Context, pull *issues_model.
return
}
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := NotifyWatchers(ctx, &activities_model.Action{
ActUserID: pull.Issue.Poster.ID,
ActUser: pull.Issue.Poster,
OpType: activities_model.ActionCreatePullRequest,
@@ -159,7 +159,7 @@ func (a *actionNotifier) NewPullRequest(ctx context.Context, pull *issues_model.
}
func (a *actionNotifier) RenameRepository(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, oldRepoName string) {
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := NotifyWatchers(ctx, &activities_model.Action{
ActUserID: doer.ID,
ActUser: doer,
OpType: activities_model.ActionRenameRepo,
@@ -173,7 +173,7 @@ func (a *actionNotifier) RenameRepository(ctx context.Context, doer *user_model.
}
func (a *actionNotifier) TransferRepository(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, oldOwnerName string) {
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := NotifyWatchers(ctx, &activities_model.Action{
ActUserID: doer.ID,
ActUser: doer,
OpType: activities_model.ActionTransferRepo,
@@ -187,7 +187,7 @@ func (a *actionNotifier) TransferRepository(ctx context.Context, doer *user_mode
}
func (a *actionNotifier) CreateRepository(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository) {
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := NotifyWatchers(ctx, &activities_model.Action{
ActUserID: doer.ID,
ActUser: doer,
OpType: activities_model.ActionCreateRepo,
@@ -200,7 +200,7 @@ func (a *actionNotifier) CreateRepository(ctx context.Context, doer, u *user_mod
}
func (a *actionNotifier) ForkRepository(ctx context.Context, doer *user_model.User, oldRepo, repo *repo_model.Repository) {
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := NotifyWatchers(ctx, &activities_model.Action{
ActUserID: doer.ID,
ActUser: doer,
OpType: activities_model.ActionCreateRepo,
@@ -265,13 +265,13 @@ func (a *actionNotifier) PullRequestReview(ctx context.Context, pr *issues_model
actions = append(actions, action)
}
- if err := activities_model.NotifyWatchersActions(ctx, actions); err != nil {
+ if err := NotifyWatchers(ctx, actions...); err != nil {
log.Error("notify watchers '%d/%d': %v", review.Reviewer.ID, review.Issue.RepoID, err)
}
}
func (*actionNotifier) MergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := NotifyWatchers(ctx, &activities_model.Action{
ActUserID: doer.ID,
ActUser: doer,
OpType: activities_model.ActionMergePullRequest,
@@ -285,7 +285,7 @@ func (*actionNotifier) MergePullRequest(ctx context.Context, doer *user_model.Us
}
func (*actionNotifier) AutoMergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := NotifyWatchers(ctx, &activities_model.Action{
ActUserID: doer.ID,
ActUser: doer,
OpType: activities_model.ActionAutoMergePullRequest,
@@ -303,7 +303,7 @@ func (*actionNotifier) NotifyPullRevieweDismiss(ctx context.Context, doer *user_
if len(review.OriginalAuthor) > 0 {
reviewerName = review.OriginalAuthor
}
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := NotifyWatchers(ctx, &activities_model.Action{
ActUserID: doer.ID,
ActUser: doer,
OpType: activities_model.ActionPullReviewDismissed,
@@ -337,7 +337,7 @@ func (a *actionNotifier) PushCommits(ctx context.Context, pusher *user_model.Use
opType = activities_model.ActionDeleteBranch
}
- if err = activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err = NotifyWatchers(ctx, &activities_model.Action{
ActUserID: pusher.ID,
ActUser: pusher,
OpType: opType,
@@ -357,7 +357,7 @@ func (a *actionNotifier) CreateRef(ctx context.Context, doer *user_model.User, r
// has sent same action in `PushCommits`, so skip it.
return
}
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := NotifyWatchers(ctx, &activities_model.Action{
ActUserID: doer.ID,
ActUser: doer,
OpType: opType,
@@ -376,7 +376,7 @@ func (a *actionNotifier) DeleteRef(ctx context.Context, doer *user_model.User, r
// has sent same action in `PushCommits`, so skip it.
return
}
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := NotifyWatchers(ctx, &activities_model.Action{
ActUserID: doer.ID,
ActUser: doer,
OpType: opType,
@@ -402,7 +402,7 @@ func (a *actionNotifier) SyncPushCommits(ctx context.Context, pusher *user_model
return
}
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := NotifyWatchers(ctx, &activities_model.Action{
ActUserID: repo.OwnerID,
ActUser: repo.MustOwner(ctx),
OpType: activities_model.ActionMirrorSyncPush,
@@ -423,7 +423,7 @@ func (a *actionNotifier) SyncCreateRef(ctx context.Context, doer *user_model.Use
return
}
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := NotifyWatchers(ctx, &activities_model.Action{
ActUserID: repo.OwnerID,
ActUser: repo.MustOwner(ctx),
OpType: activities_model.ActionMirrorSyncCreate,
@@ -443,7 +443,7 @@ func (a *actionNotifier) SyncDeleteRef(ctx context.Context, doer *user_model.Use
return
}
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := NotifyWatchers(ctx, &activities_model.Action{
ActUserID: repo.OwnerID,
ActUser: repo.MustOwner(ctx),
OpType: activities_model.ActionMirrorSyncDelete,
@@ -461,7 +461,7 @@ func (a *actionNotifier) NewRelease(ctx context.Context, rel *repo_model.Release
log.Error("LoadAttributes: %v", err)
return
}
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := NotifyWatchers(ctx, &activities_model.Action{
ActUserID: rel.PublisherID,
ActUser: rel.Publisher,
OpType: activities_model.ActionPublishRelease,
diff --git a/services/forms/repo_form.go b/services/forms/repo_form.go
index 2c6373e03c..d20220b784 100644
--- a/services/forms/repo_form.go
+++ b/services/forms/repo_form.go
@@ -110,17 +110,14 @@ type RepoSettingForm struct {
EnablePrune bool
// Advanced settings
- EnableCode bool
- DefaultCodeEveryoneAccess string
+ EnableCode bool
- EnableWiki bool
- EnableExternalWiki bool
- DefaultWikiBranch string
- DefaultWikiEveryoneAccess string
- ExternalWikiURL string
+ EnableWiki bool
+ EnableExternalWiki bool
+ DefaultWikiBranch string
+ ExternalWikiURL string
EnableIssues bool
- DefaultIssuesEveryoneAccess string
EnableExternalTracker bool
ExternalTrackerURL string
TrackerURLFormat string
@@ -170,13 +167,6 @@ func (f *RepoSettingForm) Validate(req *http.Request, errs binding.Errors) bindi
return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
}
-// __________ .__
-// \______ \____________ ____ ____ | |__
-// | | _/\_ __ \__ \ / \_/ ___\| | \
-// | | \ | | \// __ \| | \ \___| Y \
-// |______ / |__| (____ /___| /\___ >___| /
-// \/ \/ \/ \/ \/
-
// ProtectBranchForm form for changing protected branch settings
type ProtectBranchForm struct {
RuleName string `binding:"Required"`
@@ -243,6 +233,8 @@ type WebhookForm struct {
Repository bool
Release bool
Package bool
+ Status bool
+ WorkflowJob bool
Active bool
BranchFilter string `binding:"GlobPattern"`
AuthorizationHeader string
diff --git a/services/forms/user_form.go b/services/forms/user_form.go
index ed79936add..ddf2bd09b0 100644
--- a/services/forms/user_form.go
+++ b/services/forms/user_form.go
@@ -7,9 +7,7 @@ package forms
import (
"mime/multipart"
"net/http"
- "strings"
- auth_model "code.gitea.io/gitea/models/auth"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/web/middleware"
@@ -325,8 +323,9 @@ func (f *AddKeyForm) Validate(req *http.Request, errs binding.Errors) binding.Er
// AddSecretForm for adding secrets
type AddSecretForm struct {
- Name string `binding:"Required;MaxSize(255)"`
- Data string `binding:"Required;MaxSize(65535)"`
+ Name string `binding:"Required;MaxSize(255)"`
+ Data string `binding:"Required;MaxSize(65535)"`
+ Description string `binding:"MaxSize(65535)"`
}
// Validate validates the fields
@@ -336,8 +335,9 @@ func (f *AddSecretForm) Validate(req *http.Request, errs binding.Errors) binding
}
type EditVariableForm struct {
- Name string `binding:"Required;MaxSize(255)"`
- Data string `binding:"Required;MaxSize(65535)"`
+ Name string `binding:"Required;MaxSize(255)"`
+ Data string `binding:"Required;MaxSize(65535)"`
+ Description string `binding:"MaxSize(65535)"`
}
func (f *EditVariableForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
@@ -347,8 +347,7 @@ func (f *EditVariableForm) Validate(req *http.Request, errs binding.Errors) bind
// NewAccessTokenForm form for creating access token
type NewAccessTokenForm struct {
- Name string `binding:"Required;MaxSize(255)" locale:"settings.token_name"`
- Scope []string
+ Name string `binding:"Required;MaxSize(255)" locale:"settings.token_name"`
}
// Validate validates the fields
@@ -357,12 +356,6 @@ func (f *NewAccessTokenForm) Validate(req *http.Request, errs binding.Errors) bi
return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
}
-func (f *NewAccessTokenForm) GetScope() (auth_model.AccessTokenScope, error) {
- scope := strings.Join(f.Scope, ",")
- s, err := auth_model.AccessTokenScope(scope).Normalize()
- return s, err
-}
-
// EditOAuth2ApplicationForm form for editing oauth2 applications
type EditOAuth2ApplicationForm struct {
Name string `binding:"Required;MaxSize(255)" form:"application_name"`
diff --git a/services/forms/user_form_test.go b/services/forms/user_form_test.go
index 66050187c9..b4120f20ed 100644
--- a/services/forms/user_form_test.go
+++ b/services/forms/user_form_test.go
@@ -4,10 +4,8 @@
package forms
import (
- "strconv"
"testing"
- auth_model "code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/modules/setting"
"github.com/gobwas/glob"
@@ -104,28 +102,3 @@ func TestRegisterForm_IsDomainAllowed_BlockedEmail(t *testing.T) {
assert.Equal(t, v.valid, form.IsEmailDomainAllowed())
}
}
-
-func TestNewAccessTokenForm_GetScope(t *testing.T) {
- tests := []struct {
- form NewAccessTokenForm
- scope auth_model.AccessTokenScope
- expectedErr error
- }{
- {
- form: NewAccessTokenForm{Name: "test", Scope: []string{"read:repository"}},
- scope: "read:repository",
- },
- {
- form: NewAccessTokenForm{Name: "test", Scope: []string{"read:repository", "write:user"}},
- scope: "read:repository,write:user",
- },
- }
-
- for i, test := range tests {
- t.Run(strconv.Itoa(i), func(t *testing.T) {
- scope, err := test.form.GetScope()
- assert.Equal(t, test.expectedErr, err)
- assert.Equal(t, test.scope, scope)
- })
- }
-}
diff --git a/services/git/commit.go b/services/git/commit.go
new file mode 100644
index 0000000000..8ab8f3d369
--- /dev/null
+++ b/services/git/commit.go
@@ -0,0 +1,95 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package git
+
+import (
+ "context"
+
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/git"
+ asymkey_service "code.gitea.io/gitea/services/asymkey"
+)
+
+// ParseCommitsWithSignature checks whether the signatures of the commits correspond to the users' GPG keys.
+func ParseCommitsWithSignature(ctx context.Context, oldCommits []*user_model.UserCommit, repoTrustModel repo_model.TrustModelType, isOwnerMemberCollaborator func(*user_model.User) (bool, error)) ([]*asymkey_model.SignCommit, error) {
+ newCommits := make([]*asymkey_model.SignCommit, 0, len(oldCommits))
+ keyMap := map[string]bool{}
+
+ emails := make(container.Set[string])
+ for _, c := range oldCommits {
+ if c.Committer != nil {
+ emails.Add(c.Committer.Email)
+ }
+ }
+
+ emailUsers, err := user_model.GetUsersByEmails(ctx, emails.Values())
+ if err != nil {
+ return nil, err
+ }
+
+ for _, c := range oldCommits {
+ committer, ok := emailUsers[c.Committer.Email]
+ if !ok && c.Committer != nil {
+ committer = &user_model.User{
+ Name: c.Committer.Name,
+ Email: c.Committer.Email,
+ }
+ }
+
+ signCommit := &asymkey_model.SignCommit{
+ UserCommit: c,
+ Verification: asymkey_service.ParseCommitWithSignatureCommitter(ctx, c.Commit, committer),
+ }
+
+ _ = asymkey_model.CalculateTrustStatus(signCommit.Verification, repoTrustModel, isOwnerMemberCollaborator, &keyMap)
+
+ newCommits = append(newCommits, signCommit)
+ }
+ return newCommits, nil
+}
+
+// ConvertFromGitCommit converts git commits into SignCommitWithStatuses
+func ConvertFromGitCommit(ctx context.Context, commits []*git.Commit, repo *repo_model.Repository) ([]*git_model.SignCommitWithStatuses, error) {
+ validatedCommits, err := user_model.ValidateCommitsWithEmails(ctx, commits)
+ if err != nil {
+ return nil, err
+ }
+ signedCommits, err := ParseCommitsWithSignature(
+ ctx,
+ validatedCommits,
+ repo.GetTrustModel(),
+ func(user *user_model.User) (bool, error) {
+ return repo_model.IsOwnerMemberCollaborator(ctx, repo, user.ID)
+ },
+ )
+ if err != nil {
+ return nil, err
+ }
+ return ParseCommitsWithStatus(ctx, signedCommits, repo)
+}
+
+// ParseCommitsWithStatus checks the commits' latest statuses and calculates the worst status state for each commit
+func ParseCommitsWithStatus(ctx context.Context, oldCommits []*asymkey_model.SignCommit, repo *repo_model.Repository) ([]*git_model.SignCommitWithStatuses, error) {
+ newCommits := make([]*git_model.SignCommitWithStatuses, 0, len(oldCommits))
+
+ for _, c := range oldCommits {
+ commit := &git_model.SignCommitWithStatuses{
+ SignCommit: c,
+ }
+ statuses, _, err := git_model.GetLatestCommitStatus(ctx, repo.ID, commit.ID.String(), db.ListOptions{})
+ if err != nil {
+ return nil, err
+ }
+
+ commit.Statuses = statuses
+ commit.Status = git_model.CalcCommitStatus(statuses)
+ newCommits = append(newCommits, commit)
+ }
+ return newCommits, nil
+}
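A rough usage sketch for the new helper, assuming the caller already holds a loaded *repo_model.Repository and a slice of *git.Commit; the annotateCommits wrapper and package alias are hypothetical:

package git_example

import (
	"context"

	git_model "code.gitea.io/gitea/models/git"
	repo_model "code.gitea.io/gitea/models/repo"
	"code.gitea.io/gitea/modules/git"
	git_service "code.gitea.io/gitea/services/git"
)

// annotateCommits validates committer emails, verifies signatures and attaches the
// latest commit statuses in one pass, i.e. the pipeline implemented by ConvertFromGitCommit.
func annotateCommits(ctx context.Context, repo *repo_model.Repository, commits []*git.Commit) ([]*git_model.SignCommitWithStatuses, error) {
	return git_service.ConvertFromGitCommit(ctx, commits, repo)
}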
diff --git a/services/gitdiff/git_diff_tree.go b/services/gitdiff/git_diff_tree.go
new file mode 100644
index 0000000000..035210a31d
--- /dev/null
+++ b/services/gitdiff/git_diff_tree.go
@@ -0,0 +1,249 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package gitdiff
+
+import (
+ "bufio"
+ "context"
+ "fmt"
+ "io"
+ "strconv"
+ "strings"
+
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+)
+
+type DiffTree struct {
+ Files []*DiffTreeRecord
+}
+
+type DiffTreeRecord struct {
+ // Status is one of 'added', 'deleted', 'modified', 'renamed', 'copied', 'typechanged', 'unmerged', 'unknown'
+ Status string
+
+ // For renames and copies, the percentage of similarity between the source and target of the move/rename.
+ Score uint8
+
+ HeadPath string
+ BasePath string
+ HeadMode git.EntryMode
+ BaseMode git.EntryMode
+ HeadBlobID string
+ BaseBlobID string
+}
+
+// GetDiffTree returns the list of paths of the files that have changed between the two commits.
+// If useMergeBase is true, the diff will be calculated using the merge base of the two commits.
+// This is the same behavior as using a three-dot diff in git diff.
+func GetDiffTree(ctx context.Context, gitRepo *git.Repository, useMergeBase bool, baseSha, headSha string) (*DiffTree, error) {
+ gitDiffTreeRecords, err := runGitDiffTree(ctx, gitRepo, useMergeBase, baseSha, headSha)
+ if err != nil {
+ return nil, err
+ }
+
+ return &DiffTree{
+ Files: gitDiffTreeRecords,
+ }, nil
+}
+
+func runGitDiffTree(ctx context.Context, gitRepo *git.Repository, useMergeBase bool, baseSha, headSha string) ([]*DiffTreeRecord, error) {
+ useMergeBase, baseCommitID, headCommitID, err := validateGitDiffTreeArguments(gitRepo, useMergeBase, baseSha, headSha)
+ if err != nil {
+ return nil, err
+ }
+
+ cmd := git.NewCommand("diff-tree", "--raw", "-r", "--find-renames", "--root")
+ if useMergeBase {
+ cmd.AddArguments("--merge-base")
+ }
+ cmd.AddDynamicArguments(baseCommitID, headCommitID)
+ stdout, _, runErr := cmd.RunStdString(ctx, &git.RunOpts{Dir: gitRepo.Path})
+ if runErr != nil {
+ log.Warn("git diff-tree: %v", runErr)
+ return nil, runErr
+ }
+
+ return parseGitDiffTree(strings.NewReader(stdout))
+}
+
+func validateGitDiffTreeArguments(gitRepo *git.Repository, useMergeBase bool, baseSha, headSha string) (shouldUseMergeBase bool, resolvedBaseSha, resolvedHeadSha string, err error) {
+ // if the head is empty, it's an error
+ if headSha == "" {
+ return false, "", "", fmt.Errorf("headSha is empty")
+ }
+
+ // if the head commit doesn't exist, it's an error
+ headCommit, err := gitRepo.GetCommit(headSha)
+ if err != nil {
+ return false, "", "", fmt.Errorf("failed to get commit headSha: %v", err)
+ }
+ headCommitID := headCommit.ID.String()
+
+ // if the base is empty we should use the parent of the head commit
+ if baseSha == "" {
+ // if the headCommit has no parent, we should use the empty tree
+ // this can happen when we are generating a diff against an orphaned commit
+ if headCommit.ParentCount() == 0 {
+ objectFormat, err := gitRepo.GetObjectFormat()
+ if err != nil {
+ return false, "", "", err
+ }
+
+ // We set useMergeBase to false because we have no base commit
+ return false, objectFormat.EmptyTree().String(), headCommitID, nil
+ }
+
+ baseCommit, err := headCommit.Parent(0)
+ if err != nil {
+ return false, "", "", fmt.Errorf("baseSha is '', attempted to use parent of commit %s, got error: %v", headCommit.ID.String(), err)
+ }
+ return useMergeBase, baseCommit.ID.String(), headCommitID, nil
+ }
+
+ // try to get the base commit
+ baseCommit, err := gitRepo.GetCommit(baseSha)
+ // propagate the error if we couldn't get the base commit
+ if err != nil {
+ return useMergeBase, "", "", fmt.Errorf("failed to get base commit %s: %v", baseSha, err)
+ }
+
+ return useMergeBase, baseCommit.ID.String(), headCommit.ID.String(), nil
+}
+
+func parseGitDiffTree(gitOutput io.Reader) ([]*DiffTreeRecord, error) {
+ /*
+ The output of `git diff-tree --raw -r --find-renames` is of the form:
+
+ :<old_mode> <new_mode> <old_sha> <new_sha> <status>\t<path>
+
+ or for renames:
+
+ :<old_mode> <new_mode> <old_sha> <new_sha> <status>\t<old_path>\t<new_path>
+
+ See: <https://git-scm.com/docs/git-diff-tree#_raw_output_format> for more details
+ */
+ results := make([]*DiffTreeRecord, 0)
+
+ lines := bufio.NewScanner(gitOutput)
+ for lines.Scan() {
+ line := lines.Text()
+
+ if len(line) == 0 {
+ continue
+ }
+
+ record, err := parseGitDiffTreeLine(line)
+ if err != nil {
+ return nil, err
+ }
+
+ results = append(results, record)
+ }
+
+ if err := lines.Err(); err != nil {
+ return nil, err
+ }
+
+ return results, nil
+}
+
+func parseGitDiffTreeLine(line string) (*DiffTreeRecord, error) {
+ line = strings.TrimPrefix(line, ":")
+ splitSections := strings.SplitN(line, "\t", 2)
+ if len(splitSections) < 2 {
+ return nil, fmt.Errorf("unparsable output for diff-tree --raw: `%s`)", line)
+ }
+
+ fields := strings.Fields(splitSections[0])
+ if len(fields) < 5 {
+ return nil, fmt.Errorf("unparsable output for diff-tree --raw: `%s`, expected 5 space delimited values got %d)", line, len(fields))
+ }
+
+ baseMode, err := git.ParseEntryMode(fields[0])
+ if err != nil {
+ return nil, err
+ }
+
+ headMode, err := git.ParseEntryMode(fields[1])
+ if err != nil {
+ return nil, err
+ }
+
+ baseBlobID := fields[2]
+ headBlobID := fields[3]
+
+ status, score, err := statusFromLetter(fields[4])
+ if err != nil {
+ return nil, fmt.Errorf("unparsable output for diff-tree --raw: %s, error: %s", line, err)
+ }
+
+ filePaths := strings.Split(splitSections[1], "\t")
+
+ var headPath, basePath string
+ if status == "renamed" {
+ if len(filePaths) != 2 {
+ return nil, fmt.Errorf("unparsable output for diff-tree --raw: `%s`, expected 2 paths found %d", line, len(filePaths))
+ }
+ basePath = filePaths[0]
+ headPath = filePaths[1]
+ } else {
+ basePath = filePaths[0]
+ headPath = filePaths[0]
+ }
+
+ return &DiffTreeRecord{
+ Status: status,
+ Score: score,
+ BaseMode: baseMode,
+ HeadMode: headMode,
+ BaseBlobID: baseBlobID,
+ HeadBlobID: headBlobID,
+ BasePath: basePath,
+ HeadPath: headPath,
+ }, nil
+}
+
+func statusFromLetter(rawStatus string) (status string, score uint8, err error) {
+ if len(rawStatus) < 1 {
+ return "", 0, fmt.Errorf("empty status letter")
+ }
+ switch rawStatus[0] {
+ case 'A':
+ return "added", 0, nil
+ case 'D':
+ return "deleted", 0, nil
+ case 'M':
+ return "modified", 0, nil
+ case 'R':
+ score, err = tryParseStatusScore(rawStatus)
+ return "renamed", score, err
+ case 'C':
+ score, err = tryParseStatusScore(rawStatus)
+ return "copied", score, err
+ case 'T':
+ return "typechanged", 0, nil
+ case 'U':
+ return "unmerged", 0, nil
+ case 'X':
+ return "unknown", 0, nil
+ default:
+ return "", 0, fmt.Errorf("unknown status letter: '%s'", rawStatus)
+ }
+}
+
+func tryParseStatusScore(rawStatus string) (uint8, error) {
+ if len(rawStatus) < 2 {
+ return 0, fmt.Errorf("status score missing")
+ }
+
+ score, err := strconv.ParseUint(rawStatus[1:], 10, 8)
+ if err != nil {
+ return 0, fmt.Errorf("failed to parse status score: %w", err)
+ } else if score > 100 {
+ return 0, fmt.Errorf("status score out of range: %d", score)
+ }
+
+ return uint8(score), nil
+}
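A minimal sketch of driving the new diff-tree helper end to end, assuming a repository path and two commit SHAs such as the fixtures in the test file below; listChangedPaths and the package alias are illustrative names:

package gitdiff_example

import (
	"context"
	"fmt"

	"code.gitea.io/gitea/modules/git"
	"code.gitea.io/gitea/services/gitdiff"
)

// listChangedPaths prints one "<status> <path>" line per file changed between two
// commits, using the merge base so the result matches a three-dot diff.
func listChangedPaths(ctx context.Context, repoPath, baseSha, headSha string) error {
	gitRepo, err := git.OpenRepository(ctx, repoPath)
	if err != nil {
		return err
	}
	defer gitRepo.Close()

	diffTree, err := gitdiff.GetDiffTree(ctx, gitRepo, true /* useMergeBase */, baseSha, headSha)
	if err != nil {
		return err
	}
	for _, f := range diffTree.Files {
		fmt.Printf("%s %s\n", f.Status, f.HeadPath)
	}
	return nil
}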
diff --git a/services/gitdiff/git_diff_tree_test.go b/services/gitdiff/git_diff_tree_test.go
new file mode 100644
index 0000000000..313d279e95
--- /dev/null
+++ b/services/gitdiff/git_diff_tree_test.go
@@ -0,0 +1,427 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package gitdiff
+
+import (
+ "strings"
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/git"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestGitDiffTree(t *testing.T) {
+ test := []struct {
+ Name string
+ RepoPath string
+ BaseSha string
+ HeadSha string
+ useMergeBase bool
+ Expected *DiffTree
+ }{
+ {
+ Name: "happy path",
+ RepoPath: "../../modules/git/tests/repos/repo5_pulls",
+ BaseSha: "72866af952e98d02a73003501836074b286a78f6",
+ HeadSha: "d8e0bbb45f200e67d9a784ce55bd90821af45ebd",
+ Expected: &DiffTree{
+ Files: []*DiffTreeRecord{
+ {
+ Status: "modified",
+ HeadPath: "LICENSE",
+ BasePath: "LICENSE",
+ HeadMode: git.EntryModeBlob,
+ BaseMode: git.EntryModeBlob,
+ HeadBlobID: "ee469963e76ae1bb7ee83d7510df2864e6c8c640",
+ BaseBlobID: "c996f4725be8fc8c1d1c776e58c97ddc5d03b336",
+ },
+ {
+ Status: "modified",
+ HeadPath: "README.md",
+ BasePath: "README.md",
+ HeadMode: git.EntryModeBlob,
+ BaseMode: git.EntryModeBlob,
+ HeadBlobID: "9dfc0a6257d8eff526f0cfaf6a8ea950f55a9dba",
+ BaseBlobID: "074e590b8e64898b02beef03ece83f962c94f54c",
+ },
+ },
+ },
+ },
+ {
+ Name: "first commit (no parent)",
+ RepoPath: "../../modules/git/tests/repos/repo5_pulls",
+ HeadSha: "72866af952e98d02a73003501836074b286a78f6",
+ Expected: &DiffTree{
+ Files: []*DiffTreeRecord{
+ {
+ Status: "added",
+ HeadPath: ".gitignore",
+ BasePath: ".gitignore",
+ HeadMode: git.EntryModeBlob,
+ BaseMode: git.EntryModeNoEntry,
+ HeadBlobID: "f1c181ec9c5c921245027c6b452ecfc1d3626364",
+ BaseBlobID: "0000000000000000000000000000000000000000",
+ },
+ {
+ Status: "added",
+ HeadPath: "LICENSE",
+ BasePath: "LICENSE",
+ HeadMode: git.EntryModeBlob,
+ BaseMode: git.EntryModeNoEntry,
+ HeadBlobID: "c996f4725be8fc8c1d1c776e58c97ddc5d03b336",
+ BaseBlobID: "0000000000000000000000000000000000000000",
+ },
+ {
+ Status: "added",
+ HeadPath: "README.md",
+ BasePath: "README.md",
+ HeadMode: git.EntryModeBlob,
+ BaseMode: git.EntryModeNoEntry,
+ HeadBlobID: "074e590b8e64898b02beef03ece83f962c94f54c",
+ BaseBlobID: "0000000000000000000000000000000000000000",
+ },
+ },
+ },
+ },
+ {
+ Name: "first commit (no parent), merge base = true",
+ RepoPath: "../../modules/git/tests/repos/repo5_pulls",
+ HeadSha: "72866af952e98d02a73003501836074b286a78f6",
+ useMergeBase: true,
+ Expected: &DiffTree{
+ Files: []*DiffTreeRecord{
+ {
+ Status: "added",
+ HeadPath: ".gitignore",
+ BasePath: ".gitignore",
+ HeadMode: git.EntryModeBlob,
+ BaseMode: git.EntryModeNoEntry,
+ HeadBlobID: "f1c181ec9c5c921245027c6b452ecfc1d3626364",
+ BaseBlobID: "0000000000000000000000000000000000000000",
+ },
+ {
+ Status: "added",
+ HeadPath: "LICENSE",
+ BasePath: "LICENSE",
+ HeadMode: git.EntryModeBlob,
+ BaseMode: git.EntryModeNoEntry,
+ HeadBlobID: "c996f4725be8fc8c1d1c776e58c97ddc5d03b336",
+ BaseBlobID: "0000000000000000000000000000000000000000",
+ },
+ {
+ Status: "added",
+ HeadPath: "README.md",
+ BasePath: "README.md",
+ HeadMode: git.EntryModeBlob,
+ BaseMode: git.EntryModeNoEntry,
+ HeadBlobID: "074e590b8e64898b02beef03ece83f962c94f54c",
+ BaseBlobID: "0000000000000000000000000000000000000000",
+ },
+ },
+ },
+ },
+ {
+ Name: "base and head same",
+ RepoPath: "../../modules/git/tests/repos/repo5_pulls",
+ BaseSha: "ed8f4d2fa5b2420706580d191f5dd50c4e491f3f",
+ HeadSha: "ed8f4d2fa5b2420706580d191f5dd50c4e491f3f",
+ Expected: &DiffTree{
+ Files: []*DiffTreeRecord{},
+ },
+ },
+ {
+ Name: "useMergeBase false",
+ RepoPath: "../../modules/git/tests/repos/repo5_pulls",
+ BaseSha: "ed8f4d2fa5b2420706580d191f5dd50c4e491f3f",
+ HeadSha: "111cac04bd7d20301964e27a93698aabb5781b80", // this commit can be found on the update-readme branch
+ useMergeBase: false,
+ Expected: &DiffTree{
+ Files: []*DiffTreeRecord{
+ {
+ Status: "modified",
+ HeadPath: "LICENSE",
+ BasePath: "LICENSE",
+ HeadMode: git.EntryModeBlob,
+ BaseMode: git.EntryModeBlob,
+ HeadBlobID: "c996f4725be8fc8c1d1c776e58c97ddc5d03b336",
+ BaseBlobID: "ed5119b3c1f45547b6785bc03eac7f87570fa17f",
+ },
+
+ {
+ Status: "modified",
+ HeadPath: "README.md",
+ BasePath: "README.md",
+ HeadMode: git.EntryModeBlob,
+ BaseMode: git.EntryModeBlob,
+ HeadBlobID: "fb39771a8865c9a67f2ab9b616c854805664553c",
+ BaseBlobID: "9dfc0a6257d8eff526f0cfaf6a8ea950f55a9dba",
+ },
+ },
+ },
+ },
+ {
+ Name: "useMergeBase true",
+ RepoPath: "../../modules/git/tests/repos/repo5_pulls",
+ BaseSha: "ed8f4d2fa5b2420706580d191f5dd50c4e491f3f",
+ HeadSha: "111cac04bd7d20301964e27a93698aabb5781b80", // this commit can be found on the update-readme branch
+ useMergeBase: true,
+ Expected: &DiffTree{
+ Files: []*DiffTreeRecord{
+ {
+ Status: "modified",
+ HeadPath: "README.md",
+ BasePath: "README.md",
+ HeadMode: git.EntryModeBlob,
+ BaseMode: git.EntryModeBlob,
+ HeadBlobID: "fb39771a8865c9a67f2ab9b616c854805664553c",
+ BaseBlobID: "9dfc0a6257d8eff526f0cfaf6a8ea950f55a9dba",
+ },
+ },
+ },
+ },
+ {
+ Name: "no base set",
+ RepoPath: "../../modules/git/tests/repos/repo5_pulls",
+ HeadSha: "d8e0bbb45f200e67d9a784ce55bd90821af45ebd", // this commit can be found on the update-readme branch
+ useMergeBase: false,
+ Expected: &DiffTree{
+ Files: []*DiffTreeRecord{
+ {
+ Status: "modified",
+ HeadPath: "LICENSE",
+ BasePath: "LICENSE",
+ HeadMode: git.EntryModeBlob,
+ BaseMode: git.EntryModeBlob,
+ HeadBlobID: "ee469963e76ae1bb7ee83d7510df2864e6c8c640",
+ BaseBlobID: "ed5119b3c1f45547b6785bc03eac7f87570fa17f",
+ },
+ },
+ },
+ },
+ }
+
+ for _, tt := range test {
+ t.Run(tt.Name, func(t *testing.T) {
+ gitRepo, err := git.OpenRepository(git.DefaultContext, tt.RepoPath)
+ assert.NoError(t, err)
+ defer gitRepo.Close()
+
+ diffPaths, err := GetDiffTree(db.DefaultContext, gitRepo, tt.useMergeBase, tt.BaseSha, tt.HeadSha)
+ require.NoError(t, err)
+
+ assert.Equal(t, tt.Expected, diffPaths)
+ })
+ }
+}
+
+func TestParseGitDiffTree(t *testing.T) {
+ test := []struct {
+ Name string
+ GitOutput string
+ Expected []*DiffTreeRecord
+ }{
+ {
+ Name: "file change",
+ GitOutput: ":100644 100644 64e43d23bcd08db12563a0a4d84309cadb437e1a 5dbc7792b5bb228647cfcc8dfe65fc649119dedc M\tResources/views/curriculum/edit.blade.php",
+ Expected: []*DiffTreeRecord{
+ {
+ Status: "modified",
+ HeadPath: "Resources/views/curriculum/edit.blade.php",
+ BasePath: "Resources/views/curriculum/edit.blade.php",
+ HeadMode: git.EntryModeBlob,
+ BaseMode: git.EntryModeBlob,
+ HeadBlobID: "5dbc7792b5bb228647cfcc8dfe65fc649119dedc",
+ BaseBlobID: "64e43d23bcd08db12563a0a4d84309cadb437e1a",
+ },
+ },
+ },
+ {
+ Name: "file added",
+ GitOutput: ":000000 100644 0000000000000000000000000000000000000000 0063162fb403db15ceb0517b34ab782e4e58b619 A\tResources/views/class/index.blade.php",
+ Expected: []*DiffTreeRecord{
+ {
+ Status: "added",
+ HeadPath: "Resources/views/class/index.blade.php",
+ BasePath: "Resources/views/class/index.blade.php",
+ HeadMode: git.EntryModeBlob,
+ BaseMode: git.EntryModeNoEntry,
+ HeadBlobID: "0063162fb403db15ceb0517b34ab782e4e58b619",
+ BaseBlobID: "0000000000000000000000000000000000000000",
+ },
+ },
+ },
+ {
+ Name: "file deleted",
+ GitOutput: ":100644 000000 bac4286303c8c0017ea2f0a48c561ddcc0330a14 0000000000000000000000000000000000000000 D\tResources/views/classes/index.blade.php",
+ Expected: []*DiffTreeRecord{
+ {
+ Status: "deleted",
+ HeadPath: "Resources/views/classes/index.blade.php",
+ BasePath: "Resources/views/classes/index.blade.php",
+ HeadMode: git.EntryModeNoEntry,
+ BaseMode: git.EntryModeBlob,
+ HeadBlobID: "0000000000000000000000000000000000000000",
+ BaseBlobID: "bac4286303c8c0017ea2f0a48c561ddcc0330a14",
+ },
+ },
+ },
+ {
+ Name: "file renamed",
+ GitOutput: ":100644 100644 c8a055cfb45cd39747292983ad1797ceab40f5b1 97248f79a90aaf81fe7fd74b33c1cb182dd41783 R087\tDatabase/Seeders/AdminDatabaseSeeder.php\tDatabase/Seeders/AcademicDatabaseSeeder.php",
+ Expected: []*DiffTreeRecord{
+ {
+ Status: "renamed",
+ Score: 87,
+ HeadPath: "Database/Seeders/AcademicDatabaseSeeder.php",
+ BasePath: "Database/Seeders/AdminDatabaseSeeder.php",
+ HeadMode: git.EntryModeBlob,
+ BaseMode: git.EntryModeBlob,
+ HeadBlobID: "97248f79a90aaf81fe7fd74b33c1cb182dd41783",
+ BaseBlobID: "c8a055cfb45cd39747292983ad1797ceab40f5b1",
+ },
+ },
+ },
+ {
+ Name: "no changes",
+ GitOutput: ``,
+ Expected: []*DiffTreeRecord{},
+ },
+ {
+ Name: "multiple changes",
+ GitOutput: ":000000 100644 0000000000000000000000000000000000000000 db736b44533a840981f1f17b7029d0f612b69550 A\tHttp/Controllers/ClassController.php\n" +
+ ":100644 000000 9a4d2344d4d0145db7c91b3f3e123c74367d4ef4 0000000000000000000000000000000000000000 D\tHttp/Controllers/ClassesController.php\n" +
+ ":100644 100644 f060d6aede65d423f49e7dc248dfa0d8835ef920 b82c8e39a3602dedadb44669956d6eb5b6a7cc86 M\tHttp/Controllers/ProgramDirectorController.php\n",
+ Expected: []*DiffTreeRecord{
+ {
+ Status: "added",
+ HeadPath: "Http/Controllers/ClassController.php",
+ BasePath: "Http/Controllers/ClassController.php",
+ HeadMode: git.EntryModeBlob,
+ BaseMode: git.EntryModeNoEntry,
+ HeadBlobID: "db736b44533a840981f1f17b7029d0f612b69550",
+ BaseBlobID: "0000000000000000000000000000000000000000",
+ },
+ {
+ Status: "deleted",
+ HeadPath: "Http/Controllers/ClassesController.php",
+ BasePath: "Http/Controllers/ClassesController.php",
+ HeadMode: git.EntryModeNoEntry,
+ BaseMode: git.EntryModeBlob,
+ HeadBlobID: "0000000000000000000000000000000000000000",
+ BaseBlobID: "9a4d2344d4d0145db7c91b3f3e123c74367d4ef4",
+ },
+ {
+ Status: "modified",
+ HeadPath: "Http/Controllers/ProgramDirectorController.php",
+ BasePath: "Http/Controllers/ProgramDirectorController.php",
+ HeadMode: git.EntryModeBlob,
+ BaseMode: git.EntryModeBlob,
+ HeadBlobID: "b82c8e39a3602dedadb44669956d6eb5b6a7cc86",
+ BaseBlobID: "f060d6aede65d423f49e7dc248dfa0d8835ef920",
+ },
+ },
+ },
+ {
+ Name: "spaces in file path",
+ GitOutput: ":000000 100644 0000000000000000000000000000000000000000 db736b44533a840981f1f17b7029d0f612b69550 A\tHttp /Controllers/Class Controller.php\n" +
+ ":100644 000000 9a4d2344d4d0145db7c91b3f3e123c74367d4ef4 0000000000000000000000000000000000000000 D\tHttp/Cont rollers/Classes Controller.php\n" +
+ ":100644 100644 f060d6aede65d423f49e7dc248dfa0d8835ef920 b82c8e39a3602dedadb44669956d6eb5b6a7cc86 R010\tHttp/Controllers/Program Director Controller.php\tHttp/Cont rollers/ProgramDirectorController.php\n",
+ Expected: []*DiffTreeRecord{
+ {
+ Status: "added",
+ HeadPath: "Http /Controllers/Class Controller.php",
+ BasePath: "Http /Controllers/Class Controller.php",
+ HeadMode: git.EntryModeBlob,
+ BaseMode: git.EntryModeNoEntry,
+ HeadBlobID: "db736b44533a840981f1f17b7029d0f612b69550",
+ BaseBlobID: "0000000000000000000000000000000000000000",
+ },
+ {
+ Status: "deleted",
+ HeadPath: "Http/Cont rollers/Classes Controller.php",
+ BasePath: "Http/Cont rollers/Classes Controller.php",
+ HeadMode: git.EntryModeNoEntry,
+ BaseMode: git.EntryModeBlob,
+ HeadBlobID: "0000000000000000000000000000000000000000",
+ BaseBlobID: "9a4d2344d4d0145db7c91b3f3e123c74367d4ef4",
+ },
+ {
+ Status: "renamed",
+ Score: 10,
+ HeadPath: "Http/Cont rollers/ProgramDirectorController.php",
+ BasePath: "Http/Controllers/Program Director Controller.php",
+ HeadMode: git.EntryModeBlob,
+ BaseMode: git.EntryModeBlob,
+ HeadBlobID: "b82c8e39a3602dedadb44669956d6eb5b6a7cc86",
+ BaseBlobID: "f060d6aede65d423f49e7dc248dfa0d8835ef920",
+ },
+ },
+ },
+ {
+ Name: "file type changed",
+ GitOutput: ":100644 120000 344e0ca8aa791cc4164fb0ea645f334fd40d00f0 a7c2973de00bfdc6ca51d315f401b5199fe01dc3 T\twebpack.mix.js",
+ Expected: []*DiffTreeRecord{
+ {
+ Status: "typechanged",
+ HeadPath: "webpack.mix.js",
+ BasePath: "webpack.mix.js",
+ HeadMode: git.EntryModeSymlink,
+ BaseMode: git.EntryModeBlob,
+ HeadBlobID: "a7c2973de00bfdc6ca51d315f401b5199fe01dc3",
+ BaseBlobID: "344e0ca8aa791cc4164fb0ea645f334fd40d00f0",
+ },
+ },
+ },
+ }
+
+ for _, tt := range test {
+ t.Run(tt.Name, func(t *testing.T) {
+ entries, err := parseGitDiffTree(strings.NewReader(tt.GitOutput))
+ assert.NoError(t, err)
+ assert.Equal(t, tt.Expected, entries)
+ })
+ }
+}
+
+func TestGitDiffTreeErrors(t *testing.T) {
+ test := []struct {
+ Name string
+ RepoPath string
+ BaseSha string
+ HeadSha string
+ }{
+ {
+ Name: "head doesn't exist",
+ RepoPath: "../../modules/git/tests/repos/repo5_pulls",
+ BaseSha: "f32b0a9dfd09a60f616f29158f772cedd89942d2",
+ HeadSha: "asdfasdfasdf",
+ },
+ {
+ Name: "base doesn't exist",
+ RepoPath: "../../modules/git/tests/repos/repo5_pulls",
+ BaseSha: "asdfasdfasdf",
+ HeadSha: "f32b0a9dfd09a60f616f29158f772cedd89942d2",
+ },
+ {
+ Name: "head not set",
+ RepoPath: "../../modules/git/tests/repos/repo5_pulls",
+ BaseSha: "f32b0a9dfd09a60f616f29158f772cedd89942d2",
+ },
+ }
+
+ for _, tt := range test {
+ t.Run(tt.Name, func(t *testing.T) {
+ gitRepo, err := git.OpenRepository(git.DefaultContext, tt.RepoPath)
+			require.NoError(t, err)
+ defer gitRepo.Close()
+
+ diffPaths, err := GetDiffTree(db.DefaultContext, gitRepo, true, tt.BaseSha, tt.HeadSha)
+ assert.Error(t, err)
+ assert.Nil(t, diffPaths)
+ })
+ }
+}
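For reference, each raw `git diff-tree -r` line exercised above has the shape `:<base mode> <head mode> <base blob> <head blob> <status[score]><TAB><path>[<TAB><new path>]`. The following self-contained sketch only illustrates the format the tests assert against; it is not Gitea's parseGitDiffTree, and the helper names and simplified status table are assumptions of the sketch.

package main

import (
	"fmt"
	"strconv"
	"strings"
)

type rawRecord struct {
	Status                 string
	Score                  uint8
	BaseMode, HeadMode     string
	BaseBlobID, HeadBlobID string
	BasePath, HeadPath     string
}

var statusNames = map[byte]string{
	'A': "added", 'C': "copied", 'D': "deleted",
	'M': "modified", 'R': "renamed", 'T': "typechanged",
}

// parseRawDiffTreeLine maps one ":<modes> <blobs> <status>\t<paths>" line
// onto the fields asserted by the table-driven test above.
func parseRawDiffTreeLine(line string) (rawRecord, error) {
	meta, paths, ok := strings.Cut(strings.TrimPrefix(line, ":"), "\t")
	if !ok {
		return rawRecord{}, fmt.Errorf("malformed diff-tree line: %q", line)
	}
	fields := strings.Fields(meta) // base mode, head mode, base blob, head blob, status[score]
	if len(fields) != 5 {
		return rawRecord{}, fmt.Errorf("expected 5 metadata fields, got %d", len(fields))
	}
	rec := rawRecord{
		BaseMode: fields[0], HeadMode: fields[1],
		BaseBlobID: fields[2], HeadBlobID: fields[3],
		Status: statusNames[fields[4][0]],
	}
	if len(fields[4]) > 1 { // e.g. "R087": a similarity score follows the status letter
		score, err := strconv.Atoi(fields[4][1:])
		if err != nil {
			return rawRecord{}, err
		}
		rec.Score = uint8(score)
	}
	// renames/copies carry "<old path>\t<new path>"; everything else has a single path
	if basePath, headPath, renamed := strings.Cut(paths, "\t"); renamed {
		rec.BasePath, rec.HeadPath = basePath, headPath
	} else {
		rec.BasePath, rec.HeadPath = paths, paths
	}
	return rec, nil
}

func main() {
	rec, err := parseRawDiffTreeLine(":100644 100644 c8a055cfb45cd39747292983ad1797ceab40f5b1 97248f79a90aaf81fe7fd74b33c1cb182dd41783 R087\told/name.php\tnew/name.php")
	if err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", rec) // Status:renamed Score:87 ...
}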
diff --git a/services/gitdiff/gitdiff.go b/services/gitdiff/gitdiff.go
index f046e59678..b9781cf8d0 100644
--- a/services/gitdiff/gitdiff.go
+++ b/services/gitdiff/gitdiff.go
@@ -31,6 +31,7 @@ import (
"code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/translation"
+ "code.gitea.io/gitea/modules/util"
"github.com/sergi/go-diff/diffmatchpatch"
stdcharset "golang.org/x/net/html/charset"
@@ -75,12 +76,12 @@ const (
// DiffLine represents a line difference in a DiffSection.
type DiffLine struct {
- LeftIdx int
- RightIdx int
- Match int
+ LeftIdx int // line number, 1-based
+ RightIdx int // line number, 1-based
+	Match    int // the matched line index. -1: no match. 0: plain line, no match needed. >0: for add/del lines, the "Lines" slice index of the counterpart line
Type DiffLineType
Content string
- Comments []*issues_model.Comment
+ Comments issues_model.CommentList // related PR code comments
SectionInfo *DiffLineSectionInfo
}
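A rough illustration of the Match semantics described above, with made-up values and trimmed-down stand-in types rather than the real DiffLine/DiffSection:

package main

import "fmt"

// Trimmed-down stand-ins for DiffLine/DiffSection, for illustration only.
type line struct {
	Type    string // "plain", "add" or "del"
	Match   int    // -1: no match, 0: plain, >0: index of the counterpart line
	Content string
}

func main() {
	section := []line{
		{Type: "plain", Match: 0, Content: " unchanged"},
		{Type: "del", Match: 2, Content: "-old wording"},
		{Type: "add", Match: 1, Content: "+new wording"},
		{Type: "add", Match: -1, Content: "+a brand new line"},
	}
	for _, l := range section {
		if l.Type == "plain" {
			continue
		}
		if l.Match > 0 {
			fmt.Printf("%q is rendered against %q\n", l.Content, section[l.Match].Content)
		} else {
			fmt.Printf("%q has no counterpart line\n", l.Content)
		}
	}
}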
@@ -95,9 +96,18 @@ type DiffLineSectionInfo struct {
RightHunkSize int
}
+// DiffHTMLOperation is the HTML version of diffmatchpatch.Diff
+type DiffHTMLOperation struct {
+ Type diffmatchpatch.Operation
+ HTML template.HTML
+}
+
// BlobExcerptChunkSize represent max lines of excerpt
const BlobExcerptChunkSize = 20
+// MaxDiffHighlightEntireFileSize is the maximum file size that will be highlighted with "entire file diff"
+const MaxDiffHighlightEntireFileSize = 1 * 1024 * 1024
+
// GetType returns the type of DiffLine.
func (d *DiffLine) GetType() int {
return int(d.Type)
@@ -112,8 +122,9 @@ func (d *DiffLine) GetHTMLDiffLineType() string {
return "del"
case DiffLineSection:
return "tag"
+ default:
+ return "same"
}
- return "same"
}
// CanComment returns whether a line can get commented
@@ -192,44 +203,20 @@ func getLineContent(content string, locale translation.Locale) DiffInline {
type DiffSection struct {
file *DiffFile
FileName string
- Name string
Lines []*DiffLine
}
-var (
- addedCodePrefix = []byte(`<span class="added-code">`)
- removedCodePrefix = []byte(`<span class="removed-code">`)
- codeTagSuffix = []byte(`</span>`)
-)
-
-func diffToHTML(lineWrapperTags []string, diffs []diffmatchpatch.Diff, lineType DiffLineType) string {
- buf := bytes.NewBuffer(nil)
- // restore the line wrapper tags <span class="line"> and <span class="cl">, if necessary
- for _, tag := range lineWrapperTags {
- buf.WriteString(tag)
- }
- for _, diff := range diffs {
- switch {
- case diff.Type == diffmatchpatch.DiffEqual:
- buf.WriteString(diff.Text)
- case diff.Type == diffmatchpatch.DiffInsert && lineType == DiffLineAdd:
- buf.Write(addedCodePrefix)
- buf.WriteString(diff.Text)
- buf.Write(codeTagSuffix)
- case diff.Type == diffmatchpatch.DiffDelete && lineType == DiffLineDel:
- buf.Write(removedCodePrefix)
- buf.WriteString(diff.Text)
- buf.Write(codeTagSuffix)
- }
- }
- for range lineWrapperTags {
- buf.WriteString("</span>")
+func (diffSection *DiffSection) GetLine(idx int) *DiffLine {
+ if idx <= 0 {
+ return nil
}
- return buf.String()
+ return diffSection.Lines[idx]
}
// GetLine gets a specific line by type (add or del) and file line number
-func (diffSection *DiffSection) GetLine(lineType DiffLineType, idx int) *DiffLine {
+// This legacy algorithm is not quite right.
+// Now that DiffLine has the "Match" field, which is always correct, the new GetLine above uses it instead.
+func (diffSection *DiffSection) getLineLegacy(lineType DiffLineType, idx int) *DiffLine { //nolint:unused
var (
difference = 0
addCount = 0
@@ -271,10 +258,10 @@ LOOP:
return nil
}
-var diffMatchPatch = diffmatchpatch.New()
-
-func init() {
- diffMatchPatch.DiffEditCost = 100
+func defaultDiffMatchPatch() *diffmatchpatch.DiffMatchPatch {
+ dmp := diffmatchpatch.New()
+ dmp.DiffEditCost = 100
+ return dmp
}
// DiffInline is a struct that has a content and escape status
@@ -283,97 +270,125 @@ type DiffInline struct {
Content template.HTML
}
-// DiffInlineWithUnicodeEscape makes a DiffInline with hidden unicode characters escaped
+// DiffInlineWithUnicodeEscape makes a DiffInline with hidden Unicode characters escaped
func DiffInlineWithUnicodeEscape(s template.HTML, locale translation.Locale) DiffInline {
status, content := charset.EscapeControlHTML(s, locale)
return DiffInline{EscapeStatus: status, Content: content}
}
-// DiffInlineWithHighlightCode makes a DiffInline with code highlight and hidden unicode characters escaped
-func DiffInlineWithHighlightCode(fileName, language, code string, locale translation.Locale) DiffInline {
- highlighted, _ := highlight.Code(fileName, language, code)
- status, content := charset.EscapeControlHTML(highlighted, locale)
- return DiffInline{EscapeStatus: status, Content: content}
-}
-
-// GetComputedInlineDiffFor computes inline diff for the given line.
-func (diffSection *DiffSection) GetComputedInlineDiffFor(diffLine *DiffLine, locale translation.Locale) DiffInline {
+func (diffSection *DiffSection) getLineContentForRender(lineIdx int, diffLine *DiffLine, fileLanguage string, highlightLines map[int]template.HTML) template.HTML {
+ h, ok := highlightLines[lineIdx-1]
+ if ok {
+ return h
+ }
+ if diffLine.Content == "" {
+ return ""
+ }
if setting.Git.DisableDiffHighlight {
- return getLineContent(diffLine.Content[1:], locale)
+ return template.HTML(html.EscapeString(diffLine.Content[1:]))
}
+ h, _ = highlight.Code(diffSection.FileName, fileLanguage, diffLine.Content[1:])
+ return h
+}
- var (
- compareDiffLine *DiffLine
- diff1 string
- diff2 string
- )
-
- language := ""
+func (diffSection *DiffSection) getDiffLineForRender(diffLineType DiffLineType, leftLine, rightLine *DiffLine, locale translation.Locale) DiffInline {
+ var fileLanguage string
+ var highlightedLeftLines, highlightedRightLines map[int]template.HTML
+ // when a "diff section" is manually prepared by ExcerptBlob, it doesn't have "file" information
if diffSection.file != nil {
- language = diffSection.file.Language
+ fileLanguage = diffSection.file.Language
+ highlightedLeftLines, highlightedRightLines = diffSection.file.highlightedLeftLines, diffSection.file.highlightedRightLines
}
+ var lineHTML template.HTML
+ hcd := newHighlightCodeDiff()
+ if diffLineType == DiffLinePlain {
+ // left and right are the same, no need to do line-level diff
+ if leftLine != nil {
+ lineHTML = diffSection.getLineContentForRender(leftLine.LeftIdx, leftLine, fileLanguage, highlightedLeftLines)
+ } else if rightLine != nil {
+ lineHTML = diffSection.getLineContentForRender(rightLine.RightIdx, rightLine, fileLanguage, highlightedRightLines)
+ }
+ } else {
+ var diff1, diff2 template.HTML
+ if leftLine != nil {
+ diff1 = diffSection.getLineContentForRender(leftLine.LeftIdx, leftLine, fileLanguage, highlightedLeftLines)
+ }
+ if rightLine != nil {
+ diff2 = diffSection.getLineContentForRender(rightLine.RightIdx, rightLine, fileLanguage, highlightedRightLines)
+ }
+ if diff1 != "" && diff2 != "" {
+ // if only some parts of a line are changed, highlight these changed parts as "deleted/added".
+ lineHTML = hcd.diffLineWithHighlight(diffLineType, diff1, diff2)
+ } else {
+ // if left is empty or right is empty (a line is fully deleted or added), then we do not need to diff anymore.
+ // the tmpl code already adds background colors for these cases.
+ lineHTML = util.Iif(diffLineType == DiffLineDel, diff1, diff2)
+ }
+ }
+ return DiffInlineWithUnicodeEscape(lineHTML, locale)
+}
+
+// GetComputedInlineDiffFor computes inline diff for the given line.
+func (diffSection *DiffSection) GetComputedInlineDiffFor(diffLine *DiffLine, locale translation.Locale) DiffInline {
// try to find equivalent diff line. ignore, otherwise
switch diffLine.Type {
case DiffLineSection:
return getLineContent(diffLine.Content[1:], locale)
case DiffLineAdd:
- compareDiffLine = diffSection.GetLine(DiffLineDel, diffLine.RightIdx)
- if compareDiffLine == nil {
- return DiffInlineWithHighlightCode(diffSection.FileName, language, diffLine.Content[1:], locale)
- }
- diff1 = compareDiffLine.Content
- diff2 = diffLine.Content
+ compareDiffLine := diffSection.GetLine(diffLine.Match)
+ return diffSection.getDiffLineForRender(DiffLineAdd, compareDiffLine, diffLine, locale)
case DiffLineDel:
- compareDiffLine = diffSection.GetLine(DiffLineAdd, diffLine.LeftIdx)
- if compareDiffLine == nil {
- return DiffInlineWithHighlightCode(diffSection.FileName, language, diffLine.Content[1:], locale)
- }
- diff1 = diffLine.Content
- diff2 = compareDiffLine.Content
- default:
- if strings.IndexByte(" +-", diffLine.Content[0]) > -1 {
- return DiffInlineWithHighlightCode(diffSection.FileName, language, diffLine.Content[1:], locale)
- }
- return DiffInlineWithHighlightCode(diffSection.FileName, language, diffLine.Content, locale)
+ compareDiffLine := diffSection.GetLine(diffLine.Match)
+ return diffSection.getDiffLineForRender(DiffLineDel, diffLine, compareDiffLine, locale)
+ default: // Plain
+		// TODO: there was an "if" check: `if strings.IndexByte(" +-", diffLine.Content[0]) > -1 { ... } else { ... }`
+		// it is unclear why that check was needed; the condition appears to always be true, so the code has been simplified
+ return diffSection.getDiffLineForRender(DiffLinePlain, nil, diffLine, locale)
}
-
- hcd := newHighlightCodeDiff()
- diffRecord := hcd.diffWithHighlight(diffSection.FileName, language, diff1[1:], diff2[1:])
- // it seems that Gitea doesn't need the line wrapper of Chroma, so do not add them back
- // if the line wrappers are still needed in the future, it can be added back by "diffToHTML(hcd.lineWrapperTags. ...)"
- diffHTML := diffToHTML(nil, diffRecord, diffLine.Type)
- return DiffInlineWithUnicodeEscape(template.HTML(diffHTML), locale)
}
// DiffFile represents a file diff.
type DiffFile struct {
- Name string
- NameHash string
- OldName string
- Index int
- Addition, Deletion int
- Type DiffFileType
- IsCreated bool
- IsDeleted bool
- IsBin bool
- IsLFSFile bool
- IsRenamed bool
- IsAmbiguous bool
- Sections []*DiffSection
- IsIncomplete bool
- IsIncompleteLineTooLong bool
- IsProtected bool
- IsGenerated bool
- IsVendored bool
+	// only used internally to parse ambiguous filenames
+ isAmbiguous bool
+
+ // basic fields (parsed from diff result)
+ Name string
+ NameHash string
+ OldName string
+ Addition int
+ Deletion int
+ Type DiffFileType
+ Mode string
+ OldMode string
+ IsCreated bool
+ IsDeleted bool
+ IsBin bool
+ IsLFSFile bool
+ IsRenamed bool
+ IsSubmodule bool
+	// basic fields, but used for render purposes only
+ Sections []*DiffSection
+ IsIncomplete bool
+ IsIncompleteLineTooLong bool
+
+	// will be filled by the extra loop in GetDiffForRender
+ Language string
+ IsGenerated bool
+ IsVendored bool
+	SubmoduleDiffInfo *SubmoduleDiffInfo // if IsSubmodule==true, then there must be a SubmoduleDiffInfo
+
+ // will be filled by route handler
+ IsProtected bool
+
+ // will be filled by SyncUserSpecificDiff
IsViewed bool // User specific
HasChangedSinceLastReview bool // User specific
- Language string
- Mode string
- OldMode string
- IsSubmodule bool // if IsSubmodule==true, then there must be a SubmoduleDiffInfo
- SubmoduleDiffInfo *SubmoduleDiffInfo
+	// for render purposes only, will be filled by the extra loop in GetDiffForRender
+ highlightedLeftLines map[int]template.HTML
+ highlightedRightLines map[int]template.HTML
}
// GetType returns type of diff file.
@@ -381,18 +396,30 @@ func (diffFile *DiffFile) GetType() int {
return int(diffFile.Type)
}
-// GetTailSection creates a fake DiffLineSection if the last section is not the end of the file
-func (diffFile *DiffFile) GetTailSection(gitRepo *git.Repository, leftCommit, rightCommit *git.Commit) *DiffSection {
- if len(diffFile.Sections) == 0 || diffFile.Type != DiffFileChange || diffFile.IsBin || diffFile.IsLFSFile {
- return nil
- }
+type DiffLimitedContent struct {
+ LeftContent, RightContent *limitByteWriter
+}
+// GetTailSectionAndLimitedContent creates a fake DiffLineSection if the last section is not the end of the file, and also collects the size-limited old/new file contents used for whole-file highlighting
+func (diffFile *DiffFile) GetTailSectionAndLimitedContent(leftCommit, rightCommit *git.Commit) (_ *DiffSection, diffLimitedContent DiffLimitedContent) {
+ var leftLineCount, rightLineCount int
+ diffLimitedContent = DiffLimitedContent{}
+ if diffFile.IsBin || diffFile.IsLFSFile {
+ return nil, diffLimitedContent
+ }
+ if (diffFile.Type == DiffFileDel || diffFile.Type == DiffFileChange) && leftCommit != nil {
+ leftLineCount, diffLimitedContent.LeftContent = getCommitFileLineCountAndLimitedContent(leftCommit, diffFile.OldName)
+ }
+ if (diffFile.Type == DiffFileAdd || diffFile.Type == DiffFileChange) && rightCommit != nil {
+ rightLineCount, diffLimitedContent.RightContent = getCommitFileLineCountAndLimitedContent(rightCommit, diffFile.OldName)
+ }
+ if len(diffFile.Sections) == 0 || diffFile.Type != DiffFileChange {
+ return nil, diffLimitedContent
+ }
lastSection := diffFile.Sections[len(diffFile.Sections)-1]
lastLine := lastSection.Lines[len(lastSection.Lines)-1]
- leftLineCount := getCommitFileLineCount(leftCommit, diffFile.Name)
- rightLineCount := getCommitFileLineCount(rightCommit, diffFile.Name)
if leftLineCount <= lastLine.LeftIdx || rightLineCount <= lastLine.RightIdx {
- return nil
+ return nil, diffLimitedContent
}
tailDiffLine := &DiffLine{
Type: DiffLineSection,
@@ -406,7 +433,7 @@ func (diffFile *DiffFile) GetTailSection(gitRepo *git.Repository, leftCommit, ri
},
}
tailSection := &DiffSection{FileName: diffFile.Name, Lines: []*DiffLine{tailDiffLine}}
- return tailSection
+ return tailSection, diffLimitedContent
}
// GetDiffFileName returns the name of the diff file, or its old name in case it was deleted
@@ -438,26 +465,37 @@ func (diffFile *DiffFile) ModeTranslationKey(mode string) string {
}
}
-func getCommitFileLineCount(commit *git.Commit, filePath string) int {
+type limitByteWriter struct {
+ buf bytes.Buffer
+ limit int
+}
+
+func (l *limitByteWriter) Write(p []byte) (n int, err error) {
+ if l.buf.Len()+len(p) > l.limit {
+ p = p[:l.limit-l.buf.Len()]
+ }
+ return l.buf.Write(p)
+}
+
+func getCommitFileLineCountAndLimitedContent(commit *git.Commit, filePath string) (lineCount int, limitWriter *limitByteWriter) {
blob, err := commit.GetBlobByPath(filePath)
if err != nil {
- return 0
+ return 0, nil
}
- lineCount, err := blob.GetBlobLineCount()
+ w := &limitByteWriter{limit: MaxDiffHighlightEntireFileSize + 1}
+ lineCount, err = blob.GetBlobLineCount(w)
if err != nil {
- return 0
+ return 0, nil
}
- return lineCount
+ return lineCount, w
}
// Diff represents a difference between two git trees.
type Diff struct {
- Start, End string
- NumFiles int
- TotalAddition, TotalDeletion int
- Files []*DiffFile
- IsIncomplete bool
- NumViewedFiles int // user-specific
+ Start, End string
+ Files []*DiffFile
+ IsIncomplete bool
+ NumViewedFiles int // user-specific
}
// LoadComments loads comments into each line
@@ -528,13 +566,13 @@ parsingLoop:
}
if maxFiles > -1 && len(diff.Files) >= maxFiles {
- lastFile := createDiffFile(diff, line)
+ lastFile := createDiffFile(line)
diff.End = lastFile.Name
diff.IsIncomplete = true
break parsingLoop
}
- curFile = createDiffFile(diff, line)
+ curFile = createDiffFile(line)
if skipping {
if curFile.Name != skipToFile {
line, err = skipToNextDiffHead(input)
@@ -617,28 +655,28 @@ parsingLoop:
case strings.HasPrefix(line, "rename from "):
curFile.IsRenamed = true
curFile.Type = DiffFileRename
- if curFile.IsAmbiguous {
+ if curFile.isAmbiguous {
curFile.OldName = prepareValue(line, "rename from ")
}
case strings.HasPrefix(line, "rename to "):
curFile.IsRenamed = true
curFile.Type = DiffFileRename
- if curFile.IsAmbiguous {
+ if curFile.isAmbiguous {
curFile.Name = prepareValue(line, "rename to ")
- curFile.IsAmbiguous = false
+ curFile.isAmbiguous = false
}
case strings.HasPrefix(line, "copy from "):
curFile.IsRenamed = true
curFile.Type = DiffFileCopy
- if curFile.IsAmbiguous {
+ if curFile.isAmbiguous {
curFile.OldName = prepareValue(line, "copy from ")
}
case strings.HasPrefix(line, "copy to "):
curFile.IsRenamed = true
curFile.Type = DiffFileCopy
- if curFile.IsAmbiguous {
+ if curFile.isAmbiguous {
curFile.Name = prepareValue(line, "copy to ")
- curFile.IsAmbiguous = false
+ curFile.isAmbiguous = false
}
case strings.HasPrefix(line, "new file"):
curFile.Type = DiffFileAdd
@@ -665,7 +703,7 @@ parsingLoop:
curFile.IsBin = true
case strings.HasPrefix(line, "--- "):
// Handle ambiguous filenames
- if curFile.IsAmbiguous {
+ if curFile.isAmbiguous {
// The shortest string that can end up here is:
// "--- a\t\n" without the quotes.
// This line has a len() of 7 but doesn't contain a oldName.
@@ -683,7 +721,7 @@ parsingLoop:
// Otherwise do nothing with this line
case strings.HasPrefix(line, "+++ "):
// Handle ambiguous filenames
- if curFile.IsAmbiguous {
+ if curFile.isAmbiguous {
if len(line) > 6 && line[4] == 'b' {
curFile.Name = line[6 : len(line)-1]
if line[len(line)-2] == '\t' {
@@ -695,12 +733,10 @@ parsingLoop:
} else {
curFile.Name = curFile.OldName
}
- curFile.IsAmbiguous = false
+ curFile.isAmbiguous = false
}
// Otherwise do nothing with this line, but now switch to parsing hunks
lineBytes, isFragment, err := parseHunks(ctx, curFile, maxLines, maxLineCharacters, input)
- diff.TotalAddition += curFile.Addition
- diff.TotalDeletion += curFile.Deletion
if err != nil {
if err != io.EOF {
return diff, err
@@ -773,7 +809,6 @@ parsingLoop:
}
}
- diff.NumFiles = len(diff.Files)
return diff, nil
}
@@ -1011,7 +1046,7 @@ func parseHunks(ctx context.Context, curFile *DiffFile, maxLines, maxLineCharact
}
}
-func createDiffFile(diff *Diff, line string) *DiffFile {
+func createDiffFile(line string) *DiffFile {
// The a/ and b/ filenames are the same unless rename/copy is involved.
// Especially, even for a creation or a deletion, /dev/null is not used
// in place of the a/ or b/ filenames.
@@ -1022,12 +1057,11 @@ func createDiffFile(diff *Diff, line string) *DiffFile {
//
// Path names are quoted if necessary.
//
- // This means that you should always be able to determine the file name even when there
+ // This means that you should always be able to determine the file name even when
// there is potential ambiguity...
//
// but we can be simpler with our heuristics by just forcing git to prefix things nicely
curFile := &DiffFile{
- Index: len(diff.Files) + 1,
Type: DiffFileChange,
Sections: make([]*DiffSection, 0, 10),
}
@@ -1039,7 +1073,7 @@ func createDiffFile(diff *Diff, line string) *DiffFile {
curFile.OldName, oldNameAmbiguity = readFileName(rd)
curFile.Name, newNameAmbiguity = readFileName(rd)
if oldNameAmbiguity && newNameAmbiguity {
- curFile.IsAmbiguous = true
+ curFile.isAmbiguous = true
// OK we should bet that the oldName and the newName are the same if they can be made to be same
// So we need to start again ...
if (len(line)-len(cmdDiffHead)-1)%2 == 0 {
@@ -1104,55 +1138,48 @@ type DiffOptions struct {
MaxFiles int
WhitespaceBehavior git.TrustedCmdArgs
DirectComparison bool
- FileOnly bool
}
-// GetDiff builds a Diff between two commits of a repository.
+func guessBeforeCommitForDiff(gitRepo *git.Repository, beforeCommitID string, afterCommit *git.Commit) (actualBeforeCommit *git.Commit, actualBeforeCommitID git.ObjectID, err error) {
+ commitObjectFormat := afterCommit.ID.Type()
+ isBeforeCommitIDEmpty := beforeCommitID == "" || beforeCommitID == commitObjectFormat.EmptyObjectID().String()
+
+ if isBeforeCommitIDEmpty && afterCommit.ParentCount() == 0 {
+ actualBeforeCommitID = commitObjectFormat.EmptyTree()
+ } else {
+ if isBeforeCommitIDEmpty {
+ actualBeforeCommit, err = afterCommit.Parent(0)
+ } else {
+ actualBeforeCommit, err = gitRepo.GetCommit(beforeCommitID)
+ }
+ if err != nil {
+ return nil, nil, err
+ }
+ actualBeforeCommitID = actualBeforeCommit.ID
+ }
+ return actualBeforeCommit, actualBeforeCommitID, nil
+}
+
+// getDiffBasic builds a Diff between two commits of a repository.
// Passing the empty string as beforeCommitID returns a diff from the parent commit.
// The whitespaceBehavior is either an empty string or a git flag
-func GetDiff(ctx context.Context, gitRepo *git.Repository, opts *DiffOptions, files ...string) (*Diff, error) {
+// The returned beforeCommit may be nil if afterCommit has no parent commit
+func getDiffBasic(ctx context.Context, gitRepo *git.Repository, opts *DiffOptions, files ...string) (_ *Diff, beforeCommit, afterCommit *git.Commit, err error) {
repoPath := gitRepo.Path
- var beforeCommit *git.Commit
- commit, err := gitRepo.GetCommit(opts.AfterCommitID)
+ afterCommit, err = gitRepo.GetCommit(opts.AfterCommitID)
if err != nil {
- return nil, err
+ return nil, nil, nil, err
}
- cmdCtx, cmdCancel := context.WithCancel(ctx)
- defer cmdCancel()
-
- cmdDiff := git.NewCommand(cmdCtx)
- objectFormat, err := gitRepo.GetObjectFormat()
+ beforeCommit, beforeCommitID, err := guessBeforeCommitForDiff(gitRepo, opts.BeforeCommitID, afterCommit)
if err != nil {
- return nil, err
+ return nil, nil, nil, err
}
- if (len(opts.BeforeCommitID) == 0 || opts.BeforeCommitID == objectFormat.EmptyObjectID().String()) && commit.ParentCount() == 0 {
- cmdDiff.AddArguments("diff", "--src-prefix=\\a/", "--dst-prefix=\\b/", "-M").
- AddArguments(opts.WhitespaceBehavior...).
- AddDynamicArguments(objectFormat.EmptyTree().String()).
- AddDynamicArguments(opts.AfterCommitID)
- } else {
- actualBeforeCommitID := opts.BeforeCommitID
- if len(actualBeforeCommitID) == 0 {
- parentCommit, err := commit.Parent(0)
- if err != nil {
- return nil, err
- }
- actualBeforeCommitID = parentCommit.ID.String()
- }
-
- cmdDiff.AddArguments("diff", "--src-prefix=\\a/", "--dst-prefix=\\b/", "-M").
- AddArguments(opts.WhitespaceBehavior...).
- AddDynamicArguments(actualBeforeCommitID, opts.AfterCommitID)
- opts.BeforeCommitID = actualBeforeCommitID
-
- beforeCommit, err = gitRepo.GetCommit(opts.BeforeCommitID)
- if err != nil {
- return nil, err
- }
- }
+ cmdDiff := git.NewCommand().
+ AddArguments("diff", "--src-prefix=\\a/", "--dst-prefix=\\b/", "-M").
+ AddArguments(opts.WhitespaceBehavior...)
// In git 2.31, git diff learned --skip-to which we can use to shortcut skip to file
// so if we are using at least this version of git we don't have to tell ParsePatch to do
@@ -1163,8 +1190,12 @@ func GetDiff(ctx context.Context, gitRepo *git.Repository, opts *DiffOptions, fi
parsePatchSkipToFile = ""
}
+ cmdDiff.AddDynamicArguments(beforeCommitID.String(), opts.AfterCommitID)
cmdDiff.AddDashesAndList(files...)
+ cmdCtx, cmdCancel := context.WithCancel(ctx)
+ defer cmdCancel()
+
reader, writer := io.Pipe()
defer func() {
_ = reader.Close()
@@ -1173,7 +1204,7 @@ func GetDiff(ctx context.Context, gitRepo *git.Repository, opts *DiffOptions, fi
go func() {
stderr := &bytes.Buffer{}
- if err := cmdDiff.Run(&git.RunOpts{
+ if err := cmdDiff.Run(cmdCtx, &git.RunOpts{
Timeout: time.Duration(setting.Git.Timeout.Default) * time.Second,
Dir: repoPath,
Stdout: writer,
@@ -1189,12 +1220,25 @@ func GetDiff(ctx context.Context, gitRepo *git.Repository, opts *DiffOptions, fi
// Ensure the git process is killed if it didn't exit already
cmdCancel()
if err != nil {
- return nil, fmt.Errorf("unable to ParsePatch: %w", err)
+ return nil, nil, nil, fmt.Errorf("unable to ParsePatch: %w", err)
}
diff.Start = opts.SkipTo
+ return diff, beforeCommit, afterCommit, nil
+}
+
+func GetDiffForAPI(ctx context.Context, gitRepo *git.Repository, opts *DiffOptions, files ...string) (*Diff, error) {
+ diff, _, _, err := getDiffBasic(ctx, gitRepo, opts, files...)
+ return diff, err
+}
- checker, deferable := gitRepo.CheckAttributeReader(opts.AfterCommitID)
- defer deferable()
+func GetDiffForRender(ctx context.Context, gitRepo *git.Repository, opts *DiffOptions, files ...string) (*Diff, error) {
+ diff, beforeCommit, afterCommit, err := getDiffBasic(ctx, gitRepo, opts, files...)
+ if err != nil {
+ return nil, err
+ }
+
+ checker, deferrable := gitRepo.CheckAttributeReader(opts.AfterCommitID)
+ defer deferrable()
for _, diffFile := range diff.Files {
isVendored := optional.None[bool]()
@@ -1209,12 +1253,14 @@ func GetDiff(ctx context.Context, gitRepo *git.Repository, opts *DiffOptions, fi
if language.Has() {
diffFile.Language = language.Value()
}
+ } else {
+			checker = nil // CheckPath failed, it's not possible to "check" anymore
}
}
// Populate Submodule URLs
if diffFile.SubmoduleDiffInfo != nil {
- diffFile.SubmoduleDiffInfo.PopulateURL(diffFile, beforeCommit, commit)
+ diffFile.SubmoduleDiffInfo.PopulateURL(diffFile, beforeCommit, afterCommit)
}
if !isVendored.Has() {
@@ -1226,76 +1272,77 @@ func GetDiff(ctx context.Context, gitRepo *git.Repository, opts *DiffOptions, fi
isGenerated = optional.Some(analyze.IsGenerated(diffFile.Name))
}
diffFile.IsGenerated = isGenerated.Value()
-
- tailSection := diffFile.GetTailSection(gitRepo, beforeCommit, commit)
+ tailSection, limitedContent := diffFile.GetTailSectionAndLimitedContent(beforeCommit, afterCommit)
if tailSection != nil {
diffFile.Sections = append(diffFile.Sections, tailSection)
}
- }
-
- if opts.FileOnly {
- return diff, nil
- }
- stats, err := GetPullDiffStats(gitRepo, opts)
- if err != nil {
- return nil, err
+ if !setting.Git.DisableDiffHighlight {
+ if limitedContent.LeftContent != nil && limitedContent.LeftContent.buf.Len() < MaxDiffHighlightEntireFileSize {
+ diffFile.highlightedLeftLines = highlightCodeLines(diffFile, true /* left */, limitedContent.LeftContent.buf.String())
+ }
+ if limitedContent.RightContent != nil && limitedContent.RightContent.buf.Len() < MaxDiffHighlightEntireFileSize {
+ diffFile.highlightedRightLines = highlightCodeLines(diffFile, false /* right */, limitedContent.RightContent.buf.String())
+ }
+ }
}
- diff.NumFiles, diff.TotalAddition, diff.TotalDeletion = stats.NumFiles, stats.TotalAddition, stats.TotalDeletion
-
return diff, nil
}
-type PullDiffStats struct {
+func highlightCodeLines(diffFile *DiffFile, isLeft bool, content string) map[int]template.HTML {
+ highlightedNewContent, _ := highlight.Code(diffFile.Name, diffFile.Language, content)
+ splitLines := strings.Split(string(highlightedNewContent), "\n")
+ lines := make(map[int]template.HTML, len(splitLines))
+ // only save the highlighted lines we need, but not the whole file, to save memory
+ for _, sec := range diffFile.Sections {
+ for _, ln := range sec.Lines {
+ lineIdx := ln.LeftIdx
+ if !isLeft {
+ lineIdx = ln.RightIdx
+ }
+ if lineIdx >= 1 {
+ idx := lineIdx - 1
+ if idx < len(splitLines) {
+ lines[idx] = template.HTML(splitLines[idx])
+ }
+ }
+ }
+ }
+ return lines
+}
+
+type DiffShortStat struct {
NumFiles, TotalAddition, TotalDeletion int
}
-// GetPullDiffStats
-func GetPullDiffStats(gitRepo *git.Repository, opts *DiffOptions) (*PullDiffStats, error) {
+func GetDiffShortStat(gitRepo *git.Repository, beforeCommitID, afterCommitID string) (*DiffShortStat, error) {
repoPath := gitRepo.Path
- diff := &PullDiffStats{}
-
- separator := "..."
- if opts.DirectComparison {
- separator = ".."
- }
-
- objectFormat, err := gitRepo.GetObjectFormat()
+ afterCommit, err := gitRepo.GetCommit(afterCommitID)
if err != nil {
return nil, err
}
- diffPaths := []string{opts.BeforeCommitID + separator + opts.AfterCommitID}
- if len(opts.BeforeCommitID) == 0 || opts.BeforeCommitID == objectFormat.EmptyObjectID().String() {
- diffPaths = []string{objectFormat.EmptyTree().String(), opts.AfterCommitID}
+ _, actualBeforeCommitID, err := guessBeforeCommitForDiff(gitRepo, beforeCommitID, afterCommit)
+ if err != nil {
+ return nil, err
}
- diff.NumFiles, diff.TotalAddition, diff.TotalDeletion, err = git.GetDiffShortStat(gitRepo.Ctx, repoPath, nil, diffPaths...)
- if err != nil && strings.Contains(err.Error(), "no merge base") {
- // git >= 2.28 now returns an error if base and head have become unrelated.
- // previously it would return the results of git diff --shortstat base head so let's try that...
- diffPaths = []string{opts.BeforeCommitID, opts.AfterCommitID}
- diff.NumFiles, diff.TotalAddition, diff.TotalDeletion, err = git.GetDiffShortStat(gitRepo.Ctx, repoPath, nil, diffPaths...)
- }
+ diff := &DiffShortStat{}
+ diff.NumFiles, diff.TotalAddition, diff.TotalDeletion, err = git.GetDiffShortStatByCmdArgs(gitRepo.Ctx, repoPath, nil, actualBeforeCommitID.String(), afterCommitID)
if err != nil {
return nil, err
}
-
return diff, nil
}
-// SyncAndGetUserSpecificDiff is like GetDiff, except that user specific data such as which files the given user has already viewed on the given PR will also be set
-// Additionally, the database asynchronously is updated if files have changed since the last review
-func SyncAndGetUserSpecificDiff(ctx context.Context, userID int64, pull *issues_model.PullRequest, gitRepo *git.Repository, opts *DiffOptions, files ...string) (*Diff, error) {
- diff, err := GetDiff(ctx, gitRepo, opts, files...)
- if err != nil {
- return nil, err
- }
+// SyncUserSpecificDiff inserts user-specific data such as which files the user has already viewed on the given diff
+// Additionally, the database is updated asynchronously if files have changed since the last review
+func SyncUserSpecificDiff(ctx context.Context, userID int64, pull *issues_model.PullRequest, gitRepo *git.Repository, diff *Diff, opts *DiffOptions, files ...string) error {
review, err := pull_model.GetNewestReviewState(ctx, userID, pull.ID)
if err != nil || review == nil || review.UpdatedFiles == nil {
- return diff, err
+ return err
}
latestCommit := opts.AfterCommitID
@@ -1348,11 +1395,11 @@ outer:
err := pull_model.UpdateReviewState(ctx, review.UserID, review.PullID, review.CommitSHA, filesChangedSinceLastDiff)
if err != nil {
log.Warn("Could not update review for user %d, pull %d, commit %s and the changed files %v: %v", review.UserID, review.PullID, review.CommitSHA, filesChangedSinceLastDiff, err)
- return nil, err
+ return err
}
}
- return diff, nil
+ return nil
}
// CommentAsDiff returns c.Patch as *Diff
@@ -1398,10 +1445,8 @@ func GetWhitespaceFlag(whitespaceBehavior string) git.TrustedCmdArgs {
"ignore-eol": {"--ignore-space-at-eol"},
"show-all": nil,
}
-
if flag, ok := whitespaceFlags[whitespaceBehavior]; ok {
return flag
}
- log.Warn("unknown whitespace behavior: %q, default to 'show-all'", whitespaceBehavior)
return nil
}
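The limitByteWriter added above simply stops buffering once its limit is reached, which is how GetDiffForRender decides to skip whole-file highlighting for oversized blobs. A stand-alone sketch of the same idea, with the type and constant re-declared locally for illustration:

package main

import (
	"bytes"
	"fmt"
)

const maxHighlightSize = 1 * 1024 * 1024 // mirrors MaxDiffHighlightEntireFileSize

// limitWriter buffers at most `limit` bytes and silently drops the rest,
// like the limitByteWriter in the diff above.
type limitWriter struct {
	buf   bytes.Buffer
	limit int
}

func (l *limitWriter) Write(p []byte) (int, error) {
	if l.buf.Len()+len(p) > l.limit {
		p = p[:l.limit-l.buf.Len()]
	}
	return l.buf.Write(p)
}

func main() {
	w := &limitWriter{limit: maxHighlightSize + 1}
	_, _ = w.Write(bytes.Repeat([]byte("x"), 2*maxHighlightSize)) // pretend this is the blob content
	// A full buffer means the blob exceeds the highlight budget,
	// so the caller would skip whole-file highlighting for it.
	fmt.Println("buffered:", w.buf.Len(), "highlight entire file:", w.buf.Len() < maxHighlightSize)
}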
diff --git a/services/gitdiff/gitdiff_test.go b/services/gitdiff/gitdiff_test.go
index 1017d188dd..71394b1915 100644
--- a/services/gitdiff/gitdiff_test.go
+++ b/services/gitdiff/gitdiff_test.go
@@ -17,27 +17,10 @@ import (
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/setting"
- dmp "github.com/sergi/go-diff/diffmatchpatch"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
-func TestDiffToHTML(t *testing.T) {
- assert.Equal(t, "foo <span class=\"added-code\">bar</span> biz", diffToHTML(nil, []dmp.Diff{
- {Type: dmp.DiffEqual, Text: "foo "},
- {Type: dmp.DiffInsert, Text: "bar"},
- {Type: dmp.DiffDelete, Text: " baz"},
- {Type: dmp.DiffEqual, Text: " biz"},
- }, DiffLineAdd))
-
- assert.Equal(t, "foo <span class=\"removed-code\">bar</span> biz", diffToHTML(nil, []dmp.Diff{
- {Type: dmp.DiffEqual, Text: "foo "},
- {Type: dmp.DiffDelete, Text: "bar"},
- {Type: dmp.DiffInsert, Text: " baz"},
- {Type: dmp.DiffEqual, Text: " biz"},
- }, DiffLineDel))
-}
-
func TestParsePatch_skipTo(t *testing.T) {
type testcase struct {
name string
@@ -182,16 +165,10 @@ diff --git "\\a/README.md" "\\b/README.md"
}
gotMarshaled, _ := json.MarshalIndent(got, "", " ")
- if got.NumFiles != 1 {
+ if len(got.Files) != 1 {
t.Errorf("ParsePath(%q) did not receive 1 file:\n%s", testcase.name, string(gotMarshaled))
return
}
- if got.TotalAddition != testcase.addition {
- t.Errorf("ParsePath(%q) does not have correct totalAddition %d, wanted %d", testcase.name, got.TotalAddition, testcase.addition)
- }
- if got.TotalDeletion != testcase.deletion {
- t.Errorf("ParsePath(%q) did not have correct totalDeletion %d, wanted %d", testcase.name, got.TotalDeletion, testcase.deletion)
- }
file := got.Files[0]
if file.Addition != testcase.addition {
t.Errorf("ParsePath(%q) does not have correct file addition %d, wanted %d", testcase.name, file.Addition, testcase.addition)
@@ -407,16 +384,10 @@ index 6961180..9ba1a00 100644
}
gotMarshaled, _ := json.MarshalIndent(got, "", " ")
- if got.NumFiles != 1 {
+ if len(got.Files) != 1 {
t.Errorf("ParsePath(%q) did not receive 1 file:\n%s", testcase.name, string(gotMarshaled))
return
}
- if got.TotalAddition != testcase.addition {
- t.Errorf("ParsePath(%q) does not have correct totalAddition %d, wanted %d", testcase.name, got.TotalAddition, testcase.addition)
- }
- if got.TotalDeletion != testcase.deletion {
- t.Errorf("ParsePath(%q) did not have correct totalDeletion %d, wanted %d", testcase.name, got.TotalDeletion, testcase.deletion)
- }
file := got.Files[0]
if file.Addition != testcase.addition {
t.Errorf("ParsePath(%q) does not have correct file addition %d, wanted %d", testcase.name, file.Addition, testcase.addition)
@@ -628,23 +599,25 @@ func TestDiffLine_GetCommentSide(t *testing.T) {
}
func TestGetDiffRangeWithWhitespaceBehavior(t *testing.T) {
- gitRepo, err := git.OpenRepository(git.DefaultContext, "./testdata/academic-module")
+ gitRepo, err := git.OpenRepository(t.Context(), "../../modules/git/tests/repos/repo5_pulls")
require.NoError(t, err)
defer gitRepo.Close()
for _, behavior := range []git.TrustedCmdArgs{{"-w"}, {"--ignore-space-at-eol"}, {"-b"}, nil} {
- diffs, err := GetDiff(db.DefaultContext, gitRepo,
+ diffs, err := GetDiffForAPI(t.Context(), gitRepo,
&DiffOptions{
- AfterCommitID: "bd7063cc7c04689c4d082183d32a604ed27a24f9",
- BeforeCommitID: "559c156f8e0178b71cb44355428f24001b08fc68",
+ AfterCommitID: "d8e0bbb45f200e67d9a784ce55bd90821af45ebd",
+ BeforeCommitID: "72866af952e98d02a73003501836074b286a78f6",
MaxLines: setting.Git.MaxGitDiffLines,
MaxLineCharacters: setting.Git.MaxGitDiffLineCharacters,
- MaxFiles: setting.Git.MaxGitDiffFiles,
+ MaxFiles: 1,
WhitespaceBehavior: behavior,
})
- assert.NoError(t, err, "Error when diff with %s", behavior)
+ require.NoError(t, err, "Error when diff with WhitespaceBehavior=%s", behavior)
+ assert.True(t, diffs.IsIncomplete)
+ assert.Len(t, diffs.Files, 1)
for _, f := range diffs.Files {
- assert.NotEmpty(t, f.Sections, "%s should have sections", f.Name)
+ assert.NotEmpty(t, f.Sections, "Diff file %q should have sections", f.Name)
}
}
}
diff --git a/services/gitdiff/highlightdiff.go b/services/gitdiff/highlightdiff.go
index 35d4844550..e8be063e69 100644
--- a/services/gitdiff/highlightdiff.go
+++ b/services/gitdiff/highlightdiff.go
@@ -4,23 +4,24 @@
package gitdiff
import (
+ "bytes"
+ "html/template"
"strings"
- "code.gitea.io/gitea/modules/highlight"
-
"github.com/sergi/go-diff/diffmatchpatch"
)
// token is a html tag or entity, eg: "<span ...>", "</span>", "&lt;"
func extractHTMLToken(s string) (before, token, after string, valid bool) {
for pos1 := 0; pos1 < len(s); pos1++ {
- if s[pos1] == '<' {
+ switch s[pos1] {
+ case '<':
pos2 := strings.IndexByte(s[pos1:], '>')
if pos2 == -1 {
return "", "", s, false
}
return s[:pos1], s[pos1 : pos1+pos2+1], s[pos1+pos2+1:], true
- } else if s[pos1] == '&' {
+ case '&':
pos2 := strings.IndexByte(s[pos1:], ';')
if pos2 == -1 {
return "", "", s, false
@@ -77,7 +78,7 @@ func (hcd *highlightCodeDiff) isInPlaceholderRange(r rune) bool {
return hcd.placeholderBegin <= r && r < hcd.placeholderBegin+rune(hcd.placeholderMaxCount)
}
-func (hcd *highlightCodeDiff) collectUsedRunes(code string) {
+func (hcd *highlightCodeDiff) collectUsedRunes(code template.HTML) {
for _, r := range code {
if hcd.isInPlaceholderRange(r) {
			// put the existing rune (used by the code) into the map, so this rune won't be used as a placeholder anymore.
@@ -86,27 +87,76 @@ func (hcd *highlightCodeDiff) collectUsedRunes(code string) {
}
}
-func (hcd *highlightCodeDiff) diffWithHighlight(filename, language, codeA, codeB string) []diffmatchpatch.Diff {
+func (hcd *highlightCodeDiff) diffLineWithHighlight(lineType DiffLineType, codeA, codeB template.HTML) template.HTML {
+ return hcd.diffLineWithHighlightWrapper(nil, lineType, codeA, codeB)
+}
+
+func (hcd *highlightCodeDiff) diffLineWithHighlightWrapper(lineWrapperTags []string, lineType DiffLineType, codeA, codeB template.HTML) template.HTML {
hcd.collectUsedRunes(codeA)
hcd.collectUsedRunes(codeB)
- highlightCodeA, _ := highlight.Code(filename, language, codeA)
- highlightCodeB, _ := highlight.Code(filename, language, codeB)
+ convertedCodeA := hcd.convertToPlaceholders(codeA)
+ convertedCodeB := hcd.convertToPlaceholders(codeB)
+
+ dmp := defaultDiffMatchPatch()
+ diffs := dmp.DiffMain(convertedCodeA, convertedCodeB, true)
+ diffs = dmp.DiffCleanupSemantic(diffs)
+
+ buf := bytes.NewBuffer(nil)
- convertedCodeA := hcd.convertToPlaceholders(string(highlightCodeA))
- convertedCodeB := hcd.convertToPlaceholders(string(highlightCodeB))
+ // restore the line wrapper tags <span class="line"> and <span class="cl">, if necessary
+ for _, tag := range lineWrapperTags {
+ buf.WriteString(tag)
+ }
- diffs := diffMatchPatch.DiffMain(convertedCodeA, convertedCodeB, true)
- diffs = diffMatchPatch.DiffCleanupEfficiency(diffs)
+ addedCodePrefix := hcd.registerTokenAsPlaceholder(`<span class="added-code">`)
+ removedCodePrefix := hcd.registerTokenAsPlaceholder(`<span class="removed-code">`)
+ codeTagSuffix := hcd.registerTokenAsPlaceholder(`</span>`)
- for i := range diffs {
- hcd.recoverOneDiff(&diffs[i])
+ if codeTagSuffix != 0 {
+ for _, diff := range diffs {
+ switch {
+ case diff.Type == diffmatchpatch.DiffEqual:
+ buf.WriteString(diff.Text)
+ case diff.Type == diffmatchpatch.DiffInsert && lineType == DiffLineAdd:
+ buf.WriteRune(addedCodePrefix)
+ buf.WriteString(diff.Text)
+ buf.WriteRune(codeTagSuffix)
+ case diff.Type == diffmatchpatch.DiffDelete && lineType == DiffLineDel:
+ buf.WriteRune(removedCodePrefix)
+ buf.WriteString(diff.Text)
+ buf.WriteRune(codeTagSuffix)
+ }
+ }
+ } else {
+ // placeholder map space is exhausted
+ for _, diff := range diffs {
+ take := diff.Type == diffmatchpatch.DiffEqual || (diff.Type == diffmatchpatch.DiffInsert && lineType == DiffLineAdd) || (diff.Type == diffmatchpatch.DiffDelete && lineType == DiffLineDel)
+ if take {
+ buf.WriteString(diff.Text)
+ }
+ }
}
- return diffs
+ for range lineWrapperTags {
+ buf.WriteString("</span>")
+ }
+ return hcd.recoverOneDiff(buf.String())
+}
+
+func (hcd *highlightCodeDiff) registerTokenAsPlaceholder(token string) rune {
+ placeholder, ok := hcd.tokenPlaceholderMap[token]
+ if !ok {
+ placeholder = hcd.nextPlaceholder()
+ if placeholder != 0 {
+ hcd.tokenPlaceholderMap[token] = placeholder
+ hcd.placeholderTokenMap[placeholder] = token
+ }
+ }
+ return placeholder
}
// convertToPlaceholders totally depends on Chroma's valid HTML output and its structure, do not use these functions for other purposes.
-func (hcd *highlightCodeDiff) convertToPlaceholders(htmlCode string) string {
+func (hcd *highlightCodeDiff) convertToPlaceholders(htmlContent template.HTML) string {
var tagStack []string
res := strings.Builder{}
@@ -115,6 +165,7 @@ func (hcd *highlightCodeDiff) convertToPlaceholders(htmlCode string) string {
var beforeToken, token string
var valid bool
+ htmlCode := string(htmlContent)
// the standard chroma highlight HTML is "<span class="line [hl]"><span class="cl"> ... </span></span>"
for {
beforeToken, token, htmlCode, valid = extractHTMLToken(htmlCode)
@@ -151,14 +202,7 @@ func (hcd *highlightCodeDiff) convertToPlaceholders(htmlCode string) string {
} // else: impossible
// remember the placeholder and token in the map
- placeholder, ok := hcd.tokenPlaceholderMap[tokenInMap]
- if !ok {
- placeholder = hcd.nextPlaceholder()
- if placeholder != 0 {
- hcd.tokenPlaceholderMap[tokenInMap] = placeholder
- hcd.placeholderTokenMap[placeholder] = tokenInMap
- }
- }
+ placeholder := hcd.registerTokenAsPlaceholder(tokenInMap)
if placeholder != 0 {
res.WriteRune(placeholder) // use the placeholder to replace the token
@@ -179,11 +223,11 @@ func (hcd *highlightCodeDiff) convertToPlaceholders(htmlCode string) string {
return res.String()
}
-func (hcd *highlightCodeDiff) recoverOneDiff(diff *diffmatchpatch.Diff) {
+func (hcd *highlightCodeDiff) recoverOneDiff(str string) template.HTML {
sb := strings.Builder{}
var tagStack []string
- for _, r := range diff.Text {
+ for _, r := range str {
token, ok := hcd.placeholderTokenMap[r]
if !ok || token == "" {
sb.WriteRune(r) // if the rune is not a placeholder, write it as it is
@@ -217,6 +261,5 @@ func (hcd *highlightCodeDiff) recoverOneDiff(diff *diffmatchpatch.Diff) {
} // else: impossible. every tag was pushed into the stack by the code above and is valid HTML opening tag
}
}
-
- diff.Text = sb.String()
+ return template.HTML(sb.String())
}
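The refactored highlightCodeDiff still relies on the same placeholder trick: highlighter tags and entities are swapped for runes from a private Unicode range, the character-level diff runs on those compact strings, and the runes are expanded back afterwards. A toy round trip of that idea, using the same sergi/go-diff dependency but a hand-rolled token map instead of hcd's internal one:

package main

import (
	"fmt"
	"strings"

	"github.com/sergi/go-diff/diffmatchpatch"
)

func main() {
	// 1. Replace highlighter tokens with single placeholder runes.
	tokenToRune := map[string]rune{`<span class="k">`: '\U00100000', `</span>`: '\U00100001'}
	runeToToken := map[rune]string{'\U00100000': `<span class="k">`, '\U00100001': `</span>`}
	encode := func(s string) string {
		for token, r := range tokenToRune {
			s = strings.ReplaceAll(s, token, string(r))
		}
		return s
	}
	codeA := encode(`x <span class="k">foo</span> y`)
	codeB := encode(`x <span class="k">bar</span> y`)

	// 2. Diff the placeholder strings character by character.
	dmp := diffmatchpatch.New()
	diffs := dmp.DiffCleanupSemantic(dmp.DiffMain(codeA, codeB, true))

	// 3. Re-expand the placeholders for the "deleted" rendering.
	var sb strings.Builder
	for _, d := range diffs {
		if d.Type == diffmatchpatch.DiffInsert {
			continue // keep only the left-hand side, as a DiffLineDel render would
		}
		for _, r := range d.Text {
			if token, ok := runeToToken[r]; ok {
				sb.WriteString(token)
			} else {
				sb.WriteRune(r)
			}
		}
	}
	fmt.Println(sb.String()) // x <span class="k">foo</span> y
}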
diff --git a/services/gitdiff/highlightdiff_test.go b/services/gitdiff/highlightdiff_test.go
index 545a060e20..aebe38ae7c 100644
--- a/services/gitdiff/highlightdiff_test.go
+++ b/services/gitdiff/highlightdiff_test.go
@@ -5,121 +5,82 @@ package gitdiff
import (
"fmt"
+ "html/template"
"strings"
"testing"
- "github.com/sergi/go-diff/diffmatchpatch"
"github.com/stretchr/testify/assert"
)
func TestDiffWithHighlight(t *testing.T) {
- hcd := newHighlightCodeDiff()
- diffs := hcd.diffWithHighlight(
- "main.v", "",
- " run('<>')\n",
- " run(db)\n",
- )
-
- expected := ` <span class="n">run</span><span class="o">(</span><span class="removed-code"><span class="k">&#39;</span><span class="o">&lt;</span><span class="o">&gt;</span><span class="k">&#39;</span></span><span class="o">)</span>`
- output := diffToHTML(nil, diffs, DiffLineDel)
- assert.Equal(t, expected, output)
-
- expected = ` <span class="n">run</span><span class="o">(</span><span class="added-code"><span class="n">db</span></span><span class="o">)</span>`
- output = diffToHTML(nil, diffs, DiffLineAdd)
- assert.Equal(t, expected, output)
-
- hcd = newHighlightCodeDiff()
- hcd.placeholderTokenMap['O'] = "<span>"
- hcd.placeholderTokenMap['C'] = "</span>"
- diff := diffmatchpatch.Diff{}
-
- diff.Text = "OC"
- hcd.recoverOneDiff(&diff)
- assert.Equal(t, "<span></span>", diff.Text)
-
- diff.Text = "O"
- hcd.recoverOneDiff(&diff)
- assert.Equal(t, "<span></span>", diff.Text)
-
- diff.Text = "C"
- hcd.recoverOneDiff(&diff)
- assert.Equal(t, "", diff.Text)
+ t.Run("DiffLineAddDel", func(t *testing.T) {
+ hcd := newHighlightCodeDiff()
+ codeA := template.HTML(`x <span class="k">foo</span> y`)
+ codeB := template.HTML(`x <span class="k">bar</span> y`)
+ outDel := hcd.diffLineWithHighlight(DiffLineDel, codeA, codeB)
+ assert.Equal(t, `x <span class="k"><span class="removed-code">foo</span></span> y`, string(outDel))
+ outAdd := hcd.diffLineWithHighlight(DiffLineAdd, codeA, codeB)
+ assert.Equal(t, `x <span class="k"><span class="added-code">bar</span></span> y`, string(outAdd))
+ })
+
+ t.Run("CleanUp", func(t *testing.T) {
+ hcd := newHighlightCodeDiff()
+ codeA := template.HTML(`<span class="cm>this is a comment</span>`)
+ codeB := template.HTML(`<span class="cm>this is updated comment</span>`)
+ outDel := hcd.diffLineWithHighlight(DiffLineDel, codeA, codeB)
+ assert.Equal(t, `<span class="cm>this is <span class="removed-code">a</span> comment</span>`, string(outDel))
+ outAdd := hcd.diffLineWithHighlight(DiffLineAdd, codeA, codeB)
+ assert.Equal(t, `<span class="cm>this is <span class="added-code">updated</span> comment</span>`, string(outAdd))
+ })
+
+ t.Run("OpenCloseTags", func(t *testing.T) {
+ hcd := newHighlightCodeDiff()
+ hcd.placeholderTokenMap['O'], hcd.placeholderTokenMap['C'] = "<span>", "</span>"
+ assert.Equal(t, "<span></span>", string(hcd.recoverOneDiff("OC")))
+ assert.Equal(t, "<span></span>", string(hcd.recoverOneDiff("O")))
+ assert.Empty(t, string(hcd.recoverOneDiff("C")))
+ })
}
func TestDiffWithHighlightPlaceholder(t *testing.T) {
hcd := newHighlightCodeDiff()
- diffs := hcd.diffWithHighlight(
- "main.js", "",
- "a='\U00100000'",
- "a='\U0010FFFD''",
- )
- assert.Equal(t, "", hcd.placeholderTokenMap[0x00100000])
- assert.Equal(t, "", hcd.placeholderTokenMap[0x0010FFFD])
-
- expected := fmt.Sprintf(`<span class="nx">a</span><span class="o">=</span><span class="s1">&#39;</span><span class="removed-code">%s</span>&#39;`, "\U00100000")
- output := diffToHTML(hcd.lineWrapperTags, diffs, DiffLineDel)
- assert.Equal(t, expected, output)
+ output := hcd.diffLineWithHighlight(DiffLineDel, "a='\U00100000'", "a='\U0010FFFD''")
+ assert.Empty(t, hcd.placeholderTokenMap[0x00100000])
+ assert.Empty(t, hcd.placeholderTokenMap[0x0010FFFD])
+ expected := fmt.Sprintf(`a='<span class="removed-code">%s</span>'`, "\U00100000")
+ assert.Equal(t, expected, string(output))
hcd = newHighlightCodeDiff()
- diffs = hcd.diffWithHighlight(
- "main.js", "",
- "a='\U00100000'",
- "a='\U0010FFFD'",
- )
- expected = fmt.Sprintf(`<span class="nx">a</span><span class="o">=</span><span class="s1">&#39;</span><span class="added-code">%s</span>&#39;`, "\U0010FFFD")
- output = diffToHTML(nil, diffs, DiffLineAdd)
- assert.Equal(t, expected, output)
+ output = hcd.diffLineWithHighlight(DiffLineAdd, "a='\U00100000'", "a='\U0010FFFD'")
+ expected = fmt.Sprintf(`a='<span class="added-code">%s</span>'`, "\U0010FFFD")
+ assert.Equal(t, expected, string(output))
}
func TestDiffWithHighlightPlaceholderExhausted(t *testing.T) {
hcd := newHighlightCodeDiff()
hcd.placeholderMaxCount = 0
- diffs := hcd.diffWithHighlight(
- "main.js", "",
- "'",
- ``,
- )
- output := diffToHTML(nil, diffs, DiffLineDel)
- expected := fmt.Sprintf(`<span class="removed-code">%s#39;</span>`, "\uFFFD")
- assert.Equal(t, expected, output)
-
- hcd = newHighlightCodeDiff()
- hcd.placeholderMaxCount = 0
- diffs = hcd.diffWithHighlight(
- "main.js", "",
- "a < b",
- "a > b",
- )
- output = diffToHTML(nil, diffs, DiffLineDel)
- expected = fmt.Sprintf(`a %s<span class="removed-code">l</span>t; b`, "\uFFFD")
- assert.Equal(t, expected, output)
-
- output = diffToHTML(nil, diffs, DiffLineAdd)
- expected = fmt.Sprintf(`a %s<span class="added-code">g</span>t; b`, "\uFFFD")
- assert.Equal(t, expected, output)
+ placeHolderAmp := string(rune(0xFFFD))
+ output := hcd.diffLineWithHighlight(DiffLineDel, `<span class="k">&lt;</span>`, `<span class="k">&gt;</span>`)
+ assert.Equal(t, placeHolderAmp+"lt;", string(output))
+ output = hcd.diffLineWithHighlight(DiffLineAdd, `<span class="k">&lt;</span>`, `<span class="k">&gt;</span>`)
+ assert.Equal(t, placeHolderAmp+"gt;", string(output))
}
func TestDiffWithHighlightTagMatch(t *testing.T) {
- totalOverflow := 0
- for i := 0; i < 100; i++ {
- hcd := newHighlightCodeDiff()
- hcd.placeholderMaxCount = i
- diffs := hcd.diffWithHighlight(
- "main.js", "",
- "a='1'",
- "b='2'",
- )
- totalOverflow += hcd.placeholderOverflowCount
-
- output := diffToHTML(nil, diffs, DiffLineDel)
- c1 := strings.Count(output, "<span")
- c2 := strings.Count(output, "</span")
- assert.Equal(t, c1, c2)
-
- output = diffToHTML(nil, diffs, DiffLineAdd)
- c1 = strings.Count(output, "<span")
- c2 = strings.Count(output, "</span")
- assert.Equal(t, c1, c2)
+ f := func(t *testing.T, lineType DiffLineType) {
+ totalOverflow := 0
+ for i := 0; ; i++ {
+ hcd := newHighlightCodeDiff()
+ hcd.placeholderMaxCount = i
+ output := string(hcd.diffLineWithHighlight(lineType, `<span class="k">&lt;</span>`, `<span class="k">&gt;</span>`))
+ totalOverflow += hcd.placeholderOverflowCount
+ assert.Equal(t, strings.Count(output, "<span"), strings.Count(output, "</span"))
+ if hcd.placeholderOverflowCount == 0 {
+ break
+ }
+ }
+ assert.NotZero(t, totalOverflow)
}
- assert.NotZero(t, totalOverflow)
+ t.Run("DiffLineAdd", func(t *testing.T) { f(t, DiffLineAdd) })
+ t.Run("DiffLineDel", func(t *testing.T) { f(t, DiffLineDel) })
}
diff --git a/services/gitdiff/submodule_test.go b/services/gitdiff/submodule_test.go
index f0eab5557c..3047b23103 100644
--- a/services/gitdiff/submodule_test.go
+++ b/services/gitdiff/submodule_test.go
@@ -4,7 +4,6 @@
package gitdiff
import (
- "context"
"strings"
"testing"
@@ -224,7 +223,7 @@ func TestSubmoduleInfo(t *testing.T) {
PreviousRefID: "aaaa",
NewRefID: "bbbb",
}
- ctx := context.Background()
+ ctx := t.Context()
assert.EqualValues(t, "1111", sdi.CommitRefIDLinkHTML(ctx, "1111"))
assert.EqualValues(t, "aaaa...bbbb", sdi.CompareRefIDLinkHTML(ctx))
assert.EqualValues(t, "name", sdi.SubmoduleRepoLinkHTML(ctx))
diff --git a/services/gitdiff/testdata/academic-module/HEAD b/services/gitdiff/testdata/academic-module/HEAD
deleted file mode 100644
index cb089cd89a..0000000000
--- a/services/gitdiff/testdata/academic-module/HEAD
+++ /dev/null
@@ -1 +0,0 @@
-ref: refs/heads/master
diff --git a/services/gitdiff/testdata/academic-module/config b/services/gitdiff/testdata/academic-module/config
deleted file mode 100644
index 1bc26be514..0000000000
--- a/services/gitdiff/testdata/academic-module/config
+++ /dev/null
@@ -1,10 +0,0 @@
-[core]
- repositoryformatversion = 0
- filemode = true
- bare = false
- logallrefupdates = true
- ignorecase = true
- precomposeunicode = true
-[branch "master"]
- remote = origin
- merge = refs/heads/master
diff --git a/services/gitdiff/testdata/academic-module/index b/services/gitdiff/testdata/academic-module/index
deleted file mode 100644
index e712c906e3..0000000000
--- a/services/gitdiff/testdata/academic-module/index
+++ /dev/null
Binary files differ
diff --git a/services/gitdiff/testdata/academic-module/logs/HEAD b/services/gitdiff/testdata/academic-module/logs/HEAD
deleted file mode 100644
index 16b2e1c0f6..0000000000
--- a/services/gitdiff/testdata/academic-module/logs/HEAD
+++ /dev/null
@@ -1 +0,0 @@
-0000000000000000000000000000000000000000 bd7063cc7c04689c4d082183d32a604ed27a24f9 Lunny Xiao <xiaolunwen@gmail.com> 1574829684 +0800 clone: from https://try.gitea.io/shemgp-aiias/academic-module
diff --git a/services/gitdiff/testdata/academic-module/logs/refs/heads/master b/services/gitdiff/testdata/academic-module/logs/refs/heads/master
deleted file mode 100644
index 16b2e1c0f6..0000000000
--- a/services/gitdiff/testdata/academic-module/logs/refs/heads/master
+++ /dev/null
@@ -1 +0,0 @@
-0000000000000000000000000000000000000000 bd7063cc7c04689c4d082183d32a604ed27a24f9 Lunny Xiao <xiaolunwen@gmail.com> 1574829684 +0800 clone: from https://try.gitea.io/shemgp-aiias/academic-module
diff --git a/services/gitdiff/testdata/academic-module/logs/refs/remotes/origin/HEAD b/services/gitdiff/testdata/academic-module/logs/refs/remotes/origin/HEAD
deleted file mode 100644
index 16b2e1c0f6..0000000000
--- a/services/gitdiff/testdata/academic-module/logs/refs/remotes/origin/HEAD
+++ /dev/null
@@ -1 +0,0 @@
-0000000000000000000000000000000000000000 bd7063cc7c04689c4d082183d32a604ed27a24f9 Lunny Xiao <xiaolunwen@gmail.com> 1574829684 +0800 clone: from https://try.gitea.io/shemgp-aiias/academic-module
diff --git a/services/gitdiff/testdata/academic-module/objects/pack/pack-597efbc3613c7ba790e33b178fd9fc1fe17b4245.idx b/services/gitdiff/testdata/academic-module/objects/pack/pack-597efbc3613c7ba790e33b178fd9fc1fe17b4245.idx
deleted file mode 100644
index 4d759aa504..0000000000
--- a/services/gitdiff/testdata/academic-module/objects/pack/pack-597efbc3613c7ba790e33b178fd9fc1fe17b4245.idx
+++ /dev/null
Binary files differ
diff --git a/services/gitdiff/testdata/academic-module/objects/pack/pack-597efbc3613c7ba790e33b178fd9fc1fe17b4245.pack b/services/gitdiff/testdata/academic-module/objects/pack/pack-597efbc3613c7ba790e33b178fd9fc1fe17b4245.pack
deleted file mode 100644
index 2dc49cfded..0000000000
--- a/services/gitdiff/testdata/academic-module/objects/pack/pack-597efbc3613c7ba790e33b178fd9fc1fe17b4245.pack
+++ /dev/null
Binary files differ
diff --git a/services/gitdiff/testdata/academic-module/packed-refs b/services/gitdiff/testdata/academic-module/packed-refs
deleted file mode 100644
index 13b5611650..0000000000
--- a/services/gitdiff/testdata/academic-module/packed-refs
+++ /dev/null
@@ -1,2 +0,0 @@
-# pack-refs with: peeled fully-peeled sorted
-bd7063cc7c04689c4d082183d32a604ed27a24f9 refs/remotes/origin/master
diff --git a/services/gitdiff/testdata/academic-module/refs/heads/master b/services/gitdiff/testdata/academic-module/refs/heads/master
deleted file mode 100644
index bd2b56eaf4..0000000000
--- a/services/gitdiff/testdata/academic-module/refs/heads/master
+++ /dev/null
@@ -1 +0,0 @@
-bd7063cc7c04689c4d082183d32a604ed27a24f9
diff --git a/services/gitdiff/testdata/academic-module/refs/remotes/origin/HEAD b/services/gitdiff/testdata/academic-module/refs/remotes/origin/HEAD
deleted file mode 100644
index 6efe28fff8..0000000000
--- a/services/gitdiff/testdata/academic-module/refs/remotes/origin/HEAD
+++ /dev/null
@@ -1 +0,0 @@
-ref: refs/remotes/origin/master
diff --git a/services/issue/comments.go b/services/issue/comments.go
index 33b5702a00..46f92f7cd2 100644
--- a/services/issue/comments.go
+++ b/services/issue/comments.go
@@ -12,7 +12,10 @@ import (
access_model "code.gitea.io/gitea/models/perm/access"
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/timeutil"
+ git_service "code.gitea.io/gitea/services/git"
notify_service "code.gitea.io/gitea/services/notify"
)
@@ -139,3 +142,40 @@ func DeleteComment(ctx context.Context, doer *user_model.User, comment *issues_m
return nil
}
+
+// LoadCommentPushCommits loads the push commits for a pull request push comment
+func LoadCommentPushCommits(ctx context.Context, c *issues_model.Comment) (err error) {
+ if c.Content == "" || c.Commits != nil || c.Type != issues_model.CommentTypePullRequestPush {
+ return nil
+ }
+
+ var data issues_model.PushActionContent
+ err = json.Unmarshal([]byte(c.Content), &data)
+ if err != nil {
+ return err
+ }
+
+ c.IsForcePush = data.IsForcePush
+
+ if c.IsForcePush {
+ if len(data.CommitIDs) != 2 {
+ return nil
+ }
+ c.OldCommit = data.CommitIDs[0]
+ c.NewCommit = data.CommitIDs[1]
+ } else {
+ gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, c.Issue.Repo)
+ if err != nil {
+ return err
+ }
+ defer closer.Close()
+
+ c.Commits, err = git_service.ConvertFromGitCommit(ctx, gitRepo.GetCommitsFromIDs(data.CommitIDs), c.Issue.Repo)
+ if err != nil {
+ return err
+ }
+ c.CommitsNum = int64(len(c.Commits))
+ }
+
+ return err
+}
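
As a usage sketch only (the wrapper function and its calling context are assumptions, not part of this patch), the new LoadCommentPushCommits helper would typically be invoked before rendering a pull-request push comment:

package example // illustrative sketch, not part of this change

import (
	"context"

	issues_model "code.gitea.io/gitea/models/issues"
	"code.gitea.io/gitea/modules/log"
	issue_service "code.gitea.io/gitea/services/issue"
)

// hydratePushComment fills Commits / OldCommit / NewCommit on a push comment
// so the template layer can render the pushed commit list.
func hydratePushComment(ctx context.Context, c *issues_model.Comment) {
	if c.Type != issues_model.CommentTypePullRequestPush {
		return
	}
	if err := issue_service.LoadCommentPushCommits(ctx, c); err != nil {
		log.Error("LoadCommentPushCommits: %v", err)
		return
	}
	// For a force push only OldCommit/NewCommit are set; otherwise c.Commits
	// and c.CommitsNum are populated from the repository.
}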
diff --git a/services/issue/issue.go b/services/issue/issue.go
index 091b7c02d7..455a1ec297 100644
--- a/services/issue/issue.go
+++ b/services/issue/issue.go
@@ -92,8 +92,12 @@ func ChangeTitle(ctx context.Context, issue *issues_model.Issue, doer *user_mode
var reviewNotifiers []*ReviewRequestNotifier
if issue.IsPull && issues_model.HasWorkInProgressPrefix(oldTitle) && !issues_model.HasWorkInProgressPrefix(title) {
+ if err := issue.LoadPullRequest(ctx); err != nil {
+ return err
+ }
+
var err error
- reviewNotifiers, err = PullRequestCodeOwnersReview(ctx, issue, issue.PullRequest)
+ reviewNotifiers, err = PullRequestCodeOwnersReview(ctx, issue.PullRequest)
if err != nil {
log.Error("PullRequestCodeOwnersReview: %v", err)
}
@@ -197,13 +201,6 @@ func DeleteIssue(ctx context.Context, doer *user_model.User, gitRepo *git.Reposi
}
}
- // If the Issue is pinned, we should unpin it before deletion to avoid problems with other pinned Issues
- if issue.IsPinned() {
- if err := issue.Unpin(ctx, doer); err != nil {
- return err
- }
- }
-
notify_service.DeleteIssue(ctx, doer, issue)
return nil
@@ -319,6 +316,7 @@ func deleteIssue(ctx context.Context, issue *issues_model.Issue) error {
&issues_model.Comment{RefIssueID: issue.ID},
&issues_model.IssueDependency{DependencyID: issue.ID},
&issues_model.Comment{DependentIssueID: issue.ID},
+ &issues_model.IssuePin{IssueID: issue.ID},
); err != nil {
return err
}
diff --git a/services/issue/issue_test.go b/services/issue/issue_test.go
index 8806cec0e7..b3df8191e1 100644
--- a/services/issue/issue_test.go
+++ b/services/issue/issue_test.go
@@ -24,8 +24,8 @@ func TestGetRefEndNamesAndURLs(t *testing.T) {
repoLink := "/foo/bar"
endNames, urls := GetRefEndNamesAndURLs(issues, repoLink)
- assert.EqualValues(t, map[int64]string{1: "branch1", 2: "tag1", 3: "c0ffee"}, endNames)
- assert.EqualValues(t, map[int64]string{
+ assert.Equal(t, map[int64]string{1: "branch1", 2: "tag1", 3: "c0ffee"}, endNames)
+ assert.Equal(t, map[int64]string{
1: repoLink + "/src/branch/branch1",
2: repoLink + "/src/tag/tag1",
3: repoLink + "/src/commit/c0ffee",
diff --git a/services/issue/pull.go b/services/issue/pull.go
index 896802108d..bd19c25436 100644
--- a/services/issue/pull.go
+++ b/services/issue/pull.go
@@ -6,6 +6,7 @@ package issue
import (
"context"
"fmt"
+ "slices"
"time"
issues_model "code.gitea.io/gitea/models/issues"
@@ -40,20 +41,31 @@ type ReviewRequestNotifier struct {
ReviewTeam *org_model.Team
}
-func PullRequestCodeOwnersReview(ctx context.Context, issue *issues_model.Issue, pr *issues_model.PullRequest) ([]*ReviewRequestNotifier, error) {
- files := []string{"CODEOWNERS", "docs/CODEOWNERS", ".gitea/CODEOWNERS"}
+var codeOwnerFiles = []string{"CODEOWNERS", "docs/CODEOWNERS", ".gitea/CODEOWNERS"}
+func IsCodeOwnerFile(f string) bool {
+ return slices.Contains(codeOwnerFiles, f)
+}
+
+func PullRequestCodeOwnersReview(ctx context.Context, pr *issues_model.PullRequest) ([]*ReviewRequestNotifier, error) {
+	return PullRequestCodeOwnersReviewSpecialCommits(ctx, pr, "", "") // no commits are provided, so it falls back to the PR's base and head branches
+}
+
+func PullRequestCodeOwnersReviewSpecialCommits(ctx context.Context, pr *issues_model.PullRequest, startCommitID, endCommitID string) ([]*ReviewRequestNotifier, error) {
+ if err := pr.LoadIssue(ctx); err != nil {
+ return nil, err
+ }
+ issue := pr.Issue
if pr.IsWorkInProgress(ctx) {
return nil, nil
}
-
if err := pr.LoadHeadRepo(ctx); err != nil {
return nil, err
}
-
if err := pr.LoadBaseRepo(ctx); err != nil {
return nil, err
}
+ pr.Issue.Repo = pr.BaseRepo
if pr.BaseRepo.IsFork {
return nil, nil
@@ -71,7 +83,7 @@ func PullRequestCodeOwnersReview(ctx context.Context, issue *issues_model.Issue,
}
var data string
- for _, file := range files {
+ for _, file := range codeOwnerFiles {
if blob, err := commit.GetBlobByPath(file); err == nil {
data, err = blob.GetBlobContent(setting.UI.MaxDisplayFileSize)
if err == nil {
@@ -79,18 +91,28 @@ func PullRequestCodeOwnersReview(ctx context.Context, issue *issues_model.Issue,
}
}
}
+ if data == "" {
+ return nil, nil
+ }
rules, _ := issues_model.GetCodeOwnersFromContent(ctx, data)
+ if len(rules) == 0 {
+ return nil, nil
+ }
- // get the mergebase
- mergeBase, err := getMergeBase(repo, pr, git.BranchPrefix+pr.BaseBranch, pr.GetGitRefName())
- if err != nil {
- return nil, err
+ if startCommitID == "" && endCommitID == "" {
+ // get the mergebase
+ mergeBase, err := getMergeBase(repo, pr, git.BranchPrefix+pr.BaseBranch, pr.GetGitRefName())
+ if err != nil {
+ return nil, err
+ }
+ startCommitID = mergeBase
+ endCommitID = pr.GetGitRefName()
}
// https://github.com/go-gitea/gitea/issues/29763, we need to get the files changed
// between the merge base and the head commit but not the base branch and the head commit
- changedFiles, err := repo.GetFilesChangedBetween(mergeBase, pr.GetGitRefName())
+ changedFiles, err := repo.GetFilesChangedBetween(startCommitID, endCommitID)
if err != nil {
return nil, err
}
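
For clarity, a minimal sketch of the two entry points after this refactor; the wrapper function below is hypothetical, while the called functions and the ReviewRequestNotifier type come from the patch:

package example // illustrative sketch, not part of this change

import (
	"context"

	issues_model "code.gitea.io/gitea/models/issues"
	issue_service "code.gitea.io/gitea/services/issue"
)

// requestCodeOwnerReviews picks between whole-PR review (merge base .. head ref)
// and review of a specific pushed commit range.
func requestCodeOwnerReviews(ctx context.Context, pr *issues_model.PullRequest, startCommitID, endCommitID string) ([]*issue_service.ReviewRequestNotifier, error) {
	if startCommitID == "" && endCommitID == "" {
		// The issue is now loaded from the PR inside the service call.
		return issue_service.PullRequestCodeOwnersReview(ctx, pr)
	}
	return issue_service.PullRequestCodeOwnersReviewSpecialCommits(ctx, pr, startCommitID, endCommitID)
}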
diff --git a/services/issue/suggestion.go b/services/issue/suggestion.go
new file mode 100644
index 0000000000..22eddb1904
--- /dev/null
+++ b/services/issue/suggestion.go
@@ -0,0 +1,73 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issue
+
+import (
+ "context"
+ "strconv"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/structs"
+)
+
+func GetSuggestion(ctx context.Context, repo *repo_model.Repository, isPull optional.Option[bool], keyword string) ([]*structs.Issue, error) {
+ var issues issues_model.IssueList
+ var err error
+ pageSize := 5
+ if keyword == "" {
+ issues, err = issues_model.FindLatestUpdatedIssues(ctx, repo.ID, isPull, pageSize)
+ if err != nil {
+ return nil, err
+ }
+ } else {
+ indexKeyword, _ := strconv.ParseInt(keyword, 10, 64)
+ var issueByIndex *issues_model.Issue
+ var excludedID int64
+ if indexKeyword > 0 {
+ issueByIndex, err = issues_model.GetIssueByIndex(ctx, repo.ID, indexKeyword)
+ if err != nil && !issues_model.IsErrIssueNotExist(err) {
+ return nil, err
+ }
+ if issueByIndex != nil {
+ excludedID = issueByIndex.ID
+ pageSize--
+ }
+ }
+
+ issues, err = issues_model.FindIssuesSuggestionByKeyword(ctx, repo.ID, keyword, isPull, excludedID, pageSize)
+ if err != nil {
+ return nil, err
+ }
+
+ if issueByIndex != nil {
+ issues = append([]*issues_model.Issue{issueByIndex}, issues...)
+ }
+ }
+
+ if err := issues.LoadPullRequests(ctx); err != nil {
+ return nil, err
+ }
+
+ suggestions := make([]*structs.Issue, 0, len(issues))
+ for _, issue := range issues {
+ suggestion := &structs.Issue{
+ ID: issue.ID,
+ Index: issue.Index,
+ Title: issue.Title,
+ State: issue.State(),
+ }
+
+ if issue.IsPull && issue.PullRequest != nil {
+ suggestion.PullRequest = &structs.PullRequestMeta{
+ HasMerged: issue.PullRequest.HasMerged,
+ IsWorkInProgress: issue.PullRequest.IsWorkInProgress(ctx),
+ }
+ }
+ suggestions = append(suggestions, suggestion)
+ }
+
+ return suggestions, nil
+}
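
A small usage sketch, assuming a caller that wants pull-request-only suggestions; the helper below is hypothetical, while GetSuggestion, optional.Some and structs.Issue come from the patch and the existing modules:

package example // illustrative sketch, not part of this change

import (
	"context"

	repo_model "code.gitea.io/gitea/models/repo"
	"code.gitea.io/gitea/modules/optional"
	"code.gitea.io/gitea/modules/structs"
	issue_service "code.gitea.io/gitea/services/issue"
)

// suggestPulls returns up to five pull-request suggestions for an autocomplete keyword.
// Passing the zero value optional.Option[bool]{} instead (as the test below does)
// returns both issues and pull requests.
func suggestPulls(ctx context.Context, repo *repo_model.Repository, keyword string) ([]*structs.Issue, error) {
	return issue_service.GetSuggestion(ctx, repo, optional.Some(true), keyword)
}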
diff --git a/services/issue/suggestion_test.go b/services/issue/suggestion_test.go
new file mode 100644
index 0000000000..a5b39d27bb
--- /dev/null
+++ b/services/issue/suggestion_test.go
@@ -0,0 +1,57 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issue
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/optional"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func Test_Suggestion(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+
+ repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+
+ testCases := []struct {
+ keyword string
+ isPull optional.Option[bool]
+ expectedIndexes []int64
+ }{
+ {
+ keyword: "",
+ expectedIndexes: []int64{5, 1, 4, 2, 3},
+ },
+ {
+ keyword: "1",
+ expectedIndexes: []int64{1},
+ },
+ {
+ keyword: "issue",
+ expectedIndexes: []int64{4, 1, 2, 3},
+ },
+ {
+ keyword: "pull",
+ expectedIndexes: []int64{5},
+ },
+ }
+
+ for _, testCase := range testCases {
+ t.Run(testCase.keyword, func(t *testing.T) {
+ issues, err := GetSuggestion(db.DefaultContext, repo1, testCase.isPull, testCase.keyword)
+ assert.NoError(t, err)
+
+ issueIndexes := make([]int64, 0, len(issues))
+ for _, issue := range issues {
+ issueIndexes = append(issueIndexes, issue.Index)
+ }
+ assert.Equal(t, testCase.expectedIndexes, issueIndexes)
+ })
+ }
+}
diff --git a/services/lfs/server.go b/services/lfs/server.go
index a77623fdc1..1e7608b781 100644
--- a/services/lfs/server.go
+++ b/services/lfs/server.go
@@ -134,7 +134,9 @@ func DownloadHandler(ctx *context.Context) {
}
contentLength := toByte + 1 - fromByte
- ctx.Resp.Header().Set("Content-Length", strconv.FormatInt(contentLength, 10))
+ contentLengthStr := strconv.FormatInt(contentLength, 10)
+ ctx.Resp.Header().Set("Content-Length", contentLengthStr)
+	ctx.Resp.Header().Set("X-Gitea-LFS-Content-Length", contentLengthStr) // expose the length in a custom header so it cannot be rewritten by a reverse proxy or transparent compression
ctx.Resp.Header().Set("Content-Type", "application/octet-stream")
filename := ctx.PathParam("filename")
@@ -162,11 +164,12 @@ func BatchHandler(ctx *context.Context) {
}
var isUpload bool
- if br.Operation == "upload" {
+ switch br.Operation {
+ case "upload":
isUpload = true
- } else if br.Operation == "download" {
+ case "download":
isUpload = false
- } else {
+ default:
log.Trace("Attempt to BATCH with invalid operation: %s", br.Operation)
writeStatus(ctx, http.StatusBadRequest)
return
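
To illustrate why the extra header helps, a hedged client-side sketch (not part of the patch) that prefers the proxy-safe header and falls back to the standard one:

package example // illustrative sketch, not part of this change

import (
	"net/http"
	"strconv"
)

// lfsContentLength reads the object size for a ranged LFS download.
// Reverse proxies or transparent compression may rewrite Content-Length,
// so the handler now mirrors the value in X-Gitea-LFS-Content-Length.
func lfsContentLength(resp *http.Response) int64 {
	if v := resp.Header.Get("X-Gitea-LFS-Content-Length"); v != "" {
		if n, err := strconv.ParseInt(v, 10, 64); err == nil {
			return n
		}
	}
	return resp.ContentLength
}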
diff --git a/services/mailer/mail.go b/services/mailer/mail.go
index 52e19bde6f..f7e5b0c9f0 100644
--- a/services/mailer/mail.go
+++ b/services/mailer/mail.go
@@ -7,46 +7,29 @@ package mailer
import (
"bytes"
"context"
+ "encoding/base64"
"fmt"
"html/template"
+ "io"
"mime"
"regexp"
- "strconv"
"strings"
texttmpl "text/template"
- "time"
- activities_model "code.gitea.io/gitea/models/activities"
- issues_model "code.gitea.io/gitea/models/issues"
- "code.gitea.io/gitea/models/renderhelper"
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
- "code.gitea.io/gitea/modules/emoji"
+ "code.gitea.io/gitea/modules/httplib"
"code.gitea.io/gitea/modules/log"
- "code.gitea.io/gitea/modules/markup/markdown"
"code.gitea.io/gitea/modules/setting"
- "code.gitea.io/gitea/modules/templates"
- "code.gitea.io/gitea/modules/timeutil"
- "code.gitea.io/gitea/modules/translation"
- incoming_payload "code.gitea.io/gitea/services/mailer/incoming/payload"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/typesniffer"
sender_service "code.gitea.io/gitea/services/mailer/sender"
- "code.gitea.io/gitea/services/mailer/token"
-)
-
-const (
- mailAuthActivate templates.TplName = "auth/activate"
- mailAuthActivateEmail templates.TplName = "auth/activate_email"
- mailAuthResetPassword templates.TplName = "auth/reset_passwd"
- mailAuthRegisterNotify templates.TplName = "auth/register_notify"
-
- mailNotifyCollaborator templates.TplName = "notify/collaborator"
- mailRepoTransferNotify templates.TplName = "notify/repo_transfer"
-
- // There's no actual limit for subject in RFC 5322
- mailMaxSubjectRunes = 256
+ "golang.org/x/net/html"
)
+const mailMaxSubjectRunes = 256 // There's no actual limit for subject in RFC 5322
+
var (
bodyTemplates *template.Template
subjectTemplates *texttmpl.Template
@@ -62,475 +45,114 @@ func SendTestMail(email string) error {
return sender_service.Send(sender, sender_service.NewMessage(email, "Gitea Test Email!", "Gitea Test Email!"))
}
-// sendUserMail sends a mail to the user
-func sendUserMail(language string, u *user_model.User, tpl templates.TplName, code, subject, info string) {
- locale := translation.NewLocale(language)
- data := map[string]any{
- "locale": locale,
- "DisplayName": u.DisplayName(),
- "ActiveCodeLives": timeutil.MinutesToFriendly(setting.Service.ActiveCodeLives, locale),
- "ResetPwdCodeLives": timeutil.MinutesToFriendly(setting.Service.ResetPwdCodeLives, locale),
- "Code": code,
- "Language": locale.Language(),
- }
-
- var content bytes.Buffer
-
- if err := bodyTemplates.ExecuteTemplate(&content, string(tpl), data); err != nil {
- log.Error("Template: %v", err)
- return
- }
-
- msg := sender_service.NewMessage(u.EmailTo(), subject, content.String())
- msg.Info = fmt.Sprintf("UID: %d, %s", u.ID, info)
-
- SendAsync(msg)
-}
-
-// SendActivateAccountMail sends an activation mail to the user (new user registration)
-func SendActivateAccountMail(locale translation.Locale, u *user_model.User) {
- if setting.MailService == nil {
- // No mail service configured
- return
- }
- opts := &user_model.TimeLimitCodeOptions{Purpose: user_model.TimeLimitCodeActivateAccount}
- sendUserMail(locale.Language(), u, mailAuthActivate, user_model.GenerateUserTimeLimitCode(opts, u), locale.TrString("mail.activate_account"), "activate account")
-}
-
-// SendResetPasswordMail sends a password reset mail to the user
-func SendResetPasswordMail(u *user_model.User) {
- if setting.MailService == nil {
- // No mail service configured
- return
- }
- locale := translation.NewLocale(u.Language)
- opts := &user_model.TimeLimitCodeOptions{Purpose: user_model.TimeLimitCodeResetPassword}
- sendUserMail(u.Language, u, mailAuthResetPassword, user_model.GenerateUserTimeLimitCode(opts, u), locale.TrString("mail.reset_password"), "recover account")
-}
-
-// SendActivateEmailMail sends confirmation email to confirm new email address
-func SendActivateEmailMail(u *user_model.User, email string) {
- if setting.MailService == nil {
- // No mail service configured
- return
- }
- locale := translation.NewLocale(u.Language)
- opts := &user_model.TimeLimitCodeOptions{Purpose: user_model.TimeLimitCodeActivateEmail, NewEmail: email}
- data := map[string]any{
- "locale": locale,
- "DisplayName": u.DisplayName(),
- "ActiveCodeLives": timeutil.MinutesToFriendly(setting.Service.ActiveCodeLives, locale),
- "Code": user_model.GenerateUserTimeLimitCode(opts, u),
- "Email": email,
- "Language": locale.Language(),
- }
-
- var content bytes.Buffer
-
- if err := bodyTemplates.ExecuteTemplate(&content, string(mailAuthActivateEmail), data); err != nil {
- log.Error("Template: %v", err)
- return
+func sanitizeSubject(subject string) string {
+ runes := []rune(strings.TrimSpace(subjectRemoveSpaces.ReplaceAllLiteralString(subject, " ")))
+ if len(runes) > mailMaxSubjectRunes {
+ runes = runes[:mailMaxSubjectRunes]
}
-
- msg := sender_service.NewMessage(email, locale.TrString("mail.activate_email"), content.String())
- msg.Info = fmt.Sprintf("UID: %d, activate email", u.ID)
-
- SendAsync(msg)
+ // Encode non-ASCII characters
+ return mime.QEncoding.Encode("utf-8", string(runes))
}
-// SendRegisterNotifyMail triggers a notify e-mail by admin created a account.
-func SendRegisterNotifyMail(u *user_model.User) {
- if setting.MailService == nil || !u.IsActive {
- // No mail service configured OR user is inactive
- return
- }
- locale := translation.NewLocale(u.Language)
-
- data := map[string]any{
- "locale": locale,
- "DisplayName": u.DisplayName(),
- "Username": u.Name,
- "Language": locale.Language(),
- }
-
- var content bytes.Buffer
-
- if err := bodyTemplates.ExecuteTemplate(&content, string(mailAuthRegisterNotify), data); err != nil {
- log.Error("Template: %v", err)
- return
- }
-
- msg := sender_service.NewMessage(u.EmailTo(), locale.TrString("mail.register_notify", setting.AppName), content.String())
- msg.Info = fmt.Sprintf("UID: %d, registration notify", u.ID)
-
- SendAsync(msg)
+type mailAttachmentBase64Embedder struct {
+ doer *user_model.User
+ repo *repo_model.Repository
+ maxSize int64
+ estimateSize int64
}
-// SendCollaboratorMail sends mail notification to new collaborator.
-func SendCollaboratorMail(u, doer *user_model.User, repo *repo_model.Repository) {
- if setting.MailService == nil || !u.IsActive {
- // No mail service configured OR the user is inactive
- return
- }
- locale := translation.NewLocale(u.Language)
- repoName := repo.FullName()
-
- subject := locale.TrString("mail.repo.collaborator.added.subject", doer.DisplayName(), repoName)
- data := map[string]any{
- "locale": locale,
- "Subject": subject,
- "RepoName": repoName,
- "Link": repo.HTMLURL(),
- "Language": locale.Language(),
- }
-
- var content bytes.Buffer
-
- if err := bodyTemplates.ExecuteTemplate(&content, string(mailNotifyCollaborator), data); err != nil {
- log.Error("Template: %v", err)
- return
- }
-
- msg := sender_service.NewMessage(u.EmailTo(), subject, content.String())
- msg.Info = fmt.Sprintf("UID: %d, add collaborator", u.ID)
-
- SendAsync(msg)
+func newMailAttachmentBase64Embedder(doer *user_model.User, repo *repo_model.Repository, maxSize int64) *mailAttachmentBase64Embedder {
+ return &mailAttachmentBase64Embedder{doer: doer, repo: repo, maxSize: maxSize}
}
-func composeIssueCommentMessages(ctx *mailCommentContext, lang string, recipients []*user_model.User, fromMention bool, info string) ([]*sender_service.Message, error) {
- var (
- subject string
- link string
- prefix string
- // Fall back subject for bad templates, make sure subject is never empty
- fallback string
- reviewComments []*issues_model.Comment
- )
-
- commentType := issues_model.CommentTypeComment
- if ctx.Comment != nil {
- commentType = ctx.Comment.Type
- link = ctx.Issue.HTMLURL() + "#" + ctx.Comment.HashTag()
- } else {
- link = ctx.Issue.HTMLURL()
- }
-
- reviewType := issues_model.ReviewTypeComment
- if ctx.Comment != nil && ctx.Comment.Review != nil {
- reviewType = ctx.Comment.Review.Type
- }
-
- // This is the body of the new issue or comment, not the mail body
- rctx := renderhelper.NewRenderContextRepoComment(ctx.Context, ctx.Issue.Repo).WithUseAbsoluteLink(true)
- body, err := markdown.RenderString(rctx,
- ctx.Content)
+func (b64embedder *mailAttachmentBase64Embedder) Base64InlineImages(ctx context.Context, body template.HTML) (template.HTML, error) {
+ doc, err := html.Parse(strings.NewReader(string(body)))
if err != nil {
- return nil, err
- }
-
- actType, actName, tplName := actionToTemplate(ctx.Issue, ctx.ActionType, commentType, reviewType)
-
- if actName != "new" {
- prefix = "Re: "
- }
- fallback = prefix + fallbackMailSubject(ctx.Issue)
-
- if ctx.Comment != nil && ctx.Comment.Review != nil {
- reviewComments = make([]*issues_model.Comment, 0, 10)
- for _, lines := range ctx.Comment.Review.CodeComments {
- for _, comments := range lines {
- reviewComments = append(reviewComments, comments...)
+ return "", fmt.Errorf("html.Parse failed: %w", err)
+ }
+
+ b64embedder.estimateSize = int64(len(string(body)))
+
+ var processNode func(*html.Node)
+ processNode = func(n *html.Node) {
+ if n.Type == html.ElementNode {
+ if n.Data == "img" {
+ for i, attr := range n.Attr {
+ if attr.Key == "src" {
+ attachmentSrc := attr.Val
+ dataURI, err := b64embedder.AttachmentSrcToBase64DataURI(ctx, attachmentSrc)
+ if err != nil {
+ // Not an error, just skip. This is probably an image from outside the gitea instance.
+ log.Trace("Unable to embed attachment %q to mail body: %v", attachmentSrc, err)
+ } else {
+ n.Attr[i].Val = dataURI
+ }
+ break
+ }
+ }
}
}
- }
- locale := translation.NewLocale(lang)
-
- mailMeta := map[string]any{
- "locale": locale,
- "FallbackSubject": fallback,
- "Body": body,
- "Link": link,
- "Issue": ctx.Issue,
- "Comment": ctx.Comment,
- "IsPull": ctx.Issue.IsPull,
- "User": ctx.Issue.Repo.MustOwner(ctx),
- "Repo": ctx.Issue.Repo.FullName(),
- "Doer": ctx.Doer,
- "IsMention": fromMention,
- "SubjectPrefix": prefix,
- "ActionType": actType,
- "ActionName": actName,
- "ReviewComments": reviewComments,
- "Language": locale.Language(),
- "CanReply": setting.IncomingEmail.Enabled && commentType != issues_model.CommentTypePullRequestPush,
- }
-
- var mailSubject bytes.Buffer
- if err := subjectTemplates.ExecuteTemplate(&mailSubject, tplName, mailMeta); err == nil {
- subject = sanitizeSubject(mailSubject.String())
- if subject == "" {
- subject = fallback
+ for c := n.FirstChild; c != nil; c = c.NextSibling {
+ processNode(c)
}
- } else {
- log.Error("ExecuteTemplate [%s]: %v", tplName+"/subject", err)
}
- subject = emoji.ReplaceAliases(subject)
-
- mailMeta["Subject"] = subject
-
- var mailBody bytes.Buffer
-
- if err := bodyTemplates.ExecuteTemplate(&mailBody, tplName, mailMeta); err != nil {
- log.Error("ExecuteTemplate [%s]: %v", tplName+"/body", err)
- }
+ processNode(doc)
- // Make sure to compose independent messages to avoid leaking user emails
- msgID := generateMessageIDForIssue(ctx.Issue, ctx.Comment, ctx.ActionType)
- reference := generateMessageIDForIssue(ctx.Issue, nil, activities_model.ActionType(0))
-
- var replyPayload []byte
- if ctx.Comment != nil {
- if ctx.Comment.Type.HasMailReplySupport() {
- replyPayload, err = incoming_payload.CreateReferencePayload(ctx.Comment)
- }
- } else {
- replyPayload, err = incoming_payload.CreateReferencePayload(ctx.Issue)
- }
+ var buf bytes.Buffer
+ err = html.Render(&buf, doc)
if err != nil {
- return nil, err
+ return "", fmt.Errorf("html.Render failed: %w", err)
}
-
- unsubscribePayload, err := incoming_payload.CreateReferencePayload(ctx.Issue)
- if err != nil {
- return nil, err
- }
-
- msgs := make([]*sender_service.Message, 0, len(recipients))
- for _, recipient := range recipients {
- msg := sender_service.NewMessageFrom(
- recipient.Email,
- fromDisplayName(ctx.Doer),
- setting.MailService.FromEmail,
- subject,
- mailBody.String(),
- )
- msg.Info = fmt.Sprintf("Subject: %s, %s", subject, info)
-
- msg.SetHeader("Message-ID", msgID)
- msg.SetHeader("In-Reply-To", reference)
-
- references := []string{reference}
- listUnsubscribe := []string{"<" + ctx.Issue.HTMLURL() + ">"}
-
- if setting.IncomingEmail.Enabled {
- if replyPayload != nil {
- token, err := token.CreateToken(token.ReplyHandlerType, recipient, replyPayload)
- if err != nil {
- log.Error("CreateToken failed: %v", err)
- } else {
- replyAddress := strings.Replace(setting.IncomingEmail.ReplyToAddress, setting.IncomingEmail.TokenPlaceholder, token, 1)
- msg.ReplyTo = replyAddress
- msg.SetHeader("List-Post", fmt.Sprintf("<mailto:%s>", replyAddress))
-
- references = append(references, fmt.Sprintf("<reply-%s@%s>", token, setting.Domain))
- }
- }
-
- token, err := token.CreateToken(token.UnsubscribeHandlerType, recipient, unsubscribePayload)
- if err != nil {
- log.Error("CreateToken failed: %v", err)
- } else {
- unsubAddress := strings.Replace(setting.IncomingEmail.ReplyToAddress, setting.IncomingEmail.TokenPlaceholder, token, 1)
- listUnsubscribe = append(listUnsubscribe, "<mailto:"+unsubAddress+">")
- }
- }
-
- msg.SetHeader("References", references...)
- msg.SetHeader("List-Unsubscribe", listUnsubscribe...)
-
- for key, value := range generateAdditionalHeaders(ctx, actType, recipient) {
- msg.SetHeader(key, value)
- }
-
- msgs = append(msgs, msg)
- }
-
- return msgs, nil
+ return template.HTML(buf.String()), nil
}
-func generateMessageIDForIssue(issue *issues_model.Issue, comment *issues_model.Comment, actionType activities_model.ActionType) string {
- var path string
- if issue.IsPull {
- path = "pulls"
- } else {
- path = "issues"
- }
-
- var extra string
- if comment != nil {
- extra = fmt.Sprintf("/comment/%d", comment.ID)
- } else {
- switch actionType {
- case activities_model.ActionCloseIssue, activities_model.ActionClosePullRequest:
- extra = fmt.Sprintf("/close/%d", time.Now().UnixNano()/1e6)
- case activities_model.ActionReopenIssue, activities_model.ActionReopenPullRequest:
- extra = fmt.Sprintf("/reopen/%d", time.Now().UnixNano()/1e6)
- case activities_model.ActionMergePullRequest, activities_model.ActionAutoMergePullRequest:
- extra = fmt.Sprintf("/merge/%d", time.Now().UnixNano()/1e6)
- case activities_model.ActionPullRequestReadyForReview:
- extra = fmt.Sprintf("/ready/%d", time.Now().UnixNano()/1e6)
+func (b64embedder *mailAttachmentBase64Embedder) AttachmentSrcToBase64DataURI(ctx context.Context, attachmentSrc string) (string, error) {
+ parsedSrc := httplib.ParseGiteaSiteURL(ctx, attachmentSrc)
+ var attachmentUUID string
+ if parsedSrc != nil {
+ var ok bool
+ attachmentUUID, ok = strings.CutPrefix(parsedSrc.RoutePath, "/attachments/")
+ if !ok {
+ attachmentUUID, ok = strings.CutPrefix(parsedSrc.RepoSubPath, "/attachments/")
+ }
+ if !ok {
+ return "", fmt.Errorf("not an attachment")
}
}
-
- return fmt.Sprintf("<%s/%s/%d%s@%s>", issue.Repo.FullName(), path, issue.Index, extra, setting.Domain)
-}
-
-func generateMessageIDForRelease(release *repo_model.Release) string {
- return fmt.Sprintf("<%s/releases/%d@%s>", release.Repo.FullName(), release.ID, setting.Domain)
-}
-
-func generateAdditionalHeaders(ctx *mailCommentContext, reason string, recipient *user_model.User) map[string]string {
- repo := ctx.Issue.Repo
-
- return map[string]string{
- // https://datatracker.ietf.org/doc/html/rfc2919
- "List-ID": fmt.Sprintf("%s <%s.%s.%s>", repo.FullName(), repo.Name, repo.OwnerName, setting.Domain),
-
- // https://datatracker.ietf.org/doc/html/rfc2369
- "List-Archive": fmt.Sprintf("<%s>", repo.HTMLURL()),
-
- "X-Mailer": "Gitea",
- "X-Gitea-Reason": reason,
- "X-Gitea-Sender": ctx.Doer.Name,
- "X-Gitea-Recipient": recipient.Name,
- "X-Gitea-Recipient-Address": recipient.Email,
- "X-Gitea-Repository": repo.Name,
- "X-Gitea-Repository-Path": repo.FullName(),
- "X-Gitea-Repository-Link": repo.HTMLURL(),
- "X-Gitea-Issue-ID": strconv.FormatInt(ctx.Issue.Index, 10),
- "X-Gitea-Issue-Link": ctx.Issue.HTMLURL(),
-
- "X-GitHub-Reason": reason,
- "X-GitHub-Sender": ctx.Doer.Name,
- "X-GitHub-Recipient": recipient.Name,
- "X-GitHub-Recipient-Address": recipient.Email,
-
- "X-GitLab-NotificationReason": reason,
- "X-GitLab-Project": repo.Name,
- "X-GitLab-Project-Path": repo.FullName(),
- "X-GitLab-Issue-IID": strconv.FormatInt(ctx.Issue.Index, 10),
- }
-}
-
-func sanitizeSubject(subject string) string {
- runes := []rune(strings.TrimSpace(subjectRemoveSpaces.ReplaceAllLiteralString(subject, " ")))
- if len(runes) > mailMaxSubjectRunes {
- runes = runes[:mailMaxSubjectRunes]
+ attachment, err := repo_model.GetAttachmentByUUID(ctx, attachmentUUID)
+ if err != nil {
+ return "", err
}
- // Encode non-ASCII characters
- return mime.QEncoding.Encode("utf-8", string(runes))
-}
-// SendIssueAssignedMail composes and sends issue assigned email
-func SendIssueAssignedMail(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, content string, comment *issues_model.Comment, recipients []*user_model.User) error {
- if setting.MailService == nil {
- // No mail service configured
- return nil
+ if attachment.RepoID != b64embedder.repo.ID {
+ return "", fmt.Errorf("attachment does not belong to the repository")
}
-
- if err := issue.LoadRepo(ctx); err != nil {
- log.Error("Unable to load repo [%d] for issue #%d [%d]. Error: %v", issue.RepoID, issue.Index, issue.ID, err)
- return err
+ if attachment.Size+b64embedder.estimateSize > b64embedder.maxSize {
+ return "", fmt.Errorf("total embedded images exceed max limit")
}
- langMap := make(map[string][]*user_model.User)
- for _, user := range recipients {
- if !user.IsActive {
- // don't send emails to inactive users
- continue
- }
- langMap[user.Language] = append(langMap[user.Language], user)
+ fr, err := storage.Attachments.Open(attachment.RelativePath())
+ if err != nil {
+ return "", err
}
+ defer fr.Close()
- for lang, tos := range langMap {
- msgs, err := composeIssueCommentMessages(&mailCommentContext{
- Context: ctx,
- Issue: issue,
- Doer: doer,
- ActionType: activities_model.ActionType(0),
- Content: content,
- Comment: comment,
- }, lang, tos, false, "issue assigned")
- if err != nil {
- return err
- }
- SendAsync(msgs...)
+ lr := &io.LimitedReader{R: fr, N: b64embedder.maxSize + 1}
+ content, err := io.ReadAll(lr)
+ if err != nil {
+ return "", fmt.Errorf("LimitedReader ReadAll: %w", err)
}
- return nil
-}
-// actionToTemplate returns the type and name of the action facing the user
-// (slightly different from activities_model.ActionType) and the name of the template to use (based on availability)
-func actionToTemplate(issue *issues_model.Issue, actionType activities_model.ActionType,
- commentType issues_model.CommentType, reviewType issues_model.ReviewType,
-) (typeName, name, template string) {
- if issue.IsPull {
- typeName = "pull"
- } else {
- typeName = "issue"
- }
- switch actionType {
- case activities_model.ActionCreateIssue, activities_model.ActionCreatePullRequest:
- name = "new"
- case activities_model.ActionCommentIssue, activities_model.ActionCommentPull:
- name = "comment"
- case activities_model.ActionCloseIssue, activities_model.ActionClosePullRequest:
- name = "close"
- case activities_model.ActionReopenIssue, activities_model.ActionReopenPullRequest:
- name = "reopen"
- case activities_model.ActionMergePullRequest, activities_model.ActionAutoMergePullRequest:
- name = "merge"
- case activities_model.ActionPullReviewDismissed:
- name = "review_dismissed"
- case activities_model.ActionPullRequestReadyForReview:
- name = "ready_for_review"
- default:
- switch commentType {
- case issues_model.CommentTypeReview:
- switch reviewType {
- case issues_model.ReviewTypeApprove:
- name = "approve"
- case issues_model.ReviewTypeReject:
- name = "reject"
- default:
- name = "review"
- }
- case issues_model.CommentTypeCode:
- name = "code"
- case issues_model.CommentTypeAssignees:
- name = "assigned"
- case issues_model.CommentTypePullRequestPush:
- name = "push"
- default:
- name = "default"
- }
+ mimeType := typesniffer.DetectContentType(content)
+ if !mimeType.IsImage() {
+ return "", fmt.Errorf("not an image")
}
- template = typeName + "/" + name
- ok := bodyTemplates.Lookup(template) != nil
- if !ok && typeName != "issue" {
- template = "issue/" + name
- ok = bodyTemplates.Lookup(template) != nil
- }
- if !ok {
- template = typeName + "/default"
- ok = bodyTemplates.Lookup(template) != nil
- }
- if !ok {
- template = "issue/default"
- }
- return typeName, name, template
+ encoded := base64.StdEncoding.EncodeToString(content)
+ dataURI := fmt.Sprintf("data:%s;base64,%s", mimeType.GetMimeType(), encoded)
+ b64embedder.estimateSize += int64(len(dataURI))
+ return dataURI, nil
}
func fromDisplayName(u *user_model.User) string {
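
A short sketch of how the new embedder is meant to be driven; it mirrors the call in the new mail_issue_common.go below, and the wrapper function itself is hypothetical (it would have to live in the mailer package, since the embedder is unexported):

package mailer // illustrative sketch, not part of this change

import (
	"context"
	"html/template"

	repo_model "code.gitea.io/gitea/models/repo"
	user_model "code.gitea.io/gitea/models/user"
	"code.gitea.io/gitea/modules/log"
)

// embedAttachmentImages inlines attachment <img> sources as base64 data URIs,
// keeping the rendered body under maxSize bytes; on failure the original body is kept.
func embedAttachmentImages(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, body template.HTML, maxSize int64) template.HTML {
	embedder := newMailAttachmentBase64Embedder(doer, repo, maxSize)
	embedded, err := embedder.Base64InlineImages(ctx, body)
	if err != nil {
		log.Error("Base64InlineImages: %v", err) // non-fatal: fall back to the un-embedded body
		return body
	}
	return embedded
}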
diff --git a/services/mailer/mail_comment.go b/services/mailer/mail_comment.go
index 1812441d5a..e8d12e429d 100644
--- a/services/mailer/mail_comment.go
+++ b/services/mailer/mail_comment.go
@@ -25,9 +25,8 @@ func MailParticipantsComment(ctx context.Context, c *issues_model.Comment, opTyp
if c.Type == issues_model.CommentTypePullRequestPush {
content = ""
}
- if err := mailIssueCommentToParticipants(
- &mailCommentContext{
- Context: ctx,
+ if err := mailIssueCommentToParticipants(ctx,
+ &mailComment{
Issue: issue,
Doer: c.Poster,
ActionType: opType,
@@ -48,9 +47,8 @@ func MailMentionsComment(ctx context.Context, pr *issues_model.PullRequest, c *i
visited := make(container.Set[int64], len(mentions)+1)
visited.Add(c.Poster.ID)
- if err = mailIssueCommentBatch(
- &mailCommentContext{
- Context: ctx,
+ if err = mailIssueCommentBatch(ctx,
+ &mailComment{
Issue: pr.Issue,
Doer: c.Poster,
ActionType: activities_model.ActionCommentPull,
diff --git a/services/mailer/mail_issue.go b/services/mailer/mail_issue.go
index e269b1ca1e..b854d61a1a 100644
--- a/services/mailer/mail_issue.go
+++ b/services/mailer/mail_issue.go
@@ -18,38 +18,21 @@ import (
"code.gitea.io/gitea/modules/setting"
)
-func fallbackMailSubject(issue *issues_model.Issue) string {
- return fmt.Sprintf("[%s] %s (#%d)", issue.Repo.FullName(), issue.Title, issue.Index)
-}
-
-type mailCommentContext struct {
- context.Context
- Issue *issues_model.Issue
- Doer *user_model.User
- ActionType activities_model.ActionType
- Content string
- Comment *issues_model.Comment
- ForceDoerNotification bool
-}
-
-const (
- // MailBatchSize set the batch size used in mailIssueCommentBatch
- MailBatchSize = 100
-)
+const MailBatchSize = 100 // batch size used in mailIssueCommentBatch
// mailIssueCommentToParticipants can be used for both new issue creation and comment.
// This function sends two list of emails:
// 1. Repository watchers (except for WIP pull requests) and users who are participated in comments.
// 2. Users who are not in 1. but get mentioned in current issue/comment.
-func mailIssueCommentToParticipants(ctx *mailCommentContext, mentions []*user_model.User) error {
+func mailIssueCommentToParticipants(ctx context.Context, comment *mailComment, mentions []*user_model.User) error {
// Required by the mail composer; make sure to load these before calling the async function
- if err := ctx.Issue.LoadRepo(ctx); err != nil {
+ if err := comment.Issue.LoadRepo(ctx); err != nil {
return fmt.Errorf("LoadRepo: %w", err)
}
- if err := ctx.Issue.LoadPoster(ctx); err != nil {
+ if err := comment.Issue.LoadPoster(ctx); err != nil {
return fmt.Errorf("LoadPoster: %w", err)
}
- if err := ctx.Issue.LoadPullRequest(ctx); err != nil {
+ if err := comment.Issue.LoadPullRequest(ctx); err != nil {
return fmt.Errorf("LoadPullRequest: %w", err)
}
@@ -57,35 +40,35 @@ func mailIssueCommentToParticipants(ctx *mailCommentContext, mentions []*user_mo
unfiltered := make([]int64, 1, 64)
// =========== Original poster ===========
- unfiltered[0] = ctx.Issue.PosterID
+ unfiltered[0] = comment.Issue.PosterID
// =========== Assignees ===========
- ids, err := issues_model.GetAssigneeIDsByIssue(ctx, ctx.Issue.ID)
+ ids, err := issues_model.GetAssigneeIDsByIssue(ctx, comment.Issue.ID)
if err != nil {
- return fmt.Errorf("GetAssigneeIDsByIssue(%d): %w", ctx.Issue.ID, err)
+ return fmt.Errorf("GetAssigneeIDsByIssue(%d): %w", comment.Issue.ID, err)
}
unfiltered = append(unfiltered, ids...)
// =========== Participants (i.e. commenters, reviewers) ===========
- ids, err = issues_model.GetParticipantsIDsByIssueID(ctx, ctx.Issue.ID)
+ ids, err = issues_model.GetParticipantsIDsByIssueID(ctx, comment.Issue.ID)
if err != nil {
- return fmt.Errorf("GetParticipantsIDsByIssueID(%d): %w", ctx.Issue.ID, err)
+ return fmt.Errorf("GetParticipantsIDsByIssueID(%d): %w", comment.Issue.ID, err)
}
unfiltered = append(unfiltered, ids...)
// =========== Issue watchers ===========
- ids, err = issues_model.GetIssueWatchersIDs(ctx, ctx.Issue.ID, true)
+ ids, err = issues_model.GetIssueWatchersIDs(ctx, comment.Issue.ID, true)
if err != nil {
- return fmt.Errorf("GetIssueWatchersIDs(%d): %w", ctx.Issue.ID, err)
+ return fmt.Errorf("GetIssueWatchersIDs(%d): %w", comment.Issue.ID, err)
}
unfiltered = append(unfiltered, ids...)
// =========== Repo watchers ===========
// Make repo watchers last, since it's likely the list with the most users
- if !(ctx.Issue.IsPull && ctx.Issue.PullRequest.IsWorkInProgress(ctx) && ctx.ActionType != activities_model.ActionCreatePullRequest) {
- ids, err = repo_model.GetRepoWatchersIDs(ctx, ctx.Issue.RepoID)
+ if !(comment.Issue.IsPull && comment.Issue.PullRequest.IsWorkInProgress(ctx) && comment.ActionType != activities_model.ActionCreatePullRequest) {
+ ids, err = repo_model.GetRepoWatchersIDs(ctx, comment.Issue.RepoID)
if err != nil {
- return fmt.Errorf("GetRepoWatchersIDs(%d): %w", ctx.Issue.RepoID, err)
+ return fmt.Errorf("GetRepoWatchersIDs(%d): %w", comment.Issue.RepoID, err)
}
unfiltered = append(ids, unfiltered...)
}
@@ -93,19 +76,19 @@ func mailIssueCommentToParticipants(ctx *mailCommentContext, mentions []*user_mo
visited := make(container.Set[int64], len(unfiltered)+len(mentions)+1)
// Avoid mailing the doer
- if ctx.Doer.EmailNotificationsPreference != user_model.EmailNotificationsAndYourOwn && !ctx.ForceDoerNotification {
- visited.Add(ctx.Doer.ID)
+ if comment.Doer.EmailNotificationsPreference != user_model.EmailNotificationsAndYourOwn && !comment.ForceDoerNotification {
+ visited.Add(comment.Doer.ID)
}
// =========== Mentions ===========
- if err = mailIssueCommentBatch(ctx, mentions, visited, true); err != nil {
+ if err = mailIssueCommentBatch(ctx, comment, mentions, visited, true); err != nil {
return fmt.Errorf("mailIssueCommentBatch() mentions: %w", err)
}
// Avoid mailing explicit unwatched
- ids, err = issues_model.GetIssueWatchersIDs(ctx, ctx.Issue.ID, false)
+ ids, err = issues_model.GetIssueWatchersIDs(ctx, comment.Issue.ID, false)
if err != nil {
- return fmt.Errorf("GetIssueWatchersIDs(%d): %w", ctx.Issue.ID, err)
+ return fmt.Errorf("GetIssueWatchersIDs(%d): %w", comment.Issue.ID, err)
}
visited.AddMultiple(ids...)
@@ -113,16 +96,16 @@ func mailIssueCommentToParticipants(ctx *mailCommentContext, mentions []*user_mo
if err != nil {
return err
}
- if err = mailIssueCommentBatch(ctx, unfilteredUsers, visited, false); err != nil {
+ if err = mailIssueCommentBatch(ctx, comment, unfilteredUsers, visited, false); err != nil {
return fmt.Errorf("mailIssueCommentBatch(): %w", err)
}
return nil
}
-func mailIssueCommentBatch(ctx *mailCommentContext, users []*user_model.User, visited container.Set[int64], fromMention bool) error {
+func mailIssueCommentBatch(ctx context.Context, comment *mailComment, users []*user_model.User, visited container.Set[int64], fromMention bool) error {
checkUnit := unit.TypeIssues
- if ctx.Issue.IsPull {
+ if comment.Issue.IsPull {
checkUnit = unit.TypePullRequests
}
@@ -146,7 +129,7 @@ func mailIssueCommentBatch(ctx *mailCommentContext, users []*user_model.User, vi
}
// test if this user is allowed to see the issue/pull
- if !access_model.CheckRepoUnitUser(ctx, ctx.Issue.Repo, user, checkUnit) {
+ if !access_model.CheckRepoUnitUser(ctx, comment.Issue.Repo, user, checkUnit) {
continue
}
@@ -158,7 +141,7 @@ func mailIssueCommentBatch(ctx *mailCommentContext, users []*user_model.User, vi
// working backwards from the last (possibly) incomplete batch. If len(receivers) can be 0 this
// starting condition will need to be changed slightly
for i := ((len(receivers) - 1) / MailBatchSize) * MailBatchSize; i >= 0; i -= MailBatchSize {
- msgs, err := composeIssueCommentMessages(ctx, lang, receivers[i:], fromMention, "issue comments")
+ msgs, err := composeIssueCommentMessages(ctx, comment, lang, receivers[i:], fromMention, "issue comments")
if err != nil {
return err
}
@@ -185,9 +168,8 @@ func MailParticipants(ctx context.Context, issue *issues_model.Issue, doer *user
content = ""
}
forceDoerNotification := opType == activities_model.ActionAutoMergePullRequest
- if err := mailIssueCommentToParticipants(
- &mailCommentContext{
- Context: ctx,
+ if err := mailIssueCommentToParticipants(ctx,
+ &mailComment{
Issue: issue,
Doer: doer,
ActionType: opType,
@@ -199,3 +181,40 @@ func MailParticipants(ctx context.Context, issue *issues_model.Issue, doer *user
}
return nil
}
+
+// SendIssueAssignedMail composes and sends issue assigned email
+func SendIssueAssignedMail(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, content string, comment *issues_model.Comment, recipients []*user_model.User) error {
+ if setting.MailService == nil {
+ // No mail service configured
+ return nil
+ }
+
+ if err := issue.LoadRepo(ctx); err != nil {
+ log.Error("Unable to load repo [%d] for issue #%d [%d]. Error: %v", issue.RepoID, issue.Index, issue.ID, err)
+ return err
+ }
+
+ langMap := make(map[string][]*user_model.User)
+ for _, user := range recipients {
+ if !user.IsActive {
+ // don't send emails to inactive users
+ continue
+ }
+ langMap[user.Language] = append(langMap[user.Language], user)
+ }
+
+ for lang, tos := range langMap {
+ msgs, err := composeIssueCommentMessages(ctx, &mailComment{
+ Issue: issue,
+ Doer: doer,
+ ActionType: activities_model.ActionType(0),
+ Content: content,
+ Comment: comment,
+ }, lang, tos, false, "issue assigned")
+ if err != nil {
+ return err
+ }
+ SendAsync(msgs...)
+ }
+ return nil
+}
diff --git a/services/mailer/mail_issue_common.go b/services/mailer/mail_issue_common.go
new file mode 100644
index 0000000000..ebfd52162c
--- /dev/null
+++ b/services/mailer/mail_issue_common.go
@@ -0,0 +1,336 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package mailer
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "strconv"
+ "strings"
+ "time"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/renderhelper"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/emoji"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup/markdown"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/translation"
+ incoming_payload "code.gitea.io/gitea/services/mailer/incoming/payload"
+ sender_service "code.gitea.io/gitea/services/mailer/sender"
+ "code.gitea.io/gitea/services/mailer/token"
+)
+
+// maxEmailBodySize is the approximate maximum size of an email body in bytes
+// Many e-mail service providers limit the size of the email body; the limit is usually between 10MB and 25MB
+const maxEmailBodySize = 9_000_000
+
+func fallbackMailSubject(issue *issues_model.Issue) string {
+ return fmt.Sprintf("[%s] %s (#%d)", issue.Repo.FullName(), issue.Title, issue.Index)
+}
+
+type mailComment struct {
+ Issue *issues_model.Issue
+ Doer *user_model.User
+ ActionType activities_model.ActionType
+ Content string
+ Comment *issues_model.Comment
+ ForceDoerNotification bool
+}
+
+func composeIssueCommentMessages(ctx context.Context, comment *mailComment, lang string, recipients []*user_model.User, fromMention bool, info string) ([]*sender_service.Message, error) {
+ var (
+ subject string
+ link string
+ prefix string
+ // Fall back subject for bad templates, make sure subject is never empty
+ fallback string
+ reviewComments []*issues_model.Comment
+ )
+
+ commentType := issues_model.CommentTypeComment
+ if comment.Comment != nil {
+ commentType = comment.Comment.Type
+ link = comment.Issue.HTMLURL() + "#" + comment.Comment.HashTag()
+ } else {
+ link = comment.Issue.HTMLURL()
+ }
+
+ reviewType := issues_model.ReviewTypeComment
+ if comment.Comment != nil && comment.Comment.Review != nil {
+ reviewType = comment.Comment.Review.Type
+ }
+
+ // This is the body of the new issue or comment, not the mail body
+ rctx := renderhelper.NewRenderContextRepoComment(ctx, comment.Issue.Repo).WithUseAbsoluteLink(true)
+ body, err := markdown.RenderString(rctx, comment.Content)
+ if err != nil {
+ return nil, err
+ }
+
+ if setting.MailService.EmbedAttachmentImages {
+ attEmbedder := newMailAttachmentBase64Embedder(comment.Doer, comment.Issue.Repo, maxEmailBodySize)
+ bodyAfterEmbedding, err := attEmbedder.Base64InlineImages(ctx, body)
+ if err != nil {
+ log.Error("Failed to embed images in mail body: %v", err)
+ } else {
+ body = bodyAfterEmbedding
+ }
+ }
+ actType, actName, tplName := actionToTemplate(comment.Issue, comment.ActionType, commentType, reviewType)
+
+ if actName != "new" {
+ prefix = "Re: "
+ }
+ fallback = prefix + fallbackMailSubject(comment.Issue)
+
+ if comment.Comment != nil && comment.Comment.Review != nil {
+ reviewComments = make([]*issues_model.Comment, 0, 10)
+ for _, lines := range comment.Comment.Review.CodeComments {
+ for _, comments := range lines {
+ reviewComments = append(reviewComments, comments...)
+ }
+ }
+ }
+ locale := translation.NewLocale(lang)
+
+ mailMeta := map[string]any{
+ "locale": locale,
+ "FallbackSubject": fallback,
+ "Body": body,
+ "Link": link,
+ "Issue": comment.Issue,
+ "Comment": comment.Comment,
+ "IsPull": comment.Issue.IsPull,
+ "User": comment.Issue.Repo.MustOwner(ctx),
+ "Repo": comment.Issue.Repo.FullName(),
+ "Doer": comment.Doer,
+ "IsMention": fromMention,
+ "SubjectPrefix": prefix,
+ "ActionType": actType,
+ "ActionName": actName,
+ "ReviewComments": reviewComments,
+ "Language": locale.Language(),
+ "CanReply": setting.IncomingEmail.Enabled && commentType != issues_model.CommentTypePullRequestPush,
+ }
+
+ var mailSubject bytes.Buffer
+ if err := subjectTemplates.ExecuteTemplate(&mailSubject, tplName, mailMeta); err == nil {
+ subject = sanitizeSubject(mailSubject.String())
+ if subject == "" {
+ subject = fallback
+ }
+ } else {
+ log.Error("ExecuteTemplate [%s]: %v", tplName+"/subject", err)
+ }
+
+ subject = emoji.ReplaceAliases(subject)
+
+ mailMeta["Subject"] = subject
+
+ var mailBody bytes.Buffer
+
+ if err := bodyTemplates.ExecuteTemplate(&mailBody, tplName, mailMeta); err != nil {
+ log.Error("ExecuteTemplate [%s]: %v", tplName+"/body", err)
+ }
+
+ // Make sure to compose independent messages to avoid leaking user emails
+ msgID := generateMessageIDForIssue(comment.Issue, comment.Comment, comment.ActionType)
+ reference := generateMessageIDForIssue(comment.Issue, nil, activities_model.ActionType(0))
+
+ var replyPayload []byte
+ if comment.Comment != nil {
+ if comment.Comment.Type.HasMailReplySupport() {
+ replyPayload, err = incoming_payload.CreateReferencePayload(comment.Comment)
+ }
+ } else {
+ replyPayload, err = incoming_payload.CreateReferencePayload(comment.Issue)
+ }
+ if err != nil {
+ return nil, err
+ }
+
+ unsubscribePayload, err := incoming_payload.CreateReferencePayload(comment.Issue)
+ if err != nil {
+ return nil, err
+ }
+
+ msgs := make([]*sender_service.Message, 0, len(recipients))
+ for _, recipient := range recipients {
+ msg := sender_service.NewMessageFrom(
+ recipient.Email,
+ fromDisplayName(comment.Doer),
+ setting.MailService.FromEmail,
+ subject,
+ mailBody.String(),
+ )
+ msg.Info = fmt.Sprintf("Subject: %s, %s", subject, info)
+
+ msg.SetHeader("Message-ID", msgID)
+ msg.SetHeader("In-Reply-To", reference)
+
+ references := []string{reference}
+ listUnsubscribe := []string{"<" + comment.Issue.HTMLURL() + ">"}
+
+ if setting.IncomingEmail.Enabled {
+ if replyPayload != nil {
+ token, err := token.CreateToken(token.ReplyHandlerType, recipient, replyPayload)
+ if err != nil {
+ log.Error("CreateToken failed: %v", err)
+ } else {
+ replyAddress := strings.Replace(setting.IncomingEmail.ReplyToAddress, setting.IncomingEmail.TokenPlaceholder, token, 1)
+ msg.ReplyTo = replyAddress
+ msg.SetHeader("List-Post", fmt.Sprintf("<mailto:%s>", replyAddress))
+
+ references = append(references, fmt.Sprintf("<reply-%s@%s>", token, setting.Domain))
+ }
+ }
+
+ token, err := token.CreateToken(token.UnsubscribeHandlerType, recipient, unsubscribePayload)
+ if err != nil {
+ log.Error("CreateToken failed: %v", err)
+ } else {
+ unsubAddress := strings.Replace(setting.IncomingEmail.ReplyToAddress, setting.IncomingEmail.TokenPlaceholder, token, 1)
+ listUnsubscribe = append(listUnsubscribe, "<mailto:"+unsubAddress+">")
+ }
+ }
+
+ msg.SetHeader("References", references...)
+ msg.SetHeader("List-Unsubscribe", listUnsubscribe...)
+
+ for key, value := range generateAdditionalHeaders(comment, actType, recipient) {
+ msg.SetHeader(key, value)
+ }
+
+ msgs = append(msgs, msg)
+ }
+
+ return msgs, nil
+}
+
+// actionToTemplate returns the type and name of the action facing the user
+// (slightly different from activities_model.ActionType) and the name of the template to use (based on availability)
+func actionToTemplate(issue *issues_model.Issue, actionType activities_model.ActionType,
+ commentType issues_model.CommentType, reviewType issues_model.ReviewType,
+) (typeName, name, template string) {
+ if issue.IsPull {
+ typeName = "pull"
+ } else {
+ typeName = "issue"
+ }
+ switch actionType {
+ case activities_model.ActionCreateIssue, activities_model.ActionCreatePullRequest:
+ name = "new"
+ case activities_model.ActionCommentIssue, activities_model.ActionCommentPull:
+ name = "comment"
+ case activities_model.ActionCloseIssue, activities_model.ActionClosePullRequest:
+ name = "close"
+ case activities_model.ActionReopenIssue, activities_model.ActionReopenPullRequest:
+ name = "reopen"
+ case activities_model.ActionMergePullRequest, activities_model.ActionAutoMergePullRequest:
+ name = "merge"
+ case activities_model.ActionPullReviewDismissed:
+ name = "review_dismissed"
+ case activities_model.ActionPullRequestReadyForReview:
+ name = "ready_for_review"
+ default:
+ switch commentType {
+ case issues_model.CommentTypeReview:
+ switch reviewType {
+ case issues_model.ReviewTypeApprove:
+ name = "approve"
+ case issues_model.ReviewTypeReject:
+ name = "reject"
+ default:
+ name = "review"
+ }
+ case issues_model.CommentTypeCode:
+ name = "code"
+ case issues_model.CommentTypeAssignees:
+ name = "assigned"
+ case issues_model.CommentTypePullRequestPush:
+ name = "push"
+ default:
+ name = "default"
+ }
+ }
+
+ template = typeName + "/" + name
+ ok := bodyTemplates.Lookup(template) != nil
+ if !ok && typeName != "issue" {
+ template = "issue/" + name
+ ok = bodyTemplates.Lookup(template) != nil
+ }
+ if !ok {
+ template = typeName + "/default"
+ ok = bodyTemplates.Lookup(template) != nil
+ }
+ if !ok {
+ template = "issue/default"
+ }
+ return typeName, name, template
+}
+
+func generateMessageIDForIssue(issue *issues_model.Issue, comment *issues_model.Comment, actionType activities_model.ActionType) string {
+ var path string
+ if issue.IsPull {
+ path = "pulls"
+ } else {
+ path = "issues"
+ }
+
+ var extra string
+ if comment != nil {
+ extra = fmt.Sprintf("/comment/%d", comment.ID)
+ } else {
+ switch actionType {
+ case activities_model.ActionCloseIssue, activities_model.ActionClosePullRequest:
+ extra = fmt.Sprintf("/close/%d", time.Now().UnixNano()/1e6)
+ case activities_model.ActionReopenIssue, activities_model.ActionReopenPullRequest:
+ extra = fmt.Sprintf("/reopen/%d", time.Now().UnixNano()/1e6)
+ case activities_model.ActionMergePullRequest, activities_model.ActionAutoMergePullRequest:
+ extra = fmt.Sprintf("/merge/%d", time.Now().UnixNano()/1e6)
+ case activities_model.ActionPullRequestReadyForReview:
+ extra = fmt.Sprintf("/ready/%d", time.Now().UnixNano()/1e6)
+ }
+ }
+
+ return fmt.Sprintf("<%s/%s/%d%s@%s>", issue.Repo.FullName(), path, issue.Index, extra, setting.Domain)
+}
+
+func generateAdditionalHeaders(ctx *mailComment, reason string, recipient *user_model.User) map[string]string {
+ repo := ctx.Issue.Repo
+
+ return map[string]string{
+ // https://datatracker.ietf.org/doc/html/rfc2919
+ "List-ID": fmt.Sprintf("%s <%s.%s.%s>", repo.FullName(), repo.Name, repo.OwnerName, setting.Domain),
+
+ // https://datatracker.ietf.org/doc/html/rfc2369
+ "List-Archive": fmt.Sprintf("<%s>", repo.HTMLURL()),
+
+ "X-Mailer": "Gitea",
+ "X-Gitea-Reason": reason,
+ "X-Gitea-Sender": ctx.Doer.Name,
+ "X-Gitea-Recipient": recipient.Name,
+ "X-Gitea-Recipient-Address": recipient.Email,
+ "X-Gitea-Repository": repo.Name,
+ "X-Gitea-Repository-Path": repo.FullName(),
+ "X-Gitea-Repository-Link": repo.HTMLURL(),
+ "X-Gitea-Issue-ID": strconv.FormatInt(ctx.Issue.Index, 10),
+ "X-Gitea-Issue-Link": ctx.Issue.HTMLURL(),
+
+ "X-GitHub-Reason": reason,
+ "X-GitHub-Sender": ctx.Doer.Name,
+ "X-GitHub-Recipient": recipient.Name,
+ "X-GitHub-Recipient-Address": recipient.Email,
+
+ "X-GitLab-NotificationReason": reason,
+ "X-GitLab-Project": repo.Name,
+ "X-GitLab-Project-Path": repo.FullName(),
+ "X-GitLab-Issue-IID": strconv.FormatInt(ctx.Issue.Index, 10),
+ }
+}
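
As a hedged sketch of how the threading scheme above is used (the helper is hypothetical; composeIssueCommentMessages does the equivalent inline), the comment-specific Message-ID is paired with the comment-less ID as the In-Reply-To/References anchor so mail clients thread all notifications for one issue:

package mailer // illustrative sketch, not part of this change

import (
	activities_model "code.gitea.io/gitea/models/activities"
	issues_model "code.gitea.io/gitea/models/issues"
)

// threadingIDs returns the per-message ID and the stable thread anchor, e.g.
// "<owner/repo/pulls/3/comment/42@example.com>" and "<owner/repo/pulls/3@example.com>"
// (the domain comes from setting.Domain).
func threadingIDs(issue *issues_model.Issue, comment *issues_model.Comment) (msgID, reference string) {
	msgID = generateMessageIDForIssue(issue, comment, activities_model.ActionType(0))
	reference = generateMessageIDForIssue(issue, nil, activities_model.ActionType(0))
	return msgID, reference
}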
diff --git a/services/mailer/mail_release.go b/services/mailer/mail_release.go
index 31316b0053..bfff73c39c 100644
--- a/services/mailer/mail_release.go
+++ b/services/mailer/mail_release.go
@@ -6,6 +6,7 @@ package mailer
import (
"bytes"
"context"
+ "fmt"
"code.gitea.io/gitea/models/renderhelper"
repo_model "code.gitea.io/gitea/models/repo"
@@ -18,9 +19,11 @@ import (
sender_service "code.gitea.io/gitea/services/mailer/sender"
)
-const (
- tplNewReleaseMail templates.TplName = "release"
-)
+const tplNewReleaseMail templates.TplName = "release"
+
+func generateMessageIDForRelease(release *repo_model.Release) string {
+ return fmt.Sprintf("<%s/releases/%d@%s>", release.Repo.FullName(), release.ID, setting.Domain)
+}
// MailNewRelease send new release notify to all repo watchers.
func MailNewRelease(ctx context.Context, rel *repo_model.Release) {
diff --git a/services/mailer/mail_repo.go b/services/mailer/mail_repo.go
index 5f80654bcd..b6b2d5ca07 100644
--- a/services/mailer/mail_repo.go
+++ b/services/mailer/mail_repo.go
@@ -12,10 +12,13 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/templates"
"code.gitea.io/gitea/modules/translation"
sender_service "code.gitea.io/gitea/services/mailer/sender"
)
+const mailRepoTransferNotify templates.TplName = "notify/repo_transfer"
+
// SendRepoTransferNotifyMail triggers a notification e-mail when a pending repository transfer was created
func SendRepoTransferNotifyMail(ctx context.Context, doer, newOwner *user_model.User, repo *repo_model.Repository) error {
if setting.MailService == nil {
diff --git a/services/mailer/mail_team_invite.go b/services/mailer/mail_team_invite.go
index 5ca44442f3..1fbade7e23 100644
--- a/services/mailer/mail_team_invite.go
+++ b/services/mailer/mail_team_invite.go
@@ -18,9 +18,7 @@ import (
sender_service "code.gitea.io/gitea/services/mailer/sender"
)
-const (
- tplTeamInviteMail templates.TplName = "team_invite"
-)
+const tplTeamInviteMail templates.TplName = "team_invite"
// MailTeamInvite sends team invites
func MailTeamInvite(ctx context.Context, inviter *user_model.User, team *org_model.Team, invite *org_model.TeamInvite) error {
diff --git a/services/mailer/mail_test.go b/services/mailer/mail_test.go
index 36cef486c9..7a47cf3876 100644
--- a/services/mailer/mail_test.go
+++ b/services/mailer/mail_test.go
@@ -6,6 +6,7 @@ package mailer
import (
"bytes"
"context"
+ "encoding/base64"
"fmt"
"html/template"
"io"
@@ -23,9 +24,13 @@ import (
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/markup"
"code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/test"
+ "code.gitea.io/gitea/services/attachment"
sender_service "code.gitea.io/gitea/services/mailer/sender"
"github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
)
const subjectTpl = `
@@ -53,22 +58,44 @@ const bodyTpl = `
func prepareMailerTest(t *testing.T) (doer *user_model.User, repo *repo_model.Repository, issue *issues_model.Issue, comment *issues_model.Comment) {
assert.NoError(t, unittest.PrepareTestDatabase())
- mailService := setting.Mailer{
- From: "test@gitea.com",
- }
-
- setting.MailService = &mailService
+ setting.MailService = &setting.Mailer{From: "test@gitea.com"}
setting.Domain = "localhost"
+ setting.AppURL = "https://try.gitea.io/"
doer = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
repo = unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1, Owner: doer})
issue = unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1, Repo: repo, Poster: doer})
- assert.NoError(t, issue.LoadRepo(db.DefaultContext))
comment = unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{ID: 2, Issue: issue})
+ require.NoError(t, issue.LoadRepo(db.DefaultContext))
return doer, repo, issue, comment
}
-func TestComposeIssueCommentMessage(t *testing.T) {
+func prepareMailerBase64Test(t *testing.T) (doer *user_model.User, repo *repo_model.Repository, issue *issues_model.Issue, att1, att2 *repo_model.Attachment) {
+ user, repo, issue, comment := prepareMailerTest(t)
+ setting.MailService.EmbedAttachmentImages = true
+
+ att1, err := attachment.NewAttachment(t.Context(), &repo_model.Attachment{
+ RepoID: repo.ID,
+ IssueID: issue.ID,
+ UploaderID: user.ID,
+ CommentID: comment.ID,
+ Name: "test.png",
+ }, bytes.NewReader([]byte("\x89\x50\x4e\x47\x0d\x0a\x1a\x0a")), 8)
+ require.NoError(t, err)
+
+ att2, err = attachment.NewAttachment(t.Context(), &repo_model.Attachment{
+ RepoID: repo.ID,
+ IssueID: issue.ID,
+ UploaderID: user.ID,
+ CommentID: comment.ID,
+ Name: "test.png",
+ }, bytes.NewReader([]byte("\x89\x50\x4e\x47\x0d\x0a\x1a\x0a"+strings.Repeat("\x00", 1024))), 8+1024)
+ require.NoError(t, err)
+
+ return user, repo, issue, att1, att2
+}
+
+func TestComposeIssueComment(t *testing.T) {
doer, _, issue, comment := prepareMailerTest(t)
markup.Init(&markup.RenderHelperFuncs{
@@ -84,9 +111,8 @@ func TestComposeIssueCommentMessage(t *testing.T) {
bodyTemplates = template.Must(template.New("issue/comment").Parse(bodyTpl))
recipients := []*user_model.User{{Name: "Test", Email: "test@gitea.com"}, {Name: "Test2", Email: "test2@gitea.com"}}
- msgs, err := composeIssueCommentMessages(&mailCommentContext{
- Context: context.TODO(), // TODO: use a correct context
- Issue: issue, Doer: doer, ActionType: activities_model.ActionCommentIssue,
+ msgs, err := composeIssueCommentMessages(t.Context(), &mailComment{
+ Issue: issue, Doer: doer, ActionType: activities_model.ActionCommentIssue,
Content: fmt.Sprintf("test @%s %s#%d body", doer.Name, issue.Repo.FullName(), issue.Index),
Comment: comment,
}, "en-US", recipients, false, "issue comment")
@@ -109,7 +135,8 @@ func TestComposeIssueCommentMessage(t *testing.T) {
assert.Len(t, gomailMsg.GetGenHeader("List-Unsubscribe"), 2) // url + mailto
var buf bytes.Buffer
- gomailMsg.WriteTo(&buf)
+ _, err = gomailMsg.WriteTo(&buf)
+ require.NoError(t, err)
b, err := io.ReadAll(quotedprintable.NewReader(&buf))
assert.NoError(t, err)
@@ -123,6 +150,22 @@ func TestComposeIssueCommentMessage(t *testing.T) {
assert.Contains(t, string(b), fmt.Sprintf(`href="%s"`, issue.HTMLURL()))
}
+func TestMailMentionsComment(t *testing.T) {
+ doer, _, issue, comment := prepareMailerTest(t)
+ comment.Poster = doer
+ subjectTemplates = texttmpl.Must(texttmpl.New("issue/comment").Parse(subjectTpl))
+ bodyTemplates = template.Must(template.New("issue/comment").Parse(bodyTpl))
+ mails := 0
+
+ defer test.MockVariableValue(&SendAsync, func(msgs ...*sender_service.Message) {
+ mails = len(msgs)
+ })()
+
+ err := MailParticipantsComment(t.Context(), comment, activities_model.ActionCommentIssue, issue, []*user_model.User{})
+ require.NoError(t, err)
+ assert.Equal(t, 3, mails)
+}
+
func TestComposeIssueMessage(t *testing.T) {
doer, _, issue, _ := prepareMailerTest(t)
@@ -130,9 +173,8 @@ func TestComposeIssueMessage(t *testing.T) {
bodyTemplates = template.Must(template.New("issue/new").Parse(bodyTpl))
recipients := []*user_model.User{{Name: "Test", Email: "test@gitea.com"}, {Name: "Test2", Email: "test2@gitea.com"}}
- msgs, err := composeIssueCommentMessages(&mailCommentContext{
- Context: context.TODO(), // TODO: use a correct context
- Issue: issue, Doer: doer, ActionType: activities_model.ActionCreateIssue,
+ msgs, err := composeIssueCommentMessages(t.Context(), &mailComment{
+ Issue: issue, Doer: doer, ActionType: activities_model.ActionCreateIssue,
Content: "test body",
}, "en-US", recipients, false, "issue create")
assert.NoError(t, err)
@@ -177,32 +219,28 @@ func TestTemplateSelection(t *testing.T) {
assert.Contains(t, wholemsg, expBody)
}
- msg := testComposeIssueCommentMessage(t, &mailCommentContext{
- Context: context.TODO(), // TODO: use a correct context
- Issue: issue, Doer: doer, ActionType: activities_model.ActionCreateIssue,
+ msg := testComposeIssueCommentMessage(t, &mailComment{
+ Issue: issue, Doer: doer, ActionType: activities_model.ActionCreateIssue,
Content: "test body",
}, recipients, false, "TestTemplateSelection")
expect(t, msg, "issue/new/subject", "issue/new/body")
- msg = testComposeIssueCommentMessage(t, &mailCommentContext{
- Context: context.TODO(), // TODO: use a correct context
- Issue: issue, Doer: doer, ActionType: activities_model.ActionCommentIssue,
+ msg = testComposeIssueCommentMessage(t, &mailComment{
+ Issue: issue, Doer: doer, ActionType: activities_model.ActionCommentIssue,
Content: "test body", Comment: comment,
}, recipients, false, "TestTemplateSelection")
expect(t, msg, "issue/default/subject", "issue/default/body")
pull := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 2, Repo: repo, Poster: doer})
comment = unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{ID: 4, Issue: pull})
- msg = testComposeIssueCommentMessage(t, &mailCommentContext{
- Context: context.TODO(), // TODO: use a correct context
- Issue: pull, Doer: doer, ActionType: activities_model.ActionCommentPull,
+ msg = testComposeIssueCommentMessage(t, &mailComment{
+ Issue: pull, Doer: doer, ActionType: activities_model.ActionCommentPull,
Content: "test body", Comment: comment,
}, recipients, false, "TestTemplateSelection")
expect(t, msg, "pull/comment/subject", "pull/comment/body")
- msg = testComposeIssueCommentMessage(t, &mailCommentContext{
- Context: context.TODO(), // TODO: use a correct context
- Issue: issue, Doer: doer, ActionType: activities_model.ActionCloseIssue,
+ msg = testComposeIssueCommentMessage(t, &mailComment{
+ Issue: issue, Doer: doer, ActionType: activities_model.ActionCloseIssue,
Content: "test body", Comment: comment,
}, recipients, false, "TestTemplateSelection")
expect(t, msg, "Re: [user2/repo1] issue1 (#1)", "issue/close/body")
@@ -219,9 +257,8 @@ func TestTemplateServices(t *testing.T) {
bodyTemplates = template.Must(template.New("issue/default").Parse(tplBody))
recipients := []*user_model.User{{Name: "Test", Email: "test@gitea.com"}}
- msg := testComposeIssueCommentMessage(t, &mailCommentContext{
- Context: context.TODO(), // TODO: use a correct context
- Issue: issue, Doer: doer, ActionType: actionType,
+ msg := testComposeIssueCommentMessage(t, &mailComment{
+ Issue: issue, Doer: doer, ActionType: actionType,
Content: "test body", Comment: comment,
}, recipients, fromMention, "TestTemplateServices")
@@ -253,8 +290,8 @@ func TestTemplateServices(t *testing.T) {
"//Re: //")
}
-func testComposeIssueCommentMessage(t *testing.T, ctx *mailCommentContext, recipients []*user_model.User, fromMention bool, info string) *sender_service.Message {
- msgs, err := composeIssueCommentMessages(ctx, "en-US", recipients, fromMention, info)
+func testComposeIssueCommentMessage(t *testing.T, ctx *mailComment, recipients []*user_model.User, fromMention bool, info string) *sender_service.Message {
+ msgs, err := composeIssueCommentMessages(t.Context(), ctx, "en-US", recipients, fromMention, info)
assert.NoError(t, err)
assert.Len(t, msgs, 1)
return msgs[0]
@@ -263,10 +300,10 @@ func testComposeIssueCommentMessage(t *testing.T, ctx *mailCommentContext, recip
func TestGenerateAdditionalHeaders(t *testing.T) {
doer, _, issue, _ := prepareMailerTest(t)
- ctx := &mailCommentContext{Context: context.TODO() /* TODO: use a correct context */, Issue: issue, Doer: doer}
+ comment := &mailComment{Issue: issue, Doer: doer}
recipient := &user_model.User{Name: "test", Email: "test@gitea.com"}
- headers := generateAdditionalHeaders(ctx, "dummy-reason", recipient)
+ headers := generateAdditionalHeaders(comment, "dummy-reason", recipient)
expected := map[string]string{
"List-ID": "user2/repo1 <repo1.user2.localhost>",
@@ -404,9 +441,9 @@ func TestGenerateMessageIDForRelease(t *testing.T) {
}
func TestFromDisplayName(t *testing.T) {
- template, err := texttmpl.New("mailFrom").Parse("{{ .DisplayName }}")
+ tmpl, err := texttmpl.New("mailFrom").Parse("{{ .DisplayName }}")
assert.NoError(t, err)
- setting.MailService = &setting.Mailer{FromDisplayNameFormatTemplate: template}
+ setting.MailService = &setting.Mailer{FromDisplayNameFormatTemplate: tmpl}
defer func() { setting.MailService = nil }()
tests := []struct {
@@ -430,14 +467,14 @@ func TestFromDisplayName(t *testing.T) {
t.Run(tc.userDisplayName, func(t *testing.T) {
user := &user_model.User{FullName: tc.userDisplayName, Name: "tmp"}
got := fromDisplayName(user)
- assert.EqualValues(t, tc.fromDisplayName, got)
+ assert.Equal(t, tc.fromDisplayName, got)
})
}
t.Run("template with all available vars", func(t *testing.T) {
- template, err = texttmpl.New("mailFrom").Parse("{{ .DisplayName }} (by {{ .AppName }} on [{{ .Domain }}])")
+ tmpl, err = texttmpl.New("mailFrom").Parse("{{ .DisplayName }} (by {{ .AppName }} on [{{ .Domain }}])")
assert.NoError(t, err)
- setting.MailService = &setting.Mailer{FromDisplayNameFormatTemplate: template}
+ setting.MailService = &setting.Mailer{FromDisplayNameFormatTemplate: tmpl}
oldAppName := setting.AppName
setting.AppName = "Code IT"
oldDomain := setting.Domain
@@ -447,6 +484,74 @@ func TestFromDisplayName(t *testing.T) {
setting.Domain = oldDomain
}()
- assert.EqualValues(t, "Mister X (by Code IT on [code.it])", fromDisplayName(&user_model.User{FullName: "Mister X", Name: "tmp"}))
+ assert.Equal(t, "Mister X (by Code IT on [code.it])", fromDisplayName(&user_model.User{FullName: "Mister X", Name: "tmp"}))
+ })
+}
+
+func TestEmbedBase64Images(t *testing.T) {
+ user, repo, issue, att1, att2 := prepareMailerBase64Test(t)
+ // comment := &mailComment{Issue: issue, Doer: user}
+
+ imgExternalURL := "https://via.placeholder.com/10"
+ imgExternalImg := fmt.Sprintf(`<img src="%s"/>`, imgExternalURL)
+
+ att1URL := setting.AppURL + repo.Owner.Name + "/" + repo.Name + "/attachments/" + att1.UUID
+ att1Img := fmt.Sprintf(`<img src="%s"/>`, att1URL)
+ att1Base64 := "data:image/png;base64,iVBORw0KGgo="
+ att1ImgBase64 := fmt.Sprintf(`<img src="%s"/>`, att1Base64)
+
+ att2URL := setting.AppURL + repo.Owner.Name + "/" + repo.Name + "/attachments/" + att2.UUID
+ att2Img := fmt.Sprintf(`<img src="%s"/>`, att2URL)
+ att2File, err := storage.Attachments.Open(att2.RelativePath())
+ require.NoError(t, err)
+ defer att2File.Close()
+ att2Bytes, err := io.ReadAll(att2File)
+ require.NoError(t, err)
+ require.Greater(t, len(att2Bytes), 1024)
+ att2Base64 := "data:image/png;base64," + base64.StdEncoding.EncodeToString(att2Bytes)
+ att2ImgBase64 := fmt.Sprintf(`<img src="%s"/>`, att2Base64)
+
+ t.Run("ComposeMessage", func(t *testing.T) {
+ subjectTemplates = texttmpl.Must(texttmpl.New("issue/new").Parse(subjectTpl))
+ bodyTemplates = template.Must(template.New("issue/new").Parse(bodyTpl))
+
+ issue.Content = fmt.Sprintf(`MSG-BEFORE <image src="attachments/%s"> MSG-AFTER`, att1.UUID)
+ require.NoError(t, issues_model.UpdateIssueCols(t.Context(), issue, "content"))
+
+ recipients := []*user_model.User{{Name: "Test", Email: "test@gitea.com"}}
+ msgs, err := composeIssueCommentMessages(t.Context(), &mailComment{
+ Issue: issue,
+ Doer: user,
+ ActionType: activities_model.ActionCreateIssue,
+ Content: issue.Content,
+ }, "en-US", recipients, false, "issue create")
+ require.NoError(t, err)
+
+ mailBody := msgs[0].Body
+ assert.Regexp(t, `MSG-BEFORE <a[^>]+><img src="data:image/png;base64,iVBORw0KGgo="/></a> MSG-AFTER`, mailBody)
+ })
+
+ t.Run("EmbedInstanceImageSkipExternalImage", func(t *testing.T) {
+ mailBody := "<html><head></head><body><p>Test1</p>" + imgExternalImg + "<p>Test2</p>" + att1Img + "<p>Test3</p></body></html>"
+ expectedMailBody := "<html><head></head><body><p>Test1</p>" + imgExternalImg + "<p>Test2</p>" + att1ImgBase64 + "<p>Test3</p></body></html>"
+ b64embedder := newMailAttachmentBase64Embedder(user, repo, 1024)
+ resultMailBody, err := b64embedder.Base64InlineImages(t.Context(), template.HTML(mailBody))
+ require.NoError(t, err)
+ assert.Equal(t, expectedMailBody, string(resultMailBody))
+ })
+
+ t.Run("LimitedEmailBodySize", func(t *testing.T) {
+ mailBody := fmt.Sprintf("<html><head></head><body>%s%s</body></html>", att1Img, att2Img)
+ b64embedder := newMailAttachmentBase64Embedder(user, repo, 1024)
+ resultMailBody, err := b64embedder.Base64InlineImages(t.Context(), template.HTML(mailBody))
+ require.NoError(t, err)
+ expected := fmt.Sprintf("<html><head></head><body>%s%s</body></html>", att1ImgBase64, att2Img)
+ assert.Equal(t, expected, string(resultMailBody))
+
+ b64embedder = newMailAttachmentBase64Embedder(user, repo, 4096)
+ resultMailBody, err = b64embedder.Base64InlineImages(t.Context(), template.HTML(mailBody))
+ require.NoError(t, err)
+ expected = fmt.Sprintf("<html><head></head><body>%s%s</body></html>", att1ImgBase64, att2ImgBase64)
+ assert.Equal(t, expected, string(resultMailBody))
})
}
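A rough usage sketch of the embedder exercised in TestEmbedBase64Images. The constructor and Base64InlineImages calls mirror the tests above; the surrounding helper, its return-type conversion, and the 1024-byte budget are illustrative assumptions, not part of this change.

```go
// Sketch only (not part of this change): how the embedder exercised above
// might be used inside the mailer package.
package mailer

import (
	"context"
	"html/template"

	repo_model "code.gitea.io/gitea/models/repo"
	user_model "code.gitea.io/gitea/models/user"
)

// inlineAttachmentImages is a hypothetical helper; newMailAttachmentBase64Embedder
// and Base64InlineImages are the calls shown in the tests above.
func inlineAttachmentImages(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, body template.HTML) template.HTML {
	embedder := newMailAttachmentBase64Embedder(doer, repo, 1024) // 1024-byte budget, as in the tests
	embedded, err := embedder.Base64InlineImages(ctx, body)
	if err != nil {
		return body // on error, keep the original attachment URLs
	}
	return template.HTML(embedded)
}
```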
diff --git a/services/mailer/mail_user.go b/services/mailer/mail_user.go
new file mode 100644
index 0000000000..5a200a5fa7
--- /dev/null
+++ b/services/mailer/mail_user.go
@@ -0,0 +1,161 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package mailer
+
+import (
+ "bytes"
+ "fmt"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/templates"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/translation"
+ sender_service "code.gitea.io/gitea/services/mailer/sender"
+)
+
+const (
+ mailAuthActivate templates.TplName = "auth/activate"
+ mailAuthActivateEmail templates.TplName = "auth/activate_email"
+ mailAuthResetPassword templates.TplName = "auth/reset_passwd"
+ mailAuthRegisterNotify templates.TplName = "auth/register_notify"
+ mailNotifyCollaborator templates.TplName = "notify/collaborator"
+)
+
+// sendUserMail sends a mail to the user
+func sendUserMail(language string, u *user_model.User, tpl templates.TplName, code, subject, info string) {
+ locale := translation.NewLocale(language)
+ data := map[string]any{
+ "locale": locale,
+ "DisplayName": u.DisplayName(),
+ "ActiveCodeLives": timeutil.MinutesToFriendly(setting.Service.ActiveCodeLives, locale),
+ "ResetPwdCodeLives": timeutil.MinutesToFriendly(setting.Service.ResetPwdCodeLives, locale),
+ "Code": code,
+ "Language": locale.Language(),
+ }
+
+ var content bytes.Buffer
+
+ if err := bodyTemplates.ExecuteTemplate(&content, string(tpl), data); err != nil {
+ log.Error("Template: %v", err)
+ return
+ }
+
+ msg := sender_service.NewMessage(u.EmailTo(), subject, content.String())
+ msg.Info = fmt.Sprintf("UID: %d, %s", u.ID, info)
+
+ SendAsync(msg)
+}
+
+// SendActivateAccountMail sends an activation mail to the user (new user registration)
+func SendActivateAccountMail(locale translation.Locale, u *user_model.User) {
+ if setting.MailService == nil {
+ // No mail service configured
+ return
+ }
+ opts := &user_model.TimeLimitCodeOptions{Purpose: user_model.TimeLimitCodeActivateAccount}
+ sendUserMail(locale.Language(), u, mailAuthActivate, user_model.GenerateUserTimeLimitCode(opts, u), locale.TrString("mail.activate_account"), "activate account")
+}
+
+// SendResetPasswordMail sends a password reset mail to the user
+func SendResetPasswordMail(u *user_model.User) {
+ if setting.MailService == nil {
+ // No mail service configured
+ return
+ }
+ locale := translation.NewLocale(u.Language)
+ opts := &user_model.TimeLimitCodeOptions{Purpose: user_model.TimeLimitCodeResetPassword}
+ sendUserMail(u.Language, u, mailAuthResetPassword, user_model.GenerateUserTimeLimitCode(opts, u), locale.TrString("mail.reset_password"), "recover account")
+}
+
+// SendActivateEmailMail sends confirmation email to confirm new email address
+func SendActivateEmailMail(u *user_model.User, email string) {
+ if setting.MailService == nil {
+ // No mail service configured
+ return
+ }
+ locale := translation.NewLocale(u.Language)
+ opts := &user_model.TimeLimitCodeOptions{Purpose: user_model.TimeLimitCodeActivateEmail, NewEmail: email}
+ data := map[string]any{
+ "locale": locale,
+ "DisplayName": u.DisplayName(),
+ "ActiveCodeLives": timeutil.MinutesToFriendly(setting.Service.ActiveCodeLives, locale),
+ "Code": user_model.GenerateUserTimeLimitCode(opts, u),
+ "Email": email,
+ "Language": locale.Language(),
+ }
+
+ var content bytes.Buffer
+
+ if err := bodyTemplates.ExecuteTemplate(&content, string(mailAuthActivateEmail), data); err != nil {
+ log.Error("Template: %v", err)
+ return
+ }
+
+ msg := sender_service.NewMessage(email, locale.TrString("mail.activate_email"), content.String())
+ msg.Info = fmt.Sprintf("UID: %d, activate email", u.ID)
+
+ SendAsync(msg)
+}
+
+// SendRegisterNotifyMail triggers a notification e-mail after an administrator has created an account.
+func SendRegisterNotifyMail(u *user_model.User) {
+ if setting.MailService == nil || !u.IsActive {
+ // No mail service configured OR user is inactive
+ return
+ }
+ locale := translation.NewLocale(u.Language)
+
+ data := map[string]any{
+ "locale": locale,
+ "DisplayName": u.DisplayName(),
+ "Username": u.Name,
+ "Language": locale.Language(),
+ }
+
+ var content bytes.Buffer
+
+ if err := bodyTemplates.ExecuteTemplate(&content, string(mailAuthRegisterNotify), data); err != nil {
+ log.Error("Template: %v", err)
+ return
+ }
+
+ msg := sender_service.NewMessage(u.EmailTo(), locale.TrString("mail.register_notify", setting.AppName), content.String())
+ msg.Info = fmt.Sprintf("UID: %d, registration notify", u.ID)
+
+ SendAsync(msg)
+}
+
+// SendCollaboratorMail sends a mail notification to a new collaborator.
+func SendCollaboratorMail(u, doer *user_model.User, repo *repo_model.Repository) {
+ if setting.MailService == nil || !u.IsActive {
+ // No mail service configured OR the user is inactive
+ return
+ }
+ locale := translation.NewLocale(u.Language)
+ repoName := repo.FullName()
+
+ subject := locale.TrString("mail.repo.collaborator.added.subject", doer.DisplayName(), repoName)
+ data := map[string]any{
+ "locale": locale,
+ "Subject": subject,
+ "RepoName": repoName,
+ "Link": repo.HTMLURL(),
+ "Language": locale.Language(),
+ }
+
+ var content bytes.Buffer
+
+ if err := bodyTemplates.ExecuteTemplate(&content, string(mailNotifyCollaborator), data); err != nil {
+ log.Error("Template: %v", err)
+ return
+ }
+
+ msg := sender_service.NewMessage(u.EmailTo(), subject, content.String())
+ msg.Info = fmt.Sprintf("UID: %d, add collaborator", u.ID)
+
+ SendAsync(msg)
+}
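A hypothetical caller wiring the new helpers into a registration flow, as a sketch only. setting.Service.RegisterEmailConfirm is assumed to be the usual switch that requires e-mail confirmation; the handler itself is not Gitea code.

```go
// Sketch only (not part of this change).
package mailer

import (
	user_model "code.gitea.io/gitea/models/user"
	"code.gitea.io/gitea/modules/setting"
	"code.gitea.io/gitea/modules/translation"
)

// afterUserRegistered is a hypothetical caller of the helpers above.
func afterUserRegistered(locale translation.Locale, u *user_model.User) {
	if setting.Service.RegisterEmailConfirm {
		// The account must be confirmed first; send the activation code.
		SendActivateAccountMail(locale, u)
		return
	}
	// Already-active accounts just get the registration notification
	// (SendRegisterNotifyMail is a no-op for inactive users).
	SendRegisterNotifyMail(u)
}
```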
diff --git a/services/mailer/notify.go b/services/mailer/notify.go
index e48b5d399d..77c366fe31 100644
--- a/services/mailer/notify.go
+++ b/services/mailer/notify.go
@@ -12,6 +12,7 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/log"
+ issue_service "code.gitea.io/gitea/services/issue"
notify_service "code.gitea.io/gitea/services/notify"
)
@@ -30,15 +31,16 @@ func (m *mailNotifier) CreateIssueComment(ctx context.Context, doer *user_model.
issue *issues_model.Issue, comment *issues_model.Comment, mentions []*user_model.User,
) {
var act activities_model.ActionType
- if comment.Type == issues_model.CommentTypeClose {
+ switch comment.Type {
+ case issues_model.CommentTypeClose:
act = activities_model.ActionCloseIssue
- } else if comment.Type == issues_model.CommentTypeReopen {
+ case issues_model.CommentTypeReopen:
act = activities_model.ActionReopenIssue
- } else if comment.Type == issues_model.CommentTypeComment {
+ case issues_model.CommentTypeComment:
act = activities_model.ActionCommentIssue
- } else if comment.Type == issues_model.CommentTypeCode {
+ case issues_model.CommentTypeCode:
act = activities_model.ActionCommentIssue
- } else if comment.Type == issues_model.CommentTypePullRequestPush {
+ case issues_model.CommentTypePullRequestPush:
act = 0
}
@@ -94,11 +96,12 @@ func (m *mailNotifier) NewPullRequest(ctx context.Context, pr *issues_model.Pull
func (m *mailNotifier) PullRequestReview(ctx context.Context, pr *issues_model.PullRequest, r *issues_model.Review, comment *issues_model.Comment, mentions []*user_model.User) {
var act activities_model.ActionType
- if comment.Type == issues_model.CommentTypeClose {
+ switch comment.Type {
+ case issues_model.CommentTypeClose:
act = activities_model.ActionCloseIssue
- } else if comment.Type == issues_model.CommentTypeReopen {
+ case issues_model.CommentTypeReopen:
act = activities_model.ActionReopenIssue
- } else if comment.Type == issues_model.CommentTypeComment {
+ case issues_model.CommentTypeComment:
act = activities_model.ActionCommentPull
}
if err := MailParticipantsComment(ctx, comment, act, pr.Issue, mentions); err != nil {
@@ -169,7 +172,7 @@ func (m *mailNotifier) PullRequestPushCommits(ctx context.Context, doer *user_mo
log.Error("comment.Issue.PullRequest.LoadBaseRepo: %v", err)
return
}
- if err := comment.LoadPushCommits(ctx); err != nil {
+ if err := issue_service.LoadCommentPushCommits(ctx, comment); err != nil {
log.Error("comment.LoadPushCommits: %v", err)
}
m.CreateIssueComment(ctx, doer, comment.Issue.Repo, comment.Issue, comment, nil)
diff --git a/services/markup/renderhelper.go b/services/markup/renderhelper.go
index 4b9852b48b..ea494146a7 100644
--- a/services/markup/renderhelper.go
+++ b/services/markup/renderhelper.go
@@ -21,8 +21,8 @@ func FormalRenderHelperFuncs() *markup.RenderHelperFuncs {
return false
}
- giteaCtx, ok := ctx.(*gitea_context.Context)
- if !ok {
+ giteaCtx := gitea_context.GetWebContext(ctx)
+ if giteaCtx == nil {
// when using general context, use user's visibility to check
return mentionedUser.Visibility.IsPublic()
}
diff --git a/services/markup/renderhelper_codepreview.go b/services/markup/renderhelper_codepreview.go
index 170c70c409..d638af7ff0 100644
--- a/services/markup/renderhelper_codepreview.go
+++ b/services/markup/renderhelper_codepreview.go
@@ -36,8 +36,8 @@ func renderRepoFileCodePreview(ctx context.Context, opts markup.RenderCodePrevie
return "", err
}
- webCtx, ok := ctx.Value(gitea_context.WebContextKey).(*gitea_context.Context)
- if !ok {
+ webCtx := gitea_context.GetWebContext(ctx)
+ if webCtx == nil {
return "", fmt.Errorf("context is not a web context")
}
doer := webCtx.Doer
diff --git a/services/markup/renderhelper_issueicontitle.go b/services/markup/renderhelper_issueicontitle.go
index 53a508e908..fd8f9d43fa 100644
--- a/services/markup/renderhelper_issueicontitle.go
+++ b/services/markup/renderhelper_issueicontitle.go
@@ -18,8 +18,8 @@ import (
)
func renderRepoIssueIconTitle(ctx context.Context, opts markup.RenderIssueIconTitleOptions) (_ template.HTML, err error) {
- webCtx, ok := ctx.Value(gitea_context.WebContextKey).(*gitea_context.Context)
- if !ok {
+ webCtx := gitea_context.GetWebContext(ctx)
+ if webCtx == nil {
return "", fmt.Errorf("context is not a web context")
}
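A small sketch of the shared pattern behind these render-helper changes: GetWebContext returns nil when the context does not come from a web request, so callers can fall back instead of type-asserting on a context key. The helper below is hypothetical.

```go
// Sketch only: the pattern the render helpers above now share.
package markup

import (
	"context"

	gitea_context "code.gitea.io/gitea/services/context"
)

// doerNameOrEmpty is a hypothetical helper for illustration.
func doerNameOrEmpty(ctx context.Context) string {
	webCtx := gitea_context.GetWebContext(ctx)
	if webCtx == nil || webCtx.Doer == nil {
		return "" // general (non-web) context or anonymous visitor
	}
	return webCtx.Doer.Name
}
```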
diff --git a/services/markup/renderhelper_mention_test.go b/services/markup/renderhelper_mention_test.go
index c244fa3d21..d05fbb6fba 100644
--- a/services/markup/renderhelper_mention_test.go
+++ b/services/markup/renderhelper_mention_test.go
@@ -4,7 +4,6 @@
package markup
import (
- "context"
"net/http"
"net/http/httptest"
"testing"
@@ -32,10 +31,10 @@ func TestRenderHelperMention(t *testing.T) {
unittest.AssertCount(t, &user.User{Name: userNoSuch}, 0)
// when using general context, use user's visibility to check
- assert.True(t, FormalRenderHelperFuncs().IsUsernameMentionable(context.Background(), userPublic))
- assert.False(t, FormalRenderHelperFuncs().IsUsernameMentionable(context.Background(), userLimited))
- assert.False(t, FormalRenderHelperFuncs().IsUsernameMentionable(context.Background(), userPrivate))
- assert.False(t, FormalRenderHelperFuncs().IsUsernameMentionable(context.Background(), userNoSuch))
+ assert.True(t, FormalRenderHelperFuncs().IsUsernameMentionable(t.Context(), userPublic))
+ assert.False(t, FormalRenderHelperFuncs().IsUsernameMentionable(t.Context(), userLimited))
+ assert.False(t, FormalRenderHelperFuncs().IsUsernameMentionable(t.Context(), userPrivate))
+ assert.False(t, FormalRenderHelperFuncs().IsUsernameMentionable(t.Context(), userNoSuch))
// when using web context, use user.IsUserVisibleToViewer to check
req, err := http.NewRequest("GET", "/", nil)
diff --git a/services/migrations/codebase.go b/services/migrations/codebase.go
index 492fc908e9..880dd21497 100644
--- a/services/migrations/codebase.go
+++ b/services/migrations/codebase.go
@@ -66,7 +66,6 @@ type codebaseUser struct {
// from Codebase
type CodebaseDownloader struct {
base.NullDownloader
- ctx context.Context
client *http.Client
baseURL *url.URL
projectURL *url.URL
@@ -77,17 +76,11 @@ type CodebaseDownloader struct {
commitMap map[string]string
}
-// SetContext set context
-func (d *CodebaseDownloader) SetContext(ctx context.Context) {
- d.ctx = ctx
-}
-
// NewCodebaseDownloader creates a new downloader
-func NewCodebaseDownloader(ctx context.Context, projectURL *url.URL, project, repoName, username, password string) *CodebaseDownloader {
+func NewCodebaseDownloader(_ context.Context, projectURL *url.URL, project, repoName, username, password string) *CodebaseDownloader {
baseURL, _ := url.Parse("https://api3.codebasehq.com")
downloader := &CodebaseDownloader{
- ctx: ctx,
baseURL: baseURL,
projectURL: projectURL,
project: project,
@@ -127,7 +120,7 @@ func (d *CodebaseDownloader) FormatCloneURL(opts base.MigrateOptions, remoteAddr
return opts.CloneAddr, nil
}
-func (d *CodebaseDownloader) callAPI(endpoint string, parameter map[string]string, result any) error {
+func (d *CodebaseDownloader) callAPI(ctx context.Context, endpoint string, parameter map[string]string, result any) error {
u, err := d.baseURL.Parse(endpoint)
if err != nil {
return err
@@ -141,7 +134,7 @@ func (d *CodebaseDownloader) callAPI(endpoint string, parameter map[string]strin
u.RawQuery = query.Encode()
}
- req, err := http.NewRequestWithContext(d.ctx, "GET", u.String(), nil)
+ req, err := http.NewRequestWithContext(ctx, "GET", u.String(), nil)
if err != nil {
return err
}
@@ -158,7 +151,7 @@ func (d *CodebaseDownloader) callAPI(endpoint string, parameter map[string]strin
// GetRepoInfo returns repository information
// https://support.codebasehq.com/kb/projects
-func (d *CodebaseDownloader) GetRepoInfo() (*base.Repository, error) {
+func (d *CodebaseDownloader) GetRepoInfo(ctx context.Context) (*base.Repository, error) {
var rawRepository struct {
XMLName xml.Name `xml:"repository"`
Name string `xml:"name"`
@@ -169,6 +162,7 @@ func (d *CodebaseDownloader) GetRepoInfo() (*base.Repository, error) {
}
err := d.callAPI(
+ ctx,
fmt.Sprintf("/%s/%s", d.project, d.repoName),
nil,
&rawRepository,
@@ -187,7 +181,7 @@ func (d *CodebaseDownloader) GetRepoInfo() (*base.Repository, error) {
// GetMilestones returns milestones
// https://support.codebasehq.com/kb/tickets-and-milestones/milestones
-func (d *CodebaseDownloader) GetMilestones() ([]*base.Milestone, error) {
+func (d *CodebaseDownloader) GetMilestones(ctx context.Context) ([]*base.Milestone, error) {
var rawMilestones struct {
XMLName xml.Name `xml:"ticketing-milestone"`
Type string `xml:"type,attr"`
@@ -209,6 +203,7 @@ func (d *CodebaseDownloader) GetMilestones() ([]*base.Milestone, error) {
}
err := d.callAPI(
+ ctx,
fmt.Sprintf("/%s/milestones", d.project),
nil,
&rawMilestones,
@@ -245,7 +240,7 @@ func (d *CodebaseDownloader) GetMilestones() ([]*base.Milestone, error) {
// GetLabels returns labels
// https://support.codebasehq.com/kb/tickets-and-milestones/statuses-priorities-and-categories
-func (d *CodebaseDownloader) GetLabels() ([]*base.Label, error) {
+func (d *CodebaseDownloader) GetLabels(ctx context.Context) ([]*base.Label, error) {
var rawTypes struct {
XMLName xml.Name `xml:"ticketing-types"`
Type string `xml:"type,attr"`
@@ -259,6 +254,7 @@ func (d *CodebaseDownloader) GetLabels() ([]*base.Label, error) {
}
err := d.callAPI(
+ ctx,
fmt.Sprintf("/%s/tickets/types", d.project),
nil,
&rawTypes,
@@ -284,7 +280,7 @@ type codebaseIssueContext struct {
// GetIssues returns issues, limits are not supported
// https://support.codebasehq.com/kb/tickets-and-milestones
// https://support.codebasehq.com/kb/tickets-and-milestones/updating-tickets
-func (d *CodebaseDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, error) {
+func (d *CodebaseDownloader) GetIssues(ctx context.Context, _, _ int) ([]*base.Issue, bool, error) {
var rawIssues struct {
XMLName xml.Name `xml:"tickets"`
Type string `xml:"type,attr"`
@@ -324,6 +320,7 @@ func (d *CodebaseDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool,
}
err := d.callAPI(
+ ctx,
fmt.Sprintf("/%s/tickets", d.project),
nil,
&rawIssues,
@@ -358,6 +355,7 @@ func (d *CodebaseDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool,
} `xml:"ticket-note"`
}
err := d.callAPI(
+ ctx,
fmt.Sprintf("/%s/tickets/%d/notes", d.project, issue.TicketID.Value),
nil,
&notes,
@@ -370,7 +368,7 @@ func (d *CodebaseDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool,
if len(note.Content) == 0 {
continue
}
- poster := d.tryGetUser(note.UserID.Value)
+ poster := d.tryGetUser(ctx, note.UserID.Value)
comments = append(comments, &base.Comment{
IssueIndex: issue.TicketID.Value,
Index: note.ID.Value,
@@ -390,7 +388,7 @@ func (d *CodebaseDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool,
if issue.Status.TreatAsClosed.Value {
state = "closed"
}
- poster := d.tryGetUser(issue.ReporterID.Value)
+ poster := d.tryGetUser(ctx, issue.ReporterID.Value)
issues = append(issues, &base.Issue{
Title: issue.Summary,
Number: issue.TicketID.Value,
@@ -419,7 +417,7 @@ func (d *CodebaseDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool,
}
// GetComments returns comments
-func (d *CodebaseDownloader) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) {
+func (d *CodebaseDownloader) GetComments(_ context.Context, commentable base.Commentable) ([]*base.Comment, bool, error) {
context, ok := commentable.GetContext().(codebaseIssueContext)
if !ok {
return nil, false, fmt.Errorf("unexpected context: %+v", commentable.GetContext())
@@ -430,7 +428,7 @@ func (d *CodebaseDownloader) GetComments(commentable base.Commentable) ([]*base.
// GetPullRequests returns pull requests
// https://support.codebasehq.com/kb/repositories/merge-requests
-func (d *CodebaseDownloader) GetPullRequests(page, perPage int) ([]*base.PullRequest, bool, error) {
+func (d *CodebaseDownloader) GetPullRequests(ctx context.Context, page, perPage int) ([]*base.PullRequest, bool, error) {
var rawMergeRequests struct {
XMLName xml.Name `xml:"merge-requests"`
Type string `xml:"type,attr"`
@@ -443,6 +441,7 @@ func (d *CodebaseDownloader) GetPullRequests(page, perPage int) ([]*base.PullReq
}
err := d.callAPI(
+ ctx,
fmt.Sprintf("/%s/%s/merge_requests", d.project, d.repoName),
map[string]string{
"query": `"Target Project" is "` + d.repoName + `"`,
@@ -503,6 +502,7 @@ func (d *CodebaseDownloader) GetPullRequests(page, perPage int) ([]*base.PullReq
} `xml:"comments"`
}
err := d.callAPI(
+ ctx,
fmt.Sprintf("/%s/%s/merge_requests/%d", d.project, d.repoName, mr.ID.Value),
nil,
&rawMergeRequest,
@@ -531,7 +531,7 @@ func (d *CodebaseDownloader) GetPullRequests(page, perPage int) ([]*base.PullReq
}
continue
}
- poster := d.tryGetUser(comment.UserID.Value)
+ poster := d.tryGetUser(ctx, comment.UserID.Value)
comments = append(comments, &base.Comment{
IssueIndex: number,
Index: comment.ID.Value,
@@ -547,7 +547,7 @@ func (d *CodebaseDownloader) GetPullRequests(page, perPage int) ([]*base.PullReq
comments = append(comments, &base.Comment{})
}
- poster := d.tryGetUser(rawMergeRequest.UserID.Value)
+ poster := d.tryGetUser(ctx, rawMergeRequest.UserID.Value)
pullRequests = append(pullRequests, &base.PullRequest{
Title: rawMergeRequest.Subject,
@@ -563,12 +563,12 @@ func (d *CodebaseDownloader) GetPullRequests(page, perPage int) ([]*base.PullReq
MergedTime: mergedTime,
Head: base.PullRequestBranch{
Ref: rawMergeRequest.SourceRef,
- SHA: d.getHeadCommit(rawMergeRequest.SourceRef),
+ SHA: d.getHeadCommit(ctx, rawMergeRequest.SourceRef),
RepoName: d.repoName,
},
Base: base.PullRequestBranch{
Ref: rawMergeRequest.TargetRef,
- SHA: d.getHeadCommit(rawMergeRequest.TargetRef),
+ SHA: d.getHeadCommit(ctx, rawMergeRequest.TargetRef),
RepoName: d.repoName,
},
ForeignIndex: rawMergeRequest.ID.Value,
@@ -584,7 +584,7 @@ func (d *CodebaseDownloader) GetPullRequests(page, perPage int) ([]*base.PullReq
return pullRequests, true, nil
}
-func (d *CodebaseDownloader) tryGetUser(userID int64) *codebaseUser {
+func (d *CodebaseDownloader) tryGetUser(ctx context.Context, userID int64) *codebaseUser {
if len(d.userMap) == 0 {
var rawUsers struct {
XMLName xml.Name `xml:"users"`
@@ -602,6 +602,7 @@ func (d *CodebaseDownloader) tryGetUser(userID int64) *codebaseUser {
}
err := d.callAPI(
+ ctx,
"/users",
nil,
&rawUsers,
@@ -627,7 +628,7 @@ func (d *CodebaseDownloader) tryGetUser(userID int64) *codebaseUser {
return user
}
-func (d *CodebaseDownloader) getHeadCommit(ref string) string {
+func (d *CodebaseDownloader) getHeadCommit(ctx context.Context, ref string) string {
commitRef, ok := d.commitMap[ref]
if !ok {
var rawCommits struct {
@@ -638,6 +639,7 @@ func (d *CodebaseDownloader) getHeadCommit(ref string) string {
} `xml:"commit"`
}
err := d.callAPI(
+ ctx,
fmt.Sprintf("/%s/%s/commits/%s", d.project, d.repoName, ref),
nil,
&rawCommits,
diff --git a/services/migrations/codebase_test.go b/services/migrations/codebase_test.go
index 68721e0641..6cd52e5e59 100644
--- a/services/migrations/codebase_test.go
+++ b/services/migrations/codebase_test.go
@@ -4,7 +4,6 @@
package migrations
import (
- "context"
"net/url"
"os"
"testing"
@@ -30,9 +29,9 @@ func TestCodebaseDownloadRepo(t *testing.T) {
if cloneUser != "" {
u.User = url.UserPassword(cloneUser, clonePassword)
}
-
+ ctx := t.Context()
factory := &CodebaseDownloaderFactory{}
- downloader, err := factory.New(context.Background(), base.MigrateOptions{
+ downloader, err := factory.New(ctx, base.MigrateOptions{
CloneAddr: u.String(),
AuthUsername: apiUser,
AuthPassword: apiPassword,
@@ -40,7 +39,7 @@ func TestCodebaseDownloadRepo(t *testing.T) {
if err != nil {
t.Fatalf("Error creating Codebase downloader: %v", err)
}
- repo, err := downloader.GetRepoInfo()
+ repo, err := downloader.GetRepoInfo(ctx)
assert.NoError(t, err)
assertRepositoryEqual(t, &base.Repository{
Name: "test",
@@ -50,7 +49,7 @@ func TestCodebaseDownloadRepo(t *testing.T) {
OriginalURL: cloneAddr,
}, repo)
- milestones, err := downloader.GetMilestones()
+ milestones, err := downloader.GetMilestones(ctx)
assert.NoError(t, err)
assertMilestonesEqual(t, []*base.Milestone{
{
@@ -65,11 +64,11 @@ func TestCodebaseDownloadRepo(t *testing.T) {
},
}, milestones)
- labels, err := downloader.GetLabels()
+ labels, err := downloader.GetLabels(ctx)
assert.NoError(t, err)
assert.Len(t, labels, 4)
- issues, isEnd, err := downloader.GetIssues(1, 2)
+ issues, isEnd, err := downloader.GetIssues(ctx, 1, 2)
assert.NoError(t, err)
assert.True(t, isEnd)
assertIssuesEqual(t, []*base.Issue{
@@ -106,7 +105,7 @@ func TestCodebaseDownloadRepo(t *testing.T) {
},
}, issues)
- comments, _, err := downloader.GetComments(issues[0])
+ comments, _, err := downloader.GetComments(ctx, issues[0])
assert.NoError(t, err)
assertCommentsEqual(t, []*base.Comment{
{
@@ -119,7 +118,7 @@ func TestCodebaseDownloadRepo(t *testing.T) {
},
}, comments)
- prs, _, err := downloader.GetPullRequests(1, 1)
+ prs, _, err := downloader.GetPullRequests(ctx, 1, 1)
assert.NoError(t, err)
assertPullRequestsEqual(t, []*base.PullRequest{
{
@@ -144,7 +143,7 @@ func TestCodebaseDownloadRepo(t *testing.T) {
},
}, prs)
- rvs, err := downloader.GetReviews(prs[0])
+ rvs, err := downloader.GetReviews(ctx, prs[0])
assert.NoError(t, err)
assert.Empty(t, rvs)
}
diff --git a/services/migrations/codecommit.go b/services/migrations/codecommit.go
index fead527f5b..c45f9e5943 100644
--- a/services/migrations/codecommit.go
+++ b/services/migrations/codecommit.go
@@ -62,9 +62,8 @@ func (c *CodeCommitDownloaderFactory) GitServiceType() structs.GitServiceType {
return structs.CodeCommitService
}
-func NewCodeCommitDownloader(ctx context.Context, repoName, baseURL, accessKeyID, secretAccessKey, region string) *CodeCommitDownloader {
+func NewCodeCommitDownloader(_ context.Context, repoName, baseURL, accessKeyID, secretAccessKey, region string) *CodeCommitDownloader {
downloader := CodeCommitDownloader{
- ctx: ctx,
repoName: repoName,
baseURL: baseURL,
codeCommitClient: codecommit.New(codecommit.Options{
@@ -79,21 +78,15 @@ func NewCodeCommitDownloader(ctx context.Context, repoName, baseURL, accessKeyID
// CodeCommitDownloader implements a downloader for AWS CodeCommit
type CodeCommitDownloader struct {
base.NullDownloader
- ctx context.Context
codeCommitClient *codecommit.Client
repoName string
baseURL string
allPullRequestIDs []string
}
-// SetContext set context
-func (c *CodeCommitDownloader) SetContext(ctx context.Context) {
- c.ctx = ctx
-}
-
// GetRepoInfo returns a repository information
-func (c *CodeCommitDownloader) GetRepoInfo() (*base.Repository, error) {
- output, err := c.codeCommitClient.GetRepository(c.ctx, &codecommit.GetRepositoryInput{
+func (c *CodeCommitDownloader) GetRepoInfo(ctx context.Context) (*base.Repository, error) {
+ output, err := c.codeCommitClient.GetRepository(ctx, &codecommit.GetRepositoryInput{
RepositoryName: util.ToPointer(c.repoName),
})
if err != nil {
@@ -117,14 +110,14 @@ func (c *CodeCommitDownloader) GetRepoInfo() (*base.Repository, error) {
}
// GetComments returns comments of an issue or PR
-func (c *CodeCommitDownloader) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) {
+func (c *CodeCommitDownloader) GetComments(ctx context.Context, commentable base.Commentable) ([]*base.Comment, bool, error) {
var (
nextToken *string
comments []*base.Comment
)
for {
- resp, err := c.codeCommitClient.GetCommentsForPullRequest(c.ctx, &codecommit.GetCommentsForPullRequestInput{
+ resp, err := c.codeCommitClient.GetCommentsForPullRequest(ctx, &codecommit.GetCommentsForPullRequestInput{
NextToken: nextToken,
PullRequestId: util.ToPointer(strconv.FormatInt(commentable.GetForeignIndex(), 10)),
})
@@ -155,8 +148,8 @@ func (c *CodeCommitDownloader) GetComments(commentable base.Commentable) ([]*bas
}
// GetPullRequests returns pull requests according page and perPage
-func (c *CodeCommitDownloader) GetPullRequests(page, perPage int) ([]*base.PullRequest, bool, error) {
- allPullRequestIDs, err := c.getAllPullRequestIDs()
+func (c *CodeCommitDownloader) GetPullRequests(ctx context.Context, page, perPage int) ([]*base.PullRequest, bool, error) {
+ allPullRequestIDs, err := c.getAllPullRequestIDs(ctx)
if err != nil {
return nil, false, err
}
@@ -170,7 +163,7 @@ func (c *CodeCommitDownloader) GetPullRequests(page, perPage int) ([]*base.PullR
prs := make([]*base.PullRequest, 0, len(batch))
for _, id := range batch {
- output, err := c.codeCommitClient.GetPullRequest(c.ctx, &codecommit.GetPullRequestInput{
+ output, err := c.codeCommitClient.GetPullRequest(ctx, &codecommit.GetPullRequestInput{
PullRequestId: util.ToPointer(id),
})
if err != nil {
@@ -231,7 +224,7 @@ func (c *CodeCommitDownloader) FormatCloneURL(opts MigrateOptions, remoteAddr st
return u.String(), nil
}
-func (c *CodeCommitDownloader) getAllPullRequestIDs() ([]string, error) {
+func (c *CodeCommitDownloader) getAllPullRequestIDs(ctx context.Context) ([]string, error) {
if len(c.allPullRequestIDs) > 0 {
return c.allPullRequestIDs, nil
}
@@ -242,7 +235,7 @@ func (c *CodeCommitDownloader) getAllPullRequestIDs() ([]string, error) {
)
for {
- output, err := c.codeCommitClient.ListPullRequests(c.ctx, &codecommit.ListPullRequestsInput{
+ output, err := c.codeCommitClient.ListPullRequests(ctx, &codecommit.ListPullRequestsInput{
RepositoryName: util.ToPointer(c.repoName),
NextToken: nextToken,
})
diff --git a/services/migrations/dump.go b/services/migrations/dump.go
index 07812002af..b4ca1e41e0 100644
--- a/services/migrations/dump.go
+++ b/services/migrations/dump.go
@@ -32,7 +32,6 @@ var _ base.Uploader = &RepositoryDumper{}
// RepositoryDumper implements an Uploader to the local directory
type RepositoryDumper struct {
- ctx context.Context
baseDir string
repoOwner string
repoName string
@@ -56,7 +55,6 @@ func NewRepositoryDumper(ctx context.Context, baseDir, repoOwner, repoName strin
return nil, err
}
return &RepositoryDumper{
- ctx: ctx,
opts: opts,
baseDir: baseDir,
repoOwner: repoOwner,
@@ -105,7 +103,7 @@ func (g *RepositoryDumper) setURLToken(remoteAddr string) (string, error) {
}
// CreateRepo creates a repository
-func (g *RepositoryDumper) CreateRepo(repo *base.Repository, opts base.MigrateOptions) error {
+func (g *RepositoryDumper) CreateRepo(ctx context.Context, repo *base.Repository, opts base.MigrateOptions) error {
f, err := os.Create(filepath.Join(g.baseDir, "repo.yml"))
if err != nil {
return err
@@ -149,7 +147,7 @@ func (g *RepositoryDumper) CreateRepo(repo *base.Repository, opts base.MigrateOp
return err
}
- err = git.Clone(g.ctx, remoteAddr, repoPath, git.CloneRepoOptions{
+ err = git.Clone(ctx, remoteAddr, repoPath, git.CloneRepoOptions{
Mirror: true,
Quiet: true,
Timeout: migrateTimeout,
@@ -158,19 +156,19 @@ func (g *RepositoryDumper) CreateRepo(repo *base.Repository, opts base.MigrateOp
if err != nil {
return fmt.Errorf("Clone: %w", err)
}
- if err := git.WriteCommitGraph(g.ctx, repoPath); err != nil {
+ if err := git.WriteCommitGraph(ctx, repoPath); err != nil {
return err
}
if opts.Wiki {
wikiPath := g.wikiPath()
- wikiRemotePath := repository.WikiRemoteURL(g.ctx, remoteAddr)
+ wikiRemotePath := repository.WikiRemoteURL(ctx, remoteAddr)
if len(wikiRemotePath) > 0 {
if err := os.MkdirAll(wikiPath, os.ModePerm); err != nil {
return fmt.Errorf("Failed to remove %s: %w", wikiPath, err)
}
- if err := git.Clone(g.ctx, wikiRemotePath, wikiPath, git.CloneRepoOptions{
+ if err := git.Clone(ctx, wikiRemotePath, wikiPath, git.CloneRepoOptions{
Mirror: true,
Quiet: true,
Timeout: migrateTimeout,
@@ -181,13 +179,13 @@ func (g *RepositoryDumper) CreateRepo(repo *base.Repository, opts base.MigrateOp
if err := os.RemoveAll(wikiPath); err != nil {
return fmt.Errorf("Failed to remove %s: %w", wikiPath, err)
}
- } else if err := git.WriteCommitGraph(g.ctx, wikiPath); err != nil {
+ } else if err := git.WriteCommitGraph(ctx, wikiPath); err != nil {
return err
}
}
}
- g.gitRepo, err = git.OpenRepository(g.ctx, g.gitPath())
+ g.gitRepo, err = git.OpenRepository(ctx, g.gitPath())
return err
}
@@ -220,7 +218,7 @@ func (g *RepositoryDumper) Close() {
}
// CreateTopics creates topics
-func (g *RepositoryDumper) CreateTopics(topics ...string) error {
+func (g *RepositoryDumper) CreateTopics(_ context.Context, topics ...string) error {
f, err := os.Create(filepath.Join(g.baseDir, "topic.yml"))
if err != nil {
return err
@@ -242,7 +240,7 @@ func (g *RepositoryDumper) CreateTopics(topics ...string) error {
}
// CreateMilestones creates milestones
-func (g *RepositoryDumper) CreateMilestones(milestones ...*base.Milestone) error {
+func (g *RepositoryDumper) CreateMilestones(_ context.Context, milestones ...*base.Milestone) error {
var err error
if g.milestoneFile == nil {
g.milestoneFile, err = os.Create(filepath.Join(g.baseDir, "milestone.yml"))
@@ -264,7 +262,7 @@ func (g *RepositoryDumper) CreateMilestones(milestones ...*base.Milestone) error
}
// CreateLabels creates labels
-func (g *RepositoryDumper) CreateLabels(labels ...*base.Label) error {
+func (g *RepositoryDumper) CreateLabels(_ context.Context, labels ...*base.Label) error {
var err error
if g.labelFile == nil {
g.labelFile, err = os.Create(filepath.Join(g.baseDir, "label.yml"))
@@ -286,7 +284,7 @@ func (g *RepositoryDumper) CreateLabels(labels ...*base.Label) error {
}
// CreateReleases creates releases
-func (g *RepositoryDumper) CreateReleases(releases ...*base.Release) error {
+func (g *RepositoryDumper) CreateReleases(_ context.Context, releases ...*base.Release) error {
if g.opts.ReleaseAssets {
for _, release := range releases {
attachDir := filepath.Join("release_assets", release.TagName)
@@ -354,12 +352,12 @@ func (g *RepositoryDumper) CreateReleases(releases ...*base.Release) error {
}
// SyncTags syncs releases with tags in the database
-func (g *RepositoryDumper) SyncTags() error {
+func (g *RepositoryDumper) SyncTags(ctx context.Context) error {
return nil
}
// CreateIssues creates issues
-func (g *RepositoryDumper) CreateIssues(issues ...*base.Issue) error {
+func (g *RepositoryDumper) CreateIssues(_ context.Context, issues ...*base.Issue) error {
var err error
if g.issueFile == nil {
g.issueFile, err = os.Create(filepath.Join(g.baseDir, "issue.yml"))
@@ -412,7 +410,7 @@ func (g *RepositoryDumper) encodeItems(number int64, items []any, dir string, it
}
// CreateComments creates comments of issues
-func (g *RepositoryDumper) CreateComments(comments ...*base.Comment) error {
+func (g *RepositoryDumper) CreateComments(_ context.Context, comments ...*base.Comment) error {
commentsMap := make(map[int64][]any, len(comments))
for _, comment := range comments {
commentsMap[comment.IssueIndex] = append(commentsMap[comment.IssueIndex], comment)
@@ -421,7 +419,7 @@ func (g *RepositoryDumper) CreateComments(comments ...*base.Comment) error {
return g.createItems(g.commentDir(), g.commentFiles, commentsMap)
}
-func (g *RepositoryDumper) handlePullRequest(pr *base.PullRequest) error {
+func (g *RepositoryDumper) handlePullRequest(ctx context.Context, pr *base.PullRequest) error {
// SECURITY: this pr must have been ensured safe
if !pr.EnsuredSafe {
log.Error("PR #%d in %s/%s has not been checked for safety ... We will ignore this.", pr.Number, g.repoOwner, g.repoName)
@@ -490,7 +488,7 @@ func (g *RepositoryDumper) handlePullRequest(pr *base.PullRequest) error {
if pr.Head.CloneURL == "" || pr.Head.Ref == "" {
// Set head information if pr.Head.SHA is available
if pr.Head.SHA != "" {
- _, _, err = git.NewCommand(g.ctx, "update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.gitPath()})
+ _, _, err = git.NewCommand("update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(ctx, &git.RunOpts{Dir: g.gitPath()})
if err != nil {
log.Error("PR #%d in %s/%s unable to update-ref for pr HEAD: %v", pr.Number, g.repoOwner, g.repoName, err)
}
@@ -520,7 +518,7 @@ func (g *RepositoryDumper) handlePullRequest(pr *base.PullRequest) error {
if !ok {
// Set head information if pr.Head.SHA is available
if pr.Head.SHA != "" {
- _, _, err = git.NewCommand(g.ctx, "update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.gitPath()})
+ _, _, err = git.NewCommand("update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(ctx, &git.RunOpts{Dir: g.gitPath()})
if err != nil {
log.Error("PR #%d in %s/%s unable to update-ref for pr HEAD: %v", pr.Number, g.repoOwner, g.repoName, err)
}
@@ -555,7 +553,7 @@ func (g *RepositoryDumper) handlePullRequest(pr *base.PullRequest) error {
fetchArg = git.BranchPrefix + fetchArg
}
- _, _, err = git.NewCommand(g.ctx, "fetch", "--no-tags").AddDashesAndList(remote, fetchArg).RunStdString(&git.RunOpts{Dir: g.gitPath()})
+ _, _, err = git.NewCommand("fetch", "--no-tags").AddDashesAndList(remote, fetchArg).RunStdString(ctx, &git.RunOpts{Dir: g.gitPath()})
if err != nil {
log.Error("Fetch branch from %s failed: %v", pr.Head.CloneURL, err)
// We need to continue here so that the Head.Ref is reset and we attempt to set the gitref for the PR
@@ -579,7 +577,7 @@ func (g *RepositoryDumper) handlePullRequest(pr *base.PullRequest) error {
pr.Head.SHA = headSha
}
if pr.Head.SHA != "" {
- _, _, err = git.NewCommand(g.ctx, "update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.gitPath()})
+ _, _, err = git.NewCommand("update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(ctx, &git.RunOpts{Dir: g.gitPath()})
if err != nil {
log.Error("unable to set %s as the local head for PR #%d from %s in %s/%s. Error: %v", pr.Head.SHA, pr.Number, pr.Head.Ref, g.repoOwner, g.repoName, err)
}
@@ -589,7 +587,7 @@ func (g *RepositoryDumper) handlePullRequest(pr *base.PullRequest) error {
}
// CreatePullRequests creates pull requests
-func (g *RepositoryDumper) CreatePullRequests(prs ...*base.PullRequest) error {
+func (g *RepositoryDumper) CreatePullRequests(ctx context.Context, prs ...*base.PullRequest) error {
var err error
if g.pullrequestFile == nil {
if err := os.MkdirAll(g.baseDir, os.ModePerm); err != nil {
@@ -607,7 +605,7 @@ func (g *RepositoryDumper) CreatePullRequests(prs ...*base.PullRequest) error {
count := 0
for i := 0; i < len(prs); i++ {
pr := prs[i]
- if err := g.handlePullRequest(pr); err != nil {
+ if err := g.handlePullRequest(ctx, pr); err != nil {
log.Error("PR #%d in %s/%s failed - skipping", pr.Number, g.repoOwner, g.repoName, err)
continue
}
@@ -620,7 +618,7 @@ func (g *RepositoryDumper) CreatePullRequests(prs ...*base.PullRequest) error {
}
// CreateReviews create pull request reviews
-func (g *RepositoryDumper) CreateReviews(reviews ...*base.Review) error {
+func (g *RepositoryDumper) CreateReviews(_ context.Context, reviews ...*base.Review) error {
reviewsMap := make(map[int64][]any, len(reviews))
for _, review := range reviews {
reviewsMap[review.IssueIndex] = append(reviewsMap[review.IssueIndex], review)
@@ -636,7 +634,7 @@ func (g *RepositoryDumper) Rollback() error {
}
// Finish when migrating succeed, this will update something.
-func (g *RepositoryDumper) Finish() error {
+func (g *RepositoryDumper) Finish(_ context.Context) error {
return nil
}
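A brief sketch of the new command-invocation shape used above: the context is no longer captured by git.NewCommand, it is supplied to RunStdString when the command actually runs. The resolveRef helper is purely illustrative.

```go
// Sketch only: the call shape applied throughout this file.
package migrations

import (
	"context"
	"strings"

	"code.gitea.io/gitea/modules/git"
)

// resolveRef is a hypothetical helper showing context passed at run time.
func resolveRef(ctx context.Context, repoPath, ref string) (string, error) {
	stdout, _, err := git.NewCommand("rev-parse").AddDynamicArguments(ref).
		RunStdString(ctx, &git.RunOpts{Dir: repoPath})
	if err != nil {
		return "", err
	}
	return strings.TrimSpace(stdout), nil
}
```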
diff --git a/services/migrations/git.go b/services/migrations/git.go
index 22ffd5e765..1ed99499a1 100644
--- a/services/migrations/git.go
+++ b/services/migrations/git.go
@@ -28,12 +28,8 @@ func NewPlainGitDownloader(ownerName, repoName, remoteURL string) *PlainGitDownl
}
}
-// SetContext set context
-func (g *PlainGitDownloader) SetContext(ctx context.Context) {
-}
-
// GetRepoInfo returns a repository information
-func (g *PlainGitDownloader) GetRepoInfo() (*base.Repository, error) {
+func (g *PlainGitDownloader) GetRepoInfo(_ context.Context) (*base.Repository, error) {
// convert github repo to stand Repo
return &base.Repository{
Owner: g.ownerName,
@@ -43,6 +39,6 @@ func (g *PlainGitDownloader) GetRepoInfo() (*base.Repository, error) {
}
// GetTopics return empty string slice
-func (g PlainGitDownloader) GetTopics() ([]string, error) {
+func (g PlainGitDownloader) GetTopics(_ context.Context) ([]string, error) {
return []string{}, nil
}
diff --git a/services/migrations/gitea_downloader.go b/services/migrations/gitea_downloader.go
index 272bf02e11..f92f318293 100644
--- a/services/migrations/gitea_downloader.go
+++ b/services/migrations/gitea_downloader.go
@@ -67,7 +67,6 @@ func (f *GiteaDownloaderFactory) GitServiceType() structs.GitServiceType {
// GiteaDownloader implements a Downloader interface to get repository information's
type GiteaDownloader struct {
base.NullDownloader
- ctx context.Context
client *gitea_sdk.Client
baseURL string
repoOwner string
@@ -114,7 +113,6 @@ func NewGiteaDownloader(ctx context.Context, baseURL, repoPath, username, passwo
}
return &GiteaDownloader{
- ctx: ctx,
client: giteaClient,
baseURL: baseURL,
repoOwner: path[0],
@@ -124,11 +122,6 @@ func NewGiteaDownloader(ctx context.Context, baseURL, repoPath, username, passwo
}, nil
}
-// SetContext set context
-func (g *GiteaDownloader) SetContext(ctx context.Context) {
- g.ctx = ctx
-}
-
// String implements Stringer
func (g *GiteaDownloader) String() string {
return fmt.Sprintf("migration from gitea server %s %s/%s", g.baseURL, g.repoOwner, g.repoName)
@@ -142,7 +135,7 @@ func (g *GiteaDownloader) LogString() string {
}
// GetRepoInfo returns a repository information
-func (g *GiteaDownloader) GetRepoInfo() (*base.Repository, error) {
+func (g *GiteaDownloader) GetRepoInfo(_ context.Context) (*base.Repository, error) {
if g == nil {
return nil, errors.New("error: GiteaDownloader is nil")
}
@@ -164,19 +157,19 @@ func (g *GiteaDownloader) GetRepoInfo() (*base.Repository, error) {
}
// GetTopics return gitea topics
-func (g *GiteaDownloader) GetTopics() ([]string, error) {
+func (g *GiteaDownloader) GetTopics(_ context.Context) ([]string, error) {
topics, _, err := g.client.ListRepoTopics(g.repoOwner, g.repoName, gitea_sdk.ListRepoTopicsOptions{})
return topics, err
}
// GetMilestones returns milestones
-func (g *GiteaDownloader) GetMilestones() ([]*base.Milestone, error) {
+func (g *GiteaDownloader) GetMilestones(ctx context.Context) ([]*base.Milestone, error) {
milestones := make([]*base.Milestone, 0, g.maxPerPage)
for i := 1; ; i++ {
// make sure gitea can shutdown gracefully
select {
- case <-g.ctx.Done():
+ case <-ctx.Done():
return nil, nil
default:
}
@@ -235,13 +228,13 @@ func (g *GiteaDownloader) convertGiteaLabel(label *gitea_sdk.Label) *base.Label
}
// GetLabels returns labels
-func (g *GiteaDownloader) GetLabels() ([]*base.Label, error) {
+func (g *GiteaDownloader) GetLabels(ctx context.Context) ([]*base.Label, error) {
labels := make([]*base.Label, 0, g.maxPerPage)
for i := 1; ; i++ {
// make sure gitea can shutdown gracefully
select {
- case <-g.ctx.Done():
+ case <-ctx.Done():
return nil, nil
default:
}
@@ -323,13 +316,13 @@ func (g *GiteaDownloader) convertGiteaRelease(rel *gitea_sdk.Release) *base.Rele
}
// GetReleases returns releases
-func (g *GiteaDownloader) GetReleases() ([]*base.Release, error) {
+func (g *GiteaDownloader) GetReleases(ctx context.Context) ([]*base.Release, error) {
releases := make([]*base.Release, 0, g.maxPerPage)
for i := 1; ; i++ {
// make sure gitea can shutdown gracefully
select {
- case <-g.ctx.Done():
+ case <-ctx.Done():
return nil, nil
default:
}
@@ -395,7 +388,7 @@ func (g *GiteaDownloader) getCommentReactions(commentID int64) ([]*base.Reaction
}
// GetIssues returns issues according start and limit
-func (g *GiteaDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, error) {
+func (g *GiteaDownloader) GetIssues(_ context.Context, page, perPage int) ([]*base.Issue, bool, error) {
if perPage > g.maxPerPage {
perPage = g.maxPerPage
}
@@ -458,13 +451,13 @@ func (g *GiteaDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, err
}
// GetComments returns comments according issueNumber
-func (g *GiteaDownloader) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) {
+func (g *GiteaDownloader) GetComments(ctx context.Context, commentable base.Commentable) ([]*base.Comment, bool, error) {
allComments := make([]*base.Comment, 0, g.maxPerPage)
for i := 1; ; i++ {
// make sure gitea can shutdown gracefully
select {
- case <-g.ctx.Done():
+ case <-ctx.Done():
return nil, false, nil
default:
}
@@ -504,7 +497,7 @@ func (g *GiteaDownloader) GetComments(commentable base.Commentable) ([]*base.Com
}
// GetPullRequests returns pull requests according page and perPage
-func (g *GiteaDownloader) GetPullRequests(page, perPage int) ([]*base.PullRequest, bool, error) {
+func (g *GiteaDownloader) GetPullRequests(_ context.Context, page, perPage int) ([]*base.PullRequest, bool, error) {
if perPage > g.maxPerPage {
perPage = g.maxPerPage
}
@@ -624,7 +617,7 @@ func (g *GiteaDownloader) GetPullRequests(page, perPage int) ([]*base.PullReques
}
// GetReviews returns pull requests review
-func (g *GiteaDownloader) GetReviews(reviewable base.Reviewable) ([]*base.Review, error) {
+func (g *GiteaDownloader) GetReviews(ctx context.Context, reviewable base.Reviewable) ([]*base.Review, error) {
if err := g.client.CheckServerVersionConstraint(">=1.12"); err != nil {
log.Info("GiteaDownloader: instance to old, skip GetReviews")
return nil, nil
@@ -635,7 +628,7 @@ func (g *GiteaDownloader) GetReviews(reviewable base.Reviewable) ([]*base.Review
for i := 1; ; i++ {
// make sure gitea can shutdown gracefully
select {
- case <-g.ctx.Done():
+ case <-ctx.Done():
return nil, nil
default:
}
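For context, a minimal sketch of the pattern these gitea_downloader.go hunks apply, using hypothetical types rather than Gitea's own: the downloader no longer keeps a context on the struct, and each paginated method receives ctx and polls ctx.Done() between pages so a shutdown cancels the migration promptly.

package migrations_sketch

import "context"

type pageResult struct {
	items []string
	last  bool
}

type downloader struct {
	maxPerPage int
	// fetchPage stands in for one SDK list call (hypothetical).
	fetchPage func(ctx context.Context, page int) (pageResult, error)
}

func (d *downloader) listAll(ctx context.Context) ([]string, error) {
	all := make([]string, 0, d.maxPerPage)
	for i := 1; ; i++ {
		// make sure the caller can shut the migration down gracefully,
		// mirroring the select blocks in the hunks above
		select {
		case <-ctx.Done():
			return nil, nil
		default:
		}
		p, err := d.fetchPage(ctx, i)
		if err != nil {
			return nil, err
		}
		all = append(all, p.items...)
		if p.last {
			return all, nil
		}
	}
}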
diff --git a/services/migrations/gitea_downloader_test.go b/services/migrations/gitea_downloader_test.go
index 6f6ef99d96..bb1760e889 100644
--- a/services/migrations/gitea_downloader_test.go
+++ b/services/migrations/gitea_downloader_test.go
@@ -4,7 +4,6 @@
package migrations
import (
- "context"
"net/http"
"os"
"sort"
@@ -28,12 +27,12 @@ func TestGiteaDownloadRepo(t *testing.T) {
if err != nil || resp.StatusCode != http.StatusOK {
t.Skipf("Can't reach https://gitea.com, skipping %s", t.Name())
}
-
- downloader, err := NewGiteaDownloader(context.Background(), "https://gitea.com", "gitea/test_repo", "", "", giteaToken)
+ ctx := t.Context()
+ downloader, err := NewGiteaDownloader(ctx, "https://gitea.com", "gitea/test_repo", "", "", giteaToken)
require.NoError(t, err, "NewGiteaDownloader error occur")
require.NotNil(t, downloader, "NewGiteaDownloader is nil")
- repo, err := downloader.GetRepoInfo()
+ repo, err := downloader.GetRepoInfo(ctx)
assert.NoError(t, err)
assertRepositoryEqual(t, &base.Repository{
Name: "test_repo",
@@ -45,12 +44,12 @@ func TestGiteaDownloadRepo(t *testing.T) {
DefaultBranch: "master",
}, repo)
- topics, err := downloader.GetTopics()
+ topics, err := downloader.GetTopics(ctx)
assert.NoError(t, err)
sort.Strings(topics)
- assert.EqualValues(t, []string{"ci", "gitea", "migration", "test"}, topics)
+ assert.Equal(t, []string{"ci", "gitea", "migration", "test"}, topics)
- labels, err := downloader.GetLabels()
+ labels, err := downloader.GetLabels(ctx)
assert.NoError(t, err)
assertLabelsEqual(t, []*base.Label{
{
@@ -80,7 +79,7 @@ func TestGiteaDownloadRepo(t *testing.T) {
},
}, labels)
- milestones, err := downloader.GetMilestones()
+ milestones, err := downloader.GetMilestones(ctx)
assert.NoError(t, err)
assertMilestonesEqual(t, []*base.Milestone{
{
@@ -100,7 +99,7 @@ func TestGiteaDownloadRepo(t *testing.T) {
},
}, milestones)
- releases, err := downloader.GetReleases()
+ releases, err := downloader.GetReleases(ctx)
assert.NoError(t, err)
assertReleasesEqual(t, []*base.Release{
{
@@ -131,13 +130,13 @@ func TestGiteaDownloadRepo(t *testing.T) {
},
}, releases)
- issues, isEnd, err := downloader.GetIssues(1, 50)
+ issues, isEnd, err := downloader.GetIssues(ctx, 1, 50)
assert.NoError(t, err)
assert.True(t, isEnd)
assert.Len(t, issues, 7)
- assert.EqualValues(t, "open", issues[0].State)
+ assert.Equal(t, "open", issues[0].State)
- issues, isEnd, err = downloader.GetIssues(3, 2)
+ issues, isEnd, err = downloader.GetIssues(ctx, 3, 2)
assert.NoError(t, err)
assert.False(t, isEnd)
@@ -194,7 +193,7 @@ func TestGiteaDownloadRepo(t *testing.T) {
},
}, issues)
- comments, _, err := downloader.GetComments(&base.Issue{Number: 4, ForeignIndex: 4})
+ comments, _, err := downloader.GetComments(ctx, &base.Issue{Number: 4, ForeignIndex: 4})
assert.NoError(t, err)
assertCommentsEqual(t, []*base.Comment{
{
@@ -217,11 +216,11 @@ func TestGiteaDownloadRepo(t *testing.T) {
},
}, comments)
- prs, isEnd, err := downloader.GetPullRequests(1, 50)
+ prs, isEnd, err := downloader.GetPullRequests(ctx, 1, 50)
assert.NoError(t, err)
assert.True(t, isEnd)
assert.Len(t, prs, 6)
- prs, isEnd, err = downloader.GetPullRequests(1, 3)
+ prs, isEnd, err = downloader.GetPullRequests(ctx, 1, 3)
assert.NoError(t, err)
assert.False(t, isEnd)
assert.Len(t, prs, 3)
@@ -259,7 +258,7 @@ func TestGiteaDownloadRepo(t *testing.T) {
PatchURL: "https://gitea.com/gitea/test_repo/pulls/12.patch",
}, prs[1])
- reviews, err := downloader.GetReviews(&base.Issue{Number: 7, ForeignIndex: 7})
+ reviews, err := downloader.GetReviews(ctx, &base.Issue{Number: 7, ForeignIndex: 7})
assert.NoError(t, err)
assertReviewsEqual(t, []*base.Review{
{
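The test hunks above swap context.Background() for t.Context(), which is tied to the test's lifetime and is cancelled when the test finishes (available since Go 1.24). A small hypothetical illustration:

package migrations_sketch

import "testing"

func TestContextSketch(t *testing.T) {
	ctx := t.Context() // cancelled automatically when this test ends
	// Pass ctx to every downloader call, e.g. GetRepoInfo(ctx), GetTopics(ctx),
	// so a cancelled or timed-out test also stops the remote requests.
	_ = ctx
}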
diff --git a/services/migrations/gitea_uploader.go b/services/migrations/gitea_uploader.go
index 9e06b77b66..b17cc3ce41 100644
--- a/services/migrations/gitea_uploader.go
+++ b/services/migrations/gitea_uploader.go
@@ -41,7 +41,6 @@ var _ base.Uploader = &GiteaLocalUploader{}
// GiteaLocalUploader implements an Uploader to gitea sites
type GiteaLocalUploader struct {
- ctx context.Context
doer *user_model.User
repoOwner string
repoName string
@@ -58,9 +57,8 @@ type GiteaLocalUploader struct {
}
// NewGiteaLocalUploader creates a gitea Uploader via gitea API v1
-func NewGiteaLocalUploader(ctx context.Context, doer *user_model.User, repoOwner, repoName string) *GiteaLocalUploader {
+func NewGiteaLocalUploader(_ context.Context, doer *user_model.User, repoOwner, repoName string) *GiteaLocalUploader {
return &GiteaLocalUploader{
- ctx: ctx,
doer: doer,
repoOwner: repoOwner,
repoName: repoName,
@@ -93,15 +91,15 @@ func (g *GiteaLocalUploader) MaxBatchInsertSize(tp string) int {
}
// CreateRepo creates a repository
-func (g *GiteaLocalUploader) CreateRepo(repo *base.Repository, opts base.MigrateOptions) error {
- owner, err := user_model.GetUserByName(g.ctx, g.repoOwner)
+func (g *GiteaLocalUploader) CreateRepo(ctx context.Context, repo *base.Repository, opts base.MigrateOptions) error {
+ owner, err := user_model.GetUserByName(ctx, g.repoOwner)
if err != nil {
return err
}
var r *repo_model.Repository
if opts.MigrateToRepoID <= 0 {
- r, err = repo_service.CreateRepositoryDirectly(g.ctx, g.doer, owner, repo_service.CreateRepoOptions{
+ r, err = repo_service.CreateRepositoryDirectly(ctx, g.doer, owner, repo_service.CreateRepoOptions{
Name: g.repoName,
Description: repo.Description,
OriginalURL: repo.OriginalURL,
@@ -111,7 +109,7 @@ func (g *GiteaLocalUploader) CreateRepo(repo *base.Repository, opts base.Migrate
Status: repo_model.RepositoryBeingMigrated,
})
} else {
- r, err = repo_model.GetRepositoryByID(g.ctx, opts.MigrateToRepoID)
+ r, err = repo_model.GetRepositoryByID(ctx, opts.MigrateToRepoID)
}
if err != nil {
return err
@@ -119,7 +117,7 @@ func (g *GiteaLocalUploader) CreateRepo(repo *base.Repository, opts base.Migrate
r.DefaultBranch = repo.DefaultBranch
r.Description = repo.Description
- r, err = repo_service.MigrateRepositoryGitData(g.ctx, owner, r, base.MigrateOptions{
+ r, err = repo_service.MigrateRepositoryGitData(ctx, owner, r, base.MigrateOptions{
RepoName: g.repoName,
Description: repo.Description,
OriginalURL: repo.OriginalURL,
@@ -139,7 +137,7 @@ func (g *GiteaLocalUploader) CreateRepo(repo *base.Repository, opts base.Migrate
if err != nil {
return err
}
- g.gitRepo, err = gitrepo.OpenRepository(g.ctx, g.repo)
+ g.gitRepo, err = gitrepo.OpenRepository(ctx, g.repo)
if err != nil {
return err
}
@@ -150,7 +148,7 @@ func (g *GiteaLocalUploader) CreateRepo(repo *base.Repository, opts base.Migrate
return err
}
g.repo.ObjectFormatName = objectFormat.Name()
- return repo_model.UpdateRepositoryCols(g.ctx, g.repo, "object_format_name")
+ return repo_model.UpdateRepositoryCols(ctx, g.repo, "object_format_name")
}
// Close closes this uploader
@@ -161,7 +159,7 @@ func (g *GiteaLocalUploader) Close() {
}
// CreateTopics creates topics
-func (g *GiteaLocalUploader) CreateTopics(topics ...string) error {
+func (g *GiteaLocalUploader) CreateTopics(ctx context.Context, topics ...string) error {
// Ignore topics too long for the db
c := 0
for _, topic := range topics {
@@ -173,11 +171,11 @@ func (g *GiteaLocalUploader) CreateTopics(topics ...string) error {
c++
}
topics = topics[:c]
- return repo_model.SaveTopics(g.ctx, g.repo.ID, topics...)
+ return repo_model.SaveTopics(ctx, g.repo.ID, topics...)
}
// CreateMilestones creates milestones
-func (g *GiteaLocalUploader) CreateMilestones(milestones ...*base.Milestone) error {
+func (g *GiteaLocalUploader) CreateMilestones(ctx context.Context, milestones ...*base.Milestone) error {
mss := make([]*issues_model.Milestone, 0, len(milestones))
for _, milestone := range milestones {
var deadline timeutil.TimeStamp
@@ -216,7 +214,7 @@ func (g *GiteaLocalUploader) CreateMilestones(milestones ...*base.Milestone) err
mss = append(mss, &ms)
}
- err := issues_model.InsertMilestones(g.ctx, mss...)
+ err := issues_model.InsertMilestones(ctx, mss...)
if err != nil {
return err
}
@@ -228,7 +226,7 @@ func (g *GiteaLocalUploader) CreateMilestones(milestones ...*base.Milestone) err
}
// CreateLabels creates labels
-func (g *GiteaLocalUploader) CreateLabels(labels ...*base.Label) error {
+func (g *GiteaLocalUploader) CreateLabels(ctx context.Context, labels ...*base.Label) error {
lbs := make([]*issues_model.Label, 0, len(labels))
for _, l := range labels {
if color, err := label.NormalizeColor(l.Color); err != nil {
@@ -247,7 +245,7 @@ func (g *GiteaLocalUploader) CreateLabels(labels ...*base.Label) error {
})
}
- err := issues_model.NewLabels(g.ctx, lbs...)
+ err := issues_model.NewLabels(ctx, lbs...)
if err != nil {
return err
}
@@ -258,7 +256,7 @@ func (g *GiteaLocalUploader) CreateLabels(labels ...*base.Label) error {
}
// CreateReleases creates releases
-func (g *GiteaLocalUploader) CreateReleases(releases ...*base.Release) error {
+func (g *GiteaLocalUploader) CreateReleases(ctx context.Context, releases ...*base.Release) error {
rels := make([]*repo_model.Release, 0, len(releases))
for _, release := range releases {
if release.Created.IsZero() {
@@ -292,7 +290,7 @@ func (g *GiteaLocalUploader) CreateReleases(releases ...*base.Release) error {
CreatedUnix: timeutil.TimeStamp(release.Created.Unix()),
}
- if err := g.remapUser(release, &rel); err != nil {
+ if err := g.remapUser(ctx, release, &rel); err != nil {
return err
}
@@ -361,16 +359,16 @@ func (g *GiteaLocalUploader) CreateReleases(releases ...*base.Release) error {
rels = append(rels, &rel)
}
- return repo_model.InsertReleases(g.ctx, rels...)
+ return repo_model.InsertReleases(ctx, rels...)
}
// SyncTags syncs releases with tags in the database
-func (g *GiteaLocalUploader) SyncTags() error {
- return repo_module.SyncReleasesWithTags(g.ctx, g.repo, g.gitRepo)
+func (g *GiteaLocalUploader) SyncTags(ctx context.Context) error {
+ return repo_module.SyncReleasesWithTags(ctx, g.repo, g.gitRepo)
}
// CreateIssues creates issues
-func (g *GiteaLocalUploader) CreateIssues(issues ...*base.Issue) error {
+func (g *GiteaLocalUploader) CreateIssues(ctx context.Context, issues ...*base.Issue) error {
iss := make([]*issues_model.Issue, 0, len(issues))
for _, issue := range issues {
var labels []*issues_model.Label
@@ -419,7 +417,7 @@ func (g *GiteaLocalUploader) CreateIssues(issues ...*base.Issue) error {
UpdatedUnix: timeutil.TimeStamp(issue.Updated.Unix()),
}
- if err := g.remapUser(issue, &is); err != nil {
+ if err := g.remapUser(ctx, issue, &is); err != nil {
return err
}
@@ -432,7 +430,7 @@ func (g *GiteaLocalUploader) CreateIssues(issues ...*base.Issue) error {
Type: reaction.Content,
CreatedUnix: timeutil.TimeStampNow(),
}
- if err := g.remapUser(reaction, &res); err != nil {
+ if err := g.remapUser(ctx, reaction, &res); err != nil {
return err
}
is.Reactions = append(is.Reactions, &res)
@@ -441,7 +439,7 @@ func (g *GiteaLocalUploader) CreateIssues(issues ...*base.Issue) error {
}
if len(iss) > 0 {
- if err := issues_model.InsertIssues(g.ctx, iss...); err != nil {
+ if err := issues_model.InsertIssues(ctx, iss...); err != nil {
return err
}
@@ -454,7 +452,7 @@ func (g *GiteaLocalUploader) CreateIssues(issues ...*base.Issue) error {
}
// CreateComments creates comments on issues
-func (g *GiteaLocalUploader) CreateComments(comments ...*base.Comment) error {
+func (g *GiteaLocalUploader) CreateComments(ctx context.Context, comments ...*base.Comment) error {
cms := make([]*issues_model.Comment, 0, len(comments))
for _, comment := range comments {
var issue *issues_model.Issue
@@ -513,7 +511,7 @@ func (g *GiteaLocalUploader) CreateComments(comments ...*base.Comment) error {
default:
}
- if err := g.remapUser(comment, &cm); err != nil {
+ if err := g.remapUser(ctx, comment, &cm); err != nil {
return err
}
@@ -523,7 +521,7 @@ func (g *GiteaLocalUploader) CreateComments(comments ...*base.Comment) error {
Type: reaction.Content,
CreatedUnix: timeutil.TimeStampNow(),
}
- if err := g.remapUser(reaction, &res); err != nil {
+ if err := g.remapUser(ctx, reaction, &res); err != nil {
return err
}
cm.Reactions = append(cm.Reactions, &res)
@@ -535,35 +533,35 @@ func (g *GiteaLocalUploader) CreateComments(comments ...*base.Comment) error {
if len(cms) == 0 {
return nil
}
- return issues_model.InsertIssueComments(g.ctx, cms)
+ return issues_model.InsertIssueComments(ctx, cms)
}
// CreatePullRequests creates pull requests
-func (g *GiteaLocalUploader) CreatePullRequests(prs ...*base.PullRequest) error {
+func (g *GiteaLocalUploader) CreatePullRequests(ctx context.Context, prs ...*base.PullRequest) error {
gprs := make([]*issues_model.PullRequest, 0, len(prs))
for _, pr := range prs {
- gpr, err := g.newPullRequest(pr)
+ gpr, err := g.newPullRequest(ctx, pr)
if err != nil {
return err
}
- if err := g.remapUser(pr, gpr.Issue); err != nil {
+ if err := g.remapUser(ctx, pr, gpr.Issue); err != nil {
return err
}
gprs = append(gprs, gpr)
}
- if err := issues_model.InsertPullRequests(g.ctx, gprs...); err != nil {
+ if err := issues_model.InsertPullRequests(ctx, gprs...); err != nil {
return err
}
for _, pr := range gprs {
g.issues[pr.Issue.Index] = pr.Issue
- pull.AddToTaskQueue(g.ctx, pr)
+ pull.AddToTaskQueue(ctx, pr)
}
return nil
}
-func (g *GiteaLocalUploader) updateGitForPullRequest(pr *base.PullRequest) (head string, err error) {
+func (g *GiteaLocalUploader) updateGitForPullRequest(ctx context.Context, pr *base.PullRequest) (head string, err error) {
// SECURITY: this pr must have been ensured safe
if !pr.EnsuredSafe {
log.Error("PR #%d in %s/%s has not been checked for safety.", pr.Number, g.repoOwner, g.repoName)
@@ -664,7 +662,7 @@ func (g *GiteaLocalUploader) updateGitForPullRequest(pr *base.PullRequest) (head
fetchArg = git.BranchPrefix + fetchArg
}
- _, _, err = git.NewCommand(g.ctx, "fetch", "--no-tags").AddDashesAndList(remote, fetchArg).RunStdString(&git.RunOpts{Dir: g.repo.RepoPath()})
+ _, _, err = git.NewCommand("fetch", "--no-tags").AddDashesAndList(remote, fetchArg).RunStdString(ctx, &git.RunOpts{Dir: g.repo.RepoPath()})
if err != nil {
log.Error("Fetch branch from %s failed: %v", pr.Head.CloneURL, err)
return head, nil
@@ -683,7 +681,7 @@ func (g *GiteaLocalUploader) updateGitForPullRequest(pr *base.PullRequest) (head
pr.Head.SHA = headSha
}
- _, _, err = git.NewCommand(g.ctx, "update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.repo.RepoPath()})
+ _, _, err = git.NewCommand("update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(ctx, &git.RunOpts{Dir: g.repo.RepoPath()})
if err != nil {
return "", err
}
@@ -700,13 +698,13 @@ func (g *GiteaLocalUploader) updateGitForPullRequest(pr *base.PullRequest) (head
// The SHA is empty
log.Warn("Empty reference, no pull head for PR #%d in %s/%s", pr.Number, g.repoOwner, g.repoName)
} else {
- _, _, err = git.NewCommand(g.ctx, "rev-list", "--quiet", "-1").AddDynamicArguments(pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.repo.RepoPath()})
+ _, _, err = git.NewCommand("rev-list", "--quiet", "-1").AddDynamicArguments(pr.Head.SHA).RunStdString(ctx, &git.RunOpts{Dir: g.repo.RepoPath()})
if err != nil {
// Git update-ref removes bad references with a relative path
log.Warn("Deprecated local head %s for PR #%d in %s/%s, removing %s", pr.Head.SHA, pr.Number, g.repoOwner, g.repoName, pr.GetGitRefName())
} else {
// set head information
- _, _, err = git.NewCommand(g.ctx, "update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.repo.RepoPath()})
+ _, _, err = git.NewCommand("update-ref", "--no-deref").AddDynamicArguments(pr.GetGitRefName(), pr.Head.SHA).RunStdString(ctx, &git.RunOpts{Dir: g.repo.RepoPath()})
if err != nil {
log.Error("unable to set %s as the local head for PR #%d from %s in %s/%s. Error: %v", pr.Head.SHA, pr.Number, pr.Head.Ref, g.repoOwner, g.repoName, err)
}
@@ -716,7 +714,7 @@ func (g *GiteaLocalUploader) updateGitForPullRequest(pr *base.PullRequest) (head
return head, nil
}
-func (g *GiteaLocalUploader) newPullRequest(pr *base.PullRequest) (*issues_model.PullRequest, error) {
+func (g *GiteaLocalUploader) newPullRequest(ctx context.Context, pr *base.PullRequest) (*issues_model.PullRequest, error) {
var labels []*issues_model.Label
for _, label := range pr.Labels {
lb, ok := g.labels[label.Name]
@@ -727,7 +725,7 @@ func (g *GiteaLocalUploader) newPullRequest(pr *base.PullRequest) (*issues_model
milestoneID := g.milestones[pr.Milestone]
- head, err := g.updateGitForPullRequest(pr)
+ head, err := g.updateGitForPullRequest(ctx, pr)
if err != nil {
return nil, fmt.Errorf("updateGitForPullRequest: %w", err)
}
@@ -779,7 +777,7 @@ func (g *GiteaLocalUploader) newPullRequest(pr *base.PullRequest) (*issues_model
UpdatedUnix: timeutil.TimeStamp(pr.Updated.Unix()),
}
- if err := g.remapUser(pr, &issue); err != nil {
+ if err := g.remapUser(ctx, pr, &issue); err != nil {
return nil, err
}
@@ -789,7 +787,7 @@ func (g *GiteaLocalUploader) newPullRequest(pr *base.PullRequest) (*issues_model
Type: reaction.Content,
CreatedUnix: timeutil.TimeStampNow(),
}
- if err := g.remapUser(reaction, &res); err != nil {
+ if err := g.remapUser(ctx, reaction, &res); err != nil {
return nil, err
}
issue.Reactions = append(issue.Reactions, &res)
@@ -839,7 +837,7 @@ func convertReviewState(state string) issues_model.ReviewType {
}
// CreateReviews creates pull request reviews of currently migrated issues
-func (g *GiteaLocalUploader) CreateReviews(reviews ...*base.Review) error {
+func (g *GiteaLocalUploader) CreateReviews(ctx context.Context, reviews ...*base.Review) error {
cms := make([]*issues_model.Review, 0, len(reviews))
for _, review := range reviews {
var issue *issues_model.Issue
@@ -860,7 +858,7 @@ func (g *GiteaLocalUploader) CreateReviews(reviews ...*base.Review) error {
UpdatedUnix: timeutil.TimeStamp(review.CreatedAt.Unix()),
}
- if err := g.remapUser(review, &cm); err != nil {
+ if err := g.remapUser(ctx, review, &cm); err != nil {
return err
}
@@ -870,7 +868,7 @@ func (g *GiteaLocalUploader) CreateReviews(reviews ...*base.Review) error {
pr, ok := g.prCache[issue.ID]
if !ok {
var err error
- pr, err = issues_model.GetPullRequestByIssueIDWithNoAttributes(g.ctx, issue.ID)
+ pr, err = issues_model.GetPullRequestByIssueIDWithNoAttributes(ctx, issue.ID)
if err != nil {
return err
}
@@ -940,7 +938,7 @@ func (g *GiteaLocalUploader) CreateReviews(reviews ...*base.Review) error {
UpdatedUnix: timeutil.TimeStamp(comment.UpdatedAt.Unix()),
}
- if err := g.remapUser(review, &c); err != nil {
+ if err := g.remapUser(ctx, review, &c); err != nil {
return err
}
@@ -948,7 +946,7 @@ func (g *GiteaLocalUploader) CreateReviews(reviews ...*base.Review) error {
}
}
- return issues_model.InsertReviews(g.ctx, cms)
+ return issues_model.InsertReviews(ctx, cms)
}
// Rollback when the migration has failed; this will roll back all the changes.
@@ -962,31 +960,31 @@ func (g *GiteaLocalUploader) Rollback() error {
}
// Finish when the migration succeeds; this will do some final status updates.
-func (g *GiteaLocalUploader) Finish() error {
+func (g *GiteaLocalUploader) Finish(ctx context.Context) error {
if g.repo == nil || g.repo.ID <= 0 {
return ErrRepoNotCreated
}
// update issue_index
- if err := issues_model.RecalculateIssueIndexForRepo(g.ctx, g.repo.ID); err != nil {
+ if err := issues_model.RecalculateIssueIndexForRepo(ctx, g.repo.ID); err != nil {
return err
}
- if err := models.UpdateRepoStats(g.ctx, g.repo.ID); err != nil {
+ if err := models.UpdateRepoStats(ctx, g.repo.ID); err != nil {
return err
}
g.repo.Status = repo_model.RepositoryReady
- return repo_model.UpdateRepositoryCols(g.ctx, g.repo, "status")
+ return repo_model.UpdateRepositoryCols(ctx, g.repo, "status")
}
-func (g *GiteaLocalUploader) remapUser(source user_model.ExternalUserMigrated, target user_model.ExternalUserRemappable) error {
+func (g *GiteaLocalUploader) remapUser(ctx context.Context, source user_model.ExternalUserMigrated, target user_model.ExternalUserRemappable) error {
var userID int64
var err error
if g.sameApp {
- userID, err = g.remapLocalUser(source)
+ userID, err = g.remapLocalUser(ctx, source)
} else {
- userID, err = g.remapExternalUser(source)
+ userID, err = g.remapExternalUser(ctx, source)
}
if err != nil {
return err
@@ -998,10 +996,10 @@ func (g *GiteaLocalUploader) remapUser(source user_model.ExternalUserMigrated, t
return target.RemapExternalUser(source.GetExternalName(), source.GetExternalID(), g.doer.ID)
}
-func (g *GiteaLocalUploader) remapLocalUser(source user_model.ExternalUserMigrated) (int64, error) {
+func (g *GiteaLocalUploader) remapLocalUser(ctx context.Context, source user_model.ExternalUserMigrated) (int64, error) {
userid, ok := g.userMap[source.GetExternalID()]
if !ok {
- name, err := user_model.GetUserNameByID(g.ctx, source.GetExternalID())
+ name, err := user_model.GetUserNameByID(ctx, source.GetExternalID())
if err != nil {
return 0, err
}
@@ -1016,10 +1014,10 @@ func (g *GiteaLocalUploader) remapLocalUser(source user_model.ExternalUserMigrat
return userid, nil
}
-func (g *GiteaLocalUploader) remapExternalUser(source user_model.ExternalUserMigrated) (userid int64, err error) {
+func (g *GiteaLocalUploader) remapExternalUser(ctx context.Context, source user_model.ExternalUserMigrated) (userid int64, err error) {
userid, ok := g.userMap[source.GetExternalID()]
if !ok {
- userid, err = user_model.GetUserIDByExternalUserID(g.ctx, g.gitServiceType.Name(), fmt.Sprintf("%d", source.GetExternalID()))
+ userid, err = user_model.GetUserIDByExternalUserID(ctx, g.gitServiceType.Name(), fmt.Sprintf("%d", source.GetExternalID()))
if err != nil {
log.Error("GetUserIDByExternalUserID: %v", err)
return 0, err
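A rough sketch of the GiteaLocalUploader change, with hypothetical types standing in for the model layer: the ctx field is removed and every method accepts ctx from the caller, threading it through to storage calls instead of reading it from the struct. The same hunks also move the context argument from git.NewCommand to the Run/RunStdString call site, as shown above.

package migrations_sketch

import "context"

type topicStore interface {
	SaveTopics(ctx context.Context, repoID int64, topics ...string) error
}

type uploader struct {
	repoID int64
	store  topicStore
	// note: no ctx field anymore; each call brings its own context
}

func (u *uploader) CreateTopics(ctx context.Context, topics ...string) error {
	// ctx comes from the migration task (or a test's t.Context()) and is
	// passed straight through to the storage layer.
	return u.store.SaveTopics(ctx, u.repoID, topics...)
}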
diff --git a/services/migrations/gitea_uploader_test.go b/services/migrations/gitea_uploader_test.go
index f2379dadf8..5a5875e15d 100644
--- a/services/migrations/gitea_uploader_test.go
+++ b/services/migrations/gitea_uploader_test.go
@@ -5,7 +5,6 @@
package migrations
import (
- "context"
"fmt"
"os"
"path/filepath"
@@ -40,7 +39,7 @@ func TestGiteaUploadRepo(t *testing.T) {
user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
var (
- ctx = context.Background()
+ ctx = t.Context()
downloader = NewGithubDownloaderV3(ctx, "https://github.com", "", "", "", "go-xorm", "builder")
repoName = "builder-" + time.Now().Format("2006-01-02-15-04-05")
uploader = NewGiteaLocalUploader(graceful.GetManager().HammerContext(), user, user.Name, repoName)
@@ -65,7 +64,7 @@ func TestGiteaUploadRepo(t *testing.T) {
repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerID: user.ID, Name: repoName})
assert.True(t, repo.HasWiki())
- assert.EqualValues(t, repo_model.RepositoryReady, repo.Status)
+ assert.Equal(t, repo_model.RepositoryReady, repo.Status)
milestones, err := db.Find[issues_model.Milestone](db.DefaultContext, issues_model.FindMilestoneOptions{
RepoID: repo.ID,
@@ -132,8 +131,9 @@ func TestGiteaUploadRemapLocalUser(t *testing.T) {
doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ ctx := t.Context()
repoName := "migrated"
- uploader := NewGiteaLocalUploader(context.Background(), doer, doer.Name, repoName)
+ uploader := NewGiteaLocalUploader(ctx, doer, doer.Name, repoName)
// call remapLocalUser
uploader.sameApp = true
@@ -150,9 +150,9 @@ func TestGiteaUploadRemapLocalUser(t *testing.T) {
//
target := repo_model.Release{}
uploader.userMap = make(map[int64]int64)
- err := uploader.remapUser(&source, &target)
+ err := uploader.remapUser(ctx, &source, &target)
assert.NoError(t, err)
- assert.EqualValues(t, doer.ID, target.GetUserID())
+ assert.Equal(t, doer.ID, target.GetUserID())
//
// The externalID matches a known user but the name does not match,
@@ -161,9 +161,9 @@ func TestGiteaUploadRemapLocalUser(t *testing.T) {
source.PublisherID = user.ID
target = repo_model.Release{}
uploader.userMap = make(map[int64]int64)
- err = uploader.remapUser(&source, &target)
+ err = uploader.remapUser(ctx, &source, &target)
assert.NoError(t, err)
- assert.EqualValues(t, doer.ID, target.GetUserID())
+ assert.Equal(t, doer.ID, target.GetUserID())
//
// The externalID and externalName match an existing user, everything
@@ -172,17 +172,17 @@ func TestGiteaUploadRemapLocalUser(t *testing.T) {
source.PublisherName = user.Name
target = repo_model.Release{}
uploader.userMap = make(map[int64]int64)
- err = uploader.remapUser(&source, &target)
+ err = uploader.remapUser(ctx, &source, &target)
assert.NoError(t, err)
- assert.EqualValues(t, user.ID, target.GetUserID())
+ assert.Equal(t, user.ID, target.GetUserID())
}
func TestGiteaUploadRemapExternalUser(t *testing.T) {
unittest.PrepareTestEnv(t)
doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
-
+ ctx := t.Context()
repoName := "migrated"
- uploader := NewGiteaLocalUploader(context.Background(), doer, doer.Name, repoName)
+ uploader := NewGiteaLocalUploader(ctx, doer, doer.Name, repoName)
uploader.gitServiceType = structs.GiteaService
// call remapExternalUser
uploader.sameApp = false
@@ -200,9 +200,9 @@ func TestGiteaUploadRemapExternalUser(t *testing.T) {
//
uploader.userMap = make(map[int64]int64)
target := repo_model.Release{}
- err := uploader.remapUser(&source, &target)
+ err := uploader.remapUser(ctx, &source, &target)
assert.NoError(t, err)
- assert.EqualValues(t, doer.ID, target.GetUserID())
+ assert.Equal(t, doer.ID, target.GetUserID())
//
// Link the external ID to an existing user
@@ -223,9 +223,9 @@ func TestGiteaUploadRemapExternalUser(t *testing.T) {
//
uploader.userMap = make(map[int64]int64)
target = repo_model.Release{}
- err = uploader.remapUser(&source, &target)
+ err = uploader.remapUser(ctx, &source, &target)
assert.NoError(t, err)
- assert.EqualValues(t, linkedUser.ID, target.GetUserID())
+ assert.Equal(t, linkedUser.ID, target.GetUserID())
}
func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) {
@@ -237,7 +237,7 @@ func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) {
fromRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
baseRef := "master"
assert.NoError(t, git.InitRepository(git.DefaultContext, fromRepo.RepoPath(), false, fromRepo.ObjectFormatName))
- err := git.NewCommand(git.DefaultContext, "symbolic-ref").AddDynamicArguments("HEAD", git.BranchPrefix+baseRef).Run(&git.RunOpts{Dir: fromRepo.RepoPath()})
+ err := git.NewCommand("symbolic-ref").AddDynamicArguments("HEAD", git.BranchPrefix+baseRef).Run(git.DefaultContext, &git.RunOpts{Dir: fromRepo.RepoPath()})
assert.NoError(t, err)
assert.NoError(t, os.WriteFile(filepath.Join(fromRepo.RepoPath(), "README.md"), []byte(fmt.Sprintf("# Testing Repository\n\nOriginally created in: %s", fromRepo.RepoPath())), 0o644))
assert.NoError(t, git.AddChanges(fromRepo.RepoPath(), true))
@@ -261,7 +261,7 @@ func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) {
// fromRepo branch1
//
headRef := "branch1"
- _, _, err = git.NewCommand(git.DefaultContext, "checkout", "-b").AddDynamicArguments(headRef).RunStdString(&git.RunOpts{Dir: fromRepo.RepoPath()})
+ _, _, err = git.NewCommand("checkout", "-b").AddDynamicArguments(headRef).RunStdString(git.DefaultContext, &git.RunOpts{Dir: fromRepo.RepoPath()})
assert.NoError(t, err)
assert.NoError(t, os.WriteFile(filepath.Join(fromRepo.RepoPath(), "README.md"), []byte("SOMETHING"), 0o644))
assert.NoError(t, git.AddChanges(fromRepo.RepoPath(), true))
@@ -285,7 +285,7 @@ func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) {
assert.NoError(t, git.CloneWithArgs(git.DefaultContext, nil, fromRepo.RepoPath(), forkRepo.RepoPath(), git.CloneRepoOptions{
Branch: headRef,
}))
- _, _, err = git.NewCommand(git.DefaultContext, "checkout", "-b").AddDynamicArguments(forkHeadRef).RunStdString(&git.RunOpts{Dir: forkRepo.RepoPath()})
+ _, _, err = git.NewCommand("checkout", "-b").AddDynamicArguments(forkHeadRef).RunStdString(git.DefaultContext, &git.RunOpts{Dir: forkRepo.RepoPath()})
assert.NoError(t, err)
assert.NoError(t, os.WriteFile(filepath.Join(forkRepo.RepoPath(), "README.md"), []byte(fmt.Sprintf("# branch2 %s", forkRepo.RepoPath())), 0o644))
assert.NoError(t, git.AddChanges(forkRepo.RepoPath(), true))
@@ -301,11 +301,12 @@ func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) {
assert.NoError(t, err)
toRepoName := "migrated"
- uploader := NewGiteaLocalUploader(context.Background(), fromRepoOwner, fromRepoOwner.Name, toRepoName)
+ ctx := t.Context()
+ uploader := NewGiteaLocalUploader(ctx, fromRepoOwner, fromRepoOwner.Name, toRepoName)
uploader.gitServiceType = structs.GiteaService
- assert.NoError(t, repo_service.Init(context.Background()))
- assert.NoError(t, uploader.CreateRepo(&base.Repository{
+ assert.NoError(t, repo_service.Init(t.Context()))
+ assert.NoError(t, uploader.CreateRepo(ctx, &base.Repository{
Description: "description",
OriginalURL: fromRepo.RepoPath(),
CloneURL: fromRepo.RepoPath(),
@@ -505,16 +506,16 @@ func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) {
testCase.pr.EnsuredSafe = true
- head, err := uploader.updateGitForPullRequest(&testCase.pr)
+ head, err := uploader.updateGitForPullRequest(ctx, &testCase.pr)
assert.NoError(t, err)
- assert.EqualValues(t, testCase.head, head)
+ assert.Equal(t, testCase.head, head)
log.Info(stopMark)
logFiltered, logStopped := logChecker.Check(5 * time.Second)
assert.True(t, logStopped)
if len(testCase.logFilter) > 0 {
- assert.EqualValues(t, testCase.logFiltered, logFiltered, "for log message filters: %v", testCase.logFilter)
+ assert.Equal(t, testCase.logFiltered, logFiltered, "for log message filters: %v", testCase.logFilter)
}
})
}
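The assertion changes above (assert.EqualValues to assert.Equal) make the checks stricter: testify's Equal also requires both operands to have the same type, whereas EqualValues accepts values that are merely convertible. A small hypothetical example:

package migrations_sketch

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestEqualVsEqualValues(t *testing.T) {
	assert.EqualValues(t, int32(7), int64(7)) // passes: values match after conversion
	assert.Equal(t, int64(7), int64(7))       // passes: same type and same value
	// assert.Equal(t, int32(7), int64(7))   // would fail: the types differ
}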
diff --git a/services/migrations/github.go b/services/migrations/github.go
index 604ab84b39..b00d6ed27f 100644
--- a/services/migrations/github.go
+++ b/services/migrations/github.go
@@ -64,7 +64,6 @@ func (f *GithubDownloaderV3Factory) GitServiceType() structs.GitServiceType {
// from github via APIv3
type GithubDownloaderV3 struct {
base.NullDownloader
- ctx context.Context
clients []*github.Client
baseURL string
repoOwner string
@@ -79,12 +78,11 @@ type GithubDownloaderV3 struct {
}
// NewGithubDownloaderV3 creates a github Downloader via github v3 API
-func NewGithubDownloaderV3(ctx context.Context, baseURL, userName, password, token, repoOwner, repoName string) *GithubDownloaderV3 {
+func NewGithubDownloaderV3(_ context.Context, baseURL, userName, password, token, repoOwner, repoName string) *GithubDownloaderV3 {
downloader := GithubDownloaderV3{
userName: userName,
baseURL: baseURL,
password: password,
- ctx: ctx,
repoOwner: repoOwner,
repoName: repoName,
maxPerPage: 100,
@@ -141,12 +139,7 @@ func (g *GithubDownloaderV3) addClient(client *http.Client, baseURL string) {
g.rates = append(g.rates, nil)
}
-// SetContext set context
-func (g *GithubDownloaderV3) SetContext(ctx context.Context) {
- g.ctx = ctx
-}
-
-func (g *GithubDownloaderV3) waitAndPickClient() {
+func (g *GithubDownloaderV3) waitAndPickClient(ctx context.Context) {
var recentIdx int
var maxRemaining int
for i := 0; i < len(g.clients); i++ {
@@ -160,13 +153,13 @@ func (g *GithubDownloaderV3) waitAndPickClient() {
for g.rates[g.curClientIdx] != nil && g.rates[g.curClientIdx].Remaining <= GithubLimitRateRemaining {
timer := time.NewTimer(time.Until(g.rates[g.curClientIdx].Reset.Time))
select {
- case <-g.ctx.Done():
+ case <-ctx.Done():
timer.Stop()
return
case <-timer.C:
}
- err := g.RefreshRate()
+ err := g.RefreshRate(ctx)
if err != nil {
log.Error("g.getClient().RateLimit.Get: %s", err)
}
@@ -174,8 +167,8 @@ func (g *GithubDownloaderV3) waitAndPickClient() {
}
// RefreshRate updates the current rate (doesn't count in rate limit)
-func (g *GithubDownloaderV3) RefreshRate() error {
- rates, _, err := g.getClient().RateLimit.Get(g.ctx)
+func (g *GithubDownloaderV3) RefreshRate(ctx context.Context) error {
+ rates, _, err := g.getClient().RateLimit.Get(ctx)
if err != nil {
// if rate limit is not enabled, ignore it
if strings.Contains(err.Error(), "404") {
@@ -198,9 +191,9 @@ func (g *GithubDownloaderV3) setRate(rate *github.Rate) {
}
// GetRepoInfo returns repository information
-func (g *GithubDownloaderV3) GetRepoInfo() (*base.Repository, error) {
- g.waitAndPickClient()
- gr, resp, err := g.getClient().Repositories.Get(g.ctx, g.repoOwner, g.repoName)
+func (g *GithubDownloaderV3) GetRepoInfo(ctx context.Context) (*base.Repository, error) {
+ g.waitAndPickClient(ctx)
+ gr, resp, err := g.getClient().Repositories.Get(ctx, g.repoOwner, g.repoName)
if err != nil {
return nil, err
}
@@ -219,9 +212,9 @@ func (g *GithubDownloaderV3) GetRepoInfo() (*base.Repository, error) {
}
// GetTopics returns github topics
-func (g *GithubDownloaderV3) GetTopics() ([]string, error) {
- g.waitAndPickClient()
- r, resp, err := g.getClient().Repositories.Get(g.ctx, g.repoOwner, g.repoName)
+func (g *GithubDownloaderV3) GetTopics(ctx context.Context) ([]string, error) {
+ g.waitAndPickClient(ctx)
+ r, resp, err := g.getClient().Repositories.Get(ctx, g.repoOwner, g.repoName)
if err != nil {
return nil, err
}
@@ -230,12 +223,12 @@ func (g *GithubDownloaderV3) GetTopics() ([]string, error) {
}
// GetMilestones returns milestones
-func (g *GithubDownloaderV3) GetMilestones() ([]*base.Milestone, error) {
+func (g *GithubDownloaderV3) GetMilestones(ctx context.Context) ([]*base.Milestone, error) {
perPage := g.maxPerPage
milestones := make([]*base.Milestone, 0, perPage)
for i := 1; ; i++ {
- g.waitAndPickClient()
- ms, resp, err := g.getClient().Issues.ListMilestones(g.ctx, g.repoOwner, g.repoName,
+ g.waitAndPickClient(ctx)
+ ms, resp, err := g.getClient().Issues.ListMilestones(ctx, g.repoOwner, g.repoName,
&github.MilestoneListOptions{
State: "all",
ListOptions: github.ListOptions{
@@ -279,12 +272,12 @@ func convertGithubLabel(label *github.Label) *base.Label {
}
// GetLabels returns labels
-func (g *GithubDownloaderV3) GetLabels() ([]*base.Label, error) {
+func (g *GithubDownloaderV3) GetLabels(ctx context.Context) ([]*base.Label, error) {
perPage := g.maxPerPage
labels := make([]*base.Label, 0, perPage)
for i := 1; ; i++ {
- g.waitAndPickClient()
- ls, resp, err := g.getClient().Issues.ListLabels(g.ctx, g.repoOwner, g.repoName,
+ g.waitAndPickClient(ctx)
+ ls, resp, err := g.getClient().Issues.ListLabels(ctx, g.repoOwner, g.repoName,
&github.ListOptions{
Page: i,
PerPage: perPage,
@@ -304,7 +297,7 @@ func (g *GithubDownloaderV3) GetLabels() ([]*base.Label, error) {
return labels, nil
}
-func (g *GithubDownloaderV3) convertGithubRelease(rel *github.RepositoryRelease) *base.Release {
+func (g *GithubDownloaderV3) convertGithubRelease(ctx context.Context, rel *github.RepositoryRelease) *base.Release {
// GitHub allows commitish to be a reference.
// In this case, we need to remove the prefix, i.e. convert "refs/heads/main" to "main".
targetCommitish := strings.TrimPrefix(rel.GetTargetCommitish(), git.BranchPrefix)
@@ -339,12 +332,12 @@ func (g *GithubDownloaderV3) convertGithubRelease(rel *github.RepositoryRelease)
Created: asset.CreatedAt.Time,
Updated: asset.UpdatedAt.Time,
DownloadFunc: func() (io.ReadCloser, error) {
- g.waitAndPickClient()
- readCloser, redirectURL, err := g.getClient().Repositories.DownloadReleaseAsset(g.ctx, g.repoOwner, g.repoName, assetID, nil)
+ g.waitAndPickClient(ctx)
+ readCloser, redirectURL, err := g.getClient().Repositories.DownloadReleaseAsset(ctx, g.repoOwner, g.repoName, assetID, nil)
if err != nil {
return nil, err
}
- if err := g.RefreshRate(); err != nil {
+ if err := g.RefreshRate(ctx); err != nil {
log.Error("g.getClient().RateLimits: %s", err)
}
@@ -364,13 +357,13 @@ func (g *GithubDownloaderV3) convertGithubRelease(rel *github.RepositoryRelease)
return io.NopCloser(strings.NewReader(redirectURL)), nil
}
- g.waitAndPickClient()
- req, err := http.NewRequestWithContext(g.ctx, "GET", redirectURL, nil)
+ g.waitAndPickClient(ctx)
+ req, err := http.NewRequestWithContext(ctx, "GET", redirectURL, nil)
if err != nil {
return nil, err
}
resp, err := httpClient.Do(req)
- err1 := g.RefreshRate()
+ err1 := g.RefreshRate(ctx)
if err1 != nil {
log.Error("g.RefreshRate(): %s", err1)
}
@@ -385,12 +378,12 @@ func (g *GithubDownloaderV3) convertGithubRelease(rel *github.RepositoryRelease)
}
// GetReleases returns releases
-func (g *GithubDownloaderV3) GetReleases() ([]*base.Release, error) {
+func (g *GithubDownloaderV3) GetReleases(ctx context.Context) ([]*base.Release, error) {
perPage := g.maxPerPage
releases := make([]*base.Release, 0, perPage)
for i := 1; ; i++ {
- g.waitAndPickClient()
- ls, resp, err := g.getClient().Repositories.ListReleases(g.ctx, g.repoOwner, g.repoName,
+ g.waitAndPickClient(ctx)
+ ls, resp, err := g.getClient().Repositories.ListReleases(ctx, g.repoOwner, g.repoName,
&github.ListOptions{
Page: i,
PerPage: perPage,
@@ -401,7 +394,7 @@ func (g *GithubDownloaderV3) GetReleases() ([]*base.Release, error) {
g.setRate(&resp.Rate)
for _, release := range ls {
- releases = append(releases, g.convertGithubRelease(release))
+ releases = append(releases, g.convertGithubRelease(ctx, release))
}
if len(ls) < perPage {
break
@@ -411,7 +404,7 @@ func (g *GithubDownloaderV3) GetReleases() ([]*base.Release, error) {
}
// GetIssues returns issues according to start and limit
-func (g *GithubDownloaderV3) GetIssues(page, perPage int) ([]*base.Issue, bool, error) {
+func (g *GithubDownloaderV3) GetIssues(ctx context.Context, page, perPage int) ([]*base.Issue, bool, error) {
if perPage > g.maxPerPage {
perPage = g.maxPerPage
}
@@ -426,8 +419,8 @@ func (g *GithubDownloaderV3) GetIssues(page, perPage int) ([]*base.Issue, bool,
}
allIssues := make([]*base.Issue, 0, perPage)
- g.waitAndPickClient()
- issues, resp, err := g.getClient().Issues.ListByRepo(g.ctx, g.repoOwner, g.repoName, opt)
+ g.waitAndPickClient(ctx)
+ issues, resp, err := g.getClient().Issues.ListByRepo(ctx, g.repoOwner, g.repoName, opt)
if err != nil {
return nil, false, fmt.Errorf("error while listing repos: %w", err)
}
@@ -447,8 +440,8 @@ func (g *GithubDownloaderV3) GetIssues(page, perPage int) ([]*base.Issue, bool,
var reactions []*base.Reaction
if !g.SkipReactions {
for i := 1; ; i++ {
- g.waitAndPickClient()
- res, resp, err := g.getClient().Reactions.ListIssueReactions(g.ctx, g.repoOwner, g.repoName, issue.GetNumber(), &github.ListOptions{
+ g.waitAndPickClient(ctx)
+ res, resp, err := g.getClient().Reactions.ListIssueReactions(ctx, g.repoOwner, g.repoName, issue.GetNumber(), &github.ListOptions{
Page: i,
PerPage: perPage,
})
@@ -503,12 +496,12 @@ func (g *GithubDownloaderV3) SupportGetRepoComments() bool {
}
// GetComments returns comments according to issueNumber
-func (g *GithubDownloaderV3) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) {
- comments, err := g.getComments(commentable)
+func (g *GithubDownloaderV3) GetComments(ctx context.Context, commentable base.Commentable) ([]*base.Comment, bool, error) {
+ comments, err := g.getComments(ctx, commentable)
return comments, false, err
}
-func (g *GithubDownloaderV3) getComments(commentable base.Commentable) ([]*base.Comment, error) {
+func (g *GithubDownloaderV3) getComments(ctx context.Context, commentable base.Commentable) ([]*base.Comment, error) {
var (
allComments = make([]*base.Comment, 0, g.maxPerPage)
created = "created"
@@ -522,8 +515,8 @@ func (g *GithubDownloaderV3) getComments(commentable base.Commentable) ([]*base.
},
}
for {
- g.waitAndPickClient()
- comments, resp, err := g.getClient().Issues.ListComments(g.ctx, g.repoOwner, g.repoName, int(commentable.GetForeignIndex()), opt)
+ g.waitAndPickClient(ctx)
+ comments, resp, err := g.getClient().Issues.ListComments(ctx, g.repoOwner, g.repoName, int(commentable.GetForeignIndex()), opt)
if err != nil {
return nil, fmt.Errorf("error while listing repos: %w", err)
}
@@ -533,8 +526,8 @@ func (g *GithubDownloaderV3) getComments(commentable base.Commentable) ([]*base.
var reactions []*base.Reaction
if !g.SkipReactions {
for i := 1; ; i++ {
- g.waitAndPickClient()
- res, resp, err := g.getClient().Reactions.ListIssueCommentReactions(g.ctx, g.repoOwner, g.repoName, comment.GetID(), &github.ListOptions{
+ g.waitAndPickClient(ctx)
+ res, resp, err := g.getClient().Reactions.ListIssueCommentReactions(ctx, g.repoOwner, g.repoName, comment.GetID(), &github.ListOptions{
Page: i,
PerPage: g.maxPerPage,
})
@@ -576,7 +569,7 @@ func (g *GithubDownloaderV3) getComments(commentable base.Commentable) ([]*base.
}
// GetAllComments returns repository comments according to page and perPage
-func (g *GithubDownloaderV3) GetAllComments(page, perPage int) ([]*base.Comment, bool, error) {
+func (g *GithubDownloaderV3) GetAllComments(ctx context.Context, page, perPage int) ([]*base.Comment, bool, error) {
var (
allComments = make([]*base.Comment, 0, perPage)
created = "created"
@@ -594,8 +587,8 @@ func (g *GithubDownloaderV3) GetAllComments(page, perPage int) ([]*base.Comment,
},
}
- g.waitAndPickClient()
- comments, resp, err := g.getClient().Issues.ListComments(g.ctx, g.repoOwner, g.repoName, 0, opt)
+ g.waitAndPickClient(ctx)
+ comments, resp, err := g.getClient().Issues.ListComments(ctx, g.repoOwner, g.repoName, 0, opt)
if err != nil {
return nil, false, fmt.Errorf("error while listing repos: %w", err)
}
@@ -608,8 +601,8 @@ func (g *GithubDownloaderV3) GetAllComments(page, perPage int) ([]*base.Comment,
var reactions []*base.Reaction
if !g.SkipReactions {
for i := 1; ; i++ {
- g.waitAndPickClient()
- res, resp, err := g.getClient().Reactions.ListIssueCommentReactions(g.ctx, g.repoOwner, g.repoName, comment.GetID(), &github.ListOptions{
+ g.waitAndPickClient(ctx)
+ res, resp, err := g.getClient().Reactions.ListIssueCommentReactions(ctx, g.repoOwner, g.repoName, comment.GetID(), &github.ListOptions{
Page: i,
PerPage: g.maxPerPage,
})
@@ -648,7 +641,7 @@ func (g *GithubDownloaderV3) GetAllComments(page, perPage int) ([]*base.Comment,
}
// GetPullRequests returns pull requests according to page and perPage
-func (g *GithubDownloaderV3) GetPullRequests(page, perPage int) ([]*base.PullRequest, bool, error) {
+func (g *GithubDownloaderV3) GetPullRequests(ctx context.Context, page, perPage int) ([]*base.PullRequest, bool, error) {
if perPage > g.maxPerPage {
perPage = g.maxPerPage
}
@@ -662,8 +655,8 @@ func (g *GithubDownloaderV3) GetPullRequests(page, perPage int) ([]*base.PullReq
},
}
allPRs := make([]*base.PullRequest, 0, perPage)
- g.waitAndPickClient()
- prs, resp, err := g.getClient().PullRequests.List(g.ctx, g.repoOwner, g.repoName, opt)
+ g.waitAndPickClient(ctx)
+ prs, resp, err := g.getClient().PullRequests.List(ctx, g.repoOwner, g.repoName, opt)
if err != nil {
return nil, false, fmt.Errorf("error while listing repos: %w", err)
}
@@ -679,8 +672,8 @@ func (g *GithubDownloaderV3) GetPullRequests(page, perPage int) ([]*base.PullReq
var reactions []*base.Reaction
if !g.SkipReactions {
for i := 1; ; i++ {
- g.waitAndPickClient()
- res, resp, err := g.getClient().Reactions.ListIssueReactions(g.ctx, g.repoOwner, g.repoName, pr.GetNumber(), &github.ListOptions{
+ g.waitAndPickClient(ctx)
+ res, resp, err := g.getClient().Reactions.ListIssueReactions(ctx, g.repoOwner, g.repoName, pr.GetNumber(), &github.ListOptions{
Page: i,
PerPage: perPage,
})
@@ -702,7 +695,7 @@ func (g *GithubDownloaderV3) GetPullRequests(page, perPage int) ([]*base.PullReq
}
// download patch and save as a tmp file
- g.waitAndPickClient()
+ g.waitAndPickClient(ctx)
allPRs = append(allPRs, &base.PullRequest{
Title: pr.GetTitle(),
@@ -759,15 +752,15 @@ func convertGithubReview(r *github.PullRequestReview) *base.Review {
}
}
-func (g *GithubDownloaderV3) convertGithubReviewComments(cs []*github.PullRequestComment) ([]*base.ReviewComment, error) {
+func (g *GithubDownloaderV3) convertGithubReviewComments(ctx context.Context, cs []*github.PullRequestComment) ([]*base.ReviewComment, error) {
rcs := make([]*base.ReviewComment, 0, len(cs))
for _, c := range cs {
// get reactions
var reactions []*base.Reaction
if !g.SkipReactions {
for i := 1; ; i++ {
- g.waitAndPickClient()
- res, resp, err := g.getClient().Reactions.ListPullRequestCommentReactions(g.ctx, g.repoOwner, g.repoName, c.GetID(), &github.ListOptions{
+ g.waitAndPickClient(ctx)
+ res, resp, err := g.getClient().Reactions.ListPullRequestCommentReactions(ctx, g.repoOwner, g.repoName, c.GetID(), &github.ListOptions{
Page: i,
PerPage: g.maxPerPage,
})
@@ -806,7 +799,7 @@ func (g *GithubDownloaderV3) convertGithubReviewComments(cs []*github.PullReques
}
// GetReviews returns pull request reviews
-func (g *GithubDownloaderV3) GetReviews(reviewable base.Reviewable) ([]*base.Review, error) {
+func (g *GithubDownloaderV3) GetReviews(ctx context.Context, reviewable base.Reviewable) ([]*base.Review, error) {
allReviews := make([]*base.Review, 0, g.maxPerPage)
if g.SkipReviews {
return allReviews, nil
@@ -816,8 +809,8 @@ func (g *GithubDownloaderV3) GetReviews(reviewable base.Reviewable) ([]*base.Rev
}
// Get approve/request change reviews
for {
- g.waitAndPickClient()
- reviews, resp, err := g.getClient().PullRequests.ListReviews(g.ctx, g.repoOwner, g.repoName, int(reviewable.GetForeignIndex()), opt)
+ g.waitAndPickClient(ctx)
+ reviews, resp, err := g.getClient().PullRequests.ListReviews(ctx, g.repoOwner, g.repoName, int(reviewable.GetForeignIndex()), opt)
if err != nil {
return nil, fmt.Errorf("error while listing repos: %w", err)
}
@@ -830,14 +823,14 @@ func (g *GithubDownloaderV3) GetReviews(reviewable base.Reviewable) ([]*base.Rev
PerPage: g.maxPerPage,
}
for {
- g.waitAndPickClient()
- reviewComments, resp, err := g.getClient().PullRequests.ListReviewComments(g.ctx, g.repoOwner, g.repoName, int(reviewable.GetForeignIndex()), review.GetID(), opt2)
+ g.waitAndPickClient(ctx)
+ reviewComments, resp, err := g.getClient().PullRequests.ListReviewComments(ctx, g.repoOwner, g.repoName, int(reviewable.GetForeignIndex()), review.GetID(), opt2)
if err != nil {
return nil, fmt.Errorf("error while listing repos: %w", err)
}
g.setRate(&resp.Rate)
- cs, err := g.convertGithubReviewComments(reviewComments)
+ cs, err := g.convertGithubReviewComments(ctx, reviewComments)
if err != nil {
return nil, err
}
@@ -856,8 +849,8 @@ func (g *GithubDownloaderV3) GetReviews(reviewable base.Reviewable) ([]*base.Rev
}
// Get requested reviews
for {
- g.waitAndPickClient()
- reviewers, resp, err := g.getClient().PullRequests.ListReviewers(g.ctx, g.repoOwner, g.repoName, int(reviewable.GetForeignIndex()), opt)
+ g.waitAndPickClient(ctx)
+ reviewers, resp, err := g.getClient().PullRequests.ListReviewers(ctx, g.repoOwner, g.repoName, int(reviewable.GetForeignIndex()), opt)
if err != nil {
return nil, fmt.Errorf("error while listing repos: %w", err)
}
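The waitAndPickClient hunks above keep the same rate-limit behaviour but take ctx as an argument, so the wait for the rate-limit window to reset can be abandoned when the caller cancels. A minimal sketch with hypothetical names:

package migrations_sketch

import (
	"context"
	"time"
)

// waitForReset sleeps until the rate limit resets, or returns early if the
// caller's context is cancelled (shutdown, timeout, aborted migration).
func waitForReset(ctx context.Context, reset time.Time) {
	timer := time.NewTimer(time.Until(reset))
	defer timer.Stop()
	select {
	case <-ctx.Done():
	case <-timer.C:
	}
}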
diff --git a/services/migrations/github_test.go b/services/migrations/github_test.go
index 2b89e6dc0f..2625fb62ec 100644
--- a/services/migrations/github_test.go
+++ b/services/migrations/github_test.go
@@ -5,7 +5,6 @@
package migrations
import (
- "context"
"os"
"testing"
"time"
@@ -21,11 +20,12 @@ func TestGitHubDownloadRepo(t *testing.T) {
if token == "" {
t.Skip("Skipping GitHub migration test because GITHUB_READ_TOKEN is empty")
}
- downloader := NewGithubDownloaderV3(context.Background(), "https://github.com", "", "", token, "go-gitea", "test_repo")
- err := downloader.RefreshRate()
+ ctx := t.Context()
+ downloader := NewGithubDownloaderV3(ctx, "https://github.com", "", "", token, "go-gitea", "test_repo")
+ err := downloader.RefreshRate(ctx)
assert.NoError(t, err)
- repo, err := downloader.GetRepoInfo()
+ repo, err := downloader.GetRepoInfo(ctx)
assert.NoError(t, err)
assertRepositoryEqual(t, &base.Repository{
Name: "test_repo",
@@ -36,11 +36,11 @@ func TestGitHubDownloadRepo(t *testing.T) {
DefaultBranch: "master",
}, repo)
- topics, err := downloader.GetTopics()
+ topics, err := downloader.GetTopics(ctx)
assert.NoError(t, err)
assert.Contains(t, topics, "gitea")
- milestones, err := downloader.GetMilestones()
+ milestones, err := downloader.GetMilestones(ctx)
assert.NoError(t, err)
assertMilestonesEqual(t, []*base.Milestone{
{
@@ -63,7 +63,7 @@ func TestGitHubDownloadRepo(t *testing.T) {
},
}, milestones)
- labels, err := downloader.GetLabels()
+ labels, err := downloader.GetLabels(ctx)
assert.NoError(t, err)
assertLabelsEqual(t, []*base.Label{
{
@@ -113,7 +113,7 @@ func TestGitHubDownloadRepo(t *testing.T) {
},
}, labels)
- releases, err := downloader.GetReleases()
+ releases, err := downloader.GetReleases(ctx)
assert.NoError(t, err)
assertReleasesEqual(t, []*base.Release{
{
@@ -129,7 +129,7 @@ func TestGitHubDownloadRepo(t *testing.T) {
}, releases)
// downloader.GetIssues()
- issues, isEnd, err := downloader.GetIssues(1, 2)
+ issues, isEnd, err := downloader.GetIssues(ctx, 1, 2)
assert.NoError(t, err)
assert.False(t, isEnd)
assertIssuesEqual(t, []*base.Issue{
@@ -218,7 +218,7 @@ func TestGitHubDownloadRepo(t *testing.T) {
}, issues)
// downloader.GetComments()
- comments, _, err := downloader.GetComments(&base.Issue{Number: 2, ForeignIndex: 2})
+ comments, _, err := downloader.GetComments(ctx, &base.Issue{Number: 2, ForeignIndex: 2})
assert.NoError(t, err)
assertCommentsEqual(t, []*base.Comment{
{
@@ -248,7 +248,7 @@ func TestGitHubDownloadRepo(t *testing.T) {
}, comments)
// downloader.GetPullRequests()
- prs, _, err := downloader.GetPullRequests(1, 2)
+ prs, _, err := downloader.GetPullRequests(ctx, 1, 2)
assert.NoError(t, err)
assertPullRequestsEqual(t, []*base.PullRequest{
{
@@ -338,7 +338,7 @@ func TestGitHubDownloadRepo(t *testing.T) {
},
}, prs)
- reviews, err := downloader.GetReviews(&base.PullRequest{Number: 3, ForeignIndex: 3})
+ reviews, err := downloader.GetReviews(ctx, &base.PullRequest{Number: 3, ForeignIndex: 3})
assert.NoError(t, err)
assertReviewsEqual(t, []*base.Review{
{
@@ -370,7 +370,7 @@ func TestGitHubDownloadRepo(t *testing.T) {
},
}, reviews)
- reviews, err = downloader.GetReviews(&base.PullRequest{Number: 4, ForeignIndex: 4})
+ reviews, err = downloader.GetReviews(ctx, &base.PullRequest{Number: 4, ForeignIndex: 4})
assert.NoError(t, err)
assertReviewsEqual(t, []*base.Review{
{
diff --git a/services/migrations/gitlab.go b/services/migrations/gitlab.go
index 07d5040b5b..4bed8e2f6c 100644
--- a/services/migrations/gitlab.go
+++ b/services/migrations/gitlab.go
@@ -16,6 +16,7 @@ import (
"time"
issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/log"
base "code.gitea.io/gitea/modules/migration"
@@ -80,7 +81,6 @@ func (r *gitlabIIDResolver) generatePullRequestNumber(mrIID int) int64 {
// because Gitlab has individual Issue and Pull Request numbers.
type GitlabDownloader struct {
base.NullDownloader
- ctx context.Context
client *gitlab.Client
baseURL string
repoID int
@@ -143,7 +143,6 @@ func NewGitlabDownloader(ctx context.Context, baseURL, repoPath, username, passw
}
return &GitlabDownloader{
- ctx: ctx,
client: gitlabClient,
baseURL: baseURL,
repoID: gr.ID,
@@ -164,14 +163,9 @@ func (g *GitlabDownloader) LogString() string {
return fmt.Sprintf("<GitlabDownloader %s [%d]/%s>", g.baseURL, g.repoID, g.repoName)
}
-// SetContext set context
-func (g *GitlabDownloader) SetContext(ctx context.Context) {
- g.ctx = ctx
-}
-
// GetRepoInfo returns repository information
-func (g *GitlabDownloader) GetRepoInfo() (*base.Repository, error) {
- gr, _, err := g.client.Projects.GetProject(g.repoID, nil, nil, gitlab.WithContext(g.ctx))
+func (g *GitlabDownloader) GetRepoInfo(ctx context.Context) (*base.Repository, error) {
+ gr, _, err := g.client.Projects.GetProject(g.repoID, nil, nil, gitlab.WithContext(ctx))
if err != nil {
return nil, err
}
@@ -207,8 +201,8 @@ func (g *GitlabDownloader) GetRepoInfo() (*base.Repository, error) {
}
// GetTopics returns gitlab topics
-func (g *GitlabDownloader) GetTopics() ([]string, error) {
- gr, _, err := g.client.Projects.GetProject(g.repoID, nil, nil, gitlab.WithContext(g.ctx))
+func (g *GitlabDownloader) GetTopics(ctx context.Context) ([]string, error) {
+ gr, _, err := g.client.Projects.GetProject(g.repoID, nil, nil, gitlab.WithContext(ctx))
if err != nil {
return nil, err
}
@@ -216,7 +210,7 @@ func (g *GitlabDownloader) GetTopics() ([]string, error) {
}
// GetMilestones returns milestones
-func (g *GitlabDownloader) GetMilestones() ([]*base.Milestone, error) {
+func (g *GitlabDownloader) GetMilestones(ctx context.Context) ([]*base.Milestone, error) {
perPage := g.maxPerPage
state := "all"
milestones := make([]*base.Milestone, 0, perPage)
@@ -227,7 +221,7 @@ func (g *GitlabDownloader) GetMilestones() ([]*base.Milestone, error) {
Page: i,
PerPage: perPage,
},
- }, nil, gitlab.WithContext(g.ctx))
+ }, nil, gitlab.WithContext(ctx))
if err != nil {
return nil, err
}
@@ -288,14 +282,14 @@ func (g *GitlabDownloader) normalizeColor(val string) string {
}
// GetLabels returns labels
-func (g *GitlabDownloader) GetLabels() ([]*base.Label, error) {
+func (g *GitlabDownloader) GetLabels(ctx context.Context) ([]*base.Label, error) {
perPage := g.maxPerPage
labels := make([]*base.Label, 0, perPage)
for i := 1; ; i++ {
ls, _, err := g.client.Labels.ListLabels(g.repoID, &gitlab.ListLabelsOptions{ListOptions: gitlab.ListOptions{
Page: i,
PerPage: perPage,
- }}, nil, gitlab.WithContext(g.ctx))
+ }}, nil, gitlab.WithContext(ctx))
if err != nil {
return nil, err
}
@@ -314,7 +308,7 @@ func (g *GitlabDownloader) GetLabels() ([]*base.Label, error) {
return labels, nil
}
-func (g *GitlabDownloader) convertGitlabRelease(rel *gitlab.Release) *base.Release {
+func (g *GitlabDownloader) convertGitlabRelease(ctx context.Context, rel *gitlab.Release) *base.Release {
var zero int
r := &base.Release{
TagName: rel.TagName,
@@ -337,7 +331,7 @@ func (g *GitlabDownloader) convertGitlabRelease(rel *gitlab.Release) *base.Relea
Size: &zero,
DownloadCount: &zero,
DownloadFunc: func() (io.ReadCloser, error) {
- link, _, err := g.client.ReleaseLinks.GetReleaseLink(g.repoID, rel.TagName, assetID, gitlab.WithContext(g.ctx))
+ link, _, err := g.client.ReleaseLinks.GetReleaseLink(g.repoID, rel.TagName, assetID, gitlab.WithContext(ctx))
if err != nil {
return nil, err
}
@@ -351,7 +345,7 @@ func (g *GitlabDownloader) convertGitlabRelease(rel *gitlab.Release) *base.Relea
if err != nil {
return nil, err
}
- req = req.WithContext(g.ctx)
+ req = req.WithContext(ctx)
resp, err := httpClient.Do(req)
if err != nil {
return nil, err
@@ -366,7 +360,7 @@ func (g *GitlabDownloader) convertGitlabRelease(rel *gitlab.Release) *base.Relea
}
// GetReleases returns releases
-func (g *GitlabDownloader) GetReleases() ([]*base.Release, error) {
+func (g *GitlabDownloader) GetReleases(ctx context.Context) ([]*base.Release, error) {
perPage := g.maxPerPage
releases := make([]*base.Release, 0, perPage)
for i := 1; ; i++ {
@@ -375,13 +369,13 @@ func (g *GitlabDownloader) GetReleases() ([]*base.Release, error) {
Page: i,
PerPage: perPage,
},
- }, nil, gitlab.WithContext(g.ctx))
+ }, nil, gitlab.WithContext(ctx))
if err != nil {
return nil, err
}
for _, release := range ls {
- releases = append(releases, g.convertGitlabRelease(release))
+ releases = append(releases, g.convertGitlabRelease(ctx, release))
}
if len(ls) < perPage {
break
@@ -397,7 +391,7 @@ type gitlabIssueContext struct {
// GetIssues returns issues according to start and limit
//
// Note: issue label description and colors are not supported by the go-gitlab library at this time
-func (g *GitlabDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, error) {
+func (g *GitlabDownloader) GetIssues(ctx context.Context, page, perPage int) ([]*base.Issue, bool, error) {
state := "all"
sort := "asc"
@@ -416,7 +410,7 @@ func (g *GitlabDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, er
allIssues := make([]*base.Issue, 0, perPage)
- issues, _, err := g.client.Issues.ListProjectIssues(g.repoID, opt, nil, gitlab.WithContext(g.ctx))
+ issues, _, err := g.client.Issues.ListProjectIssues(g.repoID, opt, nil, gitlab.WithContext(ctx))
if err != nil {
return nil, false, fmt.Errorf("error while listing issues: %w", err)
}
@@ -436,7 +430,7 @@ func (g *GitlabDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, er
var reactions []*gitlab.AwardEmoji
awardPage := 1
for {
- awards, _, err := g.client.AwardEmoji.ListIssueAwardEmoji(g.repoID, issue.IID, &gitlab.ListAwardEmojiOptions{Page: awardPage, PerPage: perPage}, gitlab.WithContext(g.ctx))
+ awards, _, err := g.client.AwardEmoji.ListIssueAwardEmoji(g.repoID, issue.IID, &gitlab.ListAwardEmojiOptions{Page: awardPage, PerPage: perPage}, gitlab.WithContext(ctx))
if err != nil {
return nil, false, fmt.Errorf("error while listing issue awards: %w", err)
}
@@ -477,7 +471,7 @@ func (g *GitlabDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, er
// GetComments returns comments according to issueNumber
// TODO: figure out how to transfer comment reactions
-func (g *GitlabDownloader) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) {
+func (g *GitlabDownloader) GetComments(ctx context.Context, commentable base.Commentable) ([]*base.Comment, bool, error) {
context, ok := commentable.GetContext().(gitlabIssueContext)
if !ok {
return nil, false, fmt.Errorf("unexpected context: %+v", commentable.GetContext())
@@ -495,12 +489,12 @@ func (g *GitlabDownloader) GetComments(commentable base.Commentable) ([]*base.Co
comments, resp, err = g.client.Discussions.ListIssueDiscussions(g.repoID, int(commentable.GetForeignIndex()), &gitlab.ListIssueDiscussionsOptions{
Page: page,
PerPage: g.maxPerPage,
- }, nil, gitlab.WithContext(g.ctx))
+ }, nil, gitlab.WithContext(ctx))
} else {
comments, resp, err = g.client.Discussions.ListMergeRequestDiscussions(g.repoID, int(commentable.GetForeignIndex()), &gitlab.ListMergeRequestDiscussionsOptions{
Page: page,
PerPage: g.maxPerPage,
- }, nil, gitlab.WithContext(g.ctx))
+ }, nil, gitlab.WithContext(ctx))
}
if err != nil {
@@ -528,25 +522,29 @@ func (g *GitlabDownloader) GetComments(commentable base.Commentable) ([]*base.Co
Page: page,
PerPage: g.maxPerPage,
},
- }, nil, gitlab.WithContext(g.ctx))
+ }, nil, gitlab.WithContext(ctx))
} else {
stateEvents, resp, err = g.client.ResourceStateEvents.ListIssueStateEvents(g.repoID, int(commentable.GetForeignIndex()), &gitlab.ListStateEventsOptions{
ListOptions: gitlab.ListOptions{
Page: page,
PerPage: g.maxPerPage,
},
- }, nil, gitlab.WithContext(g.ctx))
+ }, nil, gitlab.WithContext(ctx))
}
if err != nil {
return nil, false, fmt.Errorf("error while listing state events: %v %w", g.repoID, err)
}
for _, stateEvent := range stateEvents {
+ posterUserID, posterUsername := user.GhostUserID, user.GhostUserName
+ if stateEvent.User != nil {
+ posterUserID, posterUsername = int64(stateEvent.User.ID), stateEvent.User.Username
+ }
comment := &base.Comment{
IssueIndex: commentable.GetLocalIndex(),
Index: int64(stateEvent.ID),
- PosterID: int64(stateEvent.User.ID),
- PosterName: stateEvent.User.Username,
+ PosterID: posterUserID,
+ PosterName: posterUsername,
Content: "",
Created: *stateEvent.CreatedAt,
}
@@ -604,7 +602,7 @@ func (g *GitlabDownloader) convertNoteToComment(localIndex int64, note *gitlab.N
}
// GetPullRequests returns pull requests according to page and perPage
-func (g *GitlabDownloader) GetPullRequests(page, perPage int) ([]*base.PullRequest, bool, error) {
+func (g *GitlabDownloader) GetPullRequests(ctx context.Context, page, perPage int) ([]*base.PullRequest, bool, error) {
if perPage > g.maxPerPage {
perPage = g.maxPerPage
}
@@ -620,7 +618,7 @@ func (g *GitlabDownloader) GetPullRequests(page, perPage int) ([]*base.PullReque
allPRs := make([]*base.PullRequest, 0, perPage)
- prs, _, err := g.client.MergeRequests.ListProjectMergeRequests(g.repoID, opt, nil, gitlab.WithContext(g.ctx))
+ prs, _, err := g.client.MergeRequests.ListProjectMergeRequests(g.repoID, opt, nil, gitlab.WithContext(ctx))
if err != nil {
return nil, false, fmt.Errorf("error while listing merge requests: %w", err)
}
@@ -673,7 +671,7 @@ func (g *GitlabDownloader) GetPullRequests(page, perPage int) ([]*base.PullReque
var reactions []*gitlab.AwardEmoji
awardPage := 1
for {
- awards, _, err := g.client.AwardEmoji.ListMergeRequestAwardEmoji(g.repoID, pr.IID, &gitlab.ListAwardEmojiOptions{Page: awardPage, PerPage: perPage}, gitlab.WithContext(g.ctx))
+ awards, _, err := g.client.AwardEmoji.ListMergeRequestAwardEmoji(g.repoID, pr.IID, &gitlab.ListAwardEmojiOptions{Page: awardPage, PerPage: perPage}, gitlab.WithContext(ctx))
if err != nil {
return nil, false, fmt.Errorf("error while listing merge requests awards: %w", err)
}
@@ -733,8 +731,8 @@ func (g *GitlabDownloader) GetPullRequests(page, perPage int) ([]*base.PullReque
}
// GetReviews returns pull request reviews
-func (g *GitlabDownloader) GetReviews(reviewable base.Reviewable) ([]*base.Review, error) {
- approvals, resp, err := g.client.MergeRequestApprovals.GetConfiguration(g.repoID, int(reviewable.GetForeignIndex()), gitlab.WithContext(g.ctx))
+func (g *GitlabDownloader) GetReviews(ctx context.Context, reviewable base.Reviewable) ([]*base.Review, error) {
+ approvals, resp, err := g.client.MergeRequestApprovals.GetConfiguration(g.repoID, int(reviewable.GetForeignIndex()), gitlab.WithContext(ctx))
if err != nil {
if resp != nil && resp.StatusCode == http.StatusNotFound {
log.Error(fmt.Sprintf("GitlabDownloader: while migrating an error occurred: '%s'", err.Error()))
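
The GitLab downloader changes above all follow the same shape: the context is no longer cached on the struct at construction time; instead it is passed to every method and forwarded to each API call (gitlab.WithContext(ctx), req.WithContext(ctx)). The sketch below uses only the standard library and hypothetical names, not Gitea's real interfaces, to show why that matters: each call can now carry its own deadline or cancellation.

    package main

    import (
        "context"
        "fmt"
        "time"
    )

    // fakeDownloader stands in for a migration downloader. It stores no
    // context; every method receives one from the caller instead.
    type fakeDownloader struct{}

    // GetLabels forwards ctx to the (simulated) API call, so the caller's
    // deadline or cancellation applies to exactly this operation.
    func (d *fakeDownloader) GetLabels(ctx context.Context) ([]string, error) {
        select {
        case <-time.After(50 * time.Millisecond): // pretend network round trip
            return []string{"bug", "feature"}, nil
        case <-ctx.Done():
            return nil, ctx.Err()
        }
    }

    func main() {
        d := &fakeDownloader{}

        labels, err := d.GetLabels(context.Background())
        fmt.Println(labels, err) // [bug feature] <nil>

        ctx, cancel := context.WithTimeout(context.Background(), 10*time.Millisecond)
        defer cancel()
        _, err = d.GetLabels(ctx)
        fmt.Println(err) // context deadline exceeded
    }
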
diff --git a/services/migrations/gitlab_test.go b/services/migrations/gitlab_test.go
index 556fe771c5..73a1b6a276 100644
--- a/services/migrations/gitlab_test.go
+++ b/services/migrations/gitlab_test.go
@@ -4,7 +4,6 @@
package migrations
import (
- "context"
"fmt"
"net/http"
"net/http/httptest"
@@ -31,12 +30,12 @@ func TestGitlabDownloadRepo(t *testing.T) {
if err != nil || resp.StatusCode != http.StatusOK {
t.Skipf("Can't access test repo, skipping %s", t.Name())
}
-
- downloader, err := NewGitlabDownloader(context.Background(), "https://gitlab.com", "gitea/test_repo", "", "", gitlabPersonalAccessToken)
+ ctx := t.Context()
+ downloader, err := NewGitlabDownloader(ctx, "https://gitlab.com", "gitea/test_repo", "", "", gitlabPersonalAccessToken)
if err != nil {
t.Fatalf("NewGitlabDownloader is nil: %v", err)
}
- repo, err := downloader.GetRepoInfo()
+ repo, err := downloader.GetRepoInfo(ctx)
assert.NoError(t, err)
// Repo Owner is blank in Gitlab Group repos
assertRepositoryEqual(t, &base.Repository{
@@ -48,12 +47,12 @@ func TestGitlabDownloadRepo(t *testing.T) {
DefaultBranch: "master",
}, repo)
- topics, err := downloader.GetTopics()
+ topics, err := downloader.GetTopics(ctx)
assert.NoError(t, err)
assert.Len(t, topics, 2)
- assert.EqualValues(t, []string{"migration", "test"}, topics)
+ assert.Equal(t, []string{"migration", "test"}, topics)
- milestones, err := downloader.GetMilestones()
+ milestones, err := downloader.GetMilestones(ctx)
assert.NoError(t, err)
assertMilestonesEqual(t, []*base.Milestone{
{
@@ -71,7 +70,7 @@ func TestGitlabDownloadRepo(t *testing.T) {
},
}, milestones)
- labels, err := downloader.GetLabels()
+ labels, err := downloader.GetLabels(ctx)
assert.NoError(t, err)
assertLabelsEqual(t, []*base.Label{
{
@@ -112,7 +111,7 @@ func TestGitlabDownloadRepo(t *testing.T) {
},
}, labels)
- releases, err := downloader.GetReleases()
+ releases, err := downloader.GetReleases(ctx)
assert.NoError(t, err)
assertReleasesEqual(t, []*base.Release{
{
@@ -126,7 +125,7 @@ func TestGitlabDownloadRepo(t *testing.T) {
},
}, releases)
- issues, isEnd, err := downloader.GetIssues(1, 2)
+ issues, isEnd, err := downloader.GetIssues(ctx, 1, 2)
assert.NoError(t, err)
assert.False(t, isEnd)
@@ -214,7 +213,7 @@ func TestGitlabDownloadRepo(t *testing.T) {
},
}, issues)
- comments, _, err := downloader.GetComments(&base.Issue{
+ comments, _, err := downloader.GetComments(ctx, &base.Issue{
Number: 2,
ForeignIndex: 2,
Context: gitlabIssueContext{IsMergeRequest: false},
@@ -255,7 +254,7 @@ func TestGitlabDownloadRepo(t *testing.T) {
},
}, comments)
- prs, _, err := downloader.GetPullRequests(1, 1)
+ prs, _, err := downloader.GetPullRequests(ctx, 1, 1)
assert.NoError(t, err)
assertPullRequestsEqual(t, []*base.PullRequest{
{
@@ -304,7 +303,7 @@ func TestGitlabDownloadRepo(t *testing.T) {
},
}, prs)
- rvs, err := downloader.GetReviews(&base.PullRequest{Number: 1, ForeignIndex: 1})
+ rvs, err := downloader.GetReviews(ctx, &base.PullRequest{Number: 1, ForeignIndex: 1})
assert.NoError(t, err)
assertReviewsEqual(t, []*base.Review{
{
@@ -323,7 +322,7 @@ func TestGitlabDownloadRepo(t *testing.T) {
},
}, rvs)
- rvs, err = downloader.GetReviews(&base.PullRequest{Number: 2, ForeignIndex: 2})
+ rvs, err = downloader.GetReviews(ctx, &base.PullRequest{Number: 2, ForeignIndex: 2})
assert.NoError(t, err)
assertReviewsEqual(t, []*base.Review{
{
@@ -423,9 +422,8 @@ func TestGitlabGetReviews(t *testing.T) {
defer gitlabClientMockTeardown(server)
repoID := 1324
-
+ ctx := t.Context()
downloader := &GitlabDownloader{
- ctx: context.Background(),
client: client,
repoID: repoID,
}
@@ -465,7 +463,7 @@ func TestGitlabGetReviews(t *testing.T) {
mux.HandleFunc(fmt.Sprintf("/api/v4/projects/%d/merge_requests/%d/approvals", testCase.repoID, testCase.prID), mock)
id := int64(testCase.prID)
- rvs, err := downloader.GetReviews(&base.Issue{Number: id, ForeignIndex: id})
+ rvs, err := downloader.GetReviews(ctx, &base.Issue{Number: id, ForeignIndex: id})
assert.NoError(t, err)
assertReviewsEqual(t, []*base.Review{&review}, rvs)
}
@@ -503,7 +501,7 @@ func TestAwardsToReactions(t *testing.T) {
assert.NoError(t, json.Unmarshal([]byte(testResponse), &awards))
reactions := downloader.awardsToReactions(awards)
- assert.EqualValues(t, []*base.Reaction{
+ assert.Equal(t, []*base.Reaction{
{
UserName: "lafriks",
UserID: 1241334,
@@ -595,7 +593,7 @@ func TestNoteToComment(t *testing.T) {
for i, note := range notes {
actualComment := *downloader.convertNoteToComment(17, &note)
- assert.EqualValues(t, actualComment, comments[i])
+ assert.Equal(t, actualComment, comments[i])
}
}
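
The test updates swap context.Background() for t.Context(), the testing helper that returns a context canceled automatically just before the test's cleanup runs, so work started by the downloader cannot outlive the test. A minimal sketch, with a hypothetical doWork helper and assuming a Go toolchain that provides (*testing.T).Context (Go 1.24+):

    package migrations

    import (
        "context"
        "testing"
    )

    // doWork stands in for any context-aware call a test might make.
    func doWork(ctx context.Context) error {
        return ctx.Err() // nil while the test is still running
    }

    func TestContextIsScopedToTheTest(t *testing.T) {
        ctx := t.Context() // canceled automatically when the test ends
        if err := doWork(ctx); err != nil {
            t.Fatalf("unexpected error: %v", err)
        }
    }
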
diff --git a/services/migrations/gogs.go b/services/migrations/gogs.go
index 72c52d180b..a4f84dbf72 100644
--- a/services/migrations/gogs.go
+++ b/services/migrations/gogs.go
@@ -13,7 +13,6 @@ import (
"code.gitea.io/gitea/modules/log"
base "code.gitea.io/gitea/modules/migration"
- "code.gitea.io/gitea/modules/proxy"
"code.gitea.io/gitea/modules/structs"
"github.com/gogs/go-gogs-client"
@@ -60,16 +59,14 @@ func (f *GogsDownloaderFactory) GitServiceType() structs.GitServiceType {
// from gogs via API
type GogsDownloader struct {
base.NullDownloader
- ctx context.Context
- client *gogs.Client
baseURL string
repoOwner string
repoName string
userName string
password string
+ token string
openIssuesFinished bool
openIssuesPages int
- transport http.RoundTripper
}
// String implements Stringer
@@ -84,53 +81,45 @@ func (g *GogsDownloader) LogString() string {
return fmt.Sprintf("<GogsDownloader %s %s/%s>", g.baseURL, g.repoOwner, g.repoName)
}
-// SetContext set context
-func (g *GogsDownloader) SetContext(ctx context.Context) {
- g.ctx = ctx
-}
-
// NewGogsDownloader creates a gogs Downloader via gogs API
-func NewGogsDownloader(ctx context.Context, baseURL, userName, password, token, repoOwner, repoName string) *GogsDownloader {
+func NewGogsDownloader(_ context.Context, baseURL, userName, password, token, repoOwner, repoName string) *GogsDownloader {
downloader := GogsDownloader{
- ctx: ctx,
baseURL: baseURL,
userName: userName,
password: password,
+ token: token,
repoOwner: repoOwner,
repoName: repoName,
}
+ return &downloader
+}
- var client *gogs.Client
- if len(token) != 0 {
- client = gogs.NewClient(baseURL, token)
- downloader.userName = token
- } else {
- transport := NewMigrationHTTPTransport()
- transport.Proxy = func(req *http.Request) (*url.URL, error) {
- req.SetBasicAuth(userName, password)
- return proxy.Proxy()(req)
- }
- downloader.transport = transport
-
- client = gogs.NewClient(baseURL, "")
- client.SetHTTPClient(&http.Client{
- Transport: &downloader,
- })
- }
+type roundTripperFunc func(req *http.Request) (*http.Response, error)
- downloader.client = client
- return &downloader
+func (rt roundTripperFunc) RoundTrip(r *http.Request) (*http.Response, error) {
+ return rt(r)
}
-// RoundTrip wraps the provided request within this downloader's context and passes it to our internal http.Transport.
-// This implements http.RoundTripper and makes the gogs client requests cancellable even though it is not cancellable itself
-func (g *GogsDownloader) RoundTrip(req *http.Request) (*http.Response, error) {
- return g.transport.RoundTrip(req.WithContext(g.ctx))
+func (g *GogsDownloader) client(ctx context.Context) *gogs.Client {
+ // The Gogs client lacks context support, so we use a custom transport:
+ // each request gets a dedicated client bound to the caller's context
+ httpTransport := NewMigrationHTTPTransport()
+ gogsClient := gogs.NewClient(g.baseURL, g.token)
+ gogsClient.SetHTTPClient(&http.Client{
+ Transport: roundTripperFunc(func(req *http.Request) (*http.Response, error) {
+ if g.password != "" {
+ // The Gogs client lacks basic-auth support; setting it on each request is the only way
+ req.SetBasicAuth(g.userName, g.password)
+ }
+ return httpTransport.RoundTrip(req.WithContext(ctx))
+ }),
+ })
+ return gogsClient
}
// GetRepoInfo returns repository information
-func (g *GogsDownloader) GetRepoInfo() (*base.Repository, error) {
- gr, err := g.client.GetRepo(g.repoOwner, g.repoName)
+func (g *GogsDownloader) GetRepoInfo(ctx context.Context) (*base.Repository, error) {
+ gr, err := g.client(ctx).GetRepo(g.repoOwner, g.repoName)
if err != nil {
return nil, err
}
@@ -148,11 +137,11 @@ func (g *GogsDownloader) GetRepoInfo() (*base.Repository, error) {
}
// GetMilestones returns milestones
-func (g *GogsDownloader) GetMilestones() ([]*base.Milestone, error) {
+func (g *GogsDownloader) GetMilestones(ctx context.Context) ([]*base.Milestone, error) {
perPage := 100
milestones := make([]*base.Milestone, 0, perPage)
- ms, err := g.client.ListRepoMilestones(g.repoOwner, g.repoName)
+ ms, err := g.client(ctx).ListRepoMilestones(g.repoOwner, g.repoName)
if err != nil {
return nil, err
}
@@ -171,10 +160,10 @@ func (g *GogsDownloader) GetMilestones() ([]*base.Milestone, error) {
}
// GetLabels returns labels
-func (g *GogsDownloader) GetLabels() ([]*base.Label, error) {
+func (g *GogsDownloader) GetLabels(ctx context.Context) ([]*base.Label, error) {
perPage := 100
labels := make([]*base.Label, 0, perPage)
- ls, err := g.client.ListRepoLabels(g.repoOwner, g.repoName)
+ ls, err := g.client(ctx).ListRepoLabels(g.repoOwner, g.repoName)
if err != nil {
return nil, err
}
@@ -187,7 +176,7 @@ func (g *GogsDownloader) GetLabels() ([]*base.Label, error) {
}
// GetIssues returns issues according to start and limit; perPage is not supported
-func (g *GogsDownloader) GetIssues(page, _ int) ([]*base.Issue, bool, error) {
+func (g *GogsDownloader) GetIssues(ctx context.Context, page, _ int) ([]*base.Issue, bool, error) {
var state string
if g.openIssuesFinished {
state = string(gogs.STATE_CLOSED)
@@ -197,7 +186,7 @@ func (g *GogsDownloader) GetIssues(page, _ int) ([]*base.Issue, bool, error) {
g.openIssuesPages = page
}
- issues, isEnd, err := g.getIssues(page, state)
+ issues, isEnd, err := g.getIssues(ctx, page, state)
if err != nil {
return nil, false, err
}
@@ -212,10 +201,10 @@ func (g *GogsDownloader) GetIssues(page, _ int) ([]*base.Issue, bool, error) {
return issues, false, nil
}
-func (g *GogsDownloader) getIssues(page int, state string) ([]*base.Issue, bool, error) {
+func (g *GogsDownloader) getIssues(ctx context.Context, page int, state string) ([]*base.Issue, bool, error) {
allIssues := make([]*base.Issue, 0, 10)
- issues, err := g.client.ListRepoIssues(g.repoOwner, g.repoName, gogs.ListIssueOption{
+ issues, err := g.client(ctx).ListRepoIssues(g.repoOwner, g.repoName, gogs.ListIssueOption{
Page: page,
State: state,
})
@@ -234,10 +223,10 @@ func (g *GogsDownloader) getIssues(page int, state string) ([]*base.Issue, bool,
}
// GetComments returns comments according to issueNumber
-func (g *GogsDownloader) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) {
+func (g *GogsDownloader) GetComments(ctx context.Context, commentable base.Commentable) ([]*base.Comment, bool, error) {
allComments := make([]*base.Comment, 0, 100)
- comments, err := g.client.ListIssueComments(g.repoOwner, g.repoName, commentable.GetForeignIndex())
+ comments, err := g.client(ctx).ListIssueComments(g.repoOwner, g.repoName, commentable.GetForeignIndex())
if err != nil {
return nil, false, fmt.Errorf("error while listing repos: %w", err)
}
@@ -261,7 +250,7 @@ func (g *GogsDownloader) GetComments(commentable base.Commentable) ([]*base.Comm
}
// GetTopics returns repository topics
-func (g *GogsDownloader) GetTopics() ([]string, error) {
+func (g *GogsDownloader) GetTopics(_ context.Context) ([]string, error) {
return []string{}, nil
}
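
Because the Gogs client exposes neither a context parameter nor basic auth directly, the rewritten client(ctx) helper above builds a fresh client per request whose http.RoundTripper injects both. The same technique in isolation, standard library only and with hypothetical names:

    package main

    import (
        "context"
        "fmt"
        "net/http"
    )

    // roundTripperFunc lets an ordinary function satisfy http.RoundTripper.
    type roundTripperFunc func(*http.Request) (*http.Response, error)

    func (f roundTripperFunc) RoundTrip(r *http.Request) (*http.Response, error) { return f(r) }

    // newContextClient returns an *http.Client whose requests are bound to ctx
    // and carry basic-auth credentials, even when the library using the client
    // exposes neither knob (mirroring the patch, credentials are set in RoundTrip).
    func newContextClient(ctx context.Context, user, pass string) *http.Client {
        base := http.DefaultTransport
        return &http.Client{
            Transport: roundTripperFunc(func(req *http.Request) (*http.Response, error) {
                if pass != "" {
                    req.SetBasicAuth(user, pass)
                }
                return base.RoundTrip(req.WithContext(ctx))
            }),
        }
    }

    func main() {
        ctx, cancel := context.WithCancel(context.Background())
        cancel() // already canceled: the request below fails immediately
        client := newContextClient(ctx, "user", "secret")
        _, err := client.Get("https://example.com/")
        fmt.Println(err) // wraps context.Canceled
    }
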
diff --git a/services/migrations/gogs_test.go b/services/migrations/gogs_test.go
index 610af183de..503b669f8e 100644
--- a/services/migrations/gogs_test.go
+++ b/services/migrations/gogs_test.go
@@ -4,7 +4,6 @@
package migrations
import (
- "context"
"net/http"
"os"
"testing"
@@ -28,9 +27,9 @@ func TestGogsDownloadRepo(t *testing.T) {
t.Skipf("visit test repo failed, ignored")
return
}
-
- downloader := NewGogsDownloader(context.Background(), "https://try.gogs.io", "", "", gogsPersonalAccessToken, "lunnytest", "TESTREPO")
- repo, err := downloader.GetRepoInfo()
+ ctx := t.Context()
+ downloader := NewGogsDownloader(ctx, "https://try.gogs.io", "", "", gogsPersonalAccessToken, "lunnytest", "TESTREPO")
+ repo, err := downloader.GetRepoInfo(ctx)
assert.NoError(t, err)
assertRepositoryEqual(t, &base.Repository{
@@ -42,7 +41,7 @@ func TestGogsDownloadRepo(t *testing.T) {
DefaultBranch: "master",
}, repo)
- milestones, err := downloader.GetMilestones()
+ milestones, err := downloader.GetMilestones(ctx)
assert.NoError(t, err)
assertMilestonesEqual(t, []*base.Milestone{
{
@@ -51,7 +50,7 @@ func TestGogsDownloadRepo(t *testing.T) {
},
}, milestones)
- labels, err := downloader.GetLabels()
+ labels, err := downloader.GetLabels(ctx)
assert.NoError(t, err)
assertLabelsEqual(t, []*base.Label{
{
@@ -85,7 +84,7 @@ func TestGogsDownloadRepo(t *testing.T) {
}, labels)
// downloader.GetIssues()
- issues, isEnd, err := downloader.GetIssues(1, 8)
+ issues, isEnd, err := downloader.GetIssues(ctx, 1, 8)
assert.NoError(t, err)
assert.False(t, isEnd)
assertIssuesEqual(t, []*base.Issue{
@@ -110,7 +109,7 @@ func TestGogsDownloadRepo(t *testing.T) {
}, issues)
// downloader.GetComments()
- comments, _, err := downloader.GetComments(&base.Issue{Number: 1, ForeignIndex: 1})
+ comments, _, err := downloader.GetComments(ctx, &base.Issue{Number: 1, ForeignIndex: 1})
assert.NoError(t, err)
assertCommentsEqual(t, []*base.Comment{
{
@@ -134,6 +133,6 @@ func TestGogsDownloadRepo(t *testing.T) {
}, comments)
// downloader.GetPullRequests()
- _, _, err = downloader.GetPullRequests(1, 3)
+ _, _, err = downloader.GetPullRequests(ctx, 1, 3)
assert.Error(t, err)
}
diff --git a/services/migrations/migrate.go b/services/migrations/migrate.go
index 51b22d6111..5dda12286f 100644
--- a/services/migrations/migrate.go
+++ b/services/migrations/migrate.go
@@ -168,7 +168,7 @@ func newDownloader(ctx context.Context, ownerName string, opts base.MigrateOptio
}
if setting.Migrations.MaxAttempts > 1 {
- downloader = base.NewRetryDownloader(ctx, downloader, setting.Migrations.MaxAttempts, setting.Migrations.RetryBackoff)
+ downloader = base.NewRetryDownloader(downloader, setting.Migrations.MaxAttempts, setting.Migrations.RetryBackoff)
}
return downloader, nil
}
@@ -176,12 +176,12 @@ func newDownloader(ctx context.Context, ownerName string, opts base.MigrateOptio
// migrateRepository downloads information and then uploads it to the Uploader. This is a simple
// process that works for a small repository; for a big repository, it is better to save all the
// data to disk before uploading.
-func migrateRepository(_ context.Context, doer *user_model.User, downloader base.Downloader, uploader base.Uploader, opts base.MigrateOptions, messenger base.Messenger) error {
+func migrateRepository(ctx context.Context, doer *user_model.User, downloader base.Downloader, uploader base.Uploader, opts base.MigrateOptions, messenger base.Messenger) error {
if messenger == nil {
messenger = base.NilMessenger
}
- repo, err := downloader.GetRepoInfo()
+ repo, err := downloader.GetRepoInfo(ctx)
if err != nil {
if !base.IsErrNotSupported(err) {
return err
@@ -220,14 +220,14 @@ func migrateRepository(_ context.Context, doer *user_model.User, downloader base
log.Trace("migrating git data from %s", repo.CloneURL)
messenger("repo.migrate.migrating_git")
- if err = uploader.CreateRepo(repo, opts); err != nil {
+ if err = uploader.CreateRepo(ctx, repo, opts); err != nil {
return err
}
defer uploader.Close()
log.Trace("migrating topics")
messenger("repo.migrate.migrating_topics")
- topics, err := downloader.GetTopics()
+ topics, err := downloader.GetTopics(ctx)
if err != nil {
if !base.IsErrNotSupported(err) {
return err
@@ -235,7 +235,7 @@ func migrateRepository(_ context.Context, doer *user_model.User, downloader base
log.Warn("migrating topics is not supported, ignored")
}
if len(topics) != 0 {
- if err = uploader.CreateTopics(topics...); err != nil {
+ if err = uploader.CreateTopics(ctx, topics...); err != nil {
return err
}
}
@@ -243,7 +243,7 @@ func migrateRepository(_ context.Context, doer *user_model.User, downloader base
if opts.Milestones {
log.Trace("migrating milestones")
messenger("repo.migrate.migrating_milestones")
- milestones, err := downloader.GetMilestones()
+ milestones, err := downloader.GetMilestones(ctx)
if err != nil {
if !base.IsErrNotSupported(err) {
return err
@@ -256,7 +256,7 @@ func migrateRepository(_ context.Context, doer *user_model.User, downloader base
msBatchSize = len(milestones)
}
- if err := uploader.CreateMilestones(milestones[:msBatchSize]...); err != nil {
+ if err := uploader.CreateMilestones(ctx, milestones[:msBatchSize]...); err != nil {
return err
}
milestones = milestones[msBatchSize:]
@@ -266,7 +266,7 @@ func migrateRepository(_ context.Context, doer *user_model.User, downloader base
if opts.Labels {
log.Trace("migrating labels")
messenger("repo.migrate.migrating_labels")
- labels, err := downloader.GetLabels()
+ labels, err := downloader.GetLabels(ctx)
if err != nil {
if !base.IsErrNotSupported(err) {
return err
@@ -280,7 +280,7 @@ func migrateRepository(_ context.Context, doer *user_model.User, downloader base
lbBatchSize = len(labels)
}
- if err := uploader.CreateLabels(labels[:lbBatchSize]...); err != nil {
+ if err := uploader.CreateLabels(ctx, labels[:lbBatchSize]...); err != nil {
return err
}
labels = labels[lbBatchSize:]
@@ -290,7 +290,7 @@ func migrateRepository(_ context.Context, doer *user_model.User, downloader base
if opts.Releases {
log.Trace("migrating releases")
messenger("repo.migrate.migrating_releases")
- releases, err := downloader.GetReleases()
+ releases, err := downloader.GetReleases(ctx)
if err != nil {
if !base.IsErrNotSupported(err) {
return err
@@ -304,14 +304,14 @@ func migrateRepository(_ context.Context, doer *user_model.User, downloader base
relBatchSize = len(releases)
}
- if err = uploader.CreateReleases(releases[:relBatchSize]...); err != nil {
+ if err = uploader.CreateReleases(ctx, releases[:relBatchSize]...); err != nil {
return err
}
releases = releases[relBatchSize:]
}
// Once all releases (if any) are inserted, sync any remaining non-release tags
- if err = uploader.SyncTags(); err != nil {
+ if err = uploader.SyncTags(ctx); err != nil {
return err
}
}
@@ -329,7 +329,7 @@ func migrateRepository(_ context.Context, doer *user_model.User, downloader base
issueBatchSize := uploader.MaxBatchInsertSize("issue")
for i := 1; ; i++ {
- issues, isEnd, err := downloader.GetIssues(i, issueBatchSize)
+ issues, isEnd, err := downloader.GetIssues(ctx, i, issueBatchSize)
if err != nil {
if !base.IsErrNotSupported(err) {
return err
@@ -338,7 +338,7 @@ func migrateRepository(_ context.Context, doer *user_model.User, downloader base
break
}
- if err := uploader.CreateIssues(issues...); err != nil {
+ if err := uploader.CreateIssues(ctx, issues...); err != nil {
return err
}
@@ -346,7 +346,7 @@ func migrateRepository(_ context.Context, doer *user_model.User, downloader base
allComments := make([]*base.Comment, 0, commentBatchSize)
for _, issue := range issues {
log.Trace("migrating issue %d's comments", issue.Number)
- comments, _, err := downloader.GetComments(issue)
+ comments, _, err := downloader.GetComments(ctx, issue)
if err != nil {
if !base.IsErrNotSupported(err) {
return err
@@ -357,7 +357,7 @@ func migrateRepository(_ context.Context, doer *user_model.User, downloader base
allComments = append(allComments, comments...)
if len(allComments) >= commentBatchSize {
- if err = uploader.CreateComments(allComments[:commentBatchSize]...); err != nil {
+ if err = uploader.CreateComments(ctx, allComments[:commentBatchSize]...); err != nil {
return err
}
@@ -366,7 +366,7 @@ func migrateRepository(_ context.Context, doer *user_model.User, downloader base
}
if len(allComments) > 0 {
- if err = uploader.CreateComments(allComments...); err != nil {
+ if err = uploader.CreateComments(ctx, allComments...); err != nil {
return err
}
}
@@ -383,7 +383,7 @@ func migrateRepository(_ context.Context, doer *user_model.User, downloader base
messenger("repo.migrate.migrating_pulls")
prBatchSize := uploader.MaxBatchInsertSize("pullrequest")
for i := 1; ; i++ {
- prs, isEnd, err := downloader.GetPullRequests(i, prBatchSize)
+ prs, isEnd, err := downloader.GetPullRequests(ctx, i, prBatchSize)
if err != nil {
if !base.IsErrNotSupported(err) {
return err
@@ -392,7 +392,7 @@ func migrateRepository(_ context.Context, doer *user_model.User, downloader base
break
}
- if err := uploader.CreatePullRequests(prs...); err != nil {
+ if err := uploader.CreatePullRequests(ctx, prs...); err != nil {
return err
}
@@ -402,7 +402,7 @@ func migrateRepository(_ context.Context, doer *user_model.User, downloader base
allComments := make([]*base.Comment, 0, commentBatchSize)
for _, pr := range prs {
log.Trace("migrating pull request %d's comments", pr.Number)
- comments, _, err := downloader.GetComments(pr)
+ comments, _, err := downloader.GetComments(ctx, pr)
if err != nil {
if !base.IsErrNotSupported(err) {
return err
@@ -413,14 +413,14 @@ func migrateRepository(_ context.Context, doer *user_model.User, downloader base
allComments = append(allComments, comments...)
if len(allComments) >= commentBatchSize {
- if err = uploader.CreateComments(allComments[:commentBatchSize]...); err != nil {
+ if err = uploader.CreateComments(ctx, allComments[:commentBatchSize]...); err != nil {
return err
}
allComments = allComments[commentBatchSize:]
}
}
if len(allComments) > 0 {
- if err = uploader.CreateComments(allComments...); err != nil {
+ if err = uploader.CreateComments(ctx, allComments...); err != nil {
return err
}
}
@@ -429,7 +429,7 @@ func migrateRepository(_ context.Context, doer *user_model.User, downloader base
// migrate reviews
allReviews := make([]*base.Review, 0, reviewBatchSize)
for _, pr := range prs {
- reviews, err := downloader.GetReviews(pr)
+ reviews, err := downloader.GetReviews(ctx, pr)
if err != nil {
if !base.IsErrNotSupported(err) {
return err
@@ -441,14 +441,14 @@ func migrateRepository(_ context.Context, doer *user_model.User, downloader base
allReviews = append(allReviews, reviews...)
if len(allReviews) >= reviewBatchSize {
- if err = uploader.CreateReviews(allReviews[:reviewBatchSize]...); err != nil {
+ if err = uploader.CreateReviews(ctx, allReviews[:reviewBatchSize]...); err != nil {
return err
}
allReviews = allReviews[reviewBatchSize:]
}
}
if len(allReviews) > 0 {
- if err = uploader.CreateReviews(allReviews...); err != nil {
+ if err = uploader.CreateReviews(ctx, allReviews...); err != nil {
return err
}
}
@@ -463,12 +463,12 @@ func migrateRepository(_ context.Context, doer *user_model.User, downloader base
if opts.Comments && supportAllComments {
log.Trace("migrating comments")
for i := 1; ; i++ {
- comments, isEnd, err := downloader.GetAllComments(i, commentBatchSize)
+ comments, isEnd, err := downloader.GetAllComments(ctx, i, commentBatchSize)
if err != nil {
return err
}
- if err := uploader.CreateComments(comments...); err != nil {
+ if err := uploader.CreateComments(ctx, comments...); err != nil {
return err
}
@@ -478,7 +478,7 @@ func migrateRepository(_ context.Context, doer *user_model.User, downloader base
}
}
- return uploader.Finish()
+ return uploader.Finish(ctx)
}
// Init migrations service
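
migrateRepository keeps its existing batching strategy while gaining the ctx parameter: comments and reviews are buffered until MaxBatchInsertSize items are available, each full batch is uploaded, and a final partial batch is flushed at the end. Stripped of the uploader interface, the control flow looks roughly like this sketch (hypothetical names, not Gitea code):

    package main

    import "fmt"

    // flushInBatches mirrors the buffering used in migrateRepository:
    // items are collected until a full batch is available, each full batch
    // is uploaded immediately, and the remainder is uploaded at the end.
    func flushInBatches(items []string, batchSize int, upload func([]string) error) error {
        buf := make([]string, 0, batchSize)
        for _, it := range items {
            buf = append(buf, it)
            if len(buf) >= batchSize {
                if err := upload(buf[:batchSize]); err != nil {
                    return err
                }
                buf = buf[batchSize:]
            }
        }
        if len(buf) > 0 {
            return upload(buf)
        }
        return nil
    }

    func main() {
        comments := []string{"c1", "c2", "c3", "c4", "c5"}
        _ = flushInBatches(comments, 2, func(batch []string) error {
            fmt.Println("uploading", batch)
            return nil
        })
        // uploading [c1 c2]
        // uploading [c3 c4]
        // uploading [c5]
    }
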
diff --git a/services/migrations/onedev.go b/services/migrations/onedev.go
index e2f7b771f3..4ce35dd12e 100644
--- a/services/migrations/onedev.go
+++ b/services/migrations/onedev.go
@@ -71,7 +71,6 @@ type onedevUser struct {
// from OneDev
type OneDevDownloader struct {
base.NullDownloader
- ctx context.Context
client *http.Client
baseURL *url.URL
repoName string
@@ -81,15 +80,9 @@ type OneDevDownloader struct {
milestoneMap map[int64]string
}
-// SetContext set context
-func (d *OneDevDownloader) SetContext(ctx context.Context) {
- d.ctx = ctx
-}
-
// NewOneDevDownloader creates a new downloader
-func NewOneDevDownloader(ctx context.Context, baseURL *url.URL, username, password, repoName string) *OneDevDownloader {
+func NewOneDevDownloader(_ context.Context, baseURL *url.URL, username, password, repoName string) *OneDevDownloader {
downloader := &OneDevDownloader{
- ctx: ctx,
baseURL: baseURL,
repoName: repoName,
client: &http.Client{
@@ -121,7 +114,7 @@ func (d *OneDevDownloader) LogString() string {
return fmt.Sprintf("<OneDevDownloader %s [%d]/%s>", d.baseURL, d.repoID, d.repoName)
}
-func (d *OneDevDownloader) callAPI(endpoint string, parameter map[string]string, result any) error {
+func (d *OneDevDownloader) callAPI(ctx context.Context, endpoint string, parameter map[string]string, result any) error {
u, err := d.baseURL.Parse(endpoint)
if err != nil {
return err
@@ -135,7 +128,7 @@ func (d *OneDevDownloader) callAPI(endpoint string, parameter map[string]string,
u.RawQuery = query.Encode()
}
- req, err := http.NewRequestWithContext(d.ctx, "GET", u.String(), nil)
+ req, err := http.NewRequestWithContext(ctx, "GET", u.String(), nil)
if err != nil {
return err
}
@@ -151,7 +144,7 @@ func (d *OneDevDownloader) callAPI(endpoint string, parameter map[string]string,
}
// GetRepoInfo returns repository information
-func (d *OneDevDownloader) GetRepoInfo() (*base.Repository, error) {
+func (d *OneDevDownloader) GetRepoInfo(ctx context.Context) (*base.Repository, error) {
info := make([]struct {
ID int64 `json:"id"`
Name string `json:"name"`
@@ -159,6 +152,7 @@ func (d *OneDevDownloader) GetRepoInfo() (*base.Repository, error) {
}, 0, 1)
err := d.callAPI(
+ ctx,
"/api/projects",
map[string]string{
"query": `"Name" is "` + d.repoName + `"`,
@@ -194,7 +188,7 @@ func (d *OneDevDownloader) GetRepoInfo() (*base.Repository, error) {
}
// GetMilestones returns milestones
-func (d *OneDevDownloader) GetMilestones() ([]*base.Milestone, error) {
+func (d *OneDevDownloader) GetMilestones(ctx context.Context) ([]*base.Milestone, error) {
rawMilestones := make([]struct {
ID int64 `json:"id"`
Name string `json:"name"`
@@ -209,6 +203,7 @@ func (d *OneDevDownloader) GetMilestones() ([]*base.Milestone, error) {
offset := 0
for {
err := d.callAPI(
+ ctx,
endpoint,
map[string]string{
"offset": strconv.Itoa(offset),
@@ -243,7 +238,7 @@ func (d *OneDevDownloader) GetMilestones() ([]*base.Milestone, error) {
}
// GetLabels returns labels
-func (d *OneDevDownloader) GetLabels() ([]*base.Label, error) {
+func (d *OneDevDownloader) GetLabels(_ context.Context) ([]*base.Label, error) {
return []*base.Label{
{
Name: "Bug",
@@ -277,7 +272,7 @@ type onedevIssueContext struct {
}
// GetIssues returns issues
-func (d *OneDevDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, error) {
+func (d *OneDevDownloader) GetIssues(ctx context.Context, page, perPage int) ([]*base.Issue, bool, error) {
rawIssues := make([]struct {
ID int64 `json:"id"`
Number int64 `json:"number"`
@@ -289,6 +284,7 @@ func (d *OneDevDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, er
}, 0, perPage)
err := d.callAPI(
+ ctx,
"/api/issues",
map[string]string{
"query": `"Project" is "` + d.repoName + `"`,
@@ -308,6 +304,7 @@ func (d *OneDevDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, er
Value string `json:"value"`
}, 0, 10)
err := d.callAPI(
+ ctx,
fmt.Sprintf("/api/issues/%d/fields", issue.ID),
nil,
&fields,
@@ -329,6 +326,7 @@ func (d *OneDevDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, er
Name string `json:"name"`
}, 0, 10)
err = d.callAPI(
+ ctx,
fmt.Sprintf("/api/issues/%d/milestones", issue.ID),
nil,
&milestones,
@@ -345,7 +343,7 @@ func (d *OneDevDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, er
if state == "released" {
state = "closed"
}
- poster := d.tryGetUser(issue.SubmitterID)
+ poster := d.tryGetUser(ctx, issue.SubmitterID)
issues = append(issues, &base.Issue{
Title: issue.Title,
Number: issue.Number,
@@ -370,7 +368,7 @@ func (d *OneDevDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, er
}
// GetComments returns comments
-func (d *OneDevDownloader) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) {
+func (d *OneDevDownloader) GetComments(ctx context.Context, commentable base.Commentable) ([]*base.Comment, bool, error) {
context, ok := commentable.GetContext().(onedevIssueContext)
if !ok {
return nil, false, fmt.Errorf("unexpected context: %+v", commentable.GetContext())
@@ -391,6 +389,7 @@ func (d *OneDevDownloader) GetComments(commentable base.Commentable) ([]*base.Co
}
err := d.callAPI(
+ ctx,
endpoint,
nil,
&rawComments,
@@ -412,6 +411,7 @@ func (d *OneDevDownloader) GetComments(commentable base.Commentable) ([]*base.Co
}
err = d.callAPI(
+ ctx,
endpoint,
nil,
&rawChanges,
@@ -425,7 +425,7 @@ func (d *OneDevDownloader) GetComments(commentable base.Commentable) ([]*base.Co
if len(comment.Content) == 0 {
continue
}
- poster := d.tryGetUser(comment.UserID)
+ poster := d.tryGetUser(ctx, comment.UserID)
comments = append(comments, &base.Comment{
IssueIndex: commentable.GetLocalIndex(),
Index: comment.ID,
@@ -450,7 +450,7 @@ func (d *OneDevDownloader) GetComments(commentable base.Commentable) ([]*base.Co
continue
}
- poster := d.tryGetUser(change.UserID)
+ poster := d.tryGetUser(ctx, change.UserID)
comments = append(comments, &base.Comment{
IssueIndex: commentable.GetLocalIndex(),
PosterID: poster.ID,
@@ -466,7 +466,7 @@ func (d *OneDevDownloader) GetComments(commentable base.Commentable) ([]*base.Co
}
// GetPullRequests returns pull requests
-func (d *OneDevDownloader) GetPullRequests(page, perPage int) ([]*base.PullRequest, bool, error) {
+func (d *OneDevDownloader) GetPullRequests(ctx context.Context, page, perPage int) ([]*base.PullRequest, bool, error) {
rawPullRequests := make([]struct {
ID int64 `json:"id"`
Number int64 `json:"number"`
@@ -484,6 +484,7 @@ func (d *OneDevDownloader) GetPullRequests(page, perPage int) ([]*base.PullReque
}, 0, perPage)
err := d.callAPI(
+ ctx,
"/api/pull-requests",
map[string]string{
"query": `"Target Project" is "` + d.repoName + `"`,
@@ -505,6 +506,7 @@ func (d *OneDevDownloader) GetPullRequests(page, perPage int) ([]*base.PullReque
MergeCommitHash string `json:"mergeCommitHash"`
}
err := d.callAPI(
+ ctx,
fmt.Sprintf("/api/pull-requests/%d/merge-preview", pr.ID),
nil,
&mergePreview,
@@ -525,7 +527,7 @@ func (d *OneDevDownloader) GetPullRequests(page, perPage int) ([]*base.PullReque
mergedTime = pr.CloseInfo.Date
}
}
- poster := d.tryGetUser(pr.SubmitterID)
+ poster := d.tryGetUser(ctx, pr.SubmitterID)
number := pr.Number + d.maxIssueIndex
pullRequests = append(pullRequests, &base.PullRequest{
@@ -562,7 +564,7 @@ func (d *OneDevDownloader) GetPullRequests(page, perPage int) ([]*base.PullReque
}
// GetReviews returns pull request reviews
-func (d *OneDevDownloader) GetReviews(reviewable base.Reviewable) ([]*base.Review, error) {
+func (d *OneDevDownloader) GetReviews(ctx context.Context, reviewable base.Reviewable) ([]*base.Review, error) {
rawReviews := make([]struct {
ID int64 `json:"id"`
UserID int64 `json:"userId"`
@@ -574,6 +576,7 @@ func (d *OneDevDownloader) GetReviews(reviewable base.Reviewable) ([]*base.Revie
}, 0, 100)
err := d.callAPI(
+ ctx,
fmt.Sprintf("/api/pull-requests/%d/reviews", reviewable.GetForeignIndex()),
nil,
&rawReviews,
@@ -596,7 +599,7 @@ func (d *OneDevDownloader) GetReviews(reviewable base.Reviewable) ([]*base.Revie
}
}
- poster := d.tryGetUser(review.UserID)
+ poster := d.tryGetUser(ctx, review.UserID)
reviews = append(reviews, &base.Review{
IssueIndex: reviewable.GetLocalIndex(),
ReviewerID: poster.ID,
@@ -610,14 +613,15 @@ func (d *OneDevDownloader) GetReviews(reviewable base.Reviewable) ([]*base.Revie
}
// GetTopics returns repository topics
-func (d *OneDevDownloader) GetTopics() ([]string, error) {
+func (d *OneDevDownloader) GetTopics(_ context.Context) ([]string, error) {
return []string{}, nil
}
-func (d *OneDevDownloader) tryGetUser(userID int64) *onedevUser {
+func (d *OneDevDownloader) tryGetUser(ctx context.Context, userID int64) *onedevUser {
user, ok := d.userMap[userID]
if !ok {
err := d.callAPI(
+ ctx,
fmt.Sprintf("/api/users/%d", userID),
nil,
&user,
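
callAPI now receives the context explicitly and builds each request with http.NewRequestWithContext, so every OneDev API call (including the tryGetUser lookups above) honours the caller's deadline. A stripped-down, standard-library-only analogue, with a hypothetical getJSON helper and server URL:

    package main

    import (
        "context"
        "encoding/json"
        "fmt"
        "net/http"
        "net/url"
        "time"
    )

    // getJSON is a simplified analogue of callAPI: it builds the request with
    // the caller's context, sends it, and decodes the JSON body into result.
    func getJSON(ctx context.Context, client *http.Client, base *url.URL, endpoint string, result any) error {
        u, err := base.Parse(endpoint)
        if err != nil {
            return err
        }
        req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil)
        if err != nil {
            return err
        }
        resp, err := client.Do(req)
        if err != nil {
            return err
        }
        defer resp.Body.Close()
        return json.NewDecoder(resp.Body).Decode(result)
    }

    func main() {
        base, _ := url.Parse("https://code.example.com") // hypothetical server
        ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
        defer cancel()

        var projects []struct {
            ID   int64  `json:"id"`
            Name string `json:"name"`
        }
        if err := getJSON(ctx, http.DefaultClient, base, "/api/projects", &projects); err != nil {
            fmt.Println("request failed:", err)
            return
        }
        fmt.Println(projects)
    }
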
diff --git a/services/migrations/onedev_test.go b/services/migrations/onedev_test.go
index 48412fec64..a05d6cac6e 100644
--- a/services/migrations/onedev_test.go
+++ b/services/migrations/onedev_test.go
@@ -4,7 +4,6 @@
package migrations
import (
- "context"
"net/http"
"net/url"
"testing"
@@ -22,11 +21,12 @@ func TestOneDevDownloadRepo(t *testing.T) {
}
u, _ := url.Parse("https://code.onedev.io")
- downloader := NewOneDevDownloader(context.Background(), u, "", "", "go-gitea-test_repo")
+ ctx := t.Context()
+ downloader := NewOneDevDownloader(ctx, u, "", "", "go-gitea-test_repo")
if err != nil {
t.Fatalf("NewOneDevDownloader is nil: %v", err)
}
- repo, err := downloader.GetRepoInfo()
+ repo, err := downloader.GetRepoInfo(ctx)
assert.NoError(t, err)
assertRepositoryEqual(t, &base.Repository{
Name: "go-gitea-test_repo",
@@ -36,7 +36,7 @@ func TestOneDevDownloadRepo(t *testing.T) {
OriginalURL: "https://code.onedev.io/projects/go-gitea-test_repo",
}, repo)
- milestones, err := downloader.GetMilestones()
+ milestones, err := downloader.GetMilestones(ctx)
assert.NoError(t, err)
deadline := time.Unix(1620086400, 0)
assertMilestonesEqual(t, []*base.Milestone{
@@ -51,11 +51,11 @@ func TestOneDevDownloadRepo(t *testing.T) {
},
}, milestones)
- labels, err := downloader.GetLabels()
+ labels, err := downloader.GetLabels(ctx)
assert.NoError(t, err)
assert.Len(t, labels, 6)
- issues, isEnd, err := downloader.GetIssues(1, 2)
+ issues, isEnd, err := downloader.GetIssues(ctx, 1, 2)
assert.NoError(t, err)
assert.False(t, isEnd)
assertIssuesEqual(t, []*base.Issue{
@@ -94,7 +94,7 @@ func TestOneDevDownloadRepo(t *testing.T) {
},
}, issues)
- comments, _, err := downloader.GetComments(&base.Issue{
+ comments, _, err := downloader.GetComments(ctx, &base.Issue{
Number: 4,
ForeignIndex: 398,
Context: onedevIssueContext{IsPullRequest: false},
@@ -110,7 +110,7 @@ func TestOneDevDownloadRepo(t *testing.T) {
},
}, comments)
- prs, _, err := downloader.GetPullRequests(1, 1)
+ prs, _, err := downloader.GetPullRequests(ctx, 1, 1)
assert.NoError(t, err)
assertPullRequestsEqual(t, []*base.PullRequest{
{
@@ -136,7 +136,7 @@ func TestOneDevDownloadRepo(t *testing.T) {
},
}, prs)
- rvs, err := downloader.GetReviews(&base.PullRequest{Number: 5, ForeignIndex: 186})
+ rvs, err := downloader.GetReviews(ctx, &base.PullRequest{Number: 5, ForeignIndex: 186})
assert.NoError(t, err)
assertReviewsEqual(t, []*base.Review{
{
diff --git a/services/migrations/restore.go b/services/migrations/restore.go
index fd337b22c7..5686285935 100644
--- a/services/migrations/restore.go
+++ b/services/migrations/restore.go
@@ -18,7 +18,6 @@ import (
// RepositoryRestorer implements a Downloader from the local directory
type RepositoryRestorer struct {
base.NullDownloader
- ctx context.Context
baseDir string
repoOwner string
repoName string
@@ -26,13 +25,12 @@ type RepositoryRestorer struct {
}
// NewRepositoryRestorer creates a repository restorer which can restore a repository from a dumped folder
-func NewRepositoryRestorer(ctx context.Context, baseDir, owner, repoName string, validation bool) (*RepositoryRestorer, error) {
+func NewRepositoryRestorer(_ context.Context, baseDir, owner, repoName string, validation bool) (*RepositoryRestorer, error) {
baseDir, err := filepath.Abs(baseDir)
if err != nil {
return nil, err
}
return &RepositoryRestorer{
- ctx: ctx,
baseDir: baseDir,
repoOwner: owner,
repoName: repoName,
@@ -48,11 +46,6 @@ func (r *RepositoryRestorer) reviewDir() string {
return filepath.Join(r.baseDir, "reviews")
}
-// SetContext set context
-func (r *RepositoryRestorer) SetContext(ctx context.Context) {
- r.ctx = ctx
-}
-
func (r *RepositoryRestorer) getRepoOptions() (map[string]string, error) {
p := filepath.Join(r.baseDir, "repo.yml")
bs, err := os.ReadFile(p)
@@ -69,7 +62,7 @@ func (r *RepositoryRestorer) getRepoOptions() (map[string]string, error) {
}
// GetRepoInfo returns repository information
-func (r *RepositoryRestorer) GetRepoInfo() (*base.Repository, error) {
+func (r *RepositoryRestorer) GetRepoInfo(_ context.Context) (*base.Repository, error) {
opts, err := r.getRepoOptions()
if err != nil {
return nil, err
@@ -89,7 +82,7 @@ func (r *RepositoryRestorer) GetRepoInfo() (*base.Repository, error) {
}
// GetTopics returns github topics
-func (r *RepositoryRestorer) GetTopics() ([]string, error) {
+func (r *RepositoryRestorer) GetTopics(_ context.Context) ([]string, error) {
p := filepath.Join(r.baseDir, "topic.yml")
topics := struct {
@@ -112,7 +105,7 @@ func (r *RepositoryRestorer) GetTopics() ([]string, error) {
}
// GetMilestones returns milestones
-func (r *RepositoryRestorer) GetMilestones() ([]*base.Milestone, error) {
+func (r *RepositoryRestorer) GetMilestones(_ context.Context) ([]*base.Milestone, error) {
milestones := make([]*base.Milestone, 0, 10)
p := filepath.Join(r.baseDir, "milestone.yml")
err := base.Load(p, &milestones, r.validation)
@@ -127,7 +120,7 @@ func (r *RepositoryRestorer) GetMilestones() ([]*base.Milestone, error) {
}
// GetReleases returns releases
-func (r *RepositoryRestorer) GetReleases() ([]*base.Release, error) {
+func (r *RepositoryRestorer) GetReleases(_ context.Context) ([]*base.Release, error) {
releases := make([]*base.Release, 0, 10)
p := filepath.Join(r.baseDir, "release.yml")
_, err := os.Stat(p)
@@ -158,7 +151,7 @@ func (r *RepositoryRestorer) GetReleases() ([]*base.Release, error) {
}
// GetLabels returns labels
-func (r *RepositoryRestorer) GetLabels() ([]*base.Label, error) {
+func (r *RepositoryRestorer) GetLabels(_ context.Context) ([]*base.Label, error) {
labels := make([]*base.Label, 0, 10)
p := filepath.Join(r.baseDir, "label.yml")
_, err := os.Stat(p)
@@ -182,7 +175,7 @@ func (r *RepositoryRestorer) GetLabels() ([]*base.Label, error) {
}
// GetIssues returns issues according to start and limit
-func (r *RepositoryRestorer) GetIssues(page, perPage int) ([]*base.Issue, bool, error) {
+func (r *RepositoryRestorer) GetIssues(_ context.Context, _, _ int) ([]*base.Issue, bool, error) {
issues := make([]*base.Issue, 0, 10)
p := filepath.Join(r.baseDir, "issue.yml")
err := base.Load(p, &issues, r.validation)
@@ -196,7 +189,7 @@ func (r *RepositoryRestorer) GetIssues(page, perPage int) ([]*base.Issue, bool,
}
// GetComments returns comments according to issueNumber
-func (r *RepositoryRestorer) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) {
+func (r *RepositoryRestorer) GetComments(_ context.Context, commentable base.Commentable) ([]*base.Comment, bool, error) {
comments := make([]*base.Comment, 0, 10)
p := filepath.Join(r.commentDir(), fmt.Sprintf("%d.yml", commentable.GetForeignIndex()))
_, err := os.Stat(p)
@@ -220,7 +213,7 @@ func (r *RepositoryRestorer) GetComments(commentable base.Commentable) ([]*base.
}
// GetPullRequests returns pull requests according to page and perPage
-func (r *RepositoryRestorer) GetPullRequests(page, perPage int) ([]*base.PullRequest, bool, error) {
+func (r *RepositoryRestorer) GetPullRequests(_ context.Context, page, perPage int) ([]*base.PullRequest, bool, error) {
pulls := make([]*base.PullRequest, 0, 10)
p := filepath.Join(r.baseDir, "pull_request.yml")
_, err := os.Stat(p)
@@ -248,7 +241,7 @@ func (r *RepositoryRestorer) GetPullRequests(page, perPage int) ([]*base.PullReq
}
// GetReviews returns pull request reviews
-func (r *RepositoryRestorer) GetReviews(reviewable base.Reviewable) ([]*base.Review, error) {
+func (r *RepositoryRestorer) GetReviews(ctx context.Context, reviewable base.Reviewable) ([]*base.Review, error) {
reviews := make([]*base.Review, 0, 10)
p := filepath.Join(r.reviewDir(), fmt.Sprintf("%d.yml", reviewable.GetForeignIndex()))
_, err := os.Stat(p)
diff --git a/services/mirror/mirror_pull.go b/services/mirror/mirror_pull.go
index 24605cfae0..fa5b9934ec 100644
--- a/services/mirror/mirror_pull.go
+++ b/services/mirror/mirror_pull.go
@@ -15,6 +15,7 @@ import (
"code.gitea.io/gitea/modules/git"
giturl "code.gitea.io/gitea/modules/git/url"
"code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/globallock"
"code.gitea.io/gitea/modules/lfs"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/process"
@@ -40,13 +41,13 @@ func UpdateAddress(ctx context.Context, m *repo_model.Mirror, addr string) error
remoteName := m.GetRemoteName()
repoPath := m.GetRepository(ctx).RepoPath()
// Remove old remote
- _, _, err = git.NewCommand(ctx, "remote", "rm").AddDynamicArguments(remoteName).RunStdString(&git.RunOpts{Dir: repoPath})
+ _, _, err = git.NewCommand("remote", "rm").AddDynamicArguments(remoteName).RunStdString(ctx, &git.RunOpts{Dir: repoPath})
if err != nil && !git.IsRemoteNotExistError(err) {
return err
}
- cmd := git.NewCommand(ctx, "remote", "add").AddDynamicArguments(remoteName).AddArguments("--mirror=fetch").AddDynamicArguments(addr)
- _, _, err = cmd.RunStdString(&git.RunOpts{Dir: repoPath})
+ cmd := git.NewCommand("remote", "add").AddDynamicArguments(remoteName).AddArguments("--mirror=fetch").AddDynamicArguments(addr)
+ _, _, err = cmd.RunStdString(ctx, &git.RunOpts{Dir: repoPath})
if err != nil && !git.IsRemoteNotExistError(err) {
return err
}
@@ -55,13 +56,13 @@ func UpdateAddress(ctx context.Context, m *repo_model.Mirror, addr string) error
wikiPath := m.Repo.WikiPath()
wikiRemotePath := repo_module.WikiRemoteURL(ctx, addr)
// Remove old remote of wiki
- _, _, err = git.NewCommand(ctx, "remote", "rm").AddDynamicArguments(remoteName).RunStdString(&git.RunOpts{Dir: wikiPath})
+ _, _, err = git.NewCommand("remote", "rm").AddDynamicArguments(remoteName).RunStdString(ctx, &git.RunOpts{Dir: wikiPath})
if err != nil && !git.IsRemoteNotExistError(err) {
return err
}
- cmd = git.NewCommand(ctx, "remote", "add").AddDynamicArguments(remoteName).AddArguments("--mirror=fetch").AddDynamicArguments(wikiRemotePath)
- _, _, err = cmd.RunStdString(&git.RunOpts{Dir: wikiPath})
+ cmd = git.NewCommand("remote", "add").AddDynamicArguments(remoteName).AddArguments("--mirror=fetch").AddDynamicArguments(wikiRemotePath)
+ _, _, err = cmd.RunStdString(ctx, &git.RunOpts{Dir: wikiPath})
if err != nil && !git.IsRemoteNotExistError(err) {
return err
}
@@ -126,7 +127,9 @@ func parseRemoteUpdateOutput(output, remoteName string) []*mirrorSyncResult {
case strings.HasPrefix(lines[i], " - "): // Delete reference
isTag := !strings.HasPrefix(refName, remoteName+"/")
var refFullName git.RefName
- if isTag {
+ if strings.HasPrefix(refName, "refs/") {
+ refFullName = git.RefName(refName)
+ } else if isTag {
refFullName = git.RefNameFromTag(refName)
} else {
refFullName = git.RefNameFromBranch(strings.TrimPrefix(refName, remoteName+"/"))
@@ -149,8 +152,15 @@ func parseRemoteUpdateOutput(output, remoteName string) []*mirrorSyncResult {
log.Error("Expect two SHAs but not what found: %q", lines[i])
continue
}
+ var refFullName git.RefName
+ if strings.HasPrefix(refName, "refs/") {
+ refFullName = git.RefName(refName)
+ } else {
+ refFullName = git.RefNameFromBranch(strings.TrimPrefix(refName, remoteName+"/"))
+ }
+
results = append(results, &mirrorSyncResult{
- refName: git.RefNameFromBranch(strings.TrimPrefix(refName, remoteName+"/")),
+ refName: refFullName,
oldCommitID: shas[0],
newCommitID: shas[1],
})
@@ -199,8 +209,8 @@ func pruneBrokenReferences(ctx context.Context,
stderrBuilder.Reset()
stdoutBuilder.Reset()
- pruneErr := git.NewCommand(ctx, "remote", "prune").AddDynamicArguments(m.GetRemoteName()).
- Run(&git.RunOpts{
+ pruneErr := git.NewCommand("remote", "prune").AddDynamicArguments(m.GetRemoteName()).
+ Run(ctx, &git.RunOpts{
Timeout: timeout,
Dir: repoPath,
Stdout: stdoutBuilder,
@@ -225,6 +235,24 @@ func pruneBrokenReferences(ctx context.Context,
return pruneErr
}
+// checkRecoverableSyncError takes an error message from a git fetch command and reports whether the failure is recoverable; it returns false for fatal/blocking errors
+func checkRecoverableSyncError(stderrMessage string) bool {
+ switch {
+ case strings.Contains(stderrMessage, "unable to resolve reference") && strings.Contains(stderrMessage, "reference broken"):
+ return true
+ case strings.Contains(stderrMessage, "remote error") && strings.Contains(stderrMessage, "not our ref"):
+ return true
+ case strings.Contains(stderrMessage, "cannot lock ref") && strings.Contains(stderrMessage, "but expected"):
+ return true
+ case strings.Contains(stderrMessage, "cannot lock ref") && strings.Contains(stderrMessage, "unable to resolve reference"):
+ return true
+ case strings.Contains(stderrMessage, "Unable to create") && strings.Contains(stderrMessage, ".lock"):
+ return true
+ default:
+ return false
+ }
+}
+
// runSync returns true if sync finished without error.
func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bool) {
repoPath := m.Repo.RepoPath()
@@ -234,7 +262,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo
log.Trace("SyncMirrors [repo: %-v]: running git remote update...", m.Repo)
// use fetch but not remote update because git fetch support --tags but remote update doesn't
- cmd := git.NewCommand(ctx, "fetch")
+ cmd := git.NewCommand("fetch")
if m.EnablePrune {
cmd.AddArguments("--prune")
}
@@ -250,7 +278,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo
stdoutBuilder := strings.Builder{}
stderrBuilder := strings.Builder{}
- if err := cmd.Run(&git.RunOpts{
+ if err := cmd.Run(ctx, &git.RunOpts{
Timeout: timeout,
Dir: repoPath,
Env: envs,
@@ -265,7 +293,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo
stdoutMessage := util.SanitizeCredentialURLs(stdout)
// Now check if the error is a resolve reference due to broken reference
- if strings.Contains(stderr, "unable to resolve reference") && strings.Contains(stderr, "reference broken") {
+ if checkRecoverableSyncError(stderr) {
log.Warn("SyncMirrors [repo: %-v]: failed to update mirror repository due to broken references:\nStdout: %s\nStderr: %s\nErr: %v\nAttempting Prune", m.Repo, stdoutMessage, stderrMessage, err)
err = nil
@@ -275,7 +303,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo
// Successful prune - reattempt mirror
stderrBuilder.Reset()
stdoutBuilder.Reset()
- if err = cmd.Run(&git.RunOpts{
+ if err = cmd.Run(ctx, &git.RunOpts{
Timeout: timeout,
Dir: repoPath,
Stdout: &stdoutBuilder,
@@ -314,6 +342,15 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo
return nil, false
}
+ if m.LFS && setting.LFS.StartServer {
+ log.Trace("SyncMirrors [repo: %-v]: syncing LFS objects...", m.Repo)
+ endpoint := lfs.DetermineEndpoint(remoteURL.String(), m.LFSEndpoint)
+ lfsClient := lfs.NewClient(endpoint, nil)
+ if err = repo_module.StoreMissingLfsObjectsInRepository(ctx, m.Repo, gitRepo, lfsClient); err != nil {
+ log.Error("SyncMirrors [repo: %-v]: failed to synchronize LFS objects for repository: %v", m.Repo, err)
+ }
+ }
+
log.Trace("SyncMirrors [repo: %-v]: syncing branches...", m.Repo)
if _, err = repo_module.SyncRepoBranchesWithRepo(ctx, m.Repo, gitRepo, 0); err != nil {
log.Error("SyncMirrors [repo: %-v]: failed to synchronize branches: %v", m.Repo, err)
@@ -323,15 +360,6 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo
if err = repo_module.SyncReleasesWithTags(ctx, m.Repo, gitRepo); err != nil {
log.Error("SyncMirrors [repo: %-v]: failed to synchronize tags to releases: %v", m.Repo, err)
}
-
- if m.LFS && setting.LFS.StartServer {
- log.Trace("SyncMirrors [repo: %-v]: syncing LFS objects...", m.Repo)
- endpoint := lfs.DetermineEndpoint(remoteURL.String(), m.LFSEndpoint)
- lfsClient := lfs.NewClient(endpoint, nil)
- if err = repo_module.StoreMissingLfsObjectsInRepository(ctx, m.Repo, gitRepo, lfsClient); err != nil {
- log.Error("SyncMirrors [repo: %-v]: failed to synchronize LFS objects for repository: %v", m.Repo, err)
- }
- }
gitRepo.Close()
log.Trace("SyncMirrors [repo: %-v]: updating size of repository", m.Repo)
@@ -343,8 +371,8 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo
log.Trace("SyncMirrors [repo: %-v Wiki]: running git remote update...", m.Repo)
stderrBuilder.Reset()
stdoutBuilder.Reset()
- if err := git.NewCommand(ctx, "remote", "update", "--prune").AddDynamicArguments(m.GetRemoteName()).
- Run(&git.RunOpts{
+ if err := git.NewCommand("remote", "update", "--prune").AddDynamicArguments(m.GetRemoteName()).
+ Run(ctx, &git.RunOpts{
Timeout: timeout,
Dir: wikiPath,
Stdout: &stdoutBuilder,
@@ -358,7 +386,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo
stdoutMessage := util.SanitizeCredentialURLs(stdout)
// Now check if the error is a resolve reference due to broken reference
- if strings.Contains(stderrMessage, "unable to resolve reference") && strings.Contains(stderrMessage, "reference broken") {
+ if checkRecoverableSyncError(stderrMessage) {
log.Warn("SyncMirrors [repo: %-v Wiki]: failed to update mirror wiki repository due to broken references:\nStdout: %s\nStderr: %s\nErr: %v\nAttempting Prune", m.Repo, stdoutMessage, stderrMessage, err)
err = nil
@@ -369,8 +397,8 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo
stderrBuilder.Reset()
stdoutBuilder.Reset()
- if err = git.NewCommand(ctx, "remote", "update", "--prune").AddDynamicArguments(m.GetRemoteName()).
- Run(&git.RunOpts{
+ if err = git.NewCommand("remote", "update", "--prune").AddDynamicArguments(m.GetRemoteName()).
+ Run(ctx, &git.RunOpts{
Timeout: timeout,
Dir: wikiPath,
Stdout: &stdoutBuilder,
@@ -416,6 +444,10 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo
return parseRemoteUpdateOutput(output, m.GetRemoteName()), true
}
+func getRepoPullMirrorLockKey(repoID int64) string {
+ return fmt.Sprintf("repo_pull_mirror_%d", repoID)
+}
+
// SyncPullMirror starts the sync of the pull mirror and schedules the next run.
func SyncPullMirror(ctx context.Context, repoID int64) bool {
log.Trace("SyncMirrors [repo_id: %v]", repoID)
@@ -428,6 +460,13 @@ func SyncPullMirror(ctx context.Context, repoID int64) bool {
log.Error("PANIC whilst SyncMirrors[repo_id: %d] Panic: %v\nStacktrace: %s", repoID, err, log.Stack(2))
}()
+ releaser, err := globallock.Lock(ctx, getRepoPullMirrorLockKey(repoID))
+ if err != nil {
+ log.Error("globallock.Lock(): %v", err)
+ return false
+ }
+ defer releaser()
+
m, err := repo_model.GetMirrorByRepoID(ctx, repoID)
if err != nil {
log.Error("SyncMirrors [repo_id: %v]: unable to GetMirrorByRepoID: %v", repoID, err)
diff --git a/services/mirror/mirror_pull_test.go b/services/mirror/mirror_pull_test.go
new file mode 100644
index 0000000000..97859be5b0
--- /dev/null
+++ b/services/mirror/mirror_pull_test.go
@@ -0,0 +1,94 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package mirror
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func Test_parseRemoteUpdateOutput(t *testing.T) {
+ output := `
+ * [new tag] v0.1.8 -> v0.1.8
+ * [new branch] master -> origin/master
+ - [deleted] (none) -> origin/test1
+ - [deleted] (none) -> tag1
+ + f895a1e...957a993 test2 -> origin/test2 (forced update)
+ 957a993..a87ba5f test3 -> origin/test3
+ * [new ref] refs/pull/26595/head -> refs/pull/26595/head
+ * [new ref] refs/pull/26595/merge -> refs/pull/26595/merge
+ e0639e38fb..6db2410489 refs/pull/25873/head -> refs/pull/25873/head
+ + 1c97ebc746...976d27d52f refs/pull/25873/merge -> refs/pull/25873/merge (forced update)
+`
+ results := parseRemoteUpdateOutput(output, "origin")
+ assert.Len(t, results, 10)
+ assert.Equal(t, "refs/tags/v0.1.8", results[0].refName.String())
+ assert.Equal(t, gitShortEmptySha, results[0].oldCommitID)
+ assert.Empty(t, results[0].newCommitID)
+
+ assert.Equal(t, "refs/heads/master", results[1].refName.String())
+ assert.Equal(t, gitShortEmptySha, results[1].oldCommitID)
+ assert.Empty(t, results[1].newCommitID)
+
+ assert.Equal(t, "refs/heads/test1", results[2].refName.String())
+ assert.Empty(t, results[2].oldCommitID)
+ assert.Equal(t, gitShortEmptySha, results[2].newCommitID)
+
+ assert.Equal(t, "refs/tags/tag1", results[3].refName.String())
+ assert.Empty(t, results[3].oldCommitID)
+ assert.Equal(t, gitShortEmptySha, results[3].newCommitID)
+
+ assert.Equal(t, "refs/heads/test2", results[4].refName.String())
+ assert.Equal(t, "f895a1e", results[4].oldCommitID)
+ assert.Equal(t, "957a993", results[4].newCommitID)
+
+ assert.Equal(t, "refs/heads/test3", results[5].refName.String())
+ assert.Equal(t, "957a993", results[5].oldCommitID)
+ assert.Equal(t, "a87ba5f", results[5].newCommitID)
+
+ assert.Equal(t, "refs/pull/26595/head", results[6].refName.String())
+ assert.Equal(t, gitShortEmptySha, results[6].oldCommitID)
+ assert.Empty(t, results[6].newCommitID)
+
+ assert.Equal(t, "refs/pull/26595/merge", results[7].refName.String())
+ assert.Equal(t, gitShortEmptySha, results[7].oldCommitID)
+ assert.Empty(t, results[7].newCommitID)
+
+ assert.Equal(t, "refs/pull/25873/head", results[8].refName.String())
+ assert.Equal(t, "e0639e38fb", results[8].oldCommitID)
+ assert.Equal(t, "6db2410489", results[8].newCommitID)
+
+ assert.Equal(t, "refs/pull/25873/merge", results[9].refName.String())
+ assert.Equal(t, "1c97ebc746", results[9].oldCommitID)
+ assert.Equal(t, "976d27d52f", results[9].newCommitID)
+}
+
+func Test_checkRecoverableSyncError(t *testing.T) {
+ cases := []struct {
+ recoverable bool
+ message string
+ }{
+ // A race condition in http git-fetch where certain refs were listed on the remote and are no longer there, would exit status 128
+ {true, "fatal: remote error: upload-pack: not our ref 988881adc9fc3655077dc2d4d757d480b5ea0e11"},
+ // A race condition where a local gc/prune removes a named ref during a git-fetch would exit status 1
+ {true, "cannot lock ref 'refs/pull/123456/merge': unable to resolve reference 'refs/pull/134153/merge'"},
+ // A race condition in http git-fetch where named refs were listed on the remote and are no longer there
+ {true, "error: cannot lock ref 'refs/remotes/origin/foo': unable to resolve reference 'refs/remotes/origin/foo': reference broken"},
+ // A race condition in http git-fetch where named refs were force-pushed during the update, would exit status 128
+ {true, "error: cannot lock ref 'refs/pull/123456/merge': is at 988881adc9fc3655077dc2d4d757d480b5ea0e11 but expected 7f894307ffc9553edbd0b671cab829786866f7b2"},
+ // A race condition with other local git operations, such as git-maintenance, would exit status 128 (note the uppercase "U" in "Unable")
+ {true, "fatal: Unable to create '/data/gitea-repositories/foo-org/bar-repo.git/./objects/info/commit-graphs/commit-graph-chain.lock': File exists."},
+ // Missing or unauthorized credentials, would exit status 128
+ {false, "fatal: Authentication failed for 'https://example.com/foo-does-not-exist/bar.git/'"},
+ // A non-existent remote repository, would exit status 128
+ {false, "fatal: Could not read from remote repository."},
+ // A non-functioning proxy, would exit status 128
+ {false, "fatal: unable to access 'https://example.com/foo-does-not-exist/bar.git/': Failed to connect to configured-https-proxy port 1080 after 0 ms: Couldn't connect to server"},
+ }
+
+ for _, c := range cases {
+ assert.Equal(t, c.recoverable, checkRecoverableSyncError(c.message), "test case: %s", c.message)
+ }
+}
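
The table above only pins down the expected behaviour of checkRecoverableSyncError; the helper itself lives in mirror_pull.go and is outside the hunks shown here. A minimal sketch that satisfies these cases (an illustration under that assumption, not necessarily the exact upstream implementation) could be:

// Sketch only (package mirror, standard library "strings"): classify git-fetch
// stderr output as a recoverable race condition or a hard failure.
func checkRecoverableSyncError(stderrMessage string) bool {
	switch {
	case strings.Contains(stderrMessage, "unable to resolve reference") && strings.Contains(stderrMessage, "reference broken"):
		return true
	case strings.Contains(stderrMessage, "remote error") && strings.Contains(stderrMessage, "not our ref"):
		return true
	case strings.Contains(stderrMessage, "cannot lock ref") && strings.Contains(stderrMessage, "unable to resolve reference"):
		return true
	case strings.Contains(stderrMessage, "cannot lock ref") && strings.Contains(stderrMessage, "but expected"):
		return true
	case strings.Contains(stderrMessage, "Unable to create") && strings.Contains(stderrMessage, ".lock': File exists"):
		return true
	default:
		return false
	}
}

Every branch corresponds to one of the recoverable cases in the test, while authentication failures, unreachable remotes and proxy errors fall through to false and are treated as hard sync failures.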
diff --git a/services/mirror/mirror_push.go b/services/mirror/mirror_push.go
index 02ff97b1f0..9b57427d98 100644
--- a/services/mirror/mirror_push.go
+++ b/services/mirror/mirror_push.go
@@ -18,6 +18,7 @@ import (
"code.gitea.io/gitea/modules/lfs"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/process"
+ "code.gitea.io/gitea/modules/proxy"
"code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
@@ -29,14 +30,14 @@ var stripExitStatus = regexp.MustCompile(`exit status \d+ - `)
// AddPushMirrorRemote registers the push mirror remote.
func AddPushMirrorRemote(ctx context.Context, m *repo_model.PushMirror, addr string) error {
addRemoteAndConfig := func(addr, path string) error {
- cmd := git.NewCommand(ctx, "remote", "add", "--mirror=push").AddDynamicArguments(m.RemoteName, addr)
- if _, _, err := cmd.RunStdString(&git.RunOpts{Dir: path}); err != nil {
+ cmd := git.NewCommand("remote", "add", "--mirror=push").AddDynamicArguments(m.RemoteName, addr)
+ if _, _, err := cmd.RunStdString(ctx, &git.RunOpts{Dir: path}); err != nil {
return err
}
- if _, _, err := git.NewCommand(ctx, "config", "--add").AddDynamicArguments("remote."+m.RemoteName+".push", "+refs/heads/*:refs/heads/*").RunStdString(&git.RunOpts{Dir: path}); err != nil {
+ if _, _, err := git.NewCommand("config", "--add").AddDynamicArguments("remote."+m.RemoteName+".push", "+refs/heads/*:refs/heads/*").RunStdString(ctx, &git.RunOpts{Dir: path}); err != nil {
return err
}
- if _, _, err := git.NewCommand(ctx, "config", "--add").AddDynamicArguments("remote."+m.RemoteName+".push", "+refs/tags/*:refs/tags/*").RunStdString(&git.RunOpts{Dir: path}); err != nil {
+ if _, _, err := git.NewCommand("config", "--add").AddDynamicArguments("remote."+m.RemoteName+".push", "+refs/tags/*:refs/tags/*").RunStdString(ctx, &git.RunOpts{Dir: path}); err != nil {
return err
}
return nil
@@ -60,15 +61,15 @@ func AddPushMirrorRemote(ctx context.Context, m *repo_model.PushMirror, addr str
// RemovePushMirrorRemote removes the push mirror remote.
func RemovePushMirrorRemote(ctx context.Context, m *repo_model.PushMirror) error {
- cmd := git.NewCommand(ctx, "remote", "rm").AddDynamicArguments(m.RemoteName)
+ cmd := git.NewCommand("remote", "rm").AddDynamicArguments(m.RemoteName)
_ = m.GetRepository(ctx)
- if _, _, err := cmd.RunStdString(&git.RunOpts{Dir: m.Repo.RepoPath()}); err != nil {
+ if _, _, err := cmd.RunStdString(ctx, &git.RunOpts{Dir: m.Repo.RepoPath()}); err != nil {
return err
}
if m.Repo.HasWiki() {
- if _, _, err := cmd.RunStdString(&git.RunOpts{Dir: m.Repo.WikiPath()}); err != nil {
+ if _, _, err := cmd.RunStdString(ctx, &git.RunOpts{Dir: m.Repo.WikiPath()}); err != nil {
// The wiki remote may not exist
log.Warn("Wiki Remote[%d] could not be removed: %v", m.ID, err)
}
@@ -142,7 +143,7 @@ func runPushSync(ctx context.Context, m *repo_model.PushMirror) error {
var gitRepo *git.Repository
if isWiki {
- gitRepo, err = gitrepo.OpenWikiRepository(ctx, repo)
+ gitRepo, err = gitrepo.OpenRepository(ctx, repo.WikiStorageRepo())
} else {
gitRepo, err = gitrepo.OpenRepository(ctx, repo)
}
@@ -161,11 +162,13 @@ func runPushSync(ctx context.Context, m *repo_model.PushMirror) error {
log.Trace("Pushing %s mirror[%d] remote %s", path, m.ID, m.RemoteName)
+ envs := proxy.EnvWithProxy(remoteURL.URL)
if err := git.Push(ctx, path, git.PushOptions{
Remote: m.RemoteName,
Force: true,
Mirror: true,
Timeout: timeout,
+ Env: envs,
}); err != nil {
log.Error("Error pushing %s mirror[%d] remote %s: %v", path, m.ID, m.RemoteName, err)
diff --git a/services/mirror/mirror_test.go b/services/mirror/mirror_test.go
deleted file mode 100644
index 8ad524b608..0000000000
--- a/services/mirror/mirror_test.go
+++ /dev/null
@@ -1,46 +0,0 @@
-// Copyright 2023 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package mirror
-
-import (
- "testing"
-
- "github.com/stretchr/testify/assert"
-)
-
-func Test_parseRemoteUpdateOutput(t *testing.T) {
- output := `
- * [new tag] v0.1.8 -> v0.1.8
- * [new branch] master -> origin/master
- - [deleted] (none) -> origin/test1
- - [deleted] (none) -> tag1
- + f895a1e...957a993 test2 -> origin/test2 (forced update)
- 957a993..a87ba5f test3 -> origin/test3
-`
- results := parseRemoteUpdateOutput(output, "origin")
- assert.Len(t, results, 6)
- assert.EqualValues(t, "refs/tags/v0.1.8", results[0].refName.String())
- assert.EqualValues(t, gitShortEmptySha, results[0].oldCommitID)
- assert.EqualValues(t, "", results[0].newCommitID)
-
- assert.EqualValues(t, "refs/heads/master", results[1].refName.String())
- assert.EqualValues(t, gitShortEmptySha, results[1].oldCommitID)
- assert.EqualValues(t, "", results[1].newCommitID)
-
- assert.EqualValues(t, "refs/heads/test1", results[2].refName.String())
- assert.EqualValues(t, "", results[2].oldCommitID)
- assert.EqualValues(t, gitShortEmptySha, results[2].newCommitID)
-
- assert.EqualValues(t, "refs/tags/tag1", results[3].refName.String())
- assert.EqualValues(t, "", results[3].oldCommitID)
- assert.EqualValues(t, gitShortEmptySha, results[3].newCommitID)
-
- assert.EqualValues(t, "refs/heads/test2", results[4].refName.String())
- assert.EqualValues(t, "f895a1e", results[4].oldCommitID)
- assert.EqualValues(t, "957a993", results[4].newCommitID)
-
- assert.EqualValues(t, "refs/heads/test3", results[5].refName.String())
- assert.EqualValues(t, "957a993", results[5].oldCommitID)
- assert.EqualValues(t, "a87ba5f", results[5].newCommitID)
-}
diff --git a/services/notify/notifier.go b/services/notify/notifier.go
index 29bbb5702b..40428454be 100644
--- a/services/notify/notifier.go
+++ b/services/notify/notifier.go
@@ -6,6 +6,7 @@ package notify
import (
"context"
+ actions_model "code.gitea.io/gitea/models/actions"
git_model "code.gitea.io/gitea/models/git"
issues_model "code.gitea.io/gitea/models/issues"
packages_model "code.gitea.io/gitea/models/packages"
@@ -77,4 +78,6 @@ type Notifier interface {
ChangeDefaultBranch(ctx context.Context, repo *repo_model.Repository)
CreateCommitStatus(ctx context.Context, repo *repo_model.Repository, commit *repository.PushCommit, sender *user_model.User, status *git_model.CommitStatus)
+
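+ // WorkflowJobStatusUpdate is called whenever the status of an Actions workflow job changes, together with the task executing it (if any).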
+ WorkflowJobStatusUpdate(ctx context.Context, repo *repo_model.Repository, sender *user_model.User, job *actions_model.ActionRunJob, task *actions_model.ActionTask)
}
diff --git a/services/notify/notify.go b/services/notify/notify.go
index c97d0fcbaf..9f8be4b577 100644
--- a/services/notify/notify.go
+++ b/services/notify/notify.go
@@ -6,6 +6,7 @@ package notify
import (
"context"
+ actions_model "code.gitea.io/gitea/models/actions"
git_model "code.gitea.io/gitea/models/git"
issues_model "code.gitea.io/gitea/models/issues"
packages_model "code.gitea.io/gitea/models/packages"
@@ -374,3 +375,9 @@ func CreateCommitStatus(ctx context.Context, repo *repo_model.Repository, commit
notifier.CreateCommitStatus(ctx, repo, commit, sender, status)
}
}
+
+func WorkflowJobStatusUpdate(ctx context.Context, repo *repo_model.Repository, sender *user_model.User, job *actions_model.ActionRunJob, task *actions_model.ActionTask) {
+ for _, notifier := range notifiers {
+ notifier.WorkflowJobStatusUpdate(ctx, repo, sender, job, task)
+ }
+}
diff --git a/services/notify/null.go b/services/notify/null.go
index 7354efd701..9c794a2342 100644
--- a/services/notify/null.go
+++ b/services/notify/null.go
@@ -6,6 +6,7 @@ package notify
import (
"context"
+ actions_model "code.gitea.io/gitea/models/actions"
git_model "code.gitea.io/gitea/models/git"
issues_model "code.gitea.io/gitea/models/issues"
packages_model "code.gitea.io/gitea/models/packages"
@@ -212,3 +213,6 @@ func (*NullNotifier) ChangeDefaultBranch(ctx context.Context, repo *repo_model.R
func (*NullNotifier) CreateCommitStatus(ctx context.Context, repo *repo_model.Repository, commit *repository.PushCommit, sender *user_model.User, status *git_model.CommitStatus) {
}
+
+func (*NullNotifier) WorkflowJobStatusUpdate(ctx context.Context, repo *repo_model.Repository, sender *user_model.User, job *actions_model.ActionRunJob, task *actions_model.ActionTask) {
+}
diff --git a/services/org/team_test.go b/services/org/team_test.go
index 3791776e46..a7070fadb0 100644
--- a/services/org/team_test.go
+++ b/services/org/team_test.go
@@ -88,7 +88,7 @@ func TestUpdateTeam(t *testing.T) {
assert.True(t, strings.HasPrefix(team.Description, "A long description!"))
access := unittest.AssertExistsAndLoadBean(t, &access_model.Access{UserID: 4, RepoID: 3})
- assert.EqualValues(t, perm.AccessModeAdmin, access.Mode)
+ assert.Equal(t, perm.AccessModeAdmin, access.Mode)
unittest.CheckConsistencyFor(t, &organization.Team{ID: team.ID})
}
diff --git a/services/org/user.go b/services/org/user.go
index 0e74d006bb..3565ecc2fc 100644
--- a/services/org/user.go
+++ b/services/org/user.go
@@ -64,7 +64,7 @@ func RemoveOrgUser(ctx context.Context, org *organization.Organization, user *us
if err != nil {
return fmt.Errorf("AccessibleReposEnv: %w", err)
}
- repoIDs, err := env.RepoIDs(1, org.NumRepos)
+ repoIDs, err := env.RepoIDs(ctx, 1, org.NumRepos)
if err != nil {
return fmt.Errorf("GetUserRepositories [%d]: %w", user.ID, err)
}
diff --git a/services/org/user_test.go b/services/org/user_test.go
index 96d1a1c8ca..c61d600d90 100644
--- a/services/org/user_test.go
+++ b/services/org/user_test.go
@@ -53,7 +53,7 @@ func TestRemoveOrgUser(t *testing.T) {
assert.NoError(t, RemoveOrgUser(db.DefaultContext, org, user))
unittest.AssertNotExistsBean(t, &organization.OrgUser{OrgID: org.ID, UID: user.ID})
org = unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: org.ID})
- assert.EqualValues(t, expectedNumMembers, org.NumMembers)
+ assert.Equal(t, expectedNumMembers, org.NumMembers)
}
org3 := unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 3})
diff --git a/services/packages/arch/repository.go b/services/packages/arch/repository.go
index 7fb4222cf6..a12af82ba5 100644
--- a/services/packages/arch/repository.go
+++ b/services/packages/arch/repository.go
@@ -235,6 +235,28 @@ func buildPackagesIndex(ctx context.Context, ownerID int64, repoVersion *package
return packages_service.DeletePackageFile(ctx, pf)
}
+ vpfs := make(map[int64]*entryOptions)
+ for _, pf := range pfs {
+ current := &entryOptions{
+ File: pf,
+ }
+ current.Version, err = packages_model.GetVersionByID(ctx, pf.VersionID)
+ if err != nil {
+ return err
+ }
+
+ // here we compare the versions ourselves instead of using SearchLatestVersions, because we shouldn't allow "downgrading" to an older version via the "latest" one.
+ // https://wiki.archlinux.org/title/Downgrading_packages : randomly downgrading can mess up dependencies:
+ // If a downgrade involves a soname change, all dependencies may need downgrading or rebuilding too.
+ if old, ok := vpfs[current.Version.PackageID]; ok {
+ if compareVersions(old.Version.Version, current.Version.Version) == -1 {
+ vpfs[current.Version.PackageID] = current
+ }
+ } else {
+ vpfs[current.Version.PackageID] = current
+ }
+ }
+
indexContent, _ := packages_module.NewHashedBuffer()
defer indexContent.Close()
@@ -243,15 +265,7 @@ func buildPackagesIndex(ctx context.Context, ownerID int64, repoVersion *package
cache := make(map[int64]*packages_model.Package)
- for _, pf := range pfs {
- opts := &entryOptions{
- File: pf,
- }
-
- opts.Version, err = packages_model.GetVersionByID(ctx, pf.VersionID)
- if err != nil {
- return err
- }
+ for _, opts := range vpfs {
if err := json.Unmarshal([]byte(opts.Version.MetadataJSON), &opts.VersionMetadata); err != nil {
return err
}
@@ -263,12 +277,12 @@ func buildPackagesIndex(ctx context.Context, ownerID int64, repoVersion *package
}
cache[opts.Package.ID] = opts.Package
}
- opts.Blob, err = packages_model.GetBlobByID(ctx, pf.BlobID)
+ opts.Blob, err = packages_model.GetBlobByID(ctx, opts.File.BlobID)
if err != nil {
return err
}
- sig, err := packages_model.GetPropertiesByName(ctx, packages_model.PropertyTypeFile, pf.ID, arch_module.PropertySignature)
+ sig, err := packages_model.GetPropertiesByName(ctx, packages_model.PropertyTypeFile, opts.File.ID, arch_module.PropertySignature)
if err != nil {
return err
}
@@ -277,7 +291,7 @@ func buildPackagesIndex(ctx context.Context, ownerID int64, repoVersion *package
}
opts.Signature = sig[0].Value
- meta, err := packages_model.GetPropertiesByName(ctx, packages_model.PropertyTypeFile, pf.ID, arch_module.PropertyMetadata)
+ meta, err := packages_model.GetPropertiesByName(ctx, packages_model.PropertyTypeFile, opts.File.ID, arch_module.PropertyMetadata)
if err != nil {
return err
}
diff --git a/services/packages/arch/vercmp.go b/services/packages/arch/vercmp.go
new file mode 100644
index 0000000000..0d33dda0f1
--- /dev/null
+++ b/services/packages/arch/vercmp.go
@@ -0,0 +1,113 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package arch
+
+import (
+ "strconv"
+ "strings"
+ "unicode"
+)
+
+// https://gitlab.archlinux.org/pacman/pacman/-/blob/d55b47e5512808b67bc944feb20c2bcc6c1a4c45/lib/libalpm/version.c
+
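+// parseEVR splits a pacman-style [epoch:]version[-release] string into its parts, defaulting the epoch to "0" and the release to "1" when they are omitted.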
+func parseEVR(evr string) (epoch, version, release string) {
+ if before, after, f := strings.Cut(evr, ":"); f {
+ epoch = before
+ evr = after
+ } else {
+ epoch = "0"
+ }
+
+ if before, after, f := strings.Cut(evr, "-"); f {
+ version = before
+ release = after
+ } else {
+ version = evr
+ release = "1"
+ }
+ return epoch, version, release
+}
+
+func compareSegments(a, b []string) int {
+ lenA, lenB := len(a), len(b)
+ var l int
+ if lenA > lenB {
+ l = lenB
+ } else {
+ l = lenA
+ }
+ for i := 0; i < l; i++ {
+ if r := compare(a[i], b[i]); r != 0 {
+ return r
+ }
+ }
+ if lenA == lenB {
+ return 0
+ } else if l == lenA {
+ return -1
+ }
+ return 1
+}
+
+func compare(a, b string) int {
+ if a == b {
+ return 0
+ }
+
+ aNumeric := isNumeric(a)
+ bNumeric := isNumeric(b)
+
+ if aNumeric && bNumeric {
+ aInt, _ := strconv.Atoi(a)
+ bInt, _ := strconv.Atoi(b)
+ switch {
+ case aInt < bInt:
+ return -1
+ case aInt > bInt:
+ return 1
+ default:
+ return 0
+ }
+ }
+
+ if aNumeric {
+ return 1
+ }
+ if bNumeric {
+ return -1
+ }
+
+ return strings.Compare(a, b)
+}
+
+func isNumeric(s string) bool {
+ for _, c := range s {
+ if !unicode.IsDigit(c) {
+ return false
+ }
+ }
+ return true
+}
+
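+// compareVersions compares two pacman-style version strings and returns -1, 0 or 1, following the vercmp(8) ordering: epoch first, then dot-separated version segments, then release.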
+func compareVersions(a, b string) int {
+ if a == b {
+ return 0
+ }
+
+ epochA, versionA, releaseA := parseEVR(a)
+ epochB, versionB, releaseB := parseEVR(b)
+
+ if res := compareSegments([]string{epochA}, []string{epochB}); res != 0 {
+ return res
+ }
+
+ if res := compareSegments(strings.Split(versionA, "."), strings.Split(versionB, ".")); res != 0 {
+ return res
+ }
+
+ return compareSegments([]string{releaseA}, []string{releaseB})
+}
diff --git a/services/packages/arch/vercmp_test.go b/services/packages/arch/vercmp_test.go
new file mode 100644
index 0000000000..2014a6d429
--- /dev/null
+++ b/services/packages/arch/vercmp_test.go
@@ -0,0 +1,27 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package arch
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestCompareVersions(t *testing.T) {
+ // https://man.archlinux.org/man/vercmp.8.en
+ checks := [][]string{
+ {"1.0a", "1.0b", "1.0beta", "1.0p", "1.0pre", "1.0rc", "1.0", "1.0.a", "1.0.1"},
+ {"1", "1.0", "1.1", "1.1.1", "1.2", "2.0", "3.0.0"},
+ }
+ for _, check := range checks {
+ for i := 0; i < len(check)-1; i++ {
+ require.Equal(t, -1, compareVersions(check[i], check[i+1]))
+ require.Equal(t, 1, compareVersions(check[i+1], check[i]))
+ }
+ }
+ require.Equal(t, 1, compareVersions("1.0-2", "1.0"))
+ require.Equal(t, 0, compareVersions("0:1.0-1", "1.0"))
+ require.Equal(t, 1, compareVersions("1:1.0-1", "2.0"))
+}
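
The last three assertions rely on the defaults from parseEVR: a missing epoch counts as "0", a missing release as "1", and an explicit epoch outranks both the version and the release. A hypothetical walk-through (illustration only, not part of the change; it would live in package arch and need "fmt"):

// Illustration of the epoch/release defaults exercised by TestCompareVersions.
func exampleVercmp() {
	fmt.Println(compareVersions("1.0-2", "1.0"))   // 1: versions equal, release "2" beats the default release "1"
	fmt.Println(compareVersions("0:1.0-1", "1.0")) // 0: explicit epoch and release match the defaults "0" and "1"
	fmt.Println(compareVersions("1:1.0-1", "2.0")) // 1: epoch "1" beats the default epoch "0", so the version part is never compared
}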
diff --git a/services/packages/cargo/index.go b/services/packages/cargo/index.go
index 88a463e4c6..decb224b85 100644
--- a/services/packages/cargo/index.go
+++ b/services/packages/cargo/index.go
@@ -78,7 +78,7 @@ func RebuildIndex(ctx context.Context, doer, owner *user_model.User) error {
"Rebuild Cargo Index",
func(t *files_service.TemporaryUploadRepository) error {
// Remove all existing content but the Cargo config
- files, err := t.LsFiles()
+ files, err := t.LsFiles(ctx)
if err != nil {
return err
}
@@ -89,7 +89,7 @@ func RebuildIndex(ctx context.Context, doer, owner *user_model.User) error {
break
}
}
- if err := t.RemoveFilesFromIndex(files...); err != nil {
+ if err := t.RemoveFilesFromIndex(ctx, files...); err != nil {
return err
}
@@ -204,7 +204,7 @@ func addOrUpdatePackageIndex(ctx context.Context, t *files_service.TemporaryUplo
return nil
}
- return writeObjectToIndex(t, BuildPackagePath(p.LowerName), b)
+ return writeObjectToIndex(ctx, t, BuildPackagePath(p.LowerName), b)
}
func getOrCreateIndexRepository(ctx context.Context, doer, owner *user_model.User) (*repo_model.Repository, error) {
@@ -247,34 +247,34 @@ func createOrUpdateConfigFile(ctx context.Context, repo *repo_model.Repository,
"Initialize Cargo Config",
func(t *files_service.TemporaryUploadRepository) error {
var b bytes.Buffer
- err := json.NewEncoder(&b).Encode(BuildConfig(owner, setting.Service.RequireSignInView || owner.Visibility != structs.VisibleTypePublic || repo.IsPrivate))
+ err := json.NewEncoder(&b).Encode(BuildConfig(owner, setting.Service.RequireSignInViewStrict || owner.Visibility != structs.VisibleTypePublic || repo.IsPrivate))
if err != nil {
return err
}
- return writeObjectToIndex(t, ConfigFileName, &b)
+ return writeObjectToIndex(ctx, t, ConfigFileName, &b)
},
)
}
// This is a shorter version of CreateOrUpdateRepoFile which allows to perform multiple actions on a git repository
func alterRepositoryContent(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, commitMessage string, fn func(*files_service.TemporaryUploadRepository) error) error {
- t, err := files_service.NewTemporaryUploadRepository(ctx, repo)
+ t, err := files_service.NewTemporaryUploadRepository(repo)
if err != nil {
return err
}
defer t.Close()
var lastCommitID string
- if err := t.Clone(repo.DefaultBranch, true); err != nil {
+ if err := t.Clone(ctx, repo.DefaultBranch, true); err != nil {
if !git.IsErrBranchNotExist(err) || !repo.IsEmpty {
return err
}
- if err := t.Init(repo.ObjectFormatName); err != nil {
+ if err := t.Init(ctx, repo.ObjectFormatName); err != nil {
return err
}
} else {
- if err := t.SetDefaultIndex(); err != nil {
+ if err := t.SetDefaultIndex(ctx); err != nil {
return err
}
@@ -290,7 +290,7 @@ func alterRepositoryContent(ctx context.Context, doer *user_model.User, repo *re
return err
}
- treeHash, err := t.WriteTree()
+ treeHash, err := t.WriteTree(ctx)
if err != nil {
return err
}
@@ -301,19 +301,19 @@ func alterRepositoryContent(ctx context.Context, doer *user_model.User, repo *re
CommitMessage: commitMessage,
DoerUser: doer,
}
- commitHash, err := t.CommitTree(commitOpts)
+ commitHash, err := t.CommitTree(ctx, commitOpts)
if err != nil {
return err
}
- return t.Push(doer, commitHash, repo.DefaultBranch)
+ return t.Push(ctx, doer, commitHash, repo.DefaultBranch)
}
-func writeObjectToIndex(t *files_service.TemporaryUploadRepository, path string, r io.Reader) error {
- hash, err := t.HashObject(r)
+func writeObjectToIndex(ctx context.Context, t *files_service.TemporaryUploadRepository, path string, r io.Reader) error {
+ hash, err := t.HashObject(ctx, r)
if err != nil {
return err
}
- return t.AddObjectToIndex("100644", hash, path)
+ return t.AddObjectToIndex(ctx, "100644", hash, path)
}
diff --git a/services/packages/package_update.go b/services/packages/package_update.go
new file mode 100644
index 0000000000..4a22ee7a62
--- /dev/null
+++ b/services/packages/package_update.go
@@ -0,0 +1,79 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package packages
+
+import (
+ "context"
+ "fmt"
+
+ org_model "code.gitea.io/gitea/models/organization"
+ packages_model "code.gitea.io/gitea/models/packages"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/util"
+)
+
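+// LinkToRepository links the package to the given repository: both must share the same owner, the package must not already be linked, and the doer needs write access to the repository's packages unit.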
+func LinkToRepository(ctx context.Context, pkg *packages_model.Package, repo *repo_model.Repository, doer *user_model.User) error {
+ if pkg.OwnerID != repo.OwnerID {
+ return util.ErrPermissionDenied
+ }
+ if pkg.RepoID > 0 {
+ return util.ErrInvalidArgument
+ }
+
+ perms, err := access_model.GetUserRepoPermission(ctx, repo, doer)
+ if err != nil {
+ return fmt.Errorf("error getting permissions for user %d on repository %d: %w", doer.ID, repo.ID, err)
+ }
+ if !perms.CanWrite(unit.TypePackages) {
+ return util.ErrPermissionDenied
+ }
+
+ if err := packages_model.SetRepositoryLink(ctx, pkg.ID, repo.ID); err != nil {
+ return fmt.Errorf("error while linking package '%v' to repo '%v' : %w", pkg.Name, repo.FullName(), err)
+ }
+ return nil
+}
+
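+// UnlinkFromRepository removes the repository link from the package. The doer needs write access to the packages unit of the linked repository (if it still exists) and must additionally be an instance admin, the owning user, or an admin of the owning organization.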
+func UnlinkFromRepository(ctx context.Context, pkg *packages_model.Package, doer *user_model.User) error {
+ if pkg.RepoID == 0 {
+ return util.ErrInvalidArgument
+ }
+
+ repo, err := repo_model.GetRepositoryByID(ctx, pkg.RepoID)
+ if err != nil && !repo_model.IsErrRepoNotExist(err) {
+ return fmt.Errorf("error getting repository %d: %w", pkg.RepoID, err)
+ }
+ if err == nil {
+ perms, err := access_model.GetUserRepoPermission(ctx, repo, doer)
+ if err != nil {
+ return fmt.Errorf("error getting permissions for user %d on repository %d: %w", doer.ID, repo.ID, err)
+ }
+ if !perms.CanWrite(unit.TypePackages) {
+ return util.ErrPermissionDenied
+ }
+ }
+
+ user, err := user_model.GetUserByID(ctx, pkg.OwnerID)
+ if err != nil {
+ return err
+ }
+ if !doer.IsAdmin {
+ if !user.IsOrganization() {
+ if doer.ID != pkg.OwnerID {
+ return fmt.Errorf("no permission to unlink package '%v' from its repository, or packages are disabled", pkg.Name)
+ }
+ } else {
+ isOrgAdmin, err := org_model.OrgFromUser(user).IsOrgAdmin(ctx, doer.ID)
+ if err != nil {
+ return err
+ } else if !isOrgAdmin {
+ return fmt.Errorf("no permission to unlink package '%v' from its repository, or packages are disabled", pkg.Name)
+ }
+ }
+ }
+ return packages_model.UnlinkRepository(ctx, pkg.ID)
+}
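
Both helpers deliberately return the sentinel errors from modules/util so that callers can decide on the HTTP status. A hypothetical caller-side mapping (not part of this diff; needs "errors" and "net/http") might look like:

// Hypothetical sketch: translate the errors returned by
// LinkToRepository / UnlinkFromRepository into HTTP status codes.
func statusForPackageLinkError(err error) int {
	switch {
	case err == nil:
		return http.StatusNoContent
	case errors.Is(err, util.ErrInvalidArgument):
		return http.StatusBadRequest
	case errors.Is(err, util.ErrPermissionDenied):
		return http.StatusForbidden
	default:
		return http.StatusInternalServerError // wrapped lookup errors and the "no permission to unlink" cases end up here
	}
}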
diff --git a/services/projects/issue.go b/services/projects/issue.go
index db1621a39f..090d19d2f4 100644
--- a/services/projects/issue.go
+++ b/services/projects/issue.go
@@ -11,6 +11,7 @@ import (
issues_model "code.gitea.io/gitea/models/issues"
project_model "code.gitea.io/gitea/models/project"
user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/optional"
)
// MoveIssuesOnProjectColumn moves or keeps issues in a column and sorts them inside that column
@@ -55,25 +56,152 @@ func MoveIssuesOnProjectColumn(ctx context.Context, doer *user_model.User, colum
continue
}
- _, err = db.Exec(ctx, "UPDATE `project_issue` SET project_board_id=?, sorting=? WHERE issue_id=?", column.ID, sorting, issueID)
+ projectColumnID, err := curIssue.ProjectColumnID(ctx)
if err != nil {
return err
}
- // add timeline to issue
- if _, err := issues_model.CreateComment(ctx, &issues_model.CreateCommentOptions{
- Type: issues_model.CommentTypeProjectColumn,
- Doer: doer,
- Repo: curIssue.Repo,
- Issue: curIssue,
- ProjectID: column.ProjectID,
- ProjectTitle: project.Title,
- ProjectColumnID: column.ID,
- ProjectColumnTitle: column.Title,
- }); err != nil {
+ if projectColumnID != column.ID {
+ // add timeline to issue
+ if _, err := issues_model.CreateComment(ctx, &issues_model.CreateCommentOptions{
+ Type: issues_model.CommentTypeProjectColumn,
+ Doer: doer,
+ Repo: curIssue.Repo,
+ Issue: curIssue,
+ ProjectID: column.ProjectID,
+ ProjectTitle: project.Title,
+ ProjectColumnID: column.ID,
+ ProjectColumnTitle: column.Title,
+ }); err != nil {
+ return err
+ }
+ }
+
+ _, err = db.Exec(ctx, "UPDATE `project_issue` SET project_board_id=?, sorting=? WHERE issue_id=?", column.ID, sorting, issueID)
+ if err != nil {
return err
}
}
return nil
})
}
+
+// LoadIssuesFromProject loads the issues assigned to each project column inside the given project
+func LoadIssuesFromProject(ctx context.Context, project *project_model.Project, opts *issues_model.IssuesOptions) (map[int64]issues_model.IssueList, error) {
+ issueList, err := issues_model.Issues(ctx, opts.Copy(func(o *issues_model.IssuesOptions) {
+ o.ProjectID = project.ID
+ o.SortType = "project-column-sorting"
+ }))
+ if err != nil {
+ return nil, err
+ }
+
+ if err := issueList.LoadComments(ctx); err != nil {
+ return nil, err
+ }
+
+ defaultColumn, err := project.MustDefaultColumn(ctx)
+ if err != nil {
+ return nil, err
+ }
+
+ issueColumnMap, err := issues_model.LoadProjectIssueColumnMap(ctx, project.ID, defaultColumn.ID)
+ if err != nil {
+ return nil, err
+ }
+
+ results := make(map[int64]issues_model.IssueList)
+ for _, issue := range issueList {
+ projectColumnID, ok := issueColumnMap[issue.ID]
+ if !ok {
+ continue
+ }
+ if _, ok := results[projectColumnID]; !ok {
+ results[projectColumnID] = make(issues_model.IssueList, 0)
+ }
+ results[projectColumnID] = append(results[projectColumnID], issue)
+ }
+ return results, nil
+}
+
+// loadNumClosedIssues returns the number of closed issues assigned to a project
+func loadNumClosedIssues(ctx context.Context, p *project_model.Project) error {
+ cnt, err := db.GetEngine(ctx).Table("project_issue").
+ Join("INNER", "issue", "project_issue.issue_id=issue.id").
+ Where("project_issue.project_id=? AND issue.is_closed=?", p.ID, true).
+ Cols("issue_id").
+ Count()
+ if err != nil {
+ return err
+ }
+ p.NumClosedIssues = cnt
+ return nil
+}
+
+// loadNumOpenIssues returns the number of open issues assigned to a project
+func loadNumOpenIssues(ctx context.Context, p *project_model.Project) error {
+ cnt, err := db.GetEngine(ctx).Table("project_issue").
+ Join("INNER", "issue", "project_issue.issue_id=issue.id").
+ Where("project_issue.project_id=? AND issue.is_closed=?", p.ID, false).
+ Cols("issue_id").
+ Count()
+ if err != nil {
+ return err
+ }
+ p.NumOpenIssues = cnt
+ return nil
+}
+
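+// LoadIssueNumbersForProjects fills NumIssues, NumOpenIssues and NumClosedIssues for each of the given projects.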
+func LoadIssueNumbersForProjects(ctx context.Context, projects []*project_model.Project, doer *user_model.User) error {
+ for _, project := range projects {
+ if err := LoadIssueNumbersForProject(ctx, project, doer); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
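+// LoadIssueNumbersForProject fills the issue counters of a single project; for user and organization projects the counts are restricted to issues the doer can access.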
+func LoadIssueNumbersForProject(ctx context.Context, project *project_model.Project, doer *user_model.User) error {
+ // for repository project, just get the numbers
+ if project.OwnerID == 0 {
+ if err := loadNumClosedIssues(ctx, project); err != nil {
+ return err
+ }
+ if err := loadNumOpenIssues(ctx, project); err != nil {
+ return err
+ }
+ project.NumIssues = project.NumClosedIssues + project.NumOpenIssues
+ return nil
+ }
+
+ if err := project.LoadOwner(ctx); err != nil {
+ return err
+ }
+
+ // for user or org projects, we need to check access permissions
+ opts := issues_model.IssuesOptions{
+ ProjectID: project.ID,
+ Doer: doer,
+ AllPublic: doer == nil,
+ Owner: project.Owner,
+ }
+
+ var err error
+ project.NumOpenIssues, err = issues_model.CountIssues(ctx, opts.Copy(func(o *issues_model.IssuesOptions) {
+ o.IsClosed = optional.Some(false)
+ }))
+ if err != nil {
+ return err
+ }
+
+ project.NumClosedIssues, err = issues_model.CountIssues(ctx, opts.Copy(func(o *issues_model.IssuesOptions) {
+ o.IsClosed = optional.Some(true)
+ }))
+ if err != nil {
+ return err
+ }
+
+ project.NumIssues = project.NumClosedIssues + project.NumOpenIssues
+
+ return nil
+}
diff --git a/services/projects/issue_test.go b/services/projects/issue_test.go
new file mode 100644
index 0000000000..e76d31e757
--- /dev/null
+++ b/services/projects/issue_test.go
@@ -0,0 +1,210 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package project
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ org_model "code.gitea.io/gitea/models/organization"
+ project_model "code.gitea.io/gitea/models/project"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func Test_Projects(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+
+ userAdmin := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+ user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ org3 := unittest.AssertExistsAndLoadBean(t, &org_model.Organization{ID: 3})
+ user4 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4})
+
+ t.Run("User projects", func(t *testing.T) {
+ pi1 := project_model.ProjectIssue{
+ ProjectID: 4,
+ IssueID: 1,
+ ProjectColumnID: 4,
+ }
+ err := db.Insert(db.DefaultContext, &pi1)
+ assert.NoError(t, err)
+ defer func() {
+ _, err = db.DeleteByID[project_model.ProjectIssue](db.DefaultContext, pi1.ID)
+ assert.NoError(t, err)
+ }()
+
+ pi2 := project_model.ProjectIssue{
+ ProjectID: 4,
+ IssueID: 4,
+ ProjectColumnID: 4,
+ }
+ err = db.Insert(db.DefaultContext, &pi2)
+ assert.NoError(t, err)
+ defer func() {
+ _, err = db.DeleteByID[project_model.ProjectIssue](db.DefaultContext, pi2.ID)
+ assert.NoError(t, err)
+ }()
+
+ projects, err := db.Find[project_model.Project](db.DefaultContext, project_model.SearchOptions{
+ OwnerID: user2.ID,
+ })
+ assert.NoError(t, err)
+ assert.Len(t, projects, 3)
+ assert.EqualValues(t, 4, projects[0].ID)
+
+ t.Run("Authenticated user", func(t *testing.T) {
+ columnIssues, err := LoadIssuesFromProject(db.DefaultContext, projects[0], &issues_model.IssuesOptions{
+ Owner: user2,
+ Doer: user2,
+ })
+ assert.NoError(t, err)
+ assert.Len(t, columnIssues, 1) // column 4 has 2 issues; column 6 is not included because it has 0 issues
+ assert.Len(t, columnIssues[4], 2) // user2 can visit both issues, one from a public repository and one from a private repository
+ })
+
+ t.Run("Anonymous user", func(t *testing.T) {
+ columnIssues, err := LoadIssuesFromProject(db.DefaultContext, projects[0], &issues_model.IssuesOptions{
+ AllPublic: true,
+ })
+ assert.NoError(t, err)
+ assert.Len(t, columnIssues, 1)
+ assert.Len(t, columnIssues[4], 1) // anonymous user can only visit public repo issues
+ })
+
+ t.Run("Authenticated user with no permission to the private repo", func(t *testing.T) {
+ columnIssues, err := LoadIssuesFromProject(db.DefaultContext, projects[0], &issues_model.IssuesOptions{
+ Owner: user2,
+ Doer: user4,
+ })
+ assert.NoError(t, err)
+ assert.Len(t, columnIssues, 1)
+ assert.Len(t, columnIssues[4], 1) // user4 can only visit public repo issues
+ })
+ })
+
+ t.Run("Org projects", func(t *testing.T) {
+ project1 := project_model.Project{
+ Title: "project in an org",
+ OwnerID: org3.ID,
+ Type: project_model.TypeOrganization,
+ TemplateType: project_model.TemplateTypeBasicKanban,
+ }
+ err := project_model.NewProject(db.DefaultContext, &project1)
+ assert.NoError(t, err)
+ defer func() {
+ err := project_model.DeleteProjectByID(db.DefaultContext, project1.ID)
+ assert.NoError(t, err)
+ }()
+
+ column1 := project_model.Column{
+ Title: "column 1",
+ ProjectID: project1.ID,
+ }
+ err = project_model.NewColumn(db.DefaultContext, &column1)
+ assert.NoError(t, err)
+
+ column2 := project_model.Column{
+ Title: "column 2",
+ ProjectID: project1.ID,
+ }
+ err = project_model.NewColumn(db.DefaultContext, &column2)
+ assert.NoError(t, err)
+
+ // issue 6 belongs to private repo 3 under org 3
+ issue6 := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 6})
+ err = issues_model.IssueAssignOrRemoveProject(db.DefaultContext, issue6, user2, project1.ID, column1.ID)
+ assert.NoError(t, err)
+
+ // issue 16 belongs to public repo 16 under org 3
+ issue16 := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 16})
+ err = issues_model.IssueAssignOrRemoveProject(db.DefaultContext, issue16, user2, project1.ID, column1.ID)
+ assert.NoError(t, err)
+
+ projects, err := db.Find[project_model.Project](db.DefaultContext, project_model.SearchOptions{
+ OwnerID: org3.ID,
+ })
+ assert.NoError(t, err)
+ assert.Len(t, projects, 1)
+ assert.Equal(t, project1.ID, projects[0].ID)
+
+ t.Run("Authenticated user", func(t *testing.T) {
+ columnIssues, err := LoadIssuesFromProject(db.DefaultContext, projects[0], &issues_model.IssuesOptions{
+ Owner: org3.AsUser(),
+ Doer: userAdmin,
+ })
+ assert.NoError(t, err)
+ assert.Len(t, columnIssues, 1) // column1 has 2 issues; column2 is not included because it has 0 issues
+ assert.Len(t, columnIssues[column1.ID], 2) // the admin doer can visit both issues, one from a public repository and one from a private repository
+ })
+
+ t.Run("Anonymous user", func(t *testing.T) {
+ columnIssues, err := LoadIssuesFromProject(db.DefaultContext, projects[0], &issues_model.IssuesOptions{
+ AllPublic: true,
+ })
+ assert.NoError(t, err)
+ assert.Len(t, columnIssues, 1)
+ assert.Len(t, columnIssues[column1.ID], 1) // anonymous user can only visit public repo issues
+ })
+
+ t.Run("Authenticated user with no permission to the private repo", func(t *testing.T) {
+ columnIssues, err := LoadIssuesFromProject(db.DefaultContext, projects[0], &issues_model.IssuesOptions{
+ Owner: org3.AsUser(),
+ Doer: user2,
+ })
+ assert.NoError(t, err)
+ assert.Len(t, columnIssues, 1)
+ assert.Len(t, columnIssues[column1.ID], 1) // user2 can only visit public repo issues
+ })
+ })
+
+ t.Run("Repository projects", func(t *testing.T) {
+ repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+
+ projects, err := db.Find[project_model.Project](db.DefaultContext, project_model.SearchOptions{
+ RepoID: repo1.ID,
+ })
+ assert.NoError(t, err)
+ assert.Len(t, projects, 1)
+ assert.EqualValues(t, 1, projects[0].ID)
+
+ t.Run("Authenticated user", func(t *testing.T) {
+ columnIssues, err := LoadIssuesFromProject(db.DefaultContext, projects[0], &issues_model.IssuesOptions{
+ RepoIDs: []int64{repo1.ID},
+ Doer: userAdmin,
+ })
+ assert.NoError(t, err)
+ assert.Len(t, columnIssues, 3)
+ assert.Len(t, columnIssues[1], 2)
+ assert.Len(t, columnIssues[2], 1)
+ assert.Len(t, columnIssues[3], 1)
+ })
+
+ t.Run("Anonymous user", func(t *testing.T) {
+ columnIssues, err := LoadIssuesFromProject(db.DefaultContext, projects[0], &issues_model.IssuesOptions{
+ AllPublic: true,
+ })
+ assert.NoError(t, err)
+ assert.Len(t, columnIssues, 3)
+ assert.Len(t, columnIssues[1], 2)
+ assert.Len(t, columnIssues[2], 1)
+ assert.Len(t, columnIssues[3], 1)
+ })
+
+ t.Run("Authenticated user with no permission to the private repo", func(t *testing.T) {
+ columnIssues, err := LoadIssuesFromProject(db.DefaultContext, projects[0], &issues_model.IssuesOptions{
+ RepoIDs: []int64{repo1.ID},
+ Doer: user2,
+ })
+ assert.NoError(t, err)
+ assert.Len(t, columnIssues, 3)
+ assert.Len(t, columnIssues[1], 2)
+ assert.Len(t, columnIssues[2], 1)
+ assert.Len(t, columnIssues[3], 1)
+ })
+ })
+}
diff --git a/services/projects/main_test.go b/services/projects/main_test.go
new file mode 100644
index 0000000000..d39c82a140
--- /dev/null
+++ b/services/projects/main_test.go
@@ -0,0 +1,17 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package project
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+
+ _ "code.gitea.io/gitea/models/actions"
+ _ "code.gitea.io/gitea/models/activities"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
diff --git a/services/pull/check.go b/services/pull/check.go
index e1adc3ca3b..9b159891d7 100644
--- a/services/pull/check.go
+++ b/services/pull/check.go
@@ -206,9 +206,9 @@ func getMergeCommit(ctx context.Context, pr *issues_model.PullRequest) (*git.Com
prHeadRef := pr.GetGitRefName()
// Check if the pull request is merged into BaseBranch
- if _, _, err := git.NewCommand(ctx, "merge-base", "--is-ancestor").
+ if _, _, err := git.NewCommand("merge-base", "--is-ancestor").
AddDynamicArguments(prHeadRef, pr.BaseBranch).
- RunStdString(&git.RunOpts{Dir: pr.BaseRepo.RepoPath()}); err != nil {
+ RunStdString(ctx, &git.RunOpts{Dir: pr.BaseRepo.RepoPath()}); err != nil {
if strings.Contains(err.Error(), "exit status 1") {
// prHeadRef is not an ancestor of the base branch
return nil, nil
@@ -234,9 +234,9 @@ func getMergeCommit(ctx context.Context, pr *issues_model.PullRequest) (*git.Com
objectFormat := git.ObjectFormatFromName(pr.BaseRepo.ObjectFormatName)
// Get the commit from BaseBranch where the pull request got merged
- mergeCommit, _, err := git.NewCommand(ctx, "rev-list", "--ancestry-path", "--merges", "--reverse").
- AddDynamicArguments(prHeadCommitID + ".." + pr.BaseBranch).
- RunStdString(&git.RunOpts{Dir: pr.BaseRepo.RepoPath()})
+ mergeCommit, _, err := git.NewCommand("rev-list", "--ancestry-path", "--merges", "--reverse").
+ AddDynamicArguments(prHeadCommitID+".."+pr.BaseBranch).
+ RunStdString(ctx, &git.RunOpts{Dir: pr.BaseRepo.RepoPath()})
if err != nil {
return nil, fmt.Errorf("git rev-list --ancestry-path --merges --reverse: %w", err)
} else if len(mergeCommit) < objectFormat.FullLength() {
diff --git a/services/pull/check_test.go b/services/pull/check_test.go
index dcf5f7b93a..6d85ac158e 100644
--- a/services/pull/check_test.go
+++ b/services/pull/check_test.go
@@ -5,7 +5,6 @@
package pull
import (
- "context"
"strconv"
"testing"
"time"
@@ -33,7 +32,7 @@ func TestPullRequest_AddToTaskQueue(t *testing.T) {
cfg, err := setting.GetQueueSettings(setting.CfgProvider, "pr_patch_checker")
assert.NoError(t, err)
- prPatchCheckerQueue, err = queue.NewWorkerPoolQueueWithContext(context.Background(), "pr_patch_checker", cfg, testHandler, true)
+ prPatchCheckerQueue, err = queue.NewWorkerPoolQueueWithContext(t.Context(), "pr_patch_checker", cfg, testHandler, true)
assert.NoError(t, err)
pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2})
@@ -52,7 +51,7 @@ func TestPullRequest_AddToTaskQueue(t *testing.T) {
select {
case id := <-idChan:
- assert.EqualValues(t, pr.ID, id)
+ assert.Equal(t, pr.ID, id)
case <-time.After(time.Second):
assert.FailNow(t, "Timeout: nothing was added to pullRequestQueue")
}
diff --git a/services/pull/commit_status.go b/services/pull/commit_status.go
index aa1ad7cd66..0bfff21746 100644
--- a/services/pull/commit_status.go
+++ b/services/pull/commit_status.go
@@ -10,7 +10,6 @@ import (
"code.gitea.io/gitea/models/db"
git_model "code.gitea.io/gitea/models/git"
issues_model "code.gitea.io/gitea/models/issues"
- "code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/structs"
@@ -131,10 +130,10 @@ func GetPullRequestCommitStatusState(ctx context.Context, pr *issues_model.PullR
}
defer closer.Close()
- if pr.Flow == issues_model.PullRequestFlowGithub && !headGitRepo.IsBranchExist(pr.HeadBranch) {
+ if pr.Flow == issues_model.PullRequestFlowGithub && !gitrepo.IsBranchExist(ctx, pr.HeadRepo, pr.HeadBranch) {
return "", errors.New("Head branch does not exist, can not merge")
}
- if pr.Flow == issues_model.PullRequestFlowAGit && !git.IsReferenceExist(ctx, headGitRepo.Path, pr.GetGitRefName()) {
+ if pr.Flow == issues_model.PullRequestFlowAGit && !gitrepo.IsReferenceExist(ctx, pr.HeadRepo, pr.GetGitRefName()) {
return "", errors.New("Head branch does not exist, can not merge")
}
diff --git a/services/pull/merge.go b/services/pull/merge.go
index 9c909ef795..1e1ca55bc1 100644
--- a/services/pull/merge.go
+++ b/services/pull/merge.go
@@ -211,7 +211,15 @@ func Merge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.U
}
defer releaser()
defer func() {
- go AddTestPullRequestTask(doer, pr.BaseRepo.ID, pr.BaseBranch, false, "", "")
+ go AddTestPullRequestTask(TestPullRequestOptions{
+ RepoID: pr.BaseRepo.ID,
+ Doer: doer,
+ Branch: pr.BaseBranch,
+ IsSync: false,
+ IsForcePush: false,
+ OldCommitID: "",
+ NewCommitID: "",
+ })
}()
_, err = doMergeAndPush(ctx, pr, doer, mergeStyle, expectedHeadCommitID, message, repo_module.PushTriggerPRMergeToBase)
@@ -356,12 +364,12 @@ func doMergeAndPush(ctx context.Context, pr *issues_model.PullRequest, doer *use
)
mergeCtx.env = append(mergeCtx.env, repo_module.EnvPushTrigger+"="+string(pushTrigger))
- pushCmd := git.NewCommand(ctx, "push", "origin").AddDynamicArguments(baseBranch + ":" + git.BranchPrefix + pr.BaseBranch)
+ pushCmd := git.NewCommand("push", "origin").AddDynamicArguments(baseBranch + ":" + git.BranchPrefix + pr.BaseBranch)
// Push back to upstream.
// This cause an api call to "/api/internal/hook/post-receive/...",
// If it's merge, all db transaction and operations should be there but not here to prevent deadlock.
- if err := pushCmd.Run(mergeCtx.RunOpts()); err != nil {
+ if err := pushCmd.Run(ctx, mergeCtx.RunOpts()); err != nil {
if strings.Contains(mergeCtx.errbuf.String(), "non-fast-forward") {
return "", &git.ErrPushOutOfDate{
StdOut: mergeCtx.outbuf.String(),
@@ -386,13 +394,13 @@ func doMergeAndPush(ctx context.Context, pr *issues_model.PullRequest, doer *use
}
func commitAndSignNoAuthor(ctx *mergeContext, message string) error {
- cmdCommit := git.NewCommand(ctx, "commit").AddOptionFormat("--message=%s", message)
+ cmdCommit := git.NewCommand("commit").AddOptionFormat("--message=%s", message)
if ctx.signKeyID == "" {
cmdCommit.AddArguments("--no-gpg-sign")
} else {
cmdCommit.AddOptionFormat("-S%s", ctx.signKeyID)
}
- if err := cmdCommit.Run(ctx.RunOpts()); err != nil {
+ if err := cmdCommit.Run(ctx, ctx.RunOpts()); err != nil {
log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
return fmt.Errorf("git commit %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
}
@@ -453,7 +461,7 @@ func (err ErrMergeDivergingFastForwardOnly) Error() string {
}
func runMergeCommand(ctx *mergeContext, mergeStyle repo_model.MergeStyle, cmd *git.Command) error {
- if err := cmd.Run(ctx.RunOpts()); err != nil {
+ if err := cmd.Run(ctx, ctx.RunOpts()); err != nil {
// Merge will leave a MERGE_HEAD file in the .git folder if there is a conflict
if _, statErr := os.Stat(filepath.Join(ctx.tmpBasePath, ".git", "MERGE_HEAD")); statErr == nil {
// We have a merge conflict error
diff --git a/services/pull/merge_ff_only.go b/services/pull/merge_ff_only.go
index f57c732104..6c3a68b95b 100644
--- a/services/pull/merge_ff_only.go
+++ b/services/pull/merge_ff_only.go
@@ -11,7 +11,7 @@ import (
// doMergeStyleFastForwardOnly merges the tracking into the current HEAD - which is assumed to be staging branch (equal to the pr.BaseBranch)
func doMergeStyleFastForwardOnly(ctx *mergeContext) error {
- cmd := git.NewCommand(ctx, "merge", "--ff-only").AddDynamicArguments(trackingBranch)
+ cmd := git.NewCommand("merge", "--ff-only").AddDynamicArguments(trackingBranch)
if err := runMergeCommand(ctx, repo_model.MergeStyleFastForwardOnly, cmd); err != nil {
log.Error("%-v Unable to merge tracking into base: %v", ctx.pr, err)
return err
diff --git a/services/pull/merge_merge.go b/services/pull/merge_merge.go
index bf56c071db..118d21c7cd 100644
--- a/services/pull/merge_merge.go
+++ b/services/pull/merge_merge.go
@@ -11,7 +11,7 @@ import (
// doMergeStyleMerge merges the tracking branch into the current HEAD - which is assumed to be the staging branch (equal to the pr.BaseBranch)
func doMergeStyleMerge(ctx *mergeContext, message string) error {
- cmd := git.NewCommand(ctx, "merge", "--no-ff", "--no-commit").AddDynamicArguments(trackingBranch)
+ cmd := git.NewCommand("merge", "--no-ff", "--no-commit").AddDynamicArguments(trackingBranch)
if err := runMergeCommand(ctx, repo_model.MergeStyleMerge, cmd); err != nil {
log.Error("%-v Unable to merge tracking into base: %v", ctx.pr, err)
return err
diff --git a/services/pull/merge_prepare.go b/services/pull/merge_prepare.go
index 2e1cc8cf85..593cba550a 100644
--- a/services/pull/merge_prepare.go
+++ b/services/pull/merge_prepare.go
@@ -73,7 +73,7 @@ func createTemporaryRepoForMerge(ctx context.Context, pr *issues_model.PullReque
}
if expectedHeadCommitID != "" {
- trackingCommitID, _, err := git.NewCommand(ctx, "show-ref", "--hash").AddDynamicArguments(git.BranchPrefix + trackingBranch).RunStdString(&git.RunOpts{Dir: mergeCtx.tmpBasePath})
+ trackingCommitID, _, err := git.NewCommand("show-ref", "--hash").AddDynamicArguments(git.BranchPrefix+trackingBranch).RunStdString(ctx, &git.RunOpts{Dir: mergeCtx.tmpBasePath})
if err != nil {
defer cancel()
log.Error("failed to get sha of head branch in %-v: show-ref[%s] --hash refs/heads/tracking: %v", mergeCtx.pr, mergeCtx.tmpBasePath, err)
@@ -151,8 +151,8 @@ func prepareTemporaryRepoForMerge(ctx *mergeContext) error {
}
setConfig := func(key, value string) error {
- if err := git.NewCommand(ctx, "config", "--local").AddDynamicArguments(key, value).
- Run(ctx.RunOpts()); err != nil {
+ if err := git.NewCommand("config", "--local").AddDynamicArguments(key, value).
+ Run(ctx, ctx.RunOpts()); err != nil {
log.Error("git config [%s -> %q]: %v\n%s\n%s", key, value, err, ctx.outbuf.String(), ctx.errbuf.String())
return fmt.Errorf("git config [%s -> %q]: %w\n%s\n%s", key, value, err, ctx.outbuf.String(), ctx.errbuf.String())
}
@@ -184,8 +184,8 @@ func prepareTemporaryRepoForMerge(ctx *mergeContext) error {
}
// Read base branch index
- if err := git.NewCommand(ctx, "read-tree", "HEAD").
- Run(ctx.RunOpts()); err != nil {
+ if err := git.NewCommand("read-tree", "HEAD").
+ Run(ctx, ctx.RunOpts()); err != nil {
log.Error("git read-tree HEAD: %v\n%s\n%s", err, ctx.outbuf.String(), ctx.errbuf.String())
return fmt.Errorf("Unable to read base branch in to the index: %w\n%s\n%s", err, ctx.outbuf.String(), ctx.errbuf.String())
}
@@ -221,8 +221,8 @@ func getDiffTree(ctx context.Context, repoPath, baseBranch, headBranch string, o
return 0, nil, nil
}
- err = git.NewCommand(ctx, "diff-tree", "--no-commit-id", "--name-only", "-r", "-r", "-z", "--root").AddDynamicArguments(baseBranch, headBranch).
- Run(&git.RunOpts{
+ err = git.NewCommand("diff-tree", "--no-commit-id", "--name-only", "-r", "-r", "-z", "--root").AddDynamicArguments(baseBranch, headBranch).
+ Run(ctx, &git.RunOpts{
Dir: repoPath,
Stdout: diffOutWriter,
PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error {
@@ -272,16 +272,16 @@ func (err ErrRebaseConflicts) Error() string {
// if there is a conflict it will return an ErrRebaseConflicts
func rebaseTrackingOnToBase(ctx *mergeContext, mergeStyle repo_model.MergeStyle) error {
// Checkout head branch
- if err := git.NewCommand(ctx, "checkout", "-b").AddDynamicArguments(stagingBranch, trackingBranch).
- Run(ctx.RunOpts()); err != nil {
+ if err := git.NewCommand("checkout", "-b").AddDynamicArguments(stagingBranch, trackingBranch).
+ Run(ctx, ctx.RunOpts()); err != nil {
return fmt.Errorf("unable to git checkout tracking as staging in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
}
ctx.outbuf.Reset()
ctx.errbuf.Reset()
// Rebase before merging
- if err := git.NewCommand(ctx, "rebase").AddDynamicArguments(baseBranch).
- Run(ctx.RunOpts()); err != nil {
+ if err := git.NewCommand("rebase").AddDynamicArguments(baseBranch).
+ Run(ctx, ctx.RunOpts()); err != nil {
// Rebase will leave a REBASE_HEAD file in .git if there is a conflict
if _, statErr := os.Stat(filepath.Join(ctx.tmpBasePath, ".git", "REBASE_HEAD")); statErr == nil {
var commitSha string
diff --git a/services/pull/merge_rebase.go b/services/pull/merge_rebase.go
index ecf376220e..dd7c8761f0 100644
--- a/services/pull/merge_rebase.go
+++ b/services/pull/merge_rebase.go
@@ -16,7 +16,7 @@ import (
// getRebaseAmendMessage composes the message to amend commits in rebase merge of a pull request.
func getRebaseAmendMessage(ctx *mergeContext, baseGitRepo *git.Repository) (message string, err error) {
// Get existing commit message.
- commitMessage, _, err := git.NewCommand(ctx, "show", "--format=%B", "-s").RunStdString(&git.RunOpts{Dir: ctx.tmpBasePath})
+ commitMessage, _, err := git.NewCommand("show", "--format=%B", "-s").RunStdString(ctx, &git.RunOpts{Dir: ctx.tmpBasePath})
if err != nil {
return "", err
}
@@ -42,7 +42,7 @@ func doMergeRebaseFastForward(ctx *mergeContext) error {
return fmt.Errorf("Failed to get full commit id for HEAD: %w", err)
}
- cmd := git.NewCommand(ctx, "merge", "--ff-only").AddDynamicArguments(stagingBranch)
+ cmd := git.NewCommand("merge", "--ff-only").AddDynamicArguments(stagingBranch)
if err := runMergeCommand(ctx, repo_model.MergeStyleRebase, cmd); err != nil {
log.Error("Unable to merge staging into base: %v", err)
return err
@@ -73,7 +73,7 @@ func doMergeRebaseFastForward(ctx *mergeContext) error {
}
if newMessage != "" {
- if err := git.NewCommand(ctx, "commit", "--amend").AddOptionFormat("--message=%s", newMessage).Run(&git.RunOpts{Dir: ctx.tmpBasePath}); err != nil {
+ if err := git.NewCommand("commit", "--amend").AddOptionFormat("--message=%s", newMessage).Run(ctx, &git.RunOpts{Dir: ctx.tmpBasePath}); err != nil {
log.Error("Unable to amend commit message: %v", err)
return err
}
@@ -84,7 +84,7 @@ func doMergeRebaseFastForward(ctx *mergeContext) error {
// Perform rebase merge with merge commit.
func doMergeRebaseMergeCommit(ctx *mergeContext, message string) error {
- cmd := git.NewCommand(ctx, "merge").AddArguments("--no-ff", "--no-commit").AddDynamicArguments(stagingBranch)
+ cmd := git.NewCommand("merge").AddArguments("--no-ff", "--no-commit").AddDynamicArguments(stagingBranch)
if err := runMergeCommand(ctx, repo_model.MergeStyleRebaseMerge, cmd); err != nil {
log.Error("Unable to merge staging into base: %v", err)
@@ -105,8 +105,8 @@ func doMergeStyleRebase(ctx *mergeContext, mergeStyle repo_model.MergeStyle, mes
}
// Checkout base branch again
- if err := git.NewCommand(ctx, "checkout").AddDynamicArguments(baseBranch).
- Run(ctx.RunOpts()); err != nil {
+ if err := git.NewCommand("checkout").AddDynamicArguments(baseBranch).
+ Run(ctx, ctx.RunOpts()); err != nil {
log.Error("git checkout base prior to merge post staging rebase %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
return fmt.Errorf("git checkout base prior to merge post staging rebase %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
}
diff --git a/services/pull/merge_squash.go b/services/pull/merge_squash.go
index 7258671888..076189fd7a 100644
--- a/services/pull/merge_squash.go
+++ b/services/pull/merge_squash.go
@@ -58,7 +58,7 @@ func doMergeStyleSquash(ctx *mergeContext, message string) error {
return fmt.Errorf("getAuthorSignatureSquash: %w", err)
}
- cmdMerge := git.NewCommand(ctx, "merge", "--squash").AddDynamicArguments(trackingBranch)
+ cmdMerge := git.NewCommand("merge", "--squash").AddDynamicArguments(trackingBranch)
if err := runMergeCommand(ctx, repo_model.MergeStyleSquash, cmdMerge); err != nil {
log.Error("%-v Unable to merge --squash tracking into base: %v", ctx.pr, err)
return err
@@ -71,7 +71,7 @@ func doMergeStyleSquash(ctx *mergeContext, message string) error {
}
message += fmt.Sprintf("\nCo-committed-by: %s\n", sig.String())
}
- cmdCommit := git.NewCommand(ctx, "commit").
+ cmdCommit := git.NewCommand("commit").
AddOptionFormat("--author='%s <%s>'", sig.Name, sig.Email).
AddOptionFormat("--message=%s", message)
if ctx.signKeyID == "" {
@@ -79,7 +79,7 @@ func doMergeStyleSquash(ctx *mergeContext, message string) error {
} else {
cmdCommit.AddOptionFormat("-S%s", ctx.signKeyID)
}
- if err := cmdCommit.Run(ctx.RunOpts()); err != nil {
+ if err := cmdCommit.Run(ctx, ctx.RunOpts()); err != nil {
log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
return fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", ctx.pr.HeadRepo.FullName(), ctx.pr.HeadBranch, ctx.pr.BaseRepo.FullName(), ctx.pr.BaseBranch, err, ctx.outbuf.String(), ctx.errbuf.String())
}
diff --git a/services/pull/patch.go b/services/pull/patch.go
index 13623d73c6..29f2f992ab 100644
--- a/services/pull/patch.go
+++ b/services/pull/patch.go
@@ -92,7 +92,7 @@ func testPatch(ctx context.Context, prCtx *prContext, pr *issues_model.PullReque
defer gitRepo.Close()
// 1. update merge base
- pr.MergeBase, _, err = git.NewCommand(ctx, "merge-base", "--", "base", "tracking").RunStdString(&git.RunOpts{Dir: prCtx.tmpBasePath})
+ pr.MergeBase, _, err = git.NewCommand("merge-base", "--", "base", "tracking").RunStdString(ctx, &git.RunOpts{Dir: prCtx.tmpBasePath})
if err != nil {
var err2 error
pr.MergeBase, err2 = gitRepo.GetRefCommitID(git.BranchPrefix + "base")
@@ -192,7 +192,7 @@ func attemptMerge(ctx context.Context, file *unmergedFile, tmpBasePath string, f
}
// Need to get the objects from the object db to attempt to merge
- root, _, err := git.NewCommand(ctx, "unpack-file").AddDynamicArguments(file.stage1.sha).RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ root, _, err := git.NewCommand("unpack-file").AddDynamicArguments(file.stage1.sha).RunStdString(ctx, &git.RunOpts{Dir: tmpBasePath})
if err != nil {
return fmt.Errorf("unable to get root object: %s at path: %s for merging. Error: %w", file.stage1.sha, file.stage1.path, err)
}
@@ -201,7 +201,7 @@ func attemptMerge(ctx context.Context, file *unmergedFile, tmpBasePath string, f
_ = util.Remove(filepath.Join(tmpBasePath, root))
}()
- base, _, err := git.NewCommand(ctx, "unpack-file").AddDynamicArguments(file.stage2.sha).RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ base, _, err := git.NewCommand("unpack-file").AddDynamicArguments(file.stage2.sha).RunStdString(ctx, &git.RunOpts{Dir: tmpBasePath})
if err != nil {
return fmt.Errorf("unable to get base object: %s at path: %s for merging. Error: %w", file.stage2.sha, file.stage2.path, err)
}
@@ -209,7 +209,7 @@ func attemptMerge(ctx context.Context, file *unmergedFile, tmpBasePath string, f
defer func() {
_ = util.Remove(base)
}()
- head, _, err := git.NewCommand(ctx, "unpack-file").AddDynamicArguments(file.stage3.sha).RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ head, _, err := git.NewCommand("unpack-file").AddDynamicArguments(file.stage3.sha).RunStdString(ctx, &git.RunOpts{Dir: tmpBasePath})
if err != nil {
return fmt.Errorf("unable to get head object:%s at path: %s for merging. Error: %w", file.stage3.sha, file.stage3.path, err)
}
@@ -219,13 +219,13 @@ func attemptMerge(ctx context.Context, file *unmergedFile, tmpBasePath string, f
}()
// now git merge-file annoyingly takes a different order to the merge-tree ...
- _, _, conflictErr := git.NewCommand(ctx, "merge-file").AddDynamicArguments(base, root, head).RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ _, _, conflictErr := git.NewCommand("merge-file").AddDynamicArguments(base, root, head).RunStdString(ctx, &git.RunOpts{Dir: tmpBasePath})
if conflictErr != nil {
return &errMergeConflict{file.stage2.path}
}
// base now contains the merged data
- hash, _, err := git.NewCommand(ctx, "hash-object", "-w", "--path").AddDynamicArguments(file.stage2.path, base).RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ hash, _, err := git.NewCommand("hash-object", "-w", "--path").AddDynamicArguments(file.stage2.path, base).RunStdString(ctx, &git.RunOpts{Dir: tmpBasePath})
if err != nil {
return err
}
@@ -250,7 +250,7 @@ func AttemptThreeWayMerge(ctx context.Context, gitPath string, gitRepo *git.Repo
defer cancel()
// First we use read-tree to do a simple three-way merge
- if _, _, err := git.NewCommand(ctx, "read-tree", "-m").AddDynamicArguments(base, ours, theirs).RunStdString(&git.RunOpts{Dir: gitPath}); err != nil {
+ if _, _, err := git.NewCommand("read-tree", "-m").AddDynamicArguments(base, ours, theirs).RunStdString(ctx, &git.RunOpts{Dir: gitPath}); err != nil {
log.Error("Unable to run read-tree -m! Error: %v", err)
return false, nil, fmt.Errorf("unable to run read-tree -m! Error: %w", err)
}
@@ -324,9 +324,9 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo *
// No conflicts detected so we need to check if the patch is empty...
// a. Write the newly merged tree and check the new tree-hash
var treeHash string
- treeHash, _, err = git.NewCommand(ctx, "write-tree").RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ treeHash, _, err = git.NewCommand("write-tree").RunStdString(ctx, &git.RunOpts{Dir: tmpBasePath})
if err != nil {
- lsfiles, _, _ := git.NewCommand(ctx, "ls-files", "-u").RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ lsfiles, _, _ := git.NewCommand("ls-files", "-u").RunStdString(ctx, &git.RunOpts{Dir: tmpBasePath})
return false, fmt.Errorf("unable to write unconflicted tree: %w\n`git ls-files -u`:\n%s", err, lsfiles)
}
treeHash = strings.TrimSpace(treeHash)
@@ -387,7 +387,7 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo *
log.Trace("PullRequest[%d].testPatch (patchPath): %s", pr.ID, patchPath)
// 4. Read the base branch in to the index of the temporary repository
- _, _, err = git.NewCommand(gitRepo.Ctx, "read-tree", "base").RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ _, _, err = git.NewCommand("read-tree", "base").RunStdString(gitRepo.Ctx, &git.RunOpts{Dir: tmpBasePath})
if err != nil {
return false, fmt.Errorf("git read-tree %s: %w", pr.BaseBranch, err)
}
@@ -400,7 +400,7 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo *
prConfig := prUnit.PullRequestsConfig()
// 6. Prepare the arguments to apply the patch against the index
- cmdApply := git.NewCommand(gitRepo.Ctx, "apply", "--check", "--cached")
+ cmdApply := git.NewCommand("apply", "--check", "--cached")
if prConfig.IgnoreWhitespaceConflicts {
cmdApply.AddArguments("--ignore-whitespace")
}
@@ -431,7 +431,7 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo *
// 8. Run the check command
conflict = false
- err = cmdApply.Run(&git.RunOpts{
+ err = cmdApply.Run(gitRepo.Ctx, &git.RunOpts{
Dir: tmpBasePath,
Stderr: stderrWriter,
PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error {
diff --git a/services/pull/patch_unmerged.go b/services/pull/patch_unmerged.go
index c60c48d923..200d2233e9 100644
--- a/services/pull/patch_unmerged.go
+++ b/services/pull/patch_unmerged.go
@@ -72,8 +72,8 @@ func readUnmergedLsFileLines(ctx context.Context, tmpBasePath string, outputChan
}()
stderr := &strings.Builder{}
- err = git.NewCommand(ctx, "ls-files", "-u", "-z").
- Run(&git.RunOpts{
+ err = git.NewCommand("ls-files", "-u", "-z").
+ Run(ctx, &git.RunOpts{
Dir: tmpBasePath,
Stdout: lsFilesWriter,
Stderr: stderr,
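Run takes the context plus a RunOpts whose Stdout and Stderr fields are plain io.Writers, which is how the hunk above streams "git ls-files -u -z" into a pipe. A sketch of the same shape with in-memory buffers instead of a pipe (function and variable names are illustrative):

package pull

import (
	"bytes"
	"context"
	"fmt"

	"code.gitea.io/gitea/modules/git"
)

// listUnmergedEntries collects the NUL-separated "git ls-files -u -z" output
// into a buffer, illustrating the Run(ctx, &git.RunOpts{...}) call shape.
func listUnmergedEntries(ctx context.Context, tmpBasePath string) ([]byte, error) {
	stdout := &bytes.Buffer{}
	stderr := &bytes.Buffer{}
	if err := git.NewCommand("ls-files", "-u", "-z").
		Run(ctx, &git.RunOpts{
			Dir:    tmpBasePath,
			Stdout: stdout,
			Stderr: stderr,
		}); err != nil {
		return nil, fmt.Errorf("git ls-files -u -z: %w (stderr: %s)", err, stderr.String())
	}
	return stdout.Bytes(), nil
}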
diff --git a/services/pull/protected_branch.go b/services/pull/protected_branch.go
new file mode 100644
index 0000000000..181bd32f44
--- /dev/null
+++ b/services/pull/protected_branch.go
@@ -0,0 +1,49 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "context"
+
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/gitrepo"
+)
+
+func CreateOrUpdateProtectedBranch(ctx context.Context, repo *repo_model.Repository,
+ protectBranch *git_model.ProtectedBranch, whitelistOptions git_model.WhitelistOptions,
+) error {
+ err := git_model.UpdateProtectBranch(ctx, repo, protectBranch, whitelistOptions)
+ if err != nil {
+ return err
+ }
+
+ isPlainRule := !git_model.IsRuleNameSpecial(protectBranch.RuleName)
+ var isBranchExist bool
+ if isPlainRule {
+ // TODO: read the database directly to check if the branch exists
+ isBranchExist = gitrepo.IsBranchExist(ctx, repo, protectBranch.RuleName)
+ }
+
+ if isBranchExist {
+ if err := CheckPRsForBaseBranch(ctx, repo, protectBranch.RuleName); err != nil {
+ return err
+ }
+ } else {
+ if !isPlainRule {
+ // FIXME: since we only need to recheck files protected rules, we could improve this
+ matchedBranches, err := git_model.FindAllMatchedBranches(ctx, repo.ID, protectBranch.RuleName)
+ if err != nil {
+ return err
+ }
+ for _, branchName := range matchedBranches {
+ if err = CheckPRsForBaseBranch(ctx, repo, branchName); err != nil {
+ return err
+ }
+ }
+ }
+ }
+
+ return nil
+}
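A hypothetical caller of the new CreateOrUpdateProtectedBranch helper, for example from a branch-settings handler, would look roughly like the sketch below; the rule name and empty whitelist are placeholders, and the ProtectedBranch fields are assumed from the model used above:

package web

import (
	"context"

	git_model "code.gitea.io/gitea/models/git"
	repo_model "code.gitea.io/gitea/models/repo"
	pull_service "code.gitea.io/gitea/services/pull"
)

// protectReleaseBranches is an illustrative caller. Glob rules such as
// "release/*" are re-checked per matched branch via FindAllMatchedBranches.
func protectReleaseBranches(ctx context.Context, repo *repo_model.Repository) error {
	pb := &git_model.ProtectedBranch{
		RepoID:   repo.ID,     // assumed model field
		RuleName: "release/*", // placeholder rule
	}
	return pull_service.CreateOrUpdateProtectedBranch(ctx, repo, pb, git_model.WhitelistOptions{})
}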
diff --git a/services/pull/pull.go b/services/pull/pull.go
index 5d3758eca6..4641d4ac40 100644
--- a/services/pull/pull.go
+++ b/services/pull/pull.go
@@ -176,7 +176,7 @@ func NewPullRequest(ctx context.Context, opts *NewPullRequestOptions) error {
}
if !pr.IsWorkInProgress(ctx) {
- reviewNotifiers, err = issue_service.PullRequestCodeOwnersReview(ctx, issue, pr)
+ reviewNotifiers, err = issue_service.PullRequestCodeOwnersReview(ctx, pr)
if err != nil {
return err
}
@@ -372,19 +372,29 @@ func checkForInvalidation(ctx context.Context, requests issues_model.PullRequest
return nil
}
+type TestPullRequestOptions struct {
+ RepoID int64
+ Doer *user_model.User
+ Branch string
+ IsSync bool // True means it's a pull request synchronization, false means it's triggered for pull request merging or updating
+ IsForcePush bool
+ OldCommitID string
+ NewCommitID string
+}
+
// AddTestPullRequestTask adds new test tasks by given head/base repository and head/base branch,
// and generate new patch for testing as needed.
-func AddTestPullRequestTask(doer *user_model.User, repoID int64, branch string, isSync bool, oldCommitID, newCommitID string) {
- log.Trace("AddTestPullRequestTask [head_repo_id: %d, head_branch: %s]: finding pull requests", repoID, branch)
+func AddTestPullRequestTask(opts TestPullRequestOptions) {
+ log.Trace("AddTestPullRequestTask [head_repo_id: %d, head_branch: %s]: finding pull requests", opts.RepoID, opts.Branch)
graceful.GetManager().RunWithShutdownContext(func(ctx context.Context) {
// There is no sensible way to shut this down ":-("
// If you don't let it run all the way then you will lose data
// TODO: graceful: AddTestPullRequestTask needs to become a queue!
// GetUnmergedPullRequestsByHeadInfo() only return open and unmerged PR.
- prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(ctx, repoID, branch)
+ prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(ctx, opts.RepoID, opts.Branch)
if err != nil {
- log.Error("Find pull requests [head_repo_id: %d, head_branch: %s]: %v", repoID, branch, err)
+ log.Error("Find pull requests [head_repo_id: %d, head_branch: %s]: %v", opts.RepoID, opts.Branch, err)
return
}
@@ -400,25 +410,24 @@ func AddTestPullRequestTask(doer *user_model.User, repoID int64, branch string,
}
AddToTaskQueue(ctx, pr)
- comment, err := CreatePushPullComment(ctx, doer, pr, oldCommitID, newCommitID)
+ comment, err := CreatePushPullComment(ctx, opts.Doer, pr, opts.OldCommitID, opts.NewCommitID)
if err == nil && comment != nil {
- notify_service.PullRequestPushCommits(ctx, doer, pr, comment)
+ notify_service.PullRequestPushCommits(ctx, opts.Doer, pr, comment)
}
}
- if isSync {
- requests := issues_model.PullRequestList(prs)
- if err = requests.LoadAttributes(ctx); err != nil {
+ if opts.IsSync {
+ if err = prs.LoadAttributes(ctx); err != nil {
log.Error("PullRequestList.LoadAttributes: %v", err)
}
- if invalidationErr := checkForInvalidation(ctx, requests, repoID, doer, branch); invalidationErr != nil {
+ if invalidationErr := checkForInvalidation(ctx, prs, opts.RepoID, opts.Doer, opts.Branch); invalidationErr != nil {
log.Error("checkForInvalidation: %v", invalidationErr)
}
if err == nil {
for _, pr := range prs {
objectFormat := git.ObjectFormatFromName(pr.BaseRepo.ObjectFormatName)
- if newCommitID != "" && newCommitID != objectFormat.EmptyObjectID().String() {
- changed, err := checkIfPRContentChanged(ctx, pr, oldCommitID, newCommitID)
+ if opts.NewCommitID != "" && opts.NewCommitID != objectFormat.EmptyObjectID().String() {
+ changed, err := checkIfPRContentChanged(ctx, pr, opts.OldCommitID, opts.NewCommitID)
if err != nil {
log.Error("checkIfPRContentChanged: %v", err)
}
@@ -434,12 +443,12 @@ func AddTestPullRequestTask(doer *user_model.User, repoID int64, branch string,
log.Error("GetFirstMatchProtectedBranchRule: %v", err)
}
if pb != nil && pb.DismissStaleApprovals {
- if err := DismissApprovalReviews(ctx, doer, pr); err != nil {
+ if err := DismissApprovalReviews(ctx, opts.Doer, pr); err != nil {
log.Error("DismissApprovalReviews: %v", err)
}
}
}
- if err := issues_model.MarkReviewsAsNotStale(ctx, pr.IssueID, newCommitID); err != nil {
+ if err := issues_model.MarkReviewsAsNotStale(ctx, pr.IssueID, opts.NewCommitID); err != nil {
log.Error("MarkReviewsAsNotStale: %v", err)
}
divergence, err := GetDiverging(ctx, pr)
@@ -453,21 +462,36 @@ func AddTestPullRequestTask(doer *user_model.User, repoID int64, branch string,
}
}
- notify_service.PullRequestSynchronized(ctx, doer, pr)
+ if !pr.IsWorkInProgress(ctx) {
+ var reviewNotifiers []*issue_service.ReviewRequestNotifier
+ if opts.IsForcePush {
+ reviewNotifiers, err = issue_service.PullRequestCodeOwnersReview(ctx, pr)
+ } else {
+ reviewNotifiers, err = issue_service.PullRequestCodeOwnersReviewSpecialCommits(ctx, pr, opts.OldCommitID, opts.NewCommitID)
+ }
+ if err != nil {
+ log.Error("PullRequestCodeOwnersReview: %v", err)
+ }
+ if len(reviewNotifiers) > 0 {
+ issue_service.ReviewRequestNotify(ctx, pr.Issue, opts.Doer, reviewNotifiers)
+ }
+ }
+
+ notify_service.PullRequestSynchronized(ctx, opts.Doer, pr)
}
}
}
- log.Trace("AddTestPullRequestTask [base_repo_id: %d, base_branch: %s]: finding pull requests", repoID, branch)
- prs, err = issues_model.GetUnmergedPullRequestsByBaseInfo(ctx, repoID, branch)
+ log.Trace("AddTestPullRequestTask [base_repo_id: %d, base_branch: %s]: finding pull requests", opts.RepoID, opts.Branch)
+ prs, err = issues_model.GetUnmergedPullRequestsByBaseInfo(ctx, opts.RepoID, opts.Branch)
if err != nil {
- log.Error("Find pull requests [base_repo_id: %d, base_branch: %s]: %v", repoID, branch, err)
+ log.Error("Find pull requests [base_repo_id: %d, base_branch: %s]: %v", opts.RepoID, opts.Branch, err)
return
}
for _, pr := range prs {
divergence, err := GetDiverging(ctx, pr)
if err != nil {
- if git_model.IsErrBranchNotExist(err) && !git.IsBranchExist(ctx, pr.HeadRepo.RepoPath(), pr.HeadBranch) {
+ if git_model.IsErrBranchNotExist(err) && !gitrepo.IsBranchExist(ctx, pr.HeadRepo, pr.HeadBranch) {
log.Warn("Cannot test PR %s/%d: head_branch %s no longer exists", pr.BaseRepo.Name, pr.IssueID, pr.HeadBranch)
} else {
log.Error("GetDiverging: %v", err)
@@ -505,14 +529,14 @@ func checkIfPRContentChanged(ctx context.Context, pr *issues_model.PullRequest,
return false, fmt.Errorf("GetMergeBase: %w", err)
}
- cmd := git.NewCommand(ctx, "diff", "--name-only", "-z").AddDynamicArguments(newCommitID, oldCommitID, base)
+ cmd := git.NewCommand("diff", "--name-only", "-z").AddDynamicArguments(newCommitID, oldCommitID, base)
stdoutReader, stdoutWriter, err := os.Pipe()
if err != nil {
return false, fmt.Errorf("unable to open pipe for to run diff: %w", err)
}
stderr := new(bytes.Buffer)
- if err := cmd.Run(&git.RunOpts{
+ if err := cmd.Run(ctx, &git.RunOpts{
Dir: prCtx.tmpBasePath,
Stdout: stdoutWriter,
Stderr: stderr,
@@ -629,7 +653,7 @@ func UpdateRef(ctx context.Context, pr *issues_model.PullRequest) (err error) {
return err
}
- _, _, err = git.NewCommand(ctx, "update-ref").AddDynamicArguments(pr.GetGitRefName(), pr.HeadCommitID).RunStdString(&git.RunOpts{Dir: pr.BaseRepo.RepoPath()})
+ _, _, err = git.NewCommand("update-ref").AddDynamicArguments(pr.GetGitRefName(), pr.HeadCommitID).RunStdString(ctx, &git.RunOpts{Dir: pr.BaseRepo.RepoPath()})
if err != nil {
log.Error("Unable to update ref in base repository for PR[%d] Error: %v", pr.ID, err)
}
@@ -645,7 +669,7 @@ func retargetBranchPulls(ctx context.Context, doer *user_model.User, repoID int6
return err
}
- if err := issues_model.PullRequestList(prs).LoadAttributes(ctx); err != nil {
+ if err := prs.LoadAttributes(ctx); err != nil {
return err
}
@@ -672,11 +696,11 @@ func AdjustPullsCausedByBranchDeleted(ctx context.Context, doer *user_model.User
return err
}
- if err := issues_model.PullRequestList(prs).LoadAttributes(ctx); err != nil {
+ if err := prs.LoadAttributes(ctx); err != nil {
return err
}
- issues_model.PullRequestList(prs).SetHeadRepo(repo)
- if err := issues_model.PullRequestList(prs).LoadRepositories(ctx); err != nil {
+ prs.SetHeadRepo(repo)
+ if err := prs.LoadRepositories(ctx); err != nil {
return err
}
@@ -707,11 +731,11 @@ func AdjustPullsCausedByBranchDeleted(ctx context.Context, doer *user_model.User
return err
}
- if err := issues_model.PullRequestList(prs).LoadAttributes(ctx); err != nil {
+ if err := prs.LoadAttributes(ctx); err != nil {
return err
}
- issues_model.PullRequestList(prs).SetBaseRepo(repo)
- if err := issues_model.PullRequestList(prs).LoadRepositories(ctx); err != nil {
+ prs.SetBaseRepo(repo)
+ if err := prs.LoadRepositories(ctx); err != nil {
return err
}
@@ -744,7 +768,7 @@ func CloseRepoBranchesPulls(ctx context.Context, doer *user_model.User, repo *re
return err
}
- if err = issues_model.PullRequestList(prs).LoadAttributes(ctx); err != nil {
+ if err = prs.LoadAttributes(ctx); err != nil {
return err
}
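Callers of AddTestPullRequestTask now pass a single options struct instead of six positional arguments, which also carries the new IsForcePush flag used for the code-owner re-review above. An illustrative call site (surrounding names are placeholders):

package hooks

import (
	repo_model "code.gitea.io/gitea/models/repo"
	user_model "code.gitea.io/gitea/models/user"
	pull_service "code.gitea.io/gitea/services/pull"
)

// onBranchPushed queues PR re-testing for a pushed branch, flagging the push as
// a synchronization so stale reviews and code owners are handled.
func onBranchPushed(repo *repo_model.Repository, pusher *user_model.User, branch, oldCommitID, newCommitID string, forcePush bool) {
	pull_service.AddTestPullRequestTask(pull_service.TestPullRequestOptions{
		RepoID:      repo.ID,
		Doer:        pusher,
		Branch:      branch,
		IsSync:      true,
		IsForcePush: forcePush,
		OldCommitID: oldCommitID,
		NewCommitID: newCommitID,
	})
}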
diff --git a/services/pull/temp_repo.go b/services/pull/temp_repo.go
index e5753178b8..3f33370798 100644
--- a/services/pull/temp_repo.go
+++ b/services/pull/temp_repo.go
@@ -15,6 +15,7 @@ import (
issues_model "code.gitea.io/gitea/models/issues"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/log"
repo_module "code.gitea.io/gitea/modules/repository"
)
@@ -133,22 +134,22 @@ func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest)
return nil, nil, fmt.Errorf("Unable to add base repository to temporary repo [%s -> tmpBasePath]: %w", pr.BaseRepo.FullName(), err)
}
- if err := git.NewCommand(ctx, "remote", "add", "-t").AddDynamicArguments(pr.BaseBranch).AddArguments("-m").AddDynamicArguments(pr.BaseBranch).AddDynamicArguments("origin", baseRepoPath).
- Run(prCtx.RunOpts()); err != nil {
+ if err := git.NewCommand("remote", "add", "-t").AddDynamicArguments(pr.BaseBranch).AddArguments("-m").AddDynamicArguments(pr.BaseBranch).AddDynamicArguments("origin", baseRepoPath).
+ Run(ctx, prCtx.RunOpts()); err != nil {
log.Error("%-v Unable to add base repository as origin [%s -> %s]: %v\n%s\n%s", pr, pr.BaseRepo.FullName(), tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
cancel()
return nil, nil, fmt.Errorf("Unable to add base repository as origin [%s -> tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), err, prCtx.outbuf.String(), prCtx.errbuf.String())
}
- if err := git.NewCommand(ctx, "fetch", "origin").AddArguments(fetchArgs...).AddDashesAndList(pr.BaseBranch+":"+baseBranch, pr.BaseBranch+":original_"+baseBranch).
- Run(prCtx.RunOpts()); err != nil {
+ if err := git.NewCommand("fetch", "origin").AddArguments(fetchArgs...).AddDashesAndList(pr.BaseBranch+":"+baseBranch, pr.BaseBranch+":original_"+baseBranch).
+ Run(ctx, prCtx.RunOpts()); err != nil {
log.Error("%-v Unable to fetch origin base branch [%s:%s -> base, original_base in %s]: %v:\n%s\n%s", pr, pr.BaseRepo.FullName(), pr.BaseBranch, tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
cancel()
return nil, nil, fmt.Errorf("Unable to fetch origin base branch [%s:%s -> base, original_base in tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), pr.BaseBranch, err, prCtx.outbuf.String(), prCtx.errbuf.String())
}
- if err := git.NewCommand(ctx, "symbolic-ref").AddDynamicArguments("HEAD", git.BranchPrefix+baseBranch).
- Run(prCtx.RunOpts()); err != nil {
+ if err := git.NewCommand("symbolic-ref").AddDynamicArguments("HEAD", git.BranchPrefix+baseBranch).
+ Run(ctx, prCtx.RunOpts()); err != nil {
log.Error("%-v Unable to set HEAD as base branch in [%s]: %v\n%s\n%s", pr, tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
cancel()
return nil, nil, fmt.Errorf("Unable to set HEAD as base branch in tmpBasePath: %w\n%s\n%s", err, prCtx.outbuf.String(), prCtx.errbuf.String())
@@ -160,8 +161,8 @@ func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest)
return nil, nil, fmt.Errorf("Unable to add head base repository to temporary repo [%s -> tmpBasePath]: %w", pr.HeadRepo.FullName(), err)
}
- if err := git.NewCommand(ctx, "remote", "add").AddDynamicArguments(remoteRepoName, headRepoPath).
- Run(prCtx.RunOpts()); err != nil {
+ if err := git.NewCommand("remote", "add").AddDynamicArguments(remoteRepoName, headRepoPath).
+ Run(ctx, prCtx.RunOpts()); err != nil {
log.Error("%-v Unable to add head repository as head_repo [%s -> %s]: %v\n%s\n%s", pr, pr.HeadRepo.FullName(), tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
cancel()
return nil, nil, fmt.Errorf("Unable to add head repository as head_repo [%s -> tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), err, prCtx.outbuf.String(), prCtx.errbuf.String())
@@ -178,10 +179,10 @@ func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest)
} else {
headBranch = pr.GetGitRefName()
}
- if err := git.NewCommand(ctx, "fetch").AddArguments(fetchArgs...).AddDynamicArguments(remoteRepoName, headBranch+":"+trackingBranch).
- Run(prCtx.RunOpts()); err != nil {
+ if err := git.NewCommand("fetch").AddArguments(fetchArgs...).AddDynamicArguments(remoteRepoName, headBranch+":"+trackingBranch).
+ Run(ctx, prCtx.RunOpts()); err != nil {
cancel()
- if !git.IsBranchExist(ctx, pr.HeadRepo.RepoPath(), pr.HeadBranch) {
+ if !gitrepo.IsBranchExist(ctx, pr.HeadRepo, pr.HeadBranch) {
return nil, nil, git_model.ErrBranchNotExist{
BranchName: pr.HeadBranch,
}
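Several hunks here and below replace path-based checks (git.IsBranchExist with a repository path, or a method on an open *git.Repository) with the repository-model helpers in modules/gitrepo. A minimal sketch of the new shape, using only helpers that appear in this patch:

package pull

import (
	"context"

	git_model "code.gitea.io/gitea/models/git"
	repo_model "code.gitea.io/gitea/models/repo"
	"code.gitea.io/gitea/modules/gitrepo"
)

// requireBranch is an illustrative guard: existence is resolved from the
// repository model, with no repository path or open git handle needed.
func requireBranch(ctx context.Context, repo *repo_model.Repository, branch string) error {
	if !gitrepo.IsBranchExist(ctx, repo, branch) {
		return git_model.ErrBranchNotExist{BranchName: branch}
	}
	return nil
}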
diff --git a/services/pull/update.go b/services/pull/update.go
index abf7ad4509..3e00dd4e65 100644
--- a/services/pull/update.go
+++ b/services/pull/update.go
@@ -42,7 +42,15 @@ func Update(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.
if rebase {
defer func() {
- go AddTestPullRequestTask(doer, pr.BaseRepo.ID, pr.BaseBranch, false, "", "")
+ go AddTestPullRequestTask(TestPullRequestOptions{
+ RepoID: pr.BaseRepo.ID,
+ Doer: doer,
+ Branch: pr.BaseBranch,
+ IsSync: false,
+ IsForcePush: false,
+ OldCommitID: "",
+ NewCommitID: "",
+ })
}()
return updateHeadByRebaseOnToBase(ctx, pr, doer)
@@ -83,7 +91,15 @@ func Update(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.
_, err = doMergeAndPush(ctx, reversePR, doer, repo_model.MergeStyleMerge, "", message, repository.PushTriggerPRUpdateWithBase)
defer func() {
- go AddTestPullRequestTask(doer, reversePR.HeadRepo.ID, reversePR.HeadBranch, false, "", "")
+ go AddTestPullRequestTask(TestPullRequestOptions{
+ RepoID: reversePR.HeadRepo.ID,
+ Doer: doer,
+ Branch: reversePR.HeadBranch,
+ IsSync: false,
+ IsForcePush: false,
+ OldCommitID: "",
+ NewCommitID: "",
+ })
}()
return err
diff --git a/services/pull/update_rebase.go b/services/pull/update_rebase.go
index 3e2a7be132..9ff062f99c 100644
--- a/services/pull/update_rebase.go
+++ b/services/pull/update_rebase.go
@@ -27,7 +27,7 @@ func updateHeadByRebaseOnToBase(ctx context.Context, pr *issues_model.PullReques
defer cancel()
// Determine the old merge-base before the rebase - we use this for LFS push later on
- oldMergeBase, _, _ := git.NewCommand(ctx, "merge-base").AddDashesAndList(baseBranch, trackingBranch).RunStdString(&git.RunOpts{Dir: mergeCtx.tmpBasePath})
+ oldMergeBase, _, _ := git.NewCommand("merge-base").AddDashesAndList(baseBranch, trackingBranch).RunStdString(ctx, &git.RunOpts{Dir: mergeCtx.tmpBasePath})
oldMergeBase = strings.TrimSpace(oldMergeBase)
// Rebase the tracking branch on to the base as the staging branch
@@ -62,7 +62,7 @@ func updateHeadByRebaseOnToBase(ctx context.Context, pr *issues_model.PullReques
headUser = pr.HeadRepo.Owner
}
- pushCmd := git.NewCommand(ctx, "push", "-f", "head_repo").
+ pushCmd := git.NewCommand("push", "-f", "head_repo").
AddDynamicArguments(stagingBranch + ":" + git.BranchPrefix + pr.HeadBranch)
// Push back to the head repository.
@@ -71,7 +71,7 @@ func updateHeadByRebaseOnToBase(ctx context.Context, pr *issues_model.PullReques
mergeCtx.outbuf.Reset()
mergeCtx.errbuf.Reset()
- if err := pushCmd.Run(&git.RunOpts{
+ if err := pushCmd.Run(ctx, &git.RunOpts{
Env: repo_module.FullPushingEnvironment(
headUser,
doer,
diff --git a/services/release/release.go b/services/release/release.go
index 835a5943b1..0b8a74252a 100644
--- a/services/release/release.go
+++ b/services/release/release.go
@@ -77,7 +77,7 @@ func createTag(ctx context.Context, gitRepo *git.Repository, rel *repo_model.Rel
var created bool
// Only actual create when publish.
if !rel.IsDraft {
- if !gitRepo.IsTagExist(rel.TagName) {
+ if !gitrepo.IsTagExist(ctx, rel.Repo, rel.TagName) {
if err := rel.LoadAttributes(ctx); err != nil {
log.Error("LoadAttributes: %v", err)
return false, err
@@ -296,10 +296,7 @@ func UpdateRelease(ctx context.Context, doer *user_model.User, gitRepo *git.Repo
}
for _, attach := range attachments {
if attach.ReleaseID != rel.ID {
- return util.SilentWrap{
- Message: "delete attachment of release permission denied",
- Err: util.ErrPermissionDenied,
- }
+ return util.NewPermissionDeniedErrorf("delete attachment of release permission denied")
}
deletedUUIDs.Add(attach.UUID)
}
@@ -321,10 +318,7 @@ func UpdateRelease(ctx context.Context, doer *user_model.User, gitRepo *git.Repo
}
for _, attach := range attachments {
if attach.ReleaseID != rel.ID {
- return util.SilentWrap{
- Message: "update attachment of release permission denied",
- Err: util.ErrPermissionDenied,
- }
+ return util.NewPermissionDeniedErrorf("update attachment of release permission denied")
}
}
@@ -381,8 +375,8 @@ func DeleteReleaseByID(ctx context.Context, repo *repo_model.Repository, rel *re
}
}
- if stdout, _, err := git.NewCommand(ctx, "tag", "-d").AddDashesAndList(rel.TagName).
- RunStdString(&git.RunOpts{Dir: repo.RepoPath()}); err != nil && !strings.Contains(err.Error(), "not found") {
+ if stdout, _, err := git.NewCommand("tag", "-d").AddDashesAndList(rel.TagName).
+ RunStdString(ctx, &git.RunOpts{Dir: repo.RepoPath()}); err != nil && !strings.Contains(err.Error(), "not found") {
log.Error("DeleteReleaseByID (git tag -d): %d in %v Failed:\nStdout: %s\nError: %v", rel.ID, repo, stdout, err)
return fmt.Errorf("git tag -d: %w", err)
}
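util.NewPermissionDeniedErrorf replaces the hand-rolled util.SilentWrap literals. Assuming it wraps util.ErrPermissionDenied the same way the removed code did, callers can still match on the sentinel; a sketch:

package release

import (
	"errors"

	"code.gitea.io/gitea/modules/util"
)

// checkAttachmentOwnership is an illustrative guard using the new constructor.
func checkAttachmentOwnership(attachReleaseID, releaseID int64) error {
	if attachReleaseID != releaseID {
		return util.NewPermissionDeniedErrorf("update attachment of release permission denied")
	}
	return nil
}

// isPermissionDenied assumes NewPermissionDeniedErrorf wraps util.ErrPermissionDenied.
func isPermissionDenied(err error) bool {
	return errors.Is(err, util.ErrPermissionDenied)
}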
diff --git a/services/release/release_test.go b/services/release/release_test.go
index 95a54832b9..36a9f667d6 100644
--- a/services/release/release_test.go
+++ b/services/release/release_test.go
@@ -250,9 +250,9 @@ func TestRelease_Update(t *testing.T) {
assert.NoError(t, UpdateRelease(db.DefaultContext, user, gitRepo, release, []string{attach.UUID}, nil, nil))
assert.NoError(t, repo_model.GetReleaseAttachments(db.DefaultContext, release))
assert.Len(t, release.Attachments, 1)
- assert.EqualValues(t, attach.UUID, release.Attachments[0].UUID)
- assert.EqualValues(t, release.ID, release.Attachments[0].ReleaseID)
- assert.EqualValues(t, attach.Name, release.Attachments[0].Name)
+ assert.Equal(t, attach.UUID, release.Attachments[0].UUID)
+ assert.Equal(t, release.ID, release.Attachments[0].ReleaseID)
+ assert.Equal(t, attach.Name, release.Attachments[0].Name)
// update the attachment name
assert.NoError(t, UpdateRelease(db.DefaultContext, user, gitRepo, release, nil, nil, map[string]string{
@@ -261,9 +261,9 @@ func TestRelease_Update(t *testing.T) {
release.Attachments = nil
assert.NoError(t, repo_model.GetReleaseAttachments(db.DefaultContext, release))
assert.Len(t, release.Attachments, 1)
- assert.EqualValues(t, attach.UUID, release.Attachments[0].UUID)
- assert.EqualValues(t, release.ID, release.Attachments[0].ReleaseID)
- assert.EqualValues(t, "test2.txt", release.Attachments[0].Name)
+ assert.Equal(t, attach.UUID, release.Attachments[0].UUID)
+ assert.Equal(t, release.ID, release.Attachments[0].ReleaseID)
+ assert.Equal(t, "test2.txt", release.Attachments[0].Name)
// delete the attachment
assert.NoError(t, UpdateRelease(db.DefaultContext, user, gitRepo, release, nil, []string{attach.UUID}, nil))
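The assert.EqualValues to assert.Equal churn in this and the following test files tightens the comparisons: EqualValues passes when the two sides are merely convertible (for example int versus int64), while Equal also requires the types to match, so type drift fails fast. Roughly:

package release

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

// TestEqualVsEqualValues illustrates the difference the migration relies on.
func TestEqualVsEqualValues(t *testing.T) {
	var id int64 = 1
	assert.EqualValues(t, 1, id)  // passes: the untyped constant converts to int64
	assert.Equal(t, int64(1), id) // passes only because the types match exactly
}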
diff --git a/services/repository/adopt.go b/services/repository/adopt.go
index e37909e7ab..b7321156d9 100644
--- a/services/repository/adopt.go
+++ b/services/repository/adopt.go
@@ -53,10 +53,9 @@ func AdoptRepository(ctx context.Context, doer, u *user_model.User, opts CreateR
}
if err := db.WithTx(ctx, func(ctx context.Context) error {
- repoPath := repo_model.RepoPath(u.Name, repo.Name)
- isExist, err := util.IsExist(repoPath)
+ isExist, err := gitrepo.IsRepositoryExist(ctx, repo)
if err != nil {
- log.Error("Unable to check if %s exists. Error: %v", repoPath, err)
+ log.Error("Unable to check if %s exists. Error: %v", repo.FullName(), err)
return err
}
if !isExist {
@@ -75,8 +74,13 @@ func AdoptRepository(ctx context.Context, doer, u *user_model.User, opts CreateR
if repo, err = repo_model.GetRepositoryByID(ctx, repo.ID); err != nil {
return fmt.Errorf("getRepositoryByID: %w", err)
}
+ return nil
+ }); err != nil {
+ return nil, err
+ }
- if err := adoptRepository(ctx, repoPath, repo, opts.DefaultBranch); err != nil {
+ if err := func() error {
+ if err := adoptRepository(ctx, repo, opts.DefaultBranch); err != nil {
return fmt.Errorf("adoptRepository: %w", err)
}
@@ -84,39 +88,34 @@ func AdoptRepository(ctx context.Context, doer, u *user_model.User, opts CreateR
return fmt.Errorf("checkDaemonExportOK: %w", err)
}
- // Initialize Issue Labels if selected
- if len(opts.IssueLabels) > 0 {
- if err := repo_module.InitializeLabels(ctx, repo.ID, opts.IssueLabels, false); err != nil {
- return fmt.Errorf("InitializeLabels: %w", err)
- }
- }
-
- if stdout, _, err := git.NewCommand(ctx, "update-server-info").
- RunStdString(&git.RunOpts{Dir: repoPath}); err != nil {
+ if stdout, _, err := git.NewCommand("update-server-info").
+ RunStdString(ctx, &git.RunOpts{Dir: repo.RepoPath()}); err != nil {
log.Error("CreateRepository(git update-server-info) in %v: Stdout: %s\nError: %v", repo, stdout, err)
return fmt.Errorf("CreateRepository(git update-server-info): %w", err)
}
return nil
- }); err != nil {
+ }(); err != nil {
+ if errDel := DeleteRepository(ctx, doer, repo, false /* no notify */); errDel != nil {
+ log.Error("Failed to delete repository %s that could not be adopted: %v", repo.FullName(), errDel)
+ }
return nil, err
}
-
notify_service.AdoptRepository(ctx, doer, u, repo)
return repo, nil
}
-func adoptRepository(ctx context.Context, repoPath string, repo *repo_model.Repository, defaultBranch string) (err error) {
- isExist, err := util.IsExist(repoPath)
+func adoptRepository(ctx context.Context, repo *repo_model.Repository, defaultBranch string) (err error) {
+ isExist, err := gitrepo.IsRepositoryExist(ctx, repo)
if err != nil {
- log.Error("Unable to check if %s exists. Error: %v", repoPath, err)
+ log.Error("Unable to check if %s exists. Error: %v", repo.FullName(), err)
return err
}
if !isExist {
- return fmt.Errorf("adoptRepository: path does not already exist: %s", repoPath)
+ return fmt.Errorf("adoptRepository: path does not already exist: %s", repo.FullName())
}
- if err := repo_module.CreateDelegateHooks(repoPath); err != nil {
+ if err := gitrepo.CreateDelegateHooks(ctx, repo); err != nil {
return fmt.Errorf("createDelegateHooks: %w", err)
}
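adoptRepository now works purely off the repository model: it checks the on-disk repository through gitrepo and installs Gitea's delegate hooks, and the caller deletes the half-adopted record if any of this fails. An illustrative pre-check mirroring that flow (the function name is hypothetical):

package repository

import (
	"context"
	"fmt"

	repo_model "code.gitea.io/gitea/models/repo"
	"code.gitea.io/gitea/modules/gitrepo"
)

// ensureAdoptable verifies the bare repository already exists on disk and
// installs the delegate hooks, as adoptRepository does above.
func ensureAdoptable(ctx context.Context, repo *repo_model.Repository) error {
	isExist, err := gitrepo.IsRepositoryExist(ctx, repo)
	if err != nil {
		return err
	}
	if !isExist {
		return fmt.Errorf("adopt: %s has no repository on disk", repo.FullName())
	}
	return gitrepo.CreateDelegateHooks(ctx, repo)
}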
diff --git a/services/repository/adopt_test.go b/services/repository/adopt_test.go
index 123cedc1f2..294185ea1f 100644
--- a/services/repository/adopt_test.go
+++ b/services/repository/adopt_test.go
@@ -71,7 +71,7 @@ func TestListUnadoptedRepositories_ListOptions(t *testing.T) {
username := "user2"
unadoptedList := []string{path.Join(username, "unadopted1"), path.Join(username, "unadopted2")}
for _, unadopted := range unadoptedList {
- _ = os.Mkdir(path.Join(setting.RepoRootPath, unadopted+".git"), 0o755)
+ _ = os.Mkdir(filepath.Join(setting.RepoRootPath, unadopted+".git"), 0o755)
}
opts := db.ListOptions{Page: 1, PageSize: 1}
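The path.Join to filepath.Join switch matters on Windows: path.Join always produces slash-separated names, while filepath.Join uses the platform separator, which is what an on-disk location under RepoRootPath needs. A small illustration (the helper is hypothetical):

package repository

import (
	"path"
	"path/filepath"

	"code.gitea.io/gitea/modules/setting"
)

// repoDirAndName contrasts the two join helpers: filepath.Join for on-disk
// paths, path.Join for the logical slash-separated "owner/name" form.
func repoDirAndName(owner, name string) (dir, full string) {
	dir = filepath.Join(setting.RepoRootPath, owner, name+".git")
	full = path.Join(owner, name)
	return dir, full
}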
diff --git a/services/repository/archiver/archiver_test.go b/services/repository/archiver/archiver_test.go
index 522f90558a..87324ad38c 100644
--- a/services/repository/archiver/archiver_test.go
+++ b/services/repository/archiver/archiver_test.go
@@ -33,7 +33,7 @@ func TestArchive_Basic(t *testing.T) {
bogusReq, err := NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, firstCommit+".zip")
assert.NoError(t, err)
assert.NotNil(t, bogusReq)
- assert.EqualValues(t, firstCommit+".zip", bogusReq.GetArchiveName())
+ assert.Equal(t, firstCommit+".zip", bogusReq.GetArchiveName())
// Check a series of bogus requests.
// Step 1, valid commit with a bad extension.
@@ -54,12 +54,12 @@ func TestArchive_Basic(t *testing.T) {
bogusReq, err = NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, "master.zip")
assert.NoError(t, err)
assert.NotNil(t, bogusReq)
- assert.EqualValues(t, "master.zip", bogusReq.GetArchiveName())
+ assert.Equal(t, "master.zip", bogusReq.GetArchiveName())
bogusReq, err = NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, "test/archive.zip")
assert.NoError(t, err)
assert.NotNil(t, bogusReq)
- assert.EqualValues(t, "test-archive.zip", bogusReq.GetArchiveName())
+ assert.Equal(t, "test-archive.zip", bogusReq.GetArchiveName())
// Now two valid requests, firstCommit with valid extensions.
zipReq, err := NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, firstCommit+".zip")
diff --git a/services/repository/avatar_test.go b/services/repository/avatar_test.go
index bea820e85f..2dc5173eec 100644
--- a/services/repository/avatar_test.go
+++ b/services/repository/avatar_test.go
@@ -59,7 +59,7 @@ func TestDeleteAvatar(t *testing.T) {
err = DeleteAvatar(db.DefaultContext, repo)
assert.NoError(t, err)
- assert.Equal(t, "", repo.Avatar)
+ assert.Empty(t, repo.Avatar)
}
func TestGenerateAvatar(t *testing.T) {
diff --git a/services/repository/branch.go b/services/repository/branch.go
index c80d367bbd..8804778bd5 100644
--- a/services/repository/branch.go
+++ b/services/repository/branch.go
@@ -30,6 +30,7 @@ import (
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/util"
webhook_module "code.gitea.io/gitea/modules/webhook"
+ actions_service "code.gitea.io/gitea/services/actions"
notify_service "code.gitea.io/gitea/services/notify"
release_service "code.gitea.io/gitea/services/release"
files_service "code.gitea.io/gitea/services/repository/files"
@@ -409,11 +410,11 @@ func RenameBranch(ctx context.Context, repo *repo_model.Repository, doer *user_m
return "target_exist", nil
}
- if gitRepo.IsBranchExist(to) {
+ if gitrepo.IsBranchExist(ctx, repo, to) {
return "target_exist", nil
}
- if !gitRepo.IsBranchExist(from) {
+ if !gitrepo.IsBranchExist(ctx, repo, from) {
return "from_not_exist", nil
}
@@ -452,7 +453,7 @@ func RenameBranch(ctx context.Context, repo *repo_model.Repository, doer *user_m
log.Error("DeleteCronTaskByRepo: %v", err)
}
// cancel running cron jobs of this repository and delete old schedules
- if err := actions_model.CancelPreviousJobs(
+ if err := actions_service.CancelPreviousJobs(
ctx,
repo.ID,
from,
@@ -617,12 +618,12 @@ func AddAllRepoBranchesToSyncQueue(ctx context.Context) error {
return nil
}
-func SetRepoDefaultBranch(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, newBranchName string) error {
+func SetRepoDefaultBranch(ctx context.Context, repo *repo_model.Repository, newBranchName string) error {
if repo.DefaultBranch == newBranchName {
return nil
}
- if !gitRepo.IsBranchExist(newBranchName) {
+ if !gitrepo.IsBranchExist(ctx, repo, newBranchName) {
return git_model.ErrBranchNotExist{
BranchName: newBranchName,
}
@@ -639,7 +640,7 @@ func SetRepoDefaultBranch(ctx context.Context, repo *repo_model.Repository, gitR
log.Error("DeleteCronTaskByRepo: %v", err)
}
// cancel running cron jobs of this repository and delete old schedules
- if err := actions_model.CancelPreviousJobs(
+ if err := actions_service.CancelPreviousJobs(
ctx,
repo.ID,
oldDefaultBranchName,
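SetRepoDefaultBranch drops its *git.Repository parameter; branch existence is now resolved through gitrepo from the repository model, so callers no longer need to open the repository first. An illustrative wrapper (the function name is hypothetical):

package repository

import (
	"context"
	"fmt"

	git_model "code.gitea.io/gitea/models/git"
	repo_model "code.gitea.io/gitea/models/repo"
)

// switchDefaultBranch calls the new signature and translates a missing branch
// into a friendlier error for the caller.
func switchDefaultBranch(ctx context.Context, repo *repo_model.Repository, branch string) error {
	if err := SetRepoDefaultBranch(ctx, repo, branch); err != nil {
		if git_model.IsErrBranchNotExist(err) {
			return fmt.Errorf("cannot make %q the default branch: it does not exist", branch)
		}
		return err
	}
	return nil
}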
diff --git a/services/repository/check.go b/services/repository/check.go
index acca15daf2..b475fbc487 100644
--- a/services/repository/check.go
+++ b/services/repository/check.go
@@ -86,10 +86,10 @@ func GitGcRepos(ctx context.Context, timeout time.Duration, args git.TrustedCmdA
// GitGcRepo calls 'git gc' to remove unnecessary files and optimize the local repository
func GitGcRepo(ctx context.Context, repo *repo_model.Repository, timeout time.Duration, args git.TrustedCmdArgs) error {
log.Trace("Running git gc on %-v", repo)
- command := git.NewCommand(ctx, "gc").AddArguments(args...)
+ command := git.NewCommand("gc").AddArguments(args...)
var stdout string
var err error
- stdout, _, err = command.RunStdString(&git.RunOpts{Timeout: timeout, Dir: repo.RepoPath()})
+ stdout, _, err = command.RunStdString(ctx, &git.RunOpts{Timeout: timeout, Dir: repo.RepoPath()})
if err != nil {
log.Error("Repository garbage collection failed for %-v. Stdout: %s\nError: %v", repo, stdout, err)
desc := fmt.Sprintf("Repository garbage collection failed for %s. Stdout: %s\nError: %v", repo.RepoPath(), stdout, err)
diff --git a/services/repository/contributors_graph.go b/services/repository/contributors_graph.go
index b0748f8ee3..a4ae505313 100644
--- a/services/repository/contributors_graph.go
+++ b/services/repository/contributors_graph.go
@@ -125,13 +125,13 @@ func getExtendedCommitStats(repo *git.Repository, revision string /*, limit int
_ = stdoutWriter.Close()
}()
- gitCmd := git.NewCommand(repo.Ctx, "log", "--shortstat", "--no-merges", "--pretty=format:---%n%aN%n%aE%n%as", "--reverse")
+ gitCmd := git.NewCommand("log", "--shortstat", "--no-merges", "--pretty=format:---%n%aN%n%aE%n%as", "--reverse")
// AddOptionFormat("--max-count=%d", limit)
gitCmd.AddDynamicArguments(baseCommit.ID.String())
var extendedCommitStats []*ExtendedCommitStats
stderr := new(strings.Builder)
- err = gitCmd.Run(&git.RunOpts{
+ err = gitCmd.Run(repo.Ctx, &git.RunOpts{
Dir: repo.Path,
Stdout: stdoutWriter,
Stderr: stderr,
diff --git a/services/repository/contributors_graph_test.go b/services/repository/contributors_graph_test.go
index 6db93f6a64..7d32b1c931 100644
--- a/services/repository/contributors_graph_test.go
+++ b/services/repository/contributors_graph_test.go
@@ -38,14 +38,14 @@ func TestRepository_ContributorsGraph(t *testing.T) {
keys = append(keys, k)
}
slices.Sort(keys)
- assert.EqualValues(t, []string{
+ assert.Equal(t, []string{
"ethantkoenig@gmail.com",
"jimmy.praet@telenet.be",
"jon@allspice.io",
"total", // generated summary
}, keys)
- assert.EqualValues(t, &ContributorData{
+ assert.Equal(t, &ContributorData{
Name: "Ethan Koenig",
AvatarLink: "/assets/img/avatar_default.png",
TotalCommits: 1,
@@ -58,7 +58,7 @@ func TestRepository_ContributorsGraph(t *testing.T) {
},
},
}, data["ethantkoenig@gmail.com"])
- assert.EqualValues(t, &ContributorData{
+ assert.Equal(t, &ContributorData{
Name: "Total",
AvatarLink: "",
TotalCommits: 3,
diff --git a/services/repository/create.go b/services/repository/create.go
index 23aacd6f95..af4e897151 100644
--- a/services/repository/create.go
+++ b/services/repository/create.go
@@ -52,7 +52,7 @@ type CreateRepoOptions struct {
ObjectFormatName string
}
-func prepareRepoCommit(ctx context.Context, repo *repo_model.Repository, tmpDir, repoPath string, opts CreateRepoOptions) error {
+func prepareRepoCommit(ctx context.Context, repo *repo_model.Repository, tmpDir string, opts CreateRepoOptions) error {
commitTimeStr := time.Now().Format(time.RFC3339)
authorSig := repo.Owner.NewGitSig()
@@ -67,8 +67,8 @@ func prepareRepoCommit(ctx context.Context, repo *repo_model.Repository, tmpDir,
)
// Clone to temporary path and do the init commit.
- if stdout, _, err := git.NewCommand(ctx, "clone").AddDynamicArguments(repoPath, tmpDir).
- RunStdString(&git.RunOpts{Dir: "", Env: env}); err != nil {
+ if stdout, _, err := git.NewCommand("clone").AddDynamicArguments(repo.RepoPath(), tmpDir).
+ RunStdString(ctx, &git.RunOpts{Dir: "", Env: env}); err != nil {
log.Error("Failed to clone from %v into %s: stdout: %s\nError: %v", repo, tmpDir, stdout, err)
return fmt.Errorf("git clone: %w", err)
}
@@ -139,8 +139,8 @@ func prepareRepoCommit(ctx context.Context, repo *repo_model.Repository, tmpDir,
}
// InitRepository initializes README and .gitignore if needed.
-func initRepository(ctx context.Context, repoPath string, u *user_model.User, repo *repo_model.Repository, opts CreateRepoOptions) (err error) {
- if err = repo_module.CheckInitRepository(ctx, repo.OwnerName, repo.Name, opts.ObjectFormatName); err != nil {
+func initRepository(ctx context.Context, u *user_model.User, repo *repo_model.Repository, opts CreateRepoOptions) (err error) {
+ if err = repo_module.CheckInitRepository(ctx, repo); err != nil {
return err
}
@@ -148,7 +148,7 @@ func initRepository(ctx context.Context, repoPath string, u *user_model.User, re
if opts.AutoInit {
tmpDir, err := os.MkdirTemp(os.TempDir(), "gitea-"+repo.Name)
if err != nil {
- return fmt.Errorf("Failed to create temp dir for repository %s: %w", repo.RepoPath(), err)
+ return fmt.Errorf("Failed to create temp dir for repository %s: %w", repo.FullName(), err)
}
defer func() {
if err := util.RemoveAll(tmpDir); err != nil {
@@ -156,7 +156,7 @@ func initRepository(ctx context.Context, repoPath string, u *user_model.User, re
}
}()
- if err = prepareRepoCommit(ctx, repo, tmpDir, repoPath, opts); err != nil {
+ if err = prepareRepoCommit(ctx, repo, tmpDir, opts); err != nil {
return fmt.Errorf("prepareRepoCommit: %w", err)
}
@@ -256,10 +256,9 @@ func CreateRepositoryDirectly(ctx context.Context, doer, u *user_model.User, opt
return nil
}
- repoPath := repo_model.RepoPath(u.Name, repo.Name)
- isExist, err := util.IsExist(repoPath)
+ isExist, err := gitrepo.IsRepositoryExist(ctx, repo)
if err != nil {
- log.Error("Unable to check if %s exists. Error: %v", repoPath, err)
+ log.Error("Unable to check if %s exists. Error: %v", repo.FullName(), err)
return err
}
if isExist {
@@ -270,15 +269,15 @@ func CreateRepositoryDirectly(ctx context.Context, doer, u *user_model.User, opt
//
// Previously Gitea would just delete and start afresh - this was naughty.
// So we will now fail and delegate to other functionality to adopt or delete
- log.Error("Files already exist in %s and we are not going to adopt or delete.", repoPath)
+ log.Error("Files already exist in %s and we are not going to adopt or delete.", repo.FullName())
return repo_model.ErrRepoFilesAlreadyExist{
Uname: u.Name,
Name: repo.Name,
}
}
- if err = initRepository(ctx, repoPath, doer, repo, opts); err != nil {
- if err2 := util.RemoveAll(repoPath); err2 != nil {
+ if err = initRepository(ctx, doer, repo, opts); err != nil {
+ if err2 := gitrepo.DeleteRepository(ctx, repo); err2 != nil {
log.Error("initRepository: %v", err)
return fmt.Errorf(
"delete repo directory %s/%s failed(2): %v", u.Name, repo.Name, err2)
@@ -299,8 +298,8 @@ func CreateRepositoryDirectly(ctx context.Context, doer, u *user_model.User, opt
return fmt.Errorf("checkDaemonExportOK: %w", err)
}
- if stdout, _, err := git.NewCommand(ctx, "update-server-info").
- RunStdString(&git.RunOpts{Dir: repoPath}); err != nil {
+ if stdout, _, err := git.NewCommand("update-server-info").
+ RunStdString(ctx, &git.RunOpts{Dir: repo.RepoPath()}); err != nil {
log.Error("CreateRepository(git update-server-info) in %v: Stdout: %s\nError: %v", repo, stdout, err)
rollbackRepo = repo
rollbackRepo.OwnerID = u.ID
@@ -310,9 +309,9 @@ func CreateRepositoryDirectly(ctx context.Context, doer, u *user_model.User, opt
// update licenses
var licenses []string
if len(opts.License) > 0 {
- licenses = append(licenses, ConvertLicenseName(opts.License))
+ licenses = append(licenses, opts.License)
- stdout, _, err := git.NewCommand(ctx, "rev-parse", "HEAD").RunStdString(&git.RunOpts{Dir: repoPath})
+ stdout, _, err := git.NewCommand("rev-parse", "HEAD").RunStdString(ctx, &git.RunOpts{Dir: repo.RepoPath()})
if err != nil {
log.Error("CreateRepository(git rev-parse HEAD) in %v: Stdout: %s\nError: %v", repo, stdout, err)
rollbackRepo = repo
@@ -353,14 +352,13 @@ func CreateRepositoryByExample(ctx context.Context, doer, u *user_model.User, re
}
}
- repoPath := repo_model.RepoPath(u.Name, repo.Name)
- isExist, err := util.IsExist(repoPath)
+ isExist, err := gitrepo.IsRepositoryExist(ctx, repo)
if err != nil {
- log.Error("Unable to check if %s exists. Error: %v", repoPath, err)
+ log.Error("Unable to check if %s exists. Error: %v", repo.FullName(), err)
return err
}
if !overwriteOrAdopt && isExist {
- log.Error("Files already exist in %s and we are not going to adopt or delete.", repoPath)
+ log.Error("Files already exist in %s and we are not going to adopt or delete.", repo.FullName())
return repo_model.ErrRepoFilesAlreadyExist{
Uname: u.Name,
Name: repo.Name,
@@ -386,7 +384,8 @@ func CreateRepositoryByExample(ctx context.Context, doer, u *user_model.User, re
}
units := make([]repo_model.RepoUnit, 0, len(defaultUnits))
for _, tp := range defaultUnits {
- if tp == unit.TypeIssues {
+ switch tp {
+ case unit.TypeIssues:
units = append(units, repo_model.RepoUnit{
RepoID: repo.ID,
Type: tp,
@@ -396,7 +395,7 @@ func CreateRepositoryByExample(ctx context.Context, doer, u *user_model.User, re
EnableDependencies: setting.Service.DefaultEnableDependencies,
},
})
- } else if tp == unit.TypePullRequests {
+ case unit.TypePullRequests:
units = append(units, repo_model.RepoUnit{
RepoID: repo.ID,
Type: tp,
@@ -406,13 +405,13 @@ func CreateRepositoryByExample(ctx context.Context, doer, u *user_model.User, re
AllowRebaseUpdate: true,
},
})
- } else if tp == unit.TypeProjects {
+ case unit.TypeProjects:
units = append(units, repo_model.RepoUnit{
RepoID: repo.ID,
Type: tp,
Config: &repo_model.ProjectsConfig{ProjectsMode: repo_model.ProjectsModeAll},
})
- } else {
+ default:
units = append(units, repo_model.RepoUnit{
RepoID: repo.ID,
Type: tp,
diff --git a/services/repository/delete.go b/services/repository/delete.go
index 2166b4dd5c..ff74a20817 100644
--- a/services/repository/delete.go
+++ b/services/repository/delete.go
@@ -14,6 +14,7 @@ import (
git_model "code.gitea.io/gitea/models/git"
issues_model "code.gitea.io/gitea/models/issues"
"code.gitea.io/gitea/models/organization"
+ packages_model "code.gitea.io/gitea/models/packages"
access_model "code.gitea.io/gitea/models/perm/access"
project_model "code.gitea.io/gitea/models/project"
repo_model "code.gitea.io/gitea/models/repo"
@@ -22,6 +23,7 @@ import (
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/models/webhook"
actions_module "code.gitea.io/gitea/modules/actions"
+ "code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/lfs"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/storage"
@@ -158,6 +160,7 @@ func DeleteRepositoryDirectly(ctx context.Context, doer *user_model.User, repoID
&actions_model.ActionSchedule{RepoID: repoID},
&actions_model.ActionArtifact{RepoID: repoID},
&actions_model.ActionRunnerToken{RepoID: repoID},
+ &issues_model.IssuePin{RepoID: repoID},
); err != nil {
return fmt.Errorf("deleteBeans: %w", err)
}
@@ -266,6 +269,11 @@ func DeleteRepositoryDirectly(ctx context.Context, doer *user_model.User, repoID
return err
}
+ // unlink packages linked to this repository
+ if err = packages_model.UnlinkRepositoryFromAllPackages(ctx, repoID); err != nil {
+ return err
+ }
+
if err = committer.Commit(); err != nil {
return err
}
@@ -282,8 +290,13 @@ func DeleteRepositoryDirectly(ctx context.Context, doer *user_model.User, repoID
// we delete the file but the database rollback, the repository will be broken.
// Remove repository files.
- repoPath := repo.RepoPath()
- system_model.RemoveAllWithNotice(ctx, "Delete repository files", repoPath)
+ if err := gitrepo.DeleteRepository(ctx, repo); err != nil {
+ desc := fmt.Sprintf("Delete repository files [%s]: %v", repo.FullName(), err)
+ // Note we use the db.DefaultContext here rather than passing in a context as the context may be cancelled
+ if err = system_model.CreateNotice(db.DefaultContext, system_model.NoticeRepository, desc); err != nil {
+ log.Error("CreateRepositoryNotice: %v", err)
+ }
+ }
// Remove wiki files
if repo.HasWiki() {
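Once the delete transaction has committed, a failure to remove the on-disk files is downgraded to an admin notice instead of an error, and the notice is written against db.DefaultContext because the request context may already be cancelled. A reduced sketch of that shape, using only calls visible above (the helper name is illustrative, and the models/system import path is assumed):

package repository

import (
	"context"
	"fmt"

	"code.gitea.io/gitea/models/db"
	repo_model "code.gitea.io/gitea/models/repo"
	system_model "code.gitea.io/gitea/models/system"
	"code.gitea.io/gitea/modules/gitrepo"
	"code.gitea.io/gitea/modules/log"
)

// removeRepoFilesWithNotice mirrors the post-commit cleanup: a failed file
// deletion becomes an admin notice rather than a hard error.
func removeRepoFilesWithNotice(ctx context.Context, repo *repo_model.Repository) {
	if err := gitrepo.DeleteRepository(ctx, repo); err != nil {
		desc := fmt.Sprintf("Delete repository files [%s]: %v", repo.FullName(), err)
		if err := system_model.CreateNotice(db.DefaultContext, system_model.NoticeRepository, desc); err != nil {
			log.Error("CreateRepositoryNotice: %v", err)
		}
	}
}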
diff --git a/services/repository/files/cherry_pick.go b/services/repository/files/cherry_pick.go
index 3457283803..0e069fb2ce 100644
--- a/services/repository/files/cherry_pick.go
+++ b/services/repository/files/cherry_pick.go
@@ -39,18 +39,18 @@ func CherryPick(ctx context.Context, repo *repo_model.Repository, doer *user_mod
}
message := strings.TrimSpace(opts.Message)
- t, err := NewTemporaryUploadRepository(ctx, repo)
+ t, err := NewTemporaryUploadRepository(repo)
if err != nil {
log.Error("NewTemporaryUploadRepository failed: %v", err)
}
defer t.Close()
- if err := t.Clone(opts.OldBranch, false); err != nil {
+ if err := t.Clone(ctx, opts.OldBranch, false); err != nil {
return nil, err
}
- if err := t.SetDefaultIndex(); err != nil {
+ if err := t.SetDefaultIndex(ctx); err != nil {
return nil, err
}
- if err := t.RefreshIndex(); err != nil {
+ if err := t.RefreshIndex(ctx); err != nil {
return nil, err
}
@@ -103,7 +103,7 @@ func CherryPick(ctx context.Context, repo *repo_model.Repository, doer *user_mod
return nil, fmt.Errorf("failed to merge due to conflicts")
}
- treeHash, err := t.WriteTree()
+ treeHash, err := t.WriteTree(ctx)
if err != nil {
// likely non-sensical tree due to merge conflicts...
return nil, err
@@ -124,13 +124,13 @@ func CherryPick(ctx context.Context, repo *repo_model.Repository, doer *user_mod
if opts.Dates != nil {
commitOpts.AuthorTime, commitOpts.CommitterTime = &opts.Dates.Author, &opts.Dates.Committer
}
- commitHash, err := t.CommitTree(commitOpts)
+ commitHash, err := t.CommitTree(ctx, commitOpts)
if err != nil {
return nil, err
}
// Then push this tree to NewBranch
- if err := t.Push(doer, commitHash, opts.NewBranch); err != nil {
+ if err := t.Push(ctx, doer, commitHash, opts.NewBranch); err != nil {
return nil, err
}
diff --git a/services/repository/files/commit.go b/services/repository/files/commit.go
index e0dad29273..3cc326d065 100644
--- a/services/repository/files/commit.go
+++ b/services/repository/files/commit.go
@@ -6,10 +6,10 @@ package files
import (
"context"
- asymkey_model "code.gitea.io/gitea/models/asymkey"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/structs"
+ asymkey_service "code.gitea.io/gitea/services/asymkey"
)
// CountDivergingCommits determines how many commits a branch is ahead or behind the repository's base branch
@@ -24,7 +24,7 @@ func CountDivergingCommits(ctx context.Context, repo *repo_model.Repository, bra
// GetPayloadCommitVerification returns the verification information of a commit
func GetPayloadCommitVerification(ctx context.Context, commit *git.Commit) *structs.PayloadCommitVerification {
verification := &structs.PayloadCommitVerification{}
- commitVerification := asymkey_model.ParseCommitWithSignature(ctx, commit)
+ commitVerification := asymkey_service.ParseCommitWithSignature(ctx, commit)
if commit.Signature != nil {
verification.Signature = commit.Signature.Signature
verification.Payload = commit.Signature.Payload
diff --git a/services/repository/files/content_test.go b/services/repository/files/content_test.go
index 7cb46c0bb6..866a1695e0 100644
--- a/services/repository/files/content_test.go
+++ b/services/repository/files/content_test.go
@@ -67,13 +67,13 @@ func TestGetContents(t *testing.T) {
t.Run("Get README.md contents with GetContents(ctx, )", func(t *testing.T) {
fileContentResponse, err := GetContents(ctx, ctx.Repo.Repository, treePath, ref, false)
- assert.EqualValues(t, expectedContentsResponse, fileContentResponse)
+ assert.Equal(t, expectedContentsResponse, fileContentResponse)
assert.NoError(t, err)
})
t.Run("Get README.md contents with ref as empty string (should then use the repo's default branch) with GetContents(ctx, )", func(t *testing.T) {
fileContentResponse, err := GetContents(ctx, ctx.Repo.Repository, treePath, "", false)
- assert.EqualValues(t, expectedContentsResponse, fileContentResponse)
+ assert.Equal(t, expectedContentsResponse, fileContentResponse)
assert.NoError(t, err)
})
}
diff --git a/services/repository/files/diff.go b/services/repository/files/diff.go
index bf8b938e21..0b3550452a 100644
--- a/services/repository/files/diff.go
+++ b/services/repository/files/diff.go
@@ -16,27 +16,27 @@ func GetDiffPreview(ctx context.Context, repo *repo_model.Repository, branch, tr
if branch == "" {
branch = repo.DefaultBranch
}
- t, err := NewTemporaryUploadRepository(ctx, repo)
+ t, err := NewTemporaryUploadRepository(repo)
if err != nil {
return nil, err
}
defer t.Close()
- if err := t.Clone(branch, true); err != nil {
+ if err := t.Clone(ctx, branch, true); err != nil {
return nil, err
}
- if err := t.SetDefaultIndex(); err != nil {
+ if err := t.SetDefaultIndex(ctx); err != nil {
return nil, err
}
// Add the object to the database
- objectHash, err := t.HashObject(strings.NewReader(content))
+ objectHash, err := t.HashObject(ctx, strings.NewReader(content))
if err != nil {
return nil, err
}
// Add the object to the index
- if err := t.AddObjectToIndex("100644", objectHash, treePath); err != nil {
+ if err := t.AddObjectToIndex(ctx, "100644", objectHash, treePath); err != nil {
return nil, err
}
- return t.DiffIndex()
+ return t.DiffIndex(ctx)
}
diff --git a/services/repository/files/diff_test.go b/services/repository/files/diff_test.go
index b7bdcd8ecf..ae702e4189 100644
--- a/services/repository/files/diff_test.go
+++ b/services/repository/files/diff_test.go
@@ -30,14 +30,11 @@ func TestGetDiffPreview(t *testing.T) {
content := "# repo1\n\nDescription for repo1\nthis is a new line"
expectedDiff := &gitdiff.Diff{
- TotalAddition: 2,
- TotalDeletion: 1,
Files: []*gitdiff.DiffFile{
{
Name: "README.md",
OldName: "README.md",
NameHash: "8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d",
- Index: 1,
Addition: 2,
Deletion: 1,
Type: 2,
@@ -50,7 +47,6 @@ func TestGetDiffPreview(t *testing.T) {
Sections: []*gitdiff.DiffSection{
{
FileName: "README.md",
- Name: "",
Lines: []*gitdiff.DiffLine{
{
LeftIdx: 0,
@@ -114,7 +110,6 @@ func TestGetDiffPreview(t *testing.T) {
},
IsIncomplete: false,
}
- expectedDiff.NumFiles = len(expectedDiff.Files)
t.Run("with given branch", func(t *testing.T) {
diff, err := GetDiffPreview(ctx, ctx.Repo.Repository, branch, treePath, content)
@@ -123,7 +118,7 @@ func TestGetDiffPreview(t *testing.T) {
assert.NoError(t, err)
bs, err := json.Marshal(diff)
assert.NoError(t, err)
- assert.EqualValues(t, string(expectedBs), string(bs))
+ assert.Equal(t, string(expectedBs), string(bs))
})
t.Run("empty branch, same results", func(t *testing.T) {
@@ -133,7 +128,7 @@ func TestGetDiffPreview(t *testing.T) {
assert.NoError(t, err)
bs, err := json.Marshal(diff)
assert.NoError(t, err)
- assert.EqualValues(t, expectedBs, bs)
+ assert.Equal(t, expectedBs, bs)
})
}
diff --git a/services/repository/files/file_test.go b/services/repository/files/file_test.go
index 52c0574883..5b4b3aebe0 100644
--- a/services/repository/files/file_test.go
+++ b/services/repository/files/file_test.go
@@ -20,14 +20,14 @@ func TestCleanUploadFileName(t *testing.T) {
name := "this/is/test"
cleanName := CleanUploadFileName(name)
expectedCleanName := name
- assert.EqualValues(t, expectedCleanName, cleanName)
+ assert.Equal(t, expectedCleanName, cleanName)
})
t.Run("Clean a .git path", func(t *testing.T) {
name := "this/is/test/.git"
cleanName := CleanUploadFileName(name)
expectedCleanName := ""
- assert.EqualValues(t, expectedCleanName, cleanName)
+ assert.Equal(t, expectedCleanName, cleanName)
})
}
@@ -116,5 +116,5 @@ func TestGetFileResponseFromCommit(t *testing.T) {
fileResponse, err := GetFileResponseFromCommit(ctx, repo, commit, branch, treePath)
assert.NoError(t, err)
- assert.EqualValues(t, expectedFileResponse, fileResponse)
+ assert.Equal(t, expectedFileResponse, fileResponse)
}
diff --git a/services/repository/files/patch.go b/services/repository/files/patch.go
index 78c275f01c..1941adb86a 100644
--- a/services/repository/files/patch.go
+++ b/services/repository/files/patch.go
@@ -126,15 +126,15 @@ func ApplyDiffPatch(ctx context.Context, repo *repo_model.Repository, doer *user
message := strings.TrimSpace(opts.Message)
- t, err := NewTemporaryUploadRepository(ctx, repo)
+ t, err := NewTemporaryUploadRepository(repo)
if err != nil {
log.Error("NewTemporaryUploadRepository failed: %v", err)
}
defer t.Close()
- if err := t.Clone(opts.OldBranch, true); err != nil {
+ if err := t.Clone(ctx, opts.OldBranch, true); err != nil {
return nil, err
}
- if err := t.SetDefaultIndex(); err != nil {
+ if err := t.SetDefaultIndex(ctx); err != nil {
return nil, err
}
@@ -164,12 +164,12 @@ func ApplyDiffPatch(ctx context.Context, repo *repo_model.Repository, doer *user
stdout := &strings.Builder{}
stderr := &strings.Builder{}
- cmdApply := git.NewCommand(ctx, "apply", "--index", "--recount", "--cached", "--ignore-whitespace", "--whitespace=fix", "--binary")
+ cmdApply := git.NewCommand("apply", "--index", "--recount", "--cached", "--ignore-whitespace", "--whitespace=fix", "--binary")
if git.DefaultFeatures().CheckVersionAtLeast("2.32") {
cmdApply.AddArguments("-3")
}
- if err := cmdApply.Run(&git.RunOpts{
+ if err := cmdApply.Run(ctx, &git.RunOpts{
Dir: t.basePath,
Stdout: stdout,
Stderr: stderr,
@@ -179,7 +179,7 @@ func ApplyDiffPatch(ctx context.Context, repo *repo_model.Repository, doer *user
}
// Now write the tree
- treeHash, err := t.WriteTree()
+ treeHash, err := t.WriteTree(ctx)
if err != nil {
return nil, err
}
@@ -199,13 +199,13 @@ func ApplyDiffPatch(ctx context.Context, repo *repo_model.Repository, doer *user
if opts.Dates != nil {
commitOpts.AuthorTime, commitOpts.CommitterTime = &opts.Dates.Author, &opts.Dates.Committer
}
- commitHash, err := t.CommitTree(commitOpts)
+ commitHash, err := t.CommitTree(ctx, commitOpts)
if err != nil {
return nil, err
}
// Then push this tree to NewBranch
- if err := t.Push(doer, commitHash, opts.NewBranch); err != nil {
+ if err := t.Push(ctx, doer, commitHash, opts.NewBranch); err != nil {
return nil, err
}
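TemporaryUploadRepository no longer stores a context in the struct; every operation takes the caller's ctx, as the ApplyDiffPatch hunks above show. An end-to-end sketch of the ctx-threaded flow, assuming HashObject and WriteTree return hex hashes as strings (the helper name is hypothetical):

package files

import (
	"context"
	"strings"

	repo_model "code.gitea.io/gitea/models/repo"
)

// stageFileContent clones the branch bare, seeds the index from HEAD, hashes
// the new content into the object database, stages it, and writes the tree.
func stageFileContent(ctx context.Context, repo *repo_model.Repository, branch, treePath, content string) (string, error) {
	t, err := NewTemporaryUploadRepository(repo)
	if err != nil {
		return "", err
	}
	defer t.Close()

	if err := t.Clone(ctx, branch, true); err != nil {
		return "", err
	}
	if err := t.SetDefaultIndex(ctx); err != nil {
		return "", err
	}
	objectHash, err := t.HashObject(ctx, strings.NewReader(content))
	if err != nil {
		return "", err
	}
	if err := t.AddObjectToIndex(ctx, "100644", objectHash, treePath); err != nil {
		return "", err
	}
	return t.WriteTree(ctx)
}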
diff --git a/services/repository/files/temp_repo.go b/services/repository/files/temp_repo.go
index cf1402397b..d2c70a7a34 100644
--- a/services/repository/files/temp_repo.go
+++ b/services/repository/files/temp_repo.go
@@ -26,19 +26,18 @@ import (
// TemporaryUploadRepository is a type to wrap our upload repositories as a shallow clone
type TemporaryUploadRepository struct {
- ctx context.Context
repo *repo_model.Repository
gitRepo *git.Repository
basePath string
}
// NewTemporaryUploadRepository creates a new temporary upload repository
-func NewTemporaryUploadRepository(ctx context.Context, repo *repo_model.Repository) (*TemporaryUploadRepository, error) {
+func NewTemporaryUploadRepository(repo *repo_model.Repository) (*TemporaryUploadRepository, error) {
basePath, err := repo_module.CreateTemporaryPath("upload")
if err != nil {
return nil, err
}
- t := &TemporaryUploadRepository{ctx: ctx, repo: repo, basePath: basePath}
+ t := &TemporaryUploadRepository{repo: repo, basePath: basePath}
return t, nil
}
@@ -51,13 +50,13 @@ func (t *TemporaryUploadRepository) Close() {
}
// Clone the base repository to our path and set branch as the HEAD
-func (t *TemporaryUploadRepository) Clone(branch string, bare bool) error {
- cmd := git.NewCommand(t.ctx, "clone", "-s", "-b").AddDynamicArguments(branch, t.repo.RepoPath(), t.basePath)
+func (t *TemporaryUploadRepository) Clone(ctx context.Context, branch string, bare bool) error {
+ cmd := git.NewCommand("clone", "-s", "-b").AddDynamicArguments(branch, t.repo.RepoPath(), t.basePath)
if bare {
cmd.AddArguments("--bare")
}
- if _, _, err := cmd.RunStdString(nil); err != nil {
+ if _, _, err := cmd.RunStdString(ctx, nil); err != nil {
stderr := err.Error()
if matched, _ := regexp.MatchString(".*Remote branch .* not found in upstream origin.*", stderr); matched {
return git.ErrBranchNotExist{
@@ -73,7 +72,7 @@ func (t *TemporaryUploadRepository) Clone(branch string, bare bool) error {
}
return fmt.Errorf("Clone: %w %s", err, stderr)
}
- gitRepo, err := git.OpenRepository(t.ctx, t.basePath)
+ gitRepo, err := git.OpenRepository(ctx, t.basePath)
if err != nil {
return err
}
@@ -82,11 +81,11 @@ func (t *TemporaryUploadRepository) Clone(branch string, bare bool) error {
}
// Init the repository
-func (t *TemporaryUploadRepository) Init(objectFormatName string) error {
- if err := git.InitRepository(t.ctx, t.basePath, false, objectFormatName); err != nil {
+func (t *TemporaryUploadRepository) Init(ctx context.Context, objectFormatName string) error {
+ if err := git.InitRepository(ctx, t.basePath, false, objectFormatName); err != nil {
return err
}
- gitRepo, err := git.OpenRepository(t.ctx, t.basePath)
+ gitRepo, err := git.OpenRepository(ctx, t.basePath)
if err != nil {
return err
}
@@ -95,28 +94,28 @@ func (t *TemporaryUploadRepository) Init(objectFormatName string) error {
}
// SetDefaultIndex sets the git index to our HEAD
-func (t *TemporaryUploadRepository) SetDefaultIndex() error {
- if _, _, err := git.NewCommand(t.ctx, "read-tree", "HEAD").RunStdString(&git.RunOpts{Dir: t.basePath}); err != nil {
+func (t *TemporaryUploadRepository) SetDefaultIndex(ctx context.Context) error {
+ if _, _, err := git.NewCommand("read-tree", "HEAD").RunStdString(ctx, &git.RunOpts{Dir: t.basePath}); err != nil {
return fmt.Errorf("SetDefaultIndex: %w", err)
}
return nil
}
// RefreshIndex looks at the current index and checks to see if merges or updates are needed by checking stat() information.
-func (t *TemporaryUploadRepository) RefreshIndex() error {
- if _, _, err := git.NewCommand(t.ctx, "update-index", "--refresh").RunStdString(&git.RunOpts{Dir: t.basePath}); err != nil {
+func (t *TemporaryUploadRepository) RefreshIndex(ctx context.Context) error {
+ if _, _, err := git.NewCommand("update-index", "--refresh").RunStdString(ctx, &git.RunOpts{Dir: t.basePath}); err != nil {
return fmt.Errorf("RefreshIndex: %w", err)
}
return nil
}
// LsFiles checks if the given filename arguments are in the index
-func (t *TemporaryUploadRepository) LsFiles(filenames ...string) ([]string, error) {
+func (t *TemporaryUploadRepository) LsFiles(ctx context.Context, filenames ...string) ([]string, error) {
stdOut := new(bytes.Buffer)
stdErr := new(bytes.Buffer)
- if err := git.NewCommand(t.ctx, "ls-files", "-z").AddDashesAndList(filenames...).
- Run(&git.RunOpts{
+ if err := git.NewCommand("ls-files", "-z").AddDashesAndList(filenames...).
+ Run(ctx, &git.RunOpts{
Dir: t.basePath,
Stdout: stdOut,
Stderr: stdErr,
@@ -135,7 +134,7 @@ func (t *TemporaryUploadRepository) LsFiles(filenames ...string) ([]string, erro
}
// RemoveFilesFromIndex removes the given files from the index
-func (t *TemporaryUploadRepository) RemoveFilesFromIndex(filenames ...string) error {
+func (t *TemporaryUploadRepository) RemoveFilesFromIndex(ctx context.Context, filenames ...string) error {
objFmt, err := t.gitRepo.GetObjectFormat()
if err != nil {
return fmt.Errorf("unable to get object format for temporary repo: %q, error: %w", t.repo.FullName(), err)
@@ -151,8 +150,8 @@ func (t *TemporaryUploadRepository) RemoveFilesFromIndex(filenames ...string) er
}
}
- if err := git.NewCommand(t.ctx, "update-index", "--remove", "-z", "--index-info").
- Run(&git.RunOpts{
+ if err := git.NewCommand("update-index", "--remove", "-z", "--index-info").
+ Run(ctx, &git.RunOpts{
Dir: t.basePath,
Stdin: stdIn,
Stdout: stdOut,
@@ -164,12 +163,12 @@ func (t *TemporaryUploadRepository) RemoveFilesFromIndex(filenames ...string) er
}
// HashObject writes the provided content to the object db and returns its hash
-func (t *TemporaryUploadRepository) HashObject(content io.Reader) (string, error) {
+func (t *TemporaryUploadRepository) HashObject(ctx context.Context, content io.Reader) (string, error) {
stdOut := new(bytes.Buffer)
stdErr := new(bytes.Buffer)
- if err := git.NewCommand(t.ctx, "hash-object", "-w", "--stdin").
- Run(&git.RunOpts{
+ if err := git.NewCommand("hash-object", "-w", "--stdin").
+ Run(ctx, &git.RunOpts{
Dir: t.basePath,
Stdin: content,
Stdout: stdOut,
@@ -183,8 +182,8 @@ func (t *TemporaryUploadRepository) HashObject(content io.Reader) (string, error
}
// AddObjectToIndex adds the provided object hash to the index with the provided mode and path
-func (t *TemporaryUploadRepository) AddObjectToIndex(mode, objectHash, objectPath string) error {
- if _, _, err := git.NewCommand(t.ctx, "update-index", "--add", "--replace", "--cacheinfo").AddDynamicArguments(mode, objectHash, objectPath).RunStdString(&git.RunOpts{Dir: t.basePath}); err != nil {
+func (t *TemporaryUploadRepository) AddObjectToIndex(ctx context.Context, mode, objectHash, objectPath string) error {
+ if _, _, err := git.NewCommand("update-index", "--add", "--replace", "--cacheinfo").AddDynamicArguments(mode, objectHash, objectPath).RunStdString(ctx, &git.RunOpts{Dir: t.basePath}); err != nil {
stderr := err.Error()
if matched, _ := regexp.MatchString(".*Invalid path '.*", stderr); matched {
return ErrFilePathInvalid{
@@ -199,8 +198,8 @@ func (t *TemporaryUploadRepository) AddObjectToIndex(mode, objectHash, objectPat
}
// WriteTree writes the current index as a tree to the object db and returns its hash
-func (t *TemporaryUploadRepository) WriteTree() (string, error) {
- stdout, _, err := git.NewCommand(t.ctx, "write-tree").RunStdString(&git.RunOpts{Dir: t.basePath})
+func (t *TemporaryUploadRepository) WriteTree(ctx context.Context) (string, error) {
+ stdout, _, err := git.NewCommand("write-tree").RunStdString(ctx, &git.RunOpts{Dir: t.basePath})
if err != nil {
log.Error("Unable to write tree in temporary repo: %s(%s): Error: %v", t.repo.FullName(), t.basePath, err)
return "", fmt.Errorf("Unable to write-tree in temporary repo for: %s Error: %w", t.repo.FullName(), err)
@@ -209,16 +208,16 @@ func (t *TemporaryUploadRepository) WriteTree() (string, error) {
}
// GetLastCommit gets the last commit ID SHA of the repo
-func (t *TemporaryUploadRepository) GetLastCommit() (string, error) {
- return t.GetLastCommitByRef("HEAD")
+func (t *TemporaryUploadRepository) GetLastCommit(ctx context.Context) (string, error) {
+ return t.GetLastCommitByRef(ctx, "HEAD")
}
// GetLastCommitByRef gets the last commit ID SHA of the repo by ref
-func (t *TemporaryUploadRepository) GetLastCommitByRef(ref string) (string, error) {
+func (t *TemporaryUploadRepository) GetLastCommitByRef(ctx context.Context, ref string) (string, error) {
if ref == "" {
ref = "HEAD"
}
- stdout, _, err := git.NewCommand(t.ctx, "rev-parse").AddDynamicArguments(ref).RunStdString(&git.RunOpts{Dir: t.basePath})
+ stdout, _, err := git.NewCommand("rev-parse").AddDynamicArguments(ref).RunStdString(ctx, &git.RunOpts{Dir: t.basePath})
if err != nil {
log.Error("Unable to get last ref for %s in temporary repo: %s(%s): Error: %v", ref, t.repo.FullName(), t.basePath, err)
return "", fmt.Errorf("Unable to rev-parse %s in temporary repo for: %s Error: %w", ref, t.repo.FullName(), err)
@@ -259,7 +258,7 @@ func makeGitUserSignature(doer *user_model.User, identity, other *IdentityOption
}
// CommitTree creates a commit from a given tree for the user with provided message
-func (t *TemporaryUploadRepository) CommitTree(opts *CommitTreeUserOptions) (string, error) {
+func (t *TemporaryUploadRepository) CommitTree(ctx context.Context, opts *CommitTreeUserOptions) (string, error) {
authorSig := makeGitUserSignature(opts.DoerUser, opts.AuthorIdentity, opts.CommitterIdentity)
committerSig := makeGitUserSignature(opts.DoerUser, opts.CommitterIdentity, opts.AuthorIdentity)
@@ -286,7 +285,7 @@ func (t *TemporaryUploadRepository) CommitTree(opts *CommitTreeUserOptions) (str
_, _ = messageBytes.WriteString(opts.CommitMessage)
_, _ = messageBytes.WriteString("\n")
- cmdCommitTree := git.NewCommand(t.ctx, "commit-tree").AddDynamicArguments(opts.TreeHash)
+ cmdCommitTree := git.NewCommand("commit-tree").AddDynamicArguments(opts.TreeHash)
if opts.ParentCommitID != "" {
cmdCommitTree.AddOptionValues("-p", opts.ParentCommitID)
}
@@ -295,9 +294,9 @@ func (t *TemporaryUploadRepository) CommitTree(opts *CommitTreeUserOptions) (str
var keyID string
var signer *git.Signature
if opts.ParentCommitID != "" {
- sign, keyID, signer, _ = asymkey_service.SignCRUDAction(t.ctx, t.repo.RepoPath(), opts.DoerUser, t.basePath, opts.ParentCommitID)
+ sign, keyID, signer, _ = asymkey_service.SignCRUDAction(ctx, t.repo.RepoPath(), opts.DoerUser, t.basePath, opts.ParentCommitID)
} else {
- sign, keyID, signer, _ = asymkey_service.SignInitialCommit(t.ctx, t.repo.RepoPath(), opts.DoerUser)
+ sign, keyID, signer, _ = asymkey_service.SignInitialCommit(ctx, t.repo.RepoPath(), opts.DoerUser)
}
if sign {
cmdCommitTree.AddOptionFormat("-S%s", keyID)
@@ -333,7 +332,7 @@ func (t *TemporaryUploadRepository) CommitTree(opts *CommitTreeUserOptions) (str
stdout := new(bytes.Buffer)
stderr := new(bytes.Buffer)
if err := cmdCommitTree.
- Run(&git.RunOpts{
+ Run(ctx, &git.RunOpts{
Env: env,
Dir: t.basePath,
Stdin: messageBytes,
@@ -349,10 +348,10 @@ func (t *TemporaryUploadRepository) CommitTree(opts *CommitTreeUserOptions) (str
}
// Push the provided commitHash to the repository branch by the provided user
-func (t *TemporaryUploadRepository) Push(doer *user_model.User, commitHash, branch string) error {
+func (t *TemporaryUploadRepository) Push(ctx context.Context, doer *user_model.User, commitHash, branch string) error {
// Because this calls hooks we need to pass in the environment
env := repo_module.PushingEnvironment(doer, t.repo)
- if err := git.Push(t.ctx, t.basePath, git.PushOptions{
+ if err := git.Push(ctx, t.basePath, git.PushOptions{
Remote: t.repo.RepoPath(),
Branch: strings.TrimSpace(commitHash) + ":" + git.BranchPrefix + strings.TrimSpace(branch),
Env: env,
@@ -374,7 +373,7 @@ func (t *TemporaryUploadRepository) Push(doer *user_model.User, commitHash, bran
}
// DiffIndex returns a Diff of the current index to the head
-func (t *TemporaryUploadRepository) DiffIndex() (*gitdiff.Diff, error) {
+func (t *TemporaryUploadRepository) DiffIndex(ctx context.Context) (*gitdiff.Diff, error) {
stdoutReader, stdoutWriter, err := os.Pipe()
if err != nil {
return nil, fmt.Errorf("unable to open stdout pipe: %w", err)
@@ -385,8 +384,8 @@ func (t *TemporaryUploadRepository) DiffIndex() (*gitdiff.Diff, error) {
}()
stderr := new(bytes.Buffer)
var diff *gitdiff.Diff
- err = git.NewCommand(t.ctx, "diff-index", "--src-prefix=\\a/", "--dst-prefix=\\b/", "--cached", "-p", "HEAD").
- Run(&git.RunOpts{
+ err = git.NewCommand("diff-index", "--src-prefix=\\a/", "--dst-prefix=\\b/", "--cached", "-p", "HEAD").
+ Run(ctx, &git.RunOpts{
Timeout: 30 * time.Second,
Dir: t.basePath,
Stdout: stdoutWriter,
@@ -395,7 +394,7 @@ func (t *TemporaryUploadRepository) DiffIndex() (*gitdiff.Diff, error) {
_ = stdoutWriter.Close()
defer cancel()
var diffErr error
- diff, diffErr = gitdiff.ParsePatch(t.ctx, setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, stdoutReader, "")
+ diff, diffErr = gitdiff.ParsePatch(ctx, setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, stdoutReader, "")
_ = stdoutReader.Close()
if diffErr != nil {
// if the diffErr is not nil, it will be returned as the error of "Run()"
@@ -409,11 +408,6 @@ func (t *TemporaryUploadRepository) DiffIndex() (*gitdiff.Diff, error) {
return nil, fmt.Errorf("unable to run diff-index pipeline in temporary repo: %w", err)
}
- diff.NumFiles, diff.TotalAddition, diff.TotalDeletion, err = git.GetDiffShortStat(t.ctx, t.basePath, git.TrustedCmdArgs{"--cached"}, "HEAD")
- if err != nil {
- return nil, err
- }
-
return diff, nil
}
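The net effect of the temp_repo.go change above is that TemporaryUploadRepository no longer captures a context at construction time; every operation takes the caller's context.Context as an explicit argument. A minimal caller-side sketch under that assumption follows (the helper name and file content are illustrative, and CommitTreeUserOptions fields not visible in this patch are left out and assumed to default sensibly):

```go
package files

import (
	"context"
	"strings"

	repo_model "code.gitea.io/gitea/models/repo"
	user_model "code.gitea.io/gitea/models/user"
)

// exampleCommitSingleFile is a hypothetical helper showing the new calling
// convention: ctx is an explicit argument on every method instead of living
// on the TemporaryUploadRepository struct.
func exampleCommitSingleFile(ctx context.Context, repo *repo_model.Repository, doer *user_model.User, branch string) error {
	t, err := NewTemporaryUploadRepository(repo) // no ctx at construction time anymore
	if err != nil {
		return err
	}
	defer t.Close()

	if err := t.Clone(ctx, branch, true); err != nil {
		return err
	}
	if err := t.SetDefaultIndex(ctx); err != nil {
		return err
	}

	// Stage a blob, write the tree, commit it and push, passing ctx each time.
	objectHash, err := t.HashObject(ctx, strings.NewReader("hello\n"))
	if err != nil {
		return err
	}
	if err := t.AddObjectToIndex(ctx, "100644", objectHash, "hello.txt"); err != nil {
		return err
	}
	treeHash, err := t.WriteTree(ctx)
	if err != nil {
		return err
	}
	parent, err := t.GetLastCommit(ctx)
	if err != nil {
		return err
	}
	commitHash, err := t.CommitTree(ctx, &CommitTreeUserOptions{
		DoerUser:       doer,
		TreeHash:       treeHash,
		ParentCommitID: parent,
		CommitMessage:  "add hello.txt", // author/committer identities omitted; assumed to default to the doer
	})
	if err != nil {
		return err
	}
	return t.Push(ctx, doer, commitHash, branch)
}
```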
diff --git a/services/repository/files/tree.go b/services/repository/files/tree.go
index 6775186afd..9142416347 100644
--- a/services/repository/files/tree.go
+++ b/services/repository/files/tree.go
@@ -7,9 +7,13 @@ import (
"context"
"fmt"
"net/url"
+ "path"
+ "sort"
+ "strings"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/util"
@@ -118,3 +122,98 @@ func GetTreeBySHA(ctx context.Context, repo *repo_model.Repository, gitRepo *git
}
return tree, nil
}
+
+func entryModeString(entryMode git.EntryMode) string {
+ switch entryMode {
+ case git.EntryModeBlob:
+ return "blob"
+ case git.EntryModeExec:
+ return "exec"
+ case git.EntryModeSymlink:
+ return "symlink"
+ case git.EntryModeCommit:
+ return "commit" // submodule
+ case git.EntryModeTree:
+ return "tree"
+ }
+ return "unknown"
+}
+
+type TreeViewNode struct {
+ EntryName string `json:"entryName"`
+ EntryMode string `json:"entryMode"`
+ FullPath string `json:"fullPath"`
+ SubmoduleURL string `json:"submoduleUrl,omitempty"`
+ Children []*TreeViewNode `json:"children,omitempty"`
+}
+
+func (node *TreeViewNode) sortLevel() int {
+ return util.Iif(node.EntryMode == "tree" || node.EntryMode == "commit", 0, 1)
+}
+
+func newTreeViewNodeFromEntry(ctx context.Context, commit *git.Commit, parentDir string, entry *git.TreeEntry) *TreeViewNode {
+ node := &TreeViewNode{
+ EntryName: entry.Name(),
+ EntryMode: entryModeString(entry.Mode()),
+ FullPath: path.Join(parentDir, entry.Name()),
+ }
+
+ if node.EntryMode == "commit" {
+ if subModule, err := commit.GetSubModule(node.FullPath); err != nil {
+ log.Error("GetSubModule: %v", err)
+ } else if subModule != nil {
+ submoduleFile := git.NewCommitSubmoduleFile(subModule.URL, entry.ID.String())
+ webLink := submoduleFile.SubmoduleWebLink(ctx)
+ node.SubmoduleURL = webLink.CommitWebLink
+ }
+ }
+
+ return node
+}
+
+// sortTreeViewNodes sorts directories (and submodules) first, then alphabetically by entry name
+func sortTreeViewNodes(nodes []*TreeViewNode) {
+ sort.Slice(nodes, func(i, j int) bool {
+ a, b := nodes[i].sortLevel(), nodes[j].sortLevel()
+ if a != b {
+ return a < b
+ }
+ return nodes[i].EntryName < nodes[j].EntryName
+ })
+}
+
+func listTreeNodes(ctx context.Context, commit *git.Commit, tree *git.Tree, treePath, subPath string) ([]*TreeViewNode, error) {
+ entries, err := tree.ListEntries()
+ if err != nil {
+ return nil, err
+ }
+
+ subPathDirName, subPathRemaining, _ := strings.Cut(subPath, "/")
+ nodes := make([]*TreeViewNode, 0, len(entries))
+ for _, entry := range entries {
+ node := newTreeViewNodeFromEntry(ctx, commit, treePath, entry)
+ nodes = append(nodes, node)
+ if entry.IsDir() && subPathDirName == entry.Name() {
+ subTreePath := treePath + "/" + node.EntryName
+ if subTreePath[0] == '/' {
+ subTreePath = subTreePath[1:]
+ }
+ subNodes, err := listTreeNodes(ctx, commit, entry.Tree(), subTreePath, subPathRemaining)
+ if err != nil {
+ log.Error("listTreeNodes: %v", err)
+ } else {
+ node.Children = subNodes
+ }
+ }
+ }
+ sortTreeViewNodes(nodes)
+ return nodes, nil
+}
+
+func GetTreeViewNodes(ctx context.Context, commit *git.Commit, treePath, subPath string) ([]*TreeViewNode, error) {
+ entry, err := commit.GetTreeEntryByPath(treePath)
+ if err != nil {
+ return nil, err
+ }
+ return listTreeNodes(ctx, commit, entry.Tree(), treePath, subPath)
+}
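GetTreeViewNodes, added above, returns the entries directly under treePath and, when subPath names something deeper, pre-expands the directories along that path via Children (see the tests that follow). A hedged sketch of consuming the result; printTreeView is an illustrative helper, not part of the patch:

```go
package files

import (
	"context"
	"fmt"
	"strings"

	"code.gitea.io/gitea/modules/git"
)

// printTreeView dumps the tree view for the repository root, expanded down to
// docs/README.md, with one line per node and indentation per depth.
func printTreeView(ctx context.Context, commit *git.Commit) error {
	nodes, err := GetTreeViewNodes(ctx, commit, "", "docs/README.md")
	if err != nil {
		return err
	}
	var dump func(nodes []*TreeViewNode, depth int)
	dump = func(nodes []*TreeViewNode, depth int) {
		for _, n := range nodes {
			fmt.Printf("%s%s (%s) %s\n", strings.Repeat("  ", depth), n.EntryName, n.EntryMode, n.FullPath)
			dump(n.Children, depth+1) // Children is only populated along the requested subPath
		}
	}
	dump(nodes, 0)
	return nil
}
```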
diff --git a/services/repository/files/tree_test.go b/services/repository/files/tree_test.go
index 0c60fddf7b..cbb800da01 100644
--- a/services/repository/files/tree_test.go
+++ b/services/repository/files/tree_test.go
@@ -7,6 +7,7 @@ import (
"testing"
"code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/git"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/services/contexttest"
@@ -48,5 +49,53 @@ func TestGetTreeBySHA(t *testing.T) {
TotalCount: 1,
}
- assert.EqualValues(t, expectedTree, tree)
+ assert.Equal(t, expectedTree, tree)
+}
+
+func TestGetTreeViewNodes(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1")
+ ctx.Repo.RefFullName = git.RefNameFromBranch("sub-home-md-img-check")
+ contexttest.LoadRepo(t, ctx, 1)
+ contexttest.LoadRepoCommit(t, ctx)
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadGitRepo(t, ctx)
+ defer ctx.Repo.GitRepo.Close()
+
+ treeNodes, err := GetTreeViewNodes(ctx, ctx.Repo.Commit, "", "")
+ assert.NoError(t, err)
+ assert.Equal(t, []*TreeViewNode{
+ {
+ EntryName: "docs",
+ EntryMode: "tree",
+ FullPath: "docs",
+ },
+ }, treeNodes)
+
+ treeNodes, err = GetTreeViewNodes(ctx, ctx.Repo.Commit, "", "docs/README.md")
+ assert.NoError(t, err)
+ assert.Equal(t, []*TreeViewNode{
+ {
+ EntryName: "docs",
+ EntryMode: "tree",
+ FullPath: "docs",
+ Children: []*TreeViewNode{
+ {
+ EntryName: "README.md",
+ EntryMode: "blob",
+ FullPath: "docs/README.md",
+ },
+ },
+ },
+ }, treeNodes)
+
+ treeNodes, err = GetTreeViewNodes(ctx, ctx.Repo.Commit, "docs", "README.md")
+ assert.NoError(t, err)
+ assert.Equal(t, []*TreeViewNode{
+ {
+ EntryName: "README.md",
+ EntryMode: "blob",
+ FullPath: "docs/README.md",
+ },
+ }, treeNodes)
}
diff --git a/services/repository/files/update.go b/services/repository/files/update.go
index a707ea8bb6..cade7ba2bf 100644
--- a/services/repository/files/update.go
+++ b/services/repository/files/update.go
@@ -160,13 +160,13 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use
message := strings.TrimSpace(opts.Message)
- t, err := NewTemporaryUploadRepository(ctx, repo)
+ t, err := NewTemporaryUploadRepository(repo)
if err != nil {
log.Error("NewTemporaryUploadRepository failed: %v", err)
}
defer t.Close()
hasOldBranch := true
- if err := t.Clone(opts.OldBranch, true); err != nil {
+ if err := t.Clone(ctx, opts.OldBranch, true); err != nil {
for _, file := range opts.Files {
if file.Operation == "delete" {
return nil, err
@@ -175,14 +175,14 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use
if !git.IsErrBranchNotExist(err) || !repo.IsEmpty {
return nil, err
}
- if err := t.Init(repo.ObjectFormatName); err != nil {
+ if err := t.Init(ctx, repo.ObjectFormatName); err != nil {
return nil, err
}
hasOldBranch = false
opts.LastCommitID = ""
}
if hasOldBranch {
- if err := t.SetDefaultIndex(); err != nil {
+ if err := t.SetDefaultIndex(ctx); err != nil {
return nil, err
}
}
@@ -190,7 +190,7 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use
for _, file := range opts.Files {
if file.Operation == "delete" {
// Get the files in the index
- filesInIndex, err := t.LsFiles(file.TreePath)
+ filesInIndex, err := t.LsFiles(ctx, file.TreePath)
if err != nil {
return nil, fmt.Errorf("DeleteRepoFile: %w", err)
}
@@ -245,7 +245,7 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use
}
case "delete":
// Remove the file from the index
- if err := t.RemoveFilesFromIndex(file.TreePath); err != nil {
+ if err := t.RemoveFilesFromIndex(ctx, file.TreePath); err != nil {
return nil, err
}
default:
@@ -254,7 +254,7 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use
}
// Now write the tree
- treeHash, err := t.WriteTree()
+ treeHash, err := t.WriteTree(ctx)
if err != nil {
return nil, err
}
@@ -274,13 +274,13 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use
if opts.Dates != nil {
commitOpts.AuthorTime, commitOpts.CommitterTime = &opts.Dates.Author, &opts.Dates.Committer
}
- commitHash, err := t.CommitTree(commitOpts)
+ commitHash, err := t.CommitTree(ctx, commitOpts)
if err != nil {
return nil, err
}
// Then push this tree to NewBranch
- if err := t.Push(doer, commitHash, opts.NewBranch); err != nil {
+ if err := t.Push(ctx, doer, commitHash, opts.NewBranch); err != nil {
log.Error("%T %v", err, err)
return nil, err
}
@@ -453,7 +453,7 @@ func handleCheckErrors(file *ChangeRepoFile, commit *git.Commit, opts *ChangeRep
// CreateOrUpdateFile handles creating or updating a file for ChangeRepoFiles
func CreateOrUpdateFile(ctx context.Context, t *TemporaryUploadRepository, file *ChangeRepoFile, contentStore *lfs.ContentStore, repoID int64, hasOldBranch bool) error {
// Get the two paths (might be the same if not moving) from the index if they exist
- filesInIndex, err := t.LsFiles(file.TreePath, file.FromTreePath)
+ filesInIndex, err := t.LsFiles(ctx, file.TreePath, file.FromTreePath)
if err != nil {
return fmt.Errorf("UpdateRepoFile: %w", err)
}
@@ -472,7 +472,7 @@ func CreateOrUpdateFile(ctx context.Context, t *TemporaryUploadRepository, file
if file.Options.fromTreePath != file.Options.treePath && len(filesInIndex) > 0 {
for _, indexFile := range filesInIndex {
if indexFile == file.Options.fromTreePath {
- if err := t.RemoveFilesFromIndex(file.FromTreePath); err != nil {
+ if err := t.RemoveFilesFromIndex(ctx, file.FromTreePath); err != nil {
return err
}
}
@@ -504,18 +504,18 @@ func CreateOrUpdateFile(ctx context.Context, t *TemporaryUploadRepository, file
}
// Add the object to the database
- objectHash, err := t.HashObject(treeObjectContentReader)
+ objectHash, err := t.HashObject(ctx, treeObjectContentReader)
if err != nil {
return err
}
// Add the object to the index
if file.Options.executable {
- if err := t.AddObjectToIndex("100755", objectHash, file.Options.treePath); err != nil {
+ if err := t.AddObjectToIndex(ctx, "100755", objectHash, file.Options.treePath); err != nil {
return err
}
} else {
- if err := t.AddObjectToIndex("100644", objectHash, file.Options.treePath); err != nil {
+ if err := t.AddObjectToIndex(ctx, "100644", objectHash, file.Options.treePath); err != nil {
return err
}
}
diff --git a/services/repository/files/upload.go b/services/repository/files/upload.go
index 3c58598427..2e4ed1744e 100644
--- a/services/repository/files/upload.go
+++ b/services/repository/files/upload.go
@@ -82,25 +82,25 @@ func UploadRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use
infos[i] = uploadInfo{upload: upload}
}
- t, err := NewTemporaryUploadRepository(ctx, repo)
+ t, err := NewTemporaryUploadRepository(repo)
if err != nil {
return err
}
defer t.Close()
hasOldBranch := true
- if err = t.Clone(opts.OldBranch, true); err != nil {
+ if err = t.Clone(ctx, opts.OldBranch, true); err != nil {
if !git.IsErrBranchNotExist(err) || !repo.IsEmpty {
return err
}
- if err = t.Init(repo.ObjectFormatName); err != nil {
+ if err = t.Init(ctx, repo.ObjectFormatName); err != nil {
return err
}
hasOldBranch = false
opts.LastCommitID = ""
}
if hasOldBranch {
- if err = t.SetDefaultIndex(); err != nil {
+ if err = t.SetDefaultIndex(ctx); err != nil {
return err
}
}
@@ -119,13 +119,13 @@ func UploadRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use
// Copy uploaded files into repository.
for i := range infos {
- if err := copyUploadedLFSFileIntoRepository(&infos[i], filename2attribute2info, t, opts.TreePath); err != nil {
+ if err := copyUploadedLFSFileIntoRepository(ctx, &infos[i], filename2attribute2info, t, opts.TreePath); err != nil {
return err
}
}
// Now write the tree
- treeHash, err := t.WriteTree()
+ treeHash, err := t.WriteTree(ctx)
if err != nil {
return err
}
@@ -140,7 +140,7 @@ func UploadRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use
AuthorIdentity: opts.Author,
CommitterIdentity: opts.Committer,
}
- commitHash, err := t.CommitTree(commitOpts)
+ commitHash, err := t.CommitTree(ctx, commitOpts)
if err != nil {
return err
}
@@ -169,14 +169,14 @@ func UploadRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use
}
// Then push this tree to NewBranch
- if err := t.Push(doer, commitHash, opts.NewBranch); err != nil {
+ if err := t.Push(ctx, doer, commitHash, opts.NewBranch); err != nil {
return err
}
return repo_model.DeleteUploads(ctx, uploads...)
}
-func copyUploadedLFSFileIntoRepository(info *uploadInfo, filename2attribute2info map[string]map[string]string, t *TemporaryUploadRepository, treePath string) error {
+func copyUploadedLFSFileIntoRepository(ctx context.Context, info *uploadInfo, filename2attribute2info map[string]map[string]string, t *TemporaryUploadRepository, treePath string) error {
file, err := os.Open(info.upload.LocalPath())
if err != nil {
return err
@@ -194,15 +194,15 @@ func copyUploadedLFSFileIntoRepository(info *uploadInfo, filename2attribute2info
info.lfsMetaObject = &git_model.LFSMetaObject{Pointer: pointer, RepositoryID: t.repo.ID}
- if objectHash, err = t.HashObject(strings.NewReader(pointer.StringContent())); err != nil {
+ if objectHash, err = t.HashObject(ctx, strings.NewReader(pointer.StringContent())); err != nil {
return err
}
- } else if objectHash, err = t.HashObject(file); err != nil {
+ } else if objectHash, err = t.HashObject(ctx, file); err != nil {
return err
}
// Add the object to the index
- return t.AddObjectToIndex("100644", objectHash, path.Join(treePath, info.upload.Name))
+ return t.AddObjectToIndex(ctx, "100644", objectHash, path.Join(treePath, info.upload.Name))
}
func uploadToLFSContentStore(info uploadInfo, contentStore *lfs.ContentStore) error {
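For reference, the LFS branch in copyUploadedLFSFileIntoRepository reduces to the sketch below: when the path is matched by an LFS filter attribute, only the pointer text is hashed into git and the blob itself is left for the LFS content store. stageUploadedFile is hypothetical, and the lfs.Pointer value is assumed to have been prepared by the caller:

```go
package files

import (
	"context"
	"io"
	"path"
	"strings"

	"code.gitea.io/gitea/modules/lfs"
)

// stageUploadedFile hashes either the LFS pointer text or the raw content, then
// stages the resulting blob at mode 100644 under the target tree path.
func stageUploadedFile(ctx context.Context, t *TemporaryUploadRepository, treePath, name string, content io.Reader, pointer lfs.Pointer, useLFS bool) error {
	var objectHash string
	var err error
	if useLFS {
		objectHash, err = t.HashObject(ctx, strings.NewReader(pointer.StringContent()))
	} else {
		objectHash, err = t.HashObject(ctx, content)
	}
	if err != nil {
		return err
	}
	return t.AddObjectToIndex(ctx, "100644", objectHash, path.Join(treePath, name))
}
```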
diff --git a/services/repository/fork.go b/services/repository/fork.go
index 8d89c2b0b0..5b1ba7a418 100644
--- a/services/repository/fork.go
+++ b/services/repository/fork.go
@@ -110,15 +110,13 @@ func ForkRepository(ctx context.Context, doer, owner *user_model.User, opts Fork
return
}
- repoPath := repo_model.RepoPath(owner.Name, repo.Name)
-
- if exists, _ := util.IsExist(repoPath); !exists {
+ if exists, _ := gitrepo.IsRepositoryExist(ctx, repo); !exists {
return
}
// As the transaction will have failed and hence database changes will be destroyed, we only need
// to delete the related repository on the filesystem
- if errDelete := util.RemoveAll(repoPath); errDelete != nil {
+ if errDelete := gitrepo.DeleteRepository(ctx, repo); errDelete != nil {
log.Error("Failed to remove fork repo")
}
}
@@ -152,13 +150,12 @@ func ForkRepository(ctx context.Context, doer, owner *user_model.User, opts Fork
needsRollback = true
- cloneCmd := git.NewCommand(txCtx, "clone", "--bare")
+ cloneCmd := git.NewCommand("clone", "--bare")
if opts.SingleBranch != "" {
cloneCmd.AddArguments("--single-branch", "--branch").AddDynamicArguments(opts.SingleBranch)
}
- repoPath := repo_model.RepoPath(owner.Name, repo.Name)
- if stdout, _, err := cloneCmd.AddDynamicArguments(oldRepoPath, repoPath).
- RunStdBytes(&git.RunOpts{Timeout: 10 * time.Minute}); err != nil {
+ if stdout, _, err := cloneCmd.AddDynamicArguments(oldRepoPath, repo.RepoPath()).
+ RunStdBytes(txCtx, &git.RunOpts{Timeout: 10 * time.Minute}); err != nil {
log.Error("Fork Repository (git clone) Failed for %v (from %v):\nStdout: %s\nError: %v", repo, opts.BaseRepo, stdout, err)
return fmt.Errorf("git clone: %w", err)
}
@@ -167,13 +164,13 @@ func ForkRepository(ctx context.Context, doer, owner *user_model.User, opts Fork
return fmt.Errorf("checkDaemonExportOK: %w", err)
}
- if stdout, _, err := git.NewCommand(txCtx, "update-server-info").
- RunStdString(&git.RunOpts{Dir: repoPath}); err != nil {
+ if stdout, _, err := git.NewCommand("update-server-info").
+ RunStdString(txCtx, &git.RunOpts{Dir: repo.RepoPath()}); err != nil {
log.Error("Fork Repository (git update-server-info) failed for %v:\nStdout: %s\nError: %v", repo, stdout, err)
return fmt.Errorf("git update-server-info: %w", err)
}
- if err = repo_module.CreateDelegateHooks(repoPath); err != nil {
+ if err = gitrepo.CreateDelegateHooks(ctx, repo); err != nil {
return fmt.Errorf("createDelegateHooks: %w", err)
}
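The fork rollback now goes through gitrepo helpers keyed on the repository model rather than hand-built filesystem paths. A sketch of that pattern in isolation (rollbackForkOnDisk is an illustrative name, not part of the patch):

```go
package repository

import (
	"context"

	repo_model "code.gitea.io/gitea/models/repo"
	"code.gitea.io/gitea/modules/gitrepo"
	"code.gitea.io/gitea/modules/log"
)

// rollbackForkOnDisk removes the on-disk clone if it exists; gitrepo owns the
// knowledge of where repositories live on the filesystem.
func rollbackForkOnDisk(ctx context.Context, repo *repo_model.Repository) {
	if exists, _ := gitrepo.IsRepositoryExist(ctx, repo); !exists {
		return // nothing was cloned yet, nothing to clean up
	}
	if err := gitrepo.DeleteRepository(ctx, repo); err != nil {
		log.Error("Failed to remove fork repo %s: %v", repo.FullName(), err)
	}
}
```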
diff --git a/services/repository/generate.go b/services/repository/generate.go
index d5c07e9800..9d2bbb1f7f 100644
--- a/services/repository/generate.go
+++ b/services/repository/generate.go
@@ -236,8 +236,8 @@ func generateRepoCommit(ctx context.Context, repo, templateRepo, generateRepo *r
return err
}
- if stdout, _, err := git.NewCommand(ctx, "remote", "add", "origin").AddDynamicArguments(repo.RepoPath()).
- RunStdString(&git.RunOpts{Dir: tmpDir, Env: env}); err != nil {
+ if stdout, _, err := git.NewCommand("remote", "add", "origin").AddDynamicArguments(repo.RepoPath()).
+ RunStdString(ctx, &git.RunOpts{Dir: tmpDir, Env: env}); err != nil {
log.Error("Unable to add %v as remote origin to temporary repo to %s: stdout %s\nError: %v", repo, tmpDir, stdout, err)
return fmt.Errorf("git remote add: %w", err)
}
@@ -258,7 +258,7 @@ func generateRepoCommit(ctx context.Context, repo, templateRepo, generateRepo *r
func generateGitContent(ctx context.Context, repo, templateRepo, generateRepo *repo_model.Repository) (err error) {
tmpDir, err := os.MkdirTemp(os.TempDir(), "gitea-"+repo.Name)
if err != nil {
- return fmt.Errorf("Failed to create temp dir for repository %s: %w", repo.RepoPath(), err)
+ return fmt.Errorf("Failed to create temp dir for repository %s: %w", repo.FullName(), err)
}
defer func() {
@@ -350,10 +350,9 @@ func generateRepository(ctx context.Context, doer, owner *user_model.User, templ
return nil, err
}
- repoPath := generateRepo.RepoPath()
- isExist, err := util.IsExist(repoPath)
+ isExist, err := gitrepo.IsRepositoryExist(ctx, generateRepo)
if err != nil {
- log.Error("Unable to check if %s exists. Error: %v", repoPath, err)
+ log.Error("Unable to check if %s exists. Error: %v", generateRepo.FullName(), err)
return nil, err
}
if isExist {
@@ -363,7 +362,7 @@ func generateRepository(ctx context.Context, doer, owner *user_model.User, templ
}
}
- if err = repo_module.CheckInitRepository(ctx, owner.Name, generateRepo.Name, generateRepo.ObjectFormatName); err != nil {
+ if err = repo_module.CheckInitRepository(ctx, generateRepo); err != nil {
return generateRepo, err
}
@@ -371,8 +370,8 @@ func generateRepository(ctx context.Context, doer, owner *user_model.User, templ
return generateRepo, fmt.Errorf("checkDaemonExportOK: %w", err)
}
- if stdout, _, err := git.NewCommand(ctx, "update-server-info").
- RunStdString(&git.RunOpts{Dir: repoPath}); err != nil {
+ if stdout, _, err := git.NewCommand("update-server-info").
+ RunStdString(ctx, &git.RunOpts{Dir: generateRepo.RepoPath()}); err != nil {
log.Error("GenerateRepository(git update-server-info) in %v: Stdout: %s\nError: %v", generateRepo, stdout, err)
return generateRepo, fmt.Errorf("error in GenerateRepository(git update-server-info): %w", err)
}
diff --git a/services/repository/gitgraph/graph.go b/services/repository/gitgraph/graph.go
new file mode 100644
index 0000000000..d06d18c1b4
--- /dev/null
+++ b/services/repository/gitgraph/graph.go
@@ -0,0 +1,116 @@
+// Copyright 2016 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package gitgraph
+
+import (
+ "bufio"
+ "bytes"
+ "context"
+ "os"
+ "strings"
+
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/setting"
+)
+
+// GetCommitGraph returns a list of commits (GraphItems) from all branches
+func GetCommitGraph(r *git.Repository, page, maxAllowedColors int, hidePRRefs bool, branches, files []string) (*Graph, error) {
+ format := "DATA:%D|%H|%ad|%h|%s"
+
+ if page == 0 {
+ page = 1
+ }
+
+ graphCmd := git.NewCommand("log", "--graph", "--date-order", "--decorate=full")
+
+ if hidePRRefs {
+ graphCmd.AddArguments("--exclude=" + git.PullPrefix + "*")
+ }
+
+ if len(branches) == 0 {
+ graphCmd.AddArguments("--tags", "--branches")
+ }
+
+ graphCmd.AddArguments("-C", "-M", "--date=iso-strict").
+ AddOptionFormat("-n %d", setting.UI.GraphMaxCommitNum*page).
+ AddOptionFormat("--pretty=format:%s", format)
+
+ if len(branches) > 0 {
+ graphCmd.AddDynamicArguments(branches...)
+ }
+ if len(files) > 0 {
+ graphCmd.AddDashesAndList(files...)
+ }
+ graph := NewGraph()
+
+ stderr := new(strings.Builder)
+ stdoutReader, stdoutWriter, err := os.Pipe()
+ if err != nil {
+ return nil, err
+ }
+ commitsToSkip := setting.UI.GraphMaxCommitNum * (page - 1)
+
+ scanner := bufio.NewScanner(stdoutReader)
+
+ if err := graphCmd.Run(r.Ctx, &git.RunOpts{
+ Dir: r.Path,
+ Stdout: stdoutWriter,
+ Stderr: stderr,
+ PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error {
+ _ = stdoutWriter.Close()
+ defer stdoutReader.Close()
+ parser := &Parser{}
+ parser.firstInUse = -1
+ parser.maxAllowedColors = maxAllowedColors
+ if maxAllowedColors > 0 {
+ parser.availableColors = make([]int, maxAllowedColors)
+ for i := range parser.availableColors {
+ parser.availableColors[i] = i + 1
+ }
+ } else {
+ parser.availableColors = []int{1, 2}
+ }
+ for commitsToSkip > 0 && scanner.Scan() {
+ line := scanner.Bytes()
+ dataIdx := bytes.Index(line, []byte("DATA:"))
+ if dataIdx < 0 {
+ dataIdx = len(line)
+ }
+ starIdx := bytes.IndexByte(line, '*')
+ if starIdx >= 0 && starIdx < dataIdx {
+ commitsToSkip--
+ }
+ parser.ParseGlyphs(line[:dataIdx])
+ }
+
+ row := 0
+
+ // Skip initial non-commit lines
+ for scanner.Scan() {
+ line := scanner.Bytes()
+ if bytes.IndexByte(line, '*') >= 0 {
+ if err := parser.AddLineToGraph(graph, row, line); err != nil {
+ cancel()
+ return err
+ }
+ break
+ }
+ parser.ParseGlyphs(line)
+ }
+
+ for scanner.Scan() {
+ row++
+ line := scanner.Bytes()
+ if err := parser.AddLineToGraph(graph, row, line); err != nil {
+ cancel()
+ return err
+ }
+ }
+ return scanner.Err()
+ },
+ }); err != nil {
+ return graph, err
+ }
+ return graph, nil
+}
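GetCommitGraph is driven per page and returns a Graph whose Commits and Flows are defined in graph_models.go below. A minimal sketch of calling it outside the web handlers; the repository path and option values are illustrative:

```go
package gitgraph

import (
	"fmt"

	"code.gitea.io/gitea/modules/git"
)

// exampleCommitGraph prints one line per commit of page 1 of the graph,
// with unlimited colors and pull-request refs included.
func exampleCommitGraph() error {
	gitRepo, err := git.OpenRepository(git.DefaultContext, "/path/to/repo.git")
	if err != nil {
		return err
	}
	defer gitRepo.Close()

	graph, err := GetCommitGraph(gitRepo, 1 /* page */, 0 /* maxAllowedColors */, false /* hidePRRefs */, nil, nil)
	if err != nil {
		return err
	}
	for _, c := range graph.Commits {
		if c.OnlyRelation() {
			continue // spacer rows carry glyphs but no commit data
		}
		fmt.Printf("%s %s\n", c.ShortRev, c.Subject)
	}
	return nil
}
```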
diff --git a/services/repository/gitgraph/graph_models.go b/services/repository/gitgraph/graph_models.go
new file mode 100644
index 0000000000..c45662836b
--- /dev/null
+++ b/services/repository/gitgraph/graph_models.go
@@ -0,0 +1,266 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package gitgraph
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "strings"
+ "time"
+
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ asymkey_service "code.gitea.io/gitea/services/asymkey"
+)
+
+// NewGraph creates a basic graph
+func NewGraph() *Graph {
+ graph := &Graph{}
+ graph.relationCommit = &Commit{
+ Row: -1,
+ Column: -1,
+ }
+ graph.Flows = map[int64]*Flow{}
+ return graph
+}
+
+// Graph represents a collection of flows
+type Graph struct {
+ Flows map[int64]*Flow
+ Commits []*Commit
+ MinRow int
+ MinColumn int
+ MaxRow int
+ MaxColumn int
+ relationCommit *Commit
+}
+
+// Width returns the width of the graph
+func (graph *Graph) Width() int {
+ return graph.MaxColumn - graph.MinColumn + 1
+}
+
+// Height returns the height of the graph
+func (graph *Graph) Height() int {
+ return graph.MaxRow - graph.MinRow + 1
+}
+
+// AddGlyph adds glyph to flows
+func (graph *Graph) AddGlyph(row, column int, flowID int64, color int, glyph byte) {
+ flow, ok := graph.Flows[flowID]
+ if !ok {
+ flow = NewFlow(flowID, color, row, column)
+ graph.Flows[flowID] = flow
+ }
+ flow.AddGlyph(row, column, glyph)
+
+ if row < graph.MinRow {
+ graph.MinRow = row
+ }
+ if row > graph.MaxRow {
+ graph.MaxRow = row
+ }
+ if column < graph.MinColumn {
+ graph.MinColumn = column
+ }
+ if column > graph.MaxColumn {
+ graph.MaxColumn = column
+ }
+}
+
+// AddCommit adds a commit at row, column on flowID with the provided data
+func (graph *Graph) AddCommit(row, column int, flowID int64, data []byte) error {
+ commit, err := NewCommit(row, column, data)
+ if err != nil {
+ return err
+ }
+ commit.Flow = flowID
+ graph.Commits = append(graph.Commits, commit)
+
+ graph.Flows[flowID].Commits = append(graph.Flows[flowID].Commits, commit)
+ return nil
+}
+
+// LoadAndProcessCommits will load the git.Commits for each commit in the graph,
+// then associate each commit with its author user, and check the commit verification
+// before finally retrieving the latest status
+func (graph *Graph) LoadAndProcessCommits(ctx context.Context, repository *repo_model.Repository, gitRepo *git.Repository) error {
+ var err error
+ var ok bool
+
+ emails := map[string]*user_model.User{}
+ keyMap := map[string]bool{}
+
+ for _, c := range graph.Commits {
+ if len(c.Rev) == 0 {
+ continue
+ }
+ c.Commit, err = gitRepo.GetCommit(c.Rev)
+ if err != nil {
+ return fmt.Errorf("GetCommit: %s Error: %w", c.Rev, err)
+ }
+
+ if c.Commit.Author != nil {
+ email := c.Commit.Author.Email
+ if c.User, ok = emails[email]; !ok {
+ c.User, _ = user_model.GetUserByEmail(ctx, email)
+ emails[email] = c.User
+ }
+ }
+
+ c.Verification = asymkey_service.ParseCommitWithSignature(ctx, c.Commit)
+
+ _ = asymkey_model.CalculateTrustStatus(c.Verification, repository.GetTrustModel(), func(user *user_model.User) (bool, error) {
+ return repo_model.IsOwnerMemberCollaborator(ctx, repository, user.ID)
+ }, &keyMap)
+
+ statuses, _, err := git_model.GetLatestCommitStatus(ctx, repository.ID, c.Commit.ID.String(), db.ListOptions{})
+ if err != nil {
+ log.Error("GetLatestCommitStatus: %v", err)
+ } else {
+ c.Status = git_model.CalcCommitStatus(statuses)
+ }
+ }
+ return nil
+}
+
+// NewFlow creates a new flow
+func NewFlow(flowID int64, color, row, column int) *Flow {
+ return &Flow{
+ ID: flowID,
+ ColorNumber: color,
+ MinRow: row,
+ MinColumn: column,
+ MaxRow: row,
+ MaxColumn: column,
+ }
+}
+
+// Flow represents a series of glyphs
+type Flow struct {
+ ID int64
+ ColorNumber int
+ Glyphs []Glyph
+ Commits []*Commit
+ MinRow int
+ MinColumn int
+ MaxRow int
+ MaxColumn int
+}
+
+// Color16 wraps the color numbers around mod 16
+func (flow *Flow) Color16() int {
+ return flow.ColorNumber % 16
+}
+
+// AddGlyph adds glyph at row and column
+func (flow *Flow) AddGlyph(row, column int, glyph byte) {
+ if row < flow.MinRow {
+ flow.MinRow = row
+ }
+ if row > flow.MaxRow {
+ flow.MaxRow = row
+ }
+ if column < flow.MinColumn {
+ flow.MinColumn = column
+ }
+ if column > flow.MaxColumn {
+ flow.MaxColumn = column
+ }
+
+ flow.Glyphs = append(flow.Glyphs, Glyph{
+ row,
+ column,
+ glyph,
+ })
+}
+
+// Glyph represents a co-ordinate and glyph
+type Glyph struct {
+ Row int
+ Column int
+ Glyph byte
+}
+
+// RelationCommit represents an empty relation commit
+var RelationCommit = &Commit{
+ Row: -1,
+}
+
+func parseGitTime(timeStr string) time.Time {
+ t, err := time.Parse(time.RFC3339, timeStr)
+ if err != nil {
+ return time.Unix(0, 0)
+ }
+ return t
+}
+
+// NewCommit creates a new commit from a provided line
+func NewCommit(row, column int, line []byte) (*Commit, error) {
+ data := bytes.SplitN(line, []byte("|"), 5)
+ if len(data) < 5 {
+ return nil, fmt.Errorf("malformed data section on line %d with commit: %s", row, string(line))
+ }
+ return &Commit{
+ Row: row,
+ Column: column,
+ // 0 matches git log --pretty=format:%d => ref names, like the --decorate option of git-log(1)
+ Refs: newRefsFromRefNames(data[0]),
+ // 1 matches git log --pretty=format:%H => commit hash
+ Rev: string(data[1]),
+ // 2 matches git log --pretty=format:%ad => author date (format respects --date= option)
+ Date: parseGitTime(string(data[2])),
+ // 3 matches git log --pretty=format:%h => abbreviated commit hash
+ ShortRev: string(data[3]),
+ // 4 matches git log --pretty=format:%s => subject
+ Subject: string(data[4]),
+ }, nil
+}
+
+func newRefsFromRefNames(refNames []byte) []git.Reference {
+ refBytes := bytes.Split(refNames, []byte{',', ' '})
+ refs := make([]git.Reference, 0, len(refBytes))
+ for _, refNameBytes := range refBytes {
+ if len(refNameBytes) == 0 {
+ continue
+ }
+ refName := string(refNameBytes)
+ if strings.HasPrefix(refName, "tag: ") {
+ refName = strings.TrimPrefix(refName, "tag: ")
+ } else {
+ refName = strings.TrimPrefix(refName, "HEAD -> ")
+ }
+ refs = append(refs, git.Reference{
+ Name: refName,
+ })
+ }
+ return refs
+}
+
+// Commit represents a commit at co-ordinate X, Y with the data
+type Commit struct {
+ Commit *git.Commit
+ User *user_model.User
+ Verification *asymkey_model.CommitVerification
+ Status *git_model.CommitStatus
+ Flow int64
+ Row int
+ Column int
+ Refs []git.Reference
+ Rev string
+ Date time.Time
+ ShortRev string
+ Subject string
+}
+
+// OnlyRelation returns whether this is a relation-only commit
+func (c *Commit) OnlyRelation() bool {
+ return c.Row == -1
+}
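NewCommit consumes the text after the DATA: marker of the pretty format used above ("DATA:%D|%H|%ad|%h|%s"), splitting on '|' into refs, full hash, author date, short hash and subject. A small worked example, with the date written in the iso-strict form the log command actually emits (the sample values are adapted from the tests that follow):

```go
package gitgraph

import "fmt"

// exampleParseDataLine parses one DATA payload (the "DATA:" prefix already
// stripped, as AddLineToGraph does) into a Commit.
func exampleParseDataLine() error {
	line := []byte("refs/heads/master|4e61bacab44e9b4730e44a6615d04098dd3a8eaf|2016-12-20T21:10:41+01:00|4e61bac|Add route for graph")
	c, err := NewCommit(0, 0, line) // row 0, column 0
	if err != nil {
		return err
	}
	// Fields map positionally: refs, full hash, author date, short hash, subject.
	fmt.Println(c.Refs[0].Name, c.Rev, c.Date.Format("2006-01-02"), c.ShortRev, c.Subject)
	return nil
}
```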
diff --git a/services/repository/gitgraph/graph_test.go b/services/repository/gitgraph/graph_test.go
new file mode 100644
index 0000000000..4c48b94aa2
--- /dev/null
+++ b/services/repository/gitgraph/graph_test.go
@@ -0,0 +1,712 @@
+// Copyright 2016 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package gitgraph
+
+import (
+ "bytes"
+ "fmt"
+ "strings"
+ "testing"
+
+ "code.gitea.io/gitea/modules/git"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func BenchmarkGetCommitGraph(b *testing.B) {
+ currentRepo, err := git.OpenRepository(git.DefaultContext, ".")
+ if err != nil || currentRepo == nil {
+ b.Error("Could not open repository")
+ }
+ defer currentRepo.Close()
+
+ for b.Loop() {
+ graph, err := GetCommitGraph(currentRepo, 1, 0, false, nil, nil)
+ if err != nil {
+ b.Error("Could get commit graph")
+ }
+
+ if len(graph.Commits) < 100 {
+ b.Error("Should get 100 log lines.")
+ }
+ }
+}
+
+func BenchmarkParseCommitString(b *testing.B) {
+ testString := "* DATA:|4e61bacab44e9b4730e44a6615d04098dd3a8eaf|2016-12-20 21:10:41 +0100|4e61bac|Add route for graph"
+
+ parser := &Parser{}
+ parser.Reset()
+ for b.Loop() {
+ parser.Reset()
+ graph := NewGraph()
+ if err := parser.AddLineToGraph(graph, 0, []byte(testString)); err != nil {
+ b.Error("could not parse teststring")
+ }
+ if graph.Flows[1].Commits[0].Rev != "4e61bacab44e9b4730e44a6615d04098dd3a8eaf" {
+ b.Error("Did not get expected data")
+ }
+ }
+}
+
+func BenchmarkParseGlyphs(b *testing.B) {
+ parser := &Parser{}
+ parser.Reset()
+ tgBytes := []byte(testglyphs)
+ var tg []byte
+ for b.Loop() {
+ parser.Reset()
+ tg = tgBytes
+ idx := bytes.Index(tg, []byte("\n"))
+ for idx > 0 {
+ parser.ParseGlyphs(tg[:idx])
+ tg = tg[idx+1:]
+ idx = bytes.Index(tg, []byte("\n"))
+ }
+ }
+}
+
+func TestReleaseUnusedColors(t *testing.T) {
+ testcases := []struct {
+ availableColors []int
+ oldColors []int
+ firstInUse int // these values have to either be correct or suggest less is
+ firstAvailable int // available than possibly is - i.e. you cannot say 10 is available when it
+ }{
+ {
+ availableColors: []int{1, 2, 3, 4, 5, 6, 7, 8, 9, 10},
+ oldColors: []int{1, 1, 1, 1, 1},
+ firstAvailable: -1,
+ firstInUse: 1,
+ },
+ {
+ availableColors: []int{1, 2, 3, 4, 5, 6, 7, 8, 9, 10},
+ oldColors: []int{1, 2, 3, 4},
+ firstAvailable: 6,
+ firstInUse: 0,
+ },
+ {
+ availableColors: []int{1, 2, 3, 4, 5, 6, 7, 8, 9, 10},
+ oldColors: []int{6, 0, 3, 5, 3, 4, 0, 0},
+ firstAvailable: 6,
+ firstInUse: 0,
+ },
+ {
+ availableColors: []int{1, 2, 3, 4, 5, 6, 7},
+ oldColors: []int{6, 1, 3, 5, 3, 4, 2, 7},
+ firstAvailable: -1,
+ firstInUse: 0,
+ },
+ {
+ availableColors: []int{1, 2, 3, 4, 5, 6, 7},
+ oldColors: []int{6, 0, 3, 5, 3, 4, 2, 7},
+ firstAvailable: -1,
+ firstInUse: 0,
+ },
+ }
+ for _, testcase := range testcases {
+ parser := &Parser{}
+ parser.Reset()
+ parser.availableColors = append([]int{}, testcase.availableColors...)
+ parser.oldColors = append(parser.oldColors, testcase.oldColors...)
+ parser.firstAvailable = testcase.firstAvailable
+ parser.firstInUse = testcase.firstInUse
+ parser.releaseUnusedColors()
+
+ if parser.firstAvailable == -1 {
+ // All in use
+ for _, color := range parser.availableColors {
+ found := false
+ for _, oldColor := range parser.oldColors {
+ if oldColor == color {
+ found = true
+ break
+ }
+ }
+ if !found {
+ t.Errorf("In testcase:\n%d\t%d\t%d %d =>\n%d\t%d\t%d %d: %d should be available but is not",
+ testcase.availableColors,
+ testcase.oldColors,
+ testcase.firstAvailable,
+ testcase.firstInUse,
+ parser.availableColors,
+ parser.oldColors,
+ parser.firstAvailable,
+ parser.firstInUse,
+ color)
+ }
+ }
+ } else if parser.firstInUse != -1 {
+ // Some in use
+ for i := parser.firstInUse; i != parser.firstAvailable; i = (i + 1) % len(parser.availableColors) {
+ color := parser.availableColors[i]
+ found := false
+ for _, oldColor := range parser.oldColors {
+ if oldColor == color {
+ found = true
+ break
+ }
+ }
+ if !found {
+ t.Errorf("In testcase:\n%d\t%d\t%d %d =>\n%d\t%d\t%d %d: %d should be available but is not",
+ testcase.availableColors,
+ testcase.oldColors,
+ testcase.firstAvailable,
+ testcase.firstInUse,
+ parser.availableColors,
+ parser.oldColors,
+ parser.firstAvailable,
+ parser.firstInUse,
+ color)
+ }
+ }
+ for i := parser.firstAvailable; i != parser.firstInUse; i = (i + 1) % len(parser.availableColors) {
+ color := parser.availableColors[i]
+ found := false
+ for _, oldColor := range parser.oldColors {
+ if oldColor == color {
+ found = true
+ break
+ }
+ }
+ if found {
+ t.Errorf("In testcase:\n%d\t%d\t%d %d =>\n%d\t%d\t%d %d: %d should not be available but is",
+ testcase.availableColors,
+ testcase.oldColors,
+ testcase.firstAvailable,
+ testcase.firstInUse,
+ parser.availableColors,
+ parser.oldColors,
+ parser.firstAvailable,
+ parser.firstInUse,
+ color)
+ }
+ }
+ } else {
+ // None in use
+ for _, color := range parser.oldColors {
+ if color != 0 {
+ t.Errorf("In testcase:\n%d\t%d\t%d %d =>\n%d\t%d\t%d %d: %d should not be available but is",
+ testcase.availableColors,
+ testcase.oldColors,
+ testcase.firstAvailable,
+ testcase.firstInUse,
+ parser.availableColors,
+ parser.oldColors,
+ parser.firstAvailable,
+ parser.firstInUse,
+ color)
+ }
+ }
+ }
+ }
+}
+
+func TestParseGlyphs(t *testing.T) {
+ parser := &Parser{}
+ parser.Reset()
+ tgBytes := []byte(testglyphs)
+ tg := tgBytes
+ idx := bytes.Index(tg, []byte("\n"))
+ row := 0
+ for idx > 0 {
+ parser.ParseGlyphs(tg[:idx])
+ tg = tg[idx+1:]
+ idx = bytes.Index(tg, []byte("\n"))
+ if parser.flows[0] != 1 {
+ t.Errorf("First column flow should be 1 but was %d", parser.flows[0])
+ }
+ colorToFlow := map[int]int64{}
+ flowToColor := map[int64]int{}
+
+ for i, flow := range parser.flows {
+ if flow == 0 {
+ continue
+ }
+ color := parser.colors[i]
+
+ if fColor, in := flowToColor[flow]; in && fColor != color {
+ t.Errorf("Row %d column %d flow %d has color %d but should be %d", row, i, flow, color, fColor)
+ }
+ flowToColor[flow] = color
+ if cFlow, in := colorToFlow[color]; in && cFlow != flow {
+ t.Errorf("Row %d column %d flow %d has color %d but conflicts with flow %d", row, i, flow, color, cFlow)
+ }
+ colorToFlow[color] = flow
+ }
+ row++
+ }
+ assert.Len(t, parser.availableColors, 9)
+}
+
+func TestCommitStringParsing(t *testing.T) {
+ dataFirstPart := "* DATA:|4e61bacab44e9b4730e44a6615d04098dd3a8eaf|2016-12-20 21:10:41 +0100|4e61bac|"
+ tests := []struct {
+ shouldPass bool
+ testName string
+ commitMessage string
+ }{
+ {true, "normal", "not a fancy message"},
+ {true, "extra pipe", "An extra pipe: |"},
+ {true, "extra 'Data:'", "DATA: might be trouble"},
+ }
+
+ for _, test := range tests {
+ t.Run(test.testName, func(t *testing.T) {
+ testString := fmt.Sprintf("%s%s", dataFirstPart, test.commitMessage)
+ idx := strings.Index(testString, "DATA:")
+ commit, err := NewCommit(0, 0, []byte(testString[idx+5:]))
+ if err != nil && test.shouldPass {
+ t.Errorf("Could not parse %s", testString)
+ return
+ }
+
+ assert.Equal(t, test.commitMessage, commit.Subject)
+ })
+ }
+}
+
+var testglyphs = `*
+*
+*
+*
+*
+*
+*
+*
+|\
+* |
+* |
+* |
+* |
+* |
+| *
+* |
+| *
+| |\
+* | |
+| | *
+| | |\
+* | | \
+|\ \ \ \
+| * | | |
+| |\| | |
+* | | | |
+|/ / / /
+| | | *
+| * | |
+| * | |
+| * | |
+* | | |
+* | | |
+* | | |
+* | | |
+* | | |
+|\ \ \ \
+| | * | |
+| | |\| |
+| | | * |
+| | | | *
+* | | | |
+* | | | |
+* | | | |
+* | | | |
+* | | | |
+|\ \ \ \ \
+| * | | | |
+|/| | | | |
+| | |/ / /
+| |/| | |
+| | | | *
+| * | | |
+|/| | | |
+| * | | |
+|/| | | |
+| | |/ /
+| |/| |
+| * | |
+| * | |
+| |\ \ \
+| | * | |
+| |/| | |
+| | | |/
+| | |/|
+| * | |
+| * | |
+| * | |
+| | * |
+| | |\ \
+| | | * |
+| | |/| |
+| | | * |
+| | | |\ \
+| | | | * |
+| | | |/| |
+| | * | | |
+| | * | | |
+| | |\ \ \ \
+| | | * | | |
+| | |/| | | |
+| | | | | * |
+| | | | |/ /
+* | | | / /
+|/ / / / /
+* | | | |
+|\ \ \ \ \
+| * | | | |
+|/| | | | |
+| * | | | |
+| * | | | |
+| |\ \ \ \ \
+| | | * \ \ \
+| | | |\ \ \ \
+| | | | * | | |
+| | | |/| | | |
+| | | | | |/ /
+| | | | |/| |
+* | | | | | |
+* | | | | | |
+* | | | | | |
+| | | | * | |
+* | | | | | |
+| | * | | | |
+| |/| | | | |
+* | | | | | |
+| |/ / / / /
+|/| | | | |
+| | | | * |
+| | | |/ /
+| | |/| |
+| * | | |
+| | | | *
+| | * | |
+| | |\ \ \
+| | | * | |
+| | |/| | |
+| | | |/ /
+| | | * |
+| | * | |
+| | |\ \ \
+| | | * | |
+| | |/| | |
+| | | |/ /
+| | | * |
+* | | | |
+|\ \ \ \ \
+| * \ \ \ \
+| |\ \ \ \ \
+| | | |/ / /
+| | |/| | |
+| | | | * |
+| | | | * |
+* | | | | |
+* | | | | |
+|/ / / / /
+| | | * |
+* | | | |
+* | | | |
+* | | | |
+* | | | |
+|\ \ \ \ \
+| * | | | |
+|/| | | | |
+| | * | | |
+| | |\ \ \ \
+| | | * | | |
+| | |/| | | |
+| |/| | |/ /
+| | | |/| |
+| | | | | *
+| |_|_|_|/
+|/| | | |
+| | * | |
+| |/ / /
+* | | |
+* | | |
+| | * |
+* | | |
+* | | |
+| * | |
+| | * |
+| * | |
+* | | |
+|\ \ \ \
+| * | | |
+|/| | | |
+| |/ / /
+| * | |
+| |\ \ \
+| | * | |
+| |/| | |
+| | |/ /
+| | * |
+| | |\ \
+| | | * |
+| | |/| |
+* | | | |
+* | | | |
+|\ \ \ \ \
+| * | | | |
+|/| | | | |
+| | * | | |
+| | * | | |
+| | * | | |
+| |/ / / /
+| * | | |
+| |\ \ \ \
+| | * | | |
+| |/| | | |
+* | | | | |
+* | | | | |
+* | | | | |
+* | | | | |
+* | | | | |
+| | | | * |
+* | | | | |
+|\ \ \ \ \ \
+| * | | | | |
+|/| | | | | |
+| | | | | * |
+| | | | |/ /
+* | | | | |
+|\ \ \ \ \ \
+* | | | | | |
+* | | | | | |
+| | | | * | |
+* | | | | | |
+* | | | | | |
+|\ \ \ \ \ \ \
+| | |_|_|/ / /
+| |/| | | | |
+| | | | * | |
+| | | | * | |
+| | | | * | |
+| | | | * | |
+| | | | * | |
+| | | | * | |
+| | | |/ / /
+| | | * | |
+| | | * | |
+| | | * | |
+| | |/| | |
+| | | * | |
+| | |/| | |
+| | | |/ /
+| | * | |
+| |/| | |
+| | | * |
+| | |/ /
+| | * |
+| * | |
+| |\ \ \
+| * | | |
+| | * | |
+| |/| | |
+| | |/ /
+| | * |
+| | |\ \
+| | * | |
+* | | | |
+|\| | | |
+| * | | |
+| * | | |
+| * | | |
+| | * | |
+| * | | |
+| |\| | |
+| * | | |
+| | * | |
+| | * | |
+| * | | |
+| * | | |
+| * | | |
+| * | | |
+| * | | |
+| * | | |
+| * | | |
+| * | | |
+| | * | |
+| * | | |
+| * | | |
+| * | | |
+| * | | |
+| | * | |
+* | | | |
+|\| | | |
+| | * | |
+| * | | |
+| |\| | |
+| | * | |
+| | * | |
+| | * | |
+| | | * |
+* | | | |
+|\| | | |
+| | * | |
+| | |/ /
+| * | |
+| * | |
+| |\| |
+* | | |
+|\| | |
+| | * |
+| | * |
+| | * |
+| * | |
+| | * |
+| * | |
+| | * |
+| | * |
+| | * |
+| * | |
+| * | |
+| * | |
+| * | |
+| * | |
+| * | |
+| * | |
+* | | |
+|\| | |
+| * | |
+| |\| |
+| | * |
+| | |\ \
+* | | | |
+|\| | | |
+| * | | |
+| |\| | |
+| | * | |
+| | | * |
+| | |/ /
+* | | |
+* | | |
+|\| | |
+| * | |
+| |\| |
+| | * |
+| | * |
+| | * |
+| | | *
+* | | |
+|\| | |
+| * | |
+| * | |
+| | | *
+| | | |\
+* | | | |
+| |_|_|/
+|/| | |
+| * | |
+| |\| |
+| | * |
+| | * |
+| | * |
+| | * |
+| | * |
+| * | |
+* | | |
+|\| | |
+| * | |
+|/| | |
+| |/ /
+| * |
+| |\ \
+| * | |
+| * | |
+* | | |
+|\| | |
+| | * |
+| * | |
+| * | |
+| * | |
+* | | |
+|\| | |
+| * | |
+| * | |
+| | * |
+| | |\ \
+| | |/ /
+| |/| |
+| * | |
+* | | |
+|\| | |
+| * | |
+* | | |
+|\| | |
+| * | |
+| |\ \ \
+| * | | |
+| * | | |
+| | | * |
+| * | | |
+| * | | |
+| | |/ /
+| |/| |
+| | * |
+* | | |
+|\| | |
+| * | |
+| * | |
+| * | |
+| * | |
+| * | |
+| |\ \ \
+* | | | |
+|\| | | |
+| * | | |
+| * | | |
+* | | | |
+* | | | |
+|\| | | |
+| | | | *
+| | | | |\
+| |_|_|_|/
+|/| | | |
+| * | | |
+* | | | |
+* | | | |
+|\| | | |
+| * | | |
+| |\ \ \ \
+| | | |/ /
+| | |/| |
+| * | | |
+| * | | |
+| * | | |
+| * | | |
+| | * | |
+| | | * |
+| | |/ /
+| |/| |
+* | | |
+|\| | |
+| * | |
+| * | |
+| * | |
+| * | |
+| * | |
+* | | |
+|\| | |
+| * | |
+| * | |
+* | | |
+| * | |
+| * | |
+| * | |
+* | | |
+* | | |
+* | | |
+|\| | |
+| * | |
+* | | |
+* | | |
+* | | |
+* | | |
+| | | *
+* | | |
+|\| | |
+| * | |
+| * | |
+| * | |
+`
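The Parser defined in parser.go below is what AddLineToGraph and GetCommitGraph build on: Reset once, then feed each combined glyph/DATA row in order so flows and colors carry over from one row to the next. A hedged sketch of driving it directly, mirroring what the benchmarks in this file do (buildGraph is an illustrative helper):

```go
package gitgraph

// buildGraph feeds raw `git log --graph --pretty=format:DATA:...` lines through
// the parser by hand, the same way GetCommitGraph's pipeline does.
func buildGraph(lines [][]byte) (*Graph, error) {
	parser := &Parser{}
	parser.Reset() // seeds the two default colors and clears per-row state
	graph := NewGraph()
	for row, line := range lines {
		if err := parser.AddLineToGraph(graph, row, line); err != nil {
			return graph, err // e.g. a commit glyph ('*') without a DATA: section
		}
	}
	return graph, nil
}
```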
diff --git a/services/repository/gitgraph/parser.go b/services/repository/gitgraph/parser.go
new file mode 100644
index 0000000000..f6bf9b0b90
--- /dev/null
+++ b/services/repository/gitgraph/parser.go
@@ -0,0 +1,336 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package gitgraph
+
+import (
+ "bytes"
+ "fmt"
+)
+
+// Parser represents a git graph parser. It is stateful containing the previous
+// glyphs, detected flows and color assignments.
+type Parser struct {
+ glyphs []byte
+ oldGlyphs []byte
+ flows []int64
+ oldFlows []int64
+ maxFlow int64
+ colors []int
+ oldColors []int
+ availableColors []int
+ nextAvailable int
+ firstInUse int
+ firstAvailable int
+ maxAllowedColors int
+}
+
+// Reset resets the internal parser state.
+func (parser *Parser) Reset() {
+ parser.glyphs = parser.glyphs[0:0]
+ parser.oldGlyphs = parser.oldGlyphs[0:0]
+ parser.flows = parser.flows[0:0]
+ parser.oldFlows = parser.oldFlows[0:0]
+ parser.maxFlow = 0
+ parser.colors = parser.colors[0:0]
+ parser.oldColors = parser.oldColors[0:0]
+ parser.availableColors = parser.availableColors[0:0]
+ parser.availableColors = append(parser.availableColors, 1, 2)
+ parser.nextAvailable = 0
+ parser.firstInUse = -1
+ parser.firstAvailable = 0
+ parser.maxAllowedColors = 0
+}
+
+// AddLineToGraph adds the line as a row to the graph
+func (parser *Parser) AddLineToGraph(graph *Graph, row int, line []byte) error {
+ idx := bytes.Index(line, []byte("DATA:"))
+ if idx < 0 {
+ parser.ParseGlyphs(line)
+ } else {
+ parser.ParseGlyphs(line[:idx])
+ }
+
+ var err error
+ commitDone := false
+
+ for column, glyph := range parser.glyphs {
+ if glyph == ' ' {
+ continue
+ }
+
+ flowID := parser.flows[column]
+
+ graph.AddGlyph(row, column, flowID, parser.colors[column], glyph)
+
+ if glyph == '*' {
+ if commitDone {
+ if err != nil {
+ err = fmt.Errorf("double commit on line %d: %s. %w", row, string(line), err)
+ } else {
+ err = fmt.Errorf("double commit on line %d: %s", row, string(line))
+ }
+ }
+ commitDone = true
+ if idx < 0 {
+ if err != nil {
+ err = fmt.Errorf("missing data section on line %d with commit: %s. %w", row, string(line), err)
+ } else {
+ err = fmt.Errorf("missing data section on line %d with commit: %s", row, string(line))
+ }
+ continue
+ }
+ err2 := graph.AddCommit(row, column, flowID, line[idx+5:])
+ if err != nil && err2 != nil {
+ err = fmt.Errorf("%v %w", err2, err)
+ continue
+ } else if err2 != nil {
+ err = err2
+ continue
+ }
+ }
+ }
+ if !commitDone {
+ graph.Commits = append(graph.Commits, RelationCommit)
+ }
+ return err
+}
+
+func (parser *Parser) releaseUnusedColors() {
+ if parser.firstInUse > -1 {
+ // Here we step through the old colors, searching for them in the
+ // "in-use" section of availableColors (that is, the colors between
+ // firstInUse and firstAvailable)
+ // Ensure that the benchmarks are not worsened with proposed changes
+ stepstaken := 0
+ position := parser.firstInUse
+ for _, color := range parser.oldColors {
+ if color == 0 {
+ continue
+ }
+ found := false
+ i := position
+ for j := stepstaken; i != parser.firstAvailable && j < len(parser.availableColors); j++ {
+ colorToCheck := parser.availableColors[i]
+ if colorToCheck == color {
+ found = true
+ break
+ }
+ i = (i + 1) % len(parser.availableColors)
+ }
+ if !found {
+ // Duplicate color
+ continue
+ }
+ // Swap them around
+ parser.availableColors[position], parser.availableColors[i] = parser.availableColors[i], parser.availableColors[position]
+ stepstaken++
+ position = (parser.firstInUse + stepstaken) % len(parser.availableColors)
+ if position == parser.firstAvailable || stepstaken == len(parser.availableColors) {
+ break
+ }
+ }
+ if stepstaken == len(parser.availableColors) {
+ parser.firstAvailable = -1
+ } else {
+ parser.firstAvailable = position
+ if parser.nextAvailable == -1 {
+ parser.nextAvailable = parser.firstAvailable
+ }
+ }
+ }
+}
+
+// ParseGlyphs parses the provided glyphs and sets the internal state
+func (parser *Parser) ParseGlyphs(glyphs []byte) {
+ // Clean state for parsing this row
+ parser.glyphs, parser.oldGlyphs = parser.oldGlyphs, parser.glyphs
+ parser.glyphs = parser.glyphs[0:0]
+ parser.flows, parser.oldFlows = parser.oldFlows, parser.flows
+ parser.flows = parser.flows[0:0]
+ parser.colors, parser.oldColors = parser.oldColors, parser.colors
+
+ // Ensure we have enough flows and colors
+ parser.colors = parser.colors[0:0]
+ for range glyphs {
+ parser.flows = append(parser.flows, 0)
+ parser.colors = append(parser.colors, 0)
+ }
+
+ // Copy the provided glyphs into parser.glyphs for safekeeping
+ parser.glyphs = append(parser.glyphs, glyphs...)
+
+ // release unused colors
+ parser.releaseUnusedColors()
+
+ for i := len(glyphs) - 1; i >= 0; i-- {
+ glyph := glyphs[i]
+ switch glyph {
+ case '|':
+ fallthrough
+ case '*':
+ parser.setUpFlow(i)
+ case '/':
+ parser.setOutFlow(i)
+ case '\\':
+ parser.setInFlow(i)
+ case '_':
+ parser.setRightFlow(i)
+ case '.':
+ fallthrough
+ case '-':
+ parser.setLeftFlow(i)
+ case ' ':
+ // no-op
+ default:
+ parser.newFlow(i)
+ }
+ }
+}
+
+func (parser *Parser) takePreviousFlow(i, j int) {
+ if j < len(parser.oldFlows) && parser.oldFlows[j] > 0 {
+ parser.flows[i] = parser.oldFlows[j]
+ parser.oldFlows[j] = 0
+ parser.colors[i] = parser.oldColors[j]
+ parser.oldColors[j] = 0
+ } else {
+ parser.newFlow(i)
+ }
+}
+
+func (parser *Parser) takeCurrentFlow(i, j int) {
+ if j < len(parser.flows) && parser.flows[j] > 0 {
+ parser.flows[i] = parser.flows[j]
+ parser.colors[i] = parser.colors[j]
+ } else {
+ parser.newFlow(i)
+ }
+}
+
+func (parser *Parser) newFlow(i int) {
+ parser.maxFlow++
+ parser.flows[i] = parser.maxFlow
+
+ // Now give this flow a color
+ if parser.nextAvailable == -1 {
+ next := len(parser.availableColors)
+ if parser.maxAllowedColors < 1 || next < parser.maxAllowedColors {
+ parser.nextAvailable = next
+ parser.firstAvailable = next
+ parser.availableColors = append(parser.availableColors, next+1)
+ }
+ }
+ parser.colors[i] = parser.availableColors[parser.nextAvailable]
+ if parser.firstInUse == -1 {
+ parser.firstInUse = parser.nextAvailable
+ }
+ parser.availableColors[parser.firstAvailable], parser.availableColors[parser.nextAvailable] = parser.availableColors[parser.nextAvailable], parser.availableColors[parser.firstAvailable]
+
+ parser.nextAvailable = (parser.nextAvailable + 1) % len(parser.availableColors)
+ parser.firstAvailable = (parser.firstAvailable + 1) % len(parser.availableColors)
+
+ if parser.nextAvailable == parser.firstInUse {
+ parser.nextAvailable = parser.firstAvailable
+ }
+ if parser.nextAvailable == parser.firstInUse {
+ parser.nextAvailable = -1
+ parser.firstAvailable = -1
+ }
+}
+
+// setUpFlow handles '|' or '*'
+func (parser *Parser) setUpFlow(i int) {
+ // In preference order:
+ //
+ // Previous Row: '\? ' ' |' ' /'
+ // Current Row: ' | ' ' |' ' | '
+ if i > 0 && i-1 < len(parser.oldGlyphs) && parser.oldGlyphs[i-1] == '\\' {
+ parser.takePreviousFlow(i, i-1)
+ } else if i < len(parser.oldGlyphs) && (parser.oldGlyphs[i] == '|' || parser.oldGlyphs[i] == '*') {
+ parser.takePreviousFlow(i, i)
+ } else if i+1 < len(parser.oldGlyphs) && parser.oldGlyphs[i+1] == '/' {
+ parser.takePreviousFlow(i, i+1)
+ } else {
+ parser.newFlow(i)
+ }
+}
+
+// setOutFlow handles '/'
+func (parser *Parser) setOutFlow(i int) {
+ // In preference order:
+ //
+ // Previous Row: ' |/' ' |_' ' |' ' /' ' _' '\'
+ // Current Row: '/| ' '/| ' '/ ' '/ ' '/ ' '/'
+ if i+2 < len(parser.oldGlyphs) &&
+ (parser.oldGlyphs[i+1] == '|' || parser.oldGlyphs[i+1] == '*') &&
+ (parser.oldGlyphs[i+2] == '/' || parser.oldGlyphs[i+2] == '_') &&
+ i+1 < len(parser.glyphs) &&
+ (parser.glyphs[i+1] == '|' || parser.glyphs[i+1] == '*') {
+ parser.takePreviousFlow(i, i+2)
+ } else if i+1 < len(parser.oldGlyphs) &&
+ (parser.oldGlyphs[i+1] == '|' || parser.oldGlyphs[i+1] == '*' ||
+ parser.oldGlyphs[i+1] == '/' || parser.oldGlyphs[i+1] == '_') {
+ parser.takePreviousFlow(i, i+1)
+ if parser.oldGlyphs[i+1] == '/' {
+ parser.glyphs[i] = '|'
+ }
+ } else if i < len(parser.oldGlyphs) && parser.oldGlyphs[i] == '\\' {
+ parser.takePreviousFlow(i, i)
+ } else {
+ parser.newFlow(i)
+ }
+}
+
+// setInFlow handles '\'
+func (parser *Parser) setInFlow(i int) {
+ // In preference order:
+ //
+ // Previous Row: '| ' '-. ' '| ' '\ ' '/' '---'
+ // Current Row: '|\' ' \' ' \' ' \' '\' ' \ '
+ if i > 0 && i-1 < len(parser.oldGlyphs) &&
+ (parser.oldGlyphs[i-1] == '|' || parser.oldGlyphs[i-1] == '*') &&
+ (parser.glyphs[i-1] == '|' || parser.glyphs[i-1] == '*') {
+ parser.newFlow(i)
+ } else if i > 0 && i-1 < len(parser.oldGlyphs) &&
+ (parser.oldGlyphs[i-1] == '|' || parser.oldGlyphs[i-1] == '*' ||
+ parser.oldGlyphs[i-1] == '.' || parser.oldGlyphs[i-1] == '\\') {
+ parser.takePreviousFlow(i, i-1)
+ if parser.oldGlyphs[i-1] == '\\' {
+ parser.glyphs[i] = '|'
+ }
+ } else if i < len(parser.oldGlyphs) && parser.oldGlyphs[i] == '/' {
+ parser.takePreviousFlow(i, i)
+ } else {
+ parser.newFlow(i)
+ }
+}
+
+// setRightFlow handles '_'
+func (parser *Parser) setRightFlow(i int) {
+ // In preference order:
+ //
+ // Current Row: '__' '_/' '_|_' '_|/'
+ if i+1 < len(parser.glyphs) &&
+ (parser.glyphs[i+1] == '_' || parser.glyphs[i+1] == '/') {
+ parser.takeCurrentFlow(i, i+1)
+ } else if i+2 < len(parser.glyphs) &&
+ (parser.glyphs[i+1] == '|' || parser.glyphs[i+1] == '*') &&
+ (parser.glyphs[i+2] == '_' || parser.glyphs[i+2] == '/') {
+ parser.takeCurrentFlow(i, i+2)
+ } else {
+ parser.newFlow(i)
+ }
+}
+
+// setLeftFlow handles '-' and '.' (horizontal runs such as '----.')
+func (parser *Parser) setLeftFlow(i int) {
+ if parser.glyphs[i] == '.' {
+ parser.newFlow(i)
+ } else if i+1 < len(parser.glyphs) &&
+ (parser.glyphs[i+1] == '-' || parser.glyphs[i+1] == '.') {
+ parser.takeCurrentFlow(i, i+1)
+ } else {
+ parser.newFlow(i)
+ }
+}
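
For orientation, the parser above is driven row by row over "git log --graph" output: each row's glyphs become cells in the graph, and anything after a "DATA:" marker is handed to the graph as commit metadata. A minimal driving loop might look like the sketch below; the import path, the zero-value Graph, and the log format string are assumptions for illustration, not the package's actual entry point.

// Sketch only: feed "git log --graph" style rows to the Parser shown above.
// The import path and the use of a zero-value Graph are assumptions; the real
// package may expose a dedicated constructor and a fixed DATA field layout.
package main

import (
	"bufio"
	"fmt"
	"os"

	"code.gitea.io/gitea/services/repository/gitgraph" // hypothetical path
)

func main() {
	parser := &gitgraph.Parser{}
	parser.Reset()
	graph := &gitgraph.Graph{}

	// e.g. piped from: git log --graph --pretty=format:"DATA:%H|%s"
	scanner := bufio.NewScanner(os.Stdin)
	for row := 0; scanner.Scan(); row++ {
		line := append([]byte(nil), scanner.Bytes()...) // copy, the parser may retain it
		if err := parser.AddLineToGraph(graph, row, line); err != nil {
			fmt.Fprintln(os.Stderr, "row", row, ":", err) // parsing continues on per-row errors
		}
	}
}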
diff --git a/services/repository/hooks.go b/services/repository/hooks.go
index 97e9e290a3..c13b272550 100644
--- a/services/repository/hooks.go
+++ b/services/repository/hooks.go
@@ -12,7 +12,6 @@ import (
"code.gitea.io/gitea/models/webhook"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/log"
- repo_module "code.gitea.io/gitea/modules/repository"
"xorm.io/builder"
)
@@ -32,11 +31,11 @@ func SyncRepositoryHooks(ctx context.Context) error {
default:
}
- if err := repo_module.CreateDelegateHooks(repo.RepoPath()); err != nil {
+ if err := gitrepo.CreateDelegateHooks(ctx, repo); err != nil {
return fmt.Errorf("SyncRepositoryHook: %w", err)
}
if repo.HasWiki() {
- if err := repo_module.CreateDelegateHooks(repo.WikiPath()); err != nil {
+ if err := gitrepo.CreateDelegateHooks(ctx, repo.WikiStorageRepo()); err != nil {
return fmt.Errorf("SyncRepositoryHook: %w", err)
}
}
diff --git a/services/repository/init.go b/services/repository/init.go
index c719e11786..bd777b8a2f 100644
--- a/services/repository/init.go
+++ b/services/repository/init.go
@@ -33,13 +33,13 @@ func initRepoCommit(ctx context.Context, tmpPath string, repo *repo_model.Reposi
committerName := sig.Name
committerEmail := sig.Email
- if stdout, _, err := git.NewCommand(ctx, "add", "--all").
- RunStdString(&git.RunOpts{Dir: tmpPath}); err != nil {
+ if stdout, _, err := git.NewCommand("add", "--all").
+ RunStdString(ctx, &git.RunOpts{Dir: tmpPath}); err != nil {
log.Error("git add --all failed: Stdout: %s\nError: %v", stdout, err)
return fmt.Errorf("git add --all: %w", err)
}
- cmd := git.NewCommand(ctx, "commit", "--message=Initial commit").
+ cmd := git.NewCommand("commit", "--message=Initial commit").
AddOptionFormat("--author='%s <%s>'", sig.Name, sig.Email)
sign, keyID, signer, _ := asymkey_service.SignInitialCommit(ctx, tmpPath, u)
@@ -61,7 +61,7 @@ func initRepoCommit(ctx context.Context, tmpPath string, repo *repo_model.Reposi
)
if stdout, _, err := cmd.
- RunStdString(&git.RunOpts{Dir: tmpPath, Env: env}); err != nil {
+ RunStdString(ctx, &git.RunOpts{Dir: tmpPath, Env: env}); err != nil {
log.Error("Failed to commit: %v: Stdout: %s\nError: %v", cmd.LogString(), stdout, err)
return fmt.Errorf("git commit: %w", err)
}
@@ -70,8 +70,8 @@ func initRepoCommit(ctx context.Context, tmpPath string, repo *repo_model.Reposi
defaultBranch = setting.Repository.DefaultBranch
}
- if stdout, _, err := git.NewCommand(ctx, "push", "origin").AddDynamicArguments("HEAD:" + defaultBranch).
- RunStdString(&git.RunOpts{Dir: tmpPath, Env: repo_module.InternalPushingEnvironment(u, repo)}); err != nil {
+ if stdout, _, err := git.NewCommand("push", "origin").AddDynamicArguments("HEAD:"+defaultBranch).
+ RunStdString(ctx, &git.RunOpts{Dir: tmpPath, Env: repo_module.InternalPushingEnvironment(u, repo)}); err != nil {
log.Error("Failed to push back to HEAD: Stdout: %s\nError: %v", stdout, err)
return fmt.Errorf("git push: %w", err)
}
diff --git a/services/repository/lfs_test.go b/services/repository/lfs_test.go
index ee0b8f6b89..78ff8c853e 100644
--- a/services/repository/lfs_test.go
+++ b/services/repository/lfs_test.go
@@ -5,7 +5,6 @@ package repository_test
import (
"bytes"
- "context"
"testing"
"time"
@@ -36,7 +35,7 @@ func TestGarbageCollectLFSMetaObjects(t *testing.T) {
lfsOid := storeObjectInRepo(t, repo.ID, &lfsContent)
// gc
- err = repo_service.GarbageCollectLFSMetaObjects(context.Background(), repo_service.GarbageCollectLFSMetaObjectsOptions{
+ err = repo_service.GarbageCollectLFSMetaObjects(t.Context(), repo_service.GarbageCollectLFSMetaObjectsOptions{
AutoFix: true,
OlderThan: time.Now().Add(7 * 24 * time.Hour).Add(5 * 24 * time.Hour),
UpdatedLessRecentlyThan: time.Now().Add(7 * 24 * time.Hour).Add(3 * 24 * time.Hour),
diff --git a/services/repository/license.go b/services/repository/license.go
index 2453be3c87..8622911fa2 100644
--- a/services/repository/license.go
+++ b/services/repository/license.go
@@ -14,7 +14,6 @@ import (
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/graceful"
- "code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/options"
"code.gitea.io/gitea/modules/queue"
@@ -25,7 +24,6 @@ import (
var (
classifier *licenseclassifier.Classifier
LicenseFileName = "LICENSE"
- licenseAliases map[string]string
// licenseUpdaterQueue represents a queue to handle update repo licenses
licenseUpdaterQueue *queue.WorkerPoolQueue[*LicenseUpdaterOptions]
@@ -38,34 +36,6 @@ func AddRepoToLicenseUpdaterQueue(opts *LicenseUpdaterOptions) error {
return licenseUpdaterQueue.Push(opts)
}
-func loadLicenseAliases() error {
- if licenseAliases != nil {
- return nil
- }
-
- data, err := options.AssetFS().ReadFile("license", "etc", "license-aliases.json")
- if err != nil {
- return err
- }
- err = json.Unmarshal(data, &licenseAliases)
- if err != nil {
- return err
- }
- return nil
-}
-
-func ConvertLicenseName(name string) string {
- if err := loadLicenseAliases(); err != nil {
- return name
- }
-
- v, ok := licenseAliases[name]
- if ok {
- return v
- }
- return name
-}
-
func InitLicenseClassifier() error {
// threshold should be 0.84~0.86 or the test will be failed
classifier = licenseclassifier.NewClassifier(.85)
@@ -74,20 +44,13 @@ func InitLicenseClassifier() error {
return err
}
- existLicense := make(container.Set[string])
- if len(licenseFiles) > 0 {
- for _, licenseFile := range licenseFiles {
- licenseName := ConvertLicenseName(licenseFile)
- if existLicense.Contains(licenseName) {
- continue
- }
- existLicense.Add(licenseName)
- data, err := options.License(licenseFile)
- if err != nil {
- return err
- }
- classifier.AddContent("License", licenseFile, licenseName, data)
+ for _, licenseFile := range licenseFiles {
+ licenseName := licenseFile
+ data, err := options.License(licenseFile)
+ if err != nil {
+ return err
}
+ classifier.AddContent("License", licenseName, licenseName, data)
}
return nil
}
diff --git a/services/repository/license_test.go b/services/repository/license_test.go
index 9d3e0f36e3..9e74a268f5 100644
--- a/services/repository/license_test.go
+++ b/services/repository/license_test.go
@@ -11,6 +11,7 @@ import (
repo_module "code.gitea.io/gitea/modules/repository"
"github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
)
func Test_detectLicense(t *testing.T) {
@@ -33,9 +34,7 @@ func Test_detectLicense(t *testing.T) {
},
}
- repo_module.LoadRepoConfig()
- err := loadLicenseAliases()
- assert.NoError(t, err)
+ require.NoError(t, repo_module.LoadRepoConfig())
for _, licenseName := range repo_module.Licenses {
license, err := repo_module.GetLicense(licenseName, &repo_module.LicenseValues{
Owner: "Gitea",
@@ -48,12 +47,11 @@ func Test_detectLicense(t *testing.T) {
tests = append(tests, DetectLicenseTest{
name: fmt.Sprintf("single license test: %s", licenseName),
arg: string(license),
- want: []string{ConvertLicenseName(licenseName)},
+ want: []string{licenseName},
})
}
- err = InitLicenseClassifier()
- assert.NoError(t, err)
+ require.NoError(t, InitLicenseClassifier())
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
license, err := detectLicense(strings.NewReader(tt.arg))
diff --git a/services/repository/migrate.go b/services/repository/migrate.go
index 6f3a87afa3..9a5c6ffb0f 100644
--- a/services/repository/migrate.go
+++ b/services/repository/migrate.go
@@ -13,8 +13,10 @@ import (
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/organization"
repo_model "code.gitea.io/gitea/models/repo"
+ unit_model "code.gitea.io/gitea/models/unit"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/lfs"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/migration"
@@ -120,8 +122,8 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User,
return repo, fmt.Errorf("checkDaemonExportOK: %w", err)
}
- if stdout, _, err := git.NewCommand(ctx, "update-server-info").
- RunStdString(&git.RunOpts{Dir: repoPath}); err != nil {
+ if stdout, _, err := git.NewCommand("update-server-info").
+ RunStdString(ctx, &git.RunOpts{Dir: repoPath}); err != nil {
log.Error("MigrateRepositoryGitData(git update-server-info) in %v: Stdout: %s\nError: %v", repo, stdout, err)
return repo, fmt.Errorf("error in MigrateRepositoryGitData(git update-server-info): %w", err)
}
@@ -230,9 +232,9 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User,
// this is necessary for sync local tags from remote
configName := fmt.Sprintf("remote.%s.fetch", mirrorModel.GetRemoteName())
- if stdout, _, err := git.NewCommand(ctx, "config").
+ if stdout, _, err := git.NewCommand("config").
AddOptionValues("--add", configName, `+refs/tags/*:refs/tags/*`).
- RunStdString(&git.RunOpts{Dir: repoPath}); err != nil {
+ RunStdString(ctx, &git.RunOpts{Dir: repoPath}); err != nil {
log.Error("MigrateRepositoryGitData(git config --add <remote> +refs/tags/*:refs/tags/*) in %v: Stdout: %s\nError: %v", repo, stdout, err)
return repo, fmt.Errorf("error in MigrateRepositoryGitData(git config --add <remote> +refs/tags/*:refs/tags/*): %w", err)
}
@@ -245,15 +247,28 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User,
}
}
+ var enableRepoUnits []repo_model.RepoUnit
+ if opts.Releases && !unit_model.TypeReleases.UnitGlobalDisabled() {
+ enableRepoUnits = append(enableRepoUnits, repo_model.RepoUnit{RepoID: repo.ID, Type: unit_model.TypeReleases})
+ }
+ if opts.Wiki && !unit_model.TypeWiki.UnitGlobalDisabled() {
+ enableRepoUnits = append(enableRepoUnits, repo_model.RepoUnit{RepoID: repo.ID, Type: unit_model.TypeWiki})
+ }
+ if len(enableRepoUnits) > 0 {
+ err = UpdateRepositoryUnits(ctx, repo, enableRepoUnits, nil)
+ if err != nil {
+ return nil, err
+ }
+ }
return repo, committer.Commit()
}
// cleanUpMigrateGitConfig removes mirror info which prevents "push --all".
// This also removes possible user credentials.
func cleanUpMigrateGitConfig(ctx context.Context, repoPath string) error {
- cmd := git.NewCommand(ctx, "remote", "rm", "origin")
+ cmd := git.NewCommand("remote", "rm", "origin")
// if the origin does not exist
- _, _, err := cmd.RunStdString(&git.RunOpts{
+ _, _, err := cmd.RunStdString(ctx, &git.RunOpts{
Dir: repoPath,
})
if err != nil && !git.IsRemoteNotExistError(err) {
@@ -264,17 +279,16 @@ func cleanUpMigrateGitConfig(ctx context.Context, repoPath string) error {
// CleanUpMigrateInfo finishes migrating repository and/or wiki with things that don't need to be done for mirrors.
func CleanUpMigrateInfo(ctx context.Context, repo *repo_model.Repository) (*repo_model.Repository, error) {
- repoPath := repo.RepoPath()
- if err := repo_module.CreateDelegateHooks(repoPath); err != nil {
+ if err := gitrepo.CreateDelegateHooks(ctx, repo); err != nil {
return repo, fmt.Errorf("createDelegateHooks: %w", err)
}
if repo.HasWiki() {
- if err := repo_module.CreateDelegateHooks(repo.WikiPath()); err != nil {
+ if err := gitrepo.CreateDelegateHooks(ctx, repo.WikiStorageRepo()); err != nil {
return repo, fmt.Errorf("createDelegateHooks.(wiki): %w", err)
}
}
- _, _, err := git.NewCommand(ctx, "remote", "rm", "origin").RunStdString(&git.RunOpts{Dir: repoPath})
+ _, _, err := git.NewCommand("remote", "rm", "origin").RunStdString(ctx, &git.RunOpts{Dir: repo.RepoPath()})
if err != nil && !git.IsRemoteNotExistError(err) {
return repo, fmt.Errorf("CleanUpMigrateInfo: %w", err)
}
diff --git a/services/repository/push.go b/services/repository/push.go
index 0ea51f9c07..6d3b9dd252 100644
--- a/services/repository/push.go
+++ b/services/repository/push.go
@@ -23,6 +23,7 @@ import (
repo_module "code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
issue_service "code.gitea.io/gitea/services/issue"
notify_service "code.gitea.io/gitea/services/notify"
pull_service "code.gitea.io/gitea/services/pull"
@@ -133,23 +134,26 @@ func pushUpdates(optsList []*repo_module.PushUpdateOptions) error {
} else { // is new tag
newCommit, err := gitRepo.GetCommit(opts.NewCommitID)
if err != nil {
- return fmt.Errorf("gitRepo.GetCommit(%s) in %s/%s[%d]: %w", opts.NewCommitID, repo.OwnerName, repo.Name, repo.ID, err)
+ // in case there is dirty data, for example, the "github.com/git/git" repository has tags pointing to non-existing commits
+ if !errors.Is(err, util.ErrNotExist) {
+ log.Error("Unable to get tag commit: gitRepo.GetCommit(%s) in %s/%s[%d]: %v", opts.NewCommitID, repo.OwnerName, repo.Name, repo.ID, err)
+ }
+ } else {
+ commits := repo_module.NewPushCommits()
+ commits.HeadCommit = repo_module.CommitToPushCommit(newCommit)
+ commits.CompareURL = repo.ComposeCompareURL(objectFormat.EmptyObjectID().String(), opts.NewCommitID)
+
+ notify_service.PushCommits(
+ ctx, pusher, repo,
+ &repo_module.PushUpdateOptions{
+ RefFullName: opts.RefFullName,
+ OldCommitID: objectFormat.EmptyObjectID().String(),
+ NewCommitID: opts.NewCommitID,
+ }, commits)
+
+ addTags = append(addTags, tagName)
+ notify_service.CreateRef(ctx, pusher, repo, opts.RefFullName, opts.NewCommitID)
}
-
- commits := repo_module.NewPushCommits()
- commits.HeadCommit = repo_module.CommitToPushCommit(newCommit)
- commits.CompareURL = repo.ComposeCompareURL(objectFormat.EmptyObjectID().String(), opts.NewCommitID)
-
- notify_service.PushCommits(
- ctx, pusher, repo,
- &repo_module.PushUpdateOptions{
- RefFullName: opts.RefFullName,
- OldCommitID: objectFormat.EmptyObjectID().String(),
- NewCommitID: opts.NewCommitID,
- }, commits)
-
- addTags = append(addTags, tagName)
- notify_service.CreateRef(ctx, pusher, repo, opts.RefFullName, opts.NewCommitID)
}
} else if opts.RefFullName.IsBranch() {
if pusher == nil || pusher.ID != opts.PusherID {
@@ -163,59 +167,25 @@ func pushUpdates(optsList []*repo_module.PushUpdateOptions) error {
}
}
- branch := opts.RefFullName.BranchName()
if !opts.IsDelRef() {
+ branch := opts.RefFullName.BranchName()
+
log.Trace("TriggerTask '%s/%s' by %s", repo.Name, branch, pusher.Name)
- go pull_service.AddTestPullRequestTask(pusher, repo.ID, branch, true, opts.OldCommitID, opts.NewCommitID)
newCommit, err := gitRepo.GetCommit(opts.NewCommitID)
if err != nil {
return fmt.Errorf("gitRepo.GetCommit(%s) in %s/%s[%d]: %w", opts.NewCommitID, repo.OwnerName, repo.Name, repo.ID, err)
}
- refName := opts.RefName()
-
// Push new branch.
var l []*git.Commit
if opts.IsNewRef() {
- if repo.IsEmpty { // Change default branch and empty status only if pushed ref is non-empty branch.
- repo.DefaultBranch = refName
- repo.IsEmpty = false
- if repo.DefaultBranch != setting.Repository.DefaultBranch {
- if err := gitrepo.SetDefaultBranch(ctx, repo, repo.DefaultBranch); err != nil {
- return err
- }
- }
- // Update the is empty and default_branch columns
- if err := repo_model.UpdateRepositoryCols(ctx, repo, "default_branch", "is_empty"); err != nil {
- return fmt.Errorf("UpdateRepositoryCols: %w", err)
- }
- }
-
- l, err = newCommit.CommitsBeforeLimit(10)
- if err != nil {
- return fmt.Errorf("newCommit.CommitsBeforeLimit: %w", err)
- }
- notify_service.CreateRef(ctx, pusher, repo, opts.RefFullName, opts.NewCommitID)
+ l, err = pushNewBranch(ctx, repo, pusher, opts, newCommit)
} else {
- l, err = newCommit.CommitsBeforeUntil(opts.OldCommitID)
- if err != nil {
- return fmt.Errorf("newCommit.CommitsBeforeUntil: %w", err)
- }
-
- isForcePush, err := newCommit.IsForcePush(opts.OldCommitID)
- if err != nil {
- log.Error("IsForcePush %s:%s failed: %v", repo.FullName(), branch, err)
- }
-
- if isForcePush {
- log.Trace("Push %s is a force push", opts.NewCommitID)
-
- cache.Remove(repo.GetCommitsCountCacheKey(opts.RefName(), true))
- } else {
- // TODO: increment update the commit count cache but not remove
- cache.Remove(repo.GetCommitsCountCacheKey(opts.RefName(), true))
- }
+ l, err = pushUpdateBranch(ctx, repo, pusher, opts, newCommit)
+ }
+ if err != nil {
+ return err
}
// delete cache for divergence
@@ -232,36 +202,11 @@ func pushUpdates(optsList []*repo_module.PushUpdateOptions) error {
commits := repo_module.GitToPushCommits(l)
commits.HeadCommit = repo_module.CommitToPushCommit(newCommit)
- if err := issue_service.UpdateIssuesCommit(ctx, pusher, repo, commits.Commits, refName); err != nil {
+ if err := issue_service.UpdateIssuesCommit(ctx, pusher, repo, commits.Commits, opts.RefName()); err != nil {
log.Error("updateIssuesCommit: %v", err)
}
- oldCommitID := opts.OldCommitID
- if oldCommitID == objectFormat.EmptyObjectID().String() && len(commits.Commits) > 0 {
- oldCommit, err := gitRepo.GetCommit(commits.Commits[len(commits.Commits)-1].Sha1)
- if err != nil && !git.IsErrNotExist(err) {
- log.Error("unable to GetCommit %s from %-v: %v", oldCommitID, repo, err)
- }
- if oldCommit != nil {
- for i := 0; i < oldCommit.ParentCount(); i++ {
- commitID, _ := oldCommit.ParentID(i)
- if !commitID.IsZero() {
- oldCommitID = commitID.String()
- break
- }
- }
- }
- }
-
- if oldCommitID == objectFormat.EmptyObjectID().String() && repo.DefaultBranch != branch {
- oldCommitID = repo.DefaultBranch
- }
-
- if oldCommitID != objectFormat.EmptyObjectID().String() {
- commits.CompareURL = repo.ComposeCompareURL(oldCommitID, opts.NewCommitID)
- } else {
- commits.CompareURL = ""
- }
+ commits.CompareURL = getCompareURL(repo, gitRepo, objectFormat, commits.Commits, opts)
if len(commits.Commits) > setting.UI.FeedMaxCommitNum {
commits.Commits = commits.Commits[:setting.UI.FeedMaxCommitNum]
@@ -274,12 +219,7 @@ func pushUpdates(optsList []*repo_module.PushUpdateOptions) error {
log.Error("repo_module.CacheRef %s/%s failed: %v", repo.ID, branch, err)
}
} else {
- notify_service.DeleteRef(ctx, pusher, repo, opts.RefFullName)
-
- if err := pull_service.AdjustPullsCausedByBranchDeleted(ctx, pusher, repo, branch); err != nil {
- // close all related pulls
- log.Error("close related pull request failed: %v", err)
- }
+ pushDeleteBranch(ctx, repo, pusher, opts)
}
// Even if user delete a branch on a repository which he didn't watch, he will be watch that.
@@ -290,8 +230,11 @@ func pushUpdates(optsList []*repo_module.PushUpdateOptions) error {
log.Trace("Non-tag and non-branch commits pushed.")
}
}
- if err := PushUpdateAddDeleteTags(ctx, repo, gitRepo, addTags, delTags); err != nil {
- return fmt.Errorf("PushUpdateAddDeleteTags: %w", err)
+
+ if len(addTags)+len(delTags) > 0 {
+ if err := PushUpdateAddDeleteTags(ctx, repo, gitRepo, addTags, delTags); err != nil {
+ return fmt.Errorf("PushUpdateAddDeleteTags: %w", err)
+ }
}
// Change repository last updated time.
@@ -302,6 +245,102 @@ func pushUpdates(optsList []*repo_module.PushUpdateOptions) error {
return nil
}
+func getCompareURL(repo *repo_model.Repository, gitRepo *git.Repository, objectFormat git.ObjectFormat, commits []*repo_module.PushCommit, opts *repo_module.PushUpdateOptions) string {
+ oldCommitID := opts.OldCommitID
+ if oldCommitID == objectFormat.EmptyObjectID().String() && len(commits) > 0 {
+ oldCommit, err := gitRepo.GetCommit(commits[len(commits)-1].Sha1)
+ if err != nil && !git.IsErrNotExist(err) {
+ log.Error("unable to GetCommit %s from %-v: %v", oldCommitID, repo, err)
+ }
+ if oldCommit != nil {
+ for i := 0; i < oldCommit.ParentCount(); i++ {
+ commitID, _ := oldCommit.ParentID(i)
+ if !commitID.IsZero() {
+ oldCommitID = commitID.String()
+ break
+ }
+ }
+ }
+ }
+
+ if oldCommitID == objectFormat.EmptyObjectID().String() && repo.DefaultBranch != opts.RefFullName.BranchName() {
+ oldCommitID = repo.DefaultBranch
+ }
+
+ if oldCommitID != objectFormat.EmptyObjectID().String() {
+ return repo.ComposeCompareURL(oldCommitID, opts.NewCommitID)
+ }
+ return ""
+}
+
+func pushNewBranch(ctx context.Context, repo *repo_model.Repository, pusher *user_model.User, opts *repo_module.PushUpdateOptions, newCommit *git.Commit) ([]*git.Commit, error) {
+ if repo.IsEmpty { // Change default branch and empty status only if pushed ref is non-empty branch.
+ repo.DefaultBranch = opts.RefName()
+ repo.IsEmpty = false
+ if repo.DefaultBranch != setting.Repository.DefaultBranch {
+ if err := gitrepo.SetDefaultBranch(ctx, repo, repo.DefaultBranch); err != nil {
+ return nil, err
+ }
+ }
+ // Update the is empty and default_branch columns
+ if err := repo_model.UpdateRepositoryCols(ctx, repo, "default_branch", "is_empty"); err != nil {
+ return nil, fmt.Errorf("UpdateRepositoryCols: %w", err)
+ }
+ }
+
+ l, err := newCommit.CommitsBeforeLimit(10)
+ if err != nil {
+ return nil, fmt.Errorf("newCommit.CommitsBeforeLimit: %w", err)
+ }
+ notify_service.CreateRef(ctx, pusher, repo, opts.RefFullName, opts.NewCommitID)
+ return l, nil
+}
+
+func pushUpdateBranch(_ context.Context, repo *repo_model.Repository, pusher *user_model.User, opts *repo_module.PushUpdateOptions, newCommit *git.Commit) ([]*git.Commit, error) {
+ l, err := newCommit.CommitsBeforeUntil(opts.OldCommitID)
+ if err != nil {
+ return nil, fmt.Errorf("newCommit.CommitsBeforeUntil: %w", err)
+ }
+
+ branch := opts.RefFullName.BranchName()
+
+ isForcePush, err := newCommit.IsForcePush(opts.OldCommitID)
+ if err != nil {
+ log.Error("IsForcePush %s:%s failed: %v", repo.FullName(), branch, err)
+ }
+
+ // only a branch update can trigger a pull request task, because when a branch is first created the pull request doesn't exist yet
+ go pull_service.AddTestPullRequestTask(pull_service.TestPullRequestOptions{
+ RepoID: repo.ID,
+ Doer: pusher,
+ Branch: branch,
+ IsSync: true,
+ IsForcePush: isForcePush,
+ OldCommitID: opts.OldCommitID,
+ NewCommitID: opts.NewCommitID,
+ })
+
+ if isForcePush {
+ log.Trace("Push %s is a force push", opts.NewCommitID)
+
+ cache.Remove(repo.GetCommitsCountCacheKey(opts.RefName(), true))
+ } else {
+ // TODO: increment/update the commit count cache instead of removing it
+ cache.Remove(repo.GetCommitsCountCacheKey(opts.RefName(), true))
+ }
+
+ return l, nil
+}
+
+func pushDeleteBranch(ctx context.Context, repo *repo_model.Repository, pusher *user_model.User, opts *repo_module.PushUpdateOptions) {
+ notify_service.DeleteRef(ctx, pusher, repo, opts.RefFullName)
+
+ if err := pull_service.AdjustPullsCausedByBranchDeleted(ctx, pusher, repo, opts.RefFullName.BranchName()); err != nil {
+ // close all related pulls
+ log.Error("close related pull request failed: %v", err)
+ }
+}
+
// PushUpdateAddDeleteTags updates a number of added and delete tags
func PushUpdateAddDeleteTags(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, addTags, delTags []string) error {
return db.WithTx(ctx, func(ctx context.Context) error {
diff --git a/services/repository/repository.go b/services/repository/repository.go
index 59b4491132..fcc617979e 100644
--- a/services/repository/repository.go
+++ b/services/repository/repository.go
@@ -11,7 +11,6 @@ import (
"code.gitea.io/gitea/models/git"
issues_model "code.gitea.io/gitea/models/issues"
"code.gitea.io/gitea/models/organization"
- packages_model "code.gitea.io/gitea/models/packages"
repo_model "code.gitea.io/gitea/models/repo"
system_model "code.gitea.io/gitea/models/system"
"code.gitea.io/gitea/models/unit"
@@ -63,11 +62,7 @@ func DeleteRepository(ctx context.Context, doer *user_model.User, repo *repo_mod
notify_service.DeleteRepository(ctx, doer, repo)
}
- if err := DeleteRepositoryDirectly(ctx, doer, repo.ID); err != nil {
- return err
- }
-
- return packages_model.UnlinkRepositoryFromAllPackages(ctx, repo.ID)
+ return DeleteRepositoryDirectly(ctx, doer, repo.ID)
}
// PushCreateRepo creates a repository when a new repository is pushed to an appropriate namespace
diff --git a/services/repository/setting.go b/services/repository/setting.go
index b82f24271e..e0c787dd2d 100644
--- a/services/repository/setting.go
+++ b/services/repository/setting.go
@@ -7,7 +7,6 @@ import (
"context"
"slices"
- actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/models/db"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unit"
@@ -29,7 +28,7 @@ func UpdateRepositoryUnits(ctx context.Context, repo *repo_model.Repository, uni
}
if slices.Contains(deleteUnitTypes, unit.TypeActions) {
- if err := actions_model.CleanRepoScheduleTasks(ctx, repo); err != nil {
+ if err := actions_service.CleanRepoScheduleTasks(ctx, repo); err != nil {
log.Error("CleanRepoScheduleTasks: %v", err)
}
}
diff --git a/services/repository/transfer.go b/services/repository/transfer.go
index bd3bf326b4..a589bc469d 100644
--- a/services/repository/transfer.go
+++ b/services/repository/transfer.go
@@ -17,6 +17,7 @@ import (
project_model "code.gitea.io/gitea/models/project"
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/globallock"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/util"
@@ -330,13 +331,13 @@ func changeRepositoryName(ctx context.Context, repo *repo_model.Repository, newR
return fmt.Errorf("IsRepositoryExist: %w", err)
} else if has {
return repo_model.ErrRepoAlreadyExist{
- Uname: repo.Owner.Name,
+ Uname: repo.OwnerName,
Name: newRepoName,
}
}
- newRepoPath := repo_model.RepoPath(repo.Owner.Name, newRepoName)
- if err = util.Rename(repo.RepoPath(), newRepoPath); err != nil {
+ if err = gitrepo.RenameRepository(ctx, repo,
+ repo_model.StorageRepo(repo_model.RelativePath(repo.OwnerName, newRepoName))); err != nil {
return fmt.Errorf("rename repository directory: %w", err)
}
diff --git a/services/secrets/secrets.go b/services/secrets/secrets.go
index 031c474dd7..ec6a3cb062 100644
--- a/services/secrets/secrets.go
+++ b/services/secrets/secrets.go
@@ -10,7 +10,7 @@ import (
secret_model "code.gitea.io/gitea/models/secret"
)
-func CreateOrUpdateSecret(ctx context.Context, ownerID, repoID int64, name, data string) (*secret_model.Secret, bool, error) {
+func CreateOrUpdateSecret(ctx context.Context, ownerID, repoID int64, name, data, description string) (*secret_model.Secret, bool, error) {
if err := ValidateName(name); err != nil {
return nil, false, err
}
@@ -25,14 +25,14 @@ func CreateOrUpdateSecret(ctx context.Context, ownerID, repoID int64, name, data
}
if len(s) == 0 {
- s, err := secret_model.InsertEncryptedSecret(ctx, ownerID, repoID, name, data)
+ s, err := secret_model.InsertEncryptedSecret(ctx, ownerID, repoID, name, data, description)
if err != nil {
return nil, false, err
}
return s, true, nil
}
- if err := secret_model.UpdateSecret(ctx, s[0].ID, data); err != nil {
+ if err := secret_model.UpdateSecret(ctx, s[0].ID, data, description); err != nil {
return nil, false, err
}
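
Callers of the updated signature now pass a description alongside the secret value. A hedged usage sketch follows; the IDs, names and strings are purely illustrative, and the Secret model is assumed to expose a Name field as elsewhere in Gitea.

// Sketch: creating or updating an Actions secret with the new description parameter.
package example

import (
	"context"
	"log"

	secret_service "code.gitea.io/gitea/services/secrets"
)

func upsertDeployToken(ctx context.Context, ownerID, repoID int64) error {
	s, created, err := secret_service.CreateOrUpdateSecret(
		ctx, ownerID, repoID,
		"DEPLOY_TOKEN",                       // name, checked by ValidateName
		"s3cr3t-value",                       // data, stored encrypted
		"token used by the deploy workflow",  // the newly added description
	)
	if err != nil {
		return err
	}
	log.Printf("secret %s stored (created=%v)", s.Name, created)
	return nil
}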
diff --git a/services/user/user_test.go b/services/user/user_test.go
index 162a735cd4..28a0df8628 100644
--- a/services/user/user_test.go
+++ b/services/user/user_test.go
@@ -150,7 +150,7 @@ func TestRenameUser(t *testing.T) {
redirectUID, err := user_model.LookupUserRedirect(db.DefaultContext, oldUsername)
assert.NoError(t, err)
- assert.EqualValues(t, user.ID, redirectUID)
+ assert.Equal(t, user.ID, redirectUID)
unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerID: user.ID, OwnerName: user.Name})
})
diff --git a/services/versioned_migration/migration.go b/services/versioned_migration/migration.go
new file mode 100644
index 0000000000..daec89d7c1
--- /dev/null
+++ b/services/versioned_migration/migration.go
@@ -0,0 +1,24 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package versioned_migration //nolint
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/migrations"
+ "code.gitea.io/gitea/modules/globallock"
+
+ "xorm.io/xorm"
+)
+
+func Migrate(ctx context.Context, x *xorm.Engine) error {
+ // only one instance can do the migration at the same time if there are multiple instances
+ release, err := globallock.Lock(ctx, "gitea_versioned_migration")
+ if err != nil {
+ return err
+ }
+ defer release()
+
+ return migrations.Migrate(x)
+}
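
A hedged sketch of how this wrapper might be invoked: in Gitea itself the engine comes from the configured database and the setting/globallock subsystems are initialized during boot, so the driver and DSN below are placeholders only.

// Sketch: run the lock-guarded versioned migration against an xorm engine.
// The driver and DSN are illustrative; globallock must already be initialized.
package main

import (
	"context"
	"log"

	"code.gitea.io/gitea/services/versioned_migration"

	_ "github.com/lib/pq" // hypothetical driver choice for this sketch
	"xorm.io/xorm"
)

func main() {
	engine, err := xorm.NewEngine("postgres", "postgres://gitea:gitea@localhost/gitea?sslmode=disable")
	if err != nil {
		log.Fatalf("open database: %v", err)
	}
	defer engine.Close()

	// Only one instance performs the schema migration at a time; the others
	// block on the "gitea_versioned_migration" lock until it is released.
	if err := versioned_migration.Migrate(context.Background(), engine); err != nil {
		log.Fatalf("migrate: %v", err)
	}
}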
diff --git a/services/webhook/deliver.go b/services/webhook/deliver.go
index 4707602cdf..df32d5741e 100644
--- a/services/webhook/deliver.go
+++ b/services/webhook/deliver.go
@@ -18,6 +18,7 @@ import (
"sync"
"time"
+ user_model "code.gitea.io/gitea/models/user"
webhook_model "code.gitea.io/gitea/models/webhook"
"code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/hostmatcher"
@@ -92,10 +93,10 @@ func newDefaultRequest(ctx context.Context, w *webhook_model.Webhook, t *webhook
}
body = []byte(t.PayloadContent)
- return req, body, addDefaultHeaders(req, []byte(w.Secret), t, body)
+ return req, body, addDefaultHeaders(req, []byte(w.Secret), w, t, body)
}
-func addDefaultHeaders(req *http.Request, secret []byte, t *webhook_model.HookTask, payloadContent []byte) error {
+func addDefaultHeaders(req *http.Request, secret []byte, w *webhook_model.Webhook, t *webhook_model.HookTask, payloadContent []byte) error {
var signatureSHA1 string
var signatureSHA256 string
if len(secret) > 0 {
@@ -112,10 +113,27 @@ func addDefaultHeaders(req *http.Request, secret []byte, t *webhook_model.HookTa
event := t.EventType.Event()
eventType := string(t.EventType)
+ targetType := "default"
+ if w.IsSystemWebhook {
+ targetType = "system"
+ } else if w.RepoID != 0 {
+ targetType = "repository"
+ } else if w.OwnerID != 0 {
+ owner, err := user_model.GetUserByID(req.Context(), w.OwnerID)
+ if owner != nil && err == nil {
+ if owner.IsOrganization() {
+ targetType = "organization"
+ } else {
+ targetType = "user"
+ }
+ }
+ }
+
req.Header.Add("X-Gitea-Delivery", t.UUID)
req.Header.Add("X-Gitea-Event", event)
req.Header.Add("X-Gitea-Event-Type", eventType)
req.Header.Add("X-Gitea-Signature", signatureSHA256)
+ req.Header.Add("X-Gitea-Hook-Installation-Target-Type", targetType)
req.Header.Add("X-Gogs-Delivery", t.UUID)
req.Header.Add("X-Gogs-Event", event)
req.Header.Add("X-Gogs-Event-Type", eventType)
@@ -125,6 +143,7 @@ func addDefaultHeaders(req *http.Request, secret []byte, t *webhook_model.HookTa
req.Header["X-GitHub-Delivery"] = []string{t.UUID}
req.Header["X-GitHub-Event"] = []string{event}
req.Header["X-GitHub-Event-Type"] = []string{eventType}
+ req.Header["X-GitHub-Hook-Installation-Target-Type"] = []string{targetType}
return nil
}
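
On the receiving end, the new installation-target-type header lets an endpoint tell repository, organization, user and system hooks apart without decoding the payload. A minimal receiver sketch (the path and port are arbitrary):

// Sketch: a webhook receiver inspecting the headers set by addDefaultHeaders above.
package main

import (
	"log"
	"net/http"
)

func main() {
	http.HandleFunc("/webhook", func(w http.ResponseWriter, r *http.Request) {
		delivery := r.Header.Get("X-Gitea-Delivery")
		event := r.Header.Get("X-Gitea-Event")
		// One of "repository", "organization", "user", "system" or "default",
		// as computed in addDefaultHeaders.
		targetType := r.Header.Get("X-Gitea-Hook-Installation-Target-Type")

		log.Printf("delivery=%s event=%s target=%s", delivery, event, targetType)
		w.WriteHeader(http.StatusNoContent)
	})
	log.Fatal(http.ListenAndServe(":8080", nil))
}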
diff --git a/services/webhook/deliver_test.go b/services/webhook/deliver_test.go
index d0cfc1598f..be1347c07b 100644
--- a/services/webhook/deliver_test.go
+++ b/services/webhook/deliver_test.go
@@ -4,7 +4,6 @@
package webhook
import (
- "context"
"io"
"net/http"
"net/http/httptest"
@@ -118,7 +117,7 @@ func TestWebhookDeliverAuthorizationHeader(t *testing.T) {
assert.NoError(t, err)
assert.NotNil(t, hookTask)
- assert.NoError(t, Deliver(context.Background(), hookTask))
+ assert.NoError(t, Deliver(t.Context(), hookTask))
select {
case <-done:
case <-time.After(5 * time.Second):
@@ -139,7 +138,7 @@ func TestWebhookDeliverHookTask(t *testing.T) {
case "/webhook/66d222a5d6349e1311f551e50722d837e30fce98":
// Version 1
assert.Equal(t, "push", r.Header.Get("X-GitHub-Event"))
- assert.Equal(t, "", r.Header.Get("Content-Type"))
+ assert.Empty(t, r.Header.Get("Content-Type"))
body, err := io.ReadAll(r.Body)
assert.NoError(t, err)
assert.Equal(t, `{"data": 42}`, string(body))
@@ -185,7 +184,7 @@ func TestWebhookDeliverHookTask(t *testing.T) {
assert.NoError(t, err)
assert.NotNil(t, hookTask)
- assert.NoError(t, Deliver(context.Background(), hookTask))
+ assert.NoError(t, Deliver(t.Context(), hookTask))
select {
case <-done:
case <-time.After(5 * time.Second):
@@ -211,7 +210,7 @@ func TestWebhookDeliverHookTask(t *testing.T) {
assert.NoError(t, err)
assert.NotNil(t, hookTask)
- assert.NoError(t, Deliver(context.Background(), hookTask))
+ assert.NoError(t, Deliver(t.Context(), hookTask))
select {
case <-done:
case <-time.After(5 * time.Second):
@@ -280,7 +279,7 @@ func TestWebhookDeliverSpecificTypes(t *testing.T) {
assert.NoError(t, err)
assert.NotNil(t, hookTask)
- assert.NoError(t, Deliver(context.Background(), hookTask))
+ assert.NoError(t, Deliver(t.Context(), hookTask))
select {
case gotBody := <-cases[typ].gotBody:
diff --git a/services/webhook/dingtalk.go b/services/webhook/dingtalk.go
index 992b8c566f..5afca8d65a 100644
--- a/services/webhook/dingtalk.go
+++ b/services/webhook/dingtalk.go
@@ -170,6 +170,18 @@ func (dc dingtalkConvertor) Package(p *api.PackagePayload) (DingtalkPayload, err
return createDingtalkPayload(text, text, "view package", p.Package.HTMLURL), nil
}
+func (dc dingtalkConvertor) Status(p *api.CommitStatusPayload) (DingtalkPayload, error) {
+ text, _ := getStatusPayloadInfo(p, noneLinkFormatter, true)
+
+ return createDingtalkPayload(text, text, "Status Changed", p.TargetURL), nil
+}
+
+func (dingtalkConvertor) WorkflowJob(p *api.WorkflowJobPayload) (DingtalkPayload, error) {
+ text, _ := getWorkflowJobPayloadInfo(p, noneLinkFormatter, true)
+
+ return createDingtalkPayload(text, text, "Workflow Job", p.WorkflowJob.HTMLURL), nil
+}
+
func createDingtalkPayload(title, text, singleTitle, singleURL string) DingtalkPayload {
return DingtalkPayload{
MsgType: "actionCard",
diff --git a/services/webhook/dingtalk_test.go b/services/webhook/dingtalk_test.go
index 25f47347d0..763d23048a 100644
--- a/services/webhook/dingtalk_test.go
+++ b/services/webhook/dingtalk_test.go
@@ -4,7 +4,6 @@
package webhook
import (
- "context"
"net/url"
"testing"
@@ -236,7 +235,7 @@ func TestDingTalkJSONPayload(t *testing.T) {
PayloadVersion: 2,
}
- req, reqBody, err := newDingtalkRequest(context.Background(), hook, task)
+ req, reqBody, err := newDingtalkRequest(t.Context(), hook, task)
require.NotNil(t, req)
require.NotNil(t, reqBody)
require.NoError(t, err)
diff --git a/services/webhook/discord.go b/services/webhook/discord.go
index 30d930062e..0a7eb0b166 100644
--- a/services/webhook/discord.go
+++ b/services/webhook/discord.go
@@ -265,6 +265,18 @@ func (d discordConvertor) Package(p *api.PackagePayload) (DiscordPayload, error)
return d.createPayload(p.Sender, text, "", p.Package.HTMLURL, color), nil
}
+func (d discordConvertor) Status(p *api.CommitStatusPayload) (DiscordPayload, error) {
+ text, color := getStatusPayloadInfo(p, noneLinkFormatter, false)
+
+ return d.createPayload(p.Sender, text, "", p.TargetURL, color), nil
+}
+
+func (d discordConvertor) WorkflowJob(p *api.WorkflowJobPayload) (DiscordPayload, error) {
+ text, color := getWorkflowJobPayloadInfo(p, noneLinkFormatter, false)
+
+ return d.createPayload(p.Sender, text, "", p.WorkflowJob.HTMLURL, color), nil
+}
+
func newDiscordRequest(_ context.Context, w *webhook_model.Webhook, t *webhook_model.HookTask) (*http.Request, []byte, error) {
meta := &DiscordMeta{}
if err := json.Unmarshal([]byte(w.Meta), meta); err != nil {
diff --git a/services/webhook/discord_test.go b/services/webhook/discord_test.go
index 36b99d452e..7f503e3374 100644
--- a/services/webhook/discord_test.go
+++ b/services/webhook/discord_test.go
@@ -4,7 +4,6 @@
package webhook
import (
- "context"
"testing"
webhook_model "code.gitea.io/gitea/models/webhook"
@@ -303,7 +302,7 @@ func TestDiscordJSONPayload(t *testing.T) {
PayloadVersion: 2,
}
- req, reqBody, err := newDiscordRequest(context.Background(), hook, task)
+ req, reqBody, err := newDiscordRequest(t.Context(), hook, task)
require.NotNil(t, req)
require.NotNil(t, reqBody)
require.NoError(t, err)
diff --git a/services/webhook/feishu.go b/services/webhook/feishu.go
index 4e6aebc39d..274aaf90b3 100644
--- a/services/webhook/feishu.go
+++ b/services/webhook/feishu.go
@@ -166,6 +166,18 @@ func (fc feishuConvertor) Package(p *api.PackagePayload) (FeishuPayload, error)
return newFeishuTextPayload(text), nil
}
+func (fc feishuConvertor) Status(p *api.CommitStatusPayload) (FeishuPayload, error) {
+ text, _ := getStatusPayloadInfo(p, noneLinkFormatter, true)
+
+ return newFeishuTextPayload(text), nil
+}
+
+func (feishuConvertor) WorkflowJob(p *api.WorkflowJobPayload) (FeishuPayload, error) {
+ text, _ := getWorkflowJobPayloadInfo(p, noneLinkFormatter, true)
+
+ return newFeishuTextPayload(text), nil
+}
+
func newFeishuRequest(_ context.Context, w *webhook_model.Webhook, t *webhook_model.HookTask) (*http.Request, []byte, error) {
var pc payloadConvertor[FeishuPayload] = feishuConvertor{}
return newJSONRequest(pc, w, t, true)
diff --git a/services/webhook/feishu_test.go b/services/webhook/feishu_test.go
index ef18333fd4..c4249bdb30 100644
--- a/services/webhook/feishu_test.go
+++ b/services/webhook/feishu_test.go
@@ -4,7 +4,6 @@
package webhook
import (
- "context"
"testing"
webhook_model "code.gitea.io/gitea/models/webhook"
@@ -177,7 +176,7 @@ func TestFeishuJSONPayload(t *testing.T) {
PayloadVersion: 2,
}
- req, reqBody, err := newFeishuRequest(context.Background(), hook, task)
+ req, reqBody, err := newFeishuRequest(t.Context(), hook, task)
require.NotNil(t, req)
require.NotNil(t, reqBody)
require.NoError(t, err)
diff --git a/services/webhook/general.go b/services/webhook/general.go
index dde43bb349..c58f83354d 100644
--- a/services/webhook/general.go
+++ b/services/webhook/general.go
@@ -9,7 +9,9 @@ import (
"net/url"
"strings"
+ user_model "code.gitea.io/gitea/models/user"
webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/util"
@@ -37,11 +39,12 @@ func getPullRequestInfo(p *api.PullRequestPayload) (title, link, by, operator, o
for i, user := range assignList {
assignStringList[i] = user.UserName
}
- if p.Action == api.HookIssueAssigned {
+ switch p.Action {
+ case api.HookIssueAssigned:
operateResult = fmt.Sprintf("%s assign this to %s", p.Sender.UserName, assignList[len(assignList)-1].UserName)
- } else if p.Action == api.HookIssueUnassigned {
+ case api.HookIssueUnassigned:
operateResult = fmt.Sprintf("%s unassigned this for someone", p.Sender.UserName)
- } else if p.Action == api.HookIssueMilestoned {
+ case api.HookIssueMilestoned:
operateResult = fmt.Sprintf("%s/milestone/%d", p.Repository.HTMLURL, p.PullRequest.Milestone.ID)
}
link = p.PullRequest.HTMLURL
@@ -62,11 +65,12 @@ func getIssuesInfo(p *api.IssuePayload) (issueTitle, link, by, operator, operate
for i, user := range assignList {
assignStringList[i] = user.UserName
}
- if p.Action == api.HookIssueAssigned {
+ switch p.Action {
+ case api.HookIssueAssigned:
operateResult = fmt.Sprintf("%s assign this to %s", p.Sender.UserName, assignList[len(assignList)-1].UserName)
- } else if p.Action == api.HookIssueUnassigned {
+ case api.HookIssueUnassigned:
operateResult = fmt.Sprintf("%s unassigned this for someone", p.Sender.UserName)
- } else if p.Action == api.HookIssueMilestoned {
+ case api.HookIssueMilestoned:
operateResult = fmt.Sprintf("%s/milestone/%d", p.Repository.HTMLURL, p.Issue.Milestone.ID)
}
link = p.Issue.HTMLURL
@@ -307,6 +311,53 @@ func getPackagePayloadInfo(p *api.PackagePayload, linkFormatter linkFormatter, w
return text, color
}
+func getStatusPayloadInfo(p *api.CommitStatusPayload, linkFormatter linkFormatter, withSender bool) (text string, color int) {
+ refLink := linkFormatter(p.TargetURL, fmt.Sprintf("%s [%s]", p.Context, base.ShortSha(p.SHA)))
+
+ text = fmt.Sprintf("Commit Status changed: %s - %s", refLink, p.Description)
+ color = greenColor
+ if withSender {
+ if user_model.IsGiteaActionsUserName(p.Sender.UserName) {
+ text += fmt.Sprintf(" by %s", p.Sender.FullName)
+ } else {
+ text += fmt.Sprintf(" by %s", linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName))
+ }
+ }
+
+ return text, color
+}
+
+func getWorkflowJobPayloadInfo(p *api.WorkflowJobPayload, linkFormatter linkFormatter, withSender bool) (text string, color int) {
+ description := p.WorkflowJob.Conclusion
+ if description == "" {
+ description = p.WorkflowJob.Status
+ }
+ refLink := linkFormatter(p.WorkflowJob.HTMLURL, fmt.Sprintf("%s(#%d)", p.WorkflowJob.Name, p.WorkflowJob.RunID)+"["+base.ShortSha(p.WorkflowJob.HeadSha)+"]:"+description)
+
+ text = fmt.Sprintf("Workflow Job %s: %s", p.Action, refLink)
+ switch description {
+ case "waiting":
+ color = orangeColor
+ case "queued":
+ color = orangeColorLight
+ case "success":
+ color = greenColor
+ case "failure":
+ color = redColor
+ case "cancelled":
+ color = yellowColor
+ case "skipped":
+ color = purpleColor
+ default:
+ color = greyColor
+ }
+ if withSender {
+ text += fmt.Sprintf(" by %s", linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName))
+ }
+
+ return text, color
+}
+
// ToHook convert models.Webhook to api.Hook
// This function is not part of the convert package to prevent an import cycle
func ToHook(repoLink string, w *webhook_model.Webhook) (*api.Hook, error) {
diff --git a/services/webhook/matrix.go b/services/webhook/matrix.go
index 96dfa139ac..5bc7ba097e 100644
--- a/services/webhook/matrix.go
+++ b/services/webhook/matrix.go
@@ -15,6 +15,7 @@ import (
"strings"
webhook_model "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/log"
@@ -56,7 +57,7 @@ func newMatrixRequest(_ context.Context, w *webhook_model.Webhook, t *webhook_mo
}
req.Header.Set("Content-Type", "application/json")
- return req, body, addDefaultHeaders(req, []byte(w.Secret), t, body) // likely useless, but has always been sent historially
+ return req, body, addDefaultHeaders(req, []byte(w.Secret), w, t, body) // likely useless, but has always been sent historically
}
const matrixPayloadSizeLimit = 1024 * 64
@@ -244,6 +245,19 @@ func (m matrixConvertor) Package(p *api.PackagePayload) (MatrixPayload, error) {
return m.newPayload(text)
}
+func (m matrixConvertor) Status(p *api.CommitStatusPayload) (MatrixPayload, error) {
+ refLink := htmlLinkFormatter(p.TargetURL, fmt.Sprintf("%s [%s]", p.Context, base.ShortSha(p.SHA)))
+ text := fmt.Sprintf("Commit Status changed: %s - %s", refLink, p.Description)
+
+ return m.newPayload(text)
+}
+
+func (m matrixConvertor) WorkflowJob(p *api.WorkflowJobPayload) (MatrixPayload, error) {
+ text, _ := getWorkflowJobPayloadInfo(p, htmlLinkFormatter, true)
+
+ return m.newPayload(text)
+}
+
var urlRegex = regexp.MustCompile(`<a [^>]*?href="([^">]*?)">(.*?)</a>`)
func getMessageBody(htmlText string) string {
diff --git a/services/webhook/matrix_test.go b/services/webhook/matrix_test.go
index 058f8e3c5f..d36d93c5a7 100644
--- a/services/webhook/matrix_test.go
+++ b/services/webhook/matrix_test.go
@@ -4,7 +4,6 @@
package webhook
import (
- "context"
"testing"
webhook_model "code.gitea.io/gitea/models/webhook"
@@ -211,7 +210,7 @@ func TestMatrixJSONPayload(t *testing.T) {
PayloadVersion: 2,
}
- req, reqBody, err := newMatrixRequest(context.Background(), hook, task)
+ req, reqBody, err := newMatrixRequest(t.Context(), hook, task)
require.NotNil(t, req)
require.NotNil(t, reqBody)
require.NoError(t, err)
diff --git a/services/webhook/msteams.go b/services/webhook/msteams.go
index 1ae7c4f931..f70e235f20 100644
--- a/services/webhook/msteams.go
+++ b/services/webhook/msteams.go
@@ -303,6 +303,34 @@ func (m msteamsConvertor) Package(p *api.PackagePayload) (MSTeamsPayload, error)
), nil
}
+func (m msteamsConvertor) Status(p *api.CommitStatusPayload) (MSTeamsPayload, error) {
+ title, color := getStatusPayloadInfo(p, noneLinkFormatter, false)
+
+ return createMSTeamsPayload(
+ p.Repo,
+ p.Sender,
+ title,
+ "",
+ p.TargetURL,
+ color,
+ &MSTeamsFact{"CommitStatus:", p.Context},
+ ), nil
+}
+
+func (msteamsConvertor) WorkflowJob(p *api.WorkflowJobPayload) (MSTeamsPayload, error) {
+ title, color := getWorkflowJobPayloadInfo(p, noneLinkFormatter, false)
+
+ return createMSTeamsPayload(
+ p.Repo,
+ p.Sender,
+ title,
+ "",
+ p.WorkflowJob.HTMLURL,
+ color,
+ &MSTeamsFact{"WorkflowJob:", p.WorkflowJob.Name},
+ ), nil
+}
+
func createMSTeamsPayload(r *api.Repository, s *api.User, title, text, actionTarget string, color int, fact *MSTeamsFact) MSTeamsPayload {
facts := make([]MSTeamsFact, 0, 2)
if r != nil {
diff --git a/services/webhook/msteams_test.go b/services/webhook/msteams_test.go
index 01e08b918e..0d98b94bad 100644
--- a/services/webhook/msteams_test.go
+++ b/services/webhook/msteams_test.go
@@ -4,7 +4,6 @@
package webhook
import (
- "context"
"testing"
webhook_model "code.gitea.io/gitea/models/webhook"
@@ -336,7 +335,7 @@ func TestMSTeamsPayload(t *testing.T) {
assert.Equal(t, "[test/repo] New wiki page 'index' (Wiki change comment)", pl.Summary)
assert.Len(t, pl.Sections, 1)
assert.Equal(t, "user1", pl.Sections[0].ActivitySubtitle)
- assert.Equal(t, "", pl.Sections[0].Text)
+ assert.Empty(t, pl.Sections[0].Text)
assert.Len(t, pl.Sections[0].Facts, 2)
for _, fact := range pl.Sections[0].Facts {
if fact.Name == "Repository:" {
@@ -357,7 +356,7 @@ func TestMSTeamsPayload(t *testing.T) {
assert.Equal(t, "[test/repo] Wiki page 'index' edited (Wiki change comment)", pl.Summary)
assert.Len(t, pl.Sections, 1)
assert.Equal(t, "user1", pl.Sections[0].ActivitySubtitle)
- assert.Equal(t, "", pl.Sections[0].Text)
+ assert.Empty(t, pl.Sections[0].Text)
assert.Len(t, pl.Sections[0].Facts, 2)
for _, fact := range pl.Sections[0].Facts {
if fact.Name == "Repository:" {
@@ -439,7 +438,7 @@ func TestMSTeamsJSONPayload(t *testing.T) {
PayloadVersion: 2,
}
- req, reqBody, err := newMSTeamsRequest(context.Background(), hook, task)
+ req, reqBody, err := newMSTeamsRequest(t.Context(), hook, task)
require.NotNil(t, req)
require.NotNil(t, reqBody)
require.NoError(t, err)
diff --git a/services/webhook/notifier.go b/services/webhook/notifier.go
index 6c691c21f4..9e3f21de29 100644
--- a/services/webhook/notifier.go
+++ b/services/webhook/notifier.go
@@ -5,7 +5,10 @@ package webhook
import (
"context"
+ "fmt"
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
git_model "code.gitea.io/gitea/models/git"
issues_model "code.gitea.io/gitea/models/issues"
"code.gitea.io/gitea/models/organization"
@@ -15,6 +18,7 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/httplib"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
@@ -602,7 +606,7 @@ func (m *webhookNotifier) IssueChangeMilestone(ctx context.Context, doer *user_m
func (m *webhookNotifier) PushCommits(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, opts *repository.PushUpdateOptions, commits *repository.PushCommits) {
apiPusher := convert.ToUser(ctx, pusher, nil)
- apiCommits, apiHeadCommit, err := commits.ToAPIPayloadCommits(ctx, repo.RepoPath(), repo.HTMLURL())
+ apiCommits, apiHeadCommit, err := commits.ToAPIPayloadCommits(ctx, repo)
if err != nil {
log.Error("commits.ToAPIPayloadCommits failed: %v", err)
return
@@ -841,7 +845,7 @@ func (m *webhookNotifier) DeleteRelease(ctx context.Context, doer *user_model.Us
func (m *webhookNotifier) SyncPushCommits(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, opts *repository.PushUpdateOptions, commits *repository.PushCommits) {
apiPusher := convert.ToUser(ctx, pusher, nil)
- apiCommits, apiHeadCommit, err := commits.ToAPIPayloadCommits(ctx, repo.RepoPath(), repo.HTMLURL())
+ apiCommits, apiHeadCommit, err := commits.ToAPIPayloadCommits(ctx, repo)
if err != nil {
log.Error("commits.ToAPIPayloadCommits failed: %v", err)
return
@@ -865,12 +869,17 @@ func (m *webhookNotifier) SyncPushCommits(ctx context.Context, pusher *user_mode
func (m *webhookNotifier) CreateCommitStatus(ctx context.Context, repo *repo_model.Repository, commit *repository.PushCommit, sender *user_model.User, status *git_model.CommitStatus) {
apiSender := convert.ToUser(ctx, sender, nil)
- apiCommit, err := repository.ToAPIPayloadCommit(ctx, map[string]*user_model.User{}, repo.RepoPath(), repo.HTMLURL(), commit)
+ apiCommit, err := repository.ToAPIPayloadCommit(ctx, map[string]*user_model.User{}, repo, commit)
if err != nil {
log.Error("commits.ToAPIPayloadCommits failed: %v", err)
return
}
+ // As a webhook URL, the target should be an absolute URL. But for internal Actions,
+ // the target URL is a URL path without host and port, so it can be visited from
+ // multiple hosts. Convert it to an absolute URL here.
+ target := httplib.MakeAbsoluteURL(ctx, status.TargetURL)
+
payload := api.CommitStatusPayload{
Context: status.Context,
CreatedAt: status.CreatedUnix.AsTime().UTC(),
@@ -878,7 +887,7 @@ func (m *webhookNotifier) CreateCommitStatus(ctx context.Context, repo *repo_mod
ID: status.ID,
SHA: commit.Sha1,
State: status.State.String(),
- TargetURL: status.TargetURL,
+ TargetURL: target,
Commit: apiCommit,
Repo: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeOwner}),
@@ -935,3 +944,114 @@ func notifyPackage(ctx context.Context, sender *user_model.User, pd *packages_mo
log.Error("PrepareWebhooks: %v", err)
}
}
+
+func (*webhookNotifier) WorkflowJobStatusUpdate(ctx context.Context, repo *repo_model.Repository, sender *user_model.User, job *actions_model.ActionRunJob, task *actions_model.ActionTask) {
+ source := EventSource{
+ Repository: repo,
+ Owner: repo.Owner,
+ }
+
+ var org *api.Organization
+ if repo.Owner.IsOrganization() {
+ org = convert.ToOrganization(ctx, organization.OrgFromUser(repo.Owner))
+ }
+
+ err := job.LoadAttributes(ctx)
+ if err != nil {
+ log.Error("Error loading job attributes: %v", err)
+ return
+ }
+
+ jobIndex := 0
+ jobs, err := actions_model.GetRunJobsByRunID(ctx, job.RunID)
+ if err != nil {
+ log.Error("Error loading getting run jobs: %v", err)
+ return
+ }
+ for i, j := range jobs {
+ if j.ID == job.ID {
+ jobIndex = i
+ break
+ }
+ }
+
+ status, conclusion := toActionStatus(job.Status)
+ var runnerID int64
+ var runnerName string
+ var steps []*api.ActionWorkflowStep
+
+ if task != nil {
+ runnerID = task.RunnerID
+ if runner, ok, _ := db.GetByID[actions_model.ActionRunner](ctx, runnerID); ok {
+ runnerName = runner.Name
+ }
+ for i, step := range task.Steps {
+ stepStatus, stepConclusion := toActionStatus(job.Status)
+ steps = append(steps, &api.ActionWorkflowStep{
+ Name: step.Name,
+ Number: int64(i),
+ Status: stepStatus,
+ Conclusion: stepConclusion,
+ StartedAt: step.Started.AsTime().UTC(),
+ CompletedAt: step.Stopped.AsTime().UTC(),
+ })
+ }
+ }
+
+ if err := PrepareWebhooks(ctx, source, webhook_module.HookEventWorkflowJob, &api.WorkflowJobPayload{
+ Action: status,
+ WorkflowJob: &api.ActionWorkflowJob{
+ ID: job.ID,
+ // There is no dedicated API endpoint for this URL yet
+ URL: fmt.Sprintf("%s/actions/runs/%d/jobs/%d", repo.APIURL(), job.RunID, job.ID),
+ HTMLURL: fmt.Sprintf("%s/jobs/%d", job.Run.HTMLURL(), jobIndex),
+ RunID: job.RunID,
+ // There is no dedicated API endpoint for this URL yet; artifacts are available under a nested URL
+ RunURL: fmt.Sprintf("%s/actions/runs/%d", repo.APIURL(), job.RunID),
+ Name: job.Name,
+ Labels: job.RunsOn,
+ RunAttempt: job.Attempt,
+ HeadSha: job.Run.CommitSHA,
+ HeadBranch: git.RefName(job.Run.Ref).BranchName(),
+ Status: status,
+ Conclusion: conclusion,
+ RunnerID: runnerID,
+ RunnerName: runnerName,
+ Steps: steps,
+ CreatedAt: job.Created.AsTime().UTC(),
+ StartedAt: job.Started.AsTime().UTC(),
+ CompletedAt: job.Stopped.AsTime().UTC(),
+ },
+ Organization: org,
+ Repo: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeOwner}),
+ Sender: convert.ToUser(ctx, sender, nil),
+ }); err != nil {
+ log.Error("PrepareWebhooks: %v", err)
+ }
+}
+
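+// toActionStatus maps a Gitea Actions status to the webhook's "action"/"status" value
+// and, for finished jobs, a "conclusion".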
+func toActionStatus(status actions_model.Status) (string, string) {
+ var action string
+ var conclusion string
+ switch status {
+ // The webhook names conflict with Gitea's internal status names, e.g. Gitea's "waiting" becomes the webhook's "queued"
+ case actions_model.StatusWaiting:
+ action = "queued"
+ case actions_model.StatusBlocked:
+ action = "waiting"
+ case actions_model.StatusRunning:
+ action = "in_progress"
+ }
+ if status.IsDone() {
+ action = "completed"
+ switch status {
+ case actions_model.StatusSuccess:
+ conclusion = "success"
+ case actions_model.StatusCancelled:
+ conclusion = "cancelled"
+ case actions_model.StatusFailure:
+ conclusion = "failure"
+ }
+ }
+ return action, conclusion
+}
diff --git a/services/webhook/packagist.go b/services/webhook/packagist.go
index e66895832b..8829d95da6 100644
--- a/services/webhook/packagist.go
+++ b/services/webhook/packagist.go
@@ -110,6 +110,14 @@ func (pc packagistConvertor) Package(_ *api.PackagePayload) (PackagistPayload, e
return PackagistPayload{}, nil
}
+func (pc packagistConvertor) Status(_ *api.CommitStatusPayload) (PackagistPayload, error) {
+ return PackagistPayload{}, nil
+}
+
+func (pc packagistConvertor) WorkflowJob(_ *api.WorkflowJobPayload) (PackagistPayload, error) {
+ return PackagistPayload{}, nil
+}
+
func newPackagistRequest(_ context.Context, w *webhook_model.Webhook, t *webhook_model.HookTask) (*http.Request, []byte, error) {
meta := &PackagistMeta{}
if err := json.Unmarshal([]byte(w.Meta), meta); err != nil {
diff --git a/services/webhook/packagist_test.go b/services/webhook/packagist_test.go
index f47807fa6e..4e77f29edc 100644
--- a/services/webhook/packagist_test.go
+++ b/services/webhook/packagist_test.go
@@ -4,7 +4,6 @@
package webhook
import (
- "context"
"testing"
webhook_model "code.gitea.io/gitea/models/webhook"
@@ -164,7 +163,7 @@ func TestPackagistJSONPayload(t *testing.T) {
PayloadVersion: 2,
}
- req, reqBody, err := newPackagistRequest(context.Background(), hook, task)
+ req, reqBody, err := newPackagistRequest(t.Context(), hook, task)
require.NotNil(t, req)
require.NotNil(t, reqBody)
require.NoError(t, err)
@@ -199,7 +198,7 @@ func TestPackagistEmptyPayload(t *testing.T) {
PayloadVersion: 2,
}
- req, reqBody, err := newPackagistRequest(context.Background(), hook, task)
+ req, reqBody, err := newPackagistRequest(t.Context(), hook, task)
require.NotNil(t, req)
require.NotNil(t, reqBody)
require.NoError(t, err)
@@ -211,5 +210,5 @@ func TestPackagistEmptyPayload(t *testing.T) {
var body PackagistPayload
err = json.NewDecoder(req.Body).Decode(&body)
assert.NoError(t, err)
- assert.Equal(t, "", body.PackagistRepository.URL)
+ assert.Empty(t, body.PackagistRepository.URL)
}
diff --git a/services/webhook/payloader.go b/services/webhook/payloader.go
index ab280a25b6..adb7243fb1 100644
--- a/services/webhook/payloader.go
+++ b/services/webhook/payloader.go
@@ -28,6 +28,8 @@ type payloadConvertor[T any] interface {
Release(*api.ReleasePayload) (T, error)
Wiki(*api.WikiPayload) (T, error)
Package(*api.PackagePayload) (T, error)
+ Status(*api.CommitStatusPayload) (T, error)
+ WorkflowJob(*api.WorkflowJobPayload) (T, error)
}
func convertUnmarshalledJSON[T, P any](convert func(P) (T, error), data []byte) (t T, err error) {
@@ -77,6 +79,10 @@ func newPayload[T any](rc payloadConvertor[T], data []byte, event webhook_module
return convertUnmarshalledJSON(rc.Wiki, data)
case webhook_module.HookEventPackage:
return convertUnmarshalledJSON(rc.Package, data)
+ case webhook_module.HookEventStatus:
+ return convertUnmarshalledJSON(rc.Status, data)
+ case webhook_module.HookEventWorkflowJob:
+ return convertUnmarshalledJSON(rc.WorkflowJob, data)
}
return t, fmt.Errorf("newPayload unsupported event: %s", event)
}
@@ -104,7 +110,7 @@ func newJSONRequest[T any](pc payloadConvertor[T], w *webhook_model.Webhook, t *
req.Header.Set("Content-Type", "application/json")
if withDefaultHeaders {
- return req, body, addDefaultHeaders(req, []byte(w.Secret), t, body)
+ return req, body, addDefaultHeaders(req, []byte(w.Secret), w, t, body)
}
return req, body, nil
}
diff --git a/services/webhook/slack.go b/services/webhook/slack.go
index 0371ee23e6..589ef3fe9b 100644
--- a/services/webhook/slack.go
+++ b/services/webhook/slack.go
@@ -167,6 +167,18 @@ func (s slackConvertor) Package(p *api.PackagePayload) (SlackPayload, error) {
return s.createPayload(text, nil), nil
}
+func (s slackConvertor) Status(p *api.CommitStatusPayload) (SlackPayload, error) {
+ text, _ := getStatusPayloadInfo(p, SlackLinkFormatter, true)
+
+ return s.createPayload(text, nil), nil
+}
+
+func (s slackConvertor) WorkflowJob(p *api.WorkflowJobPayload) (SlackPayload, error) {
+ text, _ := getWorkflowJobPayloadInfo(p, SlackLinkFormatter, true)
+
+ return s.createPayload(text, nil), nil
+}
+
// Push implements payloadConvertor Push method
func (s slackConvertor) Push(p *api.PushPayload) (SlackPayload, error) {
// n new commits
diff --git a/services/webhook/slack_test.go b/services/webhook/slack_test.go
index 7ebf16aba2..839ed6f770 100644
--- a/services/webhook/slack_test.go
+++ b/services/webhook/slack_test.go
@@ -4,7 +4,6 @@
package webhook
import (
- "context"
"testing"
webhook_model "code.gitea.io/gitea/models/webhook"
@@ -178,7 +177,7 @@ func TestSlackJSONPayload(t *testing.T) {
PayloadVersion: 2,
}
- req, reqBody, err := newSlackRequest(context.Background(), hook, task)
+ req, reqBody, err := newSlackRequest(t.Context(), hook, task)
require.NotNil(t, req)
require.NotNil(t, reqBody)
require.NoError(t, err)
diff --git a/services/webhook/telegram.go b/services/webhook/telegram.go
index 6fbf995801..ca74eabe1c 100644
--- a/services/webhook/telegram.go
+++ b/services/webhook/telegram.go
@@ -174,6 +174,18 @@ func (t telegramConvertor) Package(p *api.PackagePayload) (TelegramPayload, erro
return createTelegramPayloadHTML(text), nil
}
+func (t telegramConvertor) Status(p *api.CommitStatusPayload) (TelegramPayload, error) {
+ text, _ := getStatusPayloadInfo(p, htmlLinkFormatter, true)
+
+ return createTelegramPayloadHTML(text), nil
+}
+
+func (telegramConvertor) WorkflowJob(p *api.WorkflowJobPayload) (TelegramPayload, error) {
+ text, _ := getWorkflowJobPayloadInfo(p, htmlLinkFormatter, true)
+
+ return createTelegramPayloadHTML(text), nil
+}
+
func createTelegramPayloadHTML(msgHTML string) TelegramPayload {
// https://core.telegram.org/bots/api#formatting-options
return TelegramPayload{
diff --git a/services/webhook/telegram_test.go b/services/webhook/telegram_test.go
index 7ba81f1564..3fa8e27836 100644
--- a/services/webhook/telegram_test.go
+++ b/services/webhook/telegram_test.go
@@ -4,7 +4,6 @@
package webhook
import (
- "context"
"testing"
webhook_model "code.gitea.io/gitea/models/webhook"
@@ -195,7 +194,7 @@ func TestTelegramJSONPayload(t *testing.T) {
PayloadVersion: 2,
}
- req, reqBody, err := newTelegramRequest(context.Background(), hook, task)
+ req, reqBody, err := newTelegramRequest(t.Context(), hook, task)
require.NotNil(t, req)
require.NotNil(t, reqBody)
require.NoError(t, err)
diff --git a/services/webhook/wechatwork.go b/services/webhook/wechatwork.go
index 44e0ff7de5..2b19822caf 100644
--- a/services/webhook/wechatwork.go
+++ b/services/webhook/wechatwork.go
@@ -175,6 +175,18 @@ func (wc wechatworkConvertor) Package(p *api.PackagePayload) (WechatworkPayload,
return newWechatworkMarkdownPayload(text), nil
}
+func (wc wechatworkConvertor) Status(p *api.CommitStatusPayload) (WechatworkPayload, error) {
+ text, _ := getStatusPayloadInfo(p, noneLinkFormatter, true)
+
+ return newWechatworkMarkdownPayload(text), nil
+}
+
+func (wc wechatworkConvertor) WorkflowJob(p *api.WorkflowJobPayload) (WechatworkPayload, error) {
+ text, _ := getWorkflowJobPayloadInfo(p, noneLinkFormatter, true)
+
+ return newWechatworkMarkdownPayload(text), nil
+}
+
func newWechatworkRequest(_ context.Context, w *webhook_model.Webhook, t *webhook_model.HookTask) (*http.Request, []byte, error) {
var pc payloadConvertor[WechatworkPayload] = wechatworkConvertor{}
return newJSONRequest(pc, w, t, true)
diff --git a/services/webtheme/webtheme.go b/services/webtheme/webtheme.go
index dc801e1ff7..58aea3bc74 100644
--- a/services/webtheme/webtheme.go
+++ b/services/webtheme/webtheme.go
@@ -4,6 +4,7 @@
package webtheme
import (
+ "regexp"
"sort"
"strings"
"sync"
@@ -12,63 +13,154 @@ import (
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/public"
"code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
)
var (
- availableThemes []string
- availableThemesSet container.Set[string]
- themeOnce sync.Once
+ availableThemes []*ThemeMetaInfo
+ availableThemeInternalNames container.Set[string]
+ themeOnce sync.Once
)
+const (
+ fileNamePrefix = "theme-"
+ fileNameSuffix = ".css"
+)
+
+type ThemeMetaInfo struct {
+ FileName string
+ InternalName string
+ DisplayName string
+}
+
+func parseThemeMetaInfoToMap(cssContent string) map[string]string {
+ /*
+ The theme meta info is stored in CSS variables of the `gitea-theme-meta-info` element,
+ which is privately defined and only used by the backend to extract the meta info.
+ ":root" is not used because it is difficult to parse the various ":root" blocks when other
+ files are imported, difficult to control which block overrides the others, and difficult to
+ avoid users' customized override styles.
+ */
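+ // Use the last occurrence of "gitea-theme-meta-info" so that blocks from imported themes
+ // (e.g. the light/dark parts of an auto theme) do not override the theme's own meta info.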
+ metaInfoContent := cssContent
+ if pos := strings.LastIndex(metaInfoContent, "gitea-theme-meta-info"); pos >= 0 {
+ metaInfoContent = metaInfoContent[pos:]
+ }
+
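+ // reMetaInfoItem matches one `--name: value;` declaration; the value may be double-quoted
+ // or single-quoted (with backslash escapes), or a bare token without quotes or semicolons.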
+ reMetaInfoItem := `
+(
+\s*(--[-\w]+)
+\s*:
+\s*(
+("(\\"|[^"])*")
+|('(\\'|[^'])*')
+|([^'";]+)
+)
+\s*;
+\s*
+)
+`
+ reMetaInfoItem = strings.ReplaceAll(reMetaInfoItem, "\n", "")
+ reMetaInfoBlock := `\bgitea-theme-meta-info\s*\{(` + reMetaInfoItem + `+)\}`
+ re := regexp.MustCompile(reMetaInfoBlock)
+ matchedMetaInfoBlock := re.FindAllStringSubmatch(metaInfoContent, -1)
+ if len(matchedMetaInfoBlock) == 0 {
+ return nil
+ }
+ re = regexp.MustCompile(strings.ReplaceAll(reMetaInfoItem, "\n", ""))
+ matchedItems := re.FindAllStringSubmatch(matchedMetaInfoBlock[0][1], -1)
+ m := map[string]string{}
+ for _, item := range matchedItems {
+ v := item[3]
+ if strings.HasPrefix(v, `"`) {
+ v = strings.TrimSuffix(strings.TrimPrefix(v, `"`), `"`)
+ v = strings.ReplaceAll(v, `\"`, `"`)
+ } else if strings.HasPrefix(v, `'`) {
+ v = strings.TrimSuffix(strings.TrimPrefix(v, `'`), `'`)
+ v = strings.ReplaceAll(v, `\'`, `'`)
+ }
+ m[item[2]] = v
+ }
+ return m
+}
+
+func defaultThemeMetaInfoByFileName(fileName string) *ThemeMetaInfo {
+ themeInfo := &ThemeMetaInfo{
+ FileName: fileName,
+ InternalName: strings.TrimSuffix(strings.TrimPrefix(fileName, fileNamePrefix), fileNameSuffix),
+ }
+ themeInfo.DisplayName = themeInfo.InternalName
+ return themeInfo
+}
+
+func defaultThemeMetaInfoByInternalName(internalName string) *ThemeMetaInfo {
+ return defaultThemeMetaInfoByFileName(fileNamePrefix + internalName + fileNameSuffix)
+}
+
+func parseThemeMetaInfo(fileName, cssContent string) *ThemeMetaInfo {
+ themeInfo := defaultThemeMetaInfoByFileName(fileName)
+ m := parseThemeMetaInfoToMap(cssContent)
+ if m == nil {
+ return themeInfo
+ }
+ themeInfo.DisplayName = m["--theme-display-name"]
+ return themeInfo
+}
+
func initThemes() {
availableThemes = nil
defer func() {
- availableThemesSet = container.SetOf(availableThemes...)
- if !availableThemesSet.Contains(setting.UI.DefaultTheme) {
+ availableThemeInternalNames = container.Set[string]{}
+ for _, theme := range availableThemes {
+ availableThemeInternalNames.Add(theme.InternalName)
+ }
+ if !availableThemeInternalNames.Contains(setting.UI.DefaultTheme) {
setting.LogStartupProblem(1, log.ERROR, "Default theme %q is not available, please correct the '[ui].DEFAULT_THEME' setting in the config file", setting.UI.DefaultTheme)
}
}()
cssFiles, err := public.AssetFS().ListFiles("/assets/css")
if err != nil {
log.Error("Failed to list themes: %v", err)
- availableThemes = []string{setting.UI.DefaultTheme}
+ availableThemes = []*ThemeMetaInfo{defaultThemeMetaInfoByInternalName(setting.UI.DefaultTheme)}
return
}
- var foundThemes []string
- for _, name := range cssFiles {
- name, ok := strings.CutPrefix(name, "theme-")
- if !ok {
- continue
- }
- name, ok = strings.CutSuffix(name, ".css")
- if !ok {
- continue
+ var foundThemes []*ThemeMetaInfo
+ for _, fileName := range cssFiles {
+ if strings.HasPrefix(fileName, fileNamePrefix) && strings.HasSuffix(fileName, fileNameSuffix) {
+ content, err := public.AssetFS().ReadFile("/assets/css/" + fileName)
+ if err != nil {
+ log.Error("Failed to read theme file %q: %v", fileName, err)
+ continue
+ }
+ foundThemes = append(foundThemes, parseThemeMetaInfo(fileName, util.UnsafeBytesToString(content)))
}
- foundThemes = append(foundThemes, name)
}
if len(setting.UI.Themes) > 0 {
allowedThemes := container.SetOf(setting.UI.Themes...)
for _, theme := range foundThemes {
- if allowedThemes.Contains(theme) {
+ if allowedThemes.Contains(theme.InternalName) {
availableThemes = append(availableThemes, theme)
}
}
} else {
availableThemes = foundThemes
}
- sort.Strings(availableThemes)
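+ // Keep the configured default theme first; sort the remaining themes by display name.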
+ sort.Slice(availableThemes, func(i, j int) bool {
+ if availableThemes[i].InternalName == setting.UI.DefaultTheme {
+ return true
+ }
+ return availableThemes[i].DisplayName < availableThemes[j].DisplayName
+ })
if len(availableThemes) == 0 {
setting.LogStartupProblem(1, log.ERROR, "No theme candidate in asset files, but Gitea requires there should be at least one usable theme")
- availableThemes = []string{setting.UI.DefaultTheme}
+ availableThemes = []*ThemeMetaInfo{defaultThemeMetaInfoByInternalName(setting.UI.DefaultTheme)}
}
}
-func GetAvailableThemes() []string {
+func GetAvailableThemes() []*ThemeMetaInfo {
themeOnce.Do(initThemes)
return availableThemes
}
-func IsThemeAvailable(name string) bool {
+func IsThemeAvailable(internalName string) bool {
themeOnce.Do(initThemes)
- return availableThemesSet.Contains(name)
+ return availableThemeInternalNames.Contains(internalName)
}
diff --git a/services/webtheme/webtheme_test.go b/services/webtheme/webtheme_test.go
new file mode 100644
index 0000000000..587953ab0c
--- /dev/null
+++ b/services/webtheme/webtheme_test.go
@@ -0,0 +1,37 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webtheme
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestParseThemeMetaInfo(t *testing.T) {
+ m := parseThemeMetaInfoToMap(`gitea-theme-meta-info {
+ --k1: "v1";
+ --k2: "v\"2";
+ --k3: 'v3';
+ --k4: 'v\'4';
+ --k5: v5;
+}`)
+ assert.Equal(t, map[string]string{
+ "--k1": "v1",
+ "--k2": `v"2`,
+ "--k3": "v3",
+ "--k4": "v'4",
+ "--k5": "v5",
+ }, m)
+
+ // if an auto theme imports others, the meta info should be extracted from the last one
+ // the meta in imported themes should be ignored to avoid incorrect overriding
+ m = parseThemeMetaInfoToMap(`
+@media (prefers-color-scheme: dark) { gitea-theme-meta-info { --k1: foo; } }
+@media (prefers-color-scheme: light) { gitea-theme-meta-info { --k1: bar; } }
+gitea-theme-meta-info {
+ --k2: real;
+}`)
+ assert.Equal(t, map[string]string{"--k2": "real"}, m)
+}
diff --git a/services/wiki/wiki.go b/services/wiki/wiki.go
index 7a0419aea7..b21f46639d 100644
--- a/services/wiki/wiki.go
+++ b/services/wiki/wiki.go
@@ -41,9 +41,9 @@ func InitWiki(ctx context.Context, repo *repo_model.Repository) error {
if err := git.InitRepository(ctx, repo.WikiPath(), true, repo.ObjectFormatName); err != nil {
return fmt.Errorf("InitRepository: %w", err)
- } else if err = repo_module.CreateDelegateHooks(repo.WikiPath()); err != nil {
+ } else if err = gitrepo.CreateDelegateHooks(ctx, repo.WikiStorageRepo()); err != nil {
return fmt.Errorf("createDelegateHooks: %w", err)
- } else if _, _, err = git.NewCommand(ctx, "symbolic-ref", "HEAD").AddDynamicArguments(git.BranchPrefix + repo.DefaultWikiBranch).RunStdString(&git.RunOpts{Dir: repo.WikiPath()}); err != nil {
+ } else if _, _, err = git.NewCommand("symbolic-ref", "HEAD").AddDynamicArguments(git.BranchPrefix+repo.DefaultWikiBranch).RunStdString(ctx, &git.RunOpts{Dir: repo.WikiPath()}); err != nil {
return fmt.Errorf("unable to set default wiki branch to %q: %w", repo.DefaultWikiBranch, err)
}
return nil
@@ -100,7 +100,7 @@ func updateWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model
return fmt.Errorf("InitWiki: %w", err)
}
- hasDefaultBranch := git.IsBranchExist(ctx, repo.WikiPath(), repo.DefaultWikiBranch)
+ hasDefaultBranch := gitrepo.IsBranchExist(ctx, repo.WikiStorageRepo(), repo.DefaultWikiBranch)
basePath, err := repo_module.CreateTemporaryPath("update-wiki")
if err != nil {
@@ -381,7 +381,7 @@ func ChangeDefaultWikiBranch(ctx context.Context, repo *repo_model.Repository, n
return nil
}
- oldDefBranch, err := gitrepo.GetWikiDefaultBranch(ctx, repo)
+ oldDefBranch, err := gitrepo.GetDefaultBranch(ctx, repo.WikiStorageRepo())
if err != nil {
return fmt.Errorf("unable to get default branch: %w", err)
}
@@ -389,7 +389,7 @@ func ChangeDefaultWikiBranch(ctx context.Context, repo *repo_model.Repository, n
return nil
}
- gitRepo, err := gitrepo.OpenWikiRepository(ctx, repo)
+ gitRepo, err := gitrepo.OpenRepository(ctx, repo.WikiStorageRepo())
if errors.Is(err, util.ErrNotExist) {
return nil // no git repo on storage, no need to do anything else
} else if err != nil {
diff --git a/services/wiki/wiki_test.go b/services/wiki/wiki_test.go
index e8b89f5e97..f441c2939b 100644
--- a/services/wiki/wiki_test.go
+++ b/services/wiki/wiki_test.go
@@ -26,7 +26,7 @@ func TestMain(m *testing.M) {
func TestWebPathSegments(t *testing.T) {
a := WebPathSegments("a%2Fa/b+c/d-e/f-g.-")
- assert.EqualValues(t, []string{"a/a", "b c", "d e", "f-g"}, a)
+ assert.Equal(t, []string{"a/a", "b c", "d e", "f-g"}, a)
}
func TestUserTitleToWebPath(t *testing.T) {
@@ -63,7 +63,7 @@ func TestWebPathToDisplayName(t *testing.T) {
{"a b", "a%20b.md"},
} {
_, displayName := WebPathToUserTitle(test.WebPath)
- assert.EqualValues(t, test.Expected, displayName)
+ assert.Equal(t, test.Expected, displayName)
}
}
@@ -80,7 +80,7 @@ func TestWebPathToGitPath(t *testing.T) {
{"2000-01-02-meeting.md", "2000-01-02+meeting"},
{"2000-01-02 meeting.-.md", "2000-01-02%20meeting.-"},
} {
- assert.EqualValues(t, test.Expected, WebPathToGitPath(test.WikiName))
+ assert.Equal(t, test.Expected, WebPathToGitPath(test.WikiName))
}
}
@@ -134,9 +134,9 @@ func TestUserWebGitPathConsistency(t *testing.T) {
_, userTitle1 := WebPathToUserTitle(webPath1)
gitPath1 := WebPathToGitPath(webPath1)
- assert.EqualValues(t, userTitle, userTitle1, "UserTitle for userTitle: %q", userTitle)
- assert.EqualValues(t, webPath, webPath1, "WebPath for userTitle: %q", userTitle)
- assert.EqualValues(t, gitPath, gitPath1, "GitPath for userTitle: %q", userTitle)
+ assert.Equal(t, userTitle, userTitle1, "UserTitle for userTitle: %q", userTitle)
+ assert.Equal(t, webPath, webPath1, "WebPath for userTitle: %q", userTitle)
+ assert.Equal(t, gitPath, gitPath1, "GitPath for userTitle: %q", userTitle)
}
}
@@ -166,7 +166,7 @@ func TestRepository_AddWikiPage(t *testing.T) {
webPath := UserTitleToWebPath("", userTitle)
assert.NoError(t, AddWikiPage(git.DefaultContext, doer, repo, webPath, wikiContent, commitMsg))
// Now need to show that the page has been added:
- gitRepo, err := gitrepo.OpenWikiRepository(git.DefaultContext, repo)
+ gitRepo, err := gitrepo.OpenRepository(git.DefaultContext, repo.WikiStorageRepo())
require.NoError(t, err)
defer gitRepo.Close()
@@ -175,7 +175,7 @@ func TestRepository_AddWikiPage(t *testing.T) {
gitPath := WebPathToGitPath(webPath)
entry, err := masterTree.GetTreeEntryByPath(gitPath)
assert.NoError(t, err)
- assert.EqualValues(t, gitPath, entry.Name(), "%s not added correctly", userTitle)
+ assert.Equal(t, gitPath, entry.Name(), "%s not added correctly", userTitle)
})
}
@@ -213,14 +213,14 @@ func TestRepository_EditWikiPage(t *testing.T) {
assert.NoError(t, EditWikiPage(git.DefaultContext, doer, repo, "Home", webPath, newWikiContent, commitMsg))
// Now need to show that the page has been added:
- gitRepo, err := gitrepo.OpenWikiRepository(git.DefaultContext, repo)
+ gitRepo, err := gitrepo.OpenRepository(git.DefaultContext, repo.WikiStorageRepo())
assert.NoError(t, err)
masterTree, err := gitRepo.GetTree(repo.DefaultWikiBranch)
assert.NoError(t, err)
gitPath := WebPathToGitPath(webPath)
entry, err := masterTree.GetTreeEntryByPath(gitPath)
assert.NoError(t, err)
- assert.EqualValues(t, gitPath, entry.Name(), "%s not edited correctly", newWikiName)
+ assert.Equal(t, gitPath, entry.Name(), "%s not edited correctly", newWikiName)
if newWikiName != "Home" {
_, err := masterTree.GetTreeEntryByPath("Home.md")
@@ -237,7 +237,7 @@ func TestRepository_DeleteWikiPage(t *testing.T) {
assert.NoError(t, DeleteWikiPage(git.DefaultContext, doer, repo, "Home"))
// Now need to show that the page has been added:
- gitRepo, err := gitrepo.OpenWikiRepository(git.DefaultContext, repo)
+ gitRepo, err := gitrepo.OpenRepository(git.DefaultContext, repo.WikiStorageRepo())
require.NoError(t, err)
defer gitRepo.Close()
@@ -251,7 +251,7 @@ func TestRepository_DeleteWikiPage(t *testing.T) {
func TestPrepareWikiFileName(t *testing.T) {
unittest.PrepareTestEnv(t)
repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
- gitRepo, err := gitrepo.OpenWikiRepository(git.DefaultContext, repo)
+ gitRepo, err := gitrepo.OpenRepository(git.DefaultContext, repo.WikiStorageRepo())
require.NoError(t, err)
defer gitRepo.Close()
@@ -290,7 +290,7 @@ func TestPrepareWikiFileName(t *testing.T) {
t.Errorf("expect to find an escaped file but we could not detect one")
}
}
- assert.EqualValues(t, tt.wikiPath, newWikiPath)
+ assert.Equal(t, tt.wikiPath, newWikiPath)
})
}
}
@@ -312,13 +312,13 @@ func TestPrepareWikiFileName_FirstPage(t *testing.T) {
existence, newWikiPath, err := prepareGitPath(gitRepo, "master", "Home")
assert.False(t, existence)
assert.NoError(t, err)
- assert.EqualValues(t, "Home.md", newWikiPath)
+ assert.Equal(t, "Home.md", newWikiPath)
}
func TestWebPathConversion(t *testing.T) {
assert.Equal(t, "path/wiki", WebPathToURLPath(WebPath("path/wiki")))
assert.Equal(t, "wiki", WebPathToURLPath(WebPath("wiki")))
- assert.Equal(t, "", WebPathToURLPath(WebPath("")))
+ assert.Empty(t, WebPathToURLPath(WebPath("")))
}
func TestWebPathFromRequest(t *testing.T) {