@@ -1456,7 +1456,7 @@ LEVEL = Info | |||
;; Batch size to send for batched queues | |||
;BATCH_LENGTH = 20 | |||
;; | |||
;; Connection string for redis queues this will store the redis or redis-cluster connection string. | |||
;; Connection string for redis queues this will store the redis (or Redis cluster) connection string. | |||
;; When `TYPE` is `persistable-channel`, this provides a directory for the underlying leveldb | |||
;; or additional options of the form `leveldb://path/to/db?option=value&....`, and will override `DATADIR`. | |||
;CONN_STR = "redis://127.0.0.1:6379/0" | |||
@@ -1740,9 +1740,8 @@ LEVEL = Info | |||
;; For "memory" only, GC interval in seconds, default is 60 | |||
;INTERVAL = 60 | |||
;; | |||
;; For "redis", "redis-cluster" and "memcache", connection host address | |||
;; redis: `redis://127.0.0.1:6379/0?pool_size=100&idle_timeout=180s` | |||
;; redis-cluster: `redis+cluster://127.0.0.1:6379/0?pool_size=100&idle_timeout=180s` | |||
;; For "redis" and "memcache", connection host address | |||
;; redis: `redis://127.0.0.1:6379/0?pool_size=100&idle_timeout=180s` (or `redis+cluster://127.0.0.1:6379/0?pool_size=100&idle_timeout=180s` for a Redis cluster) | |||
;; memcache: `127.0.0.1:11211` | |||
;; twoqueue: `{"size":50000,"recent_ratio":0.25,"ghost_ratio":0.5}` or `50000` | |||
;HOST = | |||
@@ -1772,15 +1771,14 @@ LEVEL = Info | |||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; | |||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; | |||
;; | |||
;; Either "memory", "file", "redis", "redis-cluster", "db", "mysql", "couchbase", "memcache" or "postgres" | |||
;; Either "memory", "file", "redis", "db", "mysql", "couchbase", "memcache" or "postgres" | |||
;; Default is "memory". "db" will reuse the configuration in [database] | |||
;PROVIDER = memory | |||
;; | |||
;; Provider config options | |||
;; memory: doesn't have any config yet | |||
;; file: session file path, e.g. `data/sessions` | |||
;; redis: `redis://127.0.0.1:6379/0?pool_size=100&idle_timeout=180s` | |||
;; redis-cluster: `redis+cluster://127.0.0.1:6379/0?pool_size=100&idle_timeout=180s` | |||
;; redis: `redis://127.0.0.1:6379/0?pool_size=100&idle_timeout=180s` (or `redis+cluster://127.0.0.1:6379/0?pool_size=100&idle_timeout=180s` for a Redis cluster) | |||
;; mysql: go-sql-driver/mysql dsn config string, e.g. `root:password@/session_table` | |||
;PROVIDER_CONFIG = data/sessions ; Relative paths will be made absolute against _`AppWorkPath`_. | |||
;; |
@@ -1,7 +1,7 @@ | |||
# Gitea - Docker | |||
Dockerfile is found in root of repository. | |||
Dockerfile is found in the root of the repository. | |||
Docker image can be found on [docker hub](https://hub.docker.com/r/gitea/gitea) | |||
Docker image can be found on [docker hub](https://hub.docker.com/r/gitea/gitea). | |||
Documentation on using docker image can be found on [Gitea Docs site](https://docs.gitea.com/installation/install-with-docker-rootless) | |||
Documentation on using docker image can be found on [Gitea Docs site](https://docs.gitea.com/installation/install-with-docker-rootless). |
@@ -492,7 +492,7 @@ Configuration at `[queue]` will set defaults for queues with overrides for indiv | |||
- `DATADIR`: **queues/common**: Base DataDir for storing level queues. `DATADIR` for individual queues can be set in `queue.name` sections. Relative paths will be made absolute against `%(APP_DATA_PATH)s`. | |||
- `LENGTH`: **100000**: Maximal queue size before channel queues block | |||
- `BATCH_LENGTH`: **20**: Batch data before passing to the handler | |||
- `CONN_STR`: **redis://127.0.0.1:6379/0**: Connection string for the redis queue type. For `redis-cluster` use `redis+cluster://127.0.0.1:6379/0`. Options can be set using query params. Similarly, LevelDB options can also be set using: **leveldb://relative/path?option=value** or **leveldb:///absolute/path?option=value**, and will override `DATADIR` | |||
- `CONN_STR`: **redis://127.0.0.1:6379/0**: Connection string for the redis queue type. If you're running a Redis cluster, use `redis+cluster://127.0.0.1:6379/0`. Options can be set using query params. Similarly, LevelDB options can also be set using: **leveldb://relative/path?option=value** or **leveldb:///absolute/path?option=value**, and will override `DATADIR` | |||
- `QUEUE_NAME`: **_queue**: The suffix for default redis and disk queue name. Individual queues will default to **`name`**`QUEUE_NAME` but can be overridden in the specific `queue.name` section. | |||
- `SET_NAME`: **_unique**: The suffix that will be added to the default redis and disk queue `set` name for unique queues. Individual queues will default to **`name`**`QUEUE_NAME`_`SET_NAME`_ but can be overridden in the specific `queue.name` section. | |||
- `MAX_WORKERS`: **(dynamic)**: Maximum number of worker go-routines for the queue. Default value is "CpuNum/2" clipped to between 1 and 10. | |||
@@ -777,11 +777,11 @@ and | |||
## Cache (`cache`) | |||
- `ADAPTER`: **memory**: Cache engine adapter, either `memory`, `redis`, `redis-cluster`, `twoqueue` or `memcache`. (`twoqueue` represents a size limited LRU cache.) | |||
- `ADAPTER`: **memory**: Cache engine adapter, either `memory`, `redis`, `twoqueue` or `memcache`. (`twoqueue` represents a size limited LRU cache.) | |||
- `INTERVAL`: **60**: Garbage Collection interval (sec), for memory and twoqueue cache only. | |||
- `HOST`: **_empty_**: Connection string for `redis`, `redis-cluster` and `memcache`. For `twoqueue` sets configuration for the queue. | |||
- `HOST`: **_empty_**: Connection string for `redis` and `memcache`. For `twoqueue` sets configuration for the queue. | |||
- Redis: `redis://:macaron@127.0.0.1:6379/0?pool_size=100&idle_timeout=180s` | |||
- Redis-cluster `redis+cluster://:macaron@127.0.0.1:6379/0?pool_size=100&idle_timeout=180s` | |||
- For a Redis cluster: `redis+cluster://:macaron@127.0.0.1:6379/0?pool_size=100&idle_timeout=180s` | |||
- Memcache: `127.0.0.1:9090;127.0.0.1:9091` | |||
- TwoQueue LRU cache: `{"size":50000,"recent_ratio":0.25,"ghost_ratio":0.5}` or `50000` representing the maximum number of objects stored in the cache. | |||
- `ITEM_TTL`: **16h**: Time to keep items in cache if not used, Setting it to -1 disables caching. | |||
@@ -793,7 +793,7 @@ and | |||
## Session (`session`) | |||
- `PROVIDER`: **memory**: Session engine provider \[memory, file, redis, redis-cluster, db, mysql, couchbase, memcache, postgres\]. Setting `db` will reuse the configuration in `[database]` | |||
- `PROVIDER`: **memory**: Session engine provider \[memory, file, redis, db, mysql, couchbase, memcache, postgres\]. Setting `db` will reuse the configuration in `[database]` | |||
- `PROVIDER_CONFIG`: **data/sessions**: For file, the root path; for db, empty (database config will be used); for others, the connection string. Relative paths will be made absolute against _`AppWorkPath`_. | |||
- `COOKIE_SECURE`:**_empty_**: `true` or `false`. Enable this to force using HTTPS for all session access. If not set, it defaults to `true` if the ROOT_URL is an HTTPS URL. | |||
- `COOKIE_NAME`: **i\_like\_gitea**: The name of the cookie used for the session ID. |
@@ -8,7 +8,7 @@ require ( | |||
code.gitea.io/sdk/gitea v0.17.1 | |||
codeberg.org/gusted/mcaptcha v0.0.0-20220723083913-4f3072e1d570 | |||
connectrpc.com/connect v1.15.0 | |||
gitea.com/go-chi/binding v0.0.0-20240316035258-17450c5f3028 | |||
gitea.com/go-chi/binding v0.0.0-20240430071103-39a851e106ed | |||
gitea.com/go-chi/cache v0.2.0 | |||
gitea.com/go-chi/captcha v0.0.0-20240315150714-fb487f629098 | |||
gitea.com/go-chi/session v0.0.0-20240316035857-16768d98ec96 | |||
@@ -59,6 +59,7 @@ require ( | |||
github.com/google/uuid v1.6.0 | |||
github.com/gorilla/feeds v1.1.2 | |||
github.com/gorilla/sessions v1.2.2 | |||
github.com/h2non/gock v1.2.0 | |||
github.com/hashicorp/go-version v1.6.0 | |||
github.com/hashicorp/golang-lru/v2 v2.0.7 | |||
github.com/huandu/xstrings v1.4.0 | |||
@@ -209,6 +210,7 @@ require ( | |||
github.com/gorilla/handlers v1.5.2 // indirect | |||
github.com/gorilla/mux v1.8.1 // indirect | |||
github.com/gorilla/securecookie v1.1.2 // indirect | |||
github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 // indirect | |||
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect | |||
github.com/hashicorp/go-retryablehttp v0.7.5 // indirect | |||
github.com/hashicorp/hcl v1.0.0 // indirect |
@@ -20,8 +20,8 @@ git.sr.ht/~mariusor/go-xsd-duration v0.0.0-20220703122237-02e73435a078 h1:cliQ4H | |||
git.sr.ht/~mariusor/go-xsd-duration v0.0.0-20220703122237-02e73435a078/go.mod h1:g/V2Hjas6Z1UHUp4yIx6bATpNzJ7DYtD0FG3+xARWxs= | |||
gitea.com/gitea/act v0.259.1 h1:8GG1o/xtUHl3qjn5f0h/2FXrT5ubBn05TJOM5ry+FBw= | |||
gitea.com/gitea/act v0.259.1/go.mod h1:UxZWRYqQG2Yj4+4OqfGWW5a3HELwejyWFQyU7F1jUD8= | |||
gitea.com/go-chi/binding v0.0.0-20240316035258-17450c5f3028 h1:6/QAx4+s0dyRwdaTFPTnhGppuiuu0OqxIH9szyTpvKw= | |||
gitea.com/go-chi/binding v0.0.0-20240316035258-17450c5f3028/go.mod h1:E3i3cgB04dDx0v3CytCgRTTn9Z/9x891aet3r456RVw= | |||
gitea.com/go-chi/binding v0.0.0-20240430071103-39a851e106ed h1:EZZBtilMLSZNWtHHcgq2mt6NSGhJSZBuduAlinMEmso= | |||
gitea.com/go-chi/binding v0.0.0-20240430071103-39a851e106ed/go.mod h1:E3i3cgB04dDx0v3CytCgRTTn9Z/9x891aet3r456RVw= | |||
gitea.com/go-chi/cache v0.2.0 h1:E0npuTfDW6CT1yD8NMDVc1SK6IeRjfmRL2zlEsCEd7w= | |||
gitea.com/go-chi/cache v0.2.0/go.mod h1:iQlVK2aKTZ/rE9UcHyz9pQWGvdP9i1eI2spOpzgCrtE= | |||
gitea.com/go-chi/captcha v0.0.0-20240315150714-fb487f629098 h1:p2ki+WK0cIeNQuqjR98IP2KZQKRzJJiV7aTeMAFwaWo= | |||
@@ -430,6 +430,10 @@ github.com/gorilla/securecookie v1.1.2/go.mod h1:NfCASbcHqRSY+3a8tlWJwsQap2VX5pw | |||
github.com/gorilla/sessions v1.2.0/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= | |||
github.com/gorilla/sessions v1.2.2 h1:lqzMYz6bOfvn2WriPUjNByzeXIlVzURcPmgMczkmTjY= | |||
github.com/gorilla/sessions v1.2.2/go.mod h1:ePLdVu+jbEgHH+KWw8I1z2wqd0BAdAQh/8LRvBeoNcQ= | |||
github.com/h2non/gock v1.2.0 h1:K6ol8rfrRkUOefooBC8elXoaNGYkpp7y2qcxGG6BzUE= | |||
github.com/h2non/gock v1.2.0/go.mod h1:tNhoxHYW2W42cYkYb1WqzdbYIieALC99kpYr7rH/BQk= | |||
github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 h1:2VTzZjLZBgl62/EtslCrtky5vbi9dd7HrQPQIx6wqiw= | |||
github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542/go.mod h1:Ow0tF8D4Kplbc8s8sSb3V2oUCygFHVp8gC3Dn6U4MNI= | |||
github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= | |||
github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= | |||
github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= | |||
@@ -591,6 +595,8 @@ github.com/mschoch/smat v0.2.0 h1:8imxQsjDm8yFEAVBe7azKmKSgzSkZXDuKkSq9374khM= | |||
github.com/mschoch/smat v0.2.0/go.mod h1:kc9mz7DoBKqDyiRL7VZN8KvXQMWeTaVnttLRXOlotKw= | |||
github.com/msteinert/pam v1.2.0 h1:mYfjlvN2KYs2Pb9G6nb/1f/nPfAttT/Jee5Sq9r3bGE= | |||
github.com/msteinert/pam v1.2.0/go.mod h1:d2n0DCUK8rGecChV3JzvmsDjOY4R7AYbsNxAT+ftQl0= | |||
github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32 h1:W6apQkHrMkS0Muv8G/TipAy/FJl/rCYT0+EuS8+Z0z4= | |||
github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32/go.mod h1:9wM+0iRr9ahx58uYLpLIr5fm8diHn0JbqRycJi6w0Ms= | |||
github.com/niklasfasching/go-org v1.7.0 h1:vyMdcMWWTe/XmANk19F4k8XGBYg0GQ/gJGMimOjGMek= | |||
github.com/niklasfasching/go-org v1.7.0/go.mod h1:WuVm4d45oePiE0eX25GqTDQIt/qPW1T9DGkRscqLW5o= | |||
github.com/nwaples/rardecode v1.1.0/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0= |
@@ -397,36 +397,16 @@ func GetLatestCommitStatusForRepoCommitIDs(ctx context.Context, repoID int64, co | |||
// FindRepoRecentCommitStatusContexts returns repository's recent commit status contexts | |||
func FindRepoRecentCommitStatusContexts(ctx context.Context, repoID int64, before time.Duration) ([]string, error) { | |||
type result struct { | |||
Index int64 | |||
SHA string | |||
} | |||
getBase := func() *xorm.Session { | |||
return db.GetEngine(ctx).Table(&CommitStatus{}).Where("repo_id = ?", repoID) | |||
} | |||
start := timeutil.TimeStampNow().AddDuration(-before) | |||
results := make([]result, 0, 10) | |||
sess := getBase().And("updated_unix >= ?", start). | |||
Select("max( `index` ) as `index`, sha"). | |||
GroupBy("context_hash, sha").OrderBy("max( `index` ) desc") | |||
err := sess.Find(&results) | |||
if err != nil { | |||
var contexts []string | |||
if err := db.GetEngine(ctx).Table("commit_status"). | |||
Where("repo_id = ?", repoID).And("updated_unix >= ?", start). | |||
Cols("context").Distinct().Find(&contexts); err != nil { | |||
return nil, err | |||
} | |||
contexts := make([]string, 0, len(results)) | |||
if len(results) == 0 { | |||
return contexts, nil | |||
} | |||
conds := make([]builder.Cond, 0, len(results)) | |||
for _, result := range results { | |||
conds = append(conds, builder.Eq{"`index`": result.Index, "sha": result.SHA}) | |||
} | |||
return contexts, getBase().And(builder.Or(conds...)).Select("context").Find(&contexts) | |||
return contexts, nil | |||
} | |||
// NewCommitStatusOptions holds options for creating a CommitStatus |
@@ -5,11 +5,15 @@ package git_test | |||
import ( | |||
"testing" | |||
"time" | |||
"code.gitea.io/gitea/models/db" | |||
git_model "code.gitea.io/gitea/models/git" | |||
repo_model "code.gitea.io/gitea/models/repo" | |||
"code.gitea.io/gitea/models/unittest" | |||
user_model "code.gitea.io/gitea/models/user" | |||
"code.gitea.io/gitea/modules/git" | |||
"code.gitea.io/gitea/modules/gitrepo" | |||
"code.gitea.io/gitea/modules/structs" | |||
"github.com/stretchr/testify/assert" | |||
@@ -175,3 +179,55 @@ func Test_CalcCommitStatus(t *testing.T) { | |||
assert.Equal(t, kase.expected, git_model.CalcCommitStatus(kase.statuses)) | |||
} | |||
} | |||
func TestFindRepoRecentCommitStatusContexts(t *testing.T) { | |||
assert.NoError(t, unittest.PrepareTestDatabase()) | |||
repo2 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2}) | |||
user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}) | |||
gitRepo, err := gitrepo.OpenRepository(git.DefaultContext, repo2) | |||
assert.NoError(t, err) | |||
defer gitRepo.Close() | |||
commit, err := gitRepo.GetBranchCommit(repo2.DefaultBranch) | |||
assert.NoError(t, err) | |||
defer func() { | |||
_, err := db.DeleteByBean(db.DefaultContext, &git_model.CommitStatus{ | |||
RepoID: repo2.ID, | |||
CreatorID: user2.ID, | |||
SHA: commit.ID.String(), | |||
}) | |||
assert.NoError(t, err) | |||
}() | |||
err = git_model.NewCommitStatus(db.DefaultContext, git_model.NewCommitStatusOptions{ | |||
Repo: repo2, | |||
Creator: user2, | |||
SHA: commit.ID, | |||
CommitStatus: &git_model.CommitStatus{ | |||
State: structs.CommitStatusFailure, | |||
TargetURL: "https://example.com/tests/", | |||
Context: "compliance/lint-backend", | |||
}, | |||
}) | |||
assert.NoError(t, err) | |||
err = git_model.NewCommitStatus(db.DefaultContext, git_model.NewCommitStatusOptions{ | |||
Repo: repo2, | |||
Creator: user2, | |||
SHA: commit.ID, | |||
CommitStatus: &git_model.CommitStatus{ | |||
State: structs.CommitStatusSuccess, | |||
TargetURL: "https://example.com/tests/", | |||
Context: "compliance/lint-backend", | |||
}, | |||
}) | |||
assert.NoError(t, err) | |||
contexts, err := git_model.FindRepoRecentCommitStatusContexts(db.DefaultContext, repo2.ID, time.Hour) | |||
assert.NoError(t, err) | |||
if assert.Len(t, contexts, 1) { | |||
assert.Equal(t, "compliance/lint-backend", contexts[0]) | |||
} | |||
} |
@@ -429,62 +429,6 @@ func UpdateIssueMentions(ctx context.Context, issueID int64, mentions []*user_mo | |||
return nil | |||
} | |||
// UpdateIssueByAPI updates all allowed fields of given issue. | |||
// If the issue status is changed a statusChangeComment is returned | |||
// similarly if the title is changed the titleChanged bool is set to true | |||
func UpdateIssueByAPI(ctx context.Context, issue *Issue, doer *user_model.User) (statusChangeComment *Comment, titleChanged bool, err error) { | |||
ctx, committer, err := db.TxContext(ctx) | |||
if err != nil { | |||
return nil, false, err | |||
} | |||
defer committer.Close() | |||
if err := issue.LoadRepo(ctx); err != nil { | |||
return nil, false, fmt.Errorf("loadRepo: %w", err) | |||
} | |||
// Reload the issue | |||
currentIssue, err := GetIssueByID(ctx, issue.ID) | |||
if err != nil { | |||
return nil, false, err | |||
} | |||
if _, err := db.GetEngine(ctx).ID(issue.ID).Cols( | |||
"name", "content", "milestone_id", "priority", | |||
"deadline_unix", "updated_unix", "is_locked"). | |||
Update(issue); err != nil { | |||
return nil, false, err | |||
} | |||
titleChanged = currentIssue.Title != issue.Title | |||
if titleChanged { | |||
opts := &CreateCommentOptions{ | |||
Type: CommentTypeChangeTitle, | |||
Doer: doer, | |||
Repo: issue.Repo, | |||
Issue: issue, | |||
OldTitle: currentIssue.Title, | |||
NewTitle: issue.Title, | |||
} | |||
_, err := CreateComment(ctx, opts) | |||
if err != nil { | |||
return nil, false, fmt.Errorf("createComment: %w", err) | |||
} | |||
} | |||
if currentIssue.IsClosed != issue.IsClosed { | |||
statusChangeComment, err = doChangeIssueStatus(ctx, issue, doer, false) | |||
if err != nil { | |||
return nil, false, err | |||
} | |||
} | |||
if err := issue.AddCrossReferences(ctx, doer, true); err != nil { | |||
return nil, false, err | |||
} | |||
return statusChangeComment, titleChanged, committer.Commit() | |||
} | |||
// UpdateIssueDeadline updates an issue deadline and adds comments. Setting a deadline to 0 means deleting it. | |||
func UpdateIssueDeadline(ctx context.Context, issue *Issue, deadlineUnix timeutil.TimeStamp, doer *user_model.User) (err error) { | |||
// if the deadline hasn't changed do nothing |
@@ -34,7 +34,7 @@ func TestXRef_AddCrossReferences(t *testing.T) { | |||
// Comment on PR to reopen issue #1 | |||
content = fmt.Sprintf("content2, reopens #%d", itarget.Index) | |||
c := testCreateComment(t, 1, 2, pr.ID, content) | |||
c := testCreateComment(t, 2, pr.ID, content) | |||
ref = unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: itarget.ID, RefIssueID: pr.ID, RefCommentID: c.ID}) | |||
assert.Equal(t, issues_model.CommentTypeCommentRef, ref.Type) | |||
assert.Equal(t, pr.RepoID, ref.RefRepoID) | |||
@@ -104,18 +104,18 @@ func TestXRef_ResolveCrossReferences(t *testing.T) { | |||
pr := testCreatePR(t, 1, 2, "titlepr", fmt.Sprintf("closes #%d", i1.Index)) | |||
rp := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: i1.ID, RefIssueID: pr.Issue.ID, RefCommentID: 0}) | |||
c1 := testCreateComment(t, 1, 2, pr.Issue.ID, fmt.Sprintf("closes #%d", i2.Index)) | |||
c1 := testCreateComment(t, 2, pr.Issue.ID, fmt.Sprintf("closes #%d", i2.Index)) | |||
r1 := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: i2.ID, RefIssueID: pr.Issue.ID, RefCommentID: c1.ID}) | |||
// Must be ignored | |||
c2 := testCreateComment(t, 1, 2, pr.Issue.ID, fmt.Sprintf("mentions #%d", i2.Index)) | |||
c2 := testCreateComment(t, 2, pr.Issue.ID, fmt.Sprintf("mentions #%d", i2.Index)) | |||
unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: i2.ID, RefIssueID: pr.Issue.ID, RefCommentID: c2.ID}) | |||
// Must be superseded by c4/r4 | |||
c3 := testCreateComment(t, 1, 2, pr.Issue.ID, fmt.Sprintf("reopens #%d", i3.Index)) | |||
c3 := testCreateComment(t, 2, pr.Issue.ID, fmt.Sprintf("reopens #%d", i3.Index)) | |||
unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: i3.ID, RefIssueID: pr.Issue.ID, RefCommentID: c3.ID}) | |||
c4 := testCreateComment(t, 1, 2, pr.Issue.ID, fmt.Sprintf("closes #%d", i3.Index)) | |||
c4 := testCreateComment(t, 2, pr.Issue.ID, fmt.Sprintf("closes #%d", i3.Index)) | |||
r4 := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: i3.ID, RefIssueID: pr.Issue.ID, RefCommentID: c4.ID}) | |||
refs, err := pr.ResolveCrossReferences(db.DefaultContext) | |||
@@ -168,7 +168,7 @@ func testCreatePR(t *testing.T, repo, doer int64, title, content string) *issues | |||
return pr | |||
} | |||
func testCreateComment(t *testing.T, repo, doer, issue int64, content string) *issues_model.Comment { | |||
func testCreateComment(t *testing.T, doer, issue int64, content string) *issues_model.Comment { | |||
d := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: doer}) | |||
i := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: issue}) | |||
c := &issues_model.Comment{Type: issues_model.CommentTypeComment, PosterID: doer, Poster: d, IssueID: issue, Issue: i, Content: content} |
@@ -291,15 +291,15 @@ func TestAccessibleReposEnv_CountRepos(t *testing.T) { | |||
func TestAccessibleReposEnv_RepoIDs(t *testing.T) { | |||
assert.NoError(t, unittest.PrepareTestDatabase()) | |||
org := unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 3}) | |||
testSuccess := func(userID, _, pageSize int64, expectedRepoIDs []int64) { | |||
testSuccess := func(userID int64, expectedRepoIDs []int64) { | |||
env, err := organization.AccessibleReposEnv(db.DefaultContext, org, userID) | |||
assert.NoError(t, err) | |||
repoIDs, err := env.RepoIDs(1, 100) | |||
assert.NoError(t, err) | |||
assert.Equal(t, expectedRepoIDs, repoIDs) | |||
} | |||
testSuccess(2, 1, 100, []int64{3, 5, 32}) | |||
testSuccess(4, 0, 100, []int64{3, 32}) | |||
testSuccess(2, []int64{3, 5, 32}) | |||
testSuccess(4, []int64{3, 32}) | |||
} | |||
func TestAccessibleReposEnv_Repos(t *testing.T) { |
@@ -130,7 +130,10 @@ func GetRepoAssignees(ctx context.Context, repo *Repository) (_ []*user_model.Us | |||
// and just waste 1 unit is cheaper than re-allocate memory once. | |||
users := make([]*user_model.User, 0, len(uniqueUserIDs)+1) | |||
if len(userIDs) > 0 { | |||
if err = e.In("id", uniqueUserIDs.Values()).OrderBy(user_model.GetOrderByName()).Find(&users); err != nil { | |||
if err = e.In("id", uniqueUserIDs.Values()). | |||
Where(builder.Eq{"`user`.is_active": true}). | |||
OrderBy(user_model.GetOrderByName()). | |||
Find(&users); err != nil { | |||
return nil, err | |||
} | |||
} | |||
@@ -152,7 +155,8 @@ func GetReviewers(ctx context.Context, repo *Repository, doerID, posterID int64) | |||
return nil, err | |||
} | |||
cond := builder.And(builder.Neq{"`user`.id": posterID}) | |||
cond := builder.And(builder.Neq{"`user`.id": posterID}). | |||
And(builder.Eq{"`user`.is_active": true}) | |||
if repo.IsPrivate || repo.Owner.Visibility == api.VisibleTypePrivate { | |||
// This a private repository: |
@@ -9,6 +9,7 @@ import ( | |||
"code.gitea.io/gitea/models/db" | |||
repo_model "code.gitea.io/gitea/models/repo" | |||
"code.gitea.io/gitea/models/unittest" | |||
user_model "code.gitea.io/gitea/models/user" | |||
"github.com/stretchr/testify/assert" | |||
) | |||
@@ -25,8 +26,17 @@ func TestRepoAssignees(t *testing.T) { | |||
repo21 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 21}) | |||
users, err = repo_model.GetRepoAssignees(db.DefaultContext, repo21) | |||
assert.NoError(t, err) | |||
assert.Len(t, users, 4) | |||
assert.ElementsMatch(t, []int64{10, 15, 16, 18}, []int64{users[0].ID, users[1].ID, users[2].ID, users[3].ID}) | |||
if assert.Len(t, users, 4) { | |||
assert.ElementsMatch(t, []int64{10, 15, 16, 18}, []int64{users[0].ID, users[1].ID, users[2].ID, users[3].ID}) | |||
} | |||
// do not return deactivated users | |||
assert.NoError(t, user_model.UpdateUserCols(db.DefaultContext, &user_model.User{ID: 15, IsActive: false}, "is_active")) | |||
users, err = repo_model.GetRepoAssignees(db.DefaultContext, repo21) | |||
assert.NoError(t, err) | |||
if assert.Len(t, users, 3) { | |||
assert.NotContains(t, []int64{users[0].ID, users[1].ID, users[2].ID}, 15) | |||
} | |||
} | |||
func TestRepoGetReviewers(t *testing.T) { | |||
@@ -38,17 +48,19 @@ func TestRepoGetReviewers(t *testing.T) { | |||
ctx := db.DefaultContext | |||
reviewers, err := repo_model.GetReviewers(ctx, repo1, 2, 2) | |||
assert.NoError(t, err) | |||
assert.Len(t, reviewers, 4) | |||
if assert.Len(t, reviewers, 3) { | |||
assert.ElementsMatch(t, []int64{1, 4, 11}, []int64{reviewers[0].ID, reviewers[1].ID, reviewers[2].ID}) | |||
} | |||
// should include doer if doer is not PR poster. | |||
reviewers, err = repo_model.GetReviewers(ctx, repo1, 11, 2) | |||
assert.NoError(t, err) | |||
assert.Len(t, reviewers, 4) | |||
assert.Len(t, reviewers, 3) | |||
// should not include PR poster, if PR poster would be otherwise eligible | |||
reviewers, err = repo_model.GetReviewers(ctx, repo1, 11, 4) | |||
assert.NoError(t, err) | |||
assert.Len(t, reviewers, 3) | |||
assert.Len(t, reviewers, 2) | |||
// test private user repo | |||
repo2 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2}) |
@@ -208,14 +208,14 @@ func detectMatched(gitRepo *git.Repository, commit *git.Commit, triggedEvent web | |||
webhook_module.HookEventIssueAssign, | |||
webhook_module.HookEventIssueLabel, | |||
webhook_module.HookEventIssueMilestone: | |||
return matchIssuesEvent(commit, payload.(*api.IssuePayload), evt) | |||
return matchIssuesEvent(payload.(*api.IssuePayload), evt) | |||
case // issue_comment | |||
webhook_module.HookEventIssueComment, | |||
// `pull_request_comment` is same as `issue_comment` | |||
// See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_comment-use-issue_comment | |||
webhook_module.HookEventPullRequestComment: | |||
return matchIssueCommentEvent(commit, payload.(*api.IssueCommentPayload), evt) | |||
return matchIssueCommentEvent(payload.(*api.IssueCommentPayload), evt) | |||
case // pull_request | |||
webhook_module.HookEventPullRequest, | |||
@@ -229,19 +229,19 @@ func detectMatched(gitRepo *git.Repository, commit *git.Commit, triggedEvent web | |||
case // pull_request_review | |||
webhook_module.HookEventPullRequestReviewApproved, | |||
webhook_module.HookEventPullRequestReviewRejected: | |||
return matchPullRequestReviewEvent(commit, payload.(*api.PullRequestPayload), evt) | |||
return matchPullRequestReviewEvent(payload.(*api.PullRequestPayload), evt) | |||
case // pull_request_review_comment | |||
webhook_module.HookEventPullRequestReviewComment: | |||
return matchPullRequestReviewCommentEvent(commit, payload.(*api.PullRequestPayload), evt) | |||
return matchPullRequestReviewCommentEvent(payload.(*api.PullRequestPayload), evt) | |||
case // release | |||
webhook_module.HookEventRelease: | |||
return matchReleaseEvent(commit, payload.(*api.ReleasePayload), evt) | |||
return matchReleaseEvent(payload.(*api.ReleasePayload), evt) | |||
case // registry_package | |||
webhook_module.HookEventPackage: | |||
return matchPackageEvent(commit, payload.(*api.PackagePayload), evt) | |||
return matchPackageEvent(payload.(*api.PackagePayload), evt) | |||
default: | |||
log.Warn("unsupported event %q", triggedEvent) | |||
@@ -347,7 +347,7 @@ func matchPushEvent(commit *git.Commit, pushPayload *api.PushPayload, evt *jobpa | |||
return matchTimes == len(evt.Acts()) | |||
} | |||
func matchIssuesEvent(commit *git.Commit, issuePayload *api.IssuePayload, evt *jobparser.Event) bool { | |||
func matchIssuesEvent(issuePayload *api.IssuePayload, evt *jobparser.Event) bool { | |||
// with no special filter parameters | |||
if len(evt.Acts()) == 0 { | |||
return true | |||
@@ -495,7 +495,7 @@ func matchPullRequestEvent(gitRepo *git.Repository, commit *git.Commit, prPayloa | |||
return activityTypeMatched && matchTimes == len(evt.Acts()) | |||
} | |||
func matchIssueCommentEvent(commit *git.Commit, issueCommentPayload *api.IssueCommentPayload, evt *jobparser.Event) bool { | |||
func matchIssueCommentEvent(issueCommentPayload *api.IssueCommentPayload, evt *jobparser.Event) bool { | |||
// with no special filter parameters | |||
if len(evt.Acts()) == 0 { | |||
return true | |||
@@ -527,7 +527,7 @@ func matchIssueCommentEvent(commit *git.Commit, issueCommentPayload *api.IssueCo | |||
return matchTimes == len(evt.Acts()) | |||
} | |||
func matchPullRequestReviewEvent(commit *git.Commit, prPayload *api.PullRequestPayload, evt *jobparser.Event) bool { | |||
func matchPullRequestReviewEvent(prPayload *api.PullRequestPayload, evt *jobparser.Event) bool { | |||
// with no special filter parameters | |||
if len(evt.Acts()) == 0 { | |||
return true | |||
@@ -576,7 +576,7 @@ func matchPullRequestReviewEvent(commit *git.Commit, prPayload *api.PullRequestP | |||
return matchTimes == len(evt.Acts()) | |||
} | |||
func matchPullRequestReviewCommentEvent(commit *git.Commit, prPayload *api.PullRequestPayload, evt *jobparser.Event) bool { | |||
func matchPullRequestReviewCommentEvent(prPayload *api.PullRequestPayload, evt *jobparser.Event) bool { | |||
// with no special filter parameters | |||
if len(evt.Acts()) == 0 { | |||
return true | |||
@@ -625,7 +625,7 @@ func matchPullRequestReviewCommentEvent(commit *git.Commit, prPayload *api.PullR | |||
return matchTimes == len(evt.Acts()) | |||
} | |||
func matchReleaseEvent(commit *git.Commit, payload *api.ReleasePayload, evt *jobparser.Event) bool { | |||
func matchReleaseEvent(payload *api.ReleasePayload, evt *jobparser.Event) bool { | |||
// with no special filter parameters | |||
if len(evt.Acts()) == 0 { | |||
return true | |||
@@ -662,7 +662,7 @@ func matchReleaseEvent(commit *git.Commit, payload *api.ReleasePayload, evt *job | |||
return matchTimes == len(evt.Acts()) | |||
} | |||
func matchPackageEvent(commit *git.Commit, payload *api.PackagePayload, evt *jobparser.Event) bool { | |||
func matchPackageEvent(payload *api.PackagePayload, evt *jobparser.Event) bool { | |||
// with no special filter parameters | |||
if len(evt.Acts()) == 0 { | |||
return true |
@@ -4,12 +4,11 @@ | |||
package pwn | |||
import ( | |||
"math/rand/v2" | |||
"net/http" | |||
"strings" | |||
"testing" | |||
"time" | |||
"github.com/h2non/gock" | |||
"github.com/stretchr/testify/assert" | |||
) | |||
@@ -18,86 +17,34 @@ var client = New(WithHTTP(&http.Client{ | |||
})) | |||
func TestPassword(t *testing.T) { | |||
// Check input error | |||
_, err := client.CheckPassword("", false) | |||
defer gock.Off() | |||
count, err := client.CheckPassword("", false) | |||
assert.ErrorIs(t, err, ErrEmptyPassword, "blank input should return ErrEmptyPassword") | |||
assert.Equal(t, -1, count) | |||
// Should fail | |||
fail := "password1234" | |||
count, err := client.CheckPassword(fail, false) | |||
assert.NotEmpty(t, count, "%s should fail as a password", fail) | |||
gock.New("https://api.pwnedpasswords.com").Get("/range/5c1d8").Times(1).Reply(200).BodyString("EAF2F254732680E8AC339B84F3266ECCBB5:1\r\nFC446EB88938834178CB9322C1EE273C2A7:2") | |||
count, err = client.CheckPassword("pwned", false) | |||
assert.NoError(t, err) | |||
assert.Equal(t, 1, count) | |||
// Should fail (with padding) | |||
failPad := "administrator" | |||
count, err = client.CheckPassword(failPad, true) | |||
assert.NotEmpty(t, count, "%s should fail as a password", failPad) | |||
gock.New("https://api.pwnedpasswords.com").Get("/range/ba189").Times(1).Reply(200).BodyString("FD4CB34F0378BCB15D23F6FFD28F0775C9E:3\r\nFDF342FCD8C3611DAE4D76E8A992A3E4169:4") | |||
count, err = client.CheckPassword("notpwned", false) | |||
assert.NoError(t, err) | |||
assert.Equal(t, 0, count) | |||
// Checking for a "good" password isn't going to be perfect, but we can give it a good try | |||
// with hopefully minimal error. Try five times? | |||
assert.Condition(t, func() bool { | |||
for i := 0; i <= 5; i++ { | |||
count, err = client.CheckPassword(testPassword(), false) | |||
assert.NoError(t, err) | |||
if count == 0 { | |||
return true | |||
} | |||
} | |||
return false | |||
}, "no generated passwords passed. there is a chance this is a fluke") | |||
// Again, but with padded responses | |||
assert.Condition(t, func() bool { | |||
for i := 0; i <= 5; i++ { | |||
count, err = client.CheckPassword(testPassword(), true) | |||
assert.NoError(t, err) | |||
if count == 0 { | |||
return true | |||
} | |||
} | |||
return false | |||
}, "no generated passwords passed. there is a chance this is a fluke") | |||
} | |||
// Credit to https://golangbyexample.com/generate-random-password-golang/ | |||
// DO NOT USE THIS FOR AN ACTUAL PASSWORD GENERATOR | |||
var ( | |||
lowerCharSet = "abcdedfghijklmnopqrst" | |||
upperCharSet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" | |||
specialCharSet = "!@#$%&*" | |||
numberSet = "0123456789" | |||
allCharSet = lowerCharSet + upperCharSet + specialCharSet + numberSet | |||
) | |||
func testPassword() string { | |||
var password strings.Builder | |||
// Set special character | |||
for i := 0; i < 5; i++ { | |||
random := rand.IntN(len(specialCharSet)) | |||
password.WriteString(string(specialCharSet[random])) | |||
} | |||
// Set numeric | |||
for i := 0; i < 5; i++ { | |||
random := rand.IntN(len(numberSet)) | |||
password.WriteString(string(numberSet[random])) | |||
} | |||
gock.New("https://api.pwnedpasswords.com").Get("/range/a1733").Times(1).Reply(200).BodyString("C4CE0F1F0062B27B9E2F41AF0C08218017C:1\r\nFC446EB88938834178CB9322C1EE273C2A7:2\r\nFE81480327C992FE62065A827429DD1318B:0") | |||
count, err = client.CheckPassword("paddedpwned", true) | |||
assert.NoError(t, err) | |||
assert.Equal(t, 1, count) | |||
// Set uppercase | |||
for i := 0; i < 5; i++ { | |||
random := rand.IntN(len(upperCharSet)) | |||
password.WriteString(string(upperCharSet[random])) | |||
} | |||
gock.New("https://api.pwnedpasswords.com").Get("/range/5617b").Times(1).Reply(200).BodyString("FD4CB34F0378BCB15D23F6FFD28F0775C9E:3\r\nFDF342FCD8C3611DAE4D76E8A992A3E4169:4\r\nFE81480327C992FE62065A827429DD1318B:0") | |||
count, err = client.CheckPassword("paddednotpwned", true) | |||
assert.NoError(t, err) | |||
assert.Equal(t, 0, count) | |||
for i := 0; i < 5; i++ { | |||
random := rand.IntN(len(allCharSet)) | |||
password.WriteString(string(allCharSet[random])) | |||
} | |||
inRune := []rune(password.String()) | |||
rand.Shuffle(len(inRune), func(i, j int) { | |||
inRune[i], inRune[j] = inRune[j], inRune[i] | |||
}) | |||
return string(inRune) | |||
gock.New("https://api.pwnedpasswords.com").Get("/range/79082").Times(1).Reply(200).BodyString("FDF342FCD8C3611DAE4D76E8A992A3E4169:4\r\nFE81480327C992FE62065A827429DD1318B:0\r\nAFEF386F56EB0B4BE314E07696E5E6E6536:0") | |||
count, err = client.CheckPassword("paddednotpwnedzero", true) | |||
assert.NoError(t, err) | |||
assert.Equal(t, 0, count) | |||
} |
@@ -29,7 +29,7 @@ func (tes Entries) GetCommitsInfo(ctx context.Context, commit *Commit, treePath | |||
var revs map[string]*Commit | |||
if commit.repo.LastCommitCache != nil { | |||
var unHitPaths []string | |||
revs, unHitPaths, err = getLastCommitForPathsByCache(ctx, commit.ID.String(), treePath, entryPaths, commit.repo.LastCommitCache) | |||
revs, unHitPaths, err = getLastCommitForPathsByCache(commit.ID.String(), treePath, entryPaths, commit.repo.LastCommitCache) | |||
if err != nil { | |||
return nil, nil, err | |||
} | |||
@@ -97,7 +97,7 @@ func (tes Entries) GetCommitsInfo(ctx context.Context, commit *Commit, treePath | |||
return commitsInfo, treeCommit, nil | |||
} | |||
func getLastCommitForPathsByCache(ctx context.Context, commitID, treePath string, paths []string, cache *LastCommitCache) (map[string]*Commit, []string, error) { | |||
func getLastCommitForPathsByCache(commitID, treePath string, paths []string, cache *LastCommitCache) (map[string]*Commit, []string, error) { | |||
var unHitEntryPaths []string | |||
results := make(map[string]*Commit) | |||
for _, p := range paths { |
@@ -29,6 +29,7 @@ type GrepOptions struct { | |||
ContextLineNumber int | |||
IsFuzzy bool | |||
MaxLineLength int // the maximum length of a line to parse, exceeding chars will be truncated | |||
PathspecList []string | |||
} | |||
func GrepSearch(ctx context.Context, repo *Repository, search string, opts GrepOptions) ([]*GrepResult, error) { | |||
@@ -62,6 +63,7 @@ func GrepSearch(ctx context.Context, repo *Repository, search string, opts GrepO | |||
cmd.AddOptionValues("-e", strings.TrimLeft(search, "-")) | |||
} | |||
cmd.AddDynamicArguments(util.IfZero(opts.RefName, "HEAD")) | |||
cmd.AddDashesAndList(opts.PathspecList...) | |||
opts.MaxResultLimit = util.IfZero(opts.MaxResultLimit, 50) | |||
stderr := bytes.Buffer{} | |||
err = cmd.Run(&RunOpts{ |
@@ -31,6 +31,26 @@ func TestGrepSearch(t *testing.T) { | |||
}, | |||
}, res) | |||
res, err = GrepSearch(context.Background(), repo, "void", GrepOptions{PathspecList: []string{":(glob)java-hello/*"}}) | |||
assert.NoError(t, err) | |||
assert.Equal(t, []*GrepResult{ | |||
{ | |||
Filename: "java-hello/main.java", | |||
LineNumbers: []int{3}, | |||
LineCodes: []string{" public static void main(String[] args)"}, | |||
}, | |||
}, res) | |||
res, err = GrepSearch(context.Background(), repo, "void", GrepOptions{PathspecList: []string{":(glob,exclude)java-hello/*"}}) | |||
assert.NoError(t, err) | |||
assert.Equal(t, []*GrepResult{ | |||
{ | |||
Filename: "main.vendor.java", | |||
LineNumbers: []int{3}, | |||
LineCodes: []string{" public static void main(String[] args)"}, | |||
}, | |||
}, res) | |||
res, err = GrepSearch(context.Background(), repo, "void", GrepOptions{MaxResultLimit: 1}) | |||
assert.NoError(t, err) | |||
assert.Equal(t, []*GrepResult{ |
@@ -18,7 +18,7 @@ import ( | |||
) | |||
// ParseTreeEntries parses the output of a `git ls-tree -l` command. | |||
func ParseTreeEntries(h ObjectFormat, data []byte) ([]*TreeEntry, error) { | |||
func ParseTreeEntries(data []byte) ([]*TreeEntry, error) { | |||
return parseTreeEntries(data, nil) | |||
} | |||
@@ -67,7 +67,7 @@ func TestParseTreeEntries(t *testing.T) { | |||
} | |||
for _, testCase := range testCases { | |||
entries, err := ParseTreeEntries(Sha1ObjectFormat, []byte(testCase.Input)) | |||
entries, err := ParseTreeEntries([]byte(testCase.Input)) | |||
assert.NoError(t, err) | |||
if len(entries) > 1 { | |||
fmt.Println(testCase.Expected[0].ID) |
@@ -17,13 +17,13 @@ import ( | |||
) | |||
// ParseTreeEntries parses the output of a `git ls-tree -l` command. | |||
func ParseTreeEntries(objectFormat ObjectFormat, data []byte) ([]*TreeEntry, error) { | |||
return parseTreeEntries(objectFormat, data, nil) | |||
func ParseTreeEntries(data []byte) ([]*TreeEntry, error) { | |||
return parseTreeEntries(data, nil) | |||
} | |||
var sepSpace = []byte{' '} | |||
func parseTreeEntries(objectFormat ObjectFormat, data []byte, ptree *Tree) ([]*TreeEntry, error) { | |||
func parseTreeEntries(data []byte, ptree *Tree) ([]*TreeEntry, error) { | |||
var err error | |||
entries := make([]*TreeEntry, 0, bytes.Count(data, []byte{'\n'})+1) | |||
for pos := 0; pos < len(data); { |
@@ -12,8 +12,6 @@ import ( | |||
) | |||
func TestParseTreeEntriesLong(t *testing.T) { | |||
objectFormat := Sha1ObjectFormat | |||
testCases := []struct { | |||
Input string | |||
Expected []*TreeEntry | |||
@@ -56,7 +54,7 @@ func TestParseTreeEntriesLong(t *testing.T) { | |||
}, | |||
} | |||
for _, testCase := range testCases { | |||
entries, err := ParseTreeEntries(objectFormat, []byte(testCase.Input)) | |||
entries, err := ParseTreeEntries([]byte(testCase.Input)) | |||
assert.NoError(t, err) | |||
assert.Len(t, entries, len(testCase.Expected)) | |||
for i, entry := range entries { | |||
@@ -66,8 +64,6 @@ func TestParseTreeEntriesLong(t *testing.T) { | |||
} | |||
func TestParseTreeEntriesShort(t *testing.T) { | |||
objectFormat := Sha1ObjectFormat | |||
testCases := []struct { | |||
Input string | |||
Expected []*TreeEntry | |||
@@ -91,7 +87,7 @@ func TestParseTreeEntriesShort(t *testing.T) { | |||
}, | |||
} | |||
for _, testCase := range testCases { | |||
entries, err := ParseTreeEntries(objectFormat, []byte(testCase.Input)) | |||
entries, err := ParseTreeEntries([]byte(testCase.Input)) | |||
assert.NoError(t, err) | |||
assert.Len(t, entries, len(testCase.Expected)) | |||
for i, entry := range entries { | |||
@@ -102,7 +98,7 @@ func TestParseTreeEntriesShort(t *testing.T) { | |||
func TestParseTreeEntriesInvalid(t *testing.T) { | |||
// there was a panic: "runtime error: slice bounds out of range" when the input was invalid: #20315 | |||
entries, err := ParseTreeEntries(Sha1ObjectFormat, []byte("100644 blob ea0d83c9081af9500ac9f804101b3fd0a5c293af")) | |||
entries, err := ParseTreeEntries([]byte("100644 blob ea0d83c9081af9500ac9f804101b3fd0a5c293af")) | |||
assert.Error(t, err) | |||
assert.Len(t, entries, 0) | |||
} |
@@ -77,11 +77,8 @@ func (t *Tree) ListEntries() (Entries, error) { | |||
return nil, runErr | |||
} | |||
objectFormat, err := t.repo.GetObjectFormat() | |||
if err != nil { | |||
return nil, err | |||
} | |||
t.entries, err = parseTreeEntries(objectFormat, stdout, t) | |||
var err error | |||
t.entries, err = parseTreeEntries(stdout, t) | |||
if err == nil { | |||
t.entriesParsed = true | |||
} | |||
@@ -104,11 +101,8 @@ func (t *Tree) listEntriesRecursive(extraArgs TrustedCmdArgs) (Entries, error) { | |||
return nil, runErr | |||
} | |||
objectFormat, err := t.repo.GetObjectFormat() | |||
if err != nil { | |||
return nil, err | |||
} | |||
t.entriesRecursive, err = parseTreeEntries(objectFormat, stdout, t) | |||
var err error | |||
t.entriesRecursive, err = parseTreeEntries(stdout, t) | |||
if err == nil { | |||
t.entriesRecursiveParsed = true | |||
} |
@@ -17,11 +17,14 @@ import ( | |||
"time" | |||
charsetModule "code.gitea.io/gitea/modules/charset" | |||
"code.gitea.io/gitea/modules/container" | |||
"code.gitea.io/gitea/modules/httpcache" | |||
"code.gitea.io/gitea/modules/log" | |||
"code.gitea.io/gitea/modules/setting" | |||
"code.gitea.io/gitea/modules/typesniffer" | |||
"code.gitea.io/gitea/modules/util" | |||
"github.com/klauspost/compress/gzhttp" | |||
) | |||
type ServeHeaderOptions struct { | |||
@@ -38,6 +41,11 @@ type ServeHeaderOptions struct { | |||
func ServeSetHeaders(w http.ResponseWriter, opts *ServeHeaderOptions) { | |||
header := w.Header() | |||
skipCompressionExts := container.SetOf(".gz", ".bz2", ".zip", ".xz", ".zst", ".deb", ".apk", ".jar", ".png", ".jpg", ".webp") | |||
if skipCompressionExts.Contains(strings.ToLower(path.Ext(opts.Filename))) { | |||
w.Header().Add(gzhttp.HeaderNoCompression, "1") | |||
} | |||
contentType := typesniffer.ApplicationOctetStream | |||
if opts.ContentType != "" { | |||
if opts.ContentTypeCharset != "" { |
@@ -39,8 +39,6 @@ import ( | |||
const ( | |||
unicodeNormalizeName = "unicodeNormalize" | |||
maxBatchSize = 16 | |||
// fuzzyDenominator determines the levenshtein distance per each character of a keyword | |||
fuzzyDenominator = 4 | |||
) | |||
func addUnicodeNormalizeTokenFilter(m *mapping.IndexMappingImpl) error { | |||
@@ -245,7 +243,7 @@ func (b *Indexer) Search(ctx context.Context, opts *internal.SearchOptions) (int | |||
phraseQuery.Analyzer = repoIndexerAnalyzer | |||
keywordQuery = phraseQuery | |||
if opts.IsKeywordFuzzy { | |||
phraseQuery.Fuzziness = len(opts.Keyword) / fuzzyDenominator | |||
phraseQuery.Fuzziness = inner_bleve.GuessFuzzinessByKeyword(opts.Keyword) | |||
} | |||
if len(opts.RepoIDs) > 0 { |
@@ -62,8 +62,8 @@ func isIndexable(entry *git.TreeEntry) bool { | |||
} | |||
// parseGitLsTreeOutput parses the output of a `git ls-tree -r --full-name` command | |||
func parseGitLsTreeOutput(objectFormat git.ObjectFormat, stdout []byte) ([]internal.FileUpdate, error) { | |||
entries, err := git.ParseTreeEntries(objectFormat, stdout) | |||
func parseGitLsTreeOutput(stdout []byte) ([]internal.FileUpdate, error) { | |||
entries, err := git.ParseTreeEntries(stdout) | |||
if err != nil { | |||
return nil, err | |||
} | |||
@@ -91,10 +91,8 @@ func genesisChanges(ctx context.Context, repo *repo_model.Repository, revision s | |||
return nil, runErr | |||
} | |||
objectFormat := git.ObjectFormatFromName(repo.ObjectFormatName) | |||
var err error | |||
changes.Updates, err = parseGitLsTreeOutput(objectFormat, stdout) | |||
changes.Updates, err = parseGitLsTreeOutput(stdout) | |||
return &changes, err | |||
} | |||
@@ -172,8 +170,6 @@ func nonGenesisChanges(ctx context.Context, repo *repo_model.Repository, revisio | |||
return nil, err | |||
} | |||
objectFormat := git.ObjectFormatFromName(repo.ObjectFormatName) | |||
changes.Updates, err = parseGitLsTreeOutput(objectFormat, lsTreeStdout) | |||
changes.Updates, err = parseGitLsTreeOutput(lsTreeStdout) | |||
return &changes, err | |||
} |
@@ -178,12 +178,6 @@ func Init() { | |||
}() | |||
rIndexer = elasticsearch.NewIndexer(setting.Indexer.RepoConnStr, setting.Indexer.RepoIndexerName) | |||
if err != nil { | |||
cancel() | |||
(*globalIndexer.Load()).Close() | |||
close(waitChannel) | |||
log.Fatal("PID: %d Unable to create the elasticsearch Repository Indexer connstr: %s Error: %v", os.Getpid(), setting.Indexer.RepoConnStr, err) | |||
} | |||
existed, err = rIndexer.Init(ctx) | |||
if err != nil { | |||
cancel() |
@@ -47,3 +47,15 @@ func openIndexer(path string, latestVersion int) (bleve.Index, int, error) { | |||
return index, 0, nil | |||
} | |||
func GuessFuzzinessByKeyword(s string) int { | |||
// according to https://github.com/blevesearch/bleve/issues/1563, the supported max fuzziness is 2 | |||
// magic number 4 was chosen to determine the levenshtein distance per each character of a keyword | |||
// BUT, when using CJK (eg: `갃갃갃` `啊啊啊`), it mismatches a lot. | |||
for _, r := range s { | |||
if r >= 128 { | |||
return 0 | |||
} | |||
} | |||
return min(2, len(s)/4) | |||
} |
@@ -35,11 +35,7 @@ func addUnicodeNormalizeTokenFilter(m *mapping.IndexMappingImpl) error { | |||
}) | |||
} | |||
const ( | |||
maxBatchSize = 16 | |||
// fuzzyDenominator determines the levenshtein distance per each character of a keyword | |||
fuzzyDenominator = 4 | |||
) | |||
const maxBatchSize = 16 | |||
// IndexerData an update to the issue indexer | |||
type IndexerData internal.IndexerData | |||
@@ -162,7 +158,7 @@ func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) ( | |||
if options.Keyword != "" { | |||
fuzziness := 0 | |||
if options.IsFuzzyKeyword { | |||
fuzziness = len(options.Keyword) / fuzzyDenominator | |||
fuzziness = inner_bleve.GuessFuzzinessByKeyword(options.Keyword) | |||
} | |||
queries = append(queries, bleve.NewDisjunctionQuery([]query.Query{ |
@@ -10,6 +10,7 @@ import ( | |||
"path" | |||
"path/filepath" | |||
"regexp" | |||
"slices" | |||
"strings" | |||
"sync" | |||
@@ -54,7 +55,7 @@ var ( | |||
shortLinkPattern = regexp.MustCompile(`\[\[(.*?)\]\](\w*)`) | |||
// anyHashPattern splits url containing SHA into parts | |||
anyHashPattern = regexp.MustCompile(`https?://(?:\S+/){4,5}([0-9a-f]{40,64})(/[-+~_%.a-zA-Z0-9/]+)?(#[-+~_%.a-zA-Z0-9]+)?`) | |||
anyHashPattern = regexp.MustCompile(`https?://(?:\S+/){4,5}([0-9a-f]{40,64})(/[-+~%./\w]+)?(\?[-+~%.\w&=]+)?(#[-+~%.\w]+)?`) | |||
// comparePattern matches "http://domain/org/repo/compare/COMMIT1...COMMIT2#hash" | |||
comparePattern = regexp.MustCompile(`https?://(?:\S+/){4,5}([0-9a-f]{7,64})(\.\.\.?)([0-9a-f]{7,64})?(#[-+~_%.a-zA-Z0-9]+)?`) | |||
@@ -591,17 +592,17 @@ func replaceContentList(node *html.Node, i, j int, newNodes []*html.Node) { | |||
func mentionProcessor(ctx *RenderContext, node *html.Node) { | |||
start := 0 | |||
next := node.NextSibling | |||
for node != nil && node != next && start < len(node.Data) { | |||
// We replace only the first mention; other mentions will be addressed later | |||
found, loc := references.FindFirstMentionBytes([]byte(node.Data[start:])) | |||
nodeStop := node.NextSibling | |||
for node != nodeStop { | |||
found, loc := references.FindFirstMentionBytes(util.UnsafeStringToBytes(node.Data[start:])) | |||
if !found { | |||
return | |||
node = node.NextSibling | |||
start = 0 | |||
continue | |||
} | |||
loc.Start += start | |||
loc.End += start | |||
mention := node.Data[loc.Start:loc.End] | |||
var teams string | |||
teams, ok := ctx.Metas["teams"] | |||
// FIXME: util.URLJoin may not be necessary here: | |||
// - setting.AppURL is defined to have a terminal '/' so unless mention[1:] | |||
@@ -623,10 +624,10 @@ func mentionProcessor(ctx *RenderContext, node *html.Node) { | |||
if DefaultProcessorHelper.IsUsernameMentionable != nil && DefaultProcessorHelper.IsUsernameMentionable(ctx.Ctx, mentionedUsername) { | |||
replaceContent(node, loc.Start, loc.End, createLink(util.URLJoin(ctx.Links.Prefix(), mentionedUsername), mention, "mention")) | |||
node = node.NextSibling.NextSibling | |||
start = 0 | |||
} else { | |||
node = node.NextSibling | |||
start = loc.End | |||
} | |||
start = 0 | |||
} | |||
} | |||
@@ -963,57 +964,68 @@ func commitCrossReferencePatternProcessor(ctx *RenderContext, node *html.Node) { | |||
} | |||
} | |||
// fullHashPatternProcessor renders SHA containing URLs | |||
func fullHashPatternProcessor(ctx *RenderContext, node *html.Node) { | |||
if ctx.Metas == nil { | |||
return | |||
type anyHashPatternResult struct { | |||
PosStart int | |||
PosEnd int | |||
FullURL string | |||
CommitID string | |||
SubPath string | |||
QueryHash string | |||
} | |||
func anyHashPatternExtract(s string) (ret anyHashPatternResult, ok bool) { | |||
m := anyHashPattern.FindStringSubmatchIndex(s) | |||
if m == nil { | |||
return ret, false | |||
} | |||
next := node.NextSibling | |||
for node != nil && node != next { | |||
m := anyHashPattern.FindStringSubmatchIndex(node.Data) | |||
if m == nil { | |||
return | |||
ret.PosStart, ret.PosEnd = m[0], m[1] | |||
ret.FullURL = s[ret.PosStart:ret.PosEnd] | |||
if strings.HasSuffix(ret.FullURL, ".") { | |||
// if url ends in '.', it's very likely that it is not part of the actual url but used to finish a sentence. | |||
ret.PosEnd-- | |||
ret.FullURL = ret.FullURL[:len(ret.FullURL)-1] | |||
for i := 0; i < len(m); i++ { | |||
m[i] = min(m[i], ret.PosEnd) | |||
} | |||
} | |||
urlFull := node.Data[m[0]:m[1]] | |||
text := base.ShortSha(node.Data[m[2]:m[3]]) | |||
ret.CommitID = s[m[2]:m[3]] | |||
if m[5] > 0 { | |||
ret.SubPath = s[m[4]:m[5]] | |||
} | |||
// 3rd capture group matches a optional path | |||
subpath := "" | |||
if m[5] > 0 { | |||
subpath = node.Data[m[4]:m[5]] | |||
} | |||
lastStart, lastEnd := m[len(m)-2], m[len(m)-1] | |||
if lastEnd > 0 { | |||
ret.QueryHash = s[lastStart:lastEnd][1:] | |||
} | |||
return ret, true | |||
} | |||
// 4th capture group matches a optional url hash | |||
hash := "" | |||
if m[7] > 0 { | |||
hash = node.Data[m[6]:m[7]][1:] | |||
// fullHashPatternProcessor renders SHA containing URLs | |||
func fullHashPatternProcessor(ctx *RenderContext, node *html.Node) { | |||
if ctx.Metas == nil { | |||
return | |||
} | |||
nodeStop := node.NextSibling | |||
for node != nodeStop { | |||
if node.Type != html.TextNode { | |||
node = node.NextSibling | |||
continue | |||
} | |||
start := m[0] | |||
end := m[1] | |||
// If url ends in '.', it's very likely that it is not part of the | |||
// actual url but used to finish a sentence. | |||
if strings.HasSuffix(urlFull, ".") { | |||
end-- | |||
urlFull = urlFull[:len(urlFull)-1] | |||
if hash != "" { | |||
hash = hash[:len(hash)-1] | |||
} else if subpath != "" { | |||
subpath = subpath[:len(subpath)-1] | |||
} | |||
ret, ok := anyHashPatternExtract(node.Data) | |||
if !ok { | |||
node = node.NextSibling | |||
continue | |||
} | |||
if subpath != "" { | |||
text += subpath | |||
text := base.ShortSha(ret.CommitID) | |||
if ret.SubPath != "" { | |||
text += ret.SubPath | |||
} | |||
if hash != "" { | |||
text += " (" + hash + ")" | |||
if ret.QueryHash != "" { | |||
text += " (" + ret.QueryHash + ")" | |||
} | |||
replaceContent(node, start, end, createCodeLink(urlFull, text, "commit")) | |||
replaceContent(node, ret.PosStart, ret.PosEnd, createCodeLink(ret.FullURL, text, "commit")) | |||
node = node.NextSibling.NextSibling | |||
} | |||
} | |||
@@ -1022,19 +1034,16 @@ func comparePatternProcessor(ctx *RenderContext, node *html.Node) { | |||
if ctx.Metas == nil { | |||
return | |||
} | |||
next := node.NextSibling | |||
for node != nil && node != next { | |||
m := comparePattern.FindStringSubmatchIndex(node.Data) | |||
if m == nil { | |||
return | |||
nodeStop := node.NextSibling | |||
for node != nodeStop { | |||
if node.Type != html.TextNode { | |||
node = node.NextSibling | |||
continue | |||
} | |||
// Ensure that every group (m[0]...m[7]) has a match | |||
for i := 0; i < 8; i++ { | |||
if m[i] == -1 { | |||
return | |||
} | |||
m := comparePattern.FindStringSubmatchIndex(node.Data) | |||
if m == nil || slices.Contains(m[:8], -1) { // ensure that every group (m[0]...m[7]) has a match | |||
node = node.NextSibling | |||
continue | |||
} | |||
urlFull := node.Data[m[0]:m[1]] |
@@ -60,7 +60,8 @@ func renderCodeBlock(ctx *RenderContext, node *html.Node) (urlPosStart, urlPosSt | |||
} | |||
func codePreviewPatternProcessor(ctx *RenderContext, node *html.Node) { | |||
for node != nil { | |||
nodeStop := node.NextSibling | |||
for node != nodeStop { | |||
if node.Type != html.TextNode { | |||
node = node.NextSibling | |||
continue |
@@ -399,36 +399,61 @@ func TestRegExp_sha1CurrentPattern(t *testing.T) { | |||
} | |||
func TestRegExp_anySHA1Pattern(t *testing.T) { | |||
testCases := map[string][]string{ | |||
testCases := map[string]anyHashPatternResult{ | |||
"https://github.com/jquery/jquery/blob/a644101ed04d0beacea864ce805e0c4f86ba1cd1/test/unit/event.js#L2703": { | |||
"a644101ed04d0beacea864ce805e0c4f86ba1cd1", | |||
"/test/unit/event.js", | |||
"#L2703", | |||
CommitID: "a644101ed04d0beacea864ce805e0c4f86ba1cd1", | |||
SubPath: "/test/unit/event.js", | |||
QueryHash: "L2703", | |||
}, | |||
"https://github.com/jquery/jquery/blob/a644101ed04d0beacea864ce805e0c4f86ba1cd1/test/unit/event.js": { | |||
"a644101ed04d0beacea864ce805e0c4f86ba1cd1", | |||
"/test/unit/event.js", | |||
"", | |||
CommitID: "a644101ed04d0beacea864ce805e0c4f86ba1cd1", | |||
SubPath: "/test/unit/event.js", | |||
}, | |||
"https://github.com/jquery/jquery/commit/0705be475092aede1eddae01319ec931fb9c65fc": { | |||
"0705be475092aede1eddae01319ec931fb9c65fc", | |||
"", | |||
"", | |||
CommitID: "0705be475092aede1eddae01319ec931fb9c65fc", | |||
}, | |||
"https://github.com/jquery/jquery/tree/0705be475092aede1eddae01319ec931fb9c65fc/src": { | |||
"0705be475092aede1eddae01319ec931fb9c65fc", | |||
"/src", | |||
"", | |||
CommitID: "0705be475092aede1eddae01319ec931fb9c65fc", | |||
SubPath: "/src", | |||
}, | |||
"https://try.gogs.io/gogs/gogs/commit/d8a994ef243349f321568f9e36d5c3f444b99cae#diff-2": { | |||
"d8a994ef243349f321568f9e36d5c3f444b99cae", | |||
"", | |||
"#diff-2", | |||
CommitID: "d8a994ef243349f321568f9e36d5c3f444b99cae", | |||
QueryHash: "diff-2", | |||
}, | |||
"non-url": {}, | |||
"http://a/b/c/d/e/1234567812345678123456781234567812345678123456781234567812345678?a=b#L1-L2": { | |||
CommitID: "1234567812345678123456781234567812345678123456781234567812345678", | |||
QueryHash: "L1-L2", | |||
}, | |||
"http://a/b/c/d/e/1234567812345678123456781234567812345678123456781234567812345678.": { | |||
CommitID: "1234567812345678123456781234567812345678123456781234567812345678", | |||
}, | |||
"http://a/b/c/d/e/1234567812345678123456781234567812345678123456781234567812345678/sub.": { | |||
CommitID: "1234567812345678123456781234567812345678123456781234567812345678", | |||
SubPath: "/sub", | |||
}, | |||
"http://a/b/c/d/e/1234567812345678123456781234567812345678123456781234567812345678?a=b.": { | |||
CommitID: "1234567812345678123456781234567812345678123456781234567812345678", | |||
}, | |||
"http://a/b/c/d/e/1234567812345678123456781234567812345678123456781234567812345678?a=b&c=d": { | |||
CommitID: "1234567812345678123456781234567812345678123456781234567812345678", | |||
}, | |||
"http://a/b/c/d/e/1234567812345678123456781234567812345678123456781234567812345678#hash.": { | |||
CommitID: "1234567812345678123456781234567812345678123456781234567812345678", | |||
QueryHash: "hash", | |||
}, | |||
} | |||
for k, v := range testCases { | |||
assert.Equal(t, anyHashPattern.FindStringSubmatch(k)[1:], v) | |||
ret, ok := anyHashPatternExtract(k) | |||
if v.CommitID == "" { | |||
assert.False(t, ok) | |||
} else { | |||
assert.EqualValues(t, strings.TrimSuffix(k, "."), ret.FullURL) | |||
assert.EqualValues(t, v.CommitID, ret.CommitID) | |||
assert.EqualValues(t, v.SubPath, ret.SubPath) | |||
assert.EqualValues(t, v.QueryHash, ret.QueryHash) | |||
} | |||
} | |||
} | |||
@@ -124,6 +124,11 @@ func TestRender_CrossReferences(t *testing.T) { | |||
test( | |||
util.URLJoin(markup.TestAppURL, "gogitea", "some-repo-name", "issues", "12345"), | |||
`<p><a href="`+util.URLJoin(markup.TestAppURL, "gogitea", "some-repo-name", "issues", "12345")+`" class="ref-issue" rel="nofollow">gogitea/some-repo-name#12345</a></p>`) | |||
inputURL := "https://host/a/b/commit/0123456789012345678901234567890123456789/foo.txt?a=b#L2-L3" | |||
test( | |||
inputURL, | |||
`<p><a href="`+inputURL+`" rel="nofollow"><code>0123456789/foo.txt (L2-L3)</code></a></p>`) | |||
} | |||
func TestMisc_IsSameDomain(t *testing.T) { | |||
@@ -695,7 +700,7 @@ func TestIssue18471(t *testing.T) { | |||
}, strings.NewReader(data), &res) | |||
assert.NoError(t, err) | |||
assert.Equal(t, "<a href=\"http://domain/org/repo/compare/783b039...da951ce\" class=\"compare\"><code class=\"nohighlight\">783b039...da951ce</code></a>", res.String()) | |||
assert.Equal(t, `<a href="http://domain/org/repo/compare/783b039...da951ce" class="compare"><code class="nohighlight">783b039...da951ce</code></a>`, res.String()) | |||
} | |||
func TestIsFullURL(t *testing.T) { |
@@ -65,11 +65,11 @@ func (g *ASTTransformer) Transform(node *ast.Document, reader text.Reader, pc pa | |||
case *ast.Paragraph: | |||
g.applyElementDir(v) | |||
case *ast.Image: | |||
g.transformImage(ctx, v, reader) | |||
g.transformImage(ctx, v) | |||
case *ast.Link: | |||
g.transformLink(ctx, v, reader) | |||
g.transformLink(ctx, v) | |||
case *ast.List: | |||
g.transformList(ctx, v, reader, rc) | |||
g.transformList(ctx, v, rc) | |||
case *ast.Text: | |||
if v.SoftLineBreak() && !v.HardLineBreak() { | |||
if ctx.Metas["mode"] != "document" { |
@@ -68,7 +68,7 @@ func cssColorHandler(value string) bool { | |||
return css.HSLA.MatchString(value) | |||
} | |||
func (g *ASTTransformer) transformCodeSpan(ctx *markup.RenderContext, v *ast.CodeSpan, reader text.Reader) { | |||
func (g *ASTTransformer) transformCodeSpan(_ *markup.RenderContext, v *ast.CodeSpan, reader text.Reader) { | |||
colorContent := v.Text(reader.Source()) | |||
if cssColorHandler(string(colorContent)) { | |||
v.AppendChild(v, NewColorPreview(colorContent)) |
@@ -13,7 +13,7 @@ import ( | |||
"github.com/yuin/goldmark/util" | |||
) | |||
func (g *ASTTransformer) transformHeading(ctx *markup.RenderContext, v *ast.Heading, reader text.Reader, tocList *[]markup.Header) { | |||
func (g *ASTTransformer) transformHeading(_ *markup.RenderContext, v *ast.Heading, reader text.Reader, tocList *[]markup.Header) { | |||
for _, attr := range v.Attributes() { | |||
if _, ok := attr.Value.([]byte); !ok { | |||
v.SetAttribute(attr.Name, []byte(fmt.Sprintf("%v", attr.Value))) |
@@ -10,10 +10,9 @@ import ( | |||
giteautil "code.gitea.io/gitea/modules/util" | |||
"github.com/yuin/goldmark/ast" | |||
"github.com/yuin/goldmark/text" | |||
) | |||
func (g *ASTTransformer) transformImage(ctx *markup.RenderContext, v *ast.Image, reader text.Reader) { | |||
func (g *ASTTransformer) transformImage(ctx *markup.RenderContext, v *ast.Image) { | |||
// Images need two things: | |||
// | |||
// 1. Their src needs to munged to be a real value |
@@ -10,10 +10,9 @@ import ( | |||
giteautil "code.gitea.io/gitea/modules/util" | |||
"github.com/yuin/goldmark/ast" | |||
"github.com/yuin/goldmark/text" | |||
) | |||
func (g *ASTTransformer) transformLink(ctx *markup.RenderContext, v *ast.Link, reader text.Reader) { | |||
func (g *ASTTransformer) transformLink(ctx *markup.RenderContext, v *ast.Link) { | |||
// Links need their href to munged to be a real value | |||
link := v.Destination | |||
isAnchorFragment := len(link) > 0 && link[0] == '#' |
@@ -11,7 +11,6 @@ import ( | |||
"github.com/yuin/goldmark/ast" | |||
east "github.com/yuin/goldmark/extension/ast" | |||
"github.com/yuin/goldmark/renderer/html" | |||
"github.com/yuin/goldmark/text" | |||
"github.com/yuin/goldmark/util" | |||
) | |||
@@ -50,7 +49,7 @@ func (r *HTMLRenderer) renderTaskCheckBox(w util.BufWriter, source []byte, node | |||
return ast.WalkContinue, nil | |||
} | |||
func (g *ASTTransformer) transformList(ctx *markup.RenderContext, v *ast.List, reader text.Reader, rc *RenderConfig) { | |||
func (g *ASTTransformer) transformList(_ *markup.RenderContext, v *ast.List, rc *RenderConfig) { | |||
if v.HasChildren() { | |||
children := make([]ast.Node, 0, v.ChildCount()) | |||
child := v.FirstChild() |
@@ -54,7 +54,7 @@ func (r *stripRenderer) Render(w io.Writer, source []byte, doc ast.Node) error { | |||
} | |||
return ast.WalkContinue, nil | |||
case *ast.Link: | |||
r.processLink(w, v.Destination) | |||
r.processLink(v.Destination) | |||
return ast.WalkSkipChildren, nil | |||
case *ast.AutoLink: | |||
// This could be a reference to an issue or pull - if so convert it | |||
@@ -124,7 +124,7 @@ func (r *stripRenderer) processAutoLink(w io.Writer, link []byte) { | |||
_, _ = w.Write([]byte(parts[4])) | |||
} | |||
func (r *stripRenderer) processLink(w io.Writer, link []byte) { | |||
func (r *stripRenderer) processLink(link []byte) { | |||
// Links are processed out of band | |||
r.links = append(r.links, string(link)) | |||
} |
@@ -22,7 +22,7 @@ func TestOption(t *testing.T) { | |||
assert.Equal(t, int(0), none.Value()) | |||
assert.Equal(t, int(1), none.ValueOrDefault(1)) | |||
some := optional.Some[int](1) | |||
some := optional.Some(1) | |||
assert.True(t, some.Has()) | |||
assert.Equal(t, int(1), some.Value()) | |||
assert.Equal(t, int(1), some.ValueOrDefault(2)) |
@@ -78,6 +78,7 @@ type PackageMetadataVersion struct { | |||
Repository Repository `json:"repository,omitempty"` | |||
Keywords []string `json:"keywords,omitempty"` | |||
Dependencies map[string]string `json:"dependencies,omitempty"` | |||
BundleDependencies []string `json:"bundleDependencies,omitempty"` | |||
DevDependencies map[string]string `json:"devDependencies,omitempty"` | |||
PeerDependencies map[string]string `json:"peerDependencies,omitempty"` | |||
Bin map[string]string `json:"bin,omitempty"` | |||
@@ -218,6 +219,7 @@ func ParsePackage(r io.Reader) (*Package, error) { | |||
ProjectURL: meta.Homepage, | |||
Keywords: meta.Keywords, | |||
Dependencies: meta.Dependencies, | |||
BundleDependencies: meta.BundleDependencies, | |||
DevelopmentDependencies: meta.DevDependencies, | |||
PeerDependencies: meta.PeerDependencies, | |||
OptionalDependencies: meta.OptionalDependencies, |
@@ -16,6 +16,7 @@ type Metadata struct { | |||
ProjectURL string `json:"project_url,omitempty"` | |||
Keywords []string `json:"keywords,omitempty"` | |||
Dependencies map[string]string `json:"dependencies,omitempty"` | |||
BundleDependencies []string `json:"bundleDependencies,omitempty"` | |||
DevelopmentDependencies map[string]string `json:"development_dependencies,omitempty"` | |||
PeerDependencies map[string]string `json:"peer_dependencies,omitempty"` | |||
OptionalDependencies map[string]string `json:"optional_dependencies,omitempty"` |
@@ -29,7 +29,7 @@ var ( | |||
// TODO: fix invalid linking issue | |||
// mentionPattern matches all mentions in the form of "@user" or "@org/team" | |||
mentionPattern = regexp.MustCompile(`(?:\s|^|\(|\[)(@[0-9a-zA-Z-_]+|@[0-9a-zA-Z-_]+\/?[0-9a-zA-Z-_]+|@[0-9a-zA-Z-_][0-9a-zA-Z-_.]+\/?[0-9a-zA-Z-_.]+[0-9a-zA-Z-_])(?:\s|[:,;.?!]\s|[:,;.?!]?$|\)|\])`) | |||
mentionPattern = regexp.MustCompile(`(?:\s|^|\(|\[)(@[-\w][-.\w]*?|@[-\w][-.\w]*?/[-\w][-.\w]*?)(?:\s|$|[:,;.?!](\s|$)|'|\)|\])`) | |||
// issueNumericPattern matches string that references to a numeric issue, e.g. #1287 | |||
issueNumericPattern = regexp.MustCompile(`(?:\s|^|\(|\[|\'|\")([#!][0-9]+)(?:\s|$|\)|\]|\'|\"|[:;,.?!]\s|[:;,.?!]$)`) | |||
// issueAlphanumericPattern matches string that references to an alphanumeric issue, e.g. ABC-1234 |
@@ -392,6 +392,7 @@ func TestRegExp_mentionPattern(t *testing.T) { | |||
{"@gitea,", "@gitea"}, | |||
{"@gitea;", "@gitea"}, | |||
{"@gitea/team1;", "@gitea/team1"}, | |||
{"@user's idea", "@user"}, | |||
} | |||
falseTestCases := []string{ | |||
"@ 0", | |||
@@ -412,7 +413,6 @@ func TestRegExp_mentionPattern(t *testing.T) { | |||
for _, testCase := range trueTestCases { | |||
found := mentionPattern.FindStringSubmatch(testCase.pat) | |||
assert.Len(t, found, 2) | |||
assert.Equal(t, testCase.exp, found[1]) | |||
} | |||
for _, testCase := range falseTestCases { |
@@ -0,0 +1,32 @@ | |||
// Copyright 2024 The Gitea Authors. All rights reserved. | |||
// SPDX-License-Identifier: MIT | |||
package setting | |||
import "github.com/gobwas/glob" | |||
type GlobMatcher struct { | |||
compiledGlob glob.Glob | |||
patternString string | |||
} | |||
var _ glob.Glob = (*GlobMatcher)(nil) | |||
func (g *GlobMatcher) Match(s string) bool { | |||
return g.compiledGlob.Match(s) | |||
} | |||
func (g *GlobMatcher) PatternString() string { | |||
return g.patternString | |||
} | |||
func GlobMatcherCompile(pattern string, separators ...rune) (*GlobMatcher, error) { | |||
g, err := glob.Compile(pattern, separators...) | |||
if err != nil { | |||
return nil, err | |||
} | |||
return &GlobMatcher{ | |||
compiledGlob: g, | |||
patternString: pattern, | |||
}, nil | |||
} |
@@ -38,12 +38,12 @@ func loadIncomingEmailFrom(rootCfg ConfigProvider) { | |||
return | |||
} | |||
if err := checkReplyToAddress(IncomingEmail.ReplyToAddress); err != nil { | |||
if err := checkReplyToAddress(); err != nil { | |||
log.Fatal("Invalid incoming_mail.REPLY_TO_ADDRESS (%s): %v", IncomingEmail.ReplyToAddress, err) | |||
} | |||
} | |||
func checkReplyToAddress(address string) error { | |||
func checkReplyToAddress() error { | |||
parsed, err := mail.ParseAddress(IncomingEmail.ReplyToAddress) | |||
if err != nil { | |||
return err |
@@ -10,8 +10,6 @@ import ( | |||
"time" | |||
"code.gitea.io/gitea/modules/log" | |||
"github.com/gobwas/glob" | |||
) | |||
// Indexer settings | |||
@@ -30,8 +28,8 @@ var Indexer = struct { | |||
RepoConnStr string | |||
RepoIndexerName string | |||
MaxIndexerFileSize int64 | |||
IncludePatterns []glob.Glob | |||
ExcludePatterns []glob.Glob | |||
IncludePatterns []*GlobMatcher | |||
ExcludePatterns []*GlobMatcher | |||
ExcludeVendored bool | |||
}{ | |||
IssueType: "bleve", | |||
@@ -93,12 +91,12 @@ func loadIndexerFrom(rootCfg ConfigProvider) { | |||
} | |||
// IndexerGlobFromString parses a comma separated list of patterns and returns a glob.Glob slice suited for repo indexing | |||
func IndexerGlobFromString(globstr string) []glob.Glob { | |||
extarr := make([]glob.Glob, 0, 10) | |||
func IndexerGlobFromString(globstr string) []*GlobMatcher { | |||
extarr := make([]*GlobMatcher, 0, 10) | |||
for _, expr := range strings.Split(strings.ToLower(globstr), ",") { | |||
expr = strings.TrimSpace(expr) | |||
if expr != "" { | |||
if g, err := glob.Compile(expr, '.', '/'); err != nil { | |||
if g, err := GlobMatcherCompile(expr, '.', '/'); err != nil { | |||
log.Info("Invalid glob expression '%s' (skipped): %v", expr, err) | |||
} else { | |||
extarr = append(extarr, g) |
@@ -97,7 +97,7 @@ func getStorage(rootCfg ConfigProvider, name, typ string, sec ConfigSection) (*S | |||
return nil, err | |||
} | |||
overrideSec := getStorageOverrideSection(rootCfg, targetSec, sec, tp, name) | |||
overrideSec := getStorageOverrideSection(rootCfg, sec, tp, name) | |||
targetType := targetSec.Key("STORAGE_TYPE").String() | |||
switch targetType { | |||
@@ -189,7 +189,7 @@ func getStorageTargetSection(rootCfg ConfigProvider, name, typ string, sec Confi | |||
} | |||
// getStorageOverrideSection override section will be read SERVE_DIRECT, PATH, MINIO_BASE_PATH, MINIO_BUCKET to override the targetsec when possible | |||
func getStorageOverrideSection(rootConfig ConfigProvider, targetSec, sec ConfigSection, targetSecType targetSecType, name string) ConfigSection { | |||
func getStorageOverrideSection(rootConfig ConfigProvider, sec ConfigSection, targetSecType targetSecType, name string) ConfigSection { | |||
if targetSecType == targetSecIsSec { | |||
return nil | |||
} |
@@ -85,7 +85,7 @@ type CreatePullRequestOption struct { | |||
// EditPullRequestOption options when modify pull request | |||
type EditPullRequestOption struct { | |||
Title string `json:"title"` | |||
Body string `json:"body"` | |||
Body *string `json:"body"` | |||
Base string `json:"base"` | |||
Assignee string `json:"assignee"` | |||
Assignees []string `json:"assignees"` |
@@ -0,0 +1,34 @@ | |||
// Copyright 2023 The Gitea Authors. All rights reserved. | |||
// SPDX-License-Identifier: MIT | |||
package structs | |||
import ( | |||
"time" | |||
) | |||
// ActionTask represents a ActionTask | |||
type ActionTask struct { | |||
ID int64 `json:"id"` | |||
Name string `json:"name"` | |||
HeadBranch string `json:"head_branch"` | |||
HeadSHA string `json:"head_sha"` | |||
RunNumber int64 `json:"run_number"` | |||
Event string `json:"event"` | |||
DisplayTitle string `json:"display_title"` | |||
Status string `json:"status"` | |||
WorkflowID string `json:"workflow_id"` | |||
URL string `json:"url"` | |||
// swagger:strfmt date-time | |||
CreatedAt time.Time `json:"created_at"` | |||
// swagger:strfmt date-time | |||
UpdatedAt time.Time `json:"updated_at"` | |||
// swagger:strfmt date-time | |||
RunStartedAt time.Time `json:"run_started_at"` | |||
} | |||
// ActionTaskResponse returns a ActionTask | |||
type ActionTaskResponse struct { | |||
Entries []*ActionTask `json:"workflow_runs"` | |||
TotalCount int64 `json:"total_count"` | |||
} |
@@ -121,29 +121,25 @@ func RenderIssueTitle(ctx context.Context, text string, metas map[string]string) | |||
// RenderLabel renders a label | |||
// locale is needed due to an import cycle with our context providing the `Tr` function | |||
func RenderLabel(ctx context.Context, locale translation.Locale, label *issues_model.Label) template.HTML { | |||
var ( | |||
archivedCSSClass string | |||
textColor = util.ContrastColor(label.Color) | |||
labelScope = label.ExclusiveScope() | |||
) | |||
description := emoji.ReplaceAliases(template.HTMLEscapeString(label.Description)) | |||
var extraCSSClasses string | |||
textColor := util.ContrastColor(label.Color) | |||
labelScope := label.ExclusiveScope() | |||
descriptionText := emoji.ReplaceAliases(label.Description) | |||
if label.IsArchived() { | |||
archivedCSSClass = "archived-label" | |||
description = fmt.Sprintf("(%s) %s", locale.TrString("archived"), description) | |||
extraCSSClasses = "archived-label" | |||
descriptionText = fmt.Sprintf("(%s) %s", locale.TrString("archived"), descriptionText) | |||
} | |||
if labelScope == "" { | |||
// Regular label | |||
s := fmt.Sprintf("<div class='ui label %s' style='color: %s !important; background-color: %s !important;' data-tooltip-content title='%s'>%s</div>", | |||
archivedCSSClass, textColor, label.Color, description, RenderEmoji(ctx, label.Name)) | |||
return template.HTML(s) | |||
return HTMLFormat(`<div class="ui label %s" style="color: %s !important; background-color: %s !important;" data-tooltip-content title="%s">%s</div>`, | |||
extraCSSClasses, textColor, label.Color, descriptionText, RenderEmoji(ctx, label.Name)) | |||
} | |||
// Scoped label | |||
scopeText := RenderEmoji(ctx, labelScope) | |||
itemText := RenderEmoji(ctx, label.Name[len(labelScope)+1:]) | |||
scopeHTML := RenderEmoji(ctx, labelScope) | |||
itemHTML := RenderEmoji(ctx, label.Name[len(labelScope)+1:]) | |||
// Make scope and item background colors slightly darker and lighter respectively. | |||
// More contrast needed with higher luminance, empirically tweaked. | |||
@@ -171,14 +167,13 @@ func RenderLabel(ctx context.Context, locale translation.Locale, label *issues_m | |||
itemColor := "#" + hex.EncodeToString(itemBytes) | |||
scopeColor := "#" + hex.EncodeToString(scopeBytes) | |||
s := fmt.Sprintf("<span class='ui label %s scope-parent' data-tooltip-content title='%s'>"+ | |||
"<div class='ui label scope-left' style='color: %s !important; background-color: %s !important'>%s</div>"+ | |||
"<div class='ui label scope-right' style='color: %s !important; background-color: %s !important'>%s</div>"+ | |||
"</span>", | |||
archivedCSSClass, description, | |||
textColor, scopeColor, scopeText, | |||
textColor, itemColor, itemText) | |||
return template.HTML(s) | |||
return HTMLFormat(`<span class="ui label %s scope-parent" data-tooltip-content title="%s">`+ | |||
`<div class="ui label scope-left" style="color: %s !important; background-color: %s !important">%s</div>`+ | |||
`<div class="ui label scope-right" style="color: %s !important; background-color: %s !important">%s</div>`+ | |||
`</span>`, | |||
extraCSSClasses, descriptionText, | |||
textColor, scopeColor, scopeHTML, | |||
textColor, itemColor, itemHTML) | |||
} | |||
// RenderEmoji renders html text with emoji post processors |
@@ -207,3 +207,8 @@ func TestRenderLabels(t *testing.T) { | |||
expected = `/owner/repo/pulls?labels=123` | |||
assert.Contains(t, RenderLabels(ctx, locale, []*issues.Label{label}, "/owner/repo", issue), expected) | |||
} | |||
func TestUserMention(t *testing.T) { | |||
rendered := RenderMarkdownToHtml(context.Background(), "@no-such-user @mention-user @mention-user") | |||
assert.EqualValues(t, `<p>@no-such-user <a href="/mention-user" rel="nofollow">@mention-user</a> <a href="/mention-user" rel="nofollow">@mention-user</a></p>`, strings.TrimSpace(string(rendered))) | |||
} |
@@ -3495,6 +3495,7 @@ npm.install = To install the package using npm, run the following command: | |||
npm.install2 = or add it to the package.json file: | |||
npm.dependencies = Dependencies | |||
npm.dependencies.development = Development Dependencies | |||
npm.dependencies.bundle = Bundled Dependencies | |||
npm.dependencies.peer = Peer Dependencies | |||
npm.dependencies.optional = Optional Dependencies | |||
npm.details.tag = Tag |
@@ -3495,6 +3495,7 @@ npm.install=Para instalar o pacote usando o npm, execute o seguinte comando: | |||
npm.install2=ou adicione-o ao ficheiro <code>package.json</code>: | |||
npm.dependencies=Dependências | |||
npm.dependencies.development=Dependências de desenvolvimento | |||
npm.dependencies.bundle=Dependências agregadas | |||
npm.dependencies.peer=Dependências de pares | |||
npm.dependencies.optional=Dependências opcionais | |||
npm.details.tag=Etiqueta |
@@ -14,6 +14,7 @@ | |||
"@github/text-expander-element": "2.6.1", | |||
"@mcaptcha/vanilla-glue": "0.1.0-alpha-3", | |||
"@primer/octicons": "19.9.0", | |||
"@silverwind/vue3-calendar-heatmap": "2.0.6", | |||
"add-asset-webpack-plugin": "2.0.1", | |||
"ansi_up": "6.0.2", | |||
"asciinema-player": "3.7.1", | |||
@@ -42,7 +43,6 @@ | |||
"postcss": "8.4.38", | |||
"postcss-loader": "8.1.1", | |||
"postcss-nesting": "12.1.2", | |||
"pretty-ms": "9.0.0", | |||
"sortablejs": "1.15.2", | |||
"swagger-ui-dist": "5.17.2", | |||
"tailwindcss": "3.4.3", | |||
@@ -58,7 +58,6 @@ | |||
"vue-bar-graph": "2.0.0", | |||
"vue-chartjs": "5.3.1", | |||
"vue-loader": "17.4.2", | |||
"vue3-calendar-heatmap": "2.0.5", | |||
"webpack": "5.91.0", | |||
"webpack-cli": "5.1.4", | |||
"wrap-ansi": "9.0.0" | |||
@@ -1627,6 +1626,18 @@ | |||
"win32" | |||
] | |||
}, | |||
"node_modules/@silverwind/vue3-calendar-heatmap": { | |||
"version": "2.0.6", | |||
"resolved": "https://registry.npmjs.org/@silverwind/vue3-calendar-heatmap/-/vue3-calendar-heatmap-2.0.6.tgz", | |||
"integrity": "sha512-efX+nf2GR7EfA7iNgZDeM9Jue5ksglSXvN0C/ja0M1bTmkCpAxKlGJ3vki7wfTPQgX1O0nCfAM62IKqUUEM0cQ==", | |||
"engines": { | |||
"node": ">=16" | |||
}, | |||
"peerDependencies": { | |||
"tippy.js": "^6.3.7", | |||
"vue": "^3.2.29" | |||
} | |||
}, | |||
"node_modules/@sinclair/typebox": { | |||
"version": "0.27.8", | |||
"resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", | |||
@@ -9170,17 +9181,6 @@ | |||
"url": "https://github.com/sponsors/sindresorhus" | |||
} | |||
}, | |||
"node_modules/parse-ms": { | |||
"version": "4.0.0", | |||
"resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-4.0.0.tgz", | |||
"integrity": "sha512-TXfryirbmq34y8QBwgqCVLi+8oA3oWx2eAnSn62ITyEhEYaWRlVZ2DvMM9eZbMs/RfxPu/PK/aBLyGj4IrqMHw==", | |||
"engines": { | |||
"node": ">=18" | |||
}, | |||
"funding": { | |||
"url": "https://github.com/sponsors/sindresorhus" | |||
} | |||
}, | |||
"node_modules/path-exists": { | |||
"version": "4.0.0", | |||
"resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", | |||
@@ -9772,20 +9772,6 @@ | |||
"url": "https://github.com/chalk/ansi-styles?sponsor=1" | |||
} | |||
}, | |||
"node_modules/pretty-ms": { | |||
"version": "9.0.0", | |||
"resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-9.0.0.tgz", | |||
"integrity": "sha512-E9e9HJ9R9NasGOgPaPE8VMeiPKAyWR5jcFpNnwIejslIhWqdqOrb2wShBsncMPUb+BcCd2OPYfh7p2W6oemTng==", | |||
"dependencies": { | |||
"parse-ms": "^4.0.0" | |||
}, | |||
"engines": { | |||
"node": ">=18" | |||
}, | |||
"funding": { | |||
"url": "https://github.com/sponsors/sindresorhus" | |||
} | |||
}, | |||
"node_modules/printable-characters": { | |||
"version": "1.0.42", | |||
"resolved": "https://registry.npmjs.org/printable-characters/-/printable-characters-1.0.42.tgz", | |||
@@ -12226,18 +12212,6 @@ | |||
} | |||
} | |||
}, | |||
"node_modules/vue3-calendar-heatmap": { | |||
"version": "2.0.5", | |||
"resolved": "https://registry.npmjs.org/vue3-calendar-heatmap/-/vue3-calendar-heatmap-2.0.5.tgz", | |||
"integrity": "sha512-qvveNQlTS5Aw7AvRLs0zOyu3uP5iGJlXJAnkrkG2ElDdyQ8H1TJhQ8rL702CROjAg16ezIveUY10nCO7lqZ25w==", | |||
"engines": { | |||
"node": ">=16" | |||
}, | |||
"peerDependencies": { | |||
"tippy.js": "^6.3.7", | |||
"vue": "^3.2.29" | |||
} | |||
}, | |||
"node_modules/watchpack": { | |||
"version": "2.4.1", | |||
"resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.1.tgz", |
@@ -13,6 +13,7 @@ | |||
"@github/text-expander-element": "2.6.1", | |||
"@mcaptcha/vanilla-glue": "0.1.0-alpha-3", | |||
"@primer/octicons": "19.9.0", | |||
"@silverwind/vue3-calendar-heatmap": "2.0.6", | |||
"add-asset-webpack-plugin": "2.0.1", | |||
"ansi_up": "6.0.2", | |||
"asciinema-player": "3.7.1", | |||
@@ -41,7 +42,6 @@ | |||
"postcss": "8.4.38", | |||
"postcss-loader": "8.1.1", | |||
"postcss-nesting": "12.1.2", | |||
"pretty-ms": "9.0.0", | |||
"sortablejs": "1.15.2", | |||
"swagger-ui-dist": "5.17.2", | |||
"tailwindcss": "3.4.3", | |||
@@ -57,7 +57,6 @@ | |||
"vue-bar-graph": "2.0.0", | |||
"vue-chartjs": "5.3.1", | |||
"vue-loader": "17.4.2", | |||
"vue3-calendar-heatmap": "2.0.5", | |||
"webpack": "5.91.0", | |||
"webpack-cli": "5.1.4", | |||
"wrap-ansi": "9.0.0" |
@@ -140,9 +140,7 @@ func serveMavenMetadata(ctx *context.Context, params parameters) { | |||
ctx.Resp.Header().Set("Content-Length", strconv.Itoa(len(xmlMetadataWithHeader))) | |||
ctx.Resp.Header().Set("Content-Type", contentTypeXML) | |||
if _, err := ctx.Resp.Write(xmlMetadataWithHeader); err != nil { | |||
log.Error("write bytes failed: %v", err) | |||
} | |||
_, _ = ctx.Resp.Write(xmlMetadataWithHeader) | |||
} | |||
func servePackageFile(ctx *context.Context, params parameters, serveContent bool) { |
@@ -64,6 +64,7 @@ func createPackageMetadataVersion(registryURL string, pd *packages_model.Package | |||
Homepage: metadata.ProjectURL, | |||
License: metadata.License, | |||
Dependencies: metadata.Dependencies, | |||
BundleDependencies: metadata.BundleDependencies, | |||
DevDependencies: metadata.DevelopmentDependencies, | |||
PeerDependencies: metadata.PeerDependencies, | |||
OptionalDependencies: metadata.OptionalDependencies, |
@@ -67,7 +67,7 @@ func AddUserBadges(ctx *context.APIContext) { | |||
// "$ref": "#/responses/forbidden" | |||
form := web.GetForm(ctx).(*api.UserBadgeOption) | |||
badges := prepareBadgesForReplaceOrAdd(ctx, *form) | |||
badges := prepareBadgesForReplaceOrAdd(*form) | |||
if err := user_model.AddUserBadges(ctx, ctx.ContextUser, badges); err != nil { | |||
ctx.Error(http.StatusInternalServerError, "ReplaceUserBadges", err) | |||
@@ -103,7 +103,7 @@ func DeleteUserBadges(ctx *context.APIContext) { | |||
// "$ref": "#/responses/validationError" | |||
form := web.GetForm(ctx).(*api.UserBadgeOption) | |||
badges := prepareBadgesForReplaceOrAdd(ctx, *form) | |||
badges := prepareBadgesForReplaceOrAdd(*form) | |||
if err := user_model.RemoveUserBadges(ctx, ctx.ContextUser, badges); err != nil { | |||
ctx.Error(http.StatusInternalServerError, "ReplaceUserBadges", err) | |||
@@ -113,7 +113,7 @@ func DeleteUserBadges(ctx *context.APIContext) { | |||
ctx.Status(http.StatusNoContent) | |||
} | |||
func prepareBadgesForReplaceOrAdd(ctx *context.APIContext, form api.UserBadgeOption) []*user_model.Badge { | |||
func prepareBadgesForReplaceOrAdd(form api.UserBadgeOption) []*user_model.Badge { | |||
badges := make([]*user_model.Badge, len(form.BadgeSlugs)) | |||
for i, badge := range form.BadgeSlugs { | |||
badges[i] = &user_model.Badge{ |
@@ -1168,6 +1168,9 @@ func Routes() *web.Route { | |||
m.Post("", reqToken(), reqRepoWriter(unit.TypeCode), mustNotBeArchived, bind(api.CreateTagOption{}), repo.CreateTag) | |||
m.Delete("/*", reqToken(), reqRepoWriter(unit.TypeCode), mustNotBeArchived, repo.DeleteTag) | |||
}, reqRepoReader(unit.TypeCode), context.ReferencesGitRepo(true)) | |||
m.Group("/actions", func() { | |||
m.Get("/tasks", repo.ListActionTasks) | |||
}, reqRepoReader(unit.TypeActions), context.ReferencesGitRepo(true)) | |||
m.Group("/keys", func() { | |||
m.Combo("").Get(repo.ListDeployKeys). | |||
Post(bind(api.CreateKeyOption{}), repo.CreateDeployKey) |
@@ -17,6 +17,7 @@ import ( | |||
"code.gitea.io/gitea/routers/api/v1/utils" | |||
actions_service "code.gitea.io/gitea/services/actions" | |||
"code.gitea.io/gitea/services/context" | |||
"code.gitea.io/gitea/services/convert" | |||
secret_service "code.gitea.io/gitea/services/secrets" | |||
) | |||
@@ -517,3 +518,68 @@ type Action struct{} | |||
func NewAction() actions_service.API { | |||
return Action{} | |||
} | |||
// ListActionTasks list all the actions of a repository | |||
func ListActionTasks(ctx *context.APIContext) { | |||
// swagger:operation GET /repos/{owner}/{repo}/actions/tasks repository ListActionTasks | |||
// --- | |||
// summary: List a repository's action tasks | |||
// produces: | |||
// - application/json | |||
// parameters: | |||
// - name: owner | |||
// in: path | |||
// description: owner of the repo | |||
// type: string | |||
// required: true | |||
// - name: repo | |||
// in: path | |||
// description: name of the repo | |||
// type: string | |||
// required: true | |||
// - name: page | |||
// in: query | |||
// description: page number of results to return (1-based) | |||
// type: integer | |||
// - name: limit | |||
// in: query | |||
// description: page size of results, default maximum page size is 50 | |||
// type: integer | |||
// responses: | |||
// "200": | |||
// "$ref": "#/responses/TasksList" | |||
// "400": | |||
// "$ref": "#/responses/error" | |||
// "403": | |||
// "$ref": "#/responses/forbidden" | |||
// "404": | |||
// "$ref": "#/responses/notFound" | |||
// "409": | |||
// "$ref": "#/responses/conflict" | |||
// "422": | |||
// "$ref": "#/responses/validationError" | |||
tasks, total, err := db.FindAndCount[actions_model.ActionTask](ctx, &actions_model.FindTaskOptions{ | |||
ListOptions: utils.GetListOptions(ctx), | |||
RepoID: ctx.Repo.Repository.ID, | |||
}) | |||
if err != nil { | |||
ctx.Error(http.StatusInternalServerError, "ListActionTasks", err) | |||
return | |||
} | |||
res := new(api.ActionTaskResponse) | |||
res.TotalCount = total | |||
res.Entries = make([]*api.ActionTask, len(tasks)) | |||
for i := range tasks { | |||
convertedTask, err := convert.ToActionTask(ctx, tasks[i]) | |||
if err != nil { | |||
ctx.Error(http.StatusInternalServerError, "ToActionTask", err) | |||
return | |||
} | |||
res.Entries[i] = convertedTask | |||
} | |||
ctx.JSON(http.StatusOK, &res) | |||
} |
@@ -29,7 +29,6 @@ import ( | |||
"code.gitea.io/gitea/services/context" | |||
"code.gitea.io/gitea/services/convert" | |||
issue_service "code.gitea.io/gitea/services/issue" | |||
notify_service "code.gitea.io/gitea/services/notify" | |||
) | |||
// SearchIssues searches for issues across the repositories that the user has access to | |||
@@ -803,12 +802,19 @@ func EditIssue(ctx *context.APIContext) { | |||
return | |||
} | |||
oldTitle := issue.Title | |||
if len(form.Title) > 0 { | |||
issue.Title = form.Title | |||
err = issue_service.ChangeTitle(ctx, issue, ctx.Doer, form.Title) | |||
if err != nil { | |||
ctx.Error(http.StatusInternalServerError, "ChangeTitle", err) | |||
return | |||
} | |||
} | |||
if form.Body != nil { | |||
issue.Content = *form.Body | |||
err = issue_service.ChangeContent(ctx, issue, ctx.Doer, *form.Body) | |||
if err != nil { | |||
ctx.Error(http.StatusInternalServerError, "ChangeContent", err) | |||
return | |||
} | |||
} | |||
if form.Ref != nil { | |||
err = issue_service.ChangeIssueRef(ctx, issue, ctx.Doer, *form.Ref) | |||
@@ -880,24 +886,14 @@ func EditIssue(ctx *context.APIContext) { | |||
return | |||
} | |||
} | |||
issue.IsClosed = api.StateClosed == api.StateType(*form.State) | |||
} | |||
statusChangeComment, titleChanged, err := issues_model.UpdateIssueByAPI(ctx, issue, ctx.Doer) | |||
if err != nil { | |||
if issues_model.IsErrDependenciesLeft(err) { | |||
ctx.Error(http.StatusPreconditionFailed, "DependenciesLeft", "cannot close this issue because it still has open dependencies") | |||
if err := issue_service.ChangeStatus(ctx, issue, ctx.Doer, "", api.StateClosed == api.StateType(*form.State)); err != nil { | |||
if issues_model.IsErrDependenciesLeft(err) { | |||
ctx.Error(http.StatusPreconditionFailed, "DependenciesLeft", "cannot close this issue because it still has open dependencies") | |||
return | |||
} | |||
ctx.Error(http.StatusInternalServerError, "ChangeStatus", err) | |||
return | |||
} | |||
ctx.Error(http.StatusInternalServerError, "UpdateIssueByAPI", err) | |||
return | |||
} | |||
if titleChanged { | |||
notify_service.IssueChangeTitle(ctx, ctx.Doer, issue, oldTitle) | |||
} | |||
if statusChangeComment != nil { | |||
notify_service.IssueChangeStatus(ctx, ctx.Doer, "", issue, statusChangeComment, issue.IsClosed) | |||
} | |||
// Refetch from database to assign some automatic values |
@@ -14,6 +14,7 @@ import ( | |||
"code.gitea.io/gitea/modules/web" | |||
"code.gitea.io/gitea/services/attachment" | |||
"code.gitea.io/gitea/services/context" | |||
"code.gitea.io/gitea/services/context/upload" | |||
"code.gitea.io/gitea/services/convert" | |||
issue_service "code.gitea.io/gitea/services/issue" | |||
) | |||
@@ -153,6 +154,8 @@ func CreateIssueAttachment(ctx *context.APIContext) { | |||
// "$ref": "#/responses/error" | |||
// "404": | |||
// "$ref": "#/responses/error" | |||
// "422": | |||
// "$ref": "#/responses/validationError" | |||
// "423": | |||
// "$ref": "#/responses/repoArchivedError" | |||
@@ -185,7 +188,11 @@ func CreateIssueAttachment(ctx *context.APIContext) { | |||
IssueID: issue.ID, | |||
}) | |||
if err != nil { | |||
ctx.Error(http.StatusInternalServerError, "UploadAttachment", err) | |||
if upload.IsErrFileTypeForbidden(err) { | |||
ctx.Error(http.StatusUnprocessableEntity, "", err) | |||
} else { | |||
ctx.Error(http.StatusInternalServerError, "UploadAttachment", err) | |||
} | |||
return | |||
} | |||
@@ -16,6 +16,7 @@ import ( | |||
"code.gitea.io/gitea/modules/web" | |||
"code.gitea.io/gitea/services/attachment" | |||
"code.gitea.io/gitea/services/context" | |||
"code.gitea.io/gitea/services/context/upload" | |||
"code.gitea.io/gitea/services/convert" | |||
issue_service "code.gitea.io/gitea/services/issue" | |||
) | |||
@@ -160,6 +161,8 @@ func CreateIssueCommentAttachment(ctx *context.APIContext) { | |||
// "$ref": "#/responses/forbidden" | |||
// "404": | |||
// "$ref": "#/responses/error" | |||
// "422": | |||
// "$ref": "#/responses/validationError" | |||
// "423": | |||
// "$ref": "#/responses/repoArchivedError" | |||
@@ -194,9 +197,14 @@ func CreateIssueCommentAttachment(ctx *context.APIContext) { | |||
CommentID: comment.ID, | |||
}) | |||
if err != nil { | |||
ctx.Error(http.StatusInternalServerError, "UploadAttachment", err) | |||
if upload.IsErrFileTypeForbidden(err) { | |||
ctx.Error(http.StatusUnprocessableEntity, "", err) | |||
} else { | |||
ctx.Error(http.StatusInternalServerError, "UploadAttachment", err) | |||
} | |||
return | |||
} | |||
if err := comment.LoadAttachments(ctx); err != nil { | |||
ctx.Error(http.StatusInternalServerError, "LoadAttachments", err) | |||
return |
@@ -180,7 +180,7 @@ func Migrate(ctx *context.APIContext) { | |||
Status: repo_model.RepositoryBeingMigrated, | |||
}) | |||
if err != nil { | |||
handleMigrateError(ctx, repoOwner, remoteAddr, err) | |||
handleMigrateError(ctx, repoOwner, err) | |||
return | |||
} | |||
@@ -207,7 +207,7 @@ func Migrate(ctx *context.APIContext) { | |||
}() | |||
if repo, err = migrations.MigrateRepository(graceful.GetManager().HammerContext(), ctx.Doer, repoOwner.Name, opts, nil); err != nil { | |||
handleMigrateError(ctx, repoOwner, remoteAddr, err) | |||
handleMigrateError(ctx, repoOwner, err) | |||
return | |||
} | |||
@@ -215,7 +215,7 @@ func Migrate(ctx *context.APIContext) { | |||
ctx.JSON(http.StatusCreated, convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeAdmin})) | |||
} | |||
func handleMigrateError(ctx *context.APIContext, repoOwner *user_model.User, remoteAddr string, err error) { | |||
func handleMigrateError(ctx *context.APIContext, repoOwner *user_model.User, err error) { | |||
switch { | |||
case repo_model.IsErrRepoAlreadyExist(err): | |||
ctx.Error(http.StatusConflict, "", "The repository with the same name already exists.") |
@@ -602,12 +602,19 @@ func EditPullRequest(ctx *context.APIContext) { | |||
return | |||
} | |||
oldTitle := issue.Title | |||
if len(form.Title) > 0 { | |||
issue.Title = form.Title | |||
err = issue_service.ChangeTitle(ctx, issue, ctx.Doer, form.Title) | |||
if err != nil { | |||
ctx.Error(http.StatusInternalServerError, "ChangeTitle", err) | |||
return | |||
} | |||
} | |||
if len(form.Body) > 0 { | |||
issue.Content = form.Body | |||
if form.Body != nil { | |||
err = issue_service.ChangeContent(ctx, issue, ctx.Doer, *form.Body) | |||
if err != nil { | |||
ctx.Error(http.StatusInternalServerError, "ChangeContent", err) | |||
return | |||
} | |||
} | |||
// Update or remove deadline if set | |||
@@ -686,24 +693,14 @@ func EditPullRequest(ctx *context.APIContext) { | |||
ctx.Error(http.StatusPreconditionFailed, "MergedPRState", "cannot change state of this pull request, it was already merged") | |||
return | |||
} | |||
issue.IsClosed = api.StateClosed == api.StateType(*form.State) | |||
} | |||
statusChangeComment, titleChanged, err := issues_model.UpdateIssueByAPI(ctx, issue, ctx.Doer) | |||
if err != nil { | |||
if issues_model.IsErrDependenciesLeft(err) { | |||
ctx.Error(http.StatusPreconditionFailed, "DependenciesLeft", "cannot close this pull request because it still has open dependencies") | |||
if err := issue_service.ChangeStatus(ctx, issue, ctx.Doer, "", api.StateClosed == api.StateType(*form.State)); err != nil { | |||
if issues_model.IsErrDependenciesLeft(err) { | |||
ctx.Error(http.StatusPreconditionFailed, "DependenciesLeft", "cannot close this pull request because it still has open dependencies") | |||
return | |||
} | |||
ctx.Error(http.StatusInternalServerError, "ChangeStatus", err) | |||
return | |||
} | |||
ctx.Error(http.StatusInternalServerError, "UpdateIssueByAPI", err) | |||
return | |||
} | |||
if titleChanged { | |||
notify_service.IssueChangeTitle(ctx, ctx.Doer, issue, oldTitle) | |||
} | |||
if statusChangeComment != nil { | |||
notify_service.IssueChangeStatus(ctx, ctx.Doer, "", issue, statusChangeComment, issue.IsClosed) | |||
} | |||
// change pull target branch |
@@ -415,6 +415,13 @@ type swaggerRepoNewIssuePinsAllowed struct { | |||
Body api.NewIssuePinsAllowed `json:"body"` | |||
} | |||
// TasksList | |||
// swagger:response TasksList | |||
type swaggerRepoTasksList struct { | |||
// in:body | |||
Body api.ActionTaskResponse `json:"body"` | |||
} | |||
// swagger:response Compare | |||
type swaggerCompare struct { | |||
// in:body |
@@ -6,10 +6,8 @@ package user | |||
import ( | |||
"net/http" | |||
"code.gitea.io/gitea/models/perm" | |||
access_model "code.gitea.io/gitea/models/perm/access" | |||
repo_model "code.gitea.io/gitea/models/repo" | |||
unit_model "code.gitea.io/gitea/models/unit" | |||
user_model "code.gitea.io/gitea/models/user" | |||
api "code.gitea.io/gitea/modules/structs" | |||
"code.gitea.io/gitea/routers/api/v1/utils" | |||
@@ -44,7 +42,7 @@ func listUserRepos(ctx *context.APIContext, u *user_model.User, private bool) { | |||
ctx.Error(http.StatusInternalServerError, "GetUserRepoPermission", err) | |||
return | |||
} | |||
if ctx.IsSigned && ctx.Doer.IsAdmin || permission.UnitAccessMode(unit_model.TypeCode) >= perm.AccessModeRead { | |||
if ctx.IsSigned && ctx.Doer.IsAdmin || permission.HasAnyUnitAccess() { | |||
apiRepos = append(apiRepos, convert.ToRepo(ctx, repos[i], permission)) | |||
} | |||
} |
@@ -117,16 +117,14 @@ func HookPostReceive(ctx *gitea_context.PrivateContext) { | |||
} | |||
} | |||
if len(branchesToSync) > 0 { | |||
if gitRepo == nil { | |||
var err error | |||
gitRepo, err = gitrepo.OpenRepository(ctx, repo) | |||
if err != nil { | |||
log.Error("Failed to open repository: %s/%s Error: %v", ownerName, repoName, err) | |||
ctx.JSON(http.StatusInternalServerError, private.HookPostReceiveResult{ | |||
Err: fmt.Sprintf("Failed to open repository: %s/%s Error: %v", ownerName, repoName, err), | |||
}) | |||
return | |||
} | |||
var err error | |||
gitRepo, err = gitrepo.OpenRepository(ctx, repo) | |||
if err != nil { | |||
log.Error("Failed to open repository: %s/%s Error: %v", ownerName, repoName, err) | |||
ctx.JSON(http.StatusInternalServerError, private.HookPostReceiveResult{ | |||
Err: fmt.Sprintf("Failed to open repository: %s/%s Error: %v", ownerName, repoName, err), | |||
}) | |||
return | |||
} | |||
var ( |
@@ -121,9 +121,9 @@ func HookPreReceive(ctx *gitea_context.PrivateContext) { | |||
case refFullName.IsBranch(): | |||
preReceiveBranch(ourCtx, oldCommitID, newCommitID, refFullName) | |||
case refFullName.IsTag(): | |||
preReceiveTag(ourCtx, oldCommitID, newCommitID, refFullName) | |||
preReceiveTag(ourCtx, refFullName) | |||
case git.DefaultFeatures.SupportProcReceive && refFullName.IsFor(): | |||
preReceiveFor(ourCtx, oldCommitID, newCommitID, refFullName) | |||
preReceiveFor(ourCtx, refFullName) | |||
default: | |||
ourCtx.AssertCanWriteCode() | |||
} | |||
@@ -368,7 +368,7 @@ func preReceiveBranch(ctx *preReceiveContext, oldCommitID, newCommitID string, r | |||
} | |||
} | |||
func preReceiveTag(ctx *preReceiveContext, oldCommitID, newCommitID string, refFullName git.RefName) { | |||
func preReceiveTag(ctx *preReceiveContext, refFullName git.RefName) { | |||
if !ctx.AssertCanWriteCode() { | |||
return | |||
} | |||
@@ -404,7 +404,7 @@ func preReceiveTag(ctx *preReceiveContext, oldCommitID, newCommitID string, refF | |||
} | |||
} | |||
func preReceiveFor(ctx *preReceiveContext, oldCommitID, newCommitID string, refFullName git.RefName) { | |||
func preReceiveFor(ctx *preReceiveContext, refFullName git.RefName) { | |||
if !ctx.AssertCreatePullRequest() { | |||
return | |||
} |
@@ -159,7 +159,7 @@ func DashboardPost(ctx *context.Context) { | |||
switch form.Op { | |||
case "sync_repo_branches": | |||
go func() { | |||
if err := repo_service.AddAllRepoBranchesToSyncQueue(graceful.GetManager().ShutdownContext(), ctx.Doer.ID); err != nil { | |||
if err := repo_service.AddAllRepoBranchesToSyncQueue(graceful.GetManager().ShutdownContext()); err != nil { | |||
log.Error("AddAllRepoBranchesToSyncQueue: %v: %v", ctx.Doer.ID, err) | |||
} | |||
}() |
@@ -470,8 +470,9 @@ func AuthorizeOAuth(ctx *context.Context) { | |||
return | |||
} | |||
// Redirect if user already granted access | |||
if grant != nil { | |||
// Redirect if user already granted access and the application is confidential. | |||
// I.e. always require authorization for public clients as recommended by RFC 6749 Section 10.2 | |||
if app.ConfidentialClient && grant != nil { | |||
code, err := grant.GenerateNewAuthorizationCode(ctx, form.RedirectURI, form.CodeChallenge, form.CodeChallengeMethod) | |||
if err != nil { | |||
handleServerError(ctx, form.State, form.RedirectURI) |
@@ -279,7 +279,7 @@ func GetFeedType(name string, req *http.Request) (bool, string, string) { | |||
} | |||
// releasesToFeedItems converts a repository's releases to feed items | |||
func releasesToFeedItems(ctx *context.Context, releases []*repo_model.Release, isReleasesOnly bool) (items []*feeds.Item, err error) { | |||
func releasesToFeedItems(ctx *context.Context, releases []*repo_model.Release) (items []*feeds.Item, err error) { | |||
for _, rel := range releases { | |||
err := rel.LoadAttributes(ctx) | |||
if err != nil { |
@@ -42,7 +42,7 @@ func ShowReleaseFeed(ctx *context.Context, repo *repo_model.Repository, isReleas | |||
Created: time.Now(), | |||
} | |||
feed.Items, err = releasesToFeedItems(ctx, releases, isReleasesOnly) | |||
feed.Items, err = releasesToFeedItems(ctx, releases) | |||
if err != nil { | |||
ctx.ServerError("releasesToFeedItems", err) | |||
return |
@@ -419,11 +419,9 @@ func DiffPreviewPost(ctx *context.Context) { | |||
return | |||
} | |||
if diff.NumFiles == 0 { | |||
ctx.PlainText(http.StatusOK, ctx.Locale.TrString("repo.editor.no_changes_to_show")) | |||
return | |||
if diff.NumFiles != 0 { | |||
ctx.Data["File"] = diff.Files[0] | |||
} | |||
ctx.Data["File"] = diff.Files[0] | |||
ctx.HTML(http.StatusOK, tplEditDiffPreview) | |||
} |
@@ -2177,7 +2177,10 @@ func GetIssueInfo(ctx *context.Context) { | |||
} | |||
} | |||
ctx.JSON(http.StatusOK, convert.ToIssue(ctx, ctx.Doer, issue)) | |||
ctx.JSON(http.StatusOK, map[string]any{ | |||
"convertedIssue": convert.ToIssue(ctx, ctx.Doer, issue), | |||
"renderedLabels": templates.RenderLabels(ctx, ctx.Locale, issue.Labels, ctx.Repo.RepoLink, issue), | |||
}) | |||
} | |||
// UpdateIssueTitle change issue's title |
@@ -17,6 +17,16 @@ import ( | |||
const tplSearch base.TplName = "repo/search" | |||
func indexSettingToGitGrepPathspecList() (list []string) { | |||
for _, expr := range setting.Indexer.IncludePatterns { | |||
list = append(list, ":(glob)"+expr.PatternString()) | |||
} | |||
for _, expr := range setting.Indexer.ExcludePatterns { | |||
list = append(list, ":(glob,exclude)"+expr.PatternString()) | |||
} | |||
return list | |||
} | |||
// Search render repository search page | |||
func Search(ctx *context.Context) { | |||
language := ctx.FormTrim("l") | |||
@@ -28,6 +38,7 @@ func Search(ctx *context.Context) { | |||
ctx.Data["Language"] = language | |||
ctx.Data["IsFuzzy"] = isFuzzy | |||
ctx.Data["PageIsViewCode"] = true | |||
ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled | |||
if keyword == "" { | |||
ctx.HTML(http.StatusOK, tplSearch) | |||
@@ -64,8 +75,14 @@ func Search(ctx *context.Context) { | |||
ctx.Data["CodeIndexerUnavailable"] = !code_indexer.IsAvailable(ctx) | |||
} | |||
} else { | |||
res, err := git.GrepSearch(ctx, ctx.Repo.GitRepo, keyword, git.GrepOptions{ContextLineNumber: 3, IsFuzzy: isFuzzy}) | |||
res, err := git.GrepSearch(ctx, ctx.Repo.GitRepo, keyword, git.GrepOptions{ | |||
ContextLineNumber: 1, | |||
IsFuzzy: isFuzzy, | |||
RefName: git.RefNameFromBranch(ctx.Repo.BranchName).String(), // BranchName should be default branch or the first existing branch | |||
PathspecList: indexSettingToGitGrepPathspecList(), | |||
}) | |||
if err != nil { | |||
// TODO: if no branch exists, it reports: exit status 128, fatal: this operation must be run in a work tree. | |||
ctx.ServerError("GrepSearch", err) | |||
return | |||
} | |||
@@ -86,7 +103,6 @@ func Search(ctx *context.Context) { | |||
} | |||
} | |||
ctx.Data["CodeIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled | |||
ctx.Data["Repo"] = ctx.Repo.Repository | |||
ctx.Data["SearchResults"] = searchResults | |||
ctx.Data["SearchResultLanguages"] = searchResultLanguages |
@@ -0,0 +1,19 @@ | |||
// Copyright 2024 The Gitea Authors. All rights reserved. | |||
// SPDX-License-Identifier: MIT | |||
package repo | |||
import ( | |||
"testing" | |||
"code.gitea.io/gitea/modules/setting" | |||
"code.gitea.io/gitea/modules/test" | |||
"github.com/stretchr/testify/assert" | |||
) | |||
func TestIndexSettingToGitGrepPathspecList(t *testing.T) { | |||
defer test.MockVariableValue(&setting.Indexer.IncludePatterns, setting.IndexerGlobFromString("a"))() | |||
defer test.MockVariableValue(&setting.Indexer.ExcludePatterns, setting.IndexerGlobFromString("b"))() | |||
assert.Equal(t, []string{":(glob)a", ":(glob,exclude)b"}, indexSettingToGitGrepPathspecList()) | |||
} |
@@ -65,7 +65,7 @@ func SettingsCtxData(ctx *context.Context) { | |||
signing, _ := asymkey_service.SigningKey(ctx, ctx.Repo.Repository.RepoPath()) | |||
ctx.Data["SigningKeyAvailable"] = len(signing) > 0 | |||
ctx.Data["SigningSettings"] = setting.Repository.Signing | |||
ctx.Data["CodeIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled | |||
ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled | |||
if ctx.Doer.IsAdmin { | |||
if setting.Indexer.RepoIndexerEnabled { | |||
@@ -110,7 +110,7 @@ func SettingsPost(ctx *context.Context) { | |||
signing, _ := asymkey_service.SigningKey(ctx, ctx.Repo.Repository.RepoPath()) | |||
ctx.Data["SigningKeyAvailable"] = len(signing) > 0 | |||
ctx.Data["SigningSettings"] = setting.Repository.Signing | |||
ctx.Data["CodeIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled | |||
ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled | |||
repo := ctx.Repo.Repository | |||
@@ -54,7 +54,7 @@ import ( | |||
"github.com/prometheus/client_golang/prometheus" | |||
) | |||
const GzipMinSize = 1400 // min size to compress for the body size of response | |||
var GzipMinSize = 1400 // min size to compress for the body size of response | |||
// optionsCorsHandler return a http handler which sets CORS options if enabled by config, it blocks non-CORS OPTIONS requests. | |||
func optionsCorsHandler() func(next http.Handler) http.Handler { |
@@ -182,7 +182,7 @@ func createProvider(providerName string, source *Source) (goth.Provider, error) | |||
} | |||
// always set the name if provider is created so we can support multiple setups of 1 provider | |||
if err == nil && provider != nil { | |||
if provider != nil { | |||
provider.SetName(providerName) | |||
} | |||
@@ -234,9 +234,7 @@ func (b *Base) plainTextInternal(skip, status int, bs []byte) { | |||
b.Resp.Header().Set("Content-Type", "text/plain;charset=utf-8") | |||
b.Resp.Header().Set("X-Content-Type-Options", "nosniff") | |||
b.Resp.WriteHeader(status) | |||
if _, err := b.Resp.Write(bs); err != nil { | |||
log.ErrorWithSkip(skip, "plainTextInternal (status=%d): write bytes failed: %v", status, err) | |||
} | |||
_, _ = b.Resp.Write(bs) | |||
} | |||
// PlainTextBytes renders bytes as plain text |
@@ -13,6 +13,7 @@ import ( | |||
"path" | |||
"strconv" | |||
"strings" | |||
"syscall" | |||
"time" | |||
user_model "code.gitea.io/gitea/models/user" | |||
@@ -77,7 +78,7 @@ func (ctx *Context) HTML(status int, name base.TplName) { | |||
} | |||
err := ctx.Render.HTML(ctx.Resp, status, string(name), ctx.Data, ctx.TemplateContext) | |||
if err == nil { | |||
if err == nil || errors.Is(err, syscall.EPIPE) { | |||
return | |||
} | |||
@@ -787,7 +787,7 @@ func (rt RepoRefType) RefTypeIncludesTags() bool { | |||
return false | |||
} | |||
func getRefNameFromPath(ctx *Base, repo *Repository, path string, isExist func(string) bool) string { | |||
func getRefNameFromPath(repo *Repository, path string, isExist func(string) bool) string { | |||
refName := "" | |||
parts := strings.Split(path, "/") | |||
for i, part := range parts { | |||
@@ -823,7 +823,7 @@ func getRefName(ctx *Base, repo *Repository, pathType RepoRefType) string { | |||
repo.TreePath = path | |||
return repo.Repository.DefaultBranch | |||
case RepoRefBranch: | |||
ref := getRefNameFromPath(ctx, repo, path, repo.GitRepo.IsBranchExist) | |||
ref := getRefNameFromPath(repo, path, repo.GitRepo.IsBranchExist) | |||
if len(ref) == 0 { | |||
// check if ref is HEAD | |||
parts := strings.Split(path, "/") | |||
@@ -833,7 +833,7 @@ func getRefName(ctx *Base, repo *Repository, pathType RepoRefType) string { | |||
} | |||
// maybe it's a renamed branch | |||
return getRefNameFromPath(ctx, repo, path, func(s string) bool { | |||
return getRefNameFromPath(repo, path, func(s string) bool { | |||
b, exist, err := git_model.FindRenamedBranch(ctx, repo.Repository.ID, s) | |||
if err != nil { | |||
log.Error("FindRenamedBranch: %v", err) | |||
@@ -853,7 +853,7 @@ func getRefName(ctx *Base, repo *Repository, pathType RepoRefType) string { | |||
return ref | |||
case RepoRefTag: | |||
return getRefNameFromPath(ctx, repo, path, repo.GitRepo.IsTagExist) | |||
return getRefNameFromPath(repo, path, repo.GitRepo.IsTagExist) | |||
case RepoRefCommit: | |||
parts := strings.Split(path, "/") | |||
@@ -11,6 +11,7 @@ import ( | |||
"strings" | |||
"time" | |||
actions_model "code.gitea.io/gitea/models/actions" | |||
asymkey_model "code.gitea.io/gitea/models/asymkey" | |||
"code.gitea.io/gitea/models/auth" | |||
git_model "code.gitea.io/gitea/models/git" | |||
@@ -24,6 +25,7 @@ import ( | |||
"code.gitea.io/gitea/modules/container" | |||
"code.gitea.io/gitea/modules/git" | |||
"code.gitea.io/gitea/modules/log" | |||
"code.gitea.io/gitea/modules/setting" | |||
api "code.gitea.io/gitea/modules/structs" | |||
"code.gitea.io/gitea/modules/util" | |||
"code.gitea.io/gitea/services/gitdiff" | |||
@@ -193,6 +195,31 @@ func ToTag(repo *repo_model.Repository, t *git.Tag) *api.Tag { | |||
} | |||
} | |||
// ToActionTask converts an actions_model.ActionTask to an api.ActionTask | |||
func ToActionTask(ctx context.Context, t *actions_model.ActionTask) (*api.ActionTask, error) { | |||
if err := t.LoadAttributes(ctx); err != nil { | |||
return nil, err | |||
} | |||
url := strings.TrimSuffix(setting.AppURL, "/") + t.GetRunLink() | |||
return &api.ActionTask{ | |||
ID: t.ID, | |||
Name: t.Job.Name, | |||
HeadBranch: t.Job.Run.PrettyRef(), | |||
HeadSHA: t.Job.CommitSHA, | |||
RunNumber: t.Job.Run.Index, | |||
Event: t.Job.Run.TriggerEvent, | |||
DisplayTitle: t.Job.Run.Title, | |||
Status: t.Status.String(), | |||
WorkflowID: t.Job.Run.WorkflowID, | |||
URL: url, | |||
CreatedAt: t.Created.AsLocalTime(), | |||
UpdatedAt: t.Updated.AsLocalTime(), | |||
RunStartedAt: t.Started.AsLocalTime(), | |||
}, nil | |||
} | |||
// ToVerification converts a git.Commit.Signature to an api.PayloadCommitVerification | |||
func ToVerification(ctx context.Context, c *git.Commit) *api.PayloadCommitVerification { | |||
verif := asymkey_model.ParseCommitWithSignature(ctx, c) |
@@ -211,13 +211,11 @@ func ToLabel(label *issues_model.Label, repo *repo_model.Repository, org *user_m | |||
IsArchived: label.IsArchived(), | |||
} | |||
labelBelongsToRepo := label.BelongsToRepo() | |||
// calculate URL | |||
if label.BelongsToRepo() && repo != nil { | |||
if repo != nil { | |||
result.URL = fmt.Sprintf("%s/labels/%d", repo.APIURL(), label.ID) | |||
} else { | |||
log.Error("ToLabel did not get repo to calculate url for label with id '%d'", label.ID) | |||
} | |||
if labelBelongsToRepo && repo != nil { | |||
result.URL = fmt.Sprintf("%s/labels/%d", repo.APIURL(), label.ID) | |||
} else { // BelongsToOrg | |||
if org != nil { | |||
result.URL = fmt.Sprintf("%sapi/v1/orgs/%s/labels/%d", setting.AppURL, url.PathEscape(org.Name), label.ID) | |||
@@ -226,6 +224,10 @@ func ToLabel(label *issues_model.Label, repo *repo_model.Repository, org *user_m | |||
} | |||
} | |||
if labelBelongsToRepo && repo == nil { | |||
log.Error("ToLabel did not get repo to calculate url for label with id '%d'", label.ID) | |||
} | |||
return result | |||
} | |||
@@ -27,7 +27,7 @@ type commonStorageCheckOptions struct { | |||
name string | |||
} | |||
func commonCheckStorage(ctx context.Context, logger log.Logger, autofix bool, opts *commonStorageCheckOptions) error { | |||
func commonCheckStorage(logger log.Logger, autofix bool, opts *commonStorageCheckOptions) error { | |||
totalCount, orphanedCount := 0, 0 | |||
totalSize, orphanedSize := int64(0), int64(0) | |||
@@ -98,7 +98,7 @@ func checkStorage(opts *checkStorageOptions) func(ctx context.Context, logger lo | |||
} | |||
if opts.Attachments || opts.All { | |||
if err := commonCheckStorage(ctx, logger, autofix, | |||
if err := commonCheckStorage(logger, autofix, | |||
&commonStorageCheckOptions{ | |||
storer: storage.Attachments, | |||
isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) { | |||
@@ -116,7 +116,7 @@ func checkStorage(opts *checkStorageOptions) func(ctx context.Context, logger lo | |||
logger.Info("LFS isn't enabled (skipped)") | |||
return nil | |||
} | |||
if err := commonCheckStorage(ctx, logger, autofix, | |||
if err := commonCheckStorage(logger, autofix, | |||
&commonStorageCheckOptions{ | |||
storer: storage.LFS, | |||
isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) { | |||
@@ -132,7 +132,7 @@ func checkStorage(opts *checkStorageOptions) func(ctx context.Context, logger lo | |||
} | |||
if opts.Avatars || opts.All { | |||
if err := commonCheckStorage(ctx, logger, autofix, | |||
if err := commonCheckStorage(logger, autofix, | |||
&commonStorageCheckOptions{ | |||
storer: storage.Avatars, | |||
isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) { | |||
@@ -146,7 +146,7 @@ func checkStorage(opts *checkStorageOptions) func(ctx context.Context, logger lo | |||
} | |||
if opts.RepoAvatars || opts.All { | |||
if err := commonCheckStorage(ctx, logger, autofix, | |||
if err := commonCheckStorage(logger, autofix, | |||
&commonStorageCheckOptions{ | |||
storer: storage.RepoAvatars, | |||
isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) { | |||
@@ -160,7 +160,7 @@ func checkStorage(opts *checkStorageOptions) func(ctx context.Context, logger lo | |||
} | |||
if opts.RepoArchives || opts.All { | |||
if err := commonCheckStorage(ctx, logger, autofix, | |||
if err := commonCheckStorage(logger, autofix, | |||
&commonStorageCheckOptions{ | |||
storer: storage.RepoArchives, | |||
isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) { | |||
@@ -182,7 +182,7 @@ func checkStorage(opts *checkStorageOptions) func(ctx context.Context, logger lo | |||
logger.Info("Packages isn't enabled (skipped)") | |||
return nil | |||
} | |||
if err := commonCheckStorage(ctx, logger, autofix, | |||
if err := commonCheckStorage(logger, autofix, | |||
&commonStorageCheckOptions{ | |||
storer: storage.Packages, | |||
isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) { |
@@ -289,8 +289,8 @@ func composeIssueCommentMessages(ctx *mailCommentContext, lang string, recipient | |||
} | |||
// Make sure to compose independent messages to avoid leaking user emails | |||
msgID := createReference(ctx.Issue, ctx.Comment, ctx.ActionType) | |||
reference := createReference(ctx.Issue, nil, activities_model.ActionType(0)) | |||
msgID := generateMessageIDForIssue(ctx.Issue, ctx.Comment, ctx.ActionType) | |||
reference := generateMessageIDForIssue(ctx.Issue, nil, activities_model.ActionType(0)) | |||
var replyPayload []byte | |||
if ctx.Comment != nil { | |||
@@ -362,7 +362,7 @@ func composeIssueCommentMessages(ctx *mailCommentContext, lang string, recipient | |||
return msgs, nil | |||
} | |||
func createReference(issue *issues_model.Issue, comment *issues_model.Comment, actionType activities_model.ActionType) string { | |||
func generateMessageIDForIssue(issue *issues_model.Issue, comment *issues_model.Comment, actionType activities_model.ActionType) string { | |||
var path string | |||
if issue.IsPull { | |||
path = "pulls" | |||
@@ -389,6 +389,10 @@ func createReference(issue *issues_model.Issue, comment *issues_model.Comment, a | |||
return fmt.Sprintf("<%s/%s/%d%s@%s>", issue.Repo.FullName(), path, issue.Index, extra, setting.Domain) | |||
} | |||
func generateMessageIDForRelease(release *repo_model.Release) string { | |||
return fmt.Sprintf("<%s/releases/%d@%s>", release.Repo.FullName(), release.ID, setting.Domain) | |||
} | |||
func generateAdditionalHeaders(ctx *mailCommentContext, reason string, recipient *user_model.User) map[string]string { | |||
repo := ctx.Issue.Repo | |||
@@ -86,11 +86,11 @@ func mailNewRelease(ctx context.Context, lang string, tos []string, rel *repo_mo | |||
msgs := make([]*Message, 0, len(tos)) | |||
publisherName := rel.Publisher.DisplayName() | |||
relURL := "<" + rel.HTMLURL() + ">" | |||
msgID := generateMessageIDForRelease(rel) | |||
for _, to := range tos { | |||
msg := NewMessageFrom(to, publisherName, setting.MailService.FromEmail, subject, mailBody.String()) | |||
msg.Info = subject | |||
msg.SetHeader("Message-ID", relURL) | |||
msg.SetHeader("Message-ID", msgID) | |||
msgs = append(msgs, msg) | |||
} | |||
@@ -288,7 +288,7 @@ func TestGenerateAdditionalHeaders(t *testing.T) { | |||
} | |||
} | |||
func Test_createReference(t *testing.T) { | |||
func TestGenerateMessageIDForIssue(t *testing.T) { | |||
_, _, issue, comment := prepareMailerTest(t) | |||
_, _, pullIssue, _ := prepareMailerTest(t) | |||
pullIssue.IsPull = true | |||
@@ -388,10 +388,18 @@ func Test_createReference(t *testing.T) { | |||
} | |||
for _, tt := range tests { | |||
t.Run(tt.name, func(t *testing.T) { | |||
got := createReference(tt.args.issue, tt.args.comment, tt.args.actionType) | |||
got := generateMessageIDForIssue(tt.args.issue, tt.args.comment, tt.args.actionType) | |||
if !strings.HasPrefix(got, tt.prefix) { | |||
t.Errorf("createReference() = %v, want %v", got, tt.prefix) | |||
t.Errorf("generateMessageIDForIssue() = %v, want %v", got, tt.prefix) | |||
} | |||
}) | |||
} | |||
} | |||
func TestGenerateMessageIDForRelease(t *testing.T) { | |||
msgID := generateMessageIDForRelease(&repo_model.Release{ | |||
ID: 1, | |||
Repo: &repo_model.Repository{OwnerName: "owner", Name: "repo"}, | |||
}) | |||
assert.Equal(t, "<owner/repo/releases/1@localhost>", msgID) | |||
} |
@@ -977,25 +977,24 @@ func (g *GiteaLocalUploader) Finish() error { | |||
} | |||
func (g *GiteaLocalUploader) remapUser(source user_model.ExternalUserMigrated, target user_model.ExternalUserRemappable) error { | |||
var userid int64 | |||
var userID int64 | |||
var err error | |||
if g.sameApp { | |||
userid, err = g.remapLocalUser(source, target) | |||
userID, err = g.remapLocalUser(source) | |||
} else { | |||
userid, err = g.remapExternalUser(source, target) | |||
userID, err = g.remapExternalUser(source) | |||
} | |||
if err != nil { | |||
return err | |||
} | |||
if userid > 0 { | |||
return target.RemapExternalUser("", 0, userid) | |||
if userID > 0 { | |||
return target.RemapExternalUser("", 0, userID) | |||
} | |||
return target.RemapExternalUser(source.GetExternalName(), source.GetExternalID(), g.doer.ID) | |||
} | |||
func (g *GiteaLocalUploader) remapLocalUser(source user_model.ExternalUserMigrated, target user_model.ExternalUserRemappable) (int64, error) { | |||
func (g *GiteaLocalUploader) remapLocalUser(source user_model.ExternalUserMigrated) (int64, error) { | |||
userid, ok := g.userMap[source.GetExternalID()] | |||
if !ok { | |||
name, err := user_model.GetUserNameByID(g.ctx, source.GetExternalID()) | |||
@@ -1013,7 +1012,7 @@ func (g *GiteaLocalUploader) remapLocalUser(source user_model.ExternalUserMigrat | |||
return userid, nil | |||
} | |||
func (g *GiteaLocalUploader) remapExternalUser(source user_model.ExternalUserMigrated, target user_model.ExternalUserRemappable) (userid int64, err error) { | |||
func (g *GiteaLocalUploader) remapExternalUser(source user_model.ExternalUserMigrated) (userid int64, err error) { | |||
userid, ok := g.userMap[source.GetExternalID()] | |||
if !ok { | |||
userid, err = user_model.GetUserIDByExternalUserID(g.ctx, g.gitServiceType.Name(), fmt.Sprintf("%d", source.GetExternalID())) |
@@ -90,7 +90,7 @@ func Update(ctx context.Context, pullLimit, pushLimit int) error { | |||
pullMirrorsRequested := 0 | |||
if pullLimit != 0 { | |||
if err := repo_model.MirrorsIterate(ctx, pullLimit, func(idx int, bean any) error { | |||
if err := repo_model.MirrorsIterate(ctx, pullLimit, func(_ int, bean any) error { | |||
if err := handler(bean); err != nil { | |||
return err | |||
} |
@@ -49,7 +49,7 @@ var ErrSubmitReviewOnClosedPR = errors.New("can't submit review for a closed or | |||
// checkInvalidation checks if the line of code comment got changed by another commit. | |||
// If the line got changed the comment is going to be invalidated. | |||
func checkInvalidation(ctx context.Context, c *issues_model.Comment, doer *user_model.User, repo *git.Repository, branch string) error { | |||
func checkInvalidation(ctx context.Context, c *issues_model.Comment, repo *git.Repository, branch string) error { | |||
// FIXME differentiate between previous and proposed line | |||
commit, err := repo.LineBlame(branch, repo.Path, c.TreePath, uint(c.UnsignedLine())) | |||
if err != nil && (strings.Contains(err.Error(), "fatal: no such path") || notEnoughLines.MatchString(err.Error())) { | |||
@@ -83,7 +83,7 @@ func InvalidateCodeComments(ctx context.Context, prs issues_model.PullRequestLis | |||
return fmt.Errorf("find code comments: %v", err) | |||
} | |||
for _, comment := range codeComments { | |||
if err := checkInvalidation(ctx, comment, doer, repo, branch); err != nil { | |||
if err := checkInvalidation(ctx, comment, repo, branch); err != nil { | |||
return err | |||
} | |||
} |
@@ -39,7 +39,7 @@ func Update(ctx context.Context, pr *issues_model.PullRequest, doer *user_model. | |||
go AddTestPullRequestTask(doer, pr.BaseRepo.ID, pr.BaseBranch, false, "", "") | |||
}() | |||
return updateHeadByRebaseOnToBase(ctx, pr, doer, message) | |||
return updateHeadByRebaseOnToBase(ctx, pr, doer) | |||
} | |||
if err := pr.LoadBaseRepo(ctx); err != nil { |
@@ -18,7 +18,7 @@ import ( | |||
) | |||
// updateHeadByRebaseOnToBase handles updating a PR's head branch by rebasing it onto the PR's current base branch | |||
func updateHeadByRebaseOnToBase(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, message string) error { | |||
func updateHeadByRebaseOnToBase(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User) error { | |||
// "Clone" base repo and add the cache headers for the head repo and branch | |||
mergeCtx, cancel, err := createTemporaryRepoForMerge(ctx, pr, doer, "") | |||
if err != nil { |
@@ -80,7 +80,7 @@ func AdoptRepository(ctx context.Context, doer, u *user_model.User, opts CreateR | |||
return fmt.Errorf("getRepositoryByID: %w", err) | |||
} | |||
if err := adoptRepository(ctx, repoPath, doer, repo, opts.DefaultBranch); err != nil { | |||
if err := adoptRepository(ctx, repoPath, repo, opts.DefaultBranch); err != nil { | |||
return fmt.Errorf("createDelegateHooks: %w", err) | |||
} | |||
@@ -111,7 +111,7 @@ func AdoptRepository(ctx context.Context, doer, u *user_model.User, opts CreateR | |||
return repo, nil | |||
} | |||
func adoptRepository(ctx context.Context, repoPath string, u *user_model.User, repo *repo_model.Repository, defaultBranch string) (err error) { | |||
func adoptRepository(ctx context.Context, repoPath string, repo *repo_model.Repository, defaultBranch string) (err error) { | |||
isExist, err := util.IsExist(repoPath) | |||
if err != nil { | |||
log.Error("Unable to check if %s exists. Error: %v", repoPath, err) |
@@ -527,7 +527,7 @@ func handlerBranchSync(items ...*BranchSyncOptions) []*BranchSyncOptions { | |||
return nil | |||
} | |||
func addRepoToBranchSyncQueue(repoID, doerID int64) error { | |||
func addRepoToBranchSyncQueue(repoID int64) error { | |||
return branchSyncQueue.Push(&BranchSyncOptions{ | |||
RepoID: repoID, | |||
}) | |||
@@ -543,9 +543,9 @@ func initBranchSyncQueue(ctx context.Context) error { | |||
return nil | |||
} | |||
func AddAllRepoBranchesToSyncQueue(ctx context.Context, doerID int64) error { | |||
func AddAllRepoBranchesToSyncQueue(ctx context.Context) error { | |||
if err := db.Iterate(ctx, builder.Eq{"is_empty": false}, func(ctx context.Context, repo *repo_model.Repository) error { | |||
return addRepoToBranchSyncQueue(repo.ID, doerID) | |||
return addRepoToBranchSyncQueue(repo.ID) | |||
}); err != nil { | |||
return fmt.Errorf("run sync all branches failed: %v", err) | |||
} |
@@ -211,7 +211,7 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use | |||
} | |||
for _, file := range opts.Files { | |||
if err := handleCheckErrors(file, commit, opts, repo); err != nil { | |||
if err := handleCheckErrors(file, commit, opts); err != nil { | |||
return nil, err | |||
} | |||
} | |||
@@ -277,7 +277,7 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use | |||
} | |||
// handleCheckErrors performs the validation checks on each file for ChangeRepoFiles | |||
func handleCheckErrors(file *ChangeRepoFile, commit *git.Commit, opts *ChangeRepoFilesOptions, repo *repo_model.Repository) error { | |||
func handleCheckErrors(file *ChangeRepoFile, commit *git.Commit, opts *ChangeRepoFilesOptions) error { | |||
if file.Operation == "update" || file.Operation == "delete" { | |||
fromEntry, err := commit.GetTreeEntryByPath(file.Options.fromTreePath) | |||
if err != nil { |
@@ -35,7 +35,7 @@ func TestUpdateUser(t *testing.T) { | |||
Description: optional.Some("description"), | |||
AllowGitHook: optional.Some(true), | |||
AllowImportLocal: optional.Some(true), | |||
MaxRepoCreation: optional.Some[int](10), | |||
MaxRepoCreation: optional.Some(10), | |||
IsRestricted: optional.Some(true), | |||
IsActive: optional.Some(false), | |||
IsAdmin: optional.Some(true), |