Browse source code

Enable more `revive` linter rules (#30608)

Notable additions:

- `redefines-builtin-id` forbids variable names that shadow Go builtins
- `empty-lines` removes unnecessary empty lines that `gofumpt` does not remove for some reason
- `superfluous-else` eliminates more superfluous `else` branches

Rules are also sorted alphabetically and I cleaned up various parts of
`.golangci.yml`.
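
For illustration, here is a minimal, hypothetical Go sketch (not taken from the Gitea codebase; the identifiers are made up for the example) of the patterns the newly enabled rules reject, together with the preferred form the diffs below converge on:

package main

import (
	"errors"
	"fmt"
)

// redefines-builtin-id: a variable named after a builtin such as `len`, `new`
// or `close` shadows that builtin. The diffs below rename such variables,
// e.g. `len` -> `length`, `new` -> `newMetadata`, `close` -> `needToClose`.
func describe(values []int) (string, error) {
	length := len(values) // rather than: len := len(values)
	if length == 0 {
		return "", errors.New("empty input")
	}
	// superfluous-else: because the `if` branch above ends in a `return`,
	// wrapping the remaining code in an `else` would only add indentation,
	// so the `else` is dropped and its body outdented.
	// empty-lines: blank lines at the very start or end of a block are
	// flagged as well (none are left in this sketch).
	return fmt.Sprintf("%d values, first is %d", length, values[0]), nil
}

func main() {
	s, err := describe([]int{3, 1, 4})
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println(s)
}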
tags/v1.22.0-rc1
silverwind 3 weeks ago
parent
commit
74f0c84fa4
No account is linked to the contributor's email address
76 changed files with 133 additions and 188 deletions
  1. .golangci.yml (+32 -23)
  2. cmd/hook.go (+1 -1)
  3. models/asymkey/gpg_key_commit_verification.go (+0 -1)
  4. models/db/engine.go (+0 -1)
  5. models/issues/review.go (+0 -2)
  6. models/migrations/base/db.go (+0 -5)
  7. models/migrations/v1_11/v111.go (+0 -2)
  8. models/migrations/v1_20/v250.go (+2 -2)
  9. models/migrations/v1_6/v71.go (+0 -1)
  10. models/migrations/v1_9/v85.go (+0 -1)
  11. models/organization/team.go (+1 -2)
  12. models/project/board.go (+0 -2)
  13. models/repo/user_repo.go (+0 -1)
  14. models/user/user.go (+1 -2)
  15. modules/auth/password/password.go (+6 -6)
  16. modules/git/batch_reader.go (+3 -3)
  17. modules/git/commit_reader.go (+1 -2)
  18. modules/git/pipeline/lfs_nogogit.go (+0 -1)
  19. modules/git/repo_commit.go (+4 -4)
  20. modules/git/submodule.go (+0 -1)
  21. modules/indexer/code/bleve/bleve.go (+0 -2)
  22. modules/indexer/issues/elasticsearch/elasticsearch.go (+0 -1)
  23. modules/log/event_format.go (+0 -1)
  24. modules/markup/markdown/markdown_test.go (+0 -2)
  25. modules/packages/rubygems/marshal.go (+16 -16)
  26. modules/process/manager_stacktraces.go (+0 -1)
  27. modules/repository/temp.go (+0 -1)
  28. modules/setting/time.go (+1 -2)
  29. modules/templates/htmlrenderer.go (+2 -3)
  30. modules/templates/mailer.go (+1 -2)
  31. modules/util/util_test.go (+3 -3)
  32. routers/api/actions/artifacts.go (+0 -1)
  33. routers/api/packages/alpine/alpine.go (+2 -2)
  34. routers/api/packages/conan/conan.go (+2 -2)
  35. routers/api/packages/conda/conda.go (+2 -2)
  36. routers/api/packages/container/container.go (+3 -3)
  37. routers/api/packages/cran/cran.go (+2 -2)
  38. routers/api/packages/debian/debian.go (+2 -2)
  39. routers/api/packages/generic/generic.go (+2 -2)
  40. routers/api/packages/goproxy/goproxy.go (+2 -2)
  41. routers/api/packages/nuget/nuget.go (+2 -2)
  42. routers/api/packages/rpm/rpm.go (+2 -2)
  43. routers/api/packages/rubygems/rubygems.go (+2 -2)
  44. routers/api/v1/repo/issue.go (+0 -1)
  45. routers/api/v1/repo/mirror.go (+0 -1)
  46. routers/api/v1/repo/pull.go (+0 -2)
  47. routers/api/v1/repo/pull_review.go (+0 -1)
  48. routers/api/v1/repo/repo.go (+0 -1)
  49. routers/api/v1/repo/wiki.go (+0 -1)
  50. routers/private/hook_pre_receive.go (+0 -1)
  51. routers/web/repo/actions/view.go (+0 -1)
  52. routers/web/repo/issue.go (+0 -4)
  53. routers/web/repo/pull.go (+0 -3)
  54. routers/web/repo/pull_review.go (+0 -1)
  55. routers/web/repo/view.go (+0 -1)
  56. services/actions/notifier_helper.go (+5 -3)
  57. services/auth/source/ldap/source_sync.go (+0 -1)
  58. services/context/repo.go (+0 -2)
  59. services/gitdiff/gitdiff.go (+4 -5)
  60. services/issue/commit.go (+4 -5)
  61. services/markup/processorhelper_codepreview.go (+5 -4)
  62. services/migrations/gitea_downloader.go (+0 -1)
  63. services/migrations/gitlab.go (+0 -1)
  64. services/mirror/mirror_pull.go (+3 -4)
  65. services/pull/merge.go (+3 -3)
  66. services/pull/pull.go (+0 -1)
  67. services/repository/adopt.go (+0 -1)
  68. services/repository/contributors_graph.go (+0 -1)
  69. services/repository/files/update.go (+0 -2)
  70. services/user/delete.go (+0 -1)
  71. services/user/update_test.go (+3 -3)
  72. services/webhook/discord.go (+0 -2)
  73. services/webhook/matrix.go (+0 -1)
  74. tests/e2e/e2e_test.go (+4 -2)
  75. tests/integration/api_notification_test.go (+5 -5)
  76. tests/integration/pull_status_test.go (+0 -1)

.golangci.yml (+32 -23)

linters: linters:
enable-all: false
disable-all: true
fast: false
enable: enable:
- bidichk - bidichk
# - deadcode # deprecated - https://github.com/golangci/golangci-lint/issues/1841
- depguard - depguard
- dupl - dupl
- errcheck - errcheck
- forbidigo - forbidigo
- gocritic - gocritic
# - gocyclo # The cyclomatic complexety of a lot of functions is too high, we should refactor those another time.
- gofmt - gofmt
- gofumpt - gofumpt
- gosimple - gosimple
- nolintlint - nolintlint
- revive - revive
- staticcheck - staticcheck
# - structcheck # deprecated - https://github.com/golangci/golangci-lint/issues/1841
- stylecheck - stylecheck
- typecheck - typecheck
- unconvert - unconvert
- unused - unused
# - varcheck # deprecated - https://github.com/golangci/golangci-lint/issues/1841
- wastedassign - wastedassign
enable-all: false
disable-all: true
fast: false


run: run:
timeout: 10m timeout: 10m


output:
sort-results: true

linters-settings: linters-settings:
stylecheck: stylecheck:
checks: ["all", "-ST1005", "-ST1003"] checks: ["all", "-ST1005", "-ST1003"]
errorCode: 1 errorCode: 1
warningCode: 1 warningCode: 1
rules: rules:
- name: atomic
- name: bare-return
- name: blank-imports - name: blank-imports
- name: constant-logical-expr
- name: context-as-argument - name: context-as-argument
- name: context-keys-type - name: context-keys-type
- name: dot-imports - name: dot-imports
- name: duplicated-imports
- name: empty-lines
- name: error-naming
- name: error-return - name: error-return
- name: error-strings - name: error-strings
- name: error-naming
- name: errorf
- name: exported - name: exported
- name: identical-branches
- name: if-return - name: if-return
- name: increment-decrement - name: increment-decrement
- name: var-naming
- name: var-declaration
- name: indent-error-flow
- name: modifies-value-receiver
- name: package-comments - name: package-comments
- name: range - name: range
- name: receiver-naming - name: receiver-naming
- name: redefines-builtin-id
- name: string-of-int
- name: superfluous-else
- name: time-naming - name: time-naming
- name: unconditional-recursion
- name: unexported-return - name: unexported-return
- name: indent-error-flow
- name: errorf
- name: duplicated-imports
- name: modifies-value-receiver
- name: unreachable-code
- name: var-declaration
- name: var-naming
gofumpt: gofumpt:
extra-rules: true extra-rules: true
depguard: depguard:
max-issues-per-linter: 0 max-issues-per-linter: 0
max-same-issues: 0 max-same-issues: 0
exclude-dirs: [node_modules, public, web_src] exclude-dirs: [node_modules, public, web_src]
exclude-case-sensitive: true
exclude-rules: exclude-rules:
# Exclude some linters from running on tests files.
- path: _test\.go - path: _test\.go
linters: linters:
- gocyclo - gocyclo
- path: cmd - path: cmd
linters: linters:
- forbidigo - forbidigo
- linters:
- text: "webhook"
linters:
- dupl - dupl
text: "webhook"
- linters:
- text: "`ID' should not be capitalized"
linters:
- gocritic - gocritic
text: "`ID' should not be capitalized"
- linters:
- text: "swagger"
linters:
- unused - unused
- deadcode - deadcode
text: "swagger"
- linters:
- text: "argument x is overwritten before first use"
linters:
- staticcheck - staticcheck
text: "argument x is overwritten before first use"
- text: "commentFormatting: put a space between `//` and comment text" - text: "commentFormatting: put a space between `//` and comment text"
linters: linters:
- gocritic - gocritic

cmd/hook.go (+1 -1)

fmt.Fprintf(os.Stderr, " %s\n", url) fmt.Fprintf(os.Stderr, " %s\n", url)
} }
fmt.Fprintln(os.Stderr, "") fmt.Fprintln(os.Stderr, "")
os.Stderr.Sync()
_ = os.Stderr.Sync()
} }


func pushOptions() map[string]string { func pushOptions() map[string]string {

models/asymkey/gpg_key_commit_verification.go (+0 -1)

Reason: "gpg.error.no_committer_account", Reason: "gpg.error.no_committer_account",
} }
} }

} }
} }



models/db/engine.go (+0 -1)

// Need to map provided names to beans... // Need to map provided names to beans...
beanMap := make(map[string]any) beanMap := make(map[string]any)
for _, bean := range tables { for _, bean := range tables {

beanMap[strings.ToLower(reflect.Indirect(reflect.ValueOf(bean)).Type().Name())] = bean beanMap[strings.ToLower(reflect.Indirect(reflect.ValueOf(bean)).Type().Name())] = bean
beanMap[strings.ToLower(x.TableName(bean))] = bean beanMap[strings.ToLower(x.TableName(bean))] = bean
beanMap[strings.ToLower(x.TableName(bean, true))] = bean beanMap[strings.ToLower(x.TableName(bean, true))] = bean

models/issues/review.go (+0 -2)

return nil, err return nil, err
} }
} }

} else if opts.ReviewerTeam != nil { } else if opts.ReviewerTeam != nil {
review.Type = ReviewTypeRequest review.Type = ReviewTypeRequest
review.ReviewerTeamID = opts.ReviewerTeam.ID review.ReviewerTeamID = opts.ReviewerTeam.ID

} else { } else {
return nil, fmt.Errorf("provide either reviewer or reviewer team") return nil, fmt.Errorf("provide either reviewer or reviewer team")
} }

models/migrations/base/db.go (+0 -5)

log.Error("Unable to recreate uniques on table %s. Error: %v", tableName, err) log.Error("Unable to recreate uniques on table %s. Error: %v", tableName, err)
return err return err
} }

case setting.Database.Type.IsMySQL(): case setting.Database.Type.IsMySQL():
// MySQL will drop all the constraints on the old table // MySQL will drop all the constraints on the old table
if _, err := sess.Exec(fmt.Sprintf("DROP TABLE `%s`", tableName)); err != nil { if _, err := sess.Exec(fmt.Sprintf("DROP TABLE `%s`", tableName)); err != nil {
return err return err
} }
sequenceMap[sequence] = sequenceData sequenceMap[sequence] = sequenceData

} }


// CASCADE causes postgres to drop all the constraints on the old table // CASCADE causes postgres to drop all the constraints on the old table
return err return err
} }
} }

} }

case setting.Database.Type.IsMSSQL(): case setting.Database.Type.IsMSSQL():
// MSSQL will drop all the constraints on the old table // MSSQL will drop all the constraints on the old table
if _, err := sess.Exec(fmt.Sprintf("DROP TABLE `%s`", tableName)); err != nil { if _, err := sess.Exec(fmt.Sprintf("DROP TABLE `%s`", tableName)); err != nil {
log.Error("Unable to rename %s to %s. Error: %v", tempTableName, tableName, err) log.Error("Unable to rename %s to %s. Error: %v", tempTableName, tableName, err)
return err return err
} }

default: default:
log.Fatal("Unrecognized DB") log.Fatal("Unrecognized DB")
} }

models/migrations/v1_11/v111.go (+0 -2)

for _, u := range units { for _, u := range units {
var found bool var found bool
for _, team := range teams { for _, team := range teams {

var teamU []*TeamUnit var teamU []*TeamUnit
var unitEnabled bool var unitEnabled bool
err = sess.Where("team_id = ?", team.ID).Find(&teamU) err = sess.Where("team_id = ?", team.ID).Find(&teamU)
} }


if !protectedBranch.EnableApprovalsWhitelist { if !protectedBranch.EnableApprovalsWhitelist {

perm, err := getUserRepoPermission(sess, baseRepo, reviewer) perm, err := getUserRepoPermission(sess, baseRepo, reviewer)
if err != nil { if err != nil {
return false, err return false, err

models/migrations/v1_20/v250.go (+2 -2)



// Convert to new metadata format // Convert to new metadata format


new := &MetadataNew{
newMetadata := &MetadataNew{
Type: old.Type, Type: old.Type,
IsTagged: old.IsTagged, IsTagged: old.IsTagged,
Platform: old.Platform, Platform: old.Platform,
Manifests: manifests, Manifests: manifests,
} }


metadataJSON, err := json.Marshal(new)
metadataJSON, err := json.Marshal(newMetadata)
if err != nil { if err != nil {
return err return err
} }

models/migrations/v1_6/v71.go (+0 -1)

if _, err := sess.ID(tfa.ID).Cols("scratch_salt, scratch_hash").Update(tfa); err != nil { if _, err := sess.ID(tfa.ID).Cols("scratch_salt, scratch_hash").Update(tfa); err != nil {
return fmt.Errorf("couldn't add in scratch_hash and scratch_salt: %w", err) return fmt.Errorf("couldn't add in scratch_hash and scratch_salt: %w", err)
} }

} }
} }



models/migrations/v1_9/v85.go (+0 -1)

if _, err := sess.ID(token.ID).Cols("token_hash, token_salt, token_last_eight, sha1").Update(token); err != nil { if _, err := sess.ID(token.ID).Cols("token_hash, token_salt, token_last_eight, sha1").Update(token); err != nil {
return fmt.Errorf("couldn't add in sha1, token_hash, token_salt and token_last_eight: %w", err) return fmt.Errorf("couldn't add in sha1, token_hash, token_salt and token_last_eight: %w", err)
} }

} }
} }



models/organization/team.go (+1 -2)

if err != nil { if err != nil {
if ignoreNonExistent { if ignoreNonExistent {
continue continue
} else {
return nil, err
} }
return nil, err
} }
ids = append(ids, u.ID) ids = append(ids, u.ID)
} }

models/project/board.go (+0 -2)

var items []string var items []string


switch project.BoardType { switch project.BoardType {

case BoardTypeBugTriage: case BoardTypeBugTriage:
items = setting.Project.ProjectBoardBugTriageType items = setting.Project.ProjectBoardBugTriageType


case BoardTypeBasicKanban: case BoardTypeBasicKanban:
items = setting.Project.ProjectBoardBasicKanbanType items = setting.Project.ProjectBoardBasicKanbanType

case BoardTypeNone: case BoardTypeNone:
fallthrough fallthrough
default: default:

models/repo/user_repo.go (+0 -1)

// the owner of a private repo needs to be explicitly added. // the owner of a private repo needs to be explicitly added.
cond = cond.Or(builder.Eq{"`user`.id": repo.Owner.ID}) cond = cond.Or(builder.Eq{"`user`.id": repo.Owner.ID})
} }

} else { } else {
// This is a "public" repository: // This is a "public" repository:
// Any user that has read access, is a watcher or organization member can be requested to review // Any user that has read access, is a watcher or organization member can be requested to review

models/user/user.go (+1 -2)

if err != nil { if err != nil {
if ignoreNonExistent { if ignoreNonExistent {
continue continue
} else {
return nil, err
} }
return nil, err
} }
ids = append(ids, u.ID) ids = append(ids, u.ID)
} }

modules/auth/password/password.go (+6 -6)

func setupComplexity(values []string) { func setupComplexity(values []string) {
if len(values) != 1 || values[0] != "off" { if len(values) != 1 || values[0] != "off" {
for _, val := range values { for _, val := range values {
if complex, ok := charComplexities[val]; ok {
validChars += complex.ValidChars
requiredList = append(requiredList, complex)
if complexity, ok := charComplexities[val]; ok {
validChars += complexity.ValidChars
requiredList = append(requiredList, complexity)
} }
} }
if len(requiredList) == 0 { if len(requiredList) == 0 {
// No valid character classes found; use all classes as default // No valid character classes found; use all classes as default
for _, complex := range charComplexities {
validChars += complex.ValidChars
requiredList = append(requiredList, complex)
for _, complexity := range charComplexities {
validChars += complexity.ValidChars
requiredList = append(requiredList, complexity)
} }
} }
} }

modules/git/batch_reader.go (+3 -3)



// Deal with the binary hash // Deal with the binary hash
idx = 0 idx = 0
len := objectFormat.FullLength() / 2
for idx < len {
length := objectFormat.FullLength() / 2
for idx < length {
var read int var read int
read, err = rd.Read(shaBuf[idx:len])
read, err = rd.Read(shaBuf[idx:length])
n += read n += read
if err != nil { if err != nil {
return mode, fname, sha, n, err return mode, fname, sha, n, err

modules/git/commit_reader.go (+1 -2)

if len(line) > 0 && line[0] == ' ' { if len(line) > 0 && line[0] == ' ' {
_, _ = signatureSB.Write(line[1:]) _, _ = signatureSB.Write(line[1:])
continue continue
} else {
pgpsig = false
} }
pgpsig = false
} }


if !message { if !message {

modules/git/pipeline/lfs_nogogit.go (+0 -1)

errChan <- err errChan <- err
break break
} }

} }
}() }()



modules/git/repo_commit.go (+4 -4)

return nil, err return nil, err
} }


len := objectFormat.FullLength()
length := objectFormat.FullLength()
commits := []*Commit{} commits := []*Commit{}
shaline := make([]byte, len+1)
shaline := make([]byte, length+1)
for { for {
n, err := io.ReadFull(stdoutReader, shaline) n, err := io.ReadFull(stdoutReader, shaline)
if err != nil || n < len {
if err != nil || n < length {
if err == io.EOF { if err == io.EOF {
err = nil err = nil
} }
return commits, err return commits, err
} }
objectID, err := NewIDFromString(string(shaline[0:len]))
objectID, err := NewIDFromString(string(shaline[0:length]))
if err != nil { if err != nil {
return nil, err return nil, err
} }

modules/git/submodule.go (+0 -1)

// ex: git@try.gitea.io:go-gitea/gitea // ex: git@try.gitea.io:go-gitea/gitea
match := scpSyntax.FindAllStringSubmatch(refURI, -1) match := scpSyntax.FindAllStringSubmatch(refURI, -1)
if len(match) > 0 { if len(match) > 0 {

m := match[0] m := match[0]
refHostname := m[2] refHostname := m[2]
pth := m[3] pth := m[3]

modules/indexer/code/bleve/bleve.go (+0 -2)

func (b *Indexer) Index(ctx context.Context, repo *repo_model.Repository, sha string, changes *internal.RepoChanges) error { func (b *Indexer) Index(ctx context.Context, repo *repo_model.Repository, sha string, changes *internal.RepoChanges) error {
batch := inner_bleve.NewFlushingBatch(b.inner.Indexer, maxBatchSize) batch := inner_bleve.NewFlushingBatch(b.inner.Indexer, maxBatchSize)
if len(changes.Updates) > 0 { if len(changes.Updates) > 0 {

// Now because of some insanity with git cat-file not immediately failing if not run in a valid git directory we need to run git rev-parse first! // Now because of some insanity with git cat-file not immediately failing if not run in a valid git directory we need to run git rev-parse first!
if err := git.EnsureValidGitRepository(ctx, repo.RepoPath()); err != nil { if err := git.EnsureValidGitRepository(ctx, repo.RepoPath()); err != nil {
log.Error("Unable to open git repo: %s for %-v: %v", repo.RepoPath(), repo, err) log.Error("Unable to open git repo: %s for %-v: %v", repo.RepoPath(), repo, err)
if result, err = b.inner.Indexer.Search(facetRequest); err != nil { if result, err = b.inner.Indexer.Search(facetRequest); err != nil {
return 0, nil, nil, err return 0, nil, nil, err
} }

} }
languagesFacet := result.Facets["languages"] languagesFacet := result.Facets["languages"]
for _, term := range languagesFacet.Terms.Terms() { for _, term := range languagesFacet.Terms.Terms() {

modules/indexer/issues/elasticsearch/elasticsearch.go (+0 -1)

query := elastic.NewBoolQuery() query := elastic.NewBoolQuery()


if options.Keyword != "" { if options.Keyword != "" {

searchType := esMultiMatchTypePhrasePrefix searchType := esMultiMatchTypePhrasePrefix
if options.IsFuzzyKeyword { if options.IsFuzzyKeyword {
searchType = esMultiMatchTypeBestFields searchType = esMultiMatchTypeBestFields

modules/log/event_format.go (+0 -1)

if mode.Colorize { if mode.Colorize {
buf = append(buf, resetBytes...) buf = append(buf, resetBytes...)
} }

} }
if flags&(Lshortfile|Llongfile) != 0 { if flags&(Lshortfile|Llongfile) != 0 {
if mode.Colorize { if mode.Colorize {

modules/markup/markdown/markdown_test.go (+0 -2)

res, err := markdown.RenderString(&markup.RenderContext{Ctx: git.DefaultContext}, test.testcase) res, err := markdown.RenderString(&markup.RenderContext{Ctx: git.DefaultContext}, test.testcase)
assert.NoError(t, err, "Unexpected error in testcase: %q", test.testcase) assert.NoError(t, err, "Unexpected error in testcase: %q", test.testcase)
assert.Equal(t, template.HTML(test.expected), res, "Unexpected result in testcase %q", test.testcase) assert.Equal(t, template.HTML(test.expected), res, "Unexpected result in testcase %q", test.testcase)

} }


negativeTests := []string{ negativeTests := []string{
res, err := markdown.RenderString(&markup.RenderContext{Ctx: git.DefaultContext}, test.testcase) res, err := markdown.RenderString(&markup.RenderContext{Ctx: git.DefaultContext}, test.testcase)
assert.NoError(t, err, "Unexpected error in testcase: %q", test.testcase) assert.NoError(t, err, "Unexpected error in testcase: %q", test.testcase)
assert.Equal(t, template.HTML(test.expected), res, "Unexpected result in testcase %q", test.testcase) assert.Equal(t, template.HTML(test.expected), res, "Unexpected result in testcase %q", test.testcase)

} }
} }



modules/packages/rubygems/marshal.go (+16 -16)

return e.w.WriteByte(byte(i - 5)) return e.w.WriteByte(byte(i - 5))
} }


var len int
var length int
if 122 < i && i <= 0xff { if 122 < i && i <= 0xff {
len = 1
length = 1
} else if 0xff < i && i <= 0xffff { } else if 0xff < i && i <= 0xffff {
len = 2
length = 2
} else if 0xffff < i && i <= 0xffffff { } else if 0xffff < i && i <= 0xffffff {
len = 3
length = 3
} else if 0xffffff < i && i <= 0x3fffffff { } else if 0xffffff < i && i <= 0x3fffffff {
len = 4
length = 4
} else if -0x100 <= i && i < -123 { } else if -0x100 <= i && i < -123 {
len = -1
length = -1
} else if -0x10000 <= i && i < -0x100 { } else if -0x10000 <= i && i < -0x100 {
len = -2
length = -2
} else if -0x1000000 <= i && i < -0x100000 { } else if -0x1000000 <= i && i < -0x100000 {
len = -3
length = -3
} else if -0x40000000 <= i && i < -0x1000000 { } else if -0x40000000 <= i && i < -0x1000000 {
len = -4
length = -4
} else { } else {
return ErrInvalidIntRange return ErrInvalidIntRange
} }


if err := e.w.WriteByte(byte(len)); err != nil {
if err := e.w.WriteByte(byte(length)); err != nil {
return err return err
} }
if len < 0 {
len = -len
if length < 0 {
length = -length
} }


for c := 0; c < len; c++ {
for c := 0; c < length; c++ {
if err := e.w.WriteByte(byte(i >> uint(8*c) & 0xff)); err != nil { if err := e.w.WriteByte(byte(i >> uint(8*c) & 0xff)); err != nil {
return err return err
} }
return err return err
} }


len := arr.Len()
length := arr.Len()


if err := e.marshalIntInternal(int64(len)); err != nil {
if err := e.marshalIntInternal(int64(length)); err != nil {
return err return err
} }


for i := 0; i < len; i++ {
for i := 0; i < length; i++ {
if err := e.marshal(arr.Index(i).Interface()); err != nil { if err := e.marshal(arr.Index(i).Interface()); err != nil {
return err return err
} }

modules/process/manager_stacktraces.go (+0 -1)

} }
sort.Slice(processes, after(processes)) sort.Slice(processes, after(processes))
if !flat { if !flat {

var sortChildren func(process *Process) var sortChildren func(process *Process)


sortChildren = func(process *Process) { sortChildren = func(process *Process) {

modules/repository/temp.go (+0 -1)

if err != nil { if err != nil {
log.Error("Unable to create temporary directory: %s-*.git (%v)", prefix, err) log.Error("Unable to create temporary directory: %s-*.git (%v)", prefix, err)
return "", fmt.Errorf("Failed to create dir %s-*.git: %w", prefix, err) return "", fmt.Errorf("Failed to create dir %s-*.git: %w", prefix, err)

} }
return basePath, nil return basePath, nil
} }

modules/setting/time.go (+1 -2)

DefaultUILocation, err = time.LoadLocation(zone) DefaultUILocation, err = time.LoadLocation(zone)
if err != nil { if err != nil {
log.Fatal("Load time zone failed: %v", err) log.Fatal("Load time zone failed: %v", err)
} else {
log.Info("Default UI Location is %v", zone)
} }
log.Info("Default UI Location is %v", zone)
} }
if DefaultUILocation == nil { if DefaultUILocation == nil {
DefaultUILocation = time.Local DefaultUILocation = time.Local

modules/templates/htmlrenderer.go (+2 -3)

if setting.IsProd { if setting.IsProd {
// in prod mode, Gitea must have correct templates to run // in prod mode, Gitea must have correct templates to run
log.Fatal("Gitea can't run with template errors: %s", msg) log.Fatal("Gitea can't run with template errors: %s", msg)
} else {
// in dev mode, do not need to really exit, because the template errors could be fixed by developer soon and the templates get reloaded
log.Error("There are template errors but Gitea continues to run in dev mode: %s", msg)
} }
// in dev mode, do not need to really exit, because the template errors could be fixed by developer soon and the templates get reloaded
log.Error("There are template errors but Gitea continues to run in dev mode: %s", msg)
} }


type templateErrorPrettier struct { type templateErrorPrettier struct {

modules/templates/mailer.go (+1 -2)

if err = buildSubjectBodyTemplate(subjectTemplates, bodyTemplates, tmplName, content); err != nil { if err = buildSubjectBodyTemplate(subjectTemplates, bodyTemplates, tmplName, content); err != nil {
if firstRun { if firstRun {
log.Fatal("Failed to parse mail template, err: %v", err) log.Fatal("Failed to parse mail template, err: %v", err)
} else {
log.Error("Failed to parse mail template, err: %v", err)
} }
log.Error("Failed to parse mail template, err: %v", err)
} }
} }
} }

modules/util/util_test.go (+3 -3)

} }


func Test_RandomInt(t *testing.T) { func Test_RandomInt(t *testing.T) {
int, err := CryptoRandomInt(255)
assert.True(t, int >= 0)
assert.True(t, int <= 255)
randInt, err := CryptoRandomInt(255)
assert.True(t, randInt >= 0)
assert.True(t, randInt <= 255)
assert.NoError(t, err) assert.NoError(t, err)
} }



routers/api/actions/artifacts.go (+0 -1)



var task *actions.ActionTask var task *actions.ActionTask
if err == nil { if err == nil {

task, err = actions.GetTaskByID(req.Context(), tID) task, err = actions.GetTaskByID(req.Context(), tID)
if err != nil { if err != nil {
log.Error("Error runner api getting task by ID: %v", err) log.Error("Error runner api getting task by ID: %v", err)

routers/api/packages/alpine/alpine.go (+2 -2)

return return
} }


upload, close, err := ctx.UploadStream()
upload, needToClose, err := ctx.UploadStream()
if err != nil { if err != nil {
apiError(ctx, http.StatusInternalServerError, err) apiError(ctx, http.StatusInternalServerError, err)
return return
} }
if close {
if needToClose {
defer upload.Close() defer upload.Close()
} }



routers/api/packages/conan/conan.go (+2 -2)

return return
} }


upload, close, err := ctx.UploadStream()
upload, needToClose, err := ctx.UploadStream()
if err != nil { if err != nil {
apiError(ctx, http.StatusBadRequest, err) apiError(ctx, http.StatusBadRequest, err)
return return
} }
if close {
if needToClose {
defer upload.Close() defer upload.Close()
} }



routers/api/packages/conda/conda.go (+2 -2)

} }


func UploadPackageFile(ctx *context.Context) { func UploadPackageFile(ctx *context.Context) {
upload, close, err := ctx.UploadStream()
upload, needToClose, err := ctx.UploadStream()
if err != nil { if err != nil {
apiError(ctx, http.StatusInternalServerError, err) apiError(ctx, http.StatusInternalServerError, err)
return return
} }
if close {
if needToClose {
defer upload.Close() defer upload.Close()
} }



routers/api/packages/container/container.go (+3 -3)

} }
return return
} }
close := true
doClose := true
defer func() { defer func() {
if close {
if doClose {
uploader.Close() uploader.Close()
} }
}() }()
apiError(ctx, http.StatusInternalServerError, err) apiError(ctx, http.StatusInternalServerError, err)
return return
} }
close = false
doClose = false


if err := container_service.RemoveBlobUploadByID(ctx, uploader.ID); err != nil { if err := container_service.RemoveBlobUploadByID(ctx, uploader.ID); err != nil {
apiError(ctx, http.StatusInternalServerError, err) apiError(ctx, http.StatusInternalServerError, err)

routers/api/packages/cran/cran.go (+2 -2)

} }


func uploadPackageFile(ctx *context.Context, compositeKey string, properties map[string]string) { func uploadPackageFile(ctx *context.Context, compositeKey string, properties map[string]string) {
upload, close, err := ctx.UploadStream()
upload, needToClose, err := ctx.UploadStream()
if err != nil { if err != nil {
apiError(ctx, http.StatusBadRequest, err) apiError(ctx, http.StatusBadRequest, err)
return return
} }
if close {
if needToClose {
defer upload.Close() defer upload.Close()
} }



routers/api/packages/debian/debian.go (+2 -2)

return return
} }


upload, close, err := ctx.UploadStream()
upload, needToClose, err := ctx.UploadStream()
if err != nil { if err != nil {
apiError(ctx, http.StatusInternalServerError, err) apiError(ctx, http.StatusInternalServerError, err)
return return
} }
if close {
if needToClose {
defer upload.Close() defer upload.Close()
} }



routers/api/packages/generic/generic.go (+2 -2)

return return
} }


upload, close, err := ctx.UploadStream()
upload, needToClose, err := ctx.UploadStream()
if err != nil { if err != nil {
apiError(ctx, http.StatusInternalServerError, err) apiError(ctx, http.StatusInternalServerError, err)
return return
} }
if close {
if needToClose {
defer upload.Close() defer upload.Close()
} }



routers/api/packages/goproxy/goproxy.go (+2 -2)

} }


func UploadPackage(ctx *context.Context) { func UploadPackage(ctx *context.Context) {
upload, close, err := ctx.UploadStream()
upload, needToClose, err := ctx.UploadStream()
if err != nil { if err != nil {
apiError(ctx, http.StatusInternalServerError, err) apiError(ctx, http.StatusInternalServerError, err)
return return
} }
if close {
if needToClose {
defer upload.Close() defer upload.Close()
} }



routers/api/packages/nuget/nuget.go (+2 -2)

func processUploadedFile(ctx *context.Context, expectedType nuget_module.PackageType) (*nuget_module.Package, *packages_module.HashedBuffer, []io.Closer) { func processUploadedFile(ctx *context.Context, expectedType nuget_module.PackageType) (*nuget_module.Package, *packages_module.HashedBuffer, []io.Closer) {
closables := make([]io.Closer, 0, 2) closables := make([]io.Closer, 0, 2)


upload, close, err := ctx.UploadStream()
upload, needToClose, err := ctx.UploadStream()
if err != nil { if err != nil {
apiError(ctx, http.StatusBadRequest, err) apiError(ctx, http.StatusBadRequest, err)
return nil, nil, closables return nil, nil, closables
} }


if close {
if needToClose {
closables = append(closables, upload) closables = append(closables, upload)
} }



routers/api/packages/rpm/rpm.go (+2 -2)

} }


func UploadPackageFile(ctx *context.Context) { func UploadPackageFile(ctx *context.Context) {
upload, close, err := ctx.UploadStream()
upload, needToClose, err := ctx.UploadStream()
if err != nil { if err != nil {
apiError(ctx, http.StatusInternalServerError, err) apiError(ctx, http.StatusInternalServerError, err)
return return
} }
if close {
if needToClose {
defer upload.Close() defer upload.Close()
} }



routers/api/packages/rubygems/rubygems.go (+2 -2)



// UploadPackageFile adds a file to the package. If the package does not exist, it gets created. // UploadPackageFile adds a file to the package. If the package does not exist, it gets created.
func UploadPackageFile(ctx *context.Context) { func UploadPackageFile(ctx *context.Context) {
upload, close, err := ctx.UploadStream()
upload, needToClose, err := ctx.UploadStream()
if err != nil { if err != nil {
apiError(ctx, http.StatusBadRequest, err) apiError(ctx, http.StatusBadRequest, err)
return return
} }
if close {
if needToClose {
defer upload.Close() defer upload.Close()
} }



routers/api/v1/repo/issue.go (+0 -1)



var includedAnyLabels []int64 var includedAnyLabels []int64
{ {

labels := ctx.FormTrim("labels") labels := ctx.FormTrim("labels")
var includedLabelNames []string var includedLabelNames []string
if len(labels) > 0 { if len(labels) > 0 {

routers/api/v1/repo/mirror.go (+0 -1)

if err == nil { if err == nil {
responsePushMirrors = append(responsePushMirrors, m) responsePushMirrors = append(responsePushMirrors, m)
} }

} }
ctx.SetLinkHeader(len(responsePushMirrors), utils.GetListOptions(ctx).PageSize) ctx.SetLinkHeader(len(responsePushMirrors), utils.GetListOptions(ctx).PageSize)
ctx.SetTotalCountHeader(count) ctx.SetTotalCountHeader(count)

routers/api/v1/repo/pull.go (+0 -2)

isSameRepo = true isSameRepo = true
headUser = ctx.Repo.Owner headUser = ctx.Repo.Owner
headBranch = headInfos[0] headBranch = headInfos[0]

} else if len(headInfos) == 2 { } else if len(headInfos) == 2 {
headUser, err = user_model.GetUserByName(ctx, headInfos[0]) headUser, err = user_model.GetUserByName(ctx, headInfos[0])
if err != nil { if err != nil {
headBranch = headInfos[1] headBranch = headInfos[1]
// The head repository can also point to the same repo // The head repository can also point to the same repo
isSameRepo = ctx.Repo.Owner.ID == headUser.ID isSameRepo = ctx.Repo.Owner.ID == headUser.ID

} else { } else {
ctx.NotFound() ctx.NotFound()
return nil, nil, nil, nil, "", "" return nil, nil, nil, nil, "", ""

routers/api/v1/repo/pull_review.go (+0 -1)

} }


if ctx.Repo.Repository.Owner.IsOrganization() && len(opts.TeamReviewers) > 0 { if ctx.Repo.Repository.Owner.IsOrganization() && len(opts.TeamReviewers) > 0 {

teamReviewers := make([]*organization.Team, 0, len(opts.TeamReviewers)) teamReviewers := make([]*organization.Team, 0, len(opts.TeamReviewers))
for _, t := range opts.TeamReviewers { for _, t := range opts.TeamReviewers {
var teamReviewer *organization.Team var teamReviewer *organization.Team

routers/api/v1/repo/repo.go (+0 -1)



// update MirrorInterval // update MirrorInterval
if opts.MirrorInterval != nil { if opts.MirrorInterval != nil {

// MirrorInterval should be a duration // MirrorInterval should be a duration
interval, err := time.ParseDuration(*opts.MirrorInterval) interval, err := time.ParseDuration(*opts.MirrorInterval)
if err != nil { if err != nil {

routers/api/v1/repo/wiki.go (+0 -1)

func findWikiRepoCommit(ctx *context.APIContext) (*git.Repository, *git.Commit) { func findWikiRepoCommit(ctx *context.APIContext) (*git.Repository, *git.Commit) {
wikiRepo, err := gitrepo.OpenWikiRepository(ctx, ctx.Repo.Repository) wikiRepo, err := gitrepo.OpenWikiRepository(ctx, ctx.Repo.Repository)
if err != nil { if err != nil {

if git.IsErrNotExist(err) || err.Error() == "no such file or directory" { if git.IsErrNotExist(err) || err.Error() == "no such file or directory" {
ctx.NotFound(err) ctx.NotFound(err)
} else { } else {

routers/private/hook_pre_receive.go (+0 -1)

UserMsg: fmt.Sprintf("branch %s is protected from force push", branchName), UserMsg: fmt.Sprintf("branch %s is protected from force push", branchName),
}) })
return return

} }
} }



routers/web/repo/actions/view.go (+0 -1)

writer := zip.NewWriter(ctx.Resp) writer := zip.NewWriter(ctx.Resp)
defer writer.Close() defer writer.Close()
for _, art := range artifacts { for _, art := range artifacts {

f, err := storage.ActionsArtifacts.Open(art.StoragePath) f, err := storage.ActionsArtifacts.Open(art.StoragePath)
if err != nil { if err != nil {
ctx.Error(http.StatusInternalServerError, err.Error()) ctx.Error(http.StatusInternalServerError, err.Error())

routers/web/repo/issue.go (+0 -4)

} }
} }
} }

} }


if template.Ref != "" && !strings.HasPrefix(template.Ref, "refs/") { // Assume that the ref intended is always a branch - for tags users should use refs/tags/<ref> if template.Ref != "" && !strings.HasPrefix(template.Ref, "refs/") { // Assume that the ref intended is always a branch - for tags users should use refs/tags/<ref>
if comment.ProjectID > 0 && comment.Project == nil { if comment.ProjectID > 0 && comment.Project == nil {
comment.Project = ghostProject comment.Project = ghostProject
} }

} else if comment.Type == issues_model.CommentTypeAssignees || comment.Type == issues_model.CommentTypeReviewRequest { } else if comment.Type == issues_model.CommentTypeAssignees || comment.Type == issues_model.CommentTypeReviewRequest {
if err = comment.LoadAssigneeUserAndTeam(ctx); err != nil { if err = comment.LoadAssigneeUserAndTeam(ctx); err != nil {
ctx.ServerError("LoadAssigneeUserAndTeam", err) ctx.ServerError("LoadAssigneeUserAndTeam", err)


var includedAnyLabels []int64 var includedAnyLabels []int64
{ {

labels := ctx.FormTrim("labels") labels := ctx.FormTrim("labels")
var includedLabelNames []string var includedLabelNames []string
if len(labels) > 0 { if len(labels) > 0 {
if (ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) || (ctx.IsSigned && issue.IsPoster(ctx.Doer.ID))) && if (ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) || (ctx.IsSigned && issue.IsPoster(ctx.Doer.ID))) &&
(form.Status == "reopen" || form.Status == "close") && (form.Status == "reopen" || form.Status == "close") &&
!(issue.IsPull && issue.PullRequest.HasMerged) { !(issue.IsPull && issue.PullRequest.HasMerged) {

// Duplication and conflict check should apply to reopen pull request. // Duplication and conflict check should apply to reopen pull request.
var pr *issues_model.PullRequest var pr *issues_model.PullRequest



routers/web/repo/pull.go (+0 -3)

} }


if pb != nil && pb.EnableStatusCheck { if pb != nil && pb.EnableStatusCheck {

var missingRequiredChecks []string var missingRequiredChecks []string
for _, requiredContext := range pb.StatusCheckContexts { for _, requiredContext := range pb.StatusCheckContexts {
contextFound := false contextFound := false


// Validate the given commit sha to show (if any passed) // Validate the given commit sha to show (if any passed)
if willShowSpecifiedCommit || willShowSpecifiedCommitRange { if willShowSpecifiedCommit || willShowSpecifiedCommitRange {

foundStartCommit := len(specifiedStartCommit) == 0 foundStartCommit := len(specifiedStartCommit) == 0
foundEndCommit := len(specifiedEndCommit) == 0 foundEndCommit := len(specifiedEndCommit) == 0


ctx.Flash.Error(flashError) ctx.Flash.Error(flashError)
ctx.Redirect(issue.Link()) ctx.Redirect(issue.Link())
return return

} }
ctx.Flash.Error(err.Error()) ctx.Flash.Error(err.Error())
ctx.Redirect(issue.Link()) ctx.Redirect(issue.Link())

routers/web/repo/pull_review.go (+0 -1)



updatedFiles := make(map[string]pull_model.ViewedState, len(data.Files)) updatedFiles := make(map[string]pull_model.ViewedState, len(data.Files))
for file, viewed := range data.Files { for file, viewed := range data.Files {

// Only unviewed and viewed are possible, has-changed can not be set from the outside // Only unviewed and viewed are possible, has-changed can not be set from the outside
state := pull_model.Unviewed state := pull_model.Unviewed
if viewed { if viewed {

routers/web/repo/view.go (+0 -1)

// or of directory if not in root directory. // or of directory if not in root directory.
ctx.Data["LatestCommit"] = latestCommit ctx.Data["LatestCommit"] = latestCommit
if latestCommit != nil { if latestCommit != nil {

verification := asymkey_model.ParseCommitWithSignature(ctx, latestCommit) verification := asymkey_model.ParseCommitWithSignature(ctx, latestCommit)


if err := asymkey_model.CalculateTrustStatus(verification, ctx.Repo.Repository.GetTrustModel(), func(user *user_model.User) (bool, error) { if err := asymkey_model.CalculateTrustStatus(verification, ctx.Repo.Repository.GetTrustModel(), func(user *user_model.User) (bool, error) {

services/actions/notifier_helper.go (+5 -3)

TriggerEvent: dwf.TriggerEvent.Name, TriggerEvent: dwf.TriggerEvent.Name,
Status: actions_model.StatusWaiting, Status: actions_model.StatusWaiting,
} }
if need, err := ifNeedApproval(ctx, run, input.Repo, input.Doer); err != nil {

need, err := ifNeedApproval(ctx, run, input.Repo, input.Doer)
if err != nil {
log.Error("check if need approval for repo %d with user %d: %v", input.Repo.ID, input.Doer.ID, err) log.Error("check if need approval for repo %d with user %d: %v", input.Repo.ID, input.Doer.ID, err)
continue continue
} else {
run.NeedApproval = need
} }


run.NeedApproval = need

if err := run.LoadAttributes(ctx); err != nil { if err := run.LoadAttributes(ctx); err != nil {
log.Error("LoadAttributes: %v", err) log.Error("LoadAttributes: %v", err)
continue continue

services/auth/source/ldap/source_sync.go (+0 -1)

!strings.EqualFold(usr.Email, su.Mail) || !strings.EqualFold(usr.Email, su.Mail) ||
usr.FullName != fullName || usr.FullName != fullName ||
!usr.IsActive { !usr.IsActive {

log.Trace("SyncExternalUsers[%s]: Updating user %s", source.authSource.Name, usr.Name) log.Trace("SyncExternalUsers[%s]: Updating user %s", source.authSource.Name, usr.Name)


opts := &user_service.UpdateOptions{ opts := &user_service.UpdateOptions{

services/context/repo.go (+0 -2)

case RepoRefBranch: case RepoRefBranch:
ref := getRefNameFromPath(ctx, repo, path, repo.GitRepo.IsBranchExist) ref := getRefNameFromPath(ctx, repo, path, repo.GitRepo.IsBranchExist)
if len(ref) == 0 { if len(ref) == 0 {

// check if ref is HEAD // check if ref is HEAD
parts := strings.Split(path, "/") parts := strings.Split(path, "/")
if parts[0] == headRefName { if parts[0] == headRefName {
return cancel return cancel
} }
ctx.Repo.CommitID = ctx.Repo.Commit.ID.String() ctx.Repo.CommitID = ctx.Repo.Commit.ID.String()

} else if refType.RefTypeIncludesTags() && ctx.Repo.GitRepo.IsTagExist(refName) { } else if refType.RefTypeIncludesTags() && ctx.Repo.GitRepo.IsTagExist(refName) {
ctx.Repo.IsViewTag = true ctx.Repo.IsViewTag = true
ctx.Repo.TagName = refName ctx.Repo.TagName = refName

services/gitdiff/gitdiff.go (+4 -5)

// diff --git a/b b/b b/b b/b b/b b/b // diff --git a/b b/b b/b b/b b/b b/b
// //
midpoint := (len(line) + len(cmdDiffHead) - 1) / 2 midpoint := (len(line) + len(cmdDiffHead) - 1) / 2
new, old := line[len(cmdDiffHead):midpoint], line[midpoint+1:]
if len(new) > 2 && len(old) > 2 && new[2:] == old[2:] {
curFile.OldName = old[2:]
curFile.Name = old[2:]
newPart, oldPart := line[len(cmdDiffHead):midpoint], line[midpoint+1:]
if len(newPart) > 2 && len(oldPart) > 2 && newPart[2:] == oldPart[2:] {
curFile.OldName = oldPart[2:]
curFile.Name = oldPart[2:]
} }
} }
} }
defer deferable() defer deferable()


for _, diffFile := range diff.Files { for _, diffFile := range diff.Files {

isVendored := optional.None[bool]() isVendored := optional.None[bool]()
isGenerated := optional.None[bool]() isGenerated := optional.None[bool]()
if checker != nil { if checker != nil {

services/issue/commit.go (+4 -5)

var refIssue *issues_model.Issue var refIssue *issues_model.Issue
var err error var err error
for _, ref := range references.FindAllIssueReferences(c.Message) { for _, ref := range references.FindAllIssueReferences(c.Message) {

// issue is from another repo // issue is from another repo
if len(ref.Owner) > 0 && len(ref.Name) > 0 { if len(ref.Owner) > 0 && len(ref.Name) > 0 {
refRepo, err = repo_model.GetRepositoryByOwnerAndName(ctx, ref.Owner, ref.Name) refRepo, err = repo_model.GetRepositoryByOwnerAndName(ctx, ref.Owner, ref.Name)
continue continue
} }
} }
close := ref.Action == references.XRefActionCloses
if close && len(ref.TimeLog) > 0 {
isClosed := ref.Action == references.XRefActionCloses
if isClosed && len(ref.TimeLog) > 0 {
if err := issueAddTime(ctx, refIssue, doer, c.Timestamp, ref.TimeLog); err != nil { if err := issueAddTime(ctx, refIssue, doer, c.Timestamp, ref.TimeLog); err != nil {
return err return err
} }
} }
if close != refIssue.IsClosed {
if isClosed != refIssue.IsClosed {
refIssue.Repo = refRepo refIssue.Repo = refRepo
if err := ChangeStatus(ctx, refIssue, doer, c.Sha1, close); err != nil {
if err := ChangeStatus(ctx, refIssue, doer, c.Sha1, isClosed); err != nil {
return err return err
} }
} }

services/markup/processorhelper_codepreview.go (+5 -4)

lineNums := make([]int, 0, lineCount) lineNums := make([]int, 0, lineCount)
lineCodes := make([]string, 0, lineCount) lineCodes := make([]string, 0, lineCount)
for i := opts.LineStart; i <= opts.LineStop; i++ { for i := opts.LineStart; i <= opts.LineStop; i++ {
if line, err := reader.ReadString('\n'); err != nil && line == "" {
line, err := reader.ReadString('\n')
if err != nil && line == "" {
break break
} else {
lineNums = append(lineNums, i)
lineCodes = append(lineCodes, line)
} }

lineNums = append(lineNums, i)
lineCodes = append(lineCodes, line)
} }
realLineStop := max(opts.LineStart, opts.LineStart+len(lineNums)-1) realLineStop := max(opts.LineStart, opts.LineStart+len(lineNums)-1)
highlightLines := code.HighlightSearchResultCode(opts.FilePath, language, lineNums, strings.Join(lineCodes, "")) highlightLines := code.HighlightSearchResultCode(opts.FilePath, language, lineNums, strings.Join(lineCodes, ""))

services/migrations/gitea_downloader.go (+0 -1)

return nil, false, fmt.Errorf("error while listing issues: %w", err) return nil, false, fmt.Errorf("error while listing issues: %w", err)
} }
for _, issue := range issues { for _, issue := range issues {

labels := make([]*base.Label, 0, len(issue.Labels)) labels := make([]*base.Label, 0, len(issue.Labels))
for i := range issue.Labels { for i := range issue.Labels {
labels = append(labels, g.convertGiteaLabel(issue.Labels[i])) labels = append(labels, g.convertGiteaLabel(issue.Labels[i]))

services/migrations/gitlab.go (+0 -1)

return nil, false, fmt.Errorf("error while listing issues: %w", err) return nil, false, fmt.Errorf("error while listing issues: %w", err)
} }
for _, issue := range issues { for _, issue := range issues {

labels := make([]*base.Label, 0, len(issue.Labels)) labels := make([]*base.Label, 0, len(issue.Labels))
for _, l := range issue.Labels { for _, l := range issue.Labels {
labels = append(labels, &base.Label{ labels = append(labels, &base.Label{

services/mirror/mirror_pull.go (+3 -4)

theCommits.Commits = theCommits.Commits[:setting.UI.FeedMaxCommitNum] theCommits.Commits = theCommits.Commits[:setting.UI.FeedMaxCommitNum]
} }


if newCommit, err := gitRepo.GetCommit(newCommitID); err != nil {
newCommit, err := gitRepo.GetCommit(newCommitID)
if err != nil {
log.Error("SyncMirrors [repo: %-v]: unable to get commit %s: %v", m.Repo, newCommitID, err) log.Error("SyncMirrors [repo: %-v]: unable to get commit %s: %v", m.Repo, newCommitID, err)
continue continue
} else {
theCommits.HeadCommit = repo_module.CommitToPushCommit(newCommit)
} }


theCommits.HeadCommit = repo_module.CommitToPushCommit(newCommit)
theCommits.CompareURL = m.Repo.ComposeCompareURL(oldCommitID, newCommitID) theCommits.CompareURL = m.Repo.ComposeCompareURL(oldCommitID, newCommitID)


notify_service.SyncPushCommits(ctx, m.Repo.MustOwner(ctx), m.Repo, &repo_module.PushUpdateOptions{ notify_service.SyncPushCommits(ctx, m.Repo.MustOwner(ctx), m.Repo, &repo_module.PushUpdateOptions{
log.Error("SyncMirrors [repo: %-v]: unable to update repository 'updated_unix': %v", m.Repo, err) log.Error("SyncMirrors [repo: %-v]: unable to update repository 'updated_unix': %v", m.Repo, err)
return false return false
} }

} }


log.Trace("SyncMirrors [repo: %-v]: Successfully updated", m.Repo) log.Trace("SyncMirrors [repo: %-v]: Successfully updated", m.Repo)

services/pull/merge.go (+3 -3)

if err = ref.Issue.LoadRepo(ctx); err != nil { if err = ref.Issue.LoadRepo(ctx); err != nil {
return err return err
} }
close := ref.RefAction == references.XRefActionCloses
if close != ref.Issue.IsClosed {
if err = issue_service.ChangeStatus(ctx, ref.Issue, doer, pr.MergedCommitID, close); err != nil {
isClosed := ref.RefAction == references.XRefActionCloses
if isClosed != ref.Issue.IsClosed {
if err = issue_service.ChangeStatus(ctx, ref.Issue, doer, pr.MergedCommitID, isClosed); err != nil {
// Allow ErrDependenciesLeft // Allow ErrDependenciesLeft
if !issues_model.IsErrDependenciesLeft(err) { if !issues_model.IsErrDependenciesLeft(err) {
return err return err

services/pull/pull.go (+0 -1)

if err != nil { if err != nil {
log.Error("Unable to get commits between: %s %s Error: %v", pr.HeadBranch, pr.MergeBase, err) log.Error("Unable to get commits between: %s %s Error: %v", pr.HeadBranch, pr.MergeBase, err)
return "" return ""

} }
if len(commits) == 0 { if len(commits) == 0 {
break break

services/repository/adopt.go (+0 -1)

return err return err
} }
repoNamesToCheck = repoNamesToCheck[:0] repoNamesToCheck = repoNamesToCheck[:0]

} }
return filepath.SkipDir return filepath.SkipDir
}); err != nil { }); err != nil {

services/repository/contributors_graph.go (+0 -1)

Stats: &commitStats, Stats: &commitStats,
} }
extendedCommitStats = append(extendedCommitStats, res) extendedCommitStats = append(extendedCommitStats, res)

} }
_ = stdoutReader.Close() _ = stdoutReader.Close()
return nil return nil

services/repository/files/update.go (+0 -2)

return nil, fmt.Errorf("ConvertToSHA1: Invalid last commit ID: %w", err) return nil, fmt.Errorf("ConvertToSHA1: Invalid last commit ID: %w", err)
} }
opts.LastCommitID = lastCommitID.String() opts.LastCommitID = lastCommitID.String()

} }


for _, file := range opts.Files { for _, file := range opts.Files {
Path: file.Options.treePath, Path: file.Options.treePath,
} }
} }

} }
} }



services/user/delete.go (+0 -1)



if purge || (setting.Service.UserDeleteWithCommentsMaxTime != 0 && if purge || (setting.Service.UserDeleteWithCommentsMaxTime != 0 &&
u.CreatedUnix.AsTime().Add(setting.Service.UserDeleteWithCommentsMaxTime).After(time.Now())) { u.CreatedUnix.AsTime().Add(setting.Service.UserDeleteWithCommentsMaxTime).After(time.Now())) {

// Delete Comments // Delete Comments
const batchSize = 50 const batchSize = 50
for { for {

services/user/update_test.go (+3 -3)

assert.NoError(t, unittest.PrepareTestDatabase()) assert.NoError(t, unittest.PrepareTestDatabase())


user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 28}) user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 28})
copy := *user
userCopy := *user


assert.NoError(t, UpdateAuth(db.DefaultContext, user, &UpdateAuthOptions{ assert.NoError(t, UpdateAuth(db.DefaultContext, user, &UpdateAuthOptions{
LoginName: optional.Some("new-login"), LoginName: optional.Some("new-login"),
MustChangePassword: optional.Some(true), MustChangePassword: optional.Some(true),
})) }))
assert.True(t, user.MustChangePassword) assert.True(t, user.MustChangePassword)
assert.NotEqual(t, copy.Passwd, user.Passwd)
assert.NotEqual(t, copy.Salt, user.Salt)
assert.NotEqual(t, userCopy.Passwd, user.Passwd)
assert.NotEqual(t, userCopy.Salt, user.Salt)


assert.NoError(t, UpdateAuth(db.DefaultContext, user, &UpdateAuthOptions{ assert.NoError(t, UpdateAuth(db.DefaultContext, user, &UpdateAuthOptions{
ProhibitLogin: optional.Some(true), ProhibitLogin: optional.Some(true),

services/webhook/discord.go (+0 -2)



func parseHookPullRequestEventType(event webhook_module.HookEventType) (string, error) { func parseHookPullRequestEventType(event webhook_module.HookEventType) (string, error) {
switch event { switch event {

case webhook_module.HookEventPullRequestReviewApproved: case webhook_module.HookEventPullRequestReviewApproved:
return "approved", nil return "approved", nil
case webhook_module.HookEventPullRequestReviewRejected: case webhook_module.HookEventPullRequestReviewRejected:
return "rejected", nil return "rejected", nil
case webhook_module.HookEventPullRequestReviewComment: case webhook_module.HookEventPullRequestReviewComment:
return "comment", nil return "comment", nil

default: default:
return "", errors.New("unknown event type") return "", errors.New("unknown event type")
} }

services/webhook/matrix.go (+0 -1)

if i < len(p.Commits)-1 { if i < len(p.Commits)-1 {
text += "<br>" text += "<br>"
} }

} }


return m.newPayload(text, p.Commits...) return m.newPayload(text, p.Commits...)

tests/e2e/e2e_test.go (+4 -2)

cmd := exec.Command(runArgs[0], runArgs...) cmd := exec.Command(runArgs[0], runArgs...)
cmd.Env = os.Environ() cmd.Env = os.Environ()
cmd.Env = append(cmd.Env, fmt.Sprintf("GITEA_URL=%s", setting.AppURL)) cmd.Env = append(cmd.Env, fmt.Sprintf("GITEA_URL=%s", setting.AppURL))

var stdout, stderr bytes.Buffer var stdout, stderr bytes.Buffer
cmd.Stdout = &stdout cmd.Stdout = &stdout
cmd.Stderr = &stderr cmd.Stderr = &stderr

err := cmd.Run() err := cmd.Run()
if err != nil { if err != nil {
// Currently colored output is conflicting. Using Printf until that is resolved. // Currently colored output is conflicting. Using Printf until that is resolved.
fmt.Printf("%v", stdout.String()) fmt.Printf("%v", stdout.String())
fmt.Printf("%v", stderr.String()) fmt.Printf("%v", stderr.String())
log.Fatal("Playwright Failed: %s", err) log.Fatal("Playwright Failed: %s", err)
} else {
fmt.Printf("%v", stdout.String())
} }

fmt.Printf("%v", stdout.String())
}) })
}) })
} }

tests/integration/api_notification_test.go (+5 -5)



MakeRequest(t, NewRequest(t, "GET", "/api/v1/notifications/new"), http.StatusUnauthorized) MakeRequest(t, NewRequest(t, "GET", "/api/v1/notifications/new"), http.StatusUnauthorized)


new := struct {
newStruct := struct {
New int64 `json:"new"` New int64 `json:"new"`
}{} }{}


req = NewRequest(t, "GET", "/api/v1/notifications/new"). req = NewRequest(t, "GET", "/api/v1/notifications/new").
AddTokenAuth(token) AddTokenAuth(token)
resp = MakeRequest(t, req, http.StatusOK) resp = MakeRequest(t, req, http.StatusOK)
DecodeJSON(t, resp, &new)
assert.True(t, new.New > 0)
DecodeJSON(t, resp, &newStruct)
assert.True(t, newStruct.New > 0)


// -- mark notifications as read -- // -- mark notifications as read --
req = NewRequest(t, "GET", "/api/v1/notifications?status-types=unread"). req = NewRequest(t, "GET", "/api/v1/notifications?status-types=unread").
req = NewRequest(t, "GET", "/api/v1/notifications/new"). req = NewRequest(t, "GET", "/api/v1/notifications/new").
AddTokenAuth(token) AddTokenAuth(token)
resp = MakeRequest(t, req, http.StatusOK) resp = MakeRequest(t, req, http.StatusOK)
DecodeJSON(t, resp, &new)
assert.True(t, new.New == 0)
DecodeJSON(t, resp, &newStruct)
assert.True(t, newStruct.New == 0)
} }


func TestAPINotificationPUT(t *testing.T) { func TestAPINotificationPUT(t *testing.T) {

tests/integration/pull_status_test.go (+0 -1)



// Update commit status, and check if icon is updated as well // Update commit status, and check if icon is updated as well
for _, status := range statusList { for _, status := range statusList {

// Call API to add status for commit // Call API to add status for commit
t.Run("CreateStatus", doAPICreateCommitStatus(testCtx, commitID, api.CreateStatusOption{ t.Run("CreateStatus", doAPICreateCommitStatus(testCtx, commitID, api.CreateStatusOption{
State: status, State: status,
