@@ -8,6 +8,15 @@ delay = 1000 | |||
include_ext = ["go", "tmpl"] | |||
include_file = ["main.go"] | |||
include_dir = ["cmd", "models", "modules", "options", "routers", "services"] | |||
exclude_dir = ["modules/git/tests", "services/gitdiff/testdata", "modules/avatar/testdata", "models/fixtures", "models/migrations/fixtures", "modules/migration/file_format_testdata", "modules/avatar/identicon/testdata"] | |||
exclude_dir = [ | |||
"models/fixtures", | |||
"models/migrations/fixtures", | |||
"modules/avatar/identicon/testdata", | |||
"modules/avatar/testdata", | |||
"modules/git/tests", | |||
"modules/migration/file_format_testdata", | |||
"routers/private/tests", | |||
"services/gitdiff/testdata", | |||
] | |||
exclude_regex = ["_test.go$", "_gen.go$"] | |||
stop_on_error = true |
@@ -4,7 +4,7 @@ | |||
"features": { | |||
// installs nodejs into container | |||
"ghcr.io/devcontainers/features/node:1": { | |||
"version":"20" | |||
"version": "20" | |||
}, | |||
"ghcr.io/devcontainers/features/git-lfs:1.1.0": {}, | |||
"ghcr.io/devcontainers-contrib/features/poetry:2": {}, | |||
@@ -24,7 +24,7 @@ | |||
"DavidAnson.vscode-markdownlint", | |||
"Vue.volar", | |||
"ms-azuretools.vscode-docker", | |||
"zixuanchen.vitest-explorer", | |||
"vitest.explorer", | |||
"qwtel.sqlite-viewer", | |||
"GitHub.vscode-pull-request-github" | |||
] |
@@ -14,7 +14,7 @@ _test | |||
# MS VSCode | |||
.vscode | |||
__debug_bin | |||
__debug_bin* | |||
# Architecture specific extensions/prefixes | |||
*.[568vq] | |||
@@ -62,7 +62,6 @@ cpu.out | |||
/data | |||
/indexers | |||
/log | |||
/public/img/avatar | |||
/tests/integration/gitea-integration-* | |||
/tests/integration/indexers-* | |||
/tests/e2e/gitea-e2e-* | |||
@@ -78,6 +77,7 @@ cpu.out | |||
/public/assets/js | |||
/public/assets/css | |||
/public/assets/fonts | |||
/public/assets/img/avatar | |||
/public/assets/img/webpack | |||
/vendor | |||
/web_src/fomantic/node_modules |
@@ -42,10 +42,6 @@ overrides: | |||
worker: true | |||
rules: | |||
no-restricted-globals: [2, addEventListener, blur, close, closed, confirm, defaultStatus, defaultstatus, error, event, external, find, focus, frameElement, frames, history, innerHeight, innerWidth, isFinite, isNaN, length, locationbar, menubar, moveBy, moveTo, name, onblur, onerror, onfocus, onload, onresize, onunload, open, opener, opera, outerHeight, outerWidth, pageXOffset, pageYOffset, parent, print, removeEventListener, resizeBy, resizeTo, screen, screenLeft, screenTop, screenX, screenY, scroll, scrollbars, scrollBy, scrollTo, scrollX, scrollY, status, statusbar, stop, toolbar, top] | |||
- files: ["build/generate-images.js"] | |||
rules: | |||
i/no-unresolved: [0] | |||
i/no-extraneous-dependencies: [0] | |||
- files: ["*.config.*"] | |||
rules: | |||
i/no-unused-modules: [0] | |||
@@ -123,7 +119,7 @@ rules: | |||
"@stylistic/js/arrow-spacing": [2, {before: true, after: true}] | |||
"@stylistic/js/block-spacing": [0] | |||
"@stylistic/js/brace-style": [2, 1tbs, {allowSingleLine: true}] | |||
"@stylistic/js/comma-dangle": [2, only-multiline] | |||
"@stylistic/js/comma-dangle": [2, always-multiline] | |||
"@stylistic/js/comma-spacing": [2, {before: false, after: true}] | |||
"@stylistic/js/comma-style": [2, last] | |||
"@stylistic/js/computed-property-spacing": [2, never] | |||
@@ -171,7 +167,7 @@ rules: | |||
"@stylistic/js/semi-spacing": [2, {before: false, after: true}] | |||
"@stylistic/js/semi-style": [2, last] | |||
"@stylistic/js/space-before-blocks": [2, always] | |||
"@stylistic/js/space-before-function-paren": [0] | |||
"@stylistic/js/space-before-function-paren": [2, {anonymous: ignore, named: never, asyncArrow: always}] | |||
"@stylistic/js/space-in-parens": [2, never] | |||
"@stylistic/js/space-infix-ops": [2] | |||
"@stylistic/js/space-unary-ops": [2] | |||
@@ -283,14 +279,14 @@ rules: | |||
i/unambiguous: [0] | |||
init-declarations: [0] | |||
jquery/no-ajax-events: [2] | |||
jquery/no-ajax: [0] | |||
jquery/no-ajax: [2] | |||
jquery/no-animate: [2] | |||
jquery/no-attr: [0] | |||
jquery/no-attr: [2] | |||
jquery/no-bind: [2] | |||
jquery/no-class: [0] | |||
jquery/no-clone: [2] | |||
jquery/no-closest: [0] | |||
jquery/no-css: [0] | |||
jquery/no-css: [2] | |||
jquery/no-data: [0] | |||
jquery/no-deferred: [2] | |||
jquery/no-delegate: [2] | |||
@@ -307,7 +303,7 @@ rules: | |||
jquery/no-in-array: [2] | |||
jquery/no-is-array: [2] | |||
jquery/no-is-function: [2] | |||
jquery/no-is: [0] | |||
jquery/no-is: [2] | |||
jquery/no-load: [2] | |||
jquery/no-map: [2] | |||
jquery/no-merge: [2] | |||
@@ -315,7 +311,7 @@ rules: | |||
jquery/no-parent: [0] | |||
jquery/no-parents: [0] | |||
jquery/no-parse-html: [2] | |||
jquery/no-prop: [0] | |||
jquery/no-prop: [2] | |||
jquery/no-proxy: [2] | |||
jquery/no-ready: [2] | |||
jquery/no-serialize: [2] | |||
@@ -396,12 +392,12 @@ rules: | |||
no-irregular-whitespace: [2] | |||
no-iterator: [2] | |||
no-jquery/no-ajax-events: [2] | |||
no-jquery/no-ajax: [0] | |||
no-jquery/no-ajax: [2] | |||
no-jquery/no-and-self: [2] | |||
no-jquery/no-animate-toggle: [2] | |||
no-jquery/no-animate: [2] | |||
no-jquery/no-append-html: [0] | |||
no-jquery/no-attr: [0] | |||
no-jquery/no-append-html: [2] | |||
no-jquery/no-attr: [2] | |||
no-jquery/no-bind: [2] | |||
no-jquery/no-box-model: [2] | |||
no-jquery/no-browser: [2] | |||
@@ -413,7 +409,7 @@ rules: | |||
no-jquery/no-constructor-attributes: [2] | |||
no-jquery/no-contains: [2] | |||
no-jquery/no-context-prop: [2] | |||
no-jquery/no-css: [0] | |||
no-jquery/no-css: [2] | |||
no-jquery/no-data: [0] | |||
no-jquery/no-deferred: [2] | |||
no-jquery/no-delegate: [2] | |||
@@ -444,7 +440,7 @@ rules: | |||
no-jquery/no-is-numeric: [2] | |||
no-jquery/no-is-plain-object: [2] | |||
no-jquery/no-is-window: [2] | |||
no-jquery/no-is: [0] | |||
no-jquery/no-is: [2] | |||
no-jquery/no-jquery-constructor: [0] | |||
no-jquery/no-live: [2] | |||
no-jquery/no-load-shorthand: [2] | |||
@@ -466,7 +462,7 @@ rules: | |||
no-jquery/no-parse-html: [2] | |||
no-jquery/no-parse-json: [2] | |||
no-jquery/no-parse-xml: [2] | |||
no-jquery/no-prop: [0] | |||
no-jquery/no-prop: [2] | |||
no-jquery/no-proxy: [2] | |||
no-jquery/no-ready-shorthand: [2] | |||
no-jquery/no-ready: [2] | |||
@@ -487,7 +483,7 @@ rules: | |||
no-jquery/no-visibility: [2] | |||
no-jquery/no-when: [2] | |||
no-jquery/no-wrap: [2] | |||
no-jquery/variable-pattern: [0] | |||
no-jquery/variable-pattern: [2] | |||
no-label-var: [2] | |||
no-labels: [0] # handled by no-restricted-syntax | |||
no-lone-blocks: [2] |
@@ -67,11 +67,10 @@ modifies/dependencies: | |||
- any-glob-to-any-file: | |||
- "package.json" | |||
- "package-lock.json" | |||
- "poetry.toml" | |||
- "pyproject.toml" | |||
- "poetry.lock" | |||
- "go.mod" | |||
- "go.sum" | |||
- "pyproject.toml" | |||
modifies/go: | |||
- changed-files: | |||
@@ -82,3 +81,4 @@ modifies/js: | |||
- changed-files: | |||
- any-glob-to-any-file: | |||
- "**/*.js" | |||
- "**/*.vue" |
@@ -1,23 +0,0 @@ | |||
name: cron-lock | |||
on: | |||
schedule: | |||
- cron: "0 0 * * *" # every day at 00:00 UTC | |||
workflow_dispatch: | |||
permissions: | |||
issues: write | |||
pull-requests: write | |||
concurrency: | |||
group: lock | |||
jobs: | |||
action: | |||
runs-on: ubuntu-latest | |||
if: github.repository == 'go-gitea/gitea' | |||
steps: | |||
- uses: dessant/lock-threads@v5 | |||
with: | |||
issue-inactive-days: 10 | |||
pr-inactive-days: 7 |
@@ -73,6 +73,7 @@ jobs: | |||
- "Makefile" | |||
templates: | |||
- "tools/lint-templates-*.js" | |||
- "templates/**/*.tmpl" | |||
- "pyproject.toml" | |||
- "poetry.lock" |
@@ -35,8 +35,12 @@ jobs: | |||
- uses: actions/setup-python@v5 | |||
with: | |||
python-version: "3.12" | |||
- uses: actions/setup-node@v4 | |||
with: | |||
node-version: 20 | |||
- run: pip install poetry | |||
- run: make deps-py | |||
- run: make deps-frontend | |||
- run: make lint-templates | |||
lint-yaml: |
@@ -58,7 +58,7 @@ cpu.out | |||
/data | |||
/indexers | |||
/log | |||
/public/img/avatar | |||
/public/assets/img/avatar | |||
/tests/integration/gitea-integration-* | |||
/tests/integration/indexers-* | |||
/tests/e2e/gitea-e2e-* |
@@ -42,7 +42,7 @@ vscode: | |||
- DavidAnson.vscode-markdownlint | |||
- Vue.volar | |||
- ms-azuretools.vscode-docker | |||
- zixuanchen.vitest-explorer | |||
- vitest.explorer | |||
- qwtel.sqlite-viewer | |||
- GitHub.vscode-pull-request-github | |||
@@ -30,7 +30,7 @@ rules: | |||
"@stylistic/block-opening-brace-newline-after": null | |||
"@stylistic/block-opening-brace-newline-before": null | |||
"@stylistic/block-opening-brace-space-after": null | |||
"@stylistic/block-opening-brace-space-before": null | |||
"@stylistic/block-opening-brace-space-before": always | |||
"@stylistic/color-hex-case": lower | |||
"@stylistic/declaration-bang-space-after": never | |||
"@stylistic/declaration-bang-space-before": null | |||
@@ -140,7 +140,7 @@ rules: | |||
function-disallowed-list: null | |||
function-linear-gradient-no-nonstandard-direction: true | |||
function-name-case: lower | |||
function-no-unknown: null | |||
function-no-unknown: true | |||
function-url-no-scheme-relative: null | |||
function-url-quotes: always | |||
function-url-scheme-allowed-list: null | |||
@@ -168,7 +168,7 @@ rules: | |||
no-duplicate-selectors: true | |||
no-empty-source: true | |||
no-invalid-double-slash-comments: true | |||
no-invalid-position-at-import-rule: null | |||
no-invalid-position-at-import-rule: [true, ignoreAtRules: [tailwind]] | |||
no-irregular-whitespace: true | |||
no-unknown-animations: null | |||
no-unknown-custom-properties: null | |||
@@ -181,6 +181,7 @@ rules: | |||
rule-empty-line-before: null | |||
rule-selector-property-disallowed-list: null | |||
scale-unlimited/declaration-strict-value: [[/color$/, font-weight], {ignoreValues: /^(inherit|transparent|unset|initial|currentcolor|none)$/, ignoreFunctions: false, disableFix: true, expandShorthand: true}] | |||
selector-anb-no-unmatchable: true | |||
selector-attribute-name-disallowed-list: null | |||
selector-attribute-operator-allowed-list: null | |||
selector-attribute-operator-disallowed-list: null |
@@ -60,3 +60,4 @@ Nanguan Lin <nanguanlin6@gmail.com> (@lng2020) | |||
kerwin612 <kerwin612@qq.com> (@kerwin612) | |||
Gary Wang <git@blumia.net> (@BLumia) | |||
Tim-Niclas Oelschläger <zokki.softwareschmiede@gmail.com> (@zokkis) | |||
Yu Liu <1240335630@qq.com> (@HEREYUA) |
@@ -31,7 +31,7 @@ GOFUMPT_PACKAGE ?= mvdan.cc/gofumpt@v0.6.0 | |||
GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/cmd/golangci-lint@v1.56.1 | |||
GXZ_PACKAGE ?= github.com/ulikunitz/xz/cmd/gxz@v0.5.11 | |||
MISSPELL_PACKAGE ?= github.com/golangci/misspell/cmd/misspell@v0.4.1 | |||
SWAGGER_PACKAGE ?= github.com/go-swagger/go-swagger/cmd/swagger@v0.30.5 | |||
SWAGGER_PACKAGE ?= github.com/go-swagger/go-swagger/cmd/swagger@db51e79a0e37c572d8b59ae0c58bf2bbbbe53285 | |||
XGO_PACKAGE ?= src.techknowlogick.com/xgo@latest | |||
GO_LICENSES_PACKAGE ?= github.com/google/go-licenses@v1.6.0 | |||
GOVULNCHECK_PACKAGE ?= golang.org/x/vuln/cmd/govulncheck@v1.0.3 | |||
@@ -42,9 +42,6 @@ DOCKER_TAG ?= latest | |||
DOCKER_REF := $(DOCKER_IMAGE):$(DOCKER_TAG) | |||
ifeq ($(HAS_GO), yes) | |||
GOPATH ?= $(shell $(GO) env GOPATH) | |||
export PATH := $(GOPATH)/bin:$(PATH) | |||
CGO_EXTRA_CFLAGS := -DSQLITE_MAX_VARIABLE_NUMBER=32766 | |||
CGO_CFLAGS ?= $(shell $(GO) env CGO_CFLAGS) $(CGO_EXTRA_CFLAGS) | |||
endif | |||
@@ -147,6 +144,8 @@ TAR_EXCLUDES := .git data indexers queues log node_modules $(EXECUTABLE) $(FOMAN | |||
GO_DIRS := build cmd models modules routers services tests | |||
WEB_DIRS := web_src/js web_src/css | |||
ESLINT_FILES := web_src/js tools *.config.js tests/e2e | |||
STYLELINT_FILES := web_src/css web_src/js/components/*.vue | |||
SPELLCHECK_FILES := $(GO_DIRS) $(WEB_DIRS) docs/content templates options/locale/locale_en-US.ini .github | |||
EDITORCONFIG_FILES := templates .github/workflows options/locale/locale_en-US.ini | |||
@@ -375,19 +374,19 @@ lint-backend-fix: lint-go-fix lint-go-vet lint-editorconfig | |||
.PHONY: lint-js | |||
lint-js: node_modules | |||
npx eslint --color --max-warnings=0 --ext js,vue web_src/js build *.config.js tests/e2e | |||
npx eslint --color --max-warnings=0 --ext js,vue $(ESLINT_FILES) | |||
.PHONY: lint-js-fix | |||
lint-js-fix: node_modules | |||
npx eslint --color --max-warnings=0 --ext js,vue web_src/js build *.config.js tests/e2e --fix | |||
npx eslint --color --max-warnings=0 --ext js,vue $(ESLINT_FILES) --fix | |||
.PHONY: lint-css | |||
lint-css: node_modules | |||
npx stylelint --color --max-warnings=0 web_src/css web_src/js/components/*.vue | |||
npx stylelint --color --max-warnings=0 $(STYLELINT_FILES) | |||
.PHONY: lint-css-fix | |||
lint-css-fix: node_modules | |||
npx stylelint --color --max-warnings=0 web_src/css web_src/js/components/*.vue --fix | |||
npx stylelint --color --max-warnings=0 $(STYLELINT_FILES) --fix | |||
.PHONY: lint-swagger | |||
lint-swagger: node_modules | |||
@@ -435,7 +434,8 @@ lint-actions: | |||
$(GO) run $(ACTIONLINT_PACKAGE) | |||
.PHONY: lint-templates | |||
lint-templates: .venv | |||
lint-templates: .venv node_modules | |||
@node tools/lint-templates-svg.js | |||
@poetry run djlint $(shell find templates -type f -iname '*.tmpl') | |||
.PHONY: lint-yaml | |||
@@ -444,7 +444,7 @@ lint-yaml: .venv | |||
.PHONY: watch | |||
watch: | |||
@bash build/watch.sh | |||
@bash tools/watch.sh | |||
.PHONY: watch-frontend | |||
watch-frontend: node-check node_modules | |||
@@ -839,10 +839,6 @@ release-sources: | $(DIST_DIRS) | |||
release-docs: | $(DIST_DIRS) docs | |||
tar -czf $(DIST)/release/gitea-docs-$(VERSION).tar.gz -C ./docs . | |||
.PHONY: docs | |||
docs: | |||
cd docs; bash scripts/trans-copy.sh; | |||
.PHONY: deps | |||
deps: deps-frontend deps-backend deps-tools deps-py | |||
@@ -920,7 +916,7 @@ $(WEBPACK_DEST): $(WEBPACK_SOURCES) $(WEBPACK_CONFIGS) package-lock.json | |||
.PHONY: svg | |||
svg: node-check | node_modules | |||
rm -rf $(SVG_DEST_DIR) | |||
node build/generate-svg.js | |||
node tools/generate-svg.js | |||
.PHONY: svg-check | |||
svg-check: svg | |||
@@ -963,8 +959,8 @@ generate-gitignore: | |||
.PHONY: generate-images | |||
generate-images: | node_modules | |||
npm install --no-save fabric@6.0.0-beta19 imagemin-zopfli@7 | |||
node build/generate-images.js $(TAGS) | |||
npm install --no-save fabric@6.0.0-beta20 imagemin-zopfli@7 | |||
node tools/generate-images.js $(TAGS) | |||
.PHONY: generate-manpage | |||
generate-manpage: |
@@ -1,55 +1,18 @@ | |||
<p align="center"> | |||
<a href="https://gitea.io/"> | |||
<img alt="Gitea" src="https://raw.githubusercontent.com/go-gitea/gitea/main/public/assets/img/gitea.svg" width="220"/> | |||
</a> | |||
</p> | |||
<h1 align="center">Gitea - Git with a cup of tea</h1> | |||
<p align="center"> | |||
<a href="https://github.com/go-gitea/gitea/actions/workflows/release-nightly.yml?query=branch%3Amain" title="Release Nightly"> | |||
<img src="https://github.com/go-gitea/gitea/actions/workflows/release-nightly.yml/badge.svg?branch=main"> | |||
</a> | |||
<a href="https://discord.gg/Gitea" title="Join the Discord chat at https://discord.gg/Gitea"> | |||
<img src="https://img.shields.io/discord/322538954119184384.svg?logo=discord&logoColor=white&label=Discord&color=5865F2"> | |||
</a> | |||
<a href="https://app.codecov.io/gh/go-gitea/gitea" title="Codecov"> | |||
<img src="https://codecov.io/gh/go-gitea/gitea/branch/main/graph/badge.svg"> | |||
</a> | |||
<a href="https://goreportcard.com/report/code.gitea.io/gitea" title="Go Report Card"> | |||
<img src="https://goreportcard.com/badge/code.gitea.io/gitea"> | |||
</a> | |||
<a href="https://pkg.go.dev/code.gitea.io/gitea" title="GoDoc"> | |||
<img src="https://pkg.go.dev/badge/code.gitea.io/gitea?status.svg"> | |||
</a> | |||
<a href="https://github.com/go-gitea/gitea/releases/latest" title="GitHub release"> | |||
<img src="https://img.shields.io/github/release/go-gitea/gitea.svg"> | |||
</a> | |||
<a href="https://www.codetriage.com/go-gitea/gitea" title="Help Contribute to Open Source"> | |||
<img src="https://www.codetriage.com/go-gitea/gitea/badges/users.svg"> | |||
</a> | |||
<a href="https://opencollective.com/gitea" title="Become a backer/sponsor of gitea"> | |||
<img src="https://opencollective.com/gitea/tiers/backers/badge.svg?label=backers&color=brightgreen"> | |||
</a> | |||
<a href="https://opensource.org/licenses/MIT" title="License: MIT"> | |||
<img src="https://img.shields.io/badge/License-MIT-blue.svg"> | |||
</a> | |||
<a href="https://gitpod.io/#https://github.com/go-gitea/gitea"> | |||
<img | |||
src="https://img.shields.io/badge/Contribute%20with-Gitpod-908a85?logo=gitpod" | |||
alt="Contribute with Gitpod" | |||
/> | |||
</a> | |||
<a href="https://crowdin.com/project/gitea" title="Crowdin"> | |||
<img src="https://badges.crowdin.net/gitea/localized.svg"> | |||
</a> | |||
<a href="https://www.tickgit.com/browse?repo=github.com/go-gitea/gitea&branch=main" title="TODOs"> | |||
<img src="https://badgen.net/https/api.tickgit.com/badgen/github.com/go-gitea/gitea/main"> | |||
</a> | |||
</p> | |||
<p align="center"> | |||
<a href="README_ZH.md">View this document in Chinese</a> | |||
</p> | |||
# Gitea | |||
[![](https://github.com/go-gitea/gitea/actions/workflows/release-nightly.yml/badge.svg?branch=main)](https://github.com/go-gitea/gitea/actions/workflows/release-nightly.yml?query=branch%3Amain "Release Nightly") | |||
[![](https://img.shields.io/discord/322538954119184384.svg?logo=discord&logoColor=white&label=Discord&color=5865F2)](https://discord.gg/Gitea "Join the Discord chat at https://discord.gg/Gitea") | |||
[![](https://goreportcard.com/badge/code.gitea.io/gitea)](https://goreportcard.com/report/code.gitea.io/gitea "Go Report Card") | |||
[![](https://pkg.go.dev/badge/code.gitea.io/gitea?status.svg)](https://pkg.go.dev/code.gitea.io/gitea "GoDoc") | |||
[![](https://img.shields.io/github/release/go-gitea/gitea.svg)](https://github.com/go-gitea/gitea/releases/latest "GitHub release") | |||
[![](https://www.codetriage.com/go-gitea/gitea/badges/users.svg)](https://www.codetriage.com/go-gitea/gitea "Help Contribute to Open Source") | |||
[![](https://opencollective.com/gitea/tiers/backers/badge.svg?label=backers&color=brightgreen)](https://opencollective.com/gitea "Become a backer/sponsor of gitea") | |||
[![](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT "License: MIT") | |||
[![Contribute with Gitpod](https://img.shields.io/badge/Contribute%20with-Gitpod-908a85?logo=gitpod)](https://gitpod.io/#https://github.com/go-gitea/gitea) | |||
[![](https://badges.crowdin.net/gitea/localized.svg)](https://crowdin.com/project/gitea "Crowdin") | |||
[![](https://badgen.net/https/api.tickgit.com/badgen/github.com/go-gitea/gitea/main)](https://www.tickgit.com/browse?repo=github.com/go-gitea/gitea&branch=main "TODOs") | |||
[View this document in Chinese](./README_ZH.md) | |||
## Purpose | |||
@@ -1,55 +1,18 @@ | |||
<p align="center"> | |||
<a href="https://gitea.io/"> | |||
<img alt="Gitea" src="https://raw.githubusercontent.com/go-gitea/gitea/main/public/assets/img/gitea.svg" width="220"/> | |||
</a> | |||
</p> | |||
<h1 align="center">Gitea - Git with a cup of tea</h1> | |||
<p align="center"> | |||
<a href="https://github.com/go-gitea/gitea/actions/workflows/release-nightly.yml?query=branch%3Amain" title="Release Nightly"> | |||
<img src="https://github.com/go-gitea/gitea/actions/workflows/release-nightly.yml/badge.svg?branch=main"> | |||
</a> | |||
<a href="https://discord.gg/Gitea" title="Join the Discord chat at https://discord.gg/Gitea"> | |||
<img src="https://img.shields.io/discord/322538954119184384.svg"> | |||
</a> | |||
<a href="https://app.codecov.io/gh/go-gitea/gitea" title="Codecov"> | |||
<img src="https://codecov.io/gh/go-gitea/gitea/branch/main/graph/badge.svg"> | |||
</a> | |||
<a href="https://goreportcard.com/report/code.gitea.io/gitea" title="Go Report Card"> | |||
<img src="https://goreportcard.com/badge/code.gitea.io/gitea"> | |||
</a> | |||
<a href="https://pkg.go.dev/code.gitea.io/gitea" title="GoDoc"> | |||
<img src="https://pkg.go.dev/badge/code.gitea.io/gitea?status.svg"> | |||
</a> | |||
<a href="https://github.com/go-gitea/gitea/releases/latest" title="GitHub release"> | |||
<img src="https://img.shields.io/github/release/go-gitea/gitea.svg"> | |||
</a> | |||
<a href="https://www.codetriage.com/go-gitea/gitea" title="Help Contribute to Open Source"> | |||
<img src="https://www.codetriage.com/go-gitea/gitea/badges/users.svg"> | |||
</a> | |||
<a href="https://opencollective.com/gitea" title="Become a backer/sponsor of gitea"> | |||
<img src="https://opencollective.com/gitea/tiers/backers/badge.svg?label=backers&color=brightgreen"> | |||
</a> | |||
<a href="https://opensource.org/licenses/MIT" title="License: MIT"> | |||
<img src="https://img.shields.io/badge/License-MIT-blue.svg"> | |||
</a> | |||
<a href="https://gitpod.io/#https://github.com/go-gitea/gitea"> | |||
<img | |||
src="https://img.shields.io/badge/Contribute%20with-Gitpod-908a85?logo=gitpod" | |||
alt="Contribute with Gitpod" | |||
/> | |||
</a> | |||
<a href="https://crowdin.com/project/gitea" title="Crowdin"> | |||
<img src="https://badges.crowdin.net/gitea/localized.svg"> | |||
</a> | |||
<a href="https://www.tickgit.com/browse?repo=github.com/go-gitea/gitea&branch=main" title="TODOs"> | |||
<img src="https://badgen.net/https/api.tickgit.com/badgen/github.com/go-gitea/gitea/main"> | |||
</a> | |||
</p> | |||
<p align="center"> | |||
<a href="README.md">View this document in English</a> | |||
</p> | |||
# Gitea | |||
[![](https://github.com/go-gitea/gitea/actions/workflows/release-nightly.yml/badge.svg?branch=main)](https://github.com/go-gitea/gitea/actions/workflows/release-nightly.yml?query=branch%3Amain "Release Nightly") | |||
[![](https://img.shields.io/discord/322538954119184384.svg?logo=discord&logoColor=white&label=Discord&color=5865F2)](https://discord.gg/Gitea "Join the Discord chat at https://discord.gg/Gitea") | |||
[![](https://goreportcard.com/badge/code.gitea.io/gitea)](https://goreportcard.com/report/code.gitea.io/gitea "Go Report Card") | |||
[![](https://pkg.go.dev/badge/code.gitea.io/gitea?status.svg)](https://pkg.go.dev/code.gitea.io/gitea "GoDoc") | |||
[![](https://img.shields.io/github/release/go-gitea/gitea.svg)](https://github.com/go-gitea/gitea/releases/latest "GitHub release") | |||
[![](https://www.codetriage.com/go-gitea/gitea/badges/users.svg)](https://www.codetriage.com/go-gitea/gitea "Help Contribute to Open Source") | |||
[![](https://opencollective.com/gitea/tiers/backers/badge.svg?label=backers&color=brightgreen)](https://opencollective.com/gitea "Become a backer/sponsor of gitea") | |||
[![](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT "License: MIT") | |||
[![Contribute with Gitpod](https://img.shields.io/badge/Contribute%20with-Gitpod-908a85?logo=gitpod)](https://gitpod.io/#https://github.com/go-gitea/gitea) | |||
[![](https://badges.crowdin.net/gitea/localized.svg)](https://crowdin.com/project/gitea "Crowdin") | |||
[![](https://badgen.net/https/api.tickgit.com/badgen/github.com/go-gitea/gitea/main)](https://www.tickgit.com/browse?repo=github.com/go-gitea/gitea&branch=main "TODOs") | |||
[View this document in English](./README.md) | |||
## 目标 | |||
@@ -441,7 +441,7 @@ INTERNAL_TOKEN = | |||
;INTERNAL_TOKEN_URI = file:/etc/gitea/internal_token | |||
;; | |||
;; How long to remember that a user is logged in before requiring relogin (in days) | |||
;LOGIN_REMEMBER_DAYS = 7 | |||
;LOGIN_REMEMBER_DAYS = 31 | |||
;; | |||
;; Name of the cookie used to store the current username. | |||
;COOKIE_USERNAME = gitea_awesome | |||
@@ -2608,7 +2608,7 @@ LEVEL = Info | |||
;ENDLESS_TASK_TIMEOUT = 3h | |||
;; Timeout to cancel the jobs which have waiting status, but haven't been picked by a runner for a long time | |||
;ABANDONED_JOB_TIMEOUT = 24h | |||
;; Strings committers can place inside a commit message to skip executing the corresponding actions workflow | |||
;; Strings committers can place inside a commit message or PR title to skip executing the corresponding actions workflow | |||
;SKIP_WORKFLOW_STRINGS = [skip ci],[ci skip],[no ci],[skip actions],[actions skip] | |||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; |
@@ -37,7 +37,7 @@ gitea embedded list [--include-vendored] [patterns...] | |||
- 列出所有模板文件,无论在哪个虚拟目录下:`**.tmpl` | |||
- 列出所有邮件模板文件:`templates/mail/**.tmpl` | |||
- 列出 `public/img` 目录下的所有文件:`public/img/**` | |||
- 列出 `public/assets/img` 目录下的所有文件：`public/assets/img/**` | |||
不要忘记为模式使用引号,因为空格、`*` 和其他字符可能对命令行解释器有特殊含义。 | |||
@@ -49,8 +49,8 @@ gitea embedded list [--include-vendored] [patterns...] | |||
```sh | |||
$ gitea embedded list '**openid**' | |||
public/img/auth/openid_connect.svg | |||
public/img/openid-16x16.png | |||
public/assets/img/auth/openid_connect.svg | |||
public/assets/img/openid-16x16.png | |||
templates/user/auth/finalize_openid.tmpl | |||
templates/user/auth/signin_openid.tmpl | |||
templates/user/auth/signup_openid_connect.tmpl |
@@ -528,7 +528,7 @@ And the following unique queues: | |||
- `INSTALL_LOCK`: **false**: Controls access to the installation page. When set to "true", the installation page is not accessible. | |||
- `SECRET_KEY`: **\<random at every install\>**: Global secret key. This key is VERY IMPORTANT, if you lost it, the data encrypted by it (like 2FA secret) can't be decrypted anymore. | |||
- `SECRET_KEY_URI`: **_empty_**: Instead of defining SECRET_KEY, this option can be used to use the key stored in a file (example value: `file:/etc/gitea/secret_key`). It shouldn't be lost like SECRET_KEY. | |||
- `LOGIN_REMEMBER_DAYS`: **7**: Cookie lifetime, in days. | |||
- `LOGIN_REMEMBER_DAYS`: **31**: How long to remember that a user is logged in before requiring relogin (in days). | |||
- `COOKIE_REMEMBER_NAME`: **gitea\_incredible**: Name of cookie used to store authentication | |||
information. | |||
- `REVERSE_PROXY_AUTHENTICATION_USER`: **X-WEBAUTH-USER**: Header name for reverse proxy | |||
@@ -590,7 +590,7 @@ And the following unique queues: | |||
## OpenID (`openid`) | |||
- `ENABLE_OPENID_SIGNIN`: **false**: Allow authentication in via OpenID. | |||
- `ENABLE_OPENID_SIGNIN`: **true**: Allow authentication via OpenID. | |||
- `ENABLE_OPENID_SIGNUP`: **! DISABLE\_REGISTRATION**: Allow registering via OpenID. | |||
- `WHITELISTED_URIS`: **_empty_**: If non-empty, list of POSIX regex patterns matching | |||
OpenID URI's to permit. | |||
@@ -1406,7 +1406,7 @@ PROXY_HOSTS = *.github.com | |||
- `ZOMBIE_TASK_TIMEOUT`: **10m**: Timeout to stop the task which have running status, but haven't been updated for a long time | |||
- `ENDLESS_TASK_TIMEOUT`: **3h**: Timeout to stop the tasks which have running status and continuous updates, but don't end for a long time | |||
- `ABANDONED_JOB_TIMEOUT`: **24h**: Timeout to cancel the jobs which have waiting status, but haven't been picked by a runner for a long time | |||
- `SKIP_WORKFLOW_STRINGS`: **[skip ci],[ci skip],[no ci],[skip actions],[actions skip]**: Strings committers can place inside a commit message to skip executing the corresponding actions workflow | |||
- `SKIP_WORKFLOW_STRINGS`: **[skip ci],[ci skip],[no ci],[skip actions],[actions skip]**: Strings committers can place inside a commit message or PR title to skip executing the corresponding actions workflow | |||
`DEFAULT_ACTIONS_URL` indicates where the Gitea Actions runners should find the actions with relative path. | |||
For example, `uses: actions/checkout@v4` means `https://github.com/actions/checkout@v4` since the value of `DEFAULT_ACTIONS_URL` is `github`. |
@@ -507,7 +507,7 @@ Gitea 创建以下非唯一队列: | |||
- `INSTALL_LOCK`: **false**:控制是否能够访问安装向导页面,设置为 `true` 则禁止访问安装向导页面。 | |||
- `SECRET_KEY`: **\<每次安装时随机生成\>**:全局服务器安全密钥。这个密钥非常重要,如果丢失将无法解密加密的数据(例如 2FA)。 | |||
- `SECRET_KEY_URI`: **_empty_**:与定义 `SECRET_KEY` 不同,此选项可用于使用存储在文件中的密钥(示例值:`file:/etc/gitea/secret_key`)。它不应该像 `SECRET_KEY` 一样容易丢失。 | |||
- `LOGIN_REMEMBER_DAYS`: **7**:Cookie 保存时间,单位为天。 | |||
- `LOGIN_REMEMBER_DAYS`: **31**:在要求重新登录之前,记住用户的登录状态多长时间(以天为单位)。 | |||
- `COOKIE_REMEMBER_NAME`: **gitea\_incredible**:保存自动登录信息的 Cookie 名称。 | |||
- `REVERSE_PROXY_AUTHENTICATION_USER`: **X-WEBAUTH-USER**:反向代理认证的 HTTP 头部名称,用于提供用户信息。 | |||
- `REVERSE_PROXY_AUTHENTICATION_EMAIL`: **X-WEBAUTH-EMAIL**:反向代理认证的 HTTP 头部名称,用于提供邮箱信息。 | |||
@@ -562,7 +562,7 @@ Gitea 创建以下非唯一队列: | |||
## OpenID (`openid`) | |||
- `ENABLE_OPENID_SIGNIN`: **false**:允许通过OpenID进行身份验证。 | |||
- `ENABLE_OPENID_SIGNIN`: **true**:允许通过OpenID进行身份验证。 | |||
- `ENABLE_OPENID_SIGNUP`: **! DISABLE\_REGISTRATION**:允许通过OpenID进行注册。 | |||
- `WHITELISTED_URIS`: **_empty_**:如果非空,是一组匹配OpenID URI的POSIX正则表达式模式,用于允许访问。 | |||
- `BLACKLISTED_URIS`: **_empty_**:如果非空,是一组匹配OpenID URI的POSIX正则表达式模式,用于阻止访问。 |
@@ -17,6 +17,12 @@ menu: | |||
# Repository indexer | |||
## Builtin repository code search without indexer | |||
Users could do repository-level code search without setting up a repository indexer. | |||
The builtin code search is based on the `git grep` command, which is fast and efficient for small repositories. | |||
Better code search support could be achieved by setting up the repository indexer. | |||
## Setting up the repository indexer | |||
Gitea can search through the files of the repositories by enabling this function in your [`app.ini`](administration/config-cheat-sheet.md): |
@@ -47,7 +47,7 @@ We recommend [Google HTML/CSS Style Guide](https://google.github.io/styleguide/h | |||
9. Avoid unnecessary `!important` in CSS, add comments to explain why it's necessary if it can't be avoided. | |||
10. Avoid mixing different events in one event listener, prefer to use individual event listeners for every event. | |||
11. Custom event names are recommended to use `ce-` prefix. | |||
12. Prefer using Tailwind CSS which is available via `tw-` prefix, e.g. `tw-relative`. Gitea's helper CSS classes use `gt-` prefix (`gt-df`), while Gitea's own private framework-level CSS classes use `g-` prefix (`g-modal-confirm`). | |||
12. Prefer using Tailwind CSS which is available via `tw-` prefix, e.g. `tw-relative`. Gitea's helper CSS classes use `gt-` prefix (`gt-word-break`), while Gitea's own private framework-level CSS classes use `g-` prefix (`g-modal-confirm`). | |||
13. Avoid inline scripts & styles as much as possible, it's recommended to put JS code into JS files and use CSS classes. If inline scripts & styles are unavoidable, explain the reason why it can't be avoided. | |||
### Accessibility / ARIA | |||
@@ -118,7 +118,7 @@ However, there are still some special cases, so the current guideline is: | |||
### Show/Hide Elements | |||
* Vue components are recommended to use `v-if` and `v-show` to show/hide elements. | |||
* Go template code should use Gitea's `.gt-hidden` and `showElem()/hideElem()/toggleElem()`, see more details in `.gt-hidden`'s comment. | |||
* Go template code should use `.tw-hidden` and `showElem()/hideElem()/toggleElem()`, see more details in `.tw-hidden`'s comment. | |||
### Styles and Attributes in Go HTML Template | |||
@@ -47,7 +47,7 @@ HTML 页面由[Go HTML Template](https://pkg.go.dev/html/template)渲染。 | |||
9. 避免在 CSS 中使用不必要的`!important`,如果无法避免,添加注释解释为什么需要它。 | |||
10. 避免在一个事件监听器中混合不同的事件,优先为每个事件使用独立的事件监听器。 | |||
11. 推荐使用自定义事件名称前缀`ce-`。 | |||
12. 建议使用 Tailwind CSS,它可以通过 `tw-` 前缀获得,例如 `tw-relative`. Gitea 自身的助手类 CSS 使用 `gt-` 前缀(`gt-df`),Gitea 自身的私有框架级 CSS 类使用 `g-` 前缀(`g-modal-confirm`)。 | |||
12. 建议使用 Tailwind CSS,它可以通过 `tw-` 前缀获得,例如 `tw-relative`. Gitea 自身的助手类 CSS 使用 `gt-` 前缀(`gt-word-break`),Gitea 自身的私有框架级 CSS 类使用 `g-` 前缀(`g-modal-confirm`)。 | |||
13. 尽量避免内联脚本和样式,建议将JS代码放入JS文件中并使用CSS类。如果内联脚本和样式不可避免,请解释无法避免的原因。 | |||
### 可访问性 / ARIA | |||
@@ -117,7 +117,7 @@ Gitea 使用一些补丁使 Fomantic UI 更具可访问性(参见 `aria.md`) | |||
### 显示/隐藏元素 | |||
* 推荐在Vue组件中使用`v-if`和`v-show`来显示/隐藏元素。 | |||
* Go 模板代码应使用 Gitea 的 `.gt-hidden` 和 `showElem()/hideElem()/toggleElem()` 来显示/隐藏元素,请参阅`.gt-hidden`的注释以获取更多详细信息。 | |||
* Go 模板代码应使用 `.tw-hidden` 和 `showElem()/hideElem()/toggleElem()` 来显示/隐藏元素,请参阅`.tw-hidden`的注释以获取更多详细信息。 | |||
### Go HTML 模板中的样式和属性 | |||
@@ -214,7 +214,7 @@ REPO_INDEXER_CONN_STR = http://elastic:changeme@localhost:9200 | |||
### Building and adding SVGs | |||
SVG icons are built using the `make svg` target which compiles the icon sources defined in `build/generate-svg.js` into the output directory `public/assets/img/svg`. Custom icons can be added in the `web_src/svg` directory. | |||
SVG icons are built using the `make svg` target which compiles the icon sources into the output directory `public/assets/img/svg`. Custom icons can be added in the `web_src/svg` directory. | |||
### Building the Logo | |||
@@ -333,14 +333,9 @@ Documentation for the website is found in `docs/`. If you change this you | |||
can test your changes to ensure that they pass continuous integration using: | |||
```bash | |||
# from the docs directory within Gitea | |||
make trans-copy clean build | |||
make lint-md | |||
``` | |||
You will require a copy of [Hugo](https://gohugo.io/) to run this task. Please | |||
note: this may generate a number of untracked Git objects, which will need to | |||
be cleaned up. | |||
## Visual Studio Code | |||
A `launch.json` and `tasks.json` are provided within `contrib/ide/vscode` for |
@@ -201,7 +201,7 @@ REPO_INDEXER_CONN_STR = http://elastic:changeme@localhost:9200 | |||
### 构建和添加 SVGs | |||
SVG 图标是使用 `make svg` 目标构建的,该目标将 `build/generate-svg.js` 中定义的图标源编译到输出目录 `public/img/svg` 中。可以在 `web_src/svg` 目录中添加自定义图标。 | |||
SVG 图标是使用 `make svg` 命令构建的,该命令将图标资源编译到输出目录 `public/assets/img/svg` 中。可以在 `web_src/svg` 目录中添加自定义图标。 | |||
### 构建 Logo | |||
@@ -307,13 +307,9 @@ TAGS="bindata sqlite sqlite_unlock_notify" make build test-sqlite | |||
该网站的文档位于 `docs/` 中。如果你改变了文档内容,你可以使用以下测试方法进行持续集成: | |||
```bash | |||
# 来自 Gitea 中的 docs 目录 | |||
make trans-copy clean build | |||
make lint-md | |||
``` | |||
运行此任务依赖于 [Hugo](https://gohugo.io/)。请注意:这可能会生成一些未跟踪的 Git 对象, | |||
需要被清理干净。 | |||
## Visual Studio Code | |||
`contrib/ide/vscode` 中为 Visual Studio Code 提供了 `launch.json` 和 `tasks.json`。查看 |
@@ -87,6 +87,9 @@ _Symbols used in table:_ | |||
| Git Blame | ✓ | ✘ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | | |||
| Visual comparison of image changes | ✓ | ✘ | ✓ | ? | ? | ? | ✘ | ✘ | | |||
- Gitea has built-in repository-level code search
- Better code search support could be achieved by [using a repository indexer](administration/repo-indexer.md) | |||
## Issue Tracker | |||
| Feature | Gitea | Gogs | GitHub EE | GitLab CE | GitLab EE | BitBucket | RhodeCode CE | RhodeCode EE | |
@@ -1,34 +0,0 @@ | |||
#!/usr/bin/env bash
set -e

#
# This script is used to copy the en-US content to our available locales as a
# fallback to always show all pages when displaying a specific locale that is
# missing some documents to be translated.
#
# Just execute the script without any argument and you will get the missing
# files copied into the content folder. We are calling this script within the CI
# server simply by `make trans-copy`.
#

declare -a LOCALES=(
  "fr-fr"
  "nl-nl"
  "pt-br"
  "zh-cn"
  "zh-tw"
)

# Quote "$0"/"$(dirname ...)" so a checkout path containing spaces works.
ROOT=$(realpath "$(dirname "$0")"/..)

# Use find -print0 with a NUL-delimited read loop instead of
# `for SOURCE in $(find ...)`: the unquoted command substitution word-splits
# on whitespace in filenames, and the bare pattern *.en-us.md could be
# glob-expanded by the shell before find ever sees it.
while IFS= read -r -d '' SOURCE; do
  for LOCALE in "${LOCALES[@]}"; do
    DEST="${SOURCE%.en-us.md}.${LOCALE}.md"

    if [[ ! -f ${DEST} ]]; then
      cp "${SOURCE}" "${DEST}"
      # Rewrite the locale marker inside the copied file; .bak suffix keeps
      # sed -i portable to BSD/macOS, the backup is removed right after.
      sed -i.bak "s/en\-us/${LOCALE}/g" "${DEST}"
      rm "${DEST}.bak"
    fi
  done
done < <(find "${ROOT}/content" -type f -iname '*.en-us.md' -print0)
@@ -1,27 +1,27 @@ | |||
module code.gitea.io/gitea | |||
go 1.21 | |||
go 1.22 | |||
require ( | |||
code.gitea.io/actions-proto-go v0.3.1 | |||
code.gitea.io/actions-proto-go v0.4.0 | |||
code.gitea.io/gitea-vet v0.2.3 | |||
code.gitea.io/sdk/gitea v0.17.1 | |||
codeberg.org/gusted/mcaptcha v0.0.0-20220723083913-4f3072e1d570 | |||
gitea.com/go-chi/binding v0.0.0-20230415142243-04b515c6d669 | |||
connectrpc.com/connect v1.15.0 | |||
gitea.com/go-chi/binding v0.0.0-20240316035258-17450c5f3028 | |||
gitea.com/go-chi/cache v0.2.0 | |||
gitea.com/go-chi/captcha v0.0.0-20230415143339-2c0754df4384 | |||
gitea.com/go-chi/session v0.0.0-20230613035928-39541325faa3 | |||
gitea.com/go-chi/captcha v0.0.0-20240315150714-fb487f629098 | |||
gitea.com/go-chi/session v0.0.0-20240316035857-16768d98ec96 | |||
gitea.com/lunny/dingtalk_webhook v0.0.0-20171025031554-e3534c89ef96 | |||
gitea.com/lunny/levelqueue v0.4.2-0.20230414023320-3c0159fe0fe4 | |||
github.com/42wim/sshsig v0.0.0-20211121163825-841cf5bbc121 | |||
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 | |||
github.com/NYTimes/gziphandler v1.1.1 | |||
github.com/PuerkitoBio/goquery v1.8.1 | |||
github.com/PuerkitoBio/goquery v1.9.1 | |||
github.com/alecthomas/chroma/v2 v2.13.0 | |||
github.com/blakesmith/ar v0.0.0-20190502131153-809d4375e1fb | |||
github.com/blevesearch/bleve/v2 v2.3.10 | |||
github.com/bufbuild/connect-go v1.10.0 | |||
github.com/buildkite/terminal-to-html/v3 v3.10.1 | |||
github.com/buildkite/terminal-to-html/v3 v3.11.0 | |||
github.com/caddyserver/certmagic v0.20.0 | |||
github.com/chi-middleware/proxy v1.1.1 | |||
github.com/denisenkom/go-mssqldb v0.12.3 | |||
@@ -30,33 +30,33 @@ require ( | |||
github.com/djherbis/nio/v3 v3.0.1 | |||
github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 | |||
github.com/dustin/go-humanize v1.0.1 | |||
github.com/editorconfig/editorconfig-core-go/v2 v2.6.0 | |||
github.com/editorconfig/editorconfig-core-go/v2 v2.6.1 | |||
github.com/emersion/go-imap v1.2.1 | |||
github.com/emirpasic/gods v1.18.1 | |||
github.com/ethantkoenig/rupture v1.0.1 | |||
github.com/felixge/fgprof v0.9.3 | |||
github.com/felixge/fgprof v0.9.4 | |||
github.com/fsnotify/fsnotify v1.7.0 | |||
github.com/gliderlabs/ssh v0.3.6 | |||
github.com/go-ap/activitypub v0.0.0-20231114162308-e219254dc5c9 | |||
github.com/go-ap/activitypub v0.0.0-20240316125321-b61fd6a83225 | |||
github.com/go-ap/jsonld v0.0.0-20221030091449-f2a191312c73 | |||
github.com/go-chi/chi/v5 v5.0.11 | |||
github.com/go-chi/chi/v5 v5.0.12 | |||
github.com/go-chi/cors v1.2.1 | |||
github.com/go-co-op/gocron v1.37.0 | |||
github.com/go-enry/go-enry/v2 v2.8.6 | |||
github.com/go-enry/go-enry/v2 v2.8.7 | |||
github.com/go-fed/httpsig v1.1.1-0.20201223112313-55836744818e | |||
github.com/go-git/go-billy/v5 v5.5.0 | |||
github.com/go-git/go-git/v5 v5.11.0 | |||
github.com/go-ldap/ldap/v3 v3.4.6 | |||
github.com/go-sql-driver/mysql v1.7.1 | |||
github.com/go-sql-driver/mysql v1.8.0 | |||
github.com/go-swagger/go-swagger v0.30.5 | |||
github.com/go-testfixtures/testfixtures/v3 v3.10.0 | |||
github.com/go-webauthn/webauthn v0.10.0 | |||
github.com/go-webauthn/webauthn v0.10.2 | |||
github.com/gobwas/glob v0.2.3 | |||
github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f | |||
github.com/gogs/go-gogs-client v0.0.0-20210131175652-1d7215cd8d85 | |||
github.com/golang-jwt/jwt/v5 v5.2.0 | |||
github.com/golang-jwt/jwt/v5 v5.2.1 | |||
github.com/google/go-github/v57 v57.0.0 | |||
github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 | |||
github.com/google/pprof v0.0.0-20240227163752-401108e1b7e7 | |||
github.com/google/uuid v1.6.0 | |||
github.com/gorilla/feeds v1.1.2 | |||
github.com/gorilla/sessions v1.2.2 | |||
@@ -64,55 +64,55 @@ require ( | |||
github.com/hashicorp/golang-lru/v2 v2.0.7 | |||
github.com/huandu/xstrings v1.4.0 | |||
github.com/jaytaylor/html2text v0.0.0-20230321000545-74c2419ad056 | |||
github.com/jhillyerd/enmime v1.1.0 | |||
github.com/jhillyerd/enmime v1.2.0 | |||
github.com/json-iterator/go v1.1.12 | |||
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 | |||
github.com/keybase/go-crypto v0.0.0-20200123153347-de78d2cb44f4 | |||
github.com/klauspost/compress v1.17.4 | |||
github.com/klauspost/cpuid/v2 v2.2.6 | |||
github.com/klauspost/compress v1.17.7 | |||
github.com/klauspost/cpuid/v2 v2.2.7 | |||
github.com/lib/pq v1.10.9 | |||
github.com/markbates/goth v1.78.0 | |||
github.com/markbates/goth v1.79.0 | |||
github.com/mattn/go-isatty v0.0.20 | |||
github.com/mattn/go-sqlite3 v1.14.22 | |||
github.com/meilisearch/meilisearch-go v0.26.1 | |||
github.com/meilisearch/meilisearch-go v0.26.2 | |||
github.com/mholt/archiver/v3 v3.5.1 | |||
github.com/microcosm-cc/bluemonday v1.0.26 | |||
github.com/minio/minio-go/v7 v7.0.66 | |||
github.com/minio/minio-go/v7 v7.0.69 | |||
github.com/msteinert/pam v1.2.0 | |||
github.com/nektos/act v0.2.52 | |||
github.com/niklasfasching/go-org v1.7.0 | |||
github.com/olivere/elastic/v7 v7.0.32 | |||
github.com/opencontainers/go-digest v1.0.0 | |||
github.com/opencontainers/image-spec v1.1.0-rc6 | |||
github.com/opencontainers/image-spec v1.1.0 | |||
github.com/pkg/errors v0.9.1 | |||
github.com/pquerna/otp v1.4.0 | |||
github.com/prometheus/client_golang v1.18.0 | |||
github.com/prometheus/client_golang v1.19.0 | |||
github.com/quasoft/websspi v1.1.2 | |||
github.com/redis/go-redis/v9 v9.4.0 | |||
github.com/redis/go-redis/v9 v9.5.1 | |||
github.com/robfig/cron/v3 v3.0.1 | |||
github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 | |||
github.com/sassoftware/go-rpmutils v0.2.1-0.20240124161140-277b154961dd | |||
github.com/sassoftware/go-rpmutils v0.3.0 | |||
github.com/sergi/go-diff v1.3.1 | |||
github.com/shurcooL/vfsgen v0.0.0-20230704071429-0000e147ea92 | |||
github.com/stretchr/testify v1.8.4 | |||
github.com/stretchr/testify v1.9.0 | |||
github.com/syndtr/goleveldb v1.0.0 | |||
github.com/tstranex/u2f v1.0.0 | |||
github.com/ulikunitz/xz v0.5.11 | |||
github.com/urfave/cli/v2 v2.27.1 | |||
github.com/xanzy/go-gitlab v0.96.0 | |||
github.com/xanzy/go-gitlab v0.100.0 | |||
github.com/xeipuuv/gojsonschema v1.2.0 | |||
github.com/yohcop/openid-go v1.0.1 | |||
github.com/yuin/goldmark v1.6.0 | |||
github.com/yuin/goldmark v1.7.0 | |||
github.com/yuin/goldmark-highlighting/v2 v2.0.0-20230729083705-37449abec8cc | |||
github.com/yuin/goldmark-meta v1.1.0 | |||
golang.org/x/crypto v0.18.0 | |||
golang.org/x/crypto v0.21.0 | |||
golang.org/x/image v0.15.0 | |||
golang.org/x/net v0.20.0 | |||
golang.org/x/oauth2 v0.16.0 | |||
golang.org/x/sys v0.16.0 | |||
golang.org/x/net v0.22.0 | |||
golang.org/x/oauth2 v0.18.0 | |||
golang.org/x/sys v0.18.0 | |||
golang.org/x/text v0.14.0 | |||
golang.org/x/tools v0.17.0 | |||
google.golang.org/grpc v1.60.1 | |||
golang.org/x/tools v0.19.0 | |||
google.golang.org/grpc v1.62.1 | |||
google.golang.org/protobuf v1.33.0 | |||
gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df | |||
gopkg.in/ini.v1 v1.67.0 | |||
@@ -120,23 +120,24 @@ require ( | |||
mvdan.cc/xurls/v2 v2.5.0 | |||
strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251 | |||
xorm.io/builder v0.3.13 | |||
xorm.io/xorm v1.3.7 | |||
xorm.io/xorm v1.3.8 | |||
) | |||
require ( | |||
cloud.google.com/go/compute v1.23.3 // indirect | |||
cloud.google.com/go/compute v1.25.1 // indirect | |||
cloud.google.com/go/compute/metadata v0.2.3 // indirect | |||
dario.cat/mergo v1.0.0 // indirect | |||
filippo.io/edwards25519 v1.1.0 // indirect | |||
git.sr.ht/~mariusor/go-xsd-duration v0.0.0-20220703122237-02e73435a078 // indirect | |||
github.com/ClickHouse/ch-go v0.61.1 // indirect | |||
github.com/ClickHouse/clickhouse-go/v2 v2.18.0 // indirect | |||
github.com/ClickHouse/ch-go v0.61.5 // indirect | |||
github.com/ClickHouse/clickhouse-go/v2 v2.22.0 // indirect | |||
github.com/DataDog/zstd v1.5.5 // indirect | |||
github.com/Masterminds/goutils v1.1.1 // indirect | |||
github.com/Masterminds/semver/v3 v3.2.1 // indirect | |||
github.com/Masterminds/sprig/v3 v3.2.3 // indirect | |||
github.com/Microsoft/go-winio v0.6.1 // indirect | |||
github.com/ProtonMail/go-crypto v1.0.0 // indirect | |||
github.com/RoaringBitmap/roaring v1.7.0 // indirect | |||
github.com/RoaringBitmap/roaring v1.9.0 // indirect | |||
github.com/andybalholm/brotli v1.1.0 // indirect | |||
github.com/andybalholm/cascadia v1.3.2 // indirect | |||
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be // indirect | |||
@@ -144,12 +145,12 @@ require ( | |||
github.com/aymerick/douceur v0.2.0 // indirect | |||
github.com/beorn7/perks v1.0.1 // indirect | |||
github.com/bits-and-blooms/bitset v1.13.0 // indirect | |||
github.com/blevesearch/bleve_index_api v1.1.5 // indirect | |||
github.com/blevesearch/geo v0.1.19 // indirect | |||
github.com/blevesearch/bleve_index_api v1.1.6 // indirect | |||
github.com/blevesearch/geo v0.1.20 // indirect | |||
github.com/blevesearch/go-porterstemmer v1.0.3 // indirect | |||
github.com/blevesearch/gtreap v0.1.1 // indirect | |||
github.com/blevesearch/mmap-go v1.0.4 // indirect | |||
github.com/blevesearch/scorch_segment_api/v2 v2.2.6 // indirect | |||
github.com/blevesearch/scorch_segment_api/v2 v2.2.8 // indirect | |||
github.com/blevesearch/segment v0.9.1 // indirect | |||
github.com/blevesearch/snowballstem v0.9.0 // indirect | |||
github.com/blevesearch/upsidedown_store_api v1.0.2 // indirect | |||
@@ -165,7 +166,7 @@ require ( | |||
github.com/cespare/xxhash/v2 v2.2.0 // indirect | |||
github.com/cloudflare/circl v1.3.7 // indirect | |||
github.com/couchbase/go-couchbase v0.1.1 // indirect | |||
github.com/couchbase/gomemcached v0.3.0 // indirect | |||
github.com/couchbase/gomemcached v0.3.1 // indirect | |||
github.com/couchbase/goutils v0.1.2 // indirect | |||
github.com/cpuguy83/go-md2man/v2 v2.0.3 // indirect | |||
github.com/cyphar/filepath-securejoin v0.2.4 // indirect | |||
@@ -176,32 +177,32 @@ require ( | |||
github.com/emersion/go-sasl v0.0.0-20231106173351-e73c9f7bad43 // indirect | |||
github.com/fatih/color v1.16.0 // indirect | |||
github.com/felixge/httpsnoop v1.0.4 // indirect | |||
github.com/fxamacker/cbor/v2 v2.5.0 // indirect | |||
github.com/go-ap/errors v0.0.0-20231003111023-183eef4b31b7 // indirect | |||
github.com/fxamacker/cbor/v2 v2.6.0 // indirect | |||
github.com/go-ap/errors v0.0.0-20240304112515-6077fa9c17b0 // indirect | |||
github.com/go-asn1-ber/asn1-ber v1.5.5 // indirect | |||
github.com/go-enry/go-oniguruma v1.2.1 // indirect | |||
github.com/go-faster/city v1.0.1 // indirect | |||
github.com/go-faster/errors v0.7.1 // indirect | |||
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect | |||
github.com/go-openapi/analysis v0.22.2 // indirect | |||
github.com/go-openapi/errors v0.21.0 // indirect | |||
github.com/go-openapi/inflect v0.19.0 // indirect | |||
github.com/go-openapi/jsonpointer v0.20.2 // indirect | |||
github.com/go-openapi/jsonreference v0.20.4 // indirect | |||
github.com/go-openapi/loads v0.21.5 // indirect | |||
github.com/go-openapi/runtime v0.26.2 // indirect | |||
github.com/go-openapi/spec v0.20.14 // indirect | |||
github.com/go-openapi/strfmt v0.22.0 // indirect | |||
github.com/go-openapi/swag v0.22.7 // indirect | |||
github.com/go-openapi/validate v0.22.6 // indirect | |||
github.com/go-webauthn/x v0.1.6 // indirect | |||
github.com/go-openapi/analysis v0.23.0 // indirect | |||
github.com/go-openapi/errors v0.22.0 // indirect | |||
github.com/go-openapi/inflect v0.21.0 // indirect | |||
github.com/go-openapi/jsonpointer v0.21.0 // indirect | |||
github.com/go-openapi/jsonreference v0.21.0 // indirect | |||
github.com/go-openapi/loads v0.22.0 // indirect | |||
github.com/go-openapi/runtime v0.28.0 // indirect | |||
github.com/go-openapi/spec v0.21.0 // indirect | |||
github.com/go-openapi/strfmt v0.23.0 // indirect | |||
github.com/go-openapi/swag v0.23.0 // indirect | |||
github.com/go-openapi/validate v0.24.0 // indirect | |||
github.com/go-webauthn/x v0.1.9 // indirect | |||
github.com/goccy/go-json v0.10.2 // indirect | |||
github.com/golang-jwt/jwt/v4 v4.5.0 // indirect | |||
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect | |||
github.com/golang-sql/sqlexp v0.1.0 // indirect | |||
github.com/golang/geo v0.0.0-20230421003525-6adc56603217 // indirect | |||
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect | |||
github.com/golang/protobuf v1.5.3 // indirect | |||
github.com/golang/protobuf v1.5.4 // indirect | |||
github.com/golang/snappy v0.0.4 // indirect | |||
github.com/google/go-querystring v1.1.0 // indirect | |||
github.com/google/go-tpm v0.9.0 // indirect | |||
@@ -246,11 +247,11 @@ require ( | |||
github.com/pierrec/lz4/v4 v4.1.21 // indirect | |||
github.com/pjbgf/sha1cd v0.3.0 // indirect | |||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect | |||
github.com/prometheus/client_model v0.5.0 // indirect | |||
github.com/prometheus/common v0.46.0 // indirect | |||
github.com/prometheus/procfs v0.12.0 // indirect | |||
github.com/rhysd/actionlint v1.6.26 // indirect | |||
github.com/rivo/uniseg v0.4.4 // indirect | |||
github.com/prometheus/client_model v0.6.0 // indirect | |||
github.com/prometheus/common v0.50.0 // indirect | |||
github.com/prometheus/procfs v0.13.0 // indirect | |||
github.com/rhysd/actionlint v1.6.27 // indirect | |||
github.com/rivo/uniseg v0.4.7 // indirect | |||
github.com/rogpeppe/go-internal v1.12.0 // indirect | |||
github.com/rs/xid v1.5.0 // indirect | |||
github.com/russross/blackfriday/v2 v2.1.0 // indirect | |||
@@ -260,7 +261,7 @@ require ( | |||
github.com/shopspring/decimal v1.3.1 // indirect | |||
github.com/shurcooL/httpfs v0.0.0-20230704072500-f1e31cf0ba5c // indirect | |||
github.com/sirupsen/logrus v1.9.3 // indirect | |||
github.com/skeema/knownhosts v1.2.1 // indirect | |||
github.com/skeema/knownhosts v1.2.2 // indirect | |||
github.com/sourcegraph/conc v0.3.0 // indirect | |||
github.com/spf13/afero v1.11.0 // indirect | |||
github.com/spf13/cast v1.6.0 // indirect | |||
@@ -271,28 +272,28 @@ require ( | |||
github.com/toqueteos/webbrowser v1.2.0 // indirect | |||
github.com/unknwon/com v1.0.1 // indirect | |||
github.com/valyala/bytebufferpool v1.0.0 // indirect | |||
github.com/valyala/fasthttp v1.51.0 // indirect | |||
github.com/valyala/fasthttp v1.52.0 // indirect | |||
github.com/valyala/fastjson v1.6.4 // indirect | |||
github.com/x448/float16 v0.8.4 // indirect | |||
github.com/xanzy/ssh-agent v0.3.3 // indirect | |||
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect | |||
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect | |||
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect | |||
github.com/xrash/smetrics v0.0.0-20231213231151-1d8dd44e695e // indirect | |||
github.com/xrash/smetrics v0.0.0-20240312152122-5f08fbb34913 // indirect | |||
github.com/zeebo/blake3 v0.2.3 // indirect | |||
go.etcd.io/bbolt v1.3.8 // indirect | |||
go.mongodb.org/mongo-driver v1.13.1 // indirect | |||
go.opentelemetry.io/otel v1.22.0 // indirect | |||
go.opentelemetry.io/otel/trace v1.22.0 // indirect | |||
go.etcd.io/bbolt v1.3.9 // indirect | |||
go.mongodb.org/mongo-driver v1.14.0 // indirect | |||
go.opentelemetry.io/otel v1.24.0 // indirect | |||
go.opentelemetry.io/otel/trace v1.24.0 // indirect | |||
go.uber.org/atomic v1.11.0 // indirect | |||
go.uber.org/multierr v1.11.0 // indirect | |||
go.uber.org/zap v1.26.0 // indirect | |||
golang.org/x/exp v0.0.0-20240119083558-1b970713d09a // indirect | |||
golang.org/x/mod v0.14.0 // indirect | |||
go.uber.org/zap v1.27.0 // indirect | |||
golang.org/x/exp v0.0.0-20240314144324-c7f7c6466f7f // indirect | |||
golang.org/x/mod v0.16.0 // indirect | |||
golang.org/x/sync v0.6.0 // indirect | |||
golang.org/x/time v0.5.0 // indirect | |||
google.golang.org/appengine v1.6.8 // indirect | |||
google.golang.org/genproto/googleapis/rpc v0.0.0-20240116215550-a9fa1716bcac // indirect | |||
google.golang.org/genproto/googleapis/rpc v0.0.0-20240314234333-6e1732d8331c // indirect | |||
gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect | |||
gopkg.in/warnings.v0 v0.1.2 // indirect | |||
gopkg.in/yaml.v2 v2.4.0 // indirect |
@@ -170,15 +170,16 @@ func updateRepoRunsNumbers(ctx context.Context, repo *repo_model.Repository) err | |||
return err | |||
} | |||
// CancelRunningJobs cancels all running and waiting jobs associated with a specific workflow. | |||
func CancelRunningJobs(ctx context.Context, repoID int64, ref, workflowID string, event webhook_module.HookEventType) error { | |||
// Find all runs in the specified repository, reference, and workflow with statuses 'Running' or 'Waiting'. | |||
// CancelPreviousJobs cancels all previous jobs of the same repository, reference, workflow, and event. | |||
// It's useful when a new run is triggered, and all previous runs needn't be continued anymore. | |||
func CancelPreviousJobs(ctx context.Context, repoID int64, ref, workflowID string, event webhook_module.HookEventType) error { | |||
// Find all runs in the specified repository, reference, and workflow with non-final status | |||
runs, total, err := db.FindAndCount[ActionRun](ctx, FindRunOptions{ | |||
RepoID: repoID, | |||
Ref: ref, | |||
WorkflowID: workflowID, | |||
TriggerEvent: event, | |||
Status: []Status{StatusRunning, StatusWaiting}, | |||
Status: []Status{StatusRunning, StatusWaiting, StatusBlocked}, | |||
}) | |||
if err != nil { | |||
return err |
@@ -127,14 +127,14 @@ func CleanRepoScheduleTasks(ctx context.Context, repo *repo_model.Repository) er | |||
return fmt.Errorf("DeleteCronTaskByRepo: %v", err) | |||
} | |||
// cancel running cron jobs of this repository and delete old schedules | |||
if err := CancelRunningJobs( | |||
if err := CancelPreviousJobs( | |||
ctx, | |||
repo.ID, | |||
repo.DefaultBranch, | |||
"", | |||
webhook_module.HookEventSchedule, | |||
); err != nil { | |||
return fmt.Errorf("CancelRunningJobs: %v", err) | |||
return fmt.Errorf("CancelPreviousJobs: %v", err) | |||
} | |||
return nil | |||
} |
@@ -12,12 +12,8 @@ import ( | |||
"code.gitea.io/gitea/models/db" | |||
issues_model "code.gitea.io/gitea/models/issues" | |||
"code.gitea.io/gitea/models/organization" | |||
access_model "code.gitea.io/gitea/models/perm/access" | |||
repo_model "code.gitea.io/gitea/models/repo" | |||
"code.gitea.io/gitea/models/unit" | |||
user_model "code.gitea.io/gitea/models/user" | |||
"code.gitea.io/gitea/modules/container" | |||
"code.gitea.io/gitea/modules/log" | |||
"code.gitea.io/gitea/modules/setting" | |||
"code.gitea.io/gitea/modules/timeutil" | |||
@@ -79,53 +75,6 @@ func init() { | |||
db.RegisterModel(new(Notification)) | |||
} | |||
// FindNotificationOptions represents the filters for notifications. If an ID is 0 it will be ignored.
type FindNotificationOptions struct {
	db.ListOptions
	UserID            int64                // only notifications for this user
	RepoID            int64                // only notifications in this repository
	IssueID           int64                // only notifications for this issue
	Status            []NotificationStatus // only notifications with one of these statuses
	Source            []NotificationSource // only notifications from one of these sources
	UpdatedAfterUnix  int64                // only notifications updated at or after this unix timestamp (>=)
	UpdatedBeforeUnix int64                // only notifications updated at or before this unix timestamp (<=)
}
// ToCond will convert each condition into a xorm-Cond | |||
func (opts FindNotificationOptions) ToConds() builder.Cond { | |||
cond := builder.NewCond() | |||
if opts.UserID != 0 { | |||
cond = cond.And(builder.Eq{"notification.user_id": opts.UserID}) | |||
} | |||
if opts.RepoID != 0 { | |||
cond = cond.And(builder.Eq{"notification.repo_id": opts.RepoID}) | |||
} | |||
if opts.IssueID != 0 { | |||
cond = cond.And(builder.Eq{"notification.issue_id": opts.IssueID}) | |||
} | |||
if len(opts.Status) > 0 { | |||
if len(opts.Status) == 1 { | |||
cond = cond.And(builder.Eq{"notification.status": opts.Status[0]}) | |||
} else { | |||
cond = cond.And(builder.In("notification.status", opts.Status)) | |||
} | |||
} | |||
if len(opts.Source) > 0 { | |||
cond = cond.And(builder.In("notification.source", opts.Source)) | |||
} | |||
if opts.UpdatedAfterUnix != 0 { | |||
cond = cond.And(builder.Gte{"notification.updated_unix": opts.UpdatedAfterUnix}) | |||
} | |||
if opts.UpdatedBeforeUnix != 0 { | |||
cond = cond.And(builder.Lte{"notification.updated_unix": opts.UpdatedBeforeUnix}) | |||
} | |||
return cond | |||
} | |||
// ToOrders returns the ORDER BY clause used for notification queries:
// most recently updated first.
func (opts FindNotificationOptions) ToOrders() string {
	return "notification.updated_unix DESC"
}
// CreateRepoTransferNotification creates notification for the user a repository was transferred to | |||
func CreateRepoTransferNotification(ctx context.Context, doer, newOwner *user_model.User, repo *repo_model.Repository) error { | |||
return db.WithTx(ctx, func(ctx context.Context) error { | |||
@@ -159,109 +108,6 @@ func CreateRepoTransferNotification(ctx context.Context, doer, newOwner *user_mo | |||
}) | |||
} | |||
// CreateOrUpdateIssueNotifications creates an issue notification | |||
// for each watcher, or updates it if already exists | |||
// receiverID > 0 just send to receiver, else send to all watcher | |||
func CreateOrUpdateIssueNotifications(ctx context.Context, issueID, commentID, notificationAuthorID, receiverID int64) error { | |||
ctx, committer, err := db.TxContext(ctx) | |||
if err != nil { | |||
return err | |||
} | |||
defer committer.Close() | |||
if err := createOrUpdateIssueNotifications(ctx, issueID, commentID, notificationAuthorID, receiverID); err != nil { | |||
return err | |||
} | |||
return committer.Commit() | |||
} | |||
// createOrUpdateIssueNotifications is the transactional worker behind
// CreateOrUpdateIssueNotifications: it computes the set of users to notify
// about issueID and then creates or updates one notification row per user.
func createOrUpdateIssueNotifications(ctx context.Context, issueID, commentID, notificationAuthorID, receiverID int64) error {
	// init
	var toNotify container.Set[int64]
	// Existing notifications for this issue, used below to decide between
	// update (row exists) and create (no row yet).
	notifications, err := db.Find[Notification](ctx, FindNotificationOptions{
		IssueID: issueID,
	})
	if err != nil {
		return err
	}

	issue, err := issues_model.GetIssueByID(ctx, issueID)
	if err != nil {
		return err
	}

	if receiverID > 0 {
		// Explicit receiver: notify only this one user.
		toNotify = make(container.Set[int64], 1)
		toNotify.Add(receiverID)
	} else {
		// No explicit receiver: collect issue watchers, repo watchers and
		// participants, then subtract the author and explicit unwatchers.
		toNotify = make(container.Set[int64], 32)
		issueWatches, err := issues_model.GetIssueWatchersIDs(ctx, issueID, true)
		if err != nil {
			return err
		}
		toNotify.AddMultiple(issueWatches...)
		// Repo-level watchers are not notified about WIP pull requests.
		if !(issue.IsPull && issues_model.HasWorkInProgressPrefix(issue.Title)) {
			repoWatches, err := repo_model.GetRepoWatchersIDs(ctx, issue.RepoID)
			if err != nil {
				return err
			}
			toNotify.AddMultiple(repoWatches...)
		}
		issueParticipants, err := issue.GetParticipantIDsByIssue(ctx)
		if err != nil {
			return err
		}
		toNotify.AddMultiple(issueParticipants...)

		// don't notify the user who caused the notification
		delete(toNotify, notificationAuthorID)
		// users who explicitly unwatched the issue are removed last, so an
		// unwatch overrides repo watch / participation
		issueUnWatches, err := issues_model.GetIssueWatchersIDs(ctx, issueID, false)
		if err != nil {
			return err
		}
		for _, id := range issueUnWatches {
			toNotify.Remove(id)
		}
	}

	err = issue.LoadRepo(ctx)
	if err != nil {
		return err
	}

	// notify
	for userID := range toNotify {
		// Reset cached units so CheckRepoUnitUser re-evaluates per user.
		issue.Repo.Units = nil
		user, err := user_model.GetUserByID(ctx, userID)
		if err != nil {
			if user_model.IsErrUserNotExist(err) {
				// the user may have been deleted meanwhile; skip silently
				continue
			}

			return err
		}
		// Skip users lacking access to the relevant repo unit
		// (pulls vs. issues).
		if issue.IsPull && !access_model.CheckRepoUnitUser(ctx, issue.Repo, user, unit.TypePullRequests) {
			continue
		}
		if !issue.IsPull && !access_model.CheckRepoUnitUser(ctx, issue.Repo, user, unit.TypeIssues) {
			continue
		}

		if notificationExists(notifications, issue.ID, userID) {
			if err = updateIssueNotification(ctx, userID, issue.ID, commentID, notificationAuthorID); err != nil {
				return err
			}
			continue
		}

		if err = createIssueNotification(ctx, userID, issue, commentID, notificationAuthorID); err != nil {
			return err
		}
	}

	return nil
}
func createIssueNotification(ctx context.Context, userID int64, issue *issues_model.Issue, commentID, updatedByID int64) error { | |||
notification := &Notification{ | |||
UserID: userID, | |||
@@ -449,309 +295,6 @@ func GetUIDsAndNotificationCounts(ctx context.Context, since, until timeutil.Tim | |||
return res, db.GetEngine(ctx).SQL(sql, since, until, NotificationStatusUnread).Find(&res) | |||
} | |||
// NotificationList contains a list of notifications and can batch-load
// their related repositories, issues, users and comments.
type NotificationList []*Notification
// LoadAttributes load Repo Issue User and Comment if not loaded | |||
func (nl NotificationList) LoadAttributes(ctx context.Context) error { | |||
if _, _, err := nl.LoadRepos(ctx); err != nil { | |||
return err | |||
} | |||
if _, err := nl.LoadIssues(ctx); err != nil { | |||
return err | |||
} | |||
if _, err := nl.LoadUsers(ctx); err != nil { | |||
return err | |||
} | |||
if _, err := nl.LoadComments(ctx); err != nil { | |||
return err | |||
} | |||
return nil | |||
} | |||
func (nl NotificationList) getPendingRepoIDs() []int64 { | |||
ids := make(container.Set[int64], len(nl)) | |||
for _, notification := range nl { | |||
if notification.Repository != nil { | |||
continue | |||
} | |||
ids.Add(notification.RepoID) | |||
} | |||
return ids.Values() | |||
} | |||
// LoadRepos loads repositories from database | |||
func (nl NotificationList) LoadRepos(ctx context.Context) (repo_model.RepositoryList, []int, error) { | |||
if len(nl) == 0 { | |||
return repo_model.RepositoryList{}, []int{}, nil | |||
} | |||
repoIDs := nl.getPendingRepoIDs() | |||
repos := make(map[int64]*repo_model.Repository, len(repoIDs)) | |||
left := len(repoIDs) | |||
for left > 0 { | |||
limit := db.DefaultMaxInSize | |||
if left < limit { | |||
limit = left | |||
} | |||
rows, err := db.GetEngine(ctx). | |||
In("id", repoIDs[:limit]). | |||
Rows(new(repo_model.Repository)) | |||
if err != nil { | |||
return nil, nil, err | |||
} | |||
for rows.Next() { | |||
var repo repo_model.Repository | |||
err = rows.Scan(&repo) | |||
if err != nil { | |||
rows.Close() | |||
return nil, nil, err | |||
} | |||
repos[repo.ID] = &repo | |||
} | |||
_ = rows.Close() | |||
left -= limit | |||
repoIDs = repoIDs[limit:] | |||
} | |||
failed := []int{} | |||
reposList := make(repo_model.RepositoryList, 0, len(repoIDs)) | |||
for i, notification := range nl { | |||
if notification.Repository == nil { | |||
notification.Repository = repos[notification.RepoID] | |||
} | |||
if notification.Repository == nil { | |||
log.Error("Notification[%d]: RepoID: %d not found", notification.ID, notification.RepoID) | |||
failed = append(failed, i) | |||
continue | |||
} | |||
var found bool | |||
for _, r := range reposList { | |||
if r.ID == notification.RepoID { | |||
found = true | |||
break | |||
} | |||
} | |||
if !found { | |||
reposList = append(reposList, notification.Repository) | |||
} | |||
} | |||
return reposList, failed, nil | |||
} | |||
func (nl NotificationList) getPendingIssueIDs() []int64 { | |||
ids := make(container.Set[int64], len(nl)) | |||
for _, notification := range nl { | |||
if notification.Issue != nil { | |||
continue | |||
} | |||
ids.Add(notification.IssueID) | |||
} | |||
return ids.Values() | |||
} | |||
// LoadIssues loads the Issue of every notification in the list from the
// database and attaches it. It returns the indexes of notifications whose
// issue could not be found; those misses are also logged.
func (nl NotificationList) LoadIssues(ctx context.Context) ([]int, error) {
	if len(nl) == 0 {
		return []int{}, nil
	}

	issueIDs := nl.getPendingIssueIDs()
	issues := make(map[int64]*issues_model.Issue, len(issueIDs))
	// Query in chunks of db.DefaultMaxInSize to keep the IN clause bounded.
	left := len(issueIDs)
	for left > 0 {
		limit := db.DefaultMaxInSize
		if left < limit {
			limit = left
		}
		rows, err := db.GetEngine(ctx).
			In("id", issueIDs[:limit]).
			Rows(new(issues_model.Issue))
		if err != nil {
			return nil, err
		}

		for rows.Next() {
			var issue issues_model.Issue
			err = rows.Scan(&issue)
			if err != nil {
				rows.Close()
				return nil, err
			}

			issues[issue.ID] = &issue
		}
		_ = rows.Close()

		left -= limit
		issueIDs = issueIDs[limit:]
	}

	failures := []int{}
	for i, notification := range nl {
		if notification.Issue == nil {
			notification.Issue = issues[notification.IssueID]
			if notification.Issue == nil {
				// IssueID == 0 is a valid "no issue" notification (e.g. a
				// repo-level one), so it is skipped without being reported.
				if notification.IssueID != 0 {
					log.Error("Notification[%d]: IssueID: %d Not Found", notification.ID, notification.IssueID)
					failures = append(failures, i)
				}
				continue
			}
			// reuse the repository already attached to the notification
			notification.Issue.Repo = notification.Repository
		}
	}
	return failures, nil
}
// Without returns the notification list without the failures | |||
func (nl NotificationList) Without(failures []int) NotificationList { | |||
if len(failures) == 0 { | |||
return nl | |||
} | |||
remaining := make([]*Notification, 0, len(nl)) | |||
last := -1 | |||
var i int | |||
for _, i = range failures { | |||
remaining = append(remaining, nl[last+1:i]...) | |||
last = i | |||
} | |||
if len(nl) > i { | |||
remaining = append(remaining, nl[i+1:]...) | |||
} | |||
return remaining | |||
} | |||
func (nl NotificationList) getPendingCommentIDs() []int64 { | |||
ids := make(container.Set[int64], len(nl)) | |||
for _, notification := range nl { | |||
if notification.CommentID == 0 || notification.Comment != nil { | |||
continue | |||
} | |||
ids.Add(notification.CommentID) | |||
} | |||
return ids.Values() | |||
} | |||
func (nl NotificationList) getUserIDs() []int64 { | |||
ids := make(container.Set[int64], len(nl)) | |||
for _, notification := range nl { | |||
if notification.UserID == 0 || notification.User != nil { | |||
continue | |||
} | |||
ids.Add(notification.UserID) | |||
} | |||
return ids.Values() | |||
} | |||
// LoadUsers loads users from database | |||
func (nl NotificationList) LoadUsers(ctx context.Context) ([]int, error) { | |||
if len(nl) == 0 { | |||
return []int{}, nil | |||
} | |||
userIDs := nl.getUserIDs() | |||
users := make(map[int64]*user_model.User, len(userIDs)) | |||
left := len(userIDs) | |||
for left > 0 { | |||
limit := db.DefaultMaxInSize | |||
if left < limit { | |||
limit = left | |||
} | |||
rows, err := db.GetEngine(ctx). | |||
In("id", userIDs[:limit]). | |||
Rows(new(user_model.User)) | |||
if err != nil { | |||
return nil, err | |||
} | |||
for rows.Next() { | |||
var user user_model.User | |||
err = rows.Scan(&user) | |||
if err != nil { | |||
rows.Close() | |||
return nil, err | |||
} | |||
users[user.ID] = &user | |||
} | |||
_ = rows.Close() | |||
left -= limit | |||
userIDs = userIDs[limit:] | |||
} | |||
failures := []int{} | |||
for i, notification := range nl { | |||
if notification.UserID > 0 && notification.User == nil && users[notification.UserID] != nil { | |||
notification.User = users[notification.UserID] | |||
if notification.User == nil { | |||
log.Error("Notification[%d]: UserID[%d] failed to load", notification.ID, notification.UserID) | |||
failures = append(failures, i) | |||
continue | |||
} | |||
} | |||
} | |||
return failures, nil | |||
} | |||
// LoadComments loads comments from database | |||
func (nl NotificationList) LoadComments(ctx context.Context) ([]int, error) { | |||
if len(nl) == 0 { | |||
return []int{}, nil | |||
} | |||
commentIDs := nl.getPendingCommentIDs() | |||
comments := make(map[int64]*issues_model.Comment, len(commentIDs)) | |||
left := len(commentIDs) | |||
for left > 0 { | |||
limit := db.DefaultMaxInSize | |||
if left < limit { | |||
limit = left | |||
} | |||
rows, err := db.GetEngine(ctx). | |||
In("id", commentIDs[:limit]). | |||
Rows(new(issues_model.Comment)) | |||
if err != nil { | |||
return nil, err | |||
} | |||
for rows.Next() { | |||
var comment issues_model.Comment | |||
err = rows.Scan(&comment) | |||
if err != nil { | |||
rows.Close() | |||
return nil, err | |||
} | |||
comments[comment.ID] = &comment | |||
} | |||
_ = rows.Close() | |||
left -= limit | |||
commentIDs = commentIDs[limit:] | |||
} | |||
failures := []int{} | |||
for i, notification := range nl { | |||
if notification.CommentID > 0 && notification.Comment == nil && comments[notification.CommentID] != nil { | |||
notification.Comment = comments[notification.CommentID] | |||
if notification.Comment == nil { | |||
log.Error("Notification[%d]: CommentID[%d] failed to load", notification.ID, notification.CommentID) | |||
failures = append(failures, i) | |||
continue | |||
} | |||
notification.Comment.Issue = notification.Issue | |||
} | |||
} | |||
return failures, nil | |||
} | |||
// SetIssueReadBy sets issue to be read by given user. | |||
func SetIssueReadBy(ctx context.Context, issueID, userID int64) error { | |||
if err := issues_model.UpdateIssueUserByRead(ctx, userID, issueID); err != nil { |
@@ -0,0 +1,501 @@ | |||
// Copyright 2024 The Gitea Authors. All rights reserved. | |||
// SPDX-License-Identifier: MIT | |||
package activities | |||
import ( | |||
"context" | |||
"code.gitea.io/gitea/models/db" | |||
issues_model "code.gitea.io/gitea/models/issues" | |||
access_model "code.gitea.io/gitea/models/perm/access" | |||
repo_model "code.gitea.io/gitea/models/repo" | |||
"code.gitea.io/gitea/models/unit" | |||
user_model "code.gitea.io/gitea/models/user" | |||
"code.gitea.io/gitea/modules/container" | |||
"code.gitea.io/gitea/modules/log" | |||
"code.gitea.io/gitea/modules/util" | |||
"xorm.io/builder" | |||
) | |||
// FindNotificationOptions represent the filters for notifications. If an ID is 0 it will be ignored. | |||
type FindNotificationOptions struct { | |||
db.ListOptions | |||
UserID int64 | |||
RepoID int64 | |||
IssueID int64 | |||
Status []NotificationStatus | |||
Source []NotificationSource | |||
UpdatedAfterUnix int64 | |||
UpdatedBeforeUnix int64 | |||
} | |||
// ToCond will convert each condition into a xorm-Cond | |||
func (opts FindNotificationOptions) ToConds() builder.Cond { | |||
cond := builder.NewCond() | |||
if opts.UserID != 0 { | |||
cond = cond.And(builder.Eq{"notification.user_id": opts.UserID}) | |||
} | |||
if opts.RepoID != 0 { | |||
cond = cond.And(builder.Eq{"notification.repo_id": opts.RepoID}) | |||
} | |||
if opts.IssueID != 0 { | |||
cond = cond.And(builder.Eq{"notification.issue_id": opts.IssueID}) | |||
} | |||
if len(opts.Status) > 0 { | |||
if len(opts.Status) == 1 { | |||
cond = cond.And(builder.Eq{"notification.status": opts.Status[0]}) | |||
} else { | |||
cond = cond.And(builder.In("notification.status", opts.Status)) | |||
} | |||
} | |||
if len(opts.Source) > 0 { | |||
cond = cond.And(builder.In("notification.source", opts.Source)) | |||
} | |||
if opts.UpdatedAfterUnix != 0 { | |||
cond = cond.And(builder.Gte{"notification.updated_unix": opts.UpdatedAfterUnix}) | |||
} | |||
if opts.UpdatedBeforeUnix != 0 { | |||
cond = cond.And(builder.Lte{"notification.updated_unix": opts.UpdatedBeforeUnix}) | |||
} | |||
return cond | |||
} | |||
func (opts FindNotificationOptions) ToOrders() string { | |||
return "notification.updated_unix DESC" | |||
} | |||
// CreateOrUpdateIssueNotifications creates an issue notification | |||
// for each watcher, or updates it if already exists | |||
// receiverID > 0 just send to receiver, else send to all watcher | |||
func CreateOrUpdateIssueNotifications(ctx context.Context, issueID, commentID, notificationAuthorID, receiverID int64) error { | |||
ctx, committer, err := db.TxContext(ctx) | |||
if err != nil { | |||
return err | |||
} | |||
defer committer.Close() | |||
if err := createOrUpdateIssueNotifications(ctx, issueID, commentID, notificationAuthorID, receiverID); err != nil { | |||
return err | |||
} | |||
return committer.Commit() | |||
} | |||
func createOrUpdateIssueNotifications(ctx context.Context, issueID, commentID, notificationAuthorID, receiverID int64) error { | |||
// init | |||
var toNotify container.Set[int64] | |||
notifications, err := db.Find[Notification](ctx, FindNotificationOptions{ | |||
IssueID: issueID, | |||
}) | |||
if err != nil { | |||
return err | |||
} | |||
issue, err := issues_model.GetIssueByID(ctx, issueID) | |||
if err != nil { | |||
return err | |||
} | |||
if receiverID > 0 { | |||
toNotify = make(container.Set[int64], 1) | |||
toNotify.Add(receiverID) | |||
} else { | |||
toNotify = make(container.Set[int64], 32) | |||
issueWatches, err := issues_model.GetIssueWatchersIDs(ctx, issueID, true) | |||
if err != nil { | |||
return err | |||
} | |||
toNotify.AddMultiple(issueWatches...) | |||
if !(issue.IsPull && issues_model.HasWorkInProgressPrefix(issue.Title)) { | |||
repoWatches, err := repo_model.GetRepoWatchersIDs(ctx, issue.RepoID) | |||
if err != nil { | |||
return err | |||
} | |||
toNotify.AddMultiple(repoWatches...) | |||
} | |||
issueParticipants, err := issue.GetParticipantIDsByIssue(ctx) | |||
if err != nil { | |||
return err | |||
} | |||
toNotify.AddMultiple(issueParticipants...) | |||
// dont notify user who cause notification | |||
delete(toNotify, notificationAuthorID) | |||
// explicit unwatch on issue | |||
issueUnWatches, err := issues_model.GetIssueWatchersIDs(ctx, issueID, false) | |||
if err != nil { | |||
return err | |||
} | |||
for _, id := range issueUnWatches { | |||
toNotify.Remove(id) | |||
} | |||
} | |||
err = issue.LoadRepo(ctx) | |||
if err != nil { | |||
return err | |||
} | |||
// notify | |||
for userID := range toNotify { | |||
issue.Repo.Units = nil | |||
user, err := user_model.GetUserByID(ctx, userID) | |||
if err != nil { | |||
if user_model.IsErrUserNotExist(err) { | |||
continue | |||
} | |||
return err | |||
} | |||
if issue.IsPull && !access_model.CheckRepoUnitUser(ctx, issue.Repo, user, unit.TypePullRequests) { | |||
continue | |||
} | |||
if !issue.IsPull && !access_model.CheckRepoUnitUser(ctx, issue.Repo, user, unit.TypeIssues) { | |||
continue | |||
} | |||
if notificationExists(notifications, issue.ID, userID) { | |||
if err = updateIssueNotification(ctx, userID, issue.ID, commentID, notificationAuthorID); err != nil { | |||
return err | |||
} | |||
continue | |||
} | |||
if err = createIssueNotification(ctx, userID, issue, commentID, notificationAuthorID); err != nil { | |||
return err | |||
} | |||
} | |||
return nil | |||
} | |||
// NotificationList contains a list of notifications | |||
type NotificationList []*Notification | |||
// LoadAttributes load Repo Issue User and Comment if not loaded | |||
func (nl NotificationList) LoadAttributes(ctx context.Context) error { | |||
if _, _, err := nl.LoadRepos(ctx); err != nil { | |||
return err | |||
} | |||
if _, err := nl.LoadIssues(ctx); err != nil { | |||
return err | |||
} | |||
if _, err := nl.LoadUsers(ctx); err != nil { | |||
return err | |||
} | |||
if _, err := nl.LoadComments(ctx); err != nil { | |||
return err | |||
} | |||
return nil | |||
} | |||
func (nl NotificationList) getPendingRepoIDs() []int64 { | |||
ids := make(container.Set[int64], len(nl)) | |||
for _, notification := range nl { | |||
if notification.Repository != nil { | |||
continue | |||
} | |||
ids.Add(notification.RepoID) | |||
} | |||
return ids.Values() | |||
} | |||
// LoadRepos loads repositories from database | |||
func (nl NotificationList) LoadRepos(ctx context.Context) (repo_model.RepositoryList, []int, error) { | |||
if len(nl) == 0 { | |||
return repo_model.RepositoryList{}, []int{}, nil | |||
} | |||
repoIDs := nl.getPendingRepoIDs() | |||
repos := make(map[int64]*repo_model.Repository, len(repoIDs)) | |||
left := len(repoIDs) | |||
for left > 0 { | |||
limit := db.DefaultMaxInSize | |||
if left < limit { | |||
limit = left | |||
} | |||
rows, err := db.GetEngine(ctx). | |||
In("id", repoIDs[:limit]). | |||
Rows(new(repo_model.Repository)) | |||
if err != nil { | |||
return nil, nil, err | |||
} | |||
for rows.Next() { | |||
var repo repo_model.Repository | |||
err = rows.Scan(&repo) | |||
if err != nil { | |||
rows.Close() | |||
return nil, nil, err | |||
} | |||
repos[repo.ID] = &repo | |||
} | |||
_ = rows.Close() | |||
left -= limit | |||
repoIDs = repoIDs[limit:] | |||
} | |||
failed := []int{} | |||
reposList := make(repo_model.RepositoryList, 0, len(repoIDs)) | |||
for i, notification := range nl { | |||
if notification.Repository == nil { | |||
notification.Repository = repos[notification.RepoID] | |||
} | |||
if notification.Repository == nil { | |||
log.Error("Notification[%d]: RepoID: %d not found", notification.ID, notification.RepoID) | |||
failed = append(failed, i) | |||
continue | |||
} | |||
var found bool | |||
for _, r := range reposList { | |||
if r.ID == notification.RepoID { | |||
found = true | |||
break | |||
} | |||
} | |||
if !found { | |||
reposList = append(reposList, notification.Repository) | |||
} | |||
} | |||
return reposList, failed, nil | |||
} | |||
func (nl NotificationList) getPendingIssueIDs() []int64 { | |||
ids := make(container.Set[int64], len(nl)) | |||
for _, notification := range nl { | |||
if notification.Issue != nil { | |||
continue | |||
} | |||
ids.Add(notification.IssueID) | |||
} | |||
return ids.Values() | |||
} | |||
// LoadIssues loads issues from database | |||
func (nl NotificationList) LoadIssues(ctx context.Context) ([]int, error) { | |||
if len(nl) == 0 { | |||
return []int{}, nil | |||
} | |||
issueIDs := nl.getPendingIssueIDs() | |||
issues := make(map[int64]*issues_model.Issue, len(issueIDs)) | |||
left := len(issueIDs) | |||
for left > 0 { | |||
limit := db.DefaultMaxInSize | |||
if left < limit { | |||
limit = left | |||
} | |||
rows, err := db.GetEngine(ctx). | |||
In("id", issueIDs[:limit]). | |||
Rows(new(issues_model.Issue)) | |||
if err != nil { | |||
return nil, err | |||
} | |||
for rows.Next() { | |||
var issue issues_model.Issue | |||
err = rows.Scan(&issue) | |||
if err != nil { | |||
rows.Close() | |||
return nil, err | |||
} | |||
issues[issue.ID] = &issue | |||
} | |||
_ = rows.Close() | |||
left -= limit | |||
issueIDs = issueIDs[limit:] | |||
} | |||
failures := []int{} | |||
for i, notification := range nl { | |||
if notification.Issue == nil { | |||
notification.Issue = issues[notification.IssueID] | |||
if notification.Issue == nil { | |||
if notification.IssueID != 0 { | |||
log.Error("Notification[%d]: IssueID: %d Not Found", notification.ID, notification.IssueID) | |||
failures = append(failures, i) | |||
} | |||
continue | |||
} | |||
notification.Issue.Repo = notification.Repository | |||
} | |||
} | |||
return failures, nil | |||
} | |||
// Without returns the notification list without the failures | |||
func (nl NotificationList) Without(failures []int) NotificationList { | |||
if len(failures) == 0 { | |||
return nl | |||
} | |||
remaining := make([]*Notification, 0, len(nl)) | |||
last := -1 | |||
var i int | |||
for _, i = range failures { | |||
remaining = append(remaining, nl[last+1:i]...) | |||
last = i | |||
} | |||
if len(nl) > i { | |||
remaining = append(remaining, nl[i+1:]...) | |||
} | |||
return remaining | |||
} | |||
func (nl NotificationList) getPendingCommentIDs() []int64 { | |||
ids := make(container.Set[int64], len(nl)) | |||
for _, notification := range nl { | |||
if notification.CommentID == 0 || notification.Comment != nil { | |||
continue | |||
} | |||
ids.Add(notification.CommentID) | |||
} | |||
return ids.Values() | |||
} | |||
func (nl NotificationList) getUserIDs() []int64 { | |||
ids := make(container.Set[int64], len(nl)) | |||
for _, notification := range nl { | |||
if notification.UserID == 0 || notification.User != nil { | |||
continue | |||
} | |||
ids.Add(notification.UserID) | |||
} | |||
return ids.Values() | |||
} | |||
// LoadUsers loads users from database | |||
func (nl NotificationList) LoadUsers(ctx context.Context) ([]int, error) { | |||
if len(nl) == 0 { | |||
return []int{}, nil | |||
} | |||
userIDs := nl.getUserIDs() | |||
users := make(map[int64]*user_model.User, len(userIDs)) | |||
left := len(userIDs) | |||
for left > 0 { | |||
limit := db.DefaultMaxInSize | |||
if left < limit { | |||
limit = left | |||
} | |||
rows, err := db.GetEngine(ctx). | |||
In("id", userIDs[:limit]). | |||
Rows(new(user_model.User)) | |||
if err != nil { | |||
return nil, err | |||
} | |||
for rows.Next() { | |||
var user user_model.User | |||
err = rows.Scan(&user) | |||
if err != nil { | |||
rows.Close() | |||
return nil, err | |||
} | |||
users[user.ID] = &user | |||
} | |||
_ = rows.Close() | |||
left -= limit | |||
userIDs = userIDs[limit:] | |||
} | |||
failures := []int{} | |||
for i, notification := range nl { | |||
if notification.UserID > 0 && notification.User == nil && users[notification.UserID] != nil { | |||
notification.User = users[notification.UserID] | |||
if notification.User == nil { | |||
log.Error("Notification[%d]: UserID[%d] failed to load", notification.ID, notification.UserID) | |||
failures = append(failures, i) | |||
continue | |||
} | |||
} | |||
} | |||
return failures, nil | |||
} | |||
// LoadComments loads comments from database | |||
func (nl NotificationList) LoadComments(ctx context.Context) ([]int, error) { | |||
if len(nl) == 0 { | |||
return []int{}, nil | |||
} | |||
commentIDs := nl.getPendingCommentIDs() | |||
comments := make(map[int64]*issues_model.Comment, len(commentIDs)) | |||
left := len(commentIDs) | |||
for left > 0 { | |||
limit := db.DefaultMaxInSize | |||
if left < limit { | |||
limit = left | |||
} | |||
rows, err := db.GetEngine(ctx). | |||
In("id", commentIDs[:limit]). | |||
Rows(new(issues_model.Comment)) | |||
if err != nil { | |||
return nil, err | |||
} | |||
for rows.Next() { | |||
var comment issues_model.Comment | |||
err = rows.Scan(&comment) | |||
if err != nil { | |||
rows.Close() | |||
return nil, err | |||
} | |||
comments[comment.ID] = &comment | |||
} | |||
_ = rows.Close() | |||
left -= limit | |||
commentIDs = commentIDs[limit:] | |||
} | |||
failures := []int{} | |||
for i, notification := range nl { | |||
if notification.CommentID > 0 && notification.Comment == nil && comments[notification.CommentID] != nil { | |||
notification.Comment = comments[notification.CommentID] | |||
if notification.Comment == nil { | |||
log.Error("Notification[%d]: CommentID[%d] failed to load", notification.ID, notification.CommentID) | |||
failures = append(failures, i) | |||
continue | |||
} | |||
notification.Comment.Issue = notification.Issue | |||
} | |||
} | |||
return failures, nil | |||
} | |||
// LoadIssuePullRequests loads all issues' pull requests if possible | |||
func (nl NotificationList) LoadIssuePullRequests(ctx context.Context) error { | |||
issues := make(map[int64]*issues_model.Issue, len(nl)) | |||
for _, notification := range nl { | |||
if notification.Issue != nil && notification.Issue.IsPull && notification.Issue.PullRequest == nil { | |||
issues[notification.Issue.ID] = notification.Issue | |||
} | |||
} | |||
if len(issues) == 0 { | |||
return nil | |||
} | |||
pulls, err := issues_model.GetPullRequestByIssueIDs(ctx, util.KeysOfMap(issues)) | |||
if err != nil { | |||
return err | |||
} | |||
for _, pull := range pulls { | |||
if issue := issues[pull.IssueID]; issue != nil { | |||
issue.PullRequest = pull | |||
issue.PullRequest.Issue = issue | |||
} | |||
} | |||
return nil | |||
} |
@@ -139,6 +139,8 @@ func RegeneratePublicKeys(ctx context.Context, t io.StringWriter) error { | |||
if err != nil { | |||
return err | |||
} | |||
defer f.Close() | |||
scanner := bufio.NewScanner(f) | |||
for scanner.Scan() { | |||
line := scanner.Text() | |||
@@ -148,11 +150,12 @@ func RegeneratePublicKeys(ctx context.Context, t io.StringWriter) error { | |||
} | |||
_, err = t.WriteString(line + "\n") | |||
if err != nil { | |||
f.Close() | |||
return err | |||
} | |||
} | |||
f.Close() | |||
if err = scanner.Err(); err != nil { | |||
return fmt.Errorf("RegeneratePublicKeys scan: %w", err) | |||
} | |||
} | |||
return nil | |||
} |
@@ -24,7 +24,7 @@ import ( | |||
const ( | |||
// DefaultAvatarClass is the default class of a rendered avatar | |||
DefaultAvatarClass = "ui avatar gt-vm" | |||
DefaultAvatarClass = "ui avatar tw-align-middle" | |||
// DefaultAvatarPixelSize is the default size in pixels of a rendered avatar | |||
DefaultAvatarPixelSize = 28 | |||
) |
@@ -120,6 +120,16 @@ func (c *halfCommitter) Close() error { | |||
// TxContext represents a transaction Context, | |||
// it will reuse the existing transaction in the parent context or create a new one. | |||
// Some tips to use: | |||
// | |||
// 1 It's always recommended to use `WithTx` in new code instead of `TxContext`, since `WithTx` will handle the transaction automatically. | |||
// 2. To maintain the old code which uses `TxContext`: | |||
// a. Always call `Close()` before returning regardless of whether `Commit()` has been called. | |||
// b. Always call `Commit()` before returning if there are no errors, even if the code did not change any data. | |||
// c. Remember the `Committer` will be a halfCommitter when a transaction is being reused. | |||
// So calling `Commit()` will do nothing, but calling `Close()` without calling `Commit()` will rollback the transaction. | |||
// And all operations submitted by the caller stack will be rollbacked as well, not only the operations in the current function. | |||
// d. It doesn't mean rollback is forbidden, but always do it only when there is an error, and you do want to rollback. | |||
func TxContext(parentCtx context.Context) (*Context, Committer, error) { | |||
if sess, ok := inTransaction(parentCtx); ok { | |||
return newContext(parentCtx, sess, true), &halfCommitter{committer: sess}, nil |
@@ -75,3 +75,11 @@ | |||
content: "comment in private pository" | |||
created_unix: 946684811 | |||
updated_unix: 946684811 | |||
- | |||
id: 9 | |||
type: 22 # review | |||
poster_id: 2 | |||
issue_id: 2 # in repo_id 1 | |||
review_id: 20 | |||
created_unix: 946684810 |
@@ -45,3 +45,27 @@ | |||
type: 2 | |||
created_unix: 1688973000 | |||
updated_unix: 1688973000 | |||
- | |||
id: 5 | |||
title: project without default column | |||
owner_id: 2 | |||
repo_id: 0 | |||
is_closed: false | |||
creator_id: 2 | |||
board_type: 1 | |||
type: 2 | |||
created_unix: 1688973000 | |||
updated_unix: 1688973000 | |||
- | |||
id: 6 | |||
title: project with multiple default columns | |||
owner_id: 2 | |||
repo_id: 0 | |||
is_closed: false | |||
creator_id: 2 | |||
board_type: 1 | |||
type: 2 | |||
created_unix: 1688973000 | |||
updated_unix: 1688973000 |
@@ -3,6 +3,7 @@ | |||
project_id: 1 | |||
title: To Do | |||
creator_id: 2 | |||
default: true | |||
created_unix: 1588117528 | |||
updated_unix: 1588117528 | |||
@@ -29,3 +30,48 @@ | |||
creator_id: 2 | |||
created_unix: 1588117528 | |||
updated_unix: 1588117528 | |||
- | |||
id: 5 | |||
project_id: 2 | |||
title: Backlog | |||
creator_id: 2 | |||
default: true | |||
created_unix: 1588117528 | |||
updated_unix: 1588117528 | |||
- | |||
id: 6 | |||
project_id: 4 | |||
title: Backlog | |||
creator_id: 2 | |||
default: true | |||
created_unix: 1588117528 | |||
updated_unix: 1588117528 | |||
- | |||
id: 7 | |||
project_id: 5 | |||
title: Done | |||
creator_id: 2 | |||
default: false | |||
created_unix: 1588117528 | |||
updated_unix: 1588117528 | |||
- | |||
id: 8 | |||
project_id: 6 | |||
title: Backlog | |||
creator_id: 2 | |||
default: true | |||
created_unix: 1588117528 | |||
updated_unix: 1588117528 | |||
- | |||
id: 9 | |||
project_id: 6 | |||
title: Uncategorized | |||
creator_id: 2 | |||
default: true | |||
created_unix: 1588117528 | |||
updated_unix: 1588117528 |
@@ -170,3 +170,12 @@ | |||
content: "review request for user15" | |||
updated_unix: 946684835 | |||
created_unix: 946684835 | |||
- | |||
id: 20 | |||
type: 22 | |||
reviewer_id: 1 | |||
issue_id: 2 | |||
content: "Review Comment" | |||
updated_unix: 946684810 | |||
created_unix: 946684810 |
@@ -25,6 +25,7 @@ import ( | |||
"code.gitea.io/gitea/modules/translation" | |||
"xorm.io/builder" | |||
"xorm.io/xorm" | |||
) | |||
// CommitStatus holds a single Status of a single Commit | |||
@@ -269,44 +270,48 @@ type CommitStatusIndex struct { | |||
// GetLatestCommitStatus returns all statuses with a unique context for a given commit. | |||
func GetLatestCommitStatus(ctx context.Context, repoID int64, sha string, listOptions db.ListOptions) ([]*CommitStatus, int64, error) { | |||
ids := make([]int64, 0, 10) | |||
sess := db.GetEngine(ctx).Table(&CommitStatus{}). | |||
Where("repo_id = ?", repoID).And("sha = ?", sha). | |||
Select("max( id ) as id"). | |||
GroupBy("context_hash").OrderBy("max( id ) desc") | |||
getBase := func() *xorm.Session { | |||
return db.GetEngine(ctx).Table(&CommitStatus{}). | |||
Where("repo_id = ?", repoID).And("sha = ?", sha) | |||
} | |||
indices := make([]int64, 0, 10) | |||
sess := getBase().Select("max( `index` ) as `index`"). | |||
GroupBy("context_hash").OrderBy("max( `index` ) desc") | |||
if !listOptions.IsListAll() { | |||
sess = db.SetSessionPagination(sess, &listOptions) | |||
} | |||
count, err := sess.FindAndCount(&ids) | |||
count, err := sess.FindAndCount(&indices) | |||
if err != nil { | |||
return nil, count, err | |||
} | |||
statuses := make([]*CommitStatus, 0, len(ids)) | |||
if len(ids) == 0 { | |||
statuses := make([]*CommitStatus, 0, len(indices)) | |||
if len(indices) == 0 { | |||
return statuses, count, nil | |||
} | |||
return statuses, count, db.GetEngine(ctx).In("id", ids).Find(&statuses) | |||
return statuses, count, getBase().And(builder.In("`index`", indices)).Find(&statuses) | |||
} | |||
// GetLatestCommitStatusForPairs returns all statuses with a unique context for a given list of repo-sha pairs | |||
func GetLatestCommitStatusForPairs(ctx context.Context, repoIDsToLatestCommitSHAs map[int64]string, listOptions db.ListOptions) (map[int64][]*CommitStatus, error) { | |||
type result struct { | |||
ID int64 | |||
Index int64 | |||
RepoID int64 | |||
} | |||
results := make([]result, 0, len(repoIDsToLatestCommitSHAs)) | |||
sess := db.GetEngine(ctx).Table(&CommitStatus{}) | |||
getBase := func() *xorm.Session { | |||
return db.GetEngine(ctx).Table(&CommitStatus{}) | |||
} | |||
// Create a disjunction of conditions for each repoID and SHA pair | |||
conds := make([]builder.Cond, 0, len(repoIDsToLatestCommitSHAs)) | |||
for repoID, sha := range repoIDsToLatestCommitSHAs { | |||
conds = append(conds, builder.Eq{"repo_id": repoID, "sha": sha}) | |||
} | |||
sess = sess.Where(builder.Or(conds...)). | |||
Select("max( id ) as id, repo_id"). | |||
GroupBy("context_hash, repo_id").OrderBy("max( id ) desc") | |||
sess := getBase().Where(builder.Or(conds...)). | |||
Select("max( `index` ) as `index`, repo_id"). | |||
GroupBy("context_hash, repo_id").OrderBy("max( `index` ) desc") | |||
if !listOptions.IsListAll() { | |||
sess = db.SetSessionPagination(sess, &listOptions) | |||
@@ -317,15 +322,21 @@ func GetLatestCommitStatusForPairs(ctx context.Context, repoIDsToLatestCommitSHA | |||
return nil, err | |||
} | |||
ids := make([]int64, 0, len(results)) | |||
repoStatuses := make(map[int64][]*CommitStatus) | |||
for _, result := range results { | |||
ids = append(ids, result.ID) | |||
} | |||
statuses := make([]*CommitStatus, 0, len(ids)) | |||
if len(ids) > 0 { | |||
err = db.GetEngine(ctx).In("id", ids).Find(&statuses) | |||
if len(results) > 0 { | |||
statuses := make([]*CommitStatus, 0, len(results)) | |||
conds = make([]builder.Cond, 0, len(results)) | |||
for _, result := range results { | |||
cond := builder.Eq{ | |||
"`index`": result.Index, | |||
"repo_id": result.RepoID, | |||
"sha": repoIDsToLatestCommitSHAs[result.RepoID], | |||
} | |||
conds = append(conds, cond) | |||
} | |||
err = getBase().Where(builder.Or(conds...)).Find(&statuses) | |||
if err != nil { | |||
return nil, err | |||
} | |||
@@ -342,42 +353,43 @@ func GetLatestCommitStatusForPairs(ctx context.Context, repoIDsToLatestCommitSHA | |||
// GetLatestCommitStatusForRepoCommitIDs returns all statuses with a unique context for a given list of repo-sha pairs | |||
func GetLatestCommitStatusForRepoCommitIDs(ctx context.Context, repoID int64, commitIDs []string) (map[string][]*CommitStatus, error) { | |||
type result struct { | |||
ID int64 | |||
Sha string | |||
Index int64 | |||
SHA string | |||
} | |||
getBase := func() *xorm.Session { | |||
return db.GetEngine(ctx).Table(&CommitStatus{}).Where("repo_id = ?", repoID) | |||
} | |||
results := make([]result, 0, len(commitIDs)) | |||
sess := db.GetEngine(ctx).Table(&CommitStatus{}) | |||
// Create a disjunction of conditions for each repoID and SHA pair | |||
conds := make([]builder.Cond, 0, len(commitIDs)) | |||
for _, sha := range commitIDs { | |||
conds = append(conds, builder.Eq{"sha": sha}) | |||
} | |||
sess = sess.Where(builder.Eq{"repo_id": repoID}.And(builder.Or(conds...))). | |||
Select("max( id ) as id, sha"). | |||
GroupBy("context_hash, sha").OrderBy("max( id ) desc") | |||
sess := getBase().And(builder.Or(conds...)). | |||
Select("max( `index` ) as `index`, sha"). | |||
GroupBy("context_hash, sha").OrderBy("max( `index` ) desc") | |||
err := sess.Find(&results) | |||
if err != nil { | |||
return nil, err | |||
} | |||
ids := make([]int64, 0, len(results)) | |||
repoStatuses := make(map[string][]*CommitStatus) | |||
for _, result := range results { | |||
ids = append(ids, result.ID) | |||
} | |||
statuses := make([]*CommitStatus, 0, len(ids)) | |||
if len(ids) > 0 { | |||
err = db.GetEngine(ctx).In("id", ids).Find(&statuses) | |||
if len(results) > 0 { | |||
statuses := make([]*CommitStatus, 0, len(results)) | |||
conds = make([]builder.Cond, 0, len(results)) | |||
for _, result := range results { | |||
conds = append(conds, builder.Eq{"`index`": result.Index, "sha": result.SHA}) | |||
} | |||
err = getBase().And(builder.Or(conds...)).Find(&statuses) | |||
if err != nil { | |||
return nil, err | |||
} | |||
// Group the statuses by repo ID | |||
// Group the statuses by commit | |||
for _, status := range statuses { | |||
repoStatuses[status.SHA] = append(repoStatuses[status.SHA], status) | |||
} | |||
@@ -388,22 +400,36 @@ func GetLatestCommitStatusForRepoCommitIDs(ctx context.Context, repoID int64, co | |||
// FindRepoRecentCommitStatusContexts returns repository's recent commit status contexts | |||
func FindRepoRecentCommitStatusContexts(ctx context.Context, repoID int64, before time.Duration) ([]string, error) { | |||
type result struct { | |||
Index int64 | |||
SHA string | |||
} | |||
getBase := func() *xorm.Session { | |||
return db.GetEngine(ctx).Table(&CommitStatus{}).Where("repo_id = ?", repoID) | |||
} | |||
start := timeutil.TimeStampNow().AddDuration(-before) | |||
ids := make([]int64, 0, 10) | |||
if err := db.GetEngine(ctx).Table("commit_status"). | |||
Where("repo_id = ?", repoID). | |||
And("updated_unix >= ?", start). | |||
Select("max( id ) as id"). | |||
GroupBy("context_hash").OrderBy("max( id ) desc"). | |||
Find(&ids); err != nil { | |||
results := make([]result, 0, 10) | |||
sess := getBase().And("updated_unix >= ?", start). | |||
Select("max( `index` ) as `index`, sha"). | |||
GroupBy("context_hash, sha").OrderBy("max( `index` ) desc") | |||
err := sess.Find(&results) | |||
if err != nil { | |||
return nil, err | |||
} | |||
contexts := make([]string, 0, len(ids)) | |||
if len(ids) == 0 { | |||
contexts := make([]string, 0, len(results)) | |||
if len(results) == 0 { | |||
return contexts, nil | |||
} | |||
return contexts, db.GetEngine(ctx).Select("context").Table("commit_status").In("id", ids).Find(&contexts) | |||
conds := make([]builder.Cond, 0, len(results)) | |||
for _, result := range results { | |||
conds = append(conds, builder.Eq{"`index`": result.Index, "sha": result.SHA}) | |||
} | |||
return contexts, getBase().And(builder.Or(conds...)).Select("context").Find(&contexts) | |||
} | |||
// NewCommitStatusOptions holds options for creating a CommitStatus |
@@ -74,6 +74,10 @@ func findCodeComments(ctx context.Context, opts FindCommentsOptions, issue *Issu | |||
return nil, err | |||
} | |||
if err := comments.LoadAttachments(ctx); err != nil { | |||
return nil, err | |||
} | |||
// Find all reviews by ReviewID | |||
reviews := make(map[int64]*Review) | |||
ids := make([]int64, 0, len(comments)) |
@@ -193,20 +193,6 @@ func (issue *Issue) IsTimetrackerEnabled(ctx context.Context) bool { | |||
return issue.Repo.IsTimetrackerEnabled(ctx) | |||
} | |||
// GetPullRequest returns the issue pull request | |||
func (issue *Issue) GetPullRequest(ctx context.Context) (pr *PullRequest, err error) { | |||
if !issue.IsPull { | |||
return nil, fmt.Errorf("Issue is not a pull request") | |||
} | |||
pr, err = GetPullRequestByIssueID(ctx, issue.ID) | |||
if err != nil { | |||
return nil, err | |||
} | |||
pr.Issue = issue | |||
return pr, err | |||
} | |||
// LoadPoster loads poster | |||
func (issue *Issue) LoadPoster(ctx context.Context) (err error) { | |||
if issue.Poster == nil && issue.PosterID != 0 { |
@@ -370,6 +370,9 @@ func (issues IssueList) LoadPullRequests(ctx context.Context) error { | |||
for _, issue := range issues { | |||
issue.PullRequest = pullRequestMaps[issue.ID] | |||
if issue.PullRequest != nil { | |||
issue.PullRequest.Issue = issue | |||
} | |||
} | |||
return nil | |||
} |
@@ -49,18 +49,13 @@ func (issue *Issue) ProjectBoardID(ctx context.Context) int64 { | |||
// LoadIssuesFromBoard load issues assigned to this board | |||
func LoadIssuesFromBoard(ctx context.Context, b *project_model.Board) (IssueList, error) { | |||
issueList := make(IssueList, 0, 10) | |||
if b.ID > 0 { | |||
issues, err := Issues(ctx, &IssuesOptions{ | |||
ProjectBoardID: b.ID, | |||
ProjectID: b.ProjectID, | |||
SortType: "project-column-sorting", | |||
}) | |||
if err != nil { | |||
return nil, err | |||
} | |||
issueList = issues | |||
issueList, err := Issues(ctx, &IssuesOptions{ | |||
ProjectBoardID: b.ID, | |||
ProjectID: b.ProjectID, | |||
SortType: "project-column-sorting", | |||
}) | |||
if err != nil { | |||
return nil, err | |||
} | |||
if b.Default { |
@@ -21,7 +21,7 @@ import ( | |||
// IssuesOptions represents options of an issue. | |||
type IssuesOptions struct { //nolint | |||
db.Paginator | |||
Paginator *db.ListOptions | |||
RepoIDs []int64 // overwrites RepoCond if the length is not 0 | |||
AllPublic bool // include also all public repositories | |||
RepoCond builder.Cond | |||
@@ -104,23 +104,11 @@ func applyLimit(sess *xorm.Session, opts *IssuesOptions) *xorm.Session { | |||
return sess | |||
} | |||
// Warning: Do not use GetSkipTake() for *db.ListOptions | |||
// Its implementation could reset the page size with setting.API.MaxResponseItems | |||
if listOptions, ok := opts.Paginator.(*db.ListOptions); ok { | |||
if listOptions.Page >= 0 && listOptions.PageSize > 0 { | |||
var start int | |||
if listOptions.Page == 0 { | |||
start = 0 | |||
} else { | |||
start = (listOptions.Page - 1) * listOptions.PageSize | |||
} | |||
sess.Limit(listOptions.PageSize, start) | |||
} | |||
return sess | |||
start := 0 | |||
if opts.Paginator.Page > 1 { | |||
start = (opts.Paginator.Page - 1) * opts.Paginator.PageSize | |||
} | |||
start, limit := opts.Paginator.GetSkipTake() | |||
sess.Limit(limit, start) | |||
sess.Limit(opts.Paginator.PageSize, start) | |||
return sess | |||
} |
@@ -68,13 +68,17 @@ func CountIssuesByRepo(ctx context.Context, opts *IssuesOptions) (map[int64]int6 | |||
} | |||
// CountIssues number return of issues by given conditions. | |||
func CountIssues(ctx context.Context, opts *IssuesOptions) (int64, error) { | |||
func CountIssues(ctx context.Context, opts *IssuesOptions, otherConds ...builder.Cond) (int64, error) { | |||
sess := db.GetEngine(ctx). | |||
Select("COUNT(issue.id) AS count"). | |||
Table("issue"). | |||
Join("INNER", "repository", "`issue`.repo_id = `repository`.id") | |||
applyConditions(sess, opts) | |||
for _, cond := range otherConds { | |||
sess.And(cond) | |||
} | |||
return sess.Count() | |||
} | |||
@@ -19,7 +19,6 @@ import ( | |||
repo_model "code.gitea.io/gitea/models/repo" | |||
user_model "code.gitea.io/gitea/models/user" | |||
"code.gitea.io/gitea/modules/git" | |||
"code.gitea.io/gitea/modules/gitrepo" | |||
"code.gitea.io/gitea/modules/log" | |||
"code.gitea.io/gitea/modules/setting" | |||
"code.gitea.io/gitea/modules/timeutil" | |||
@@ -884,77 +883,6 @@ func MergeBlockedByOutdatedBranch(protectBranch *git_model.ProtectedBranch, pr * | |||
return protectBranch.BlockOnOutdatedBranch && pr.CommitsBehind > 0 | |||
} | |||
func PullRequestCodeOwnersReview(ctx context.Context, pull *Issue, pr *PullRequest) error { | |||
files := []string{"CODEOWNERS", "docs/CODEOWNERS", ".gitea/CODEOWNERS"} | |||
if pr.IsWorkInProgress(ctx) { | |||
return nil | |||
} | |||
if err := pr.LoadBaseRepo(ctx); err != nil { | |||
return err | |||
} | |||
repo, err := gitrepo.OpenRepository(ctx, pr.BaseRepo) | |||
if err != nil { | |||
return err | |||
} | |||
defer repo.Close() | |||
commit, err := repo.GetBranchCommit(pr.BaseRepo.DefaultBranch) | |||
if err != nil { | |||
return err | |||
} | |||
var data string | |||
for _, file := range files { | |||
if blob, err := commit.GetBlobByPath(file); err == nil { | |||
data, err = blob.GetBlobContent(setting.UI.MaxDisplayFileSize) | |||
if err == nil { | |||
break | |||
} | |||
} | |||
} | |||
rules, _ := GetCodeOwnersFromContent(ctx, data) | |||
changedFiles, err := repo.GetFilesChangedBetween(git.BranchPrefix+pr.BaseBranch, pr.GetGitRefName()) | |||
if err != nil { | |||
return err | |||
} | |||
uniqUsers := make(map[int64]*user_model.User) | |||
uniqTeams := make(map[string]*org_model.Team) | |||
for _, rule := range rules { | |||
for _, f := range changedFiles { | |||
if (rule.Rule.MatchString(f) && !rule.Negative) || (!rule.Rule.MatchString(f) && rule.Negative) { | |||
for _, u := range rule.Users { | |||
uniqUsers[u.ID] = u | |||
} | |||
for _, t := range rule.Teams { | |||
uniqTeams[fmt.Sprintf("%d/%d", t.OrgID, t.ID)] = t | |||
} | |||
} | |||
} | |||
} | |||
for _, u := range uniqUsers { | |||
if u.ID != pull.Poster.ID { | |||
if _, err := AddReviewRequest(ctx, pull, u, pull.Poster); err != nil { | |||
log.Warn("Failed add assignee user: %s to PR review: %s#%d, error: %s", u.Name, pr.BaseRepo.Name, pr.ID, err) | |||
return err | |||
} | |||
} | |||
} | |||
for _, t := range uniqTeams { | |||
if _, err := AddTeamReviewRequest(ctx, pull, t, pull.Poster); err != nil { | |||
log.Warn("Failed add assignee team: %s to PR review: %s#%d, error: %s", t.Name, pr.BaseRepo.Name, pr.ID, err) | |||
return err | |||
} | |||
} | |||
return nil | |||
} | |||
// GetCodeOwnersFromContent returns the code owners configuration | |||
// Return empty slice if files missing | |||
// Return warning messages on parsing errors |
@@ -11,7 +11,6 @@ import ( | |||
access_model "code.gitea.io/gitea/models/perm/access" | |||
"code.gitea.io/gitea/models/unit" | |||
user_model "code.gitea.io/gitea/models/user" | |||
"code.gitea.io/gitea/modules/base" | |||
"code.gitea.io/gitea/modules/log" | |||
"code.gitea.io/gitea/modules/util" | |||
@@ -23,7 +22,7 @@ type PullRequestsOptions struct { | |||
db.ListOptions | |||
State string | |||
SortType string | |||
Labels []string | |||
Labels []int64 | |||
MilestoneID int64 | |||
} | |||
@@ -36,11 +35,9 @@ func listPullRequestStatement(ctx context.Context, baseRepoID int64, opts *PullR | |||
sess.And("issue.is_closed=?", opts.State == "closed") | |||
} | |||
if labelIDs, err := base.StringsToInt64s(opts.Labels); err != nil { | |||
return nil, err | |||
} else if len(labelIDs) > 0 { | |||
if len(opts.Labels) > 0 { | |||
sess.Join("INNER", "issue_label", "issue.id = issue_label.issue_id"). | |||
In("issue_label.label_id", labelIDs) | |||
In("issue_label.label_id", opts.Labels) | |||
} | |||
if opts.MilestoneID > 0 { | |||
@@ -212,3 +209,12 @@ func HasMergedPullRequestInRepo(ctx context.Context, repoID, posterID int64) (bo | |||
Limit(1). | |||
Get(new(Issue)) | |||
} | |||
// GetPullRequestByIssueIDs returns all pull requests by issue ids | |||
func GetPullRequestByIssueIDs(ctx context.Context, issueIDs []int64) (PullRequestList, error) { | |||
prs := make([]*PullRequest, 0, len(issueIDs)) | |||
return prs, db.GetEngine(ctx). | |||
Where("issue_id > 0"). | |||
In("issue_id", issueIDs). | |||
Find(&prs) | |||
} |
@@ -66,7 +66,6 @@ func TestPullRequestsNewest(t *testing.T) { | |||
}, | |||
State: "open", | |||
SortType: "newest", | |||
Labels: []string{}, | |||
}) | |||
assert.NoError(t, err) | |||
assert.EqualValues(t, 3, count) | |||
@@ -113,7 +112,6 @@ func TestPullRequestsOldest(t *testing.T) { | |||
}, | |||
State: "open", | |||
SortType: "oldest", | |||
Labels: []string{}, | |||
}) | |||
assert.NoError(t, err) | |||
assert.EqualValues(t, 3, count) |
@@ -66,6 +66,23 @@ func (err ErrNotValidReviewRequest) Unwrap() error { | |||
return util.ErrInvalidArgument | |||
} | |||
// ErrReviewRequestOnClosedPR represents an error when an user tries to request a re-review on a closed or merged PR. | |||
type ErrReviewRequestOnClosedPR struct{} | |||
// IsErrReviewRequestOnClosedPR checks if an error is an ErrReviewRequestOnClosedPR. | |||
func IsErrReviewRequestOnClosedPR(err error) bool { | |||
_, ok := err.(ErrReviewRequestOnClosedPR) | |||
return ok | |||
} | |||
func (err ErrReviewRequestOnClosedPR) Error() string { | |||
return "cannot request a re-review on a closed or merged PR" | |||
} | |||
func (err ErrReviewRequestOnClosedPR) Unwrap() error { | |||
return util.ErrPermissionDenied | |||
} | |||
// ReviewType defines the sort of feedback a review gives | |||
type ReviewType int | |||
@@ -239,11 +256,11 @@ type CreateReviewOptions struct { | |||
// IsOfficialReviewer check if at least one of the provided reviewers can make official reviews in issue (counts towards required approvals) | |||
func IsOfficialReviewer(ctx context.Context, issue *Issue, reviewer *user_model.User) (bool, error) { | |||
pr, err := GetPullRequestByIssueID(ctx, issue.ID) | |||
if err != nil { | |||
if err := issue.LoadPullRequest(ctx); err != nil { | |||
return false, err | |||
} | |||
pr := issue.PullRequest | |||
rule, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch) | |||
if err != nil { | |||
return false, err | |||
@@ -271,11 +288,10 @@ func IsOfficialReviewer(ctx context.Context, issue *Issue, reviewer *user_model. | |||
// IsOfficialReviewerTeam check if reviewer in this team can make official reviews in issue (counts towards required approvals) | |||
func IsOfficialReviewerTeam(ctx context.Context, issue *Issue, team *organization.Team) (bool, error) { | |||
pr, err := GetPullRequestByIssueID(ctx, issue.ID) | |||
if err != nil { | |||
if err := issue.LoadPullRequest(ctx); err != nil { | |||
return false, err | |||
} | |||
pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch) | |||
pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, issue.PullRequest.BaseRepoID, issue.PullRequest.BaseBranch) | |||
if err != nil { | |||
return false, err | |||
} | |||
@@ -619,9 +635,24 @@ func AddReviewRequest(ctx context.Context, issue *Issue, reviewer, doer *user_mo | |||
return nil, err | |||
} | |||
// skip it when reviewer hase been request to review | |||
if review != nil && review.Type == ReviewTypeRequest { | |||
return nil, nil | |||
if review != nil { | |||
// skip it when reviewer hase been request to review | |||
if review.Type == ReviewTypeRequest { | |||
return nil, committer.Commit() // still commit the transaction, or committer.Close() will rollback it, even if it's a reused transaction. | |||
} | |||
if issue.IsClosed { | |||
return nil, ErrReviewRequestOnClosedPR{} | |||
} | |||
if issue.IsPull { | |||
if err := issue.LoadPullRequest(ctx); err != nil { | |||
return nil, err | |||
} | |||
if issue.PullRequest.HasMerged { | |||
return nil, ErrReviewRequestOnClosedPR{} | |||
} | |||
} | |||
} | |||
// if the reviewer is an official reviewer, |
@@ -288,3 +288,33 @@ func TestDeleteDismissedReview(t *testing.T) { | |||
assert.NoError(t, issues_model.DeleteReview(db.DefaultContext, review)) | |||
unittest.AssertNotExistsBean(t, &issues_model.Comment{ID: comment.ID}) | |||
} | |||
func TestAddReviewRequest(t *testing.T) { | |||
assert.NoError(t, unittest.PrepareTestDatabase()) | |||
pull := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1}) | |||
assert.NoError(t, pull.LoadIssue(db.DefaultContext)) | |||
issue := pull.Issue | |||
assert.NoError(t, issue.LoadRepo(db.DefaultContext)) | |||
reviewer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1}) | |||
_, err := issues_model.CreateReview(db.DefaultContext, issues_model.CreateReviewOptions{ | |||
Issue: issue, | |||
Reviewer: reviewer, | |||
Type: issues_model.ReviewTypeReject, | |||
}) | |||
assert.NoError(t, err) | |||
pull.HasMerged = false | |||
assert.NoError(t, pull.UpdateCols(db.DefaultContext, "has_merged")) | |||
issue.IsClosed = true | |||
_, err = issues_model.AddReviewRequest(db.DefaultContext, issue, reviewer, &user_model.User{}) | |||
assert.Error(t, err) | |||
assert.True(t, issues_model.IsErrReviewRequestOnClosedPR(err)) | |||
pull.HasMerged = true | |||
assert.NoError(t, pull.UpdateCols(db.DefaultContext, "has_merged")) | |||
issue.IsClosed = false | |||
_, err = issues_model.AddReviewRequest(db.DefaultContext, issue, reviewer, &user_model.User{}) | |||
assert.Error(t, err) | |||
assert.True(t, issues_model.IsErrReviewRequestOnClosedPR(err)) | |||
} |
@@ -0,0 +1,23 @@ | |||
- | |||
id: 1 | |||
title: project without default column | |||
owner_id: 2 | |||
repo_id: 0 | |||
is_closed: false | |||
creator_id: 2 | |||
board_type: 1 | |||
type: 2 | |||
created_unix: 1688973000 | |||
updated_unix: 1688973000 | |||
- | |||
id: 2 | |||
title: project with multiple default columns | |||
owner_id: 2 | |||
repo_id: 0 | |||
is_closed: false | |||
creator_id: 2 | |||
board_type: 1 | |||
type: 2 | |||
created_unix: 1688973000 | |||
updated_unix: 1688973000 |
@@ -0,0 +1,26 @@ | |||
- | |||
id: 1 | |||
project_id: 1 | |||
title: Done | |||
creator_id: 2 | |||
default: false | |||
created_unix: 1588117528 | |||
updated_unix: 1588117528 | |||
- | |||
id: 2 | |||
project_id: 2 | |||
title: Backlog | |||
creator_id: 2 | |||
default: true | |||
created_unix: 1588117528 | |||
updated_unix: 1588117528 | |||
- | |||
id: 3 | |||
project_id: 2 | |||
title: Uncategorized | |||
creator_id: 2 | |||
default: true | |||
created_unix: 1588117528 | |||
updated_unix: 1588117528 |
@@ -568,6 +568,10 @@ var migrations = []Migration{ | |||
NewMigration("Add PayloadVersion to HookTask", v1_22.AddPayloadVersionToHookTaskTable), | |||
// v291 -> v292 | |||
NewMigration("Add Index to attachment.comment_id", v1_22.AddCommentIDIndexofAttachment), | |||
// v292 -> v293 | |||
NewMigration("Ensure every project has exactly one default column - No Op", noopMigration), | |||
// v293 -> v294 | |||
NewMigration("Ensure every project has exactly one default column", v1_22.CheckProjectColumnsConsistency), | |||
} | |||
// GetCurrentDBVersion returns the current db version |
@@ -0,0 +1,9 @@ | |||
// Copyright 2024 The Gitea Authors. All rights reserved. | |||
// SPDX-License-Identifier: MIT | |||
package v1_22 //nolint | |||
// NOTE: noop the original migration has bug which some projects will be skip, so | |||
// these projects will have no default board. | |||
// So that this migration will be skipped and go to v293.go | |||
// This file is a placeholder so that readers can know what happened |
@@ -0,0 +1,108 @@ | |||
// Copyright 2024 The Gitea Authors. All rights reserved. | |||
// SPDX-License-Identifier: MIT | |||
package v1_22 //nolint | |||
import ( | |||
"code.gitea.io/gitea/modules/setting" | |||
"code.gitea.io/gitea/modules/timeutil" | |||
"xorm.io/xorm" | |||
) | |||
// CheckProjectColumnsConsistency ensures there is exactly one default board per project present | |||
func CheckProjectColumnsConsistency(x *xorm.Engine) error { | |||
sess := x.NewSession() | |||
defer sess.Close() | |||
limit := setting.Database.IterateBufferSize | |||
if limit <= 0 { | |||
limit = 50 | |||
} | |||
type Project struct { | |||
ID int64 | |||
CreatorID int64 | |||
BoardID int64 | |||
} | |||
type ProjectBoard struct { | |||
ID int64 `xorm:"pk autoincr"` | |||
Title string | |||
Default bool `xorm:"NOT NULL DEFAULT false"` // issues not assigned to a specific board will be assigned to this board | |||
Sorting int8 `xorm:"NOT NULL DEFAULT 0"` | |||
Color string `xorm:"VARCHAR(7)"` | |||
ProjectID int64 `xorm:"INDEX NOT NULL"` | |||
CreatorID int64 `xorm:"NOT NULL"` | |||
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` | |||
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` | |||
} | |||
for { | |||
if err := sess.Begin(); err != nil { | |||
return err | |||
} | |||
// all these projects without defaults will be fixed in the same loop, so | |||
// we just need to always get projects without defaults until no such project | |||
var projects []*Project | |||
if err := sess.Select("project.id as id, project.creator_id, project_board.id as board_id"). | |||
Join("LEFT", "project_board", "project_board.project_id = project.id AND project_board.`default`=?", true). | |||
Where("project_board.id is NULL OR project_board.id = 0"). | |||
Limit(limit). | |||
Find(&projects); err != nil { | |||
return err | |||
} | |||
for _, p := range projects { | |||
if _, err := sess.Insert(ProjectBoard{ | |||
ProjectID: p.ID, | |||
Default: true, | |||
Title: "Uncategorized", | |||
CreatorID: p.CreatorID, | |||
}); err != nil { | |||
return err | |||
} | |||
} | |||
if err := sess.Commit(); err != nil { | |||
return err | |||
} | |||
if len(projects) == 0 { | |||
break | |||
} | |||
} | |||
sess.Close() | |||
return removeDuplicatedBoardDefault(x) | |||
} | |||
func removeDuplicatedBoardDefault(x *xorm.Engine) error { | |||
type ProjectInfo struct { | |||
ProjectID int64 | |||
DefaultNum int | |||
} | |||
var projects []ProjectInfo | |||
if err := x.Select("project_id, count(*) AS default_num"). | |||
Table("project_board"). | |||
Where("`default` = ?", true). | |||
GroupBy("project_id"). | |||
Having("count(*) > 1"). | |||
Find(&projects); err != nil { | |||
return err | |||
} | |||
for _, project := range projects { | |||
if _, err := x.Where("project_id=?", project.ProjectID). | |||
Table("project_board"). | |||
Limit(project.DefaultNum - 1). | |||
Update(map[string]bool{ | |||
"`default`": false, | |||
}); err != nil { | |||
return err | |||
} | |||
} | |||
return nil | |||
} |
@@ -0,0 +1,44 @@ | |||
// Copyright 2024 The Gitea Authors. All rights reserved. | |||
// SPDX-License-Identifier: MIT | |||
package v1_22 //nolint | |||
import ( | |||
"testing" | |||
"code.gitea.io/gitea/models/db" | |||
"code.gitea.io/gitea/models/migrations/base" | |||
"code.gitea.io/gitea/models/project" | |||
"github.com/stretchr/testify/assert" | |||
) | |||
func Test_CheckProjectColumnsConsistency(t *testing.T) { | |||
// Prepare and load the testing database | |||
x, deferable := base.PrepareTestEnv(t, 0, new(project.Project), new(project.Board)) | |||
defer deferable() | |||
if x == nil || t.Failed() { | |||
return | |||
} | |||
assert.NoError(t, CheckProjectColumnsConsistency(x)) | |||
// check if default board was added | |||
var defaultBoard project.Board | |||
has, err := x.Where("project_id=? AND `default` = ?", 1, true).Get(&defaultBoard) | |||
assert.NoError(t, err) | |||
assert.True(t, has) | |||
assert.Equal(t, int64(1), defaultBoard.ProjectID) | |||
assert.True(t, defaultBoard.Default) | |||
// check if multiple defaults, previous were removed and last will be kept | |||
expectDefaultBoard, err := project.GetBoard(db.DefaultContext, 2) | |||
assert.NoError(t, err) | |||
assert.Equal(t, int64(2), expectDefaultBoard.ProjectID) | |||
assert.False(t, expectDefaultBoard.Default) | |||
expectNonDefaultBoard, err := project.GetBoard(db.DefaultContext, 3) | |||
assert.NoError(t, err) | |||
assert.Equal(t, int64(2), expectNonDefaultBoard.ProjectID) | |||
assert.True(t, expectNonDefaultBoard.Default) | |||
} |
@@ -319,8 +319,9 @@ func CreateOrganization(ctx context.Context, org *Organization, owner *user_mode | |||
// Add initial creator to organization and owner team. | |||
if err = db.Insert(ctx, &OrgUser{ | |||
UID: owner.ID, | |||
OrgID: org.ID, | |||
UID: owner.ID, | |||
OrgID: org.ID, | |||
IsPublic: setting.Service.DefaultOrgMemberVisible, | |||
}); err != nil { | |||
return fmt.Errorf("insert org-user relation: %w", err) | |||
} |
@@ -123,6 +123,17 @@ func createBoardsForProjectsType(ctx context.Context, project *Project) error { | |||
return nil | |||
} | |||
board := Board{ | |||
CreatedUnix: timeutil.TimeStampNow(), | |||
CreatorID: project.CreatorID, | |||
Title: "Backlog", | |||
ProjectID: project.ID, | |||
Default: true, | |||
} | |||
if err := db.Insert(ctx, board); err != nil { | |||
return err | |||
} | |||
if len(items) == 0 { | |||
return nil | |||
} | |||
@@ -176,6 +187,10 @@ func deleteBoardByID(ctx context.Context, boardID int64) error { | |||
return err | |||
} | |||
if board.Default { | |||
return fmt.Errorf("deleteBoardByID: cannot delete default board") | |||
} | |||
if err = board.removeIssues(ctx); err != nil { | |||
return err | |||
} | |||
@@ -194,7 +209,6 @@ func deleteBoardByProjectID(ctx context.Context, projectID int64) error { | |||
// GetBoard fetches the current board of a project | |||
func GetBoard(ctx context.Context, boardID int64) (*Board, error) { | |||
board := new(Board) | |||
has, err := db.GetEngine(ctx).ID(boardID).Get(board) | |||
if err != nil { | |||
return nil, err | |||
@@ -228,7 +242,6 @@ func UpdateBoard(ctx context.Context, board *Board) error { | |||
} | |||
// GetBoards fetches all boards related to a project | |||
// if no default board set, first board is a temporary "Uncategorized" board | |||
func (p *Project) GetBoards(ctx context.Context) (BoardList, error) { | |||
boards := make([]*Board, 0, 5) | |||
@@ -244,53 +257,64 @@ func (p *Project) GetBoards(ctx context.Context) (BoardList, error) { | |||
return append([]*Board{defaultB}, boards...), nil | |||
} | |||
// getDefaultBoard return default board and create a dummy if none exist | |||
// getDefaultBoard return default board and ensure only one exists | |||
func (p *Project) getDefaultBoard(ctx context.Context) (*Board, error) { | |||
var board Board | |||
exist, err := db.GetEngine(ctx).Where("project_id=? AND `default`=?", p.ID, true).Get(&board) | |||
has, err := db.GetEngine(ctx). | |||
Where("project_id=? AND `default` = ?", p.ID, true). | |||
Desc("id").Get(&board) | |||
if err != nil { | |||
return nil, err | |||
} | |||
if exist { | |||
if has { | |||
return &board, nil | |||
} | |||
// represents a board for issues not assigned to one | |||
return &Board{ | |||
// create a default board if none is found | |||
board = Board{ | |||
ProjectID: p.ID, | |||
Title: "Uncategorized", | |||
Default: true, | |||
}, nil | |||
Title: "Uncategorized", | |||
CreatorID: p.CreatorID, | |||
} | |||
if _, err := db.GetEngine(ctx).Insert(&board); err != nil { | |||
return nil, err | |||
} | |||
return &board, nil | |||
} | |||
// SetDefaultBoard represents a board for issues not assigned to one | |||
// if boardID is 0 unset default | |||
func SetDefaultBoard(ctx context.Context, projectID, boardID int64) error { | |||
_, err := db.GetEngine(ctx).Where(builder.Eq{ | |||
"project_id": projectID, | |||
"`default`": true, | |||
}).Cols("`default`").Update(&Board{Default: false}) | |||
if err != nil { | |||
return err | |||
} | |||
return db.WithTx(ctx, func(ctx context.Context) error { | |||
if _, err := GetBoard(ctx, boardID); err != nil { | |||
return err | |||
} | |||
if boardID > 0 { | |||
_, err = db.GetEngine(ctx).ID(boardID).Where(builder.Eq{"project_id": projectID}). | |||
Cols("`default`").Update(&Board{Default: true}) | |||
} | |||
if _, err := db.GetEngine(ctx).Where(builder.Eq{ | |||
"project_id": projectID, | |||
"`default`": true, | |||
}).Cols("`default`").Update(&Board{Default: false}); err != nil { | |||
return err | |||
} | |||
return err | |||
_, err := db.GetEngine(ctx).ID(boardID). | |||
Where(builder.Eq{"project_id": projectID}). | |||
Cols("`default`").Update(&Board{Default: true}) | |||
return err | |||
}) | |||
} | |||
// UpdateBoardSorting update project board sorting | |||
func UpdateBoardSorting(ctx context.Context, bs BoardList) error { | |||
for i := range bs { | |||
_, err := db.GetEngine(ctx).ID(bs[i].ID).Cols( | |||
"sorting", | |||
).Update(bs[i]) | |||
if err != nil { | |||
return err | |||
return db.WithTx(ctx, func(ctx context.Context) error { | |||
for i := range bs { | |||
if _, err := db.GetEngine(ctx).ID(bs[i].ID).Cols( | |||
"sorting", | |||
).Update(bs[i]); err != nil { | |||
return err | |||
} | |||
} | |||
} | |||
return nil | |||
return nil | |||
}) | |||
} |
@@ -0,0 +1,44 @@ | |||
// Copyright 2020 The Gitea Authors. All rights reserved. | |||
// SPDX-License-Identifier: MIT | |||
package project | |||
import ( | |||
"testing" | |||
"code.gitea.io/gitea/models/db" | |||
"code.gitea.io/gitea/models/unittest" | |||
"github.com/stretchr/testify/assert" | |||
) | |||
func TestGetDefaultBoard(t *testing.T) { | |||
assert.NoError(t, unittest.PrepareTestDatabase()) | |||
projectWithoutDefault, err := GetProjectByID(db.DefaultContext, 5) | |||
assert.NoError(t, err) | |||
// check if default board was added | |||
board, err := projectWithoutDefault.getDefaultBoard(db.DefaultContext) | |||
assert.NoError(t, err) | |||
assert.Equal(t, int64(5), board.ProjectID) | |||
assert.Equal(t, "Uncategorized", board.Title) | |||
projectWithMultipleDefaults, err := GetProjectByID(db.DefaultContext, 6) | |||
assert.NoError(t, err) | |||
// check if multiple defaults were removed | |||
board, err = projectWithMultipleDefaults.getDefaultBoard(db.DefaultContext) | |||
assert.NoError(t, err) | |||
assert.Equal(t, int64(6), board.ProjectID) | |||
assert.Equal(t, int64(9), board.ID) | |||
// set 8 as default board | |||
assert.NoError(t, SetDefaultBoard(db.DefaultContext, board.ProjectID, 8)) | |||
// then 9 will become a non-default board | |||
board, err = GetBoard(db.DefaultContext, 9) | |||
assert.NoError(t, err) | |||
assert.Equal(t, int64(6), board.ProjectID) | |||
assert.False(t, board.Default) | |||
} |
@@ -92,19 +92,19 @@ func TestProjectsSort(t *testing.T) { | |||
}{ | |||
{ | |||
sortType: "default", | |||
wants: []int64{1, 3, 2, 4}, | |||
wants: []int64{1, 3, 2, 6, 5, 4}, | |||
}, | |||
{ | |||
sortType: "oldest", | |||
wants: []int64{4, 2, 3, 1}, | |||
wants: []int64{4, 5, 6, 2, 3, 1}, | |||
}, | |||
{ | |||
sortType: "recentupdate", | |||
wants: []int64{1, 3, 2, 4}, | |||
wants: []int64{1, 3, 2, 6, 5, 4}, | |||
}, | |||
{ | |||
sortType: "leastupdate", | |||
wants: []int64{4, 2, 3, 1}, | |||
wants: []int64{4, 5, 6, 2, 3, 1}, | |||
}, | |||
} | |||
@@ -113,8 +113,8 @@ func TestProjectsSort(t *testing.T) { | |||
OrderBy: GetSearchOrderByBySortType(tt.sortType), | |||
}) | |||
assert.NoError(t, err) | |||
assert.EqualValues(t, int64(4), count) | |||
if assert.Len(t, projects, 4) { | |||
assert.EqualValues(t, int64(6), count) | |||
if assert.Len(t, projects, 6) { | |||
for i := range projects { | |||
assert.EqualValues(t, tt.wants[i], projects[i].ID) | |||
} |
@@ -434,7 +434,7 @@ func SearchEmails(ctx context.Context, opts *SearchEmailOptions) ([]*SearchEmail | |||
cond = cond.And(builder.Eq{"email_address.is_activated": opts.IsActivated.Value()}) | |||
} | |||
count, err := db.GetEngine(ctx).Join("INNER", "`user`", "`user`.ID = email_address.uid"). | |||
count, err := db.GetEngine(ctx).Join("INNER", "`user`", "`user`.id = email_address.uid"). | |||
Where(cond).Count(new(EmailAddress)) | |||
if err != nil { | |||
return nil, 0, fmt.Errorf("Count: %w", err) | |||
@@ -450,7 +450,7 @@ func SearchEmails(ctx context.Context, opts *SearchEmailOptions) ([]*SearchEmail | |||
emails := make([]*SearchEmailResult, 0, opts.PageSize) | |||
err = db.GetEngine(ctx).Table("email_address"). | |||
Select("email_address.*, `user`.name, `user`.full_name"). | |||
Join("INNER", "`user`", "`user`.ID = email_address.uid"). | |||
Join("INNER", "`user`", "`user`.id = email_address.uid"). | |||
Where(cond). | |||
OrderBy(orderby). | |||
Limit(opts.PageSize, (opts.Page-1)*opts.PageSize). |
@@ -100,7 +100,7 @@ func ReadLogs(ctx context.Context, inStorage bool, filename string, offset, limi | |||
} | |||
if err := scanner.Err(); err != nil { | |||
return nil, fmt.Errorf("scan: %w", err) | |||
return nil, fmt.Errorf("ReadLogs scan: %w", err) | |||
} | |||
return rows, nil |
@@ -41,6 +41,12 @@ func FullSteps(task *actions_model.ActionTask) []*actions_model.ActionTaskStep { | |||
} | |||
logIndex += preStep.LogLength | |||
// lastHasRunStep is the last step that has run. | |||
// For example, | |||
// 1. preStep(Success) -> step1(Success) -> step2(Running) -> step3(Waiting) -> postStep(Waiting): lastHasRunStep is step1. | |||
// 2. preStep(Success) -> step1(Success) -> step2(Success) -> step3(Success) -> postStep(Success): lastHasRunStep is step3. | |||
// 3. preStep(Success) -> step1(Success) -> step2(Failure) -> step3 -> postStep(Waiting): lastHasRunStep is step2. | |||
// So its Stopped is the Started of postStep when there are no more steps to run. | |||
var lastHasRunStep *actions_model.ActionTaskStep | |||
for _, step := range task.Steps { | |||
if step.Status.HasRun() { | |||
@@ -56,11 +62,15 @@ func FullSteps(task *actions_model.ActionTask) []*actions_model.ActionTaskStep { | |||
Name: postStepName, | |||
Status: actions_model.StatusWaiting, | |||
} | |||
if task.Status.IsDone() { | |||
// If the lastHasRunStep is the last step, or it has failed, postStep has started. | |||
if lastHasRunStep.Status.IsFailure() || lastHasRunStep == task.Steps[len(task.Steps)-1] { | |||
postStep.LogIndex = logIndex | |||
postStep.LogLength = task.LogLength - postStep.LogIndex | |||
postStep.Status = task.Status | |||
postStep.Started = lastHasRunStep.Stopped | |||
postStep.Status = actions_model.StatusRunning | |||
} | |||
if task.Status.IsDone() { | |||
postStep.Status = task.Status | |||
postStep.Stopped = task.Stopped | |||
} | |||
ret := make([]*actions_model.ActionTaskStep, 0, len(task.Steps)+2) |
@@ -103,6 +103,40 @@ func TestFullSteps(t *testing.T) { | |||
{Name: postStepName, Status: actions_model.StatusSuccess, LogIndex: 100, LogLength: 0, Started: 10100, Stopped: 10100}, | |||
}, | |||
}, | |||
{ | |||
name: "all steps finished but task is running", | |||
task: &actions_model.ActionTask{ | |||
Steps: []*actions_model.ActionTaskStep{ | |||
{Status: actions_model.StatusSuccess, LogIndex: 10, LogLength: 80, Started: 10010, Stopped: 10090}, | |||
}, | |||
Status: actions_model.StatusRunning, | |||
Started: 10000, | |||
Stopped: 0, | |||
LogLength: 100, | |||
}, | |||
want: []*actions_model.ActionTaskStep{ | |||
{Name: preStepName, Status: actions_model.StatusSuccess, LogIndex: 0, LogLength: 10, Started: 10000, Stopped: 10010}, | |||
{Status: actions_model.StatusSuccess, LogIndex: 10, LogLength: 80, Started: 10010, Stopped: 10090}, | |||
{Name: postStepName, Status: actions_model.StatusRunning, LogIndex: 90, LogLength: 10, Started: 10090, Stopped: 0}, | |||
}, | |||
}, | |||
{ | |||
name: "skipped task", | |||
task: &actions_model.ActionTask{ | |||
Steps: []*actions_model.ActionTaskStep{ | |||
{Status: actions_model.StatusSkipped, LogIndex: 0, LogLength: 0, Started: 0, Stopped: 0}, | |||
}, | |||
Status: actions_model.StatusSkipped, | |||
Started: 0, | |||
Stopped: 0, | |||
LogLength: 0, | |||
}, | |||
want: []*actions_model.ActionTaskStep{ | |||
{Name: preStepName, Status: actions_model.StatusSkipped, LogIndex: 0, LogLength: 0, Started: 0, Stopped: 0}, | |||
{Status: actions_model.StatusSkipped, LogIndex: 0, LogLength: 0, Started: 0, Stopped: 0}, | |||
{Name: postStepName, Status: actions_model.StatusSkipped, LogIndex: 0, LogLength: 0, Started: 0, Stopped: 0}, | |||
}, | |||
}, | |||
} | |||
for _, tt := range tests { | |||
t.Run(tt.name, func(t *testing.T) { |
@@ -150,13 +150,16 @@ func TruncateString(str string, limit int) string { | |||
// StringsToInt64s converts a slice of string to a slice of int64. | |||
func StringsToInt64s(strs []string) ([]int64, error) { | |||
ints := make([]int64, len(strs)) | |||
for i := range strs { | |||
n, err := strconv.ParseInt(strs[i], 10, 64) | |||
if strs == nil { | |||
return nil, nil | |||
} | |||
ints := make([]int64, 0, len(strs)) | |||
for _, s := range strs { | |||
n, err := strconv.ParseInt(s, 10, 64) | |||
if err != nil { | |||
return ints, err | |||
return nil, err | |||
} | |||
ints[i] = n | |||
ints = append(ints, n) | |||
} | |||
return ints, nil | |||
} |
@@ -138,12 +138,13 @@ func TestStringsToInt64s(t *testing.T) { | |||
assert.NoError(t, err) | |||
assert.Equal(t, expected, result) | |||
} | |||
testSuccess(nil, nil) | |||
testSuccess([]string{}, []int64{}) | |||
testSuccess([]string{"-1234"}, []int64{-1234}) | |||
testSuccess([]string{"1", "4", "16", "64", "256"}, | |||
[]int64{1, 4, 16, 64, 256}) | |||
testSuccess([]string{"1", "4", "16", "64", "256"}, []int64{1, 4, 16, 64, 256}) | |||
_, err := StringsToInt64s([]string{"-1", "a", "$"}) | |||
ints, err := StringsToInt64s([]string{"-1", "a"}) | |||
assert.Len(t, ints, 0) | |||
assert.Error(t, err) | |||
} | |||
@@ -367,7 +367,6 @@ type RunStdError interface { | |||
error | |||
Unwrap() error | |||
Stderr() string | |||
IsExitCode(code int) bool | |||
} | |||
type runStdError struct { | |||
@@ -392,9 +391,9 @@ func (r *runStdError) Stderr() string { | |||
return r.stderr | |||
} | |||
func (r *runStdError) IsExitCode(code int) bool { | |||
func IsErrorExitCode(err error, code int) bool { | |||
var exitError *exec.ExitError | |||
if errors.As(r.err, &exitError) { | |||
if errors.As(err, &exitError) { | |||
return exitError.ExitCode() == code | |||
} | |||
return false |
@@ -9,6 +9,7 @@ import ( | |||
"bytes" | |||
"context" | |||
"errors" | |||
"fmt" | |||
"io" | |||
"os/exec" | |||
"strconv" | |||
@@ -396,6 +397,9 @@ func (c *Commit) GetSubModules() (*ObjectCache, error) { | |||
} | |||
} | |||
} | |||
if err = scanner.Err(); err != nil { | |||
return nil, fmt.Errorf("GetSubModules scan: %w", err) | |||
} | |||
return c.submoduleCache, nil | |||
} |
@@ -341,7 +341,7 @@ func checkGitVersionCompatibility(gitVer *version.Version) error { | |||
func configSet(key, value string) error { | |||
stdout, _, err := NewCommand(DefaultContext, "config", "--global", "--get").AddDynamicArguments(key).RunStdString(nil) | |||
if err != nil && !err.IsExitCode(1) { | |||
if err != nil && !IsErrorExitCode(err, 1) { | |||
return fmt.Errorf("failed to get git config %s, err: %w", key, err) | |||
} | |||
@@ -364,7 +364,7 @@ func configSetNonExist(key, value string) error { | |||
// already exist | |||
return nil | |||
} | |||
if err.IsExitCode(1) { | |||
if IsErrorExitCode(err, 1) { | |||
// not exist, set new config | |||
_, _, err = NewCommand(DefaultContext, "config", "--global").AddDynamicArguments(key, value).RunStdString(nil) | |||
if err != nil { | |||
@@ -382,7 +382,7 @@ func configAddNonExist(key, value string) error { | |||
// already exist | |||
return nil | |||
} | |||
if err.IsExitCode(1) { | |||
if IsErrorExitCode(err, 1) { | |||
// not exist, add new config | |||
_, _, err = NewCommand(DefaultContext, "config", "--global", "--add").AddDynamicArguments(key, value).RunStdString(nil) | |||
if err != nil { | |||
@@ -403,7 +403,7 @@ func configUnsetAll(key, value string) error { | |||
} | |||
return nil | |||
} | |||
if err.IsExitCode(1) { | |||
if IsErrorExitCode(err, 1) { | |||
// not exist | |||
return nil | |||
} |
@@ -0,0 +1,118 @@ | |||
// Copyright 2024 The Gitea Authors. All rights reserved. | |||
// SPDX-License-Identifier: MIT | |||
package git | |||
import ( | |||
"bufio" | |||
"bytes" | |||
"context" | |||
"errors" | |||
"fmt" | |||
"os" | |||
"strconv" | |||
"strings" | |||
"code.gitea.io/gitea/modules/util" | |||
) | |||
type GrepResult struct { | |||
Filename string | |||
LineNumbers []int | |||
LineCodes []string | |||
} | |||
type GrepOptions struct { | |||
RefName string | |||
MaxResultLimit int | |||
ContextLineNumber int | |||
IsFuzzy bool | |||
} | |||
func GrepSearch(ctx context.Context, repo *Repository, search string, opts GrepOptions) ([]*GrepResult, error) { | |||
stdoutReader, stdoutWriter, err := os.Pipe() | |||
if err != nil { | |||
return nil, fmt.Errorf("unable to create os pipe to grep: %w", err) | |||
} | |||
defer func() { | |||
_ = stdoutReader.Close() | |||
_ = stdoutWriter.Close() | |||
}() | |||
/* | |||
The output is like this ( "^@" means \x00): | |||
HEAD:.air.toml | |||
6^@bin = "gitea" | |||
HEAD:.changelog.yml | |||
2^@repo: go-gitea/gitea | |||
*/ | |||
var results []*GrepResult | |||
cmd := NewCommand(ctx, "grep", "--null", "--break", "--heading", "--fixed-strings", "--line-number", "--ignore-case", "--full-name") | |||
cmd.AddOptionValues("--context", fmt.Sprint(opts.ContextLineNumber)) | |||
if opts.IsFuzzy { | |||
words := strings.Fields(search) | |||
for _, word := range words { | |||
cmd.AddOptionValues("-e", strings.TrimLeft(word, "-")) | |||
} | |||
} else { | |||
cmd.AddOptionValues("-e", strings.TrimLeft(search, "-")) | |||
} | |||
cmd.AddDynamicArguments(util.IfZero(opts.RefName, "HEAD")) | |||
opts.MaxResultLimit = util.IfZero(opts.MaxResultLimit, 50) | |||
stderr := bytes.Buffer{} | |||
err = cmd.Run(&RunOpts{ | |||
Dir: repo.Path, | |||
Stdout: stdoutWriter, | |||
Stderr: &stderr, | |||
PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error { | |||
_ = stdoutWriter.Close() | |||
defer stdoutReader.Close() | |||
isInBlock := false | |||
scanner := bufio.NewScanner(stdoutReader) | |||
var res *GrepResult | |||
for scanner.Scan() { | |||
line := scanner.Text() | |||
if !isInBlock { | |||
if _ /* ref */, filename, ok := strings.Cut(line, ":"); ok { | |||
isInBlock = true | |||
res = &GrepResult{Filename: filename} | |||
results = append(results, res) | |||
} | |||
continue | |||
} | |||
if line == "" { | |||
if len(results) >= opts.MaxResultLimit { | |||
cancel() | |||
break | |||
} | |||
isInBlock = false | |||
continue | |||
} | |||
if line == "--" { | |||
continue | |||
} | |||
if lineNum, lineCode, ok := strings.Cut(line, "\x00"); ok { | |||
lineNumInt, _ := strconv.Atoi(lineNum) | |||
res.LineNumbers = append(res.LineNumbers, lineNumInt) | |||
res.LineCodes = append(res.LineCodes, lineCode) | |||
} | |||
} | |||
return scanner.Err() | |||
}, | |||
}) | |||
// git grep exits by cancel (killed), usually it is caused by the limit of results | |||
if IsErrorExitCode(err, -1) && stderr.Len() == 0 { | |||
return results, nil | |||
} | |||
// git grep exits with 1 if no results are found | |||
if IsErrorExitCode(err, 1) && stderr.Len() == 0 { | |||
return nil, nil | |||
} | |||
if err != nil && !errors.Is(err, context.Canceled) { | |||
return nil, fmt.Errorf("unable to run git grep: %w, stderr: %s", err, stderr.String()) | |||
} | |||
return results, nil | |||
} |
@@ -0,0 +1,51 @@ | |||
// Copyright 2024 The Gitea Authors. All rights reserved. | |||
// SPDX-License-Identifier: MIT | |||
package git | |||
import ( | |||
"context" | |||
"path/filepath" | |||
"testing" | |||
"github.com/stretchr/testify/assert" | |||
) | |||
func TestGrepSearch(t *testing.T) { | |||
repo, err := openRepositoryWithDefaultContext(filepath.Join(testReposDir, "language_stats_repo")) | |||
assert.NoError(t, err) | |||
defer repo.Close() | |||
res, err := GrepSearch(context.Background(), repo, "void", GrepOptions{}) | |||
assert.NoError(t, err) | |||
assert.Equal(t, []*GrepResult{ | |||
{ | |||
Filename: "java-hello/main.java", | |||
LineNumbers: []int{3}, | |||
LineCodes: []string{" public static void main(String[] args)"}, | |||
}, | |||
{ | |||
Filename: "main.vendor.java", | |||
LineNumbers: []int{3}, | |||
LineCodes: []string{" public static void main(String[] args)"}, | |||
}, | |||
}, res) | |||
res, err = GrepSearch(context.Background(), repo, "void", GrepOptions{MaxResultLimit: 1}) | |||
assert.NoError(t, err) | |||
assert.Equal(t, []*GrepResult{ | |||
{ | |||
Filename: "java-hello/main.java", | |||
LineNumbers: []int{3}, | |||
LineCodes: []string{" public static void main(String[] args)"}, | |||
}, | |||
}, res) | |||
res, err = GrepSearch(context.Background(), repo, "no-such-content", GrepOptions{}) | |||
assert.NoError(t, err) | |||
assert.Len(t, res, 0) | |||
res, err = GrepSearch(context.Background(), &Repository{Path: "no-such-git-repo"}, "no-such-content", GrepOptions{}) | |||
assert.Error(t, err) | |||
assert.Len(t, res, 0) | |||
} |
@@ -283,7 +283,7 @@ type DivergeObject struct { | |||
// GetDivergingCommits returns the number of commits a targetBranch is ahead or behind a baseBranch | |||
func GetDivergingCommits(ctx context.Context, repoPath, baseBranch, targetBranch string) (do DivergeObject, err error) { | |||
cmd := NewCommand(ctx, "rev-list", "--count", "--left-right"). | |||
AddDynamicArguments(baseBranch + "..." + targetBranch) | |||
AddDynamicArguments(baseBranch + "..." + targetBranch).AddArguments("--") | |||
stdout, _, err := cmd.RunStdString(&RunOpts{Dir: repoPath}) | |||
if err != nil { | |||
return do, err |
@@ -124,6 +124,10 @@ func (repo *Repository) GetCodeActivityStats(fromTime time.Time, branch string) | |||
} | |||
} | |||
} | |||
if err = scanner.Err(); err != nil { | |||
_ = stdoutReader.Close() | |||
return fmt.Errorf("GetCodeActivityStats scan: %w", err) | |||
} | |||
a := make([]*CodeActivityAuthor, 0, len(authors)) | |||
for _, v := range authors { | |||
a = append(a, v) |
@@ -233,7 +233,10 @@ func (g *Manager) setStateTransition(old, new state) bool { | |||
// At the moment the total number of servers (numberOfServersToCreate) are pre-defined as a const before global init, | |||
// so this function MUST be called if a server is not used. | |||
func (g *Manager) InformCleanup() { | |||
g.createServerWaitGroup.Done() | |||
g.createServerCond.L.Lock() | |||
defer g.createServerCond.L.Unlock() | |||
g.createdServer++ | |||
g.createServerCond.Signal() | |||
} | |||
// Done allows the manager to be viewed as a context.Context, it returns a channel that is closed when the server is finished terminating |
@@ -42,8 +42,9 @@ type Manager struct { | |||
terminateCtxCancel context.CancelFunc | |||
managerCtxCancel context.CancelFunc | |||
runningServerWaitGroup sync.WaitGroup | |||
createServerWaitGroup sync.WaitGroup | |||
terminateWaitGroup sync.WaitGroup | |||
createServerCond sync.Cond | |||
createdServer int | |||
shutdownRequested chan struct{} | |||
toRunAtShutdown []func() | |||
@@ -52,7 +53,7 @@ type Manager struct { | |||
func newGracefulManager(ctx context.Context) *Manager { | |||
manager := &Manager{ctx: ctx, shutdownRequested: make(chan struct{})} | |||
manager.createServerWaitGroup.Add(numberOfServersToCreate) | |||
manager.createServerCond.L = &sync.Mutex{} | |||
manager.prepare(ctx) | |||
manager.start() | |||
return manager |
@@ -57,20 +57,27 @@ func (g *Manager) start() { | |||
// Handle clean up of unused provided listeners and delayed start-up | |||
startupDone := make(chan struct{}) | |||
go func() { | |||
defer close(startupDone) | |||
// Wait till we're done getting all the listeners and then close the unused ones | |||
func() { | |||
// FIXME: there is a fundamental design problem of the "manager" and the "wait group". | |||
// If nothing has started, the "Wait" just panics: sync: WaitGroup is reused before previous Wait has returned | |||
// There is no clear solution besides a complete rewriting of the "manager" | |||
defer func() { | |||
_ = recover() | |||
}() | |||
g.createServerWaitGroup.Wait() | |||
defer func() { | |||
close(startupDone) | |||
// Close the unused listeners | |||
closeProvidedListeners() | |||
}() | |||
// Ignore the error here there's not much we can do with it, they're logged in the CloseProvidedListeners function | |||
_ = CloseProvidedListeners() | |||
g.notify(readyMsg) | |||
// Wait for all servers to be created | |||
g.createServerCond.L.Lock() | |||
for { | |||
if g.createdServer >= numberOfServersToCreate { | |||
g.createServerCond.L.Unlock() | |||
g.notify(readyMsg) | |||
return | |||
} | |||
select { | |||
case <-g.IsShutdown(): | |||
g.createServerCond.L.Unlock() | |||
return | |||
default: | |||
} | |||
g.createServerCond.Wait() | |||
} | |||
}() | |||
if setting.StartupTimeout > 0 { | |||
go func() { | |||
@@ -78,16 +85,7 @@ func (g *Manager) start() { | |||
case <-startupDone: | |||
return | |||
case <-g.IsShutdown(): | |||
func() { | |||
// When WaitGroup counter goes negative it will panic - we don't care about this so we can just ignore it. | |||
defer func() { | |||
_ = recover() | |||
}() | |||
// Ensure that the createServerWaitGroup stops waiting | |||
for { | |||
g.createServerWaitGroup.Done() | |||
} | |||
}() | |||
g.createServerCond.Signal() | |||
return | |||
case <-time.After(setting.StartupTimeout): | |||
log.Error("Startup took too long! Shutting down") |
@@ -149,33 +149,35 @@ hammerLoop: | |||
func (g *Manager) awaitServer(limit time.Duration) bool { | |||
c := make(chan struct{}) | |||
go func() { | |||
defer close(c) | |||
func() { | |||
// FIXME: there is a fundamental design problem of the "manager" and the "wait group". | |||
// If nothing has started, the "Wait" just panics: sync: WaitGroup is reused before previous Wait has returned | |||
// There is no clear solution besides a complete rewriting of the "manager" | |||
defer func() { | |||
_ = recover() | |||
}() | |||
g.createServerWaitGroup.Wait() | |||
}() | |||
g.createServerCond.L.Lock() | |||
for { | |||
if g.createdServer >= numberOfServersToCreate { | |||
g.createServerCond.L.Unlock() | |||
close(c) | |||
return | |||
} | |||
select { | |||
case <-g.IsShutdown(): | |||
g.createServerCond.L.Unlock() | |||
return | |||
default: | |||
} | |||
g.createServerCond.Wait() | |||
} | |||
}() | |||
var tc <-chan time.Time | |||
if limit > 0 { | |||
select { | |||
case <-c: | |||
return true // completed normally | |||
case <-time.After(limit): | |||
return false // timed out | |||
case <-g.IsShutdown(): | |||
return false | |||
} | |||
} else { | |||
select { | |||
case <-c: | |||
return true // completed normally | |||
case <-g.IsShutdown(): | |||
return false | |||
} | |||
tc = time.After(limit) | |||
} | |||
select { | |||
case <-c: | |||
return true // completed normally | |||
case <-tc: | |||
return false // timed out | |||
case <-g.IsShutdown(): | |||
g.createServerCond.Signal() | |||
return false | |||
} | |||
} | |||
@@ -129,25 +129,17 @@ func getProvidedFDs() (savedErr error) { | |||
return savedErr | |||
} | |||
// CloseProvidedListeners closes all unused provided listeners. | |||
func CloseProvidedListeners() error { | |||
// closeProvidedListeners closes all unused provided listeners. | |||
func closeProvidedListeners() { | |||
mutex.Lock() | |||
defer mutex.Unlock() | |||
var returnableError error | |||
for _, l := range providedListeners { | |||
err := l.Close() | |||
if err != nil { | |||
log.Error("Error in closing unused provided listener: %v", err) | |||
if returnableError != nil { | |||
returnableError = fmt.Errorf("%v & %w", returnableError, err) | |||
} else { | |||
returnableError = err | |||
} | |||
} | |||
} | |||
providedListeners = []net.Listener{} | |||
return returnableError | |||
} | |||
// DefaultGetListener obtains a listener for the stream-oriented local network address: |
@@ -8,20 +8,42 @@ import ( | |||
"strings" | |||
"code.gitea.io/gitea/modules/setting" | |||
"code.gitea.io/gitea/modules/util" | |||
) | |||
// IsRiskyRedirectURL returns true if the URL is considered risky for redirects | |||
func IsRiskyRedirectURL(s string) bool { | |||
func urlIsRelative(s string, u *url.URL) bool { | |||
// Unfortunately browsers consider a redirect Location with preceding "//", "\\", "/\" and "\/" as meaning redirect to "http(s)://REST_OF_PATH" | |||
// Therefore we should ignore these redirect locations to prevent open redirects | |||
if len(s) > 1 && (s[0] == '/' || s[0] == '\\') && (s[1] == '/' || s[1] == '\\') { | |||
return true | |||
return false | |||
} | |||
return u != nil && u.Scheme == "" && u.Host == "" | |||
} | |||
// IsRelativeURL detects if a URL is relative (no scheme or host) | |||
func IsRelativeURL(s string) bool { | |||
u, err := url.Parse(s) | |||
if err != nil || ((u.Scheme != "" || u.Host != "") && !strings.HasPrefix(strings.ToLower(s), strings.ToLower(setting.AppURL))) { | |||
return true | |||
} | |||
return err == nil && urlIsRelative(s, u) | |||
} | |||
return false | |||
func IsCurrentGiteaSiteURL(s string) bool { | |||
u, err := url.Parse(s) | |||
if err != nil { | |||
return false | |||
} | |||
if u.Path != "" { | |||
cleanedPath := util.PathJoinRelX(u.Path) | |||
if cleanedPath == "" || cleanedPath == "." { | |||
u.Path = "/" | |||
} else { | |||
u.Path += "/" + cleanedPath + "/" | |||
} | |||
} | |||
if urlIsRelative(s, u) { | |||
return u.Path == "" || strings.HasPrefix(strings.ToLower(u.Path), strings.ToLower(setting.AppSubURL+"/")) | |||
} | |||
if u.Path == "" { | |||
u.Path = "/" | |||
} | |||
return strings.HasPrefix(strings.ToLower(u.String()), strings.ToLower(setting.AppURL)) | |||
} |
@@ -7,32 +7,70 @@ import ( | |||
"testing" | |||
"code.gitea.io/gitea/modules/setting" | |||
"code.gitea.io/gitea/modules/test" | |||
"github.com/stretchr/testify/assert" | |||
) | |||
func TestIsRiskyRedirectURL(t *testing.T) { | |||
setting.AppURL = "http://localhost:3000/" | |||
tests := []struct { | |||
input string | |||
want bool | |||
}{ | |||
{"", false}, | |||
{"foo", false}, | |||
{"/", false}, | |||
{"/foo?k=%20#abc", false}, | |||
func TestIsRelativeURL(t *testing.T) { | |||
defer test.MockVariableValue(&setting.AppURL, "http://localhost:3000/sub/")() | |||
defer test.MockVariableValue(&setting.AppSubURL, "/sub")() | |||
rel := []string{ | |||
"", | |||
"foo", | |||
"/", | |||
"/foo?k=%20#abc", | |||
} | |||
for _, s := range rel { | |||
assert.True(t, IsRelativeURL(s), "rel = %q", s) | |||
} | |||
abs := []string{ | |||
"//", | |||
"\\\\", | |||
"/\\", | |||
"\\/", | |||
"mailto:a@b.com", | |||
"https://test.com", | |||
} | |||
for _, s := range abs { | |||
assert.False(t, IsRelativeURL(s), "abs = %q", s) | |||
} | |||
} | |||
{"//", true}, | |||
{"\\\\", true}, | |||
{"/\\", true}, | |||
{"\\/", true}, | |||
{"mail:a@b.com", true}, | |||
{"https://test.com", true}, | |||
{setting.AppURL + "/foo", false}, | |||
} | |||
for _, tt := range tests { | |||
t.Run(tt.input, func(t *testing.T) { | |||
assert.Equal(t, tt.want, IsRiskyRedirectURL(tt.input)) | |||
}) | |||
func TestIsCurrentGiteaSiteURL(t *testing.T) { | |||
defer test.MockVariableValue(&setting.AppURL, "http://localhost:3000/sub/")() | |||
defer test.MockVariableValue(&setting.AppSubURL, "/sub")() | |||
good := []string{ | |||
"?key=val", | |||
"/sub", | |||
"/sub/", | |||
"/sub/foo", | |||
"/sub/foo/", | |||
"http://localhost:3000/sub?key=val", | |||
"http://localhost:3000/sub/", | |||
} | |||
for _, s := range good { | |||
assert.True(t, IsCurrentGiteaSiteURL(s), "good = %q", s) | |||
} | |||
bad := []string{ | |||
".", | |||
"foo", | |||
"/", | |||
"//", | |||
"\\\\", | |||
"/foo", | |||
"http://localhost:3000/sub/..", | |||
"http://localhost:3000/other", | |||
"http://other/", | |||
} | |||
for _, s := range bad { | |||
assert.False(t, IsCurrentGiteaSiteURL(s), "bad = %q", s) | |||
} | |||
setting.AppURL = "http://localhost:3000/" | |||
setting.AppSubURL = "" | |||
assert.False(t, IsCurrentGiteaSiteURL("//")) | |||
assert.False(t, IsCurrentGiteaSiteURL("\\\\")) | |||
assert.False(t, IsCurrentGiteaSiteURL("http://localhost")) | |||
assert.True(t, IsCurrentGiteaSiteURL("http://localhost:3000?key=val")) | |||
} |
@@ -39,6 +39,8 @@ import ( | |||
const ( | |||
unicodeNormalizeName = "unicodeNormalize" | |||
maxBatchSize = 16 | |||
// fuzzyDenominator determines the levenshtein distance per each character of a keyword | |||
fuzzyDenominator = 4 | |||
) | |||
func addUnicodeNormalizeTokenFilter(m *mapping.IndexMappingImpl) error { | |||
@@ -142,7 +144,7 @@ func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserErro | |||
return err | |||
} | |||
if size, err = strconv.ParseInt(strings.TrimSpace(stdout), 10, 64); err != nil { | |||
return fmt.Errorf("Misformatted git cat-file output: %w", err) | |||
return fmt.Errorf("misformatted git cat-file output: %w", err) | |||
} | |||
} | |||
@@ -233,26 +235,23 @@ func (b *Indexer) Delete(_ context.Context, repoID int64) error { | |||
// Search searches for files in the specified repo. | |||
// Returns the matching file-paths | |||
func (b *Indexer) Search(ctx context.Context, repoIDs []int64, language, keyword string, page, pageSize int, isFuzzy bool) (int64, []*internal.SearchResult, []*internal.SearchResultLanguages, error) { | |||
func (b *Indexer) Search(ctx context.Context, opts *internal.SearchOptions) (int64, []*internal.SearchResult, []*internal.SearchResultLanguages, error) { | |||
var ( | |||
indexerQuery query.Query | |||
keywordQuery query.Query | |||
) | |||
if isFuzzy { | |||
phraseQuery := bleve.NewMatchPhraseQuery(keyword) | |||
phraseQuery.FieldVal = "Content" | |||
phraseQuery.Analyzer = repoIndexerAnalyzer | |||
keywordQuery = phraseQuery | |||
} else { | |||
prefixQuery := bleve.NewPrefixQuery(keyword) | |||
prefixQuery.FieldVal = "Content" | |||
keywordQuery = prefixQuery | |||
phraseQuery := bleve.NewMatchPhraseQuery(opts.Keyword) | |||
phraseQuery.FieldVal = "Content" | |||
phraseQuery.Analyzer = repoIndexerAnalyzer | |||
keywordQuery = phraseQuery | |||
if opts.IsKeywordFuzzy { | |||
phraseQuery.Fuzziness = len(opts.Keyword) / fuzzyDenominator | |||
} | |||
if len(repoIDs) > 0 { | |||
repoQueries := make([]query.Query, 0, len(repoIDs)) | |||
for _, repoID := range repoIDs { | |||
if len(opts.RepoIDs) > 0 { | |||
repoQueries := make([]query.Query, 0, len(opts.RepoIDs)) | |||
for _, repoID := range opts.RepoIDs { | |||
repoQueries = append(repoQueries, inner_bleve.NumericEqualityQuery(repoID, "RepoID")) | |||
} | |||
@@ -266,8 +265,8 @@ func (b *Indexer) Search(ctx context.Context, repoIDs []int64, language, keyword | |||
// Save for reuse without language filter | |||
facetQuery := indexerQuery | |||
if len(language) > 0 { | |||
languageQuery := bleve.NewMatchQuery(language) | |||
if len(opts.Language) > 0 { | |||
languageQuery := bleve.NewMatchQuery(opts.Language) | |||
languageQuery.FieldVal = "Language" | |||
languageQuery.Analyzer = analyzer_keyword.Name | |||
@@ -277,12 +276,12 @@ func (b *Indexer) Search(ctx context.Context, repoIDs []int64, language, keyword | |||
) | |||
} | |||
from := (page - 1) * pageSize | |||
from, pageSize := opts.GetSkipTake() | |||
searchRequest := bleve.NewSearchRequestOptions(indexerQuery, pageSize, from, false) | |||
searchRequest.Fields = []string{"Content", "RepoID", "Language", "CommitID", "UpdatedAt"} | |||
searchRequest.IncludeLocations = true | |||
if len(language) == 0 { | |||
if len(opts.Language) == 0 { | |||
searchRequest.AddFacet("languages", bleve.NewFacetRequest("Language", 10)) | |||
} | |||
@@ -326,7 +325,7 @@ func (b *Indexer) Search(ctx context.Context, repoIDs []int64, language, keyword | |||
} | |||
searchResultLanguages := make([]*internal.SearchResultLanguages, 0, 10) | |||
if len(language) > 0 { | |||
if len(opts.Language) > 0 { | |||
// Use separate query to go get all language counts | |||
facetRequest := bleve.NewSearchRequestOptions(facetQuery, 1, 0, false) | |||
facetRequest.Fields = []string{"Content", "RepoID", "Language", "CommitID", "UpdatedAt"} |
@@ -281,18 +281,18 @@ func extractAggs(searchResult *elastic.SearchResult) []*internal.SearchResultLan | |||
} | |||
// Search searches for codes and language stats by given conditions. | |||
func (b *Indexer) Search(ctx context.Context, repoIDs []int64, language, keyword string, page, pageSize int, isFuzzy bool) (int64, []*internal.SearchResult, []*internal.SearchResultLanguages, error) { | |||
func (b *Indexer) Search(ctx context.Context, opts *internal.SearchOptions) (int64, []*internal.SearchResult, []*internal.SearchResultLanguages, error) { | |||
searchType := esMultiMatchTypePhrasePrefix | |||
if isFuzzy { | |||
if opts.IsKeywordFuzzy { | |||
searchType = esMultiMatchTypeBestFields | |||
} | |||
kwQuery := elastic.NewMultiMatchQuery(keyword, "content").Type(searchType) | |||
kwQuery := elastic.NewMultiMatchQuery(opts.Keyword, "content").Type(searchType) | |||
query := elastic.NewBoolQuery() | |||
query = query.Must(kwQuery) | |||
if len(repoIDs) > 0 { | |||
repoStrs := make([]any, 0, len(repoIDs)) | |||
for _, repoID := range repoIDs { | |||
if len(opts.RepoIDs) > 0 { | |||
repoStrs := make([]any, 0, len(opts.RepoIDs)) | |||
for _, repoID := range opts.RepoIDs { | |||
repoStrs = append(repoStrs, repoID) | |||
} | |||
repoQuery := elastic.NewTermsQuery("repo_id", repoStrs...) | |||
@@ -300,16 +300,12 @@ func (b *Indexer) Search(ctx context.Context, repoIDs []int64, language, keyword | |||
} | |||
var ( | |||
start int | |||
kw = "<em>" + keyword + "</em>" | |||
aggregation = elastic.NewTermsAggregation().Field("language").Size(10).OrderByCountDesc() | |||
start, pageSize = opts.GetSkipTake() | |||
kw = "<em>" + opts.Keyword + "</em>" | |||
aggregation = elastic.NewTermsAggregation().Field("language").Size(10).OrderByCountDesc() | |||
) | |||
if page > 0 { | |||
start = (page - 1) * pageSize | |||
} | |||
if len(language) == 0 { | |||
if len(opts.Language) == 0 { | |||
searchResult, err := b.inner.Client.Search(). | |||
Index(b.inner.VersionedIndexName()). | |||
Aggregation("language", aggregation). | |||
@@ -330,7 +326,7 @@ func (b *Indexer) Search(ctx context.Context, repoIDs []int64, language, keyword | |||
return convertResult(searchResult, kw, pageSize) | |||
} | |||
langQuery := elastic.NewMatchQuery("language", language) | |||
langQuery := elastic.NewMatchQuery("language", opts.Language) | |||
countResult, err := b.inner.Client.Search(). | |||
Index(b.inner.VersionedIndexName()). | |||
Aggregation("language", aggregation). |
@@ -32,7 +32,7 @@ func getRepoChanges(ctx context.Context, repo *repo_model.Repository, revision s | |||
needGenesis := len(status.CommitSha) == 0 | |||
if !needGenesis { | |||
hasAncestorCmd := git.NewCommand(ctx, "merge-base").AddDynamicArguments(repo.CodeIndexerStatus.CommitSha, revision) | |||
hasAncestorCmd := git.NewCommand(ctx, "merge-base").AddDynamicArguments(status.CommitSha, revision) | |||
stdout, _, _ := hasAncestorCmd.RunStdString(&git.RunOpts{Dir: repo.RepoPath()}) | |||
needGenesis = len(stdout) == 0 | |||
} |
@@ -8,6 +8,7 @@ import ( | |||
"os" | |||
"testing" | |||
"code.gitea.io/gitea/models/db" | |||
"code.gitea.io/gitea/models/unittest" | |||
"code.gitea.io/gitea/modules/git" | |||
"code.gitea.io/gitea/modules/indexer/code/bleve" | |||
@@ -70,7 +71,15 @@ func testIndexer(name string, t *testing.T, indexer internal.Indexer) { | |||
for _, kw := range keywords { | |||
t.Run(kw.Keyword, func(t *testing.T) { | |||
total, res, langs, err := indexer.Search(context.TODO(), kw.RepoIDs, "", kw.Keyword, 1, 10, true) | |||
total, res, langs, err := indexer.Search(context.TODO(), &internal.SearchOptions{ | |||
RepoIDs: kw.RepoIDs, | |||
Keyword: kw.Keyword, | |||
Paginator: &db.ListOptions{ | |||
Page: 1, | |||
PageSize: 10, | |||
}, | |||
IsKeywordFuzzy: true, | |||
}) | |||
assert.NoError(t, err) | |||
assert.Len(t, kw.IDs, int(total)) | |||
assert.Len(t, langs, kw.Langs) |
@@ -7,6 +7,7 @@ import ( | |||
"context" | |||
"fmt" | |||
"code.gitea.io/gitea/models/db" | |||
repo_model "code.gitea.io/gitea/models/repo" | |||
"code.gitea.io/gitea/modules/indexer/internal" | |||
) | |||
@@ -16,7 +17,17 @@ type Indexer interface { | |||
internal.Indexer | |||
Index(ctx context.Context, repo *repo_model.Repository, sha string, changes *RepoChanges) error | |||
Delete(ctx context.Context, repoID int64) error | |||
Search(ctx context.Context, repoIDs []int64, language, keyword string, page, pageSize int, isFuzzy bool) (int64, []*SearchResult, []*SearchResultLanguages, error) | |||
Search(ctx context.Context, opts *SearchOptions) (int64, []*SearchResult, []*SearchResultLanguages, error) | |||
} | |||
type SearchOptions struct { | |||
RepoIDs []int64 | |||
Keyword string | |||
Language string | |||
IsKeywordFuzzy bool | |||
db.Paginator | |||
} | |||
// NewDummyIndexer returns a dummy indexer | |||
@@ -38,6 +49,6 @@ func (d *dummyIndexer) Delete(ctx context.Context, repoID int64) error { | |||
return fmt.Errorf("indexer is not ready") | |||
} | |||
func (d *dummyIndexer) Search(ctx context.Context, repoIDs []int64, language, keyword string, page, pageSize int, isFuzzy bool) (int64, []*SearchResult, []*SearchResultLanguages, error) { | |||
func (d *dummyIndexer) Search(ctx context.Context, opts *SearchOptions) (int64, []*SearchResult, []*SearchResultLanguages, error) { | |||
return 0, nil, nil, fmt.Errorf("indexer is not ready") | |||
} |
@@ -32,6 +32,8 @@ type ResultLine struct { | |||
type SearchResultLanguages = internal.SearchResultLanguages | |||
type SearchOptions = internal.SearchOptions | |||
func indices(content string, selectionStartIndex, selectionEndIndex int) (int, int) { | |||
startIndex := selectionStartIndex | |||
numLinesBefore := 0 | |||
@@ -68,13 +70,27 @@ func writeStrings(buf *bytes.Buffer, strs ...string) error { | |||
return nil | |||
} | |||
func HighlightSearchResultCode(filename string, lineNums []int, code string) []ResultLine { | |||
// we should highlight the whole code block first, otherwise it doesn't work well with multiple line highlighting | |||
hl, _ := highlight.Code(filename, "", code) | |||
highlightedLines := strings.Split(string(hl), "\n") | |||
// The lineNums outputted by highlight.Code might not match the original lineNums, because "highlight" removes the last `\n` | |||
lines := make([]ResultLine, min(len(highlightedLines), len(lineNums))) | |||
for i := 0; i < len(lines); i++ { | |||
lines[i].Num = lineNums[i] | |||
lines[i].FormattedContent = template.HTML(highlightedLines[i]) | |||
} | |||
return lines | |||
} | |||
func searchResult(result *internal.SearchResult, startIndex, endIndex int) (*Result, error) { | |||
startLineNum := 1 + strings.Count(result.Content[:startIndex], "\n") | |||
var formattedLinesBuffer bytes.Buffer | |||
contentLines := strings.SplitAfter(result.Content[startIndex:endIndex], "\n") | |||
lines := make([]ResultLine, 0, len(contentLines)) | |||
lineNums := make([]int, 0, len(contentLines)) | |||
index := startIndex | |||
for i, line := range contentLines { | |||
var err error | |||
@@ -89,29 +105,16 @@ func searchResult(result *internal.SearchResult, startIndex, endIndex int) (*Res | |||
line[closeActiveIndex:], | |||
) | |||
} else { | |||
err = writeStrings(&formattedLinesBuffer, | |||
line, | |||
) | |||
err = writeStrings(&formattedLinesBuffer, line) | |||
} | |||
if err != nil { | |||
return nil, err | |||
} | |||
lines = append(lines, ResultLine{Num: startLineNum + i}) | |||
lineNums = append(lineNums, startLineNum+i) | |||
index += len(line) | |||
} | |||
// we should highlight the whole code block first, otherwise it doesn't work well with multiple line highlighting | |||
hl, _ := highlight.Code(result.Filename, "", formattedLinesBuffer.String()) | |||
highlightedLines := strings.Split(string(hl), "\n") | |||
// The lines outputted by highlight.Code might not match the original lines, because "highlight" removes the last `\n` | |||
lines = lines[:min(len(highlightedLines), len(lines))] | |||
highlightedLines = highlightedLines[:len(lines)] | |||
for i := 0; i < len(lines); i++ { | |||
lines[i].FormattedContent = template.HTML(highlightedLines[i]) | |||
} | |||
return &Result{ | |||
RepoID: result.RepoID, | |||
Filename: result.Filename, | |||
@@ -119,18 +122,18 @@ func searchResult(result *internal.SearchResult, startIndex, endIndex int) (*Res | |||
UpdatedUnix: result.UpdatedUnix, | |||
Language: result.Language, | |||
Color: result.Color, | |||
Lines: lines, | |||
Lines: HighlightSearchResultCode(result.Filename, lineNums, formattedLinesBuffer.String()), | |||
}, nil | |||
} | |||
// PerformSearch perform a search on a repository | |||
// if isFuzzy is true set the Damerau-Levenshtein distance from 0 to 2 | |||
func PerformSearch(ctx context.Context, repoIDs []int64, language, keyword string, page, pageSize int, isFuzzy bool) (int, []*Result, []*internal.SearchResultLanguages, error) { | |||
if len(keyword) == 0 { | |||
func PerformSearch(ctx context.Context, opts *SearchOptions) (int, []*Result, []*SearchResultLanguages, error) { | |||
if opts == nil || len(opts.Keyword) == 0 { | |||
return 0, nil, nil, nil | |||
} | |||
total, results, resultLanguages, err := (*globalIndexer.Load()).Search(ctx, repoIDs, language, keyword, page, pageSize, isFuzzy) | |||
total, results, resultLanguages, err := (*globalIndexer.Load()).Search(ctx, opts) | |||
if err != nil { | |||
return 0, nil, nil, err | |||
} |
@@ -20,17 +20,11 @@ func NumericEqualityQuery(value int64, field string) *query.NumericRangeQuery { | |||
} | |||
// MatchPhraseQuery generates a match phrase query for the given phrase, field and analyzer | |||
func MatchPhraseQuery(matchPhrase, field, analyzer string) *query.MatchPhraseQuery { | |||
func MatchPhraseQuery(matchPhrase, field, analyzer string, fuzziness int) *query.MatchPhraseQuery { | |||
q := bleve.NewMatchPhraseQuery(matchPhrase) | |||
q.FieldVal = field | |||
q.Analyzer = analyzer | |||
return q | |||
} | |||
// PrefixQuery generates a match prefix query for the given prefix and field | |||
func PrefixQuery(matchPrefix, field string) *query.PrefixQuery { | |||
q := bleve.NewPrefixQuery(matchPrefix) | |||
q.FieldVal = field | |||
q.Fuzziness = fuzziness | |||
return q | |||
} | |||
@@ -10,7 +10,7 @@ import ( | |||
) | |||
// ParsePaginator parses a db.Paginator into a skip and limit | |||
func ParsePaginator(paginator db.Paginator, max ...int) (int, int) { | |||
func ParsePaginator(paginator *db.ListOptions, max ...int) (int, int) { | |||
// Use a very large number to indicate no limit | |||
unlimited := math.MaxInt32 | |||
if len(max) > 0 { | |||
@@ -19,22 +19,15 @@ func ParsePaginator(paginator db.Paginator, max ...int) (int, int) { | |||
} | |||
if paginator == nil || paginator.IsListAll() { | |||
// It shouldn't happen. In actual usage scenarios, there should not be requests to search all. | |||
// But if it does happen, respect it and return "unlimited". | |||
// And it's also useful for testing. | |||
return 0, unlimited | |||
} | |||
// Warning: Do not use GetSkipTake() for *db.ListOptions | |||
// Its implementation could reset the page size with setting.API.MaxResponseItems | |||
if listOptions, ok := paginator.(*db.ListOptions); ok { | |||
if listOptions.Page >= 0 && listOptions.PageSize > 0 { | |||
var start int | |||
if listOptions.Page == 0 { | |||
start = 0 | |||
} else { | |||
start = (listOptions.Page - 1) * listOptions.PageSize | |||
} | |||
return start, listOptions.PageSize | |||
} | |||
return 0, unlimited | |||
if paginator.PageSize == 0 { | |||
// Do not return any results when searching, it's used to get the total count only. | |||
return 0, 0 | |||
} | |||
return paginator.GetSkipTake() |
@@ -35,7 +35,11 @@ func addUnicodeNormalizeTokenFilter(m *mapping.IndexMappingImpl) error { | |||
}) | |||
} | |||
const maxBatchSize = 16 | |||
const ( | |||
maxBatchSize = 16 | |||
// fuzzyDenominator determines the levenshtein distance per each character of a keyword | |||
fuzzyDenominator = 4 | |||
) | |||
// IndexerData an update to the issue indexer | |||
type IndexerData internal.IndexerData | |||
@@ -156,19 +160,16 @@ func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) ( | |||
var queries []query.Query | |||
if options.Keyword != "" { | |||
fuzziness := 0 | |||
if options.IsFuzzyKeyword { | |||
queries = append(queries, bleve.NewDisjunctionQuery([]query.Query{ | |||
inner_bleve.MatchPhraseQuery(options.Keyword, "title", issueIndexerAnalyzer), | |||
inner_bleve.MatchPhraseQuery(options.Keyword, "content", issueIndexerAnalyzer), | |||
inner_bleve.MatchPhraseQuery(options.Keyword, "comments", issueIndexerAnalyzer), | |||
}...)) | |||
} else { | |||
queries = append(queries, bleve.NewDisjunctionQuery([]query.Query{ | |||
inner_bleve.PrefixQuery(options.Keyword, "title"), | |||
inner_bleve.PrefixQuery(options.Keyword, "content"), | |||
inner_bleve.PrefixQuery(options.Keyword, "comments"), | |||
}...)) | |||
fuzziness = len(options.Keyword) / fuzzyDenominator | |||
} | |||
queries = append(queries, bleve.NewDisjunctionQuery([]query.Query{ | |||
inner_bleve.MatchPhraseQuery(options.Keyword, "title", issueIndexerAnalyzer, fuzziness), | |||
inner_bleve.MatchPhraseQuery(options.Keyword, "content", issueIndexerAnalyzer, fuzziness), | |||
inner_bleve.MatchPhraseQuery(options.Keyword, "comments", issueIndexerAnalyzer, fuzziness), | |||
}...)) | |||
} | |||
if len(options.RepoIDs) > 0 || options.AllPublic { |
@@ -78,6 +78,17 @@ func (i *Indexer) Search(ctx context.Context, options *internal.SearchOptions) ( | |||
return nil, err | |||
} | |||
// If pagesize == 0, return total count only. It's a special case for search count. | |||
if options.Paginator != nil && options.Paginator.PageSize == 0 { | |||
total, err := issue_model.CountIssues(ctx, opt, cond) | |||
if err != nil { | |||
return nil, err | |||
} | |||
return &internal.SearchResult{ | |||
Total: total, | |||
}, nil | |||
} | |||
ids, total, err := issue_model.IssueIDs(ctx, opt, cond) | |||
if err != nil { | |||
return nil, err |
@@ -308,7 +308,7 @@ func SearchIssues(ctx context.Context, opts *SearchOptions) ([]int64, int64, err | |||
// CountIssues counts issues by options. It is a shortcut of SearchIssues(ctx, opts) but only returns the total count. | |||
func CountIssues(ctx context.Context, opts *SearchOptions) (int64, error) { | |||
opts = opts.Copy(func(options *SearchOptions) { opts.Paginator = &db_model.ListOptions{PageSize: 0} }) | |||
opts = opts.Copy(func(options *SearchOptions) { options.Paginator = &db_model.ListOptions{PageSize: 0} }) | |||
_, total, err := SearchIssues(ctx, opts) | |||
return total, err |
@@ -106,7 +106,7 @@ type SearchOptions struct { | |||
UpdatedAfterUnix optional.Option[int64] | |||
UpdatedBeforeUnix optional.Option[int64] | |||
db.Paginator | |||
Paginator *db.ListOptions | |||
SortBy SortBy // sort by field | |||
} |
@@ -77,6 +77,13 @@ func TestIndexer(t *testing.T, indexer internal.Indexer) { | |||
assert.Equal(t, c.ExpectedIDs, ids) | |||
assert.Equal(t, c.ExpectedTotal, result.Total) | |||
} | |||
// test counting | |||
c.SearchOptions.Paginator = &db.ListOptions{PageSize: 0} | |||
countResult, err := indexer.Search(context.Background(), c.SearchOptions) | |||
require.NoError(t, err) | |||
assert.Empty(t, countResult.Hits) | |||
assert.Equal(t, result.Total, countResult.Total) | |||
}) | |||
} | |||
} | |||
@@ -515,10 +522,8 @@ var cases = []*testIndexerCase{ | |||
{ | |||
Name: "SortByCreatedDesc", | |||
SearchOptions: &internal.SearchOptions{ | |||
Paginator: &db.ListOptions{ | |||
ListAll: true, | |||
}, | |||
SortBy: internal.SortByCreatedDesc, | |||
Paginator: &db.ListOptionsAll, | |||
SortBy: internal.SortByCreatedDesc, | |||
}, | |||
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) { | |||
assert.Equal(t, len(data), len(result.Hits)) | |||
@@ -533,10 +538,8 @@ var cases = []*testIndexerCase{ | |||
{ | |||
Name: "SortByUpdatedDesc", | |||
SearchOptions: &internal.SearchOptions{ | |||
Paginator: &db.ListOptions{ | |||
ListAll: true, | |||
}, | |||
SortBy: internal.SortByUpdatedDesc, | |||
Paginator: &db.ListOptionsAll, | |||
SortBy: internal.SortByUpdatedDesc, | |||
}, | |||
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) { | |||
assert.Equal(t, len(data), len(result.Hits)) | |||
@@ -551,10 +554,8 @@ var cases = []*testIndexerCase{ | |||
{ | |||
Name: "SortByCommentsDesc", | |||
SearchOptions: &internal.SearchOptions{ | |||
Paginator: &db.ListOptions{ | |||
ListAll: true, | |||
}, | |||
SortBy: internal.SortByCommentsDesc, | |||
Paginator: &db.ListOptionsAll, | |||
SortBy: internal.SortByCommentsDesc, | |||
}, | |||
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) { | |||
assert.Equal(t, len(data), len(result.Hits)) | |||
@@ -569,10 +570,8 @@ var cases = []*testIndexerCase{ | |||
{ | |||
Name: "SortByDeadlineDesc", | |||
SearchOptions: &internal.SearchOptions{ | |||
Paginator: &db.ListOptions{ | |||
ListAll: true, | |||
}, | |||
SortBy: internal.SortByDeadlineDesc, | |||
Paginator: &db.ListOptionsAll, | |||
SortBy: internal.SortByDeadlineDesc, | |||
}, | |||
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) { | |||
assert.Equal(t, len(data), len(result.Hits)) | |||
@@ -587,10 +586,8 @@ var cases = []*testIndexerCase{ | |||
{ | |||
Name: "SortByCreatedAsc", | |||
SearchOptions: &internal.SearchOptions{ | |||
Paginator: &db.ListOptions{ | |||
ListAll: true, | |||
}, | |||
SortBy: internal.SortByCreatedAsc, | |||
Paginator: &db.ListOptionsAll, | |||
SortBy: internal.SortByCreatedAsc, | |||
}, | |||
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) { | |||
assert.Equal(t, len(data), len(result.Hits)) | |||
@@ -605,10 +602,8 @@ var cases = []*testIndexerCase{ | |||
{ | |||
Name: "SortByUpdatedAsc", | |||
SearchOptions: &internal.SearchOptions{ | |||
Paginator: &db.ListOptions{ | |||
ListAll: true, | |||
}, | |||
SortBy: internal.SortByUpdatedAsc, | |||
Paginator: &db.ListOptionsAll, | |||
SortBy: internal.SortByUpdatedAsc, | |||
}, | |||
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) { | |||
assert.Equal(t, len(data), len(result.Hits)) | |||
@@ -623,10 +618,8 @@ var cases = []*testIndexerCase{ | |||
{ | |||
Name: "SortByCommentsAsc", | |||
SearchOptions: &internal.SearchOptions{ | |||
Paginator: &db.ListOptions{ | |||
ListAll: true, | |||
}, | |||
SortBy: internal.SortByCommentsAsc, | |||
Paginator: &db.ListOptionsAll, | |||
SortBy: internal.SortByCommentsAsc, | |||
}, | |||
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) { | |||
assert.Equal(t, len(data), len(result.Hits)) | |||
@@ -641,10 +634,8 @@ var cases = []*testIndexerCase{ | |||
{ | |||
Name: "SortByDeadlineAsc", | |||
SearchOptions: &internal.SearchOptions{ | |||
Paginator: &db.ListOptions{ | |||
ListAll: true, | |||
}, | |||
SortBy: internal.SortByDeadlineAsc, | |||
Paginator: &db.ListOptionsAll, | |||
SortBy: internal.SortByDeadlineAsc, | |||
}, | |||
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) { | |||
assert.Equal(t, len(data), len(result.Hits)) |
@@ -6,6 +6,7 @@ package meilisearch | |||
import ( | |||
"context" | |||
"errors" | |||
"fmt" | |||
"strconv" | |||
"strings" | |||
@@ -217,7 +218,22 @@ func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) ( | |||
skip, limit := indexer_internal.ParsePaginator(options.Paginator, maxTotalHits) | |||
searchRes, err := b.inner.Client.Index(b.inner.VersionedIndexName()).Search(options.Keyword, &meilisearch.SearchRequest{ | |||
counting := limit == 0 | |||
if counting { | |||
// If set limit to 0, it will be 20 by default, and -1 is not allowed. | |||
// See https://www.meilisearch.com/docs/reference/api/search#limit | |||
// So set limit to 1 to make the cost as low as possible, then clear the result before returning. | |||
limit = 1 | |||
} | |||
keyword := options.Keyword | |||
if !options.IsFuzzyKeyword { | |||
// to make it non fuzzy ("typo tolerance" in meilisearch terms), we have to quote the keyword(s) | |||
// https://www.meilisearch.com/docs/reference/api/search#phrase-search | |||
keyword = doubleQuoteKeyword(keyword) | |||
} | |||
searchRes, err := b.inner.Client.Index(b.inner.VersionedIndexName()).Search(keyword, &meilisearch.SearchRequest{ | |||
Filter: query.Statement(), | |||
Limit: int64(limit), | |||
Offset: int64(skip), | |||
@@ -228,7 +244,11 @@ func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) ( | |||
return nil, err | |||
} | |||
hits, err := nonFuzzyWorkaround(searchRes, options.Keyword, options.IsFuzzyKeyword) | |||
if counting { | |||
searchRes.Hits = nil | |||
} | |||
hits, err := convertHits(searchRes) | |||
if err != nil { | |||
return nil, err | |||
} | |||
@@ -247,11 +267,20 @@ func parseSortBy(sortBy internal.SortBy) string { | |||
return field + ":asc" | |||
} | |||
// nonFuzzyWorkaround is needed as meilisearch does not have an exact search | |||
// and you can only change "typo tolerance" per index. So we have to post-filter the results | |||
// https://www.meilisearch.com/docs/learn/configuration/typo_tolerance#configuring-typo-tolerance | |||
// TODO: remove once https://github.com/orgs/meilisearch/discussions/377 is addressed | |||
func nonFuzzyWorkaround(searchRes *meilisearch.SearchResponse, keyword string, isFuzzy bool) ([]internal.Match, error) { | |||
func doubleQuoteKeyword(k string) string { | |||
kp := strings.Split(k, " ") | |||
parts := 0 | |||
for i := range kp { | |||
part := strings.Trim(kp[i], "\"") | |||
if part != "" { | |||
kp[parts] = fmt.Sprintf(`"%s"`, part) | |||
parts++ | |||
} | |||
} | |||
return strings.Join(kp[:parts], " ") | |||
} | |||
func convertHits(searchRes *meilisearch.SearchResponse) ([]internal.Match, error) { | |||
hits := make([]internal.Match, 0, len(searchRes.Hits)) | |||
for _, hit := range searchRes.Hits { | |||
hit, ok := hit.(map[string]any) | |||
@@ -259,61 +288,11 @@ func nonFuzzyWorkaround(searchRes *meilisearch.SearchResponse, keyword string, i | |||
return nil, ErrMalformedResponse | |||
} | |||
if !isFuzzy { | |||
keyword = strings.ToLower(keyword) | |||
// declare a anon func to check if the title, content or at least one comment contains the keyword | |||
found, err := func() (bool, error) { | |||
// check if title match first | |||
title, ok := hit["title"].(string) | |||
if !ok { | |||
return false, ErrMalformedResponse | |||
} else if strings.Contains(strings.ToLower(title), keyword) { | |||
return true, nil | |||
} | |||
// check if content has a match | |||
content, ok := hit["content"].(string) | |||
if !ok { | |||
return false, ErrMalformedResponse | |||
} else if strings.Contains(strings.ToLower(content), keyword) { | |||
return true, nil | |||
} | |||
// now check for each comment if one has a match | |||
// so we first try to cast and skip if there are no comments | |||
comments, ok := hit["comments"].([]any) | |||
if !ok { | |||
return false, ErrMalformedResponse | |||
} else if len(comments) == 0 { | |||
return false, nil | |||
} | |||
// now we iterate over all and report as soon as we detect one match | |||
for i := range comments { | |||
comment, ok := comments[i].(string) | |||
if !ok { | |||
return false, ErrMalformedResponse | |||
} | |||
if strings.Contains(strings.ToLower(comment), keyword) { | |||
return true, nil | |||
} | |||
} | |||
// we got no match | |||
return false, nil | |||
}() | |||
if err != nil { | |||
return nil, err | |||
} else if !found { | |||
continue | |||
} | |||
} | |||
issueID, ok := hit["id"].(float64) | |||
if !ok { | |||
return nil, ErrMalformedResponse | |||
} | |||
hits = append(hits, internal.Match{ | |||
ID: int64(issueID), | |||
}) |
@@ -53,11 +53,10 @@ func TestMeilisearchIndexer(t *testing.T) { | |||
tests.TestIndexer(t, indexer) | |||
} | |||
func TestNonFuzzyWorkaround(t *testing.T) { | |||
// get unexpected return | |||
_, err := nonFuzzyWorkaround(&meilisearch.SearchResponse{ | |||
func TestConvertHits(t *testing.T) { | |||
_, err := convertHits(&meilisearch.SearchResponse{ | |||
Hits: []any{"aa", "bb", "cc", "dd"}, | |||
}, "bowling", false) | |||
}) | |||
assert.ErrorIs(t, err, ErrMalformedResponse) | |||
validResponse := &meilisearch.SearchResponse{ | |||
@@ -82,14 +81,15 @@ func TestNonFuzzyWorkaround(t *testing.T) { | |||
}, | |||
}, | |||
} | |||
// nonFuzzy | |||
hits, err := nonFuzzyWorkaround(validResponse, "bowling", false) | |||
assert.NoError(t, err) | |||
assert.EqualValues(t, []internal.Match{{ID: 11}, {ID: 22}}, hits) | |||
// fuzzy | |||
hits, err = nonFuzzyWorkaround(validResponse, "bowling", true) | |||
hits, err := convertHits(validResponse) | |||
assert.NoError(t, err) | |||
assert.EqualValues(t, []internal.Match{{ID: 11}, {ID: 22}, {ID: 33}}, hits) | |||
} | |||
func TestDoubleQuoteKeyword(t *testing.T) { | |||
assert.EqualValues(t, "", doubleQuoteKeyword("")) | |||
assert.EqualValues(t, `"a" "b" "c"`, doubleQuoteKeyword("a b c")) | |||
assert.EqualValues(t, `"a" "d" "g"`, doubleQuoteKeyword("a d g")) | |||
assert.EqualValues(t, `"a" "d" "g"`, doubleQuoteKeyword("a d g")) | |||
assert.EqualValues(t, `"a" "d" "g"`, doubleQuoteKeyword(`a "" "d" """g`)) | |||
} |
@@ -61,9 +61,7 @@ func getIssueIndexerData(ctx context.Context, issueID int64) (*internal.IndexerD | |||
) | |||
{ | |||
reviews, err := issue_model.FindReviews(ctx, issue_model.FindReviewOptions{ | |||
ListOptions: db.ListOptions{ | |||
ListAll: true, | |||
}, | |||
ListOptions: db.ListOptionsAll, | |||
IssueID: issueID, | |||
OfficialOnly: false, | |||
}) |
@@ -6,6 +6,7 @@ package markup | |||
import ( | |||
"bufio" | |||
"bytes" | |||
"fmt" | |||
"html" | |||
"io" | |||
"regexp" | |||
@@ -77,29 +78,65 @@ func writeField(w io.Writer, element, class, field string) error { | |||
} | |||
// Render implements markup.Renderer | |||
func (Renderer) Render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error { | |||
func (r Renderer) Render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error { | |||
tmpBlock := bufio.NewWriter(output) | |||
maxSize := setting.UI.CSV.MaxFileSize | |||
// FIXME: don't read all to memory | |||
rawBytes, err := io.ReadAll(input) | |||
if maxSize == 0 { | |||
return r.tableRender(ctx, input, tmpBlock) | |||
} | |||
rawBytes, err := io.ReadAll(io.LimitReader(input, maxSize+1)) | |||
if err != nil { | |||
return err | |||
} | |||
if setting.UI.CSV.MaxFileSize != 0 && setting.UI.CSV.MaxFileSize < int64(len(rawBytes)) { | |||
if _, err := tmpBlock.WriteString("<pre>"); err != nil { | |||
return err | |||
} | |||
if _, err := tmpBlock.WriteString(html.EscapeString(string(rawBytes))); err != nil { | |||
return err | |||
if int64(len(rawBytes)) <= maxSize { | |||
return r.tableRender(ctx, bytes.NewReader(rawBytes), tmpBlock) | |||
} | |||
return r.fallbackRender(io.MultiReader(bytes.NewReader(rawBytes), input), tmpBlock) | |||
} | |||
func (Renderer) fallbackRender(input io.Reader, tmpBlock *bufio.Writer) error { | |||
_, err := tmpBlock.WriteString("<pre>") | |||
if err != nil { | |||
return err | |||
} | |||
scan := bufio.NewScanner(input) | |||
scan.Split(bufio.ScanRunes) | |||
for scan.Scan() { | |||
switch scan.Text() { | |||
case `&`: | |||
_, err = tmpBlock.WriteString("&") | |||
case `'`: | |||
_, err = tmpBlock.WriteString("'") // "'" is shorter than "'" and apos was not in HTML until HTML5. | |||
case `<`: | |||
_, err = tmpBlock.WriteString("<") | |||
case `>`: | |||
_, err = tmpBlock.WriteString(">") | |||
case `"`: | |||
_, err = tmpBlock.WriteString(""") // """ is shorter than """. | |||
default: | |||
_, err = tmpBlock.Write(scan.Bytes()) | |||
} | |||
if _, err := tmpBlock.WriteString("</pre>"); err != nil { | |||
if err != nil { | |||
return err | |||
} | |||
return tmpBlock.Flush() | |||
} | |||
if err = scan.Err(); err != nil { | |||
return fmt.Errorf("fallbackRender scan: %w", err) | |||
} | |||
_, err = tmpBlock.WriteString("</pre>") | |||
if err != nil { | |||
return err | |||
} | |||
return tmpBlock.Flush() | |||
} | |||
rd, err := csv.CreateReaderAndDetermineDelimiter(ctx, bytes.NewReader(rawBytes)) | |||
func (Renderer) tableRender(ctx *markup.RenderContext, input io.Reader, tmpBlock *bufio.Writer) error { | |||
rd, err := csv.CreateReaderAndDetermineDelimiter(ctx, input) | |||
if err != nil { | |||
return err | |||
} |