Merge remote-tracking branch 'upstream/main' into pubsub

Anbraten 2024-03-21 15:52:00 +01:00
commit 154f61c323
391 changed files with 7114 additions and 8532 deletions

View File

@ -7,7 +7,16 @@ bin = "gitea"
delay = 1000
include_ext = ["go", "tmpl", "css", "js"]
include_file = ["main.go"]
include_dir = ["cmd", "models", "modules", "options", "public", "routers", "services", "templates"]
exclude_dir = ["modules/git/tests", "services/gitdiff/testdata", "modules/avatar/testdata", "models/fixtures", "models/migrations/fixtures", "modules/migration/file_format_testdata", "modules/avatar/identicon/testdata"]
include_dir = ["cmd", "models", "modules", "options", "routers", "services"]
exclude_dir = [
"models/fixtures",
"models/migrations/fixtures",
"modules/avatar/identicon/testdata",
"modules/avatar/testdata",
"modules/git/tests",
"modules/migration/file_format_testdata",
"routers/private/tests",
"services/gitdiff/testdata",
]
exclude_regex = ["_test.go$", "_gen.go$"]
stop_on_error = true

View File

@ -4,7 +4,7 @@
"features": {
// installs nodejs into container
"ghcr.io/devcontainers/features/node:1": {
"version":"20"
"version": "20"
},
"ghcr.io/devcontainers/features/git-lfs:1.1.0": {},
"ghcr.io/devcontainers-contrib/features/poetry:2": {},
@ -24,7 +24,7 @@
"DavidAnson.vscode-markdownlint",
"Vue.volar",
"ms-azuretools.vscode-docker",
"zixuanchen.vitest-explorer",
"vitest.explorer",
"qwtel.sqlite-viewer",
"GitHub.vscode-pull-request-github"
]

View File

@ -62,7 +62,6 @@ cpu.out
/data
/indexers
/log
/public/img/avatar
/tests/integration/gitea-integration-*
/tests/integration/indexers-*
/tests/e2e/gitea-e2e-*
@ -78,6 +77,7 @@ cpu.out
/public/assets/js
/public/assets/css
/public/assets/fonts
/public/assets/img/avatar
/public/assets/img/webpack
/vendor
/web_src/fomantic/node_modules

View File

@ -42,10 +42,6 @@ overrides:
worker: true
rules:
no-restricted-globals: [2, addEventListener, blur, close, closed, confirm, defaultStatus, defaultstatus, error, event, external, find, focus, frameElement, frames, history, innerHeight, innerWidth, isFinite, isNaN, length, locationbar, menubar, moveBy, moveTo, name, onblur, onerror, onfocus, onload, onresize, onunload, open, opener, opera, outerHeight, outerWidth, pageXOffset, pageYOffset, parent, print, removeEventListener, resizeBy, resizeTo, screen, screenLeft, screenTop, screenX, screenY, scroll, scrollbars, scrollBy, scrollTo, scrollX, scrollY, status, statusbar, stop, toolbar, top]
- files: ["build/generate-images.js"]
rules:
i/no-unresolved: [0]
i/no-extraneous-dependencies: [0]
- files: ["*.config.*"]
rules:
i/no-unused-modules: [0]
@ -283,14 +279,14 @@ rules:
i/unambiguous: [0]
init-declarations: [0]
jquery/no-ajax-events: [2]
jquery/no-ajax: [0]
jquery/no-ajax: [2]
jquery/no-animate: [2]
jquery/no-attr: [0]
jquery/no-bind: [2]
jquery/no-class: [0]
jquery/no-clone: [2]
jquery/no-closest: [0]
jquery/no-css: [0]
jquery/no-css: [2]
jquery/no-data: [0]
jquery/no-deferred: [2]
jquery/no-delegate: [2]
@ -315,7 +311,7 @@ rules:
jquery/no-parent: [0]
jquery/no-parents: [0]
jquery/no-parse-html: [2]
jquery/no-prop: [0]
jquery/no-prop: [2]
jquery/no-proxy: [2]
jquery/no-ready: [2]
jquery/no-serialize: [2]
@ -396,11 +392,11 @@ rules:
no-irregular-whitespace: [2]
no-iterator: [2]
no-jquery/no-ajax-events: [2]
no-jquery/no-ajax: [0]
no-jquery/no-ajax: [2]
no-jquery/no-and-self: [2]
no-jquery/no-animate-toggle: [2]
no-jquery/no-animate: [2]
no-jquery/no-append-html: [0]
no-jquery/no-append-html: [2]
no-jquery/no-attr: [0]
no-jquery/no-bind: [2]
no-jquery/no-box-model: [2]
@ -413,7 +409,7 @@ rules:
no-jquery/no-constructor-attributes: [2]
no-jquery/no-contains: [2]
no-jquery/no-context-prop: [2]
no-jquery/no-css: [0]
no-jquery/no-css: [2]
no-jquery/no-data: [0]
no-jquery/no-deferred: [2]
no-jquery/no-delegate: [2]
@ -466,7 +462,7 @@ rules:
no-jquery/no-parse-html: [2]
no-jquery/no-parse-json: [2]
no-jquery/no-parse-xml: [2]
no-jquery/no-prop: [0]
no-jquery/no-prop: [2]
no-jquery/no-proxy: [2]
no-jquery/no-ready-shorthand: [2]
no-jquery/no-ready: [2]
@ -487,7 +483,7 @@ rules:
no-jquery/no-visibility: [2]
no-jquery/no-when: [2]
no-jquery/no-wrap: [2]
no-jquery/variable-pattern: [0]
no-jquery/variable-pattern: [2]
no-label-var: [2]
no-labels: [0] # handled by no-restricted-syntax
no-lone-blocks: [2]

.github/labeler.yml vendored
View File

@ -1,36 +1,84 @@
modifies/docs:
- "**/*.md"
- "docs/**"
- changed-files:
- any-glob-to-any-file:
- "**/*.md"
- "docs/**"
modifies/frontend:
- "web_src/**/*"
- changed-files:
- any-glob-to-any-file:
- "web_src/**"
- "tailwind.config.js"
- "webpack.config.js"
modifies/templates:
- all: ["templates/**", "!templates/swagger/v1_json.tmpl"]
- changed-files:
- all-globs-to-any-file:
- "templates/**"
- "!templates/swagger/v1_json.tmpl"
modifies/api:
- "routers/api/**"
- "templates/swagger/v1_json.tmpl"
- changed-files:
- any-glob-to-any-file:
- "routers/api/**"
- "templates/swagger/v1_json.tmpl"
modifies/cli:
- "cmd/**"
- changed-files:
- any-glob-to-any-file:
- "cmd/**"
modifies/translation:
- "options/locale/*.ini"
- changed-files:
- any-glob-to-any-file:
- "options/locale/*.ini"
modifies/migrations:
- "models/migrations/**/*"
- changed-files:
- any-glob-to-any-file:
- "models/migrations/**"
modifies/internal:
- "Makefile"
- "Dockerfile"
- "Dockerfile.rootless"
- "docker/**"
- "webpack.config.js"
- ".eslintrc.yaml"
- ".golangci.yml"
- ".markdownlint.yaml"
- ".spectral.yaml"
- ".stylelintrc.yaml"
- ".yamllint.yaml"
- ".github/**"
- changed-files:
- any-glob-to-any-file:
- ".air.toml"
- "Makefile"
- "Dockerfile"
- "Dockerfile.rootless"
- ".dockerignore"
- "docker/**"
- ".editorconfig"
- ".eslintrc.yaml"
- ".golangci.yml"
- ".gitpod.yml"
- ".markdownlint.yaml"
- ".spectral.yaml"
- ".stylelintrc.yaml"
- ".yamllint.yaml"
- ".github/**"
- ".gitea/"
- ".devcontainer/**"
- "build.go"
- "build/**"
- "contrib/**"
modifies/dependencies:
- changed-files:
- any-glob-to-any-file:
- "package.json"
- "package-lock.json"
- "pyproject.toml"
- "poetry.lock"
- "go.mod"
- "go.sum"
modifies/go:
- changed-files:
- any-glob-to-any-file:
- "**/*.go"
modifies/js:
- changed-files:
- any-glob-to-any-file:
- "**/*.js"
- "**/*.vue"

View File

@ -1,23 +0,0 @@
name: cron-lock
on:
schedule:
- cron: "0 0 * * *" # every day at 00:00 UTC
workflow_dispatch:
permissions:
issues: write
pull-requests: write
concurrency:
group: lock
jobs:
action:
runs-on: ubuntu-latest
if: github.repository == 'go-gitea/gitea'
steps:
- uses: dessant/lock-threads@v5
with:
issue-inactive-days: 10
pr-inactive-days: 7

View File

@ -9,12 +9,12 @@ concurrency:
cancel-in-progress: true
jobs:
label:
labeler:
runs-on: ubuntu-latest
permissions:
contents: read
pull-requests: write
steps:
- uses: actions/labeler@v4
- uses: actions/labeler@v5
with:
dot: true
sync-labels: true

.gitignore vendored
View File

@ -58,7 +58,7 @@ cpu.out
/data
/indexers
/log
/public/img/avatar
/public/assets/img/avatar
/tests/integration/gitea-integration-*
/tests/integration/indexers-*
/tests/e2e/gitea-e2e-*

View File

@ -42,7 +42,7 @@ vscode:
- DavidAnson.vscode-markdownlint
- Vue.volar
- ms-azuretools.vscode-docker
- zixuanchen.vitest-explorer
- vitest.explorer
- qwtel.sqlite-viewer
- GitHub.vscode-pull-request-github

View File

@ -31,7 +31,7 @@ GOFUMPT_PACKAGE ?= mvdan.cc/gofumpt@v0.6.0
GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/cmd/golangci-lint@v1.56.1
GXZ_PACKAGE ?= github.com/ulikunitz/xz/cmd/gxz@v0.5.11
MISSPELL_PACKAGE ?= github.com/golangci/misspell/cmd/misspell@v0.4.1
SWAGGER_PACKAGE ?= github.com/go-swagger/go-swagger/cmd/swagger@v0.30.5
SWAGGER_PACKAGE ?= github.com/go-swagger/go-swagger/cmd/swagger@db51e79a0e37c572d8b59ae0c58bf2bbbbe53285
XGO_PACKAGE ?= src.techknowlogick.com/xgo@latest
GO_LICENSES_PACKAGE ?= github.com/google/go-licenses@v1.6.0
GOVULNCHECK_PACKAGE ?= golang.org/x/vuln/cmd/govulncheck@v1.0.3
@ -147,6 +147,8 @@ TAR_EXCLUDES := .git data indexers queues log node_modules $(EXECUTABLE) $(FOMAN
GO_DIRS := build cmd models modules routers services tests
WEB_DIRS := web_src/js web_src/css
ESLINT_FILES := web_src/js tools *.config.js tests/e2e
STYLELINT_FILES := web_src/css web_src/js/components/*.vue
SPELLCHECK_FILES := $(GO_DIRS) $(WEB_DIRS) docs/content templates options/locale/locale_en-US.ini .github
EDITORCONFIG_FILES := templates .github/workflows options/locale/locale_en-US.ini
@ -375,19 +377,19 @@ lint-backend-fix: lint-go-fix lint-go-vet lint-editorconfig
.PHONY: lint-js
lint-js: node_modules
npx eslint --color --max-warnings=0 --ext js,vue web_src/js build *.config.js tests/e2e
npx eslint --color --max-warnings=0 --ext js,vue $(ESLINT_FILES)
.PHONY: lint-js-fix
lint-js-fix: node_modules
npx eslint --color --max-warnings=0 --ext js,vue web_src/js build *.config.js tests/e2e --fix
npx eslint --color --max-warnings=0 --ext js,vue $(ESLINT_FILES) --fix
.PHONY: lint-css
lint-css: node_modules
npx stylelint --color --max-warnings=0 web_src/css web_src/js/components/*.vue
npx stylelint --color --max-warnings=0 $(STYLELINT_FILES)
.PHONY: lint-css-fix
lint-css-fix: node_modules
npx stylelint --color --max-warnings=0 web_src/css web_src/js/components/*.vue --fix
npx stylelint --color --max-warnings=0 $(STYLELINT_FILES) --fix
.PHONY: lint-swagger
lint-swagger: node_modules
@ -444,7 +446,7 @@ lint-yaml: .venv
.PHONY: watch
watch:
@bash build/watch.sh
@bash tools/watch.sh
.PHONY: watch-frontend
watch-frontend: node-check node_modules
@ -839,10 +841,6 @@ release-sources: | $(DIST_DIRS)
release-docs: | $(DIST_DIRS) docs
tar -czf $(DIST)/release/gitea-docs-$(VERSION).tar.gz -C ./docs .
.PHONY: docs
docs:
cd docs; bash scripts/trans-copy.sh;
.PHONY: deps
deps: deps-frontend deps-backend deps-tools deps-py
@ -920,7 +918,7 @@ $(WEBPACK_DEST): $(WEBPACK_SOURCES) $(WEBPACK_CONFIGS) package-lock.json
.PHONY: svg
svg: node-check | node_modules
rm -rf $(SVG_DEST_DIR)
node build/generate-svg.js
node tools/generate-svg.js
.PHONY: svg-check
svg-check: svg
@ -964,7 +962,7 @@ generate-gitignore:
.PHONY: generate-images
generate-images: | node_modules
npm install --no-save fabric@6.0.0-beta19 imagemin-zopfli@7
node build/generate-images.js $(TAGS)
node tools/generate-images.js $(TAGS)
.PHONY: generate-manpage
generate-manpage:

View File

@ -1,55 +1,18 @@
<p align="center">
<a href="https://gitea.io/">
<img alt="Gitea" src="https://raw.githubusercontent.com/go-gitea/gitea/main/public/assets/img/gitea.svg" width="220"/>
</a>
</p>
<h1 align="center">Gitea - Git with a cup of tea</h1>
# Gitea
<p align="center">
<a href="https://github.com/go-gitea/gitea/actions/workflows/release-nightly.yml?query=branch%3Amain" title="Release Nightly">
<img src="https://github.com/go-gitea/gitea/actions/workflows/release-nightly.yml/badge.svg?branch=main">
</a>
<a href="https://discord.gg/Gitea" title="Join the Discord chat at https://discord.gg/Gitea">
<img src="https://img.shields.io/discord/322538954119184384.svg?logo=discord&logoColor=white&label=Discord&color=5865F2">
</a>
<a href="https://app.codecov.io/gh/go-gitea/gitea" title="Codecov">
<img src="https://codecov.io/gh/go-gitea/gitea/branch/main/graph/badge.svg">
</a>
<a href="https://goreportcard.com/report/code.gitea.io/gitea" title="Go Report Card">
<img src="https://goreportcard.com/badge/code.gitea.io/gitea">
</a>
<a href="https://pkg.go.dev/code.gitea.io/gitea" title="GoDoc">
<img src="https://pkg.go.dev/badge/code.gitea.io/gitea?status.svg">
</a>
<a href="https://github.com/go-gitea/gitea/releases/latest" title="GitHub release">
<img src="https://img.shields.io/github/release/go-gitea/gitea.svg">
</a>
<a href="https://www.codetriage.com/go-gitea/gitea" title="Help Contribute to Open Source">
<img src="https://www.codetriage.com/go-gitea/gitea/badges/users.svg">
</a>
<a href="https://opencollective.com/gitea" title="Become a backer/sponsor of gitea">
<img src="https://opencollective.com/gitea/tiers/backers/badge.svg?label=backers&color=brightgreen">
</a>
<a href="https://opensource.org/licenses/MIT" title="License: MIT">
<img src="https://img.shields.io/badge/License-MIT-blue.svg">
</a>
<a href="https://gitpod.io/#https://github.com/go-gitea/gitea">
<img
src="https://img.shields.io/badge/Contribute%20with-Gitpod-908a85?logo=gitpod"
alt="Contribute with Gitpod"
/>
</a>
<a href="https://crowdin.com/project/gitea" title="Crowdin">
<img src="https://badges.crowdin.net/gitea/localized.svg">
</a>
<a href="https://www.tickgit.com/browse?repo=github.com/go-gitea/gitea&branch=main" title="TODOs">
<img src="https://badgen.net/https/api.tickgit.com/badgen/github.com/go-gitea/gitea/main">
</a>
</p>
[![](https://github.com/go-gitea/gitea/actions/workflows/release-nightly.yml/badge.svg?branch=main)](https://github.com/go-gitea/gitea/actions/workflows/release-nightly.yml?query=branch%3Amain "Release Nightly")
[![](https://img.shields.io/discord/322538954119184384.svg?logo=discord&logoColor=white&label=Discord&color=5865F2)](https://discord.gg/Gitea "Join the Discord chat at https://discord.gg/Gitea")
[![](https://goreportcard.com/badge/code.gitea.io/gitea)](https://goreportcard.com/report/code.gitea.io/gitea "Go Report Card")
[![](https://pkg.go.dev/badge/code.gitea.io/gitea?status.svg)](https://pkg.go.dev/code.gitea.io/gitea "GoDoc")
[![](https://img.shields.io/github/release/go-gitea/gitea.svg)](https://github.com/go-gitea/gitea/releases/latest "GitHub release")
[![](https://www.codetriage.com/go-gitea/gitea/badges/users.svg)](https://www.codetriage.com/go-gitea/gitea "Help Contribute to Open Source")
[![](https://opencollective.com/gitea/tiers/backers/badge.svg?label=backers&color=brightgreen)](https://opencollective.com/gitea "Become a backer/sponsor of gitea")
[![](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT "License: MIT")
[![Contribute with Gitpod](https://img.shields.io/badge/Contribute%20with-Gitpod-908a85?logo=gitpod)](https://gitpod.io/#https://github.com/go-gitea/gitea)
[![](https://badges.crowdin.net/gitea/localized.svg)](https://crowdin.com/project/gitea "Crowdin")
[![](https://badgen.net/https/api.tickgit.com/badgen/github.com/go-gitea/gitea/main)](https://www.tickgit.com/browse?repo=github.com/go-gitea/gitea&branch=main "TODOs")
<p align="center">
<a href="README_ZH.md">View this document in Chinese</a>
</p>
[View this document in Chinese](./README_ZH.md)
## Purpose

View File

@ -1,55 +1,18 @@
<p align="center">
<a href="https://gitea.io/">
<img alt="Gitea" src="https://raw.githubusercontent.com/go-gitea/gitea/main/public/assets/img/gitea.svg" width="220"/>
</a>
</p>
<h1 align="center">Gitea - Git with a cup of tea</h1>
# Gitea
<p align="center">
<a href="https://github.com/go-gitea/gitea/actions/workflows/release-nightly.yml?query=branch%3Amain" title="Release Nightly">
<img src="https://github.com/go-gitea/gitea/actions/workflows/release-nightly.yml/badge.svg?branch=main">
</a>
<a href="https://discord.gg/Gitea" title="Join the Discord chat at https://discord.gg/Gitea">
<img src="https://img.shields.io/discord/322538954119184384.svg">
</a>
<a href="https://app.codecov.io/gh/go-gitea/gitea" title="Codecov">
<img src="https://codecov.io/gh/go-gitea/gitea/branch/main/graph/badge.svg">
</a>
<a href="https://goreportcard.com/report/code.gitea.io/gitea" title="Go Report Card">
<img src="https://goreportcard.com/badge/code.gitea.io/gitea">
</a>
<a href="https://pkg.go.dev/code.gitea.io/gitea" title="GoDoc">
<img src="https://pkg.go.dev/badge/code.gitea.io/gitea?status.svg">
</a>
<a href="https://github.com/go-gitea/gitea/releases/latest" title="GitHub release">
<img src="https://img.shields.io/github/release/go-gitea/gitea.svg">
</a>
<a href="https://www.codetriage.com/go-gitea/gitea" title="Help Contribute to Open Source">
<img src="https://www.codetriage.com/go-gitea/gitea/badges/users.svg">
</a>
<a href="https://opencollective.com/gitea" title="Become a backer/sponsor of gitea">
<img src="https://opencollective.com/gitea/tiers/backers/badge.svg?label=backers&color=brightgreen">
</a>
<a href="https://opensource.org/licenses/MIT" title="License: MIT">
<img src="https://img.shields.io/badge/License-MIT-blue.svg">
</a>
<a href="https://gitpod.io/#https://github.com/go-gitea/gitea">
<img
src="https://img.shields.io/badge/Contribute%20with-Gitpod-908a85?logo=gitpod"
alt="Contribute with Gitpod"
/>
</a>
<a href="https://crowdin.com/project/gitea" title="Crowdin">
<img src="https://badges.crowdin.net/gitea/localized.svg">
</a>
<a href="https://www.tickgit.com/browse?repo=github.com/go-gitea/gitea&branch=main" title="TODOs">
<img src="https://badgen.net/https/api.tickgit.com/badgen/github.com/go-gitea/gitea/main">
</a>
</p>
[![](https://github.com/go-gitea/gitea/actions/workflows/release-nightly.yml/badge.svg?branch=main)](https://github.com/go-gitea/gitea/actions/workflows/release-nightly.yml?query=branch%3Amain "Release Nightly")
[![](https://img.shields.io/discord/322538954119184384.svg?logo=discord&logoColor=white&label=Discord&color=5865F2)](https://discord.gg/Gitea "Join the Discord chat at https://discord.gg/Gitea")
[![](https://goreportcard.com/badge/code.gitea.io/gitea)](https://goreportcard.com/report/code.gitea.io/gitea "Go Report Card")
[![](https://pkg.go.dev/badge/code.gitea.io/gitea?status.svg)](https://pkg.go.dev/code.gitea.io/gitea "GoDoc")
[![](https://img.shields.io/github/release/go-gitea/gitea.svg)](https://github.com/go-gitea/gitea/releases/latest "GitHub release")
[![](https://www.codetriage.com/go-gitea/gitea/badges/users.svg)](https://www.codetriage.com/go-gitea/gitea "Help Contribute to Open Source")
[![](https://opencollective.com/gitea/tiers/backers/badge.svg?label=backers&color=brightgreen)](https://opencollective.com/gitea "Become a backer/sponsor of gitea")
[![](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT "License: MIT")
[![Contribute with Gitpod](https://img.shields.io/badge/Contribute%20with-Gitpod-908a85?logo=gitpod)](https://gitpod.io/#https://github.com/go-gitea/gitea)
[![](https://badges.crowdin.net/gitea/localized.svg)](https://crowdin.com/project/gitea "Crowdin")
[![](https://badgen.net/https/api.tickgit.com/badgen/github.com/go-gitea/gitea/main)](https://www.tickgit.com/browse?repo=github.com/go-gitea/gitea&branch=main "TODOs")
<p align="center">
<a href="README.md">View this document in English</a>
</p>
[View this document in English](./README.md)
## 目标

File diff suppressed because one or more lines are too long

View File

@ -2608,7 +2608,7 @@ LEVEL = Info
;ENDLESS_TASK_TIMEOUT = 3h
;; Timeout to cancel the jobs which have waiting status, but haven't been picked by a runner for a long time
;ABANDONED_JOB_TIMEOUT = 24h
;; Strings committers can place inside a commit message to skip executing the corresponding actions workflow
;; Strings committers can place inside a commit message or PR title to skip executing the corresponding actions workflow
;SKIP_WORKFLOW_STRINGS = [skip ci],[ci skip],[no ci],[skip actions],[actions skip]
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
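
The comment change above now covers PR titles as well as commit messages. A minimal sketch of the kind of substring check this setting implies (hypothetical helper, not Gitea's actual implementation; the real matching rules are defined by Gitea):

```go
package main

import (
	"fmt"
	"strings"
)

// Mirrors the default value documented above.
var skipWorkflowStrings = []string{"[skip ci]", "[ci skip]", "[no ci]", "[skip actions]", "[actions skip]"}

// shouldSkipWorkflow is a hypothetical helper: it reports whether any skip
// string occurs in the commit message or in the PR title, which is the
// behaviour the updated comment describes. Exact matching semantics
// (case handling, placement) are up to Gitea; this is only illustrative.
func shouldSkipWorkflow(commitMessage, prTitle string) bool {
	for _, s := range skipWorkflowStrings {
		if strings.Contains(commitMessage, s) || strings.Contains(prTitle, s) {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(shouldSkipWorkflow("fix typo [skip ci]", ""))          // true
	fmt.Println(shouldSkipWorkflow("fix typo", "chore: bump [no ci]")) // true
	fmt.Println(shouldSkipWorkflow("fix typo", "add feature"))         // false
}
```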

View File

@ -37,7 +37,7 @@ gitea embedded list [--include-vendored] [patterns...]
- 列出所有模板文件,无论在哪个虚拟目录下:`**.tmpl`
- 列出所有邮件模板文件:`templates/mail/**.tmpl`
- 列出 `public/img` 目录下的所有文件:`public/img/**`
列出 `public/assets/img` 目录下的所有文件:`public/assets/img/**`
不要忘记为模式使用引号,因为空格、`*` 和其他字符可能对命令行解释器有特殊含义。
@ -49,8 +49,8 @@ gitea embedded list [--include-vendored] [patterns...]
```sh
$ gitea embedded list '**openid**'
public/img/auth/openid_connect.svg
public/img/openid-16x16.png
public/assets/img/auth/openid_connect.svg
public/assets/img/openid-16x16.png
templates/user/auth/finalize_openid.tmpl
templates/user/auth/signin_openid.tmpl
templates/user/auth/signup_openid_connect.tmpl

View File

@ -590,7 +590,7 @@ And the following unique queues:
## OpenID (`openid`)
- `ENABLE_OPENID_SIGNIN`: **false**: Allow authentication in via OpenID.
- `ENABLE_OPENID_SIGNIN`: **true**: Allow authentication in via OpenID.
- `ENABLE_OPENID_SIGNUP`: **! DISABLE\_REGISTRATION**: Allow registering via OpenID.
- `WHITELISTED_URIS`: **_empty_**: If non-empty, list of POSIX regex patterns matching
OpenID URI's to permit.
@ -1406,7 +1406,7 @@ PROXY_HOSTS = *.github.com
- `ZOMBIE_TASK_TIMEOUT`: **10m**: Timeout to stop the task which have running status, but haven't been updated for a long time
- `ENDLESS_TASK_TIMEOUT`: **3h**: Timeout to stop the tasks which have running status and continuous updates, but don't end for a long time
- `ABANDONED_JOB_TIMEOUT`: **24h**: Timeout to cancel the jobs which have waiting status, but haven't been picked by a runner for a long time
- `SKIP_WORKFLOW_STRINGS`: **[skip ci],[ci skip],[no ci],[skip actions],[actions skip]**: Strings committers can place inside a commit message to skip executing the corresponding actions workflow
- `SKIP_WORKFLOW_STRINGS`: **[skip ci],[ci skip],[no ci],[skip actions],[actions skip]**: Strings committers can place inside a commit message or PR title to skip executing the corresponding actions workflow
`DEFAULT_ACTIONS_URL` indicates where the Gitea Actions runners should find the actions with relative path.
For example, `uses: actions/checkout@v4` means `https://github.com/actions/checkout@v4` since the value of `DEFAULT_ACTIONS_URL` is `github`.

View File

@ -562,7 +562,7 @@ Gitea 创建以下非唯一队列:
## OpenID (`openid`)
- `ENABLE_OPENID_SIGNIN`: **false**允许通过OpenID进行身份验证。
- `ENABLE_OPENID_SIGNIN`: **true**允许通过OpenID进行身份验证。
- `ENABLE_OPENID_SIGNUP`: **! DISABLE\_REGISTRATION**允许通过OpenID进行注册。
- `WHITELISTED_URIS`: **_empty_**如果非空是一组匹配OpenID URI的POSIX正则表达式模式用于允许访问。
- `BLACKLISTED_URIS`: **_empty_**如果非空是一组匹配OpenID URI的POSIX正则表达式模式用于阻止访问。

View File

@ -163,7 +163,7 @@ clients don't even support HTML, so they show the text version included in the g
If the template fails to render, it will be noticed only at the moment the mail is sent.
A default subject is used if the subject template fails, and whatever was rendered successfully
from the the _mail body_ is used, disregarding the rest.
from the _mail body_ is used, disregarding the rest.
Please check [Gitea's logs](administration/logging-config.md) for error messages in case of trouble.

View File

@ -53,7 +53,7 @@ HTML 页面由[Go HTML Template](https://pkg.go.dev/html/template)渲染。
### 可访问性 / ARIA
在历史上Gitea大量使用了可访问性不友好的框架 Fomantic UI。
Gitea使用一些补丁使Fomantic UI更具可访问性参见`aria.js`和`aria.md`
Gitea 使用一些补丁使 Fomantic UI 更具可访问性(参见 `aria.md`
但仍然存在许多问题需要大量的工作和时间来修复。
### 框架使用

View File

@ -214,7 +214,7 @@ REPO_INDEXER_CONN_STR = http://elastic:changeme@localhost:9200
### Building and adding SVGs
SVG icons are built using the `make svg` target which compiles the icon sources defined in `build/generate-svg.js` into the output directory `public/assets/img/svg`. Custom icons can be added in the `web_src/svg` directory.
SVG icons are built using the `make svg` target which compiles the icon sources into the output directory `public/assets/img/svg`. Custom icons can be added in the `web_src/svg` directory.
### Building the Logo
@ -333,14 +333,9 @@ Documentation for the website is found in `docs/`. If you change this you
can test your changes to ensure that they pass continuous integration using:
```bash
# from the docs directory within Gitea
make trans-copy clean build
make lint-md
```
You will require a copy of [Hugo](https://gohugo.io/) to run this task. Please
note: this may generate a number of untracked Git objects, which will need to
be cleaned up.
## Visual Studio Code
A `launch.json` and `tasks.json` are provided within `contrib/ide/vscode` for

View File

@ -201,7 +201,7 @@ REPO_INDEXER_CONN_STR = http://elastic:changeme@localhost:9200
### 构建和添加 SVGs
SVG 图标是使用 `make svg` 目标构建的,该目标将 `build/generate-svg.js` 中定义的图标源编译到输出目录 `public/img/svg` 中。可以在 `web_src/svg` 目录中添加自定义图标。
SVG 图标是使用 `make svg` 命令构建的,该命令将图标资源编译到输出目录 `public/assets/img/svg` 中。可以在 `web_src/svg` 目录中添加自定义图标。
### 构建 Logo
@ -307,13 +307,9 @@ TAGS="bindata sqlite sqlite_unlock_notify" make build test-sqlite
该网站的文档位于 `docs/` 中。如果你改变了文档内容,你可以使用以下测试方法进行持续集成:
```bash
# 来自 Gitea 中的 docs 目录
make trans-copy clean build
make lint-md
```
运行此任务依赖于 [Hugo](https://gohugo.io/)。请注意:这可能会生成一些未跟踪的 Git 对象,
需要被清理干净。
## Visual Studio Code
`contrib/ide/vscode` 中为 Visual Studio Code 提供了 `launch.json``tasks.json`。查看

View File

@ -1,34 +0,0 @@
#!/usr/bin/env bash
set -e
#
# This script is used to copy the en-US content to our available locales as a
# fallback to always show all pages when displaying a specific locale that is
# missing some documents to be translated.
#
# Just execute the script without any argument and you will get the missing
# files copied into the content folder. We are calling this script within the CI
# server simply by `make trans-copy`.
#
declare -a LOCALES=(
"fr-fr"
"nl-nl"
"pt-br"
"zh-cn"
"zh-tw"
)
ROOT=$(realpath $(dirname $0)/..)
for SOURCE in $(find ${ROOT}/content -type f -iname *.en-us.md); do
for LOCALE in "${LOCALES[@]}"; do
DEST="${SOURCE%.en-us.md}.${LOCALE}.md"
if [[ ! -f ${DEST} ]]; then
cp ${SOURCE} ${DEST}
sed -i.bak "s/en\-us/${LOCALE}/g" ${DEST}
rm ${DEST}.bak
fi
done
done

go.mod
View File

@ -1,27 +1,27 @@
module code.gitea.io/gitea
go 1.21
go 1.22
require (
code.gitea.io/actions-proto-go v0.3.1
code.gitea.io/actions-proto-go v0.4.0
code.gitea.io/gitea-vet v0.2.3
code.gitea.io/sdk/gitea v0.17.1
codeberg.org/gusted/mcaptcha v0.0.0-20220723083913-4f3072e1d570
gitea.com/go-chi/binding v0.0.0-20230415142243-04b515c6d669
connectrpc.com/connect v1.15.0
gitea.com/go-chi/binding v0.0.0-20240316035258-17450c5f3028
gitea.com/go-chi/cache v0.2.0
gitea.com/go-chi/captcha v0.0.0-20230415143339-2c0754df4384
gitea.com/go-chi/session v0.0.0-20230613035928-39541325faa3
gitea.com/go-chi/captcha v0.0.0-20240315150714-fb487f629098
gitea.com/go-chi/session v0.0.0-20240316035857-16768d98ec96
gitea.com/lunny/dingtalk_webhook v0.0.0-20171025031554-e3534c89ef96
gitea.com/lunny/levelqueue v0.4.2-0.20230414023320-3c0159fe0fe4
github.com/42wim/sshsig v0.0.0-20211121163825-841cf5bbc121
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358
github.com/NYTimes/gziphandler v1.1.1
github.com/PuerkitoBio/goquery v1.8.1
github.com/alecthomas/chroma/v2 v2.12.0
github.com/PuerkitoBio/goquery v1.9.1
github.com/alecthomas/chroma/v2 v2.13.0
github.com/blakesmith/ar v0.0.0-20190502131153-809d4375e1fb
github.com/blevesearch/bleve/v2 v2.3.10
github.com/bufbuild/connect-go v1.10.0
github.com/buildkite/terminal-to-html/v3 v3.10.1
github.com/buildkite/terminal-to-html/v3 v3.11.0
github.com/caddyserver/certmagic v0.20.0
github.com/chi-middleware/proxy v1.1.1
github.com/denisenkom/go-mssqldb v0.12.3
@ -30,33 +30,33 @@ require (
github.com/djherbis/nio/v3 v3.0.1
github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5
github.com/dustin/go-humanize v1.0.1
github.com/editorconfig/editorconfig-core-go/v2 v2.6.0
github.com/editorconfig/editorconfig-core-go/v2 v2.6.1
github.com/emersion/go-imap v1.2.1
github.com/emirpasic/gods v1.18.1
github.com/ethantkoenig/rupture v1.0.1
github.com/felixge/fgprof v0.9.3
github.com/felixge/fgprof v0.9.4
github.com/fsnotify/fsnotify v1.7.0
github.com/gliderlabs/ssh v0.3.6
github.com/go-ap/activitypub v0.0.0-20231114162308-e219254dc5c9
github.com/go-ap/activitypub v0.0.0-20240316125321-b61fd6a83225
github.com/go-ap/jsonld v0.0.0-20221030091449-f2a191312c73
github.com/go-chi/chi/v5 v5.0.11
github.com/go-chi/chi/v5 v5.0.12
github.com/go-chi/cors v1.2.1
github.com/go-co-op/gocron v1.37.0
github.com/go-enry/go-enry/v2 v2.8.6
github.com/go-enry/go-enry/v2 v2.8.7
github.com/go-fed/httpsig v1.1.1-0.20201223112313-55836744818e
github.com/go-git/go-billy/v5 v5.5.0
github.com/go-git/go-git/v5 v5.11.0
github.com/go-ldap/ldap/v3 v3.4.6
github.com/go-sql-driver/mysql v1.7.1
github.com/go-sql-driver/mysql v1.8.0
github.com/go-swagger/go-swagger v0.30.5
github.com/go-testfixtures/testfixtures/v3 v3.10.0
github.com/go-webauthn/webauthn v0.10.0
github.com/go-webauthn/webauthn v0.10.2
github.com/gobwas/glob v0.2.3
github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f
github.com/gogs/go-gogs-client v0.0.0-20210131175652-1d7215cd8d85
github.com/golang-jwt/jwt/v5 v5.2.0
github.com/golang-jwt/jwt/v5 v5.2.1
github.com/google/go-github/v57 v57.0.0
github.com/google/pprof v0.0.0-20240117000934-35fc243c5815
github.com/google/pprof v0.0.0-20240227163752-401108e1b7e7
github.com/google/uuid v1.6.0
github.com/gorilla/feeds v1.1.2
github.com/gorilla/sessions v1.2.2
@ -64,20 +64,20 @@ require (
github.com/hashicorp/golang-lru/v2 v2.0.7
github.com/huandu/xstrings v1.4.0
github.com/jaytaylor/html2text v0.0.0-20230321000545-74c2419ad056
github.com/jhillyerd/enmime v1.1.0
github.com/jhillyerd/enmime v1.2.0
github.com/json-iterator/go v1.1.12
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51
github.com/keybase/go-crypto v0.0.0-20200123153347-de78d2cb44f4
github.com/klauspost/compress v1.17.4
github.com/klauspost/cpuid/v2 v2.2.6
github.com/klauspost/compress v1.17.7
github.com/klauspost/cpuid/v2 v2.2.7
github.com/lib/pq v1.10.9
github.com/markbates/goth v1.78.0
github.com/markbates/goth v1.79.0
github.com/mattn/go-isatty v0.0.20
github.com/mattn/go-sqlite3 v1.14.22
github.com/meilisearch/meilisearch-go v0.26.1
github.com/meilisearch/meilisearch-go v0.26.2
github.com/mholt/archiver/v3 v3.5.1
github.com/microcosm-cc/bluemonday v1.0.26
github.com/minio/minio-go/v7 v7.0.66
github.com/minio/minio-go/v7 v7.0.69
github.com/mitchellh/mapstructure v1.5.0
github.com/msteinert/pam v1.2.0
github.com/nektos/act v0.2.52
@ -85,36 +85,36 @@ require (
github.com/olahol/melody v1.1.4
github.com/olivere/elastic/v7 v7.0.32
github.com/opencontainers/go-digest v1.0.0
github.com/opencontainers/image-spec v1.1.0-rc6
github.com/opencontainers/image-spec v1.1.0
github.com/pkg/errors v0.9.1
github.com/pquerna/otp v1.4.0
github.com/prometheus/client_golang v1.18.0
github.com/prometheus/client_golang v1.19.0
github.com/quasoft/websspi v1.1.2
github.com/redis/go-redis/v9 v9.4.0
github.com/redis/go-redis/v9 v9.5.1
github.com/robfig/cron/v3 v3.0.1
github.com/santhosh-tekuri/jsonschema/v5 v5.3.1
github.com/sassoftware/go-rpmutils v0.2.1-0.20240124161140-277b154961dd
github.com/sassoftware/go-rpmutils v0.3.0
github.com/sergi/go-diff v1.3.1
github.com/shurcooL/vfsgen v0.0.0-20230704071429-0000e147ea92
github.com/stretchr/testify v1.8.4
github.com/stretchr/testify v1.9.0
github.com/syndtr/goleveldb v1.0.0
github.com/tstranex/u2f v1.0.0
github.com/ulikunitz/xz v0.5.11
github.com/urfave/cli/v2 v2.27.1
github.com/xanzy/go-gitlab v0.96.0
github.com/xanzy/go-gitlab v0.100.0
github.com/xeipuuv/gojsonschema v1.2.0
github.com/yohcop/openid-go v1.0.1
github.com/yuin/goldmark v1.6.0
github.com/yuin/goldmark v1.7.0
github.com/yuin/goldmark-highlighting/v2 v2.0.0-20230729083705-37449abec8cc
github.com/yuin/goldmark-meta v1.1.0
golang.org/x/crypto v0.18.0
golang.org/x/crypto v0.21.0
golang.org/x/image v0.15.0
golang.org/x/net v0.20.0
golang.org/x/oauth2 v0.16.0
golang.org/x/sys v0.16.0
golang.org/x/net v0.22.0
golang.org/x/oauth2 v0.18.0
golang.org/x/sys v0.18.0
golang.org/x/text v0.14.0
golang.org/x/tools v0.17.0
google.golang.org/grpc v1.60.1
golang.org/x/tools v0.19.0
google.golang.org/grpc v1.62.1
google.golang.org/protobuf v1.33.0
gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df
gopkg.in/ini.v1 v1.67.0
@ -122,23 +122,24 @@ require (
mvdan.cc/xurls/v2 v2.5.0
strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251
xorm.io/builder v0.3.13
xorm.io/xorm v1.3.7
xorm.io/xorm v1.3.8
)
require (
cloud.google.com/go/compute v1.23.3 // indirect
cloud.google.com/go/compute v1.25.1 // indirect
cloud.google.com/go/compute/metadata v0.2.3 // indirect
dario.cat/mergo v1.0.0 // indirect
filippo.io/edwards25519 v1.1.0 // indirect
git.sr.ht/~mariusor/go-xsd-duration v0.0.0-20220703122237-02e73435a078 // indirect
github.com/ClickHouse/ch-go v0.61.1 // indirect
github.com/ClickHouse/clickhouse-go/v2 v2.18.0 // indirect
github.com/ClickHouse/ch-go v0.61.5 // indirect
github.com/ClickHouse/clickhouse-go/v2 v2.22.0 // indirect
github.com/DataDog/zstd v1.5.5 // indirect
github.com/Masterminds/goutils v1.1.1 // indirect
github.com/Masterminds/semver/v3 v3.2.1 // indirect
github.com/Masterminds/sprig/v3 v3.2.3 // indirect
github.com/Microsoft/go-winio v0.6.1 // indirect
github.com/ProtonMail/go-crypto v1.0.0 // indirect
github.com/RoaringBitmap/roaring v1.7.0 // indirect
github.com/RoaringBitmap/roaring v1.9.0 // indirect
github.com/andybalholm/brotli v1.1.0 // indirect
github.com/andybalholm/cascadia v1.3.2 // indirect
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be // indirect
@ -146,12 +147,12 @@ require (
github.com/aymerick/douceur v0.2.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bits-and-blooms/bitset v1.13.0 // indirect
github.com/blevesearch/bleve_index_api v1.1.5 // indirect
github.com/blevesearch/geo v0.1.19 // indirect
github.com/blevesearch/bleve_index_api v1.1.6 // indirect
github.com/blevesearch/geo v0.1.20 // indirect
github.com/blevesearch/go-porterstemmer v1.0.3 // indirect
github.com/blevesearch/gtreap v0.1.1 // indirect
github.com/blevesearch/mmap-go v1.0.4 // indirect
github.com/blevesearch/scorch_segment_api/v2 v2.2.6 // indirect
github.com/blevesearch/scorch_segment_api/v2 v2.2.8 // indirect
github.com/blevesearch/segment v0.9.1 // indirect
github.com/blevesearch/snowballstem v0.9.0 // indirect
github.com/blevesearch/upsidedown_store_api v1.0.2 // indirect
@ -167,43 +168,43 @@ require (
github.com/cespare/xxhash/v2 v2.2.0 // indirect
github.com/cloudflare/circl v1.3.7 // indirect
github.com/couchbase/go-couchbase v0.1.1 // indirect
github.com/couchbase/gomemcached v0.3.0 // indirect
github.com/couchbase/gomemcached v0.3.1 // indirect
github.com/couchbase/goutils v0.1.2 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.3 // indirect
github.com/cyphar/filepath-securejoin v0.2.4 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/davidmz/go-pageant v1.0.2 // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/dlclark/regexp2 v1.10.0 // indirect
github.com/dlclark/regexp2 v1.11.0 // indirect
github.com/emersion/go-sasl v0.0.0-20231106173351-e73c9f7bad43 // indirect
github.com/fatih/color v1.16.0 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/fxamacker/cbor/v2 v2.5.0 // indirect
github.com/go-ap/errors v0.0.0-20231003111023-183eef4b31b7 // indirect
github.com/fxamacker/cbor/v2 v2.6.0 // indirect
github.com/go-ap/errors v0.0.0-20240304112515-6077fa9c17b0 // indirect
github.com/go-asn1-ber/asn1-ber v1.5.5 // indirect
github.com/go-enry/go-oniguruma v1.2.1 // indirect
github.com/go-faster/city v1.0.1 // indirect
github.com/go-faster/errors v0.7.1 // indirect
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect
github.com/go-openapi/analysis v0.22.2 // indirect
github.com/go-openapi/errors v0.21.0 // indirect
github.com/go-openapi/inflect v0.19.0 // indirect
github.com/go-openapi/jsonpointer v0.20.2 // indirect
github.com/go-openapi/jsonreference v0.20.4 // indirect
github.com/go-openapi/loads v0.21.5 // indirect
github.com/go-openapi/runtime v0.26.2 // indirect
github.com/go-openapi/spec v0.20.14 // indirect
github.com/go-openapi/strfmt v0.22.0 // indirect
github.com/go-openapi/swag v0.22.7 // indirect
github.com/go-openapi/validate v0.22.6 // indirect
github.com/go-webauthn/x v0.1.6 // indirect
github.com/go-openapi/analysis v0.23.0 // indirect
github.com/go-openapi/errors v0.22.0 // indirect
github.com/go-openapi/inflect v0.21.0 // indirect
github.com/go-openapi/jsonpointer v0.21.0 // indirect
github.com/go-openapi/jsonreference v0.21.0 // indirect
github.com/go-openapi/loads v0.22.0 // indirect
github.com/go-openapi/runtime v0.28.0 // indirect
github.com/go-openapi/spec v0.21.0 // indirect
github.com/go-openapi/strfmt v0.23.0 // indirect
github.com/go-openapi/swag v0.23.0 // indirect
github.com/go-openapi/validate v0.24.0 // indirect
github.com/go-webauthn/x v0.1.9 // indirect
github.com/goccy/go-json v0.10.2 // indirect
github.com/golang-jwt/jwt/v4 v4.5.0 // indirect
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect
github.com/golang-sql/sqlexp v0.1.0 // indirect
github.com/golang/geo v0.0.0-20230421003525-6adc56603217 // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/golang/protobuf v1.5.3 // indirect
github.com/golang/protobuf v1.5.4 // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/google/go-querystring v1.1.0 // indirect
github.com/google/go-tpm v0.9.0 // indirect
@ -248,11 +249,11 @@ require (
github.com/pierrec/lz4/v4 v4.1.21 // indirect
github.com/pjbgf/sha1cd v0.3.0 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/prometheus/client_model v0.5.0 // indirect
github.com/prometheus/common v0.46.0 // indirect
github.com/prometheus/procfs v0.12.0 // indirect
github.com/rhysd/actionlint v1.6.26 // indirect
github.com/rivo/uniseg v0.4.4 // indirect
github.com/prometheus/client_model v0.6.0 // indirect
github.com/prometheus/common v0.50.0 // indirect
github.com/prometheus/procfs v0.13.0 // indirect
github.com/rhysd/actionlint v1.6.27 // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/rogpeppe/go-internal v1.12.0 // indirect
github.com/rs/xid v1.5.0 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
@ -262,7 +263,7 @@ require (
github.com/shopspring/decimal v1.3.1 // indirect
github.com/shurcooL/httpfs v0.0.0-20230704072500-f1e31cf0ba5c // indirect
github.com/sirupsen/logrus v1.9.3 // indirect
github.com/skeema/knownhosts v1.2.1 // indirect
github.com/skeema/knownhosts v1.2.2 // indirect
github.com/sourcegraph/conc v0.3.0 // indirect
github.com/spf13/afero v1.11.0 // indirect
github.com/spf13/cast v1.6.0 // indirect
@ -273,28 +274,28 @@ require (
github.com/toqueteos/webbrowser v1.2.0 // indirect
github.com/unknwon/com v1.0.1 // indirect
github.com/valyala/bytebufferpool v1.0.0 // indirect
github.com/valyala/fasthttp v1.51.0 // indirect
github.com/valyala/fasthttp v1.52.0 // indirect
github.com/valyala/fastjson v1.6.4 // indirect
github.com/x448/float16 v0.8.4 // indirect
github.com/xanzy/ssh-agent v0.3.3 // indirect
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect
github.com/xrash/smetrics v0.0.0-20231213231151-1d8dd44e695e // indirect
github.com/xrash/smetrics v0.0.0-20240312152122-5f08fbb34913 // indirect
github.com/zeebo/blake3 v0.2.3 // indirect
go.etcd.io/bbolt v1.3.8 // indirect
go.mongodb.org/mongo-driver v1.13.1 // indirect
go.opentelemetry.io/otel v1.22.0 // indirect
go.opentelemetry.io/otel/trace v1.22.0 // indirect
go.etcd.io/bbolt v1.3.9 // indirect
go.mongodb.org/mongo-driver v1.14.0 // indirect
go.opentelemetry.io/otel v1.24.0 // indirect
go.opentelemetry.io/otel/trace v1.24.0 // indirect
go.uber.org/atomic v1.11.0 // indirect
go.uber.org/multierr v1.11.0 // indirect
go.uber.org/zap v1.26.0 // indirect
golang.org/x/exp v0.0.0-20240119083558-1b970713d09a // indirect
golang.org/x/mod v0.14.0 // indirect
go.uber.org/zap v1.27.0 // indirect
golang.org/x/exp v0.0.0-20240314144324-c7f7c6466f7f // indirect
golang.org/x/mod v0.16.0 // indirect
golang.org/x/sync v0.6.0 // indirect
golang.org/x/time v0.5.0 // indirect
google.golang.org/appengine v1.6.8 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240116215550-a9fa1716bcac // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240314234333-6e1732d8331c // indirect
gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect
gopkg.in/warnings.v0 v0.1.2 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect

go.sum

File diff suppressed because it is too large

View File

@ -170,15 +170,16 @@ func updateRepoRunsNumbers(ctx context.Context, repo *repo_model.Repository) err
return err
}
// CancelRunningJobs cancels all running and waiting jobs associated with a specific workflow.
func CancelRunningJobs(ctx context.Context, repoID int64, ref, workflowID string, event webhook_module.HookEventType) error {
// Find all runs in the specified repository, reference, and workflow with statuses 'Running' or 'Waiting'.
// CancelPreviousJobs cancels all previous jobs of the same repository, reference, workflow, and event.
// It's useful when a new run is triggered, and all previous runs needn't be continued anymore.
func CancelPreviousJobs(ctx context.Context, repoID int64, ref, workflowID string, event webhook_module.HookEventType) error {
// Find all runs in the specified repository, reference, and workflow with non-final status
runs, total, err := db.FindAndCount[ActionRun](ctx, FindRunOptions{
RepoID: repoID,
Ref: ref,
WorkflowID: workflowID,
TriggerEvent: event,
Status: []Status{StatusRunning, StatusWaiting},
Status: []Status{StatusRunning, StatusWaiting, StatusBlocked},
})
if err != nil {
return err
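
The rename above (CancelRunningJobs to CancelPreviousJobs) also widens the status filter to include blocked runs. A minimal sketch of a caller, assuming only the signature shown in this hunk (the helper and the chosen event are hypothetical):

```go
package sketch

import (
	"context"
	"fmt"

	actions_model "code.gitea.io/gitea/models/actions"
	webhook_module "code.gitea.io/gitea/modules/webhook"
)

// cancelStalePushRuns is a hypothetical helper: before a newly triggered
// push run is inserted, cancel the running, waiting, and blocked runs of the
// same repository, ref, and workflow via the renamed CancelPreviousJobs.
func cancelStalePushRuns(ctx context.Context, repoID int64, ref, workflowID string) error {
	if err := actions_model.CancelPreviousJobs(ctx, repoID, ref, workflowID, webhook_module.HookEventPush); err != nil {
		return fmt.Errorf("CancelPreviousJobs: %w", err)
	}
	return nil
}
```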

View File

@ -127,14 +127,14 @@ func CleanRepoScheduleTasks(ctx context.Context, repo *repo_model.Repository) er
return fmt.Errorf("DeleteCronTaskByRepo: %v", err)
}
// cancel running cron jobs of this repository and delete old schedules
if err := CancelRunningJobs(
if err := CancelPreviousJobs(
ctx,
repo.ID,
repo.DefaultBranch,
"",
webhook_module.HookEventSchedule,
); err != nil {
return fmt.Errorf("CancelRunningJobs: %v", err)
return fmt.Errorf("CancelPreviousJobs: %v", err)
}
return nil
}

View File

@ -148,6 +148,7 @@ type Action struct {
Repo *repo_model.Repository `xorm:"-"`
CommentID int64 `xorm:"INDEX"`
Comment *issues_model.Comment `xorm:"-"`
Issue *issues_model.Issue `xorm:"-"` // get the issue id from content
IsDeleted bool `xorm:"NOT NULL DEFAULT false"`
RefName string
IsPrivate bool `xorm:"NOT NULL DEFAULT false"`
@ -290,11 +291,6 @@ func (a *Action) GetRepoAbsoluteLink(ctx context.Context) string {
return setting.AppURL + url.PathEscape(a.GetRepoUserName(ctx)) + "/" + url.PathEscape(a.GetRepoName(ctx))
}
// GetCommentHTMLURL returns link to action comment.
func (a *Action) GetCommentHTMLURL(ctx context.Context) string {
return a.getCommentHTMLURL(ctx)
}
func (a *Action) loadComment(ctx context.Context) (err error) {
if a.CommentID == 0 || a.Comment != nil {
return nil
@ -303,7 +299,8 @@ func (a *Action) loadComment(ctx context.Context) (err error) {
return err
}
func (a *Action) getCommentHTMLURL(ctx context.Context) string {
// GetCommentHTMLURL returns link to action comment.
func (a *Action) GetCommentHTMLURL(ctx context.Context) string {
if a == nil {
return "#"
}
@ -311,34 +308,19 @@ func (a *Action) getCommentHTMLURL(ctx context.Context) string {
if a.Comment != nil {
return a.Comment.HTMLURL(ctx)
}
if len(a.GetIssueInfos()) == 0 {
if err := a.LoadIssue(ctx); err != nil || a.Issue == nil {
return "#"
}
// Return link to issue
issueIDString := a.GetIssueInfos()[0]
issueID, err := strconv.ParseInt(issueIDString, 10, 64)
if err != nil {
if err := a.Issue.LoadRepo(ctx); err != nil {
return "#"
}
issue, err := issues_model.GetIssueByID(ctx, issueID)
if err != nil {
return "#"
}
if err = issue.LoadRepo(ctx); err != nil {
return "#"
}
return issue.HTMLURL()
return a.Issue.HTMLURL()
}
// GetCommentLink returns link to action comment.
func (a *Action) GetCommentLink(ctx context.Context) string {
return a.getCommentLink(ctx)
}
func (a *Action) getCommentLink(ctx context.Context) string {
if a == nil {
return "#"
}
@ -346,26 +328,15 @@ func (a *Action) getCommentLink(ctx context.Context) string {
if a.Comment != nil {
return a.Comment.Link(ctx)
}
if len(a.GetIssueInfos()) == 0 {
if err := a.LoadIssue(ctx); err != nil || a.Issue == nil {
return "#"
}
// Return link to issue
issueIDString := a.GetIssueInfos()[0]
issueID, err := strconv.ParseInt(issueIDString, 10, 64)
if err != nil {
if err := a.Issue.LoadRepo(ctx); err != nil {
return "#"
}
issue, err := issues_model.GetIssueByID(ctx, issueID)
if err != nil {
return "#"
}
if err = issue.LoadRepo(ctx); err != nil {
return "#"
}
return issue.Link()
return a.Issue.Link()
}
// GetBranch returns the action's repository branch.
@ -393,6 +364,10 @@ func (a *Action) GetCreate() time.Time {
return a.CreatedUnix.AsTime()
}
func (a *Action) IsIssueEvent() bool {
return a.OpType.InActions("comment_issue", "approve_pull_request", "reject_pull_request", "comment_pull", "merge_pull_request")
}
// GetIssueInfos returns a list of associated information with the action.
func (a *Action) GetIssueInfos() []string {
// make sure it always returns 3 elements, because there are some access to the a[1] and a[2] without checking the length
@ -403,27 +378,52 @@ func (a *Action) GetIssueInfos() []string {
return ret
}
// GetIssueTitle returns the title of first issue associated with the action.
func (a *Action) GetIssueTitle(ctx context.Context) string {
index, _ := strconv.ParseInt(a.GetIssueInfos()[0], 10, 64)
issue, err := issues_model.GetIssueByIndex(ctx, a.RepoID, index)
if err != nil {
log.Error("GetIssueByIndex: %v", err)
return "500 when get issue"
func (a *Action) getIssueIndex() int64 {
infos := a.GetIssueInfos()
if len(infos) == 0 {
return 0
}
return issue.Title
index, _ := strconv.ParseInt(infos[0], 10, 64)
return index
}
// GetIssueContent returns the content of first issue associated with
// this action.
func (a *Action) GetIssueContent(ctx context.Context) string {
index, _ := strconv.ParseInt(a.GetIssueInfos()[0], 10, 64)
issue, err := issues_model.GetIssueByIndex(ctx, a.RepoID, index)
if err != nil {
log.Error("GetIssueByIndex: %v", err)
return "500 when get issue"
func (a *Action) LoadIssue(ctx context.Context) error {
if a.Issue != nil {
return nil
}
return issue.Content
if index := a.getIssueIndex(); index > 0 {
issue, err := issues_model.GetIssueByIndex(ctx, a.RepoID, index)
if err != nil {
return err
}
a.Issue = issue
a.Issue.Repo = a.Repo
}
return nil
}
// GetIssueTitle returns the title of first issue associated with the action.
func (a *Action) GetIssueTitle(ctx context.Context) string {
if err := a.LoadIssue(ctx); err != nil {
log.Error("LoadIssue: %v", err)
return "<500 when get issue>"
}
if a.Issue == nil {
return "<Issue not found>"
}
return a.Issue.Title
}
// GetIssueContent returns the content of first issue associated with this action.
func (a *Action) GetIssueContent(ctx context.Context) string {
if err := a.LoadIssue(ctx); err != nil {
log.Error("LoadIssue: %v", err)
return "<500 when get issue>"
}
if a.Issue == nil {
return "<Content not found>"
}
return a.Issue.Content
}
// GetFeedsOptions options for retrieving feeds
@ -463,7 +463,7 @@ func GetFeeds(ctx context.Context, opts GetFeedsOptions) (ActionList, int64, err
return nil, 0, fmt.Errorf("FindAndCount: %w", err)
}
if err := ActionList(actions).loadAttributes(ctx); err != nil {
if err := ActionList(actions).LoadAttributes(ctx); err != nil {
return nil, 0, fmt.Errorf("LoadAttributes: %w", err)
}
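
The refactor above replaces the repeated parse-and-fetch logic with a cached a.Issue plus a LoadIssue helper. A minimal sketch of the resulting call pattern, assuming only the methods shown in this diff (the surrounding function is hypothetical):

```go
package sketch

import (
	"context"

	activities_model "code.gitea.io/gitea/models/activities"
)

// describeAction is a hypothetical consumer of the lazy-loading pattern:
// LoadIssue parses the issue index from the action content once, fetches the
// issue, and caches it on a.Issue; later accessors reuse the cached value.
func describeAction(ctx context.Context, a *activities_model.Action) string {
	if err := a.LoadIssue(ctx); err != nil {
		return "failed to load issue: " + err.Error()
	}
	if a.Issue == nil {
		// Not every action references an issue (see IsIssueEvent above).
		return a.GetRepoAbsoluteLink(ctx)
	}
	return a.Issue.Title + " - " + a.GetCommentHTMLURL(ctx)
}
```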

View File

@ -6,11 +6,16 @@ package activities
import (
"context"
"fmt"
"strconv"
"code.gitea.io/gitea/models/db"
issues_model "code.gitea.io/gitea/models/issues"
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/util"
"xorm.io/builder"
)
// ActionList defines a list of actions
@ -24,7 +29,7 @@ func (actions ActionList) getUserIDs() []int64 {
return userIDs.Values()
}
func (actions ActionList) loadUsers(ctx context.Context) (map[int64]*user_model.User, error) {
func (actions ActionList) LoadActUsers(ctx context.Context) (map[int64]*user_model.User, error) {
if len(actions) == 0 {
return nil, nil
}
@ -52,7 +57,7 @@ func (actions ActionList) getRepoIDs() []int64 {
return repoIDs.Values()
}
func (actions ActionList) loadRepositories(ctx context.Context) error {
func (actions ActionList) LoadRepositories(ctx context.Context) error {
if len(actions) == 0 {
return nil
}
@ -63,11 +68,11 @@ func (actions ActionList) loadRepositories(ctx context.Context) error {
if err != nil {
return fmt.Errorf("find repository: %w", err)
}
for _, action := range actions {
action.Repo = repoMaps[action.RepoID]
}
return nil
repos := repo_model.RepositoryList(util.ValuesOfMap(repoMaps))
return repos.LoadUnits(ctx)
}
func (actions ActionList) loadRepoOwner(ctx context.Context, userMap map[int64]*user_model.User) (err error) {
@ -75,37 +80,124 @@ func (actions ActionList) loadRepoOwner(ctx context.Context, userMap map[int64]*
userMap = make(map[int64]*user_model.User)
}
userSet := make(container.Set[int64], len(actions))
for _, action := range actions {
if action.Repo == nil {
continue
}
repoOwner, ok := userMap[action.Repo.OwnerID]
if !ok {
repoOwner, err = user_model.GetUserByID(ctx, action.Repo.OwnerID)
if err != nil {
if user_model.IsErrUserNotExist(err) {
continue
}
return err
}
userMap[repoOwner.ID] = repoOwner
if _, ok := userMap[action.Repo.OwnerID]; !ok {
userSet.Add(action.Repo.OwnerID)
}
}
if err := db.GetEngine(ctx).
In("id", userSet.Values()).
Find(&userMap); err != nil {
return fmt.Errorf("find user: %w", err)
}
for _, action := range actions {
if action.Repo != nil {
action.Repo.Owner = userMap[action.Repo.OwnerID]
}
action.Repo.Owner = repoOwner
}
return nil
}
// loadAttributes loads all attributes
func (actions ActionList) loadAttributes(ctx context.Context) error {
userMap, err := actions.loadUsers(ctx)
// LoadAttributes loads all attributes
func (actions ActionList) LoadAttributes(ctx context.Context) error {
// the load sequence cannot be changed because of the dependencies
userMap, err := actions.LoadActUsers(ctx)
if err != nil {
return err
}
if err := actions.loadRepositories(ctx); err != nil {
if err := actions.LoadRepositories(ctx); err != nil {
return err
}
return actions.loadRepoOwner(ctx, userMap)
if err := actions.loadRepoOwner(ctx, userMap); err != nil {
return err
}
if err := actions.LoadIssues(ctx); err != nil {
return err
}
return actions.LoadComments(ctx)
}
func (actions ActionList) LoadComments(ctx context.Context) error {
if len(actions) == 0 {
return nil
}
commentIDs := make([]int64, 0, len(actions))
for _, action := range actions {
if action.CommentID > 0 {
commentIDs = append(commentIDs, action.CommentID)
}
}
commentsMap := make(map[int64]*issues_model.Comment, len(commentIDs))
if err := db.GetEngine(ctx).In("id", commentIDs).Find(&commentsMap); err != nil {
return fmt.Errorf("find comment: %w", err)
}
for _, action := range actions {
if action.CommentID > 0 {
action.Comment = commentsMap[action.CommentID]
if action.Comment != nil {
action.Comment.Issue = action.Issue
}
}
}
return nil
}
func (actions ActionList) LoadIssues(ctx context.Context) error {
if len(actions) == 0 {
return nil
}
conditions := builder.NewCond()
issueNum := 0
for _, action := range actions {
if action.IsIssueEvent() {
infos := action.GetIssueInfos()
if len(infos) == 0 {
continue
}
index, _ := strconv.ParseInt(infos[0], 10, 64)
if index > 0 {
conditions = conditions.Or(builder.Eq{
"repo_id": action.RepoID,
"`index`": index,
})
issueNum++
}
}
}
if !conditions.IsValid() {
return nil
}
issuesMap := make(map[string]*issues_model.Issue, issueNum)
issues := make([]*issues_model.Issue, 0, issueNum)
if err := db.GetEngine(ctx).Where(conditions).Find(&issues); err != nil {
return fmt.Errorf("find issue: %w", err)
}
for _, issue := range issues {
issuesMap[fmt.Sprintf("%d-%d", issue.RepoID, issue.Index)] = issue
}
for _, action := range actions {
if !action.IsIssueEvent() {
continue
}
if index := action.getIssueIndex(); index > 0 {
if issue, ok := issuesMap[fmt.Sprintf("%d-%d", action.RepoID, index)]; ok {
action.Issue = issue
action.Issue.Repo = action.Repo
}
}
}
return nil
}
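
LoadIssues above batches all referenced issues into a single query by OR-ing one (repo_id, index) equality per action with xorm's builder package. A minimal standalone sketch of that condition-building technique (illustrative values, not Gitea code):

```go
package main

import (
	"fmt"

	"xorm.io/builder"
)

func main() {
	// Each pair stands for one action's (repo_id, issue index), mirroring the
	// loop in LoadIssues above; the combined condition fetches every
	// referenced issue in a single SELECT.
	pairs := []struct{ RepoID, Index int64 }{{1, 10}, {1, 12}, {7, 3}}

	cond := builder.NewCond()
	for _, p := range pairs {
		cond = cond.Or(builder.Eq{"repo_id": p.RepoID, "`index`": p.Index})
	}

	sql, args, err := builder.ToSQL(cond)
	if err != nil {
		panic(err)
	}
	fmt.Println(sql)  // e.g. (`index`=? AND repo_id=?) OR ... (column order may vary)
	fmt.Println(args)
}
```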

View File

@ -12,12 +12,8 @@ import (
"code.gitea.io/gitea/models/db"
issues_model "code.gitea.io/gitea/models/issues"
"code.gitea.io/gitea/models/organization"
access_model "code.gitea.io/gitea/models/perm/access"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unit"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
@ -79,53 +75,6 @@ func init() {
db.RegisterModel(new(Notification))
}
// FindNotificationOptions represent the filters for notifications. If an ID is 0 it will be ignored.
type FindNotificationOptions struct {
db.ListOptions
UserID int64
RepoID int64
IssueID int64
Status []NotificationStatus
Source []NotificationSource
UpdatedAfterUnix int64
UpdatedBeforeUnix int64
}
// ToCond will convert each condition into a xorm-Cond
func (opts FindNotificationOptions) ToConds() builder.Cond {
cond := builder.NewCond()
if opts.UserID != 0 {
cond = cond.And(builder.Eq{"notification.user_id": opts.UserID})
}
if opts.RepoID != 0 {
cond = cond.And(builder.Eq{"notification.repo_id": opts.RepoID})
}
if opts.IssueID != 0 {
cond = cond.And(builder.Eq{"notification.issue_id": opts.IssueID})
}
if len(opts.Status) > 0 {
if len(opts.Status) == 1 {
cond = cond.And(builder.Eq{"notification.status": opts.Status[0]})
} else {
cond = cond.And(builder.In("notification.status", opts.Status))
}
}
if len(opts.Source) > 0 {
cond = cond.And(builder.In("notification.source", opts.Source))
}
if opts.UpdatedAfterUnix != 0 {
cond = cond.And(builder.Gte{"notification.updated_unix": opts.UpdatedAfterUnix})
}
if opts.UpdatedBeforeUnix != 0 {
cond = cond.And(builder.Lte{"notification.updated_unix": opts.UpdatedBeforeUnix})
}
return cond
}
func (opts FindNotificationOptions) ToOrders() string {
return "notification.updated_unix DESC"
}
// CreateRepoTransferNotification creates notification for the user a repository was transferred to
func CreateRepoTransferNotification(ctx context.Context, doer, newOwner *user_model.User, repo *repo_model.Repository) error {
return db.WithTx(ctx, func(ctx context.Context) error {
@ -159,109 +108,6 @@ func CreateRepoTransferNotification(ctx context.Context, doer, newOwner *user_mo
})
}
// CreateOrUpdateIssueNotifications creates an issue notification
// for each watcher, or updates it if already exists
// receiverID > 0 just send to receiver, else send to all watcher
func CreateOrUpdateIssueNotifications(ctx context.Context, issueID, commentID, notificationAuthorID, receiverID int64) error {
ctx, committer, err := db.TxContext(ctx)
if err != nil {
return err
}
defer committer.Close()
if err := createOrUpdateIssueNotifications(ctx, issueID, commentID, notificationAuthorID, receiverID); err != nil {
return err
}
return committer.Commit()
}
func createOrUpdateIssueNotifications(ctx context.Context, issueID, commentID, notificationAuthorID, receiverID int64) error {
// init
var toNotify container.Set[int64]
notifications, err := db.Find[Notification](ctx, FindNotificationOptions{
IssueID: issueID,
})
if err != nil {
return err
}
issue, err := issues_model.GetIssueByID(ctx, issueID)
if err != nil {
return err
}
if receiverID > 0 {
toNotify = make(container.Set[int64], 1)
toNotify.Add(receiverID)
} else {
toNotify = make(container.Set[int64], 32)
issueWatches, err := issues_model.GetIssueWatchersIDs(ctx, issueID, true)
if err != nil {
return err
}
toNotify.AddMultiple(issueWatches...)
if !(issue.IsPull && issues_model.HasWorkInProgressPrefix(issue.Title)) {
repoWatches, err := repo_model.GetRepoWatchersIDs(ctx, issue.RepoID)
if err != nil {
return err
}
toNotify.AddMultiple(repoWatches...)
}
issueParticipants, err := issue.GetParticipantIDsByIssue(ctx)
if err != nil {
return err
}
toNotify.AddMultiple(issueParticipants...)
// dont notify user who cause notification
delete(toNotify, notificationAuthorID)
// explicit unwatch on issue
issueUnWatches, err := issues_model.GetIssueWatchersIDs(ctx, issueID, false)
if err != nil {
return err
}
for _, id := range issueUnWatches {
toNotify.Remove(id)
}
}
err = issue.LoadRepo(ctx)
if err != nil {
return err
}
// notify
for userID := range toNotify {
issue.Repo.Units = nil
user, err := user_model.GetUserByID(ctx, userID)
if err != nil {
if user_model.IsErrUserNotExist(err) {
continue
}
return err
}
if issue.IsPull && !access_model.CheckRepoUnitUser(ctx, issue.Repo, user, unit.TypePullRequests) {
continue
}
if !issue.IsPull && !access_model.CheckRepoUnitUser(ctx, issue.Repo, user, unit.TypeIssues) {
continue
}
if notificationExists(notifications, issue.ID, userID) {
if err = updateIssueNotification(ctx, userID, issue.ID, commentID, notificationAuthorID); err != nil {
return err
}
continue
}
if err = createIssueNotification(ctx, userID, issue, commentID, notificationAuthorID); err != nil {
return err
}
}
return nil
}
func createIssueNotification(ctx context.Context, userID int64, issue *issues_model.Issue, commentID, updatedByID int64) error {
notification := &Notification{
UserID: userID,
@ -449,309 +295,6 @@ func GetUIDsAndNotificationCounts(ctx context.Context, since, until timeutil.Tim
return res, db.GetEngine(ctx).SQL(sql, since, until, NotificationStatusUnread).Find(&res)
}
// NotificationList contains a list of notifications
type NotificationList []*Notification
// LoadAttributes loads Repo, Issue, User and Comment if they are not already loaded
func (nl NotificationList) LoadAttributes(ctx context.Context) error {
if _, _, err := nl.LoadRepos(ctx); err != nil {
return err
}
if _, err := nl.LoadIssues(ctx); err != nil {
return err
}
if _, err := nl.LoadUsers(ctx); err != nil {
return err
}
if _, err := nl.LoadComments(ctx); err != nil {
return err
}
return nil
}
func (nl NotificationList) getPendingRepoIDs() []int64 {
ids := make(container.Set[int64], len(nl))
for _, notification := range nl {
if notification.Repository != nil {
continue
}
ids.Add(notification.RepoID)
}
return ids.Values()
}
// LoadRepos loads repositories from database
func (nl NotificationList) LoadRepos(ctx context.Context) (repo_model.RepositoryList, []int, error) {
if len(nl) == 0 {
return repo_model.RepositoryList{}, []int{}, nil
}
repoIDs := nl.getPendingRepoIDs()
repos := make(map[int64]*repo_model.Repository, len(repoIDs))
left := len(repoIDs)
for left > 0 {
limit := db.DefaultMaxInSize
if left < limit {
limit = left
}
rows, err := db.GetEngine(ctx).
In("id", repoIDs[:limit]).
Rows(new(repo_model.Repository))
if err != nil {
return nil, nil, err
}
for rows.Next() {
var repo repo_model.Repository
err = rows.Scan(&repo)
if err != nil {
rows.Close()
return nil, nil, err
}
repos[repo.ID] = &repo
}
_ = rows.Close()
left -= limit
repoIDs = repoIDs[limit:]
}
failed := []int{}
reposList := make(repo_model.RepositoryList, 0, len(repoIDs))
for i, notification := range nl {
if notification.Repository == nil {
notification.Repository = repos[notification.RepoID]
}
if notification.Repository == nil {
log.Error("Notification[%d]: RepoID: %d not found", notification.ID, notification.RepoID)
failed = append(failed, i)
continue
}
var found bool
for _, r := range reposList {
if r.ID == notification.RepoID {
found = true
break
}
}
if !found {
reposList = append(reposList, notification.Repository)
}
}
return reposList, failed, nil
}
func (nl NotificationList) getPendingIssueIDs() []int64 {
ids := make(container.Set[int64], len(nl))
for _, notification := range nl {
if notification.Issue != nil {
continue
}
ids.Add(notification.IssueID)
}
return ids.Values()
}
// LoadIssues loads issues from database
func (nl NotificationList) LoadIssues(ctx context.Context) ([]int, error) {
if len(nl) == 0 {
return []int{}, nil
}
issueIDs := nl.getPendingIssueIDs()
issues := make(map[int64]*issues_model.Issue, len(issueIDs))
left := len(issueIDs)
for left > 0 {
limit := db.DefaultMaxInSize
if left < limit {
limit = left
}
rows, err := db.GetEngine(ctx).
In("id", issueIDs[:limit]).
Rows(new(issues_model.Issue))
if err != nil {
return nil, err
}
for rows.Next() {
var issue issues_model.Issue
err = rows.Scan(&issue)
if err != nil {
rows.Close()
return nil, err
}
issues[issue.ID] = &issue
}
_ = rows.Close()
left -= limit
issueIDs = issueIDs[limit:]
}
failures := []int{}
for i, notification := range nl {
if notification.Issue == nil {
notification.Issue = issues[notification.IssueID]
if notification.Issue == nil {
if notification.IssueID != 0 {
log.Error("Notification[%d]: IssueID: %d Not Found", notification.ID, notification.IssueID)
failures = append(failures, i)
}
continue
}
notification.Issue.Repo = notification.Repository
}
}
return failures, nil
}
// Without returns the notification list without the failures
func (nl NotificationList) Without(failures []int) NotificationList {
if len(failures) == 0 {
return nl
}
remaining := make([]*Notification, 0, len(nl))
last := -1
var i int
for _, i = range failures {
remaining = append(remaining, nl[last+1:i]...)
last = i
}
if len(nl) > i {
remaining = append(remaining, nl[i+1:]...)
}
return remaining
}
func (nl NotificationList) getPendingCommentIDs() []int64 {
ids := make(container.Set[int64], len(nl))
for _, notification := range nl {
if notification.CommentID == 0 || notification.Comment != nil {
continue
}
ids.Add(notification.CommentID)
}
return ids.Values()
}
func (nl NotificationList) getUserIDs() []int64 {
ids := make(container.Set[int64], len(nl))
for _, notification := range nl {
if notification.UserID == 0 || notification.User != nil {
continue
}
ids.Add(notification.UserID)
}
return ids.Values()
}
// LoadUsers loads users from database
func (nl NotificationList) LoadUsers(ctx context.Context) ([]int, error) {
if len(nl) == 0 {
return []int{}, nil
}
userIDs := nl.getUserIDs()
users := make(map[int64]*user_model.User, len(userIDs))
left := len(userIDs)
for left > 0 {
limit := db.DefaultMaxInSize
if left < limit {
limit = left
}
rows, err := db.GetEngine(ctx).
In("id", userIDs[:limit]).
Rows(new(user_model.User))
if err != nil {
return nil, err
}
for rows.Next() {
var user user_model.User
err = rows.Scan(&user)
if err != nil {
rows.Close()
return nil, err
}
users[user.ID] = &user
}
_ = rows.Close()
left -= limit
userIDs = userIDs[limit:]
}
failures := []int{}
for i, notification := range nl {
if notification.UserID > 0 && notification.User == nil && users[notification.UserID] != nil {
notification.User = users[notification.UserID]
if notification.User == nil {
log.Error("Notification[%d]: UserID[%d] failed to load", notification.ID, notification.UserID)
failures = append(failures, i)
continue
}
}
}
return failures, nil
}
// LoadComments loads comments from database
func (nl NotificationList) LoadComments(ctx context.Context) ([]int, error) {
if len(nl) == 0 {
return []int{}, nil
}
commentIDs := nl.getPendingCommentIDs()
comments := make(map[int64]*issues_model.Comment, len(commentIDs))
left := len(commentIDs)
for left > 0 {
limit := db.DefaultMaxInSize
if left < limit {
limit = left
}
rows, err := db.GetEngine(ctx).
In("id", commentIDs[:limit]).
Rows(new(issues_model.Comment))
if err != nil {
return nil, err
}
for rows.Next() {
var comment issues_model.Comment
err = rows.Scan(&comment)
if err != nil {
rows.Close()
return nil, err
}
comments[comment.ID] = &comment
}
_ = rows.Close()
left -= limit
commentIDs = commentIDs[limit:]
}
failures := []int{}
for i, notification := range nl {
if notification.CommentID > 0 && notification.Comment == nil && comments[notification.CommentID] != nil {
notification.Comment = comments[notification.CommentID]
if notification.Comment == nil {
log.Error("Notification[%d]: CommentID[%d] failed to load", notification.ID, notification.CommentID)
failures = append(failures, i)
continue
}
notification.Comment.Issue = notification.Issue
}
}
return failures, nil
}
// SetIssueReadBy sets issue to be read by given user.
func SetIssueReadBy(ctx context.Context, issueID, userID int64) error {
if err := issues_model.UpdateIssueUserByRead(ctx, userID, issueID); err != nil {

View File

@ -0,0 +1,501 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package activities
import (
"context"
"code.gitea.io/gitea/models/db"
issues_model "code.gitea.io/gitea/models/issues"
access_model "code.gitea.io/gitea/models/perm/access"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unit"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/util"
"xorm.io/builder"
)
// FindNotificationOptions represents the filters for notifications. If an ID is 0 it will be ignored.
type FindNotificationOptions struct {
db.ListOptions
UserID int64
RepoID int64
IssueID int64
Status []NotificationStatus
Source []NotificationSource
UpdatedAfterUnix int64
UpdatedBeforeUnix int64
}
// ToConds converts the filter options into a xorm builder.Cond
func (opts FindNotificationOptions) ToConds() builder.Cond {
cond := builder.NewCond()
if opts.UserID != 0 {
cond = cond.And(builder.Eq{"notification.user_id": opts.UserID})
}
if opts.RepoID != 0 {
cond = cond.And(builder.Eq{"notification.repo_id": opts.RepoID})
}
if opts.IssueID != 0 {
cond = cond.And(builder.Eq{"notification.issue_id": opts.IssueID})
}
if len(opts.Status) > 0 {
if len(opts.Status) == 1 {
cond = cond.And(builder.Eq{"notification.status": opts.Status[0]})
} else {
cond = cond.And(builder.In("notification.status", opts.Status))
}
}
if len(opts.Source) > 0 {
cond = cond.And(builder.In("notification.source", opts.Source))
}
if opts.UpdatedAfterUnix != 0 {
cond = cond.And(builder.Gte{"notification.updated_unix": opts.UpdatedAfterUnix})
}
if opts.UpdatedBeforeUnix != 0 {
cond = cond.And(builder.Lte{"notification.updated_unix": opts.UpdatedBeforeUnix})
}
return cond
}
func (opts FindNotificationOptions) ToOrders() string {
return "notification.updated_unix DESC"
}
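For reference, a minimal sketch of how these options are typically consumed through the generic finder; the helper name, paging and status filter are illustrative rather than part of this commit, and the return shape follows the db.Find call used further down:
// findUnreadForUser is an illustrative helper showing FindNotificationOptions in use.
func findUnreadForUser(ctx context.Context, userID int64) ([]*Notification, error) {
	return db.Find[Notification](ctx, FindNotificationOptions{
		ListOptions: db.ListOptions{Page: 1, PageSize: 20},
		UserID:      userID,
		Status:      []NotificationStatus{NotificationStatusUnread},
	})
}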
// CreateOrUpdateIssueNotifications creates an issue notification
// for each watcher, or updates it if it already exists.
// If receiverID > 0, the notification is sent only to that receiver; otherwise it goes to all watchers.
func CreateOrUpdateIssueNotifications(ctx context.Context, issueID, commentID, notificationAuthorID, receiverID int64) error {
ctx, committer, err := db.TxContext(ctx)
if err != nil {
return err
}
defer committer.Close()
if err := createOrUpdateIssueNotifications(ctx, issueID, commentID, notificationAuthorID, receiverID); err != nil {
return err
}
return committer.Commit()
}
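A minimal caller sketch for the exported entry point; the wrapper name and IDs are hypothetical, and passing 0 as receiverID broadcasts to every watcher and participant as described above:
// notifyIssueWatchers is an illustrative wrapper, not part of this commit.
func notifyIssueWatchers(ctx context.Context, issueID, commentID, doerID int64) {
	if err := CreateOrUpdateIssueNotifications(ctx, issueID, commentID, doerID, 0); err != nil {
		log.Error("CreateOrUpdateIssueNotifications: %v", err)
	}
}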
func createOrUpdateIssueNotifications(ctx context.Context, issueID, commentID, notificationAuthorID, receiverID int64) error {
// init
var toNotify container.Set[int64]
notifications, err := db.Find[Notification](ctx, FindNotificationOptions{
IssueID: issueID,
})
if err != nil {
return err
}
issue, err := issues_model.GetIssueByID(ctx, issueID)
if err != nil {
return err
}
if receiverID > 0 {
toNotify = make(container.Set[int64], 1)
toNotify.Add(receiverID)
} else {
toNotify = make(container.Set[int64], 32)
issueWatches, err := issues_model.GetIssueWatchersIDs(ctx, issueID, true)
if err != nil {
return err
}
toNotify.AddMultiple(issueWatches...)
if !(issue.IsPull && issues_model.HasWorkInProgressPrefix(issue.Title)) {
repoWatches, err := repo_model.GetRepoWatchersIDs(ctx, issue.RepoID)
if err != nil {
return err
}
toNotify.AddMultiple(repoWatches...)
}
issueParticipants, err := issue.GetParticipantIDsByIssue(ctx)
if err != nil {
return err
}
toNotify.AddMultiple(issueParticipants...)
// don't notify the user who caused the notification
delete(toNotify, notificationAuthorID)
// explicit unwatch on issue
issueUnWatches, err := issues_model.GetIssueWatchersIDs(ctx, issueID, false)
if err != nil {
return err
}
for _, id := range issueUnWatches {
toNotify.Remove(id)
}
}
err = issue.LoadRepo(ctx)
if err != nil {
return err
}
// notify
for userID := range toNotify {
issue.Repo.Units = nil
user, err := user_model.GetUserByID(ctx, userID)
if err != nil {
if user_model.IsErrUserNotExist(err) {
continue
}
return err
}
if issue.IsPull && !access_model.CheckRepoUnitUser(ctx, issue.Repo, user, unit.TypePullRequests) {
continue
}
if !issue.IsPull && !access_model.CheckRepoUnitUser(ctx, issue.Repo, user, unit.TypeIssues) {
continue
}
if notificationExists(notifications, issue.ID, userID) {
if err = updateIssueNotification(ctx, userID, issue.ID, commentID, notificationAuthorID); err != nil {
return err
}
continue
}
if err = createIssueNotification(ctx, userID, issue, commentID, notificationAuthorID); err != nil {
return err
}
}
return nil
}
// NotificationList contains a list of notifications
type NotificationList []*Notification
// LoadAttributes loads Repo, Issue, User and Comment if they are not already loaded
func (nl NotificationList) LoadAttributes(ctx context.Context) error {
if _, _, err := nl.LoadRepos(ctx); err != nil {
return err
}
if _, err := nl.LoadIssues(ctx); err != nil {
return err
}
if _, err := nl.LoadUsers(ctx); err != nil {
return err
}
if _, err := nl.LoadComments(ctx); err != nil {
return err
}
return nil
}
func (nl NotificationList) getPendingRepoIDs() []int64 {
ids := make(container.Set[int64], len(nl))
for _, notification := range nl {
if notification.Repository != nil {
continue
}
ids.Add(notification.RepoID)
}
return ids.Values()
}
// LoadRepos loads repositories from database
func (nl NotificationList) LoadRepos(ctx context.Context) (repo_model.RepositoryList, []int, error) {
if len(nl) == 0 {
return repo_model.RepositoryList{}, []int{}, nil
}
repoIDs := nl.getPendingRepoIDs()
repos := make(map[int64]*repo_model.Repository, len(repoIDs))
left := len(repoIDs)
for left > 0 {
limit := db.DefaultMaxInSize
if left < limit {
limit = left
}
rows, err := db.GetEngine(ctx).
In("id", repoIDs[:limit]).
Rows(new(repo_model.Repository))
if err != nil {
return nil, nil, err
}
for rows.Next() {
var repo repo_model.Repository
err = rows.Scan(&repo)
if err != nil {
rows.Close()
return nil, nil, err
}
repos[repo.ID] = &repo
}
_ = rows.Close()
left -= limit
repoIDs = repoIDs[limit:]
}
failed := []int{}
reposList := make(repo_model.RepositoryList, 0, len(repoIDs))
for i, notification := range nl {
if notification.Repository == nil {
notification.Repository = repos[notification.RepoID]
}
if notification.Repository == nil {
log.Error("Notification[%d]: RepoID: %d not found", notification.ID, notification.RepoID)
failed = append(failed, i)
continue
}
var found bool
for _, r := range reposList {
if r.ID == notification.RepoID {
found = true
break
}
}
if !found {
reposList = append(reposList, notification.Repository)
}
}
return reposList, failed, nil
}
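LoadRepos above and the loaders that follow all chunk their ID lists so that no single query exceeds db.DefaultMaxInSize; the same left/limit loop appears four times in this file. A generic sketch of that batching step, with chunkIDs being an illustrative helper rather than part of this commit:
// chunkIDs splits ids into slices of at most db.DefaultMaxInSize elements,
// mirroring the left/limit loop used by the loaders in this file.
func chunkIDs(ids []int64) [][]int64 {
	chunks := make([][]int64, 0, len(ids)/db.DefaultMaxInSize+1)
	for len(ids) > 0 {
		limit := db.DefaultMaxInSize
		if len(ids) < limit {
			limit = len(ids)
		}
		chunks = append(chunks, ids[:limit])
		ids = ids[limit:]
	}
	return chunks
}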
func (nl NotificationList) getPendingIssueIDs() []int64 {
ids := make(container.Set[int64], len(nl))
for _, notification := range nl {
if notification.Issue != nil {
continue
}
ids.Add(notification.IssueID)
}
return ids.Values()
}
// LoadIssues loads issues from database
func (nl NotificationList) LoadIssues(ctx context.Context) ([]int, error) {
if len(nl) == 0 {
return []int{}, nil
}
issueIDs := nl.getPendingIssueIDs()
issues := make(map[int64]*issues_model.Issue, len(issueIDs))
left := len(issueIDs)
for left > 0 {
limit := db.DefaultMaxInSize
if left < limit {
limit = left
}
rows, err := db.GetEngine(ctx).
In("id", issueIDs[:limit]).
Rows(new(issues_model.Issue))
if err != nil {
return nil, err
}
for rows.Next() {
var issue issues_model.Issue
err = rows.Scan(&issue)
if err != nil {
rows.Close()
return nil, err
}
issues[issue.ID] = &issue
}
_ = rows.Close()
left -= limit
issueIDs = issueIDs[limit:]
}
failures := []int{}
for i, notification := range nl {
if notification.Issue == nil {
notification.Issue = issues[notification.IssueID]
if notification.Issue == nil {
if notification.IssueID != 0 {
log.Error("Notification[%d]: IssueID: %d Not Found", notification.ID, notification.IssueID)
failures = append(failures, i)
}
continue
}
notification.Issue.Repo = notification.Repository
}
}
return failures, nil
}
// Without returns the notification list without the failures
func (nl NotificationList) Without(failures []int) NotificationList {
if len(failures) == 0 {
return nl
}
remaining := make([]*Notification, 0, len(nl))
last := -1
var i int
for _, i = range failures {
remaining = append(remaining, nl[last+1:i]...)
last = i
}
if len(nl) > i {
remaining = append(remaining, nl[i+1:]...)
}
return remaining
}
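A quick worked example of the index bookkeeping above: with five notifications and failures [1, 3], the appends cover nl[0:1], nl[2:3] and nl[4:], so the elements at indexes 0, 2 and 4 survive. An illustrative snippet, not part of this commit:
func withoutExample() {
	nl := NotificationList{&Notification{}, &Notification{}, &Notification{}, &Notification{}, &Notification{}}
	remaining := nl.Without([]int{1, 3})
	// remaining now holds the original elements at indexes 0, 2 and 4.
	_ = remaining
}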
func (nl NotificationList) getPendingCommentIDs() []int64 {
ids := make(container.Set[int64], len(nl))
for _, notification := range nl {
if notification.CommentID == 0 || notification.Comment != nil {
continue
}
ids.Add(notification.CommentID)
}
return ids.Values()
}
func (nl NotificationList) getUserIDs() []int64 {
ids := make(container.Set[int64], len(nl))
for _, notification := range nl {
if notification.UserID == 0 || notification.User != nil {
continue
}
ids.Add(notification.UserID)
}
return ids.Values()
}
// LoadUsers loads users from database
func (nl NotificationList) LoadUsers(ctx context.Context) ([]int, error) {
if len(nl) == 0 {
return []int{}, nil
}
userIDs := nl.getUserIDs()
users := make(map[int64]*user_model.User, len(userIDs))
left := len(userIDs)
for left > 0 {
limit := db.DefaultMaxInSize
if left < limit {
limit = left
}
rows, err := db.GetEngine(ctx).
In("id", userIDs[:limit]).
Rows(new(user_model.User))
if err != nil {
return nil, err
}
for rows.Next() {
var user user_model.User
err = rows.Scan(&user)
if err != nil {
rows.Close()
return nil, err
}
users[user.ID] = &user
}
_ = rows.Close()
left -= limit
userIDs = userIDs[limit:]
}
failures := []int{}
for i, notification := range nl {
if notification.UserID > 0 && notification.User == nil && users[notification.UserID] != nil {
notification.User = users[notification.UserID]
if notification.User == nil {
log.Error("Notification[%d]: UserID[%d] failed to load", notification.ID, notification.UserID)
failures = append(failures, i)
continue
}
}
}
return failures, nil
}
// LoadComments loads comments from database
func (nl NotificationList) LoadComments(ctx context.Context) ([]int, error) {
if len(nl) == 0 {
return []int{}, nil
}
commentIDs := nl.getPendingCommentIDs()
comments := make(map[int64]*issues_model.Comment, len(commentIDs))
left := len(commentIDs)
for left > 0 {
limit := db.DefaultMaxInSize
if left < limit {
limit = left
}
rows, err := db.GetEngine(ctx).
In("id", commentIDs[:limit]).
Rows(new(issues_model.Comment))
if err != nil {
return nil, err
}
for rows.Next() {
var comment issues_model.Comment
err = rows.Scan(&comment)
if err != nil {
rows.Close()
return nil, err
}
comments[comment.ID] = &comment
}
_ = rows.Close()
left -= limit
commentIDs = commentIDs[limit:]
}
failures := []int{}
for i, notification := range nl {
if notification.CommentID > 0 && notification.Comment == nil && comments[notification.CommentID] != nil {
notification.Comment = comments[notification.CommentID]
if notification.Comment == nil {
log.Error("Notification[%d]: CommentID[%d] failed to load", notification.ID, notification.CommentID)
failures = append(failures, i)
continue
}
notification.Comment.Issue = notification.Issue
}
}
return failures, nil
}
// LoadIssuePullRequests loads all issues' pull requests if possible
func (nl NotificationList) LoadIssuePullRequests(ctx context.Context) error {
issues := make(map[int64]*issues_model.Issue, len(nl))
for _, notification := range nl {
if notification.Issue != nil && notification.Issue.IsPull && notification.Issue.PullRequest == nil {
issues[notification.Issue.ID] = notification.Issue
}
}
if len(issues) == 0 {
return nil
}
pulls, err := issues_model.GetPullRequestByIssueIDs(ctx, util.KeysOfMap(issues))
if err != nil {
return err
}
for _, pull := range pulls {
if issue := issues[pull.IssueID]; issue != nil {
issue.PullRequest = pull
issue.PullRequest.Issue = issue
}
}
return nil
}

View File

@ -9,6 +9,7 @@ import (
asymkey_model "code.gitea.io/gitea/models/asymkey"
"code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/models/db"
git_model "code.gitea.io/gitea/models/git"
issues_model "code.gitea.io/gitea/models/issues"
"code.gitea.io/gitea/models/organization"
access_model "code.gitea.io/gitea/models/perm/access"
@ -29,7 +30,8 @@ type Statistic struct {
Mirror, Release, AuthSource, Webhook,
Milestone, Label, HookTask,
Team, UpdateTask, Project,
ProjectBoard, Attachment int64
ProjectBoard, Attachment,
Branches, Tags, CommitStatus int64
IssueByLabel []IssueByLabelCount
IssueByRepository []IssueByRepositoryCount
}
@ -58,6 +60,9 @@ func GetStatistic(ctx context.Context) (stats Statistic) {
stats.Counter.Watch, _ = e.Count(new(repo_model.Watch))
stats.Counter.Star, _ = e.Count(new(repo_model.Star))
stats.Counter.Access, _ = e.Count(new(access_model.Access))
stats.Counter.Branches, _ = e.Count(new(git_model.Branch))
stats.Counter.Tags, _ = e.Where("is_draft=?", false).Count(new(repo_model.Release))
stats.Counter.CommitStatus, _ = e.Count(new(git_model.CommitStatus))
type IssueCount struct {
Count int64

View File

@ -152,6 +152,10 @@ func RegeneratePublicKeys(ctx context.Context, t io.StringWriter) error {
return err
}
}
err = scanner.Err()
if err != nil {
return fmt.Errorf("scan: %w", err)
}
f.Close()
}
return nil
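The added scanner.Err check closes a silent-failure gap: bufio.Scanner simply stops the loop on a read error, so the error has to be consulted after the loop. A self-contained sketch of the full idiom, using only bufio, fmt and io from the standard library:
// readLines demonstrates the bufio.Scanner pattern adopted above.
func readLines(r io.Reader) ([]string, error) {
	var lines []string
	scanner := bufio.NewScanner(r)
	for scanner.Scan() {
		lines = append(lines, scanner.Text())
	}
	// Scan returns false on both EOF and read errors; only Err tells them apart.
	if err := scanner.Err(); err != nil {
		return nil, fmt.Errorf("scan: %w", err)
	}
	return lines, nil
}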

View File

@ -75,3 +75,11 @@
content: "comment in private pository"
created_unix: 946684811
updated_unix: 946684811
-
id: 9
type: 22 # review
poster_id: 2
issue_id: 2 # in repo_id 1
review_id: 20
created_unix: 946684810

View File

@ -170,3 +170,12 @@
content: "review request for user15"
updated_unix: 946684835
created_unix: 946684835
-
id: 20
type: 22
reviewer_id: 1
issue_id: 2
content: "Review Comment"
updated_unix: 946684810
created_unix: 946684810

View File

@ -673,7 +673,8 @@ func (c *Comment) LoadTime(ctx context.Context) error {
return err
}
func (c *Comment) loadReactions(ctx context.Context, repo *repo_model.Repository) (err error) {
// LoadReactions loads comment reactions
func (c *Comment) LoadReactions(ctx context.Context, repo *repo_model.Repository) (err error) {
if c.Reactions != nil {
return nil
}
@ -691,11 +692,6 @@ func (c *Comment) loadReactions(ctx context.Context, repo *repo_model.Repository
return nil
}
// LoadReactions loads comment reactions
func (c *Comment) LoadReactions(ctx context.Context, repo *repo_model.Repository) error {
return c.loadReactions(ctx, repo)
}
func (c *Comment) loadReview(ctx context.Context) (err error) {
if c.ReviewID == 0 {
return nil

View File

@ -122,7 +122,7 @@ func findCodeComments(ctx context.Context, opts FindCommentsOptions, issue *Issu
}
// FetchCodeCommentsByLine fetches the code comments for a given treePath and line number
func FetchCodeCommentsByLine(ctx context.Context, issue *Issue, currentUser *user_model.User, treePath string, line int64, showOutdatedComments bool) ([]*Comment, error) {
func FetchCodeCommentsByLine(ctx context.Context, issue *Issue, currentUser *user_model.User, treePath string, line int64, showOutdatedComments bool) (CommentList, error) {
opts := FindCommentsOptions{
Type: CommentTypeCode,
IssueID: issue.ID,

View File

@ -19,7 +19,9 @@ type CommentList []*Comment
func (comments CommentList) getPosterIDs() []int64 {
posterIDs := make(container.Set[int64], len(comments))
for _, comment := range comments {
posterIDs.Add(comment.PosterID)
if comment.PosterID > 0 {
posterIDs.Add(comment.PosterID)
}
}
return posterIDs.Values()
}
@ -41,18 +43,12 @@ func (comments CommentList) LoadPosters(ctx context.Context) error {
return nil
}
func (comments CommentList) getCommentIDs() []int64 {
ids := make([]int64, 0, len(comments))
for _, comment := range comments {
ids = append(ids, comment.ID)
}
return ids
}
func (comments CommentList) getLabelIDs() []int64 {
ids := make(container.Set[int64], len(comments))
for _, comment := range comments {
ids.Add(comment.LabelID)
if comment.LabelID > 0 {
ids.Add(comment.LabelID)
}
}
return ids.Values()
}
@ -100,7 +96,9 @@ func (comments CommentList) loadLabels(ctx context.Context) error {
func (comments CommentList) getMilestoneIDs() []int64 {
ids := make(container.Set[int64], len(comments))
for _, comment := range comments {
ids.Add(comment.MilestoneID)
if comment.MilestoneID > 0 {
ids.Add(comment.MilestoneID)
}
}
return ids.Values()
}
@ -141,7 +139,9 @@ func (comments CommentList) loadMilestones(ctx context.Context) error {
func (comments CommentList) getOldMilestoneIDs() []int64 {
ids := make(container.Set[int64], len(comments))
for _, comment := range comments {
ids.Add(comment.OldMilestoneID)
if comment.OldMilestoneID > 0 {
ids.Add(comment.OldMilestoneID)
}
}
return ids.Values()
}
@ -182,7 +182,9 @@ func (comments CommentList) loadOldMilestones(ctx context.Context) error {
func (comments CommentList) getAssigneeIDs() []int64 {
ids := make(container.Set[int64], len(comments))
for _, comment := range comments {
ids.Add(comment.AssigneeID)
if comment.AssigneeID > 0 {
ids.Add(comment.AssigneeID)
}
}
return ids.Values()
}
@ -314,7 +316,9 @@ func (comments CommentList) getDependentIssueIDs() []int64 {
if comment.DependentIssue != nil {
continue
}
ids.Add(comment.DependentIssueID)
if comment.DependentIssueID > 0 {
ids.Add(comment.DependentIssueID)
}
}
return ids.Values()
}
@ -369,6 +373,41 @@ func (comments CommentList) loadDependentIssues(ctx context.Context) error {
return nil
}
// getAttachmentCommentIDs only returns the IDs of comments that may have attachments
func (comments CommentList) getAttachmentCommentIDs() []int64 {
ids := make(container.Set[int64], len(comments))
for _, comment := range comments {
if comment.Type == CommentTypeComment ||
comment.Type == CommentTypeReview ||
comment.Type == CommentTypeCode {
ids.Add(comment.ID)
}
}
return ids.Values()
}
// LoadAttachmentsByIssue loads attachments by issue id
func (comments CommentList) LoadAttachmentsByIssue(ctx context.Context) error {
if len(comments) == 0 {
return nil
}
attachments := make([]*repo_model.Attachment, 0, len(comments)/2)
if err := db.GetEngine(ctx).Where("issue_id=? AND comment_id>0", comments[0].IssueID).Find(&attachments); err != nil {
return err
}
commentAttachmentsMap := make(map[int64][]*repo_model.Attachment, len(comments))
for _, attach := range attachments {
commentAttachmentsMap[attach.CommentID] = append(commentAttachmentsMap[attach.CommentID], attach)
}
for _, comment := range comments {
comment.Attachments = commentAttachmentsMap[comment.ID]
}
return nil
}
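A sketch of how the new bulk loader might be combined with FetchCodeCommentsByLine (changed later in this commit to return a CommentList); the helper name, tree path and line number are hypothetical:
// loadCodeCommentAttachments is illustrative only, not part of this commit.
func loadCodeCommentAttachments(ctx context.Context, issue *Issue, doer *user_model.User) (CommentList, error) {
	comments, err := FetchCodeCommentsByLine(ctx, issue, doer, "README.md", 5, false)
	if err != nil {
		return nil, err
	}
	// One query for the whole issue instead of one per comment.
	if err := comments.LoadAttachmentsByIssue(ctx); err != nil {
		return nil, err
	}
	return comments, nil
}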
// LoadAttachments loads attachments
func (comments CommentList) LoadAttachments(ctx context.Context) (err error) {
if len(comments) == 0 {
@ -376,16 +415,15 @@ func (comments CommentList) LoadAttachments(ctx context.Context) (err error) {
}
attachments := make(map[int64][]*repo_model.Attachment, len(comments))
commentsIDs := comments.getCommentIDs()
commentsIDs := comments.getAttachmentCommentIDs()
left := len(commentsIDs)
for left > 0 {
limit := db.DefaultMaxInSize
if left < limit {
limit = left
}
rows, err := db.GetEngine(ctx).Table("attachment").
Join("INNER", "comment", "comment.id = attachment.comment_id").
In("comment.id", commentsIDs[:limit]).
rows, err := db.GetEngine(ctx).
In("comment_id", commentsIDs[:limit]).
Rows(new(repo_model.Attachment))
if err != nil {
return err
@ -415,7 +453,9 @@ func (comments CommentList) LoadAttachments(ctx context.Context) (err error) {
func (comments CommentList) getReviewIDs() []int64 {
ids := make(container.Set[int64], len(comments))
for _, comment := range comments {
ids.Add(comment.ReviewID)
if comment.ReviewID > 0 {
ids.Add(comment.ReviewID)
}
}
return ids.Values()
}

View File

@ -193,20 +193,6 @@ func (issue *Issue) IsTimetrackerEnabled(ctx context.Context) bool {
return issue.Repo.IsTimetrackerEnabled(ctx)
}
// GetPullRequest returns the issue pull request
func (issue *Issue) GetPullRequest(ctx context.Context) (pr *PullRequest, err error) {
if !issue.IsPull {
return nil, fmt.Errorf("Issue is not a pull request")
}
pr, err = GetPullRequestByIssueID(ctx, issue.ID)
if err != nil {
return nil, err
}
pr.Issue = issue
return pr, err
}
// LoadPoster loads poster
func (issue *Issue) LoadPoster(ctx context.Context) (err error) {
if issue.Poster == nil && issue.PosterID != 0 {

View File

@ -370,6 +370,9 @@ func (issues IssueList) LoadPullRequests(ctx context.Context) error {
for _, issue := range issues {
issue.PullRequest = pullRequestMaps[issue.ID]
if issue.PullRequest != nil {
issue.PullRequest.Issue = issue
}
}
return nil
}
@ -388,9 +391,8 @@ func (issues IssueList) LoadAttachments(ctx context.Context) (err error) {
if left < limit {
limit = left
}
rows, err := db.GetEngine(ctx).Table("attachment").
Join("INNER", "issue", "issue.id = attachment.issue_id").
In("issue.id", issuesIDs[:limit]).
rows, err := db.GetEngine(ctx).
In("issue_id", issuesIDs[:limit]).
Rows(new(repo_model.Attachment))
if err != nil {
return err
@ -476,6 +478,16 @@ func (issues IssueList) loadTotalTrackedTimes(ctx context.Context) (err error) {
}
trackedTimes := make(map[int64]int64, len(issues))
reposMap := make(map[int64]*repo_model.Repository, len(issues))
for _, issue := range issues {
reposMap[issue.RepoID] = issue.Repo
}
repos := repo_model.RepositoryListOfMap(reposMap)
if err := repos.LoadUnits(ctx); err != nil {
return err
}
ids := make([]int64, 0, len(issues))
for _, issue := range issues {
if issue.Repo.IsTimetrackerEnabled(ctx) {
@ -599,3 +611,23 @@ func (issues IssueList) GetApprovalCounts(ctx context.Context) (map[int64][]*Rev
return approvalCountMap, nil
}
func (issues IssueList) LoadIsRead(ctx context.Context, userID int64) error {
issueIDs := issues.getIssueIDs()
issueUsers := make([]*IssueUser, 0, len(issueIDs))
if err := db.GetEngine(ctx).Where("uid =?", userID).
In("issue_id", issueIDs).
Find(&issueUsers); err != nil {
return err
}
for _, issueUser := range issueUsers {
for _, issue := range issues {
if issue.ID == issueUser.IssueID {
issue.IsRead = issueUser.IsRead
}
}
}
return nil
}

View File

@ -393,7 +393,7 @@ func applyReviewRequestedCondition(sess *xorm.Session, reviewRequestedID int64)
func applyReviewedCondition(sess *xorm.Session, reviewedID int64) *xorm.Session {
// Query for pull requests where you are a reviewer or commenter, excluding
// any pull requests already returned by the the review requested filter.
// any pull requests already returned by the review requested filter.
notPoster := builder.Neq{"issue.poster_id": reviewedID}
reviewed := builder.In("issue.id", builder.
Select("issue_id").

View File

@ -116,12 +116,17 @@ func (l *Label) CalOpenIssues() {
func (l *Label) SetArchived(isArchived bool) {
if !isArchived {
l.ArchivedUnix = timeutil.TimeStamp(0)
} else if isArchived && l.ArchivedUnix.IsZero() {
} else if isArchived && !l.IsArchived() {
// Only change the date when it is newly archived.
l.ArchivedUnix = timeutil.TimeStampNow()
}
}
// IsArchived returns true if the label is archived
func (l *Label) IsArchived() bool {
return !l.ArchivedUnix.IsZero()
}
// CalOpenOrgIssues calculates the open issues of a label for a specific repo
func (l *Label) CalOpenOrgIssues(ctx context.Context, repoID, labelID int64) {
counts, _ := CountIssuesByRepo(ctx, &IssuesOptions{
@ -166,11 +171,6 @@ func (l *Label) BelongsToOrg() bool {
return l.OrgID > 0
}
// IsArchived returns true if the label is archived
func (l *Label) IsArchived() bool {
return l.ArchivedUnix > 0
}
// BelongsToRepo returns true if label is a repository label
func (l *Label) BelongsToRepo() bool {
return l.RepoID > 0

View File

@ -19,7 +19,6 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
@ -884,77 +883,6 @@ func MergeBlockedByOutdatedBranch(protectBranch *git_model.ProtectedBranch, pr *
return protectBranch.BlockOnOutdatedBranch && pr.CommitsBehind > 0
}
func PullRequestCodeOwnersReview(ctx context.Context, pull *Issue, pr *PullRequest) error {
files := []string{"CODEOWNERS", "docs/CODEOWNERS", ".gitea/CODEOWNERS"}
if pr.IsWorkInProgress(ctx) {
return nil
}
if err := pr.LoadBaseRepo(ctx); err != nil {
return err
}
repo, err := gitrepo.OpenRepository(ctx, pr.BaseRepo)
if err != nil {
return err
}
defer repo.Close()
commit, err := repo.GetBranchCommit(pr.BaseRepo.DefaultBranch)
if err != nil {
return err
}
var data string
for _, file := range files {
if blob, err := commit.GetBlobByPath(file); err == nil {
data, err = blob.GetBlobContent(setting.UI.MaxDisplayFileSize)
if err == nil {
break
}
}
}
rules, _ := GetCodeOwnersFromContent(ctx, data)
changedFiles, err := repo.GetFilesChangedBetween(git.BranchPrefix+pr.BaseBranch, pr.GetGitRefName())
if err != nil {
return err
}
uniqUsers := make(map[int64]*user_model.User)
uniqTeams := make(map[string]*org_model.Team)
for _, rule := range rules {
for _, f := range changedFiles {
if (rule.Rule.MatchString(f) && !rule.Negative) || (!rule.Rule.MatchString(f) && rule.Negative) {
for _, u := range rule.Users {
uniqUsers[u.ID] = u
}
for _, t := range rule.Teams {
uniqTeams[fmt.Sprintf("%d/%d", t.OrgID, t.ID)] = t
}
}
}
}
for _, u := range uniqUsers {
if u.ID != pull.Poster.ID {
if _, err := AddReviewRequest(ctx, pull, u, pull.Poster); err != nil {
log.Warn("Failed add assignee user: %s to PR review: %s#%d, error: %s", u.Name, pr.BaseRepo.Name, pr.ID, err)
return err
}
}
}
for _, t := range uniqTeams {
if _, err := AddTeamReviewRequest(ctx, pull, t, pull.Poster); err != nil {
log.Warn("Failed add assignee team: %s to PR review: %s#%d, error: %s", t.Name, pr.BaseRepo.Name, pr.ID, err)
return err
}
}
return nil
}
// GetCodeOwnersFromContent returns the code owners configuration
// Return empty slice if files missing
// Return warning messages on parsing errors

View File

@ -212,3 +212,12 @@ func HasMergedPullRequestInRepo(ctx context.Context, repoID, posterID int64) (bo
Limit(1).
Get(new(Issue))
}
// GetPullRequestByIssueIDs returns all pull requests by issue ids
func GetPullRequestByIssueIDs(ctx context.Context, issueIDs []int64) (PullRequestList, error) {
prs := make([]*PullRequest, 0, len(issueIDs))
return prs, db.GetEngine(ctx).
Where("issue_id > 0").
In("issue_id", issueIDs).
Find(&prs)
}

View File

@ -239,11 +239,11 @@ type CreateReviewOptions struct {
// IsOfficialReviewer checks if at least one of the provided reviewers can make official reviews in the issue (counts towards required approvals)
func IsOfficialReviewer(ctx context.Context, issue *Issue, reviewer *user_model.User) (bool, error) {
pr, err := GetPullRequestByIssueID(ctx, issue.ID)
if err != nil {
if err := issue.LoadPullRequest(ctx); err != nil {
return false, err
}
pr := issue.PullRequest
rule, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
if err != nil {
return false, err
@ -271,11 +271,10 @@ func IsOfficialReviewer(ctx context.Context, issue *Issue, reviewer *user_model.
// IsOfficialReviewerTeam checks if a reviewer in this team can make official reviews in the issue (counts towards required approvals)
func IsOfficialReviewerTeam(ctx context.Context, issue *Issue, team *organization.Team) (bool, error) {
pr, err := GetPullRequestByIssueID(ctx, issue.ID)
if err != nil {
if err := issue.LoadPullRequest(ctx); err != nil {
return false, err
}
pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, issue.PullRequest.BaseRepoID, issue.PullRequest.BaseBranch)
if err != nil {
return false, err
}

View File

@ -566,6 +566,8 @@ var migrations = []Migration{
NewMigration("Add default_wiki_branch to repository table", v1_22.AddDefaultWikiBranch),
// v290 -> v291
NewMigration("Add PayloadVersion to HookTask", v1_22.AddPayloadVersionToHookTaskTable),
// v291 -> v292
NewMigration("Add Index to attachment.comment_id", v1_22.AddCommentIDIndexofAttachment),
}
// GetCurrentDBVersion returns the current db version

View File

@ -0,0 +1,14 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package v1_22 //nolint
import "xorm.io/xorm"
func AddCommentIDIndexofAttachment(x *xorm.Engine) error {
type Attachment struct {
CommentID int64 `xorm:"INDEX"`
}
return x.Sync(&Attachment{})
}

View File

@ -24,7 +24,7 @@ type Attachment struct {
IssueID int64 `xorm:"INDEX"` // maybe zero when creating
ReleaseID int64 `xorm:"INDEX"` // maybe zero when creating
UploaderID int64 `xorm:"INDEX DEFAULT 0"` // Notice: will be zero before this column added
CommentID int64
CommentID int64 `xorm:"INDEX"`
Name string
DownloadCount int64 `xorm:"DEFAULT 0"`
Size int64 `xorm:"DEFAULT 0"`

View File

@ -531,6 +531,9 @@ func (repo *Repository) GetBaseRepo(ctx context.Context) (err error) {
return nil
}
if repo.BaseRepo != nil {
return nil
}
repo.BaseRepo, err = GetRepositoryByID(ctx, repo.ForkID)
return err
}

View File

@ -63,6 +63,41 @@ func RepositoryListOfMap(repoMap map[int64]*Repository) RepositoryList {
return RepositoryList(ValuesRepository(repoMap))
}
func (repos RepositoryList) LoadUnits(ctx context.Context) error {
if len(repos) == 0 {
return nil
}
// Load units.
units := make([]*RepoUnit, 0, len(repos)*6)
if err := db.GetEngine(ctx).
In("repo_id", repos.IDs()).
Find(&units); err != nil {
return fmt.Errorf("find units: %w", err)
}
unitsMap := make(map[int64][]*RepoUnit, len(repos))
for _, unit := range units {
if !unit.Type.UnitGlobalDisabled() {
unitsMap[unit.RepoID] = append(unitsMap[unit.RepoID], unit)
}
}
for _, repo := range repos {
repo.Units = unitsMap[repo.ID]
}
return nil
}
func (repos RepositoryList) IDs() []int64 {
repoIDs := make([]int64, len(repos))
for i := range repos {
repoIDs[i] = repos[i].ID
}
return repoIDs
}
// LoadAttributes loads the attributes for the given RepositoryList
func (repos RepositoryList) LoadAttributes(ctx context.Context) error {
if len(repos) == 0 {

View File

@ -434,7 +434,7 @@ func SearchEmails(ctx context.Context, opts *SearchEmailOptions) ([]*SearchEmail
cond = cond.And(builder.Eq{"email_address.is_activated": opts.IsActivated.Value()})
}
count, err := db.GetEngine(ctx).Join("INNER", "`user`", "`user`.ID = email_address.uid").
count, err := db.GetEngine(ctx).Join("INNER", "`user`", "`user`.id = email_address.uid").
Where(cond).Count(new(EmailAddress))
if err != nil {
return nil, 0, fmt.Errorf("Count: %w", err)
@ -450,7 +450,7 @@ func SearchEmails(ctx context.Context, opts *SearchEmailOptions) ([]*SearchEmail
emails := make([]*SearchEmailResult, 0, opts.PageSize)
err = db.GetEngine(ctx).Table("email_address").
Select("email_address.*, `user`.name, `user`.full_name").
Join("INNER", "`user`", "`user`.ID = email_address.uid").
Join("INNER", "`user`", "`user`.id = email_address.uid").
Where(cond).
OrderBy(orderby).
Limit(opts.PageSize, (opts.Page-1)*opts.PageSize).
@ -539,17 +539,17 @@ func validateEmailBasic(email string) error {
// validateEmailDomain checks whether the email domain is allowed or blocked
func validateEmailDomain(email string) error {
// if there is no allow list, then check email against block list
if len(setting.Service.EmailDomainAllowList) == 0 &&
validation.IsEmailDomainListed(setting.Service.EmailDomainBlockList, email) {
return ErrEmailInvalid{email}
}
// if there is an allow list, then check email against allow list
if len(setting.Service.EmailDomainAllowList) > 0 &&
!validation.IsEmailDomainListed(setting.Service.EmailDomainAllowList, email) {
if !IsEmailDomainAllowed(email) {
return ErrEmailInvalid{email}
}
return nil
}
func IsEmailDomainAllowed(email string) bool {
if len(setting.Service.EmailDomainAllowList) == 0 {
return !validation.IsEmailDomainListed(setting.Service.EmailDomainBlockList, email)
}
return validation.IsEmailDomainListed(setting.Service.EmailDomainAllowList, email)
}
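The refactor concentrates the decision in IsEmailDomainAllowed: with no allow list configured, an address only has to stay off the block list; once an allow list exists, it becomes the sole authority. A standalone sketch of that decision using plain string matching, whereas the real code matches glob patterns via validation.IsEmailDomainListed:
// emailDomainAllowed is an illustrative stand-in, not Gitea code.
func emailDomainAllowed(domain string, allowList, blockList []string) bool {
	contains := func(list []string, s string) bool {
		for _, v := range list {
			if v == s {
				return true
			}
		}
		return false
	}
	if len(allowList) == 0 {
		return !contains(blockList, domain)
	}
	return contains(allowList, domain)
}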

View File

@ -425,7 +425,7 @@ func (u *User) GetDisplayName() string {
return u.Name
}
// GetCompleteName returns the the full name and username in the form of
// GetCompleteName returns the full name and username in the form of
// "Full Name (username)" if full name is not empty, otherwise it returns
// "username".
func (u *User) GetCompleteName() string {

View File

@ -41,6 +41,12 @@ func FullSteps(task *actions_model.ActionTask) []*actions_model.ActionTaskStep {
}
logIndex += preStep.LogLength
// lastHasRunStep is the last step that has run.
// For example,
// 1. preStep(Success) -> step1(Success) -> step2(Running) -> step3(Waiting) -> postStep(Waiting): lastHasRunStep is step1.
// 2. preStep(Success) -> step1(Success) -> step2(Success) -> step3(Success) -> postStep(Success): lastHasRunStep is step3.
// 3. preStep(Success) -> step1(Success) -> step2(Failure) -> step3 -> postStep(Waiting): lastHasRunStep is step2.
// So its Stopped time is used as postStep's Started time when there are no more steps to run.
var lastHasRunStep *actions_model.ActionTaskStep
for _, step := range task.Steps {
if step.Status.HasRun() {
@ -56,11 +62,15 @@ func FullSteps(task *actions_model.ActionTask) []*actions_model.ActionTaskStep {
Name: postStepName,
Status: actions_model.StatusWaiting,
}
if task.Status.IsDone() {
// If the lastHasRunStep is the last step, or it has failed, postStep has started.
if lastHasRunStep.Status.IsFailure() || lastHasRunStep == task.Steps[len(task.Steps)-1] {
postStep.LogIndex = logIndex
postStep.LogLength = task.LogLength - postStep.LogIndex
postStep.Status = task.Status
postStep.Started = lastHasRunStep.Stopped
postStep.Status = actions_model.StatusRunning
}
if task.Status.IsDone() {
postStep.Status = task.Status
postStep.Stopped = task.Stopped
}
ret := make([]*actions_model.ActionTaskStep, 0, len(task.Steps)+2)

View File

@ -103,6 +103,40 @@ func TestFullSteps(t *testing.T) {
{Name: postStepName, Status: actions_model.StatusSuccess, LogIndex: 100, LogLength: 0, Started: 10100, Stopped: 10100},
},
},
{
name: "all steps finished but task is running",
task: &actions_model.ActionTask{
Steps: []*actions_model.ActionTaskStep{
{Status: actions_model.StatusSuccess, LogIndex: 10, LogLength: 80, Started: 10010, Stopped: 10090},
},
Status: actions_model.StatusRunning,
Started: 10000,
Stopped: 0,
LogLength: 100,
},
want: []*actions_model.ActionTaskStep{
{Name: preStepName, Status: actions_model.StatusSuccess, LogIndex: 0, LogLength: 10, Started: 10000, Stopped: 10010},
{Status: actions_model.StatusSuccess, LogIndex: 10, LogLength: 80, Started: 10010, Stopped: 10090},
{Name: postStepName, Status: actions_model.StatusRunning, LogIndex: 90, LogLength: 10, Started: 10090, Stopped: 0},
},
},
{
name: "skipped task",
task: &actions_model.ActionTask{
Steps: []*actions_model.ActionTaskStep{
{Status: actions_model.StatusSkipped, LogIndex: 0, LogLength: 0, Started: 0, Stopped: 0},
},
Status: actions_model.StatusSkipped,
Started: 0,
Stopped: 0,
LogLength: 0,
},
want: []*actions_model.ActionTaskStep{
{Name: preStepName, Status: actions_model.StatusSkipped, LogIndex: 0, LogLength: 0, Started: 0, Stopped: 0},
{Status: actions_model.StatusSkipped, LogIndex: 0, LogLength: 0, Started: 0, Stopped: 0},
{Name: postStepName, Status: actions_model.StatusSkipped, LogIndex: 0, LogLength: 0, Started: 0, Stopped: 0},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {

View File

@ -118,11 +118,12 @@ func TestReadingBlameOutputSha256(t *testing.T) {
},
}
objectFormat, err := repo.GetObjectFormat()
assert.NoError(t, err)
for _, c := range cases {
commit, err := repo.GetCommit(c.CommitID)
assert.NoError(t, err)
blameReader, err := CreateBlameReader(ctx, repo.objectFormat, "./tests/repos/repo6_blame_sha256", commit, "blame.txt", c.Bypass)
blameReader, err := CreateBlameReader(ctx, objectFormat, "./tests/repos/repo6_blame_sha256", commit, "blame.txt", c.Bypass)
assert.NoError(t, err)
assert.NotNil(t, blameReader)
defer blameReader.Close()

View File

@ -118,11 +118,13 @@ func TestReadingBlameOutput(t *testing.T) {
},
}
objectFormat, err := repo.GetObjectFormat()
assert.NoError(t, err)
for _, c := range cases {
commit, err := repo.GetCommit(c.CommitID)
assert.NoError(t, err)
blameReader, err := CreateBlameReader(ctx, repo.objectFormat, "./tests/repos/repo6_blame", commit, "blame.txt", c.Bypass)
blameReader, err := CreateBlameReader(ctx, objectFormat, "./tests/repos/repo6_blame", commit, "blame.txt", c.Bypass)
assert.NoError(t, err)
assert.NotNil(t, blameReader)
defer blameReader.Close()

View File

@ -9,6 +9,7 @@ import (
"bytes"
"context"
"errors"
"fmt"
"io"
"os/exec"
"strconv"
@ -311,7 +312,7 @@ func (c *Commit) GetFilesChangedSinceCommit(pastCommit string) ([]string, error)
return c.repo.GetFilesChangedBetween(pastCommit, c.ID.String())
}
// FileChangedSinceCommit Returns true if the file given has changed since the the past commit
// FileChangedSinceCommit Returns true if the file given has changed since the past commit
// YOU MUST ENSURE THAT pastCommit is a valid commit ID.
func (c *Commit) FileChangedSinceCommit(filename, pastCommit string) (bool, error) {
return c.repo.FileChangedBetweenCommits(filename, pastCommit, c.ID.String())
@ -396,6 +397,10 @@ func (c *Commit) GetSubModules() (*ObjectCache, error) {
}
}
}
err = scanner.Err()
if err != nil {
return nil, fmt.Errorf("scan: %w", err)
}
return c.submoduleCache, nil
}

View File

@ -140,10 +140,13 @@ func TestHasPreviousCommitSha256(t *testing.T) {
commit, err := repo.GetCommit("f004f41359117d319dedd0eaab8c5259ee2263da839dcba33637997458627fdc")
assert.NoError(t, err)
objectFormat, err := repo.GetObjectFormat()
assert.NoError(t, err)
parentSHA := MustIDFromString("b0ec7af4547047f12d5093e37ef8f1b3b5415ed8ee17894d43a34d7d34212e9c")
notParentSHA := MustIDFromString("42e334efd04cd36eea6da0599913333c26116e1a537ca76e5b6e4af4dda00236")
assert.Equal(t, repo.objectFormat, parentSHA.Type())
assert.Equal(t, repo.objectFormat.Name(), "sha256")
assert.Equal(t, objectFormat, parentSHA.Type())
assert.Equal(t, objectFormat.Name(), "sha256")
haz, err := commit.HasPreviousCommit(parentSHA)
assert.NoError(t, err)

View File

@ -283,7 +283,7 @@ type DivergeObject struct {
// GetDivergingCommits returns the number of commits a targetBranch is ahead or behind a baseBranch
func GetDivergingCommits(ctx context.Context, repoPath, baseBranch, targetBranch string) (do DivergeObject, err error) {
cmd := NewCommand(ctx, "rev-list", "--count", "--left-right").
AddDynamicArguments(baseBranch + "..." + targetBranch)
AddDynamicArguments(baseBranch + "..." + targetBranch).AddArguments("--")
stdout, _, err := cmd.RunStdString(&RunOpts{Dir: repoPath})
if err != nil {
return do, err

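For context: with --left-right and --count, git rev-list prints two tab-separated numbers, the commits reachable only from the left revision (baseBranch, i.e. how far the target is behind) and those reachable only from the right one (targetBranch, i.e. how far it is ahead); the trailing "--" added above simply marks the end of the revision arguments. A parsing sketch under that assumption, using strings, strconv and fmt, and not the function's actual continuation:
// parseDivergence is illustrative; it assumes stdout looks like "2\t5\n".
func parseDivergence(stdout string) (behind, ahead int, err error) {
	fields := strings.Fields(strings.TrimSpace(stdout))
	if len(fields) != 2 {
		return 0, 0, fmt.Errorf("unexpected rev-list output: %q", stdout)
	}
	if behind, err = strconv.Atoi(fields[0]); err != nil {
		return 0, 0, err
	}
	if ahead, err = strconv.Atoi(fields[1]); err != nil {
		return 0, 0, err
	}
	return behind, ahead, nil
}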
View File

@ -71,11 +71,6 @@ func OpenRepository(ctx context.Context, repoPath string) (*Repository, error) {
repo.batchWriter, repo.batchReader, repo.batchCancel = CatFileBatch(ctx, repoPath)
repo.checkWriter, repo.checkReader, repo.checkCancel = CatFileBatchCheck(ctx, repoPath)
repo.objectFormat, err = repo.GetObjectFormat()
if err != nil {
return nil, err
}
return repo, nil
}

View File

@ -246,7 +246,12 @@ func (repo *Repository) CommitsByFileAndRange(opts CommitsByFileAndRangeOptions)
}
}()
len := repo.objectFormat.FullLength()
objectFormat, err := repo.GetObjectFormat()
if err != nil {
return nil, err
}
len := objectFormat.FullLength()
commits := []*Commit{}
shaline := make([]byte, len+1)
for {

View File

@ -41,7 +41,10 @@ func (repo *Repository) RemoveReference(name string) error {
// ConvertToHash returns a Hash object from a potential ID string
func (repo *Repository) ConvertToGitID(commitID string) (ObjectID, error) {
objectFormat := repo.objectFormat
objectFormat, err := repo.GetObjectFormat()
if err != nil {
return nil, err
}
if len(commitID) == hash.HexSize && objectFormat.IsValid(commitID) {
ID, err := NewIDFromString(commitID)
if err == nil {

View File

@ -132,8 +132,11 @@ func (repo *Repository) getCommitFromBatchReader(rd *bufio.Reader, id ObjectID)
// ConvertToGitID returns a GitHash object from a potential ID string
func (repo *Repository) ConvertToGitID(commitID string) (ObjectID, error) {
IDType := repo.objectFormat
if len(commitID) == IDType.FullLength() && IDType.IsValid(commitID) {
objectFormat, err := repo.GetObjectFormat()
if err != nil {
return nil, err
}
if len(commitID) == objectFormat.FullLength() && objectFormat.IsValid(commitID) {
ID, err := NewIDFromString(commitID)
if err == nil {
return ID, nil
@ -142,7 +145,7 @@ func (repo *Repository) ConvertToGitID(commitID string) (ObjectID, error) {
wr, rd, cancel := repo.CatFileBatchCheck(repo.Ctx)
defer cancel()
_, err := wr.Write([]byte(commitID + "\n"))
_, err = wr.Write([]byte(commitID + "\n"))
if err != nil {
return nil, err
}

View File

@ -283,8 +283,12 @@ func (repo *Repository) GetPatch(base, head string, w io.Writer) error {
// If base is undefined empty SHA (zeros), it only returns the files changed in the head commit
// If base is the SHA of an empty tree (EmptyTreeSHA), it returns the files changes from the initial commit to the head commit
func (repo *Repository) GetFilesChangedBetween(base, head string) ([]string, error) {
objectFormat, err := repo.GetObjectFormat()
if err != nil {
return nil, err
}
cmd := NewCommand(repo.Ctx, "diff-tree", "--name-only", "--root", "--no-commit-id", "-r", "-z")
if base == repo.objectFormat.EmptyObjectID().String() {
if base == objectFormat.EmptyObjectID().String() {
cmd.AddDynamicArguments(head)
} else {
cmd.AddDynamicArguments(base, head)

View File

@ -126,17 +126,20 @@ func TestGetCommitFilesChanged(t *testing.T) {
assert.NoError(t, err)
defer repo.Close()
objectFormat, err := repo.GetObjectFormat()
assert.NoError(t, err)
testCases := []struct {
base, head string
files []string
}{
{
repo.objectFormat.EmptyObjectID().String(),
objectFormat.EmptyObjectID().String(),
"95bb4d39648ee7e325106df01a621c530863a653",
[]string{"file1.txt"},
},
{
repo.objectFormat.EmptyObjectID().String(),
objectFormat.EmptyObjectID().String(),
"8d92fc957a4d7cfd98bc375f0b7bb189a0d6c9f2",
[]string{"file2.txt"},
},
@ -146,7 +149,7 @@ func TestGetCommitFilesChanged(t *testing.T) {
[]string{"file2.txt"},
},
{
repo.objectFormat.EmptyTree().String(),
objectFormat.EmptyTree().String(),
"8d92fc957a4d7cfd98bc375f0b7bb189a0d6c9f2",
[]string{"file1.txt", "file2.txt"},
},

View File

@ -94,6 +94,10 @@ func (repo *Repository) LsFiles(filenames ...string) ([]string, error) {
// RemoveFilesFromIndex removes given filenames from the index - it does not check whether they are present.
func (repo *Repository) RemoveFilesFromIndex(filenames ...string) error {
objectFormat, err := repo.GetObjectFormat()
if err != nil {
return err
}
cmd := NewCommand(repo.Ctx, "update-index", "--remove", "-z", "--index-info")
stdout := new(bytes.Buffer)
stderr := new(bytes.Buffer)
@ -101,7 +105,7 @@ func (repo *Repository) RemoveFilesFromIndex(filenames ...string) error {
for _, file := range filenames {
if file != "" {
buffer.WriteString("0 ")
buffer.WriteString(repo.objectFormat.EmptyObjectID().String())
buffer.WriteString(objectFormat.EmptyObjectID().String())
buffer.WriteByte('\t')
buffer.WriteString(file)
buffer.WriteByte('\000')

View File

@ -124,6 +124,10 @@ func (repo *Repository) GetCodeActivityStats(fromTime time.Time, branch string)
}
}
}
err = scanner.Err()
if err != nil {
return fmt.Errorf("scan: %w", err)
}
a := make([]*CodeActivityAuthor, 0, len(authors))
for _, v := range authors {
a = append(a, v)

View File

@ -141,7 +141,7 @@ func (repo *Repository) GetTagInfos(page, pageSize int) ([]*Tag, int, error) {
break
}
tag, err := parseTagRef(repo.objectFormat, ref)
tag, err := parseTagRef(ref)
if err != nil {
return nil, 0, fmt.Errorf("GetTagInfos: parse tag: %w", err)
}
@ -161,7 +161,7 @@ func (repo *Repository) GetTagInfos(page, pageSize int) ([]*Tag, int, error) {
}
// parseTagRef parses a tag from a 'git for-each-ref'-produced reference.
func parseTagRef(objectFormat ObjectFormat, ref map[string]string) (tag *Tag, err error) {
func parseTagRef(ref map[string]string) (tag *Tag, err error) {
tag = &Tag{
Type: ref["objecttype"],
Name: ref["refname:lstrip=2"],

View File

@ -194,7 +194,6 @@ func TestRepository_GetAnnotatedTag(t *testing.T) {
}
func TestRepository_parseTagRef(t *testing.T) {
sha1 := Sha1ObjectFormat
tests := []struct {
name string
@ -351,7 +350,7 @@ Add changelog of v1.9.1 (#7859)
for _, test := range tests {
tc := test // don't close over loop variable
t.Run(tc.name, func(t *testing.T) {
got, err := parseTagRef(sha1, tc.givenRef)
got, err := parseTagRef(tc.givenRef)
if tc.wantErr {
require.Error(t, err)

View File

@ -21,7 +21,12 @@ func (repo *Repository) getTree(id ObjectID) (*Tree, error) {
// GetTree find the tree object in the repository.
func (repo *Repository) GetTree(idStr string) (*Tree, error) {
if len(idStr) != repo.objectFormat.FullLength() {
objectFormat, err := repo.GetObjectFormat()
if err != nil {
return nil, err
}
if len(idStr) != objectFormat.FullLength() {
res, _, err := NewCommand(repo.Ctx, "rev-parse", "--verify").AddDynamicArguments(idStr).RunStdString(&RunOpts{Dir: repo.Path})
if err != nil {
return nil, err

View File

@ -51,7 +51,11 @@ func (repo *Repository) getTree(id ObjectID) (*Tree, error) {
case "tree":
tree := NewTree(repo, id)
tree.ResolvedID = id
tree.entries, err = catBatchParseTreeEntries(repo.objectFormat, tree, rd, size)
objectFormat, err := repo.GetObjectFormat()
if err != nil {
return nil, err
}
tree.entries, err = catBatchParseTreeEntries(objectFormat, tree, rd, size)
if err != nil {
return nil, err
}
@ -69,7 +73,11 @@ func (repo *Repository) getTree(id ObjectID) (*Tree, error) {
// GetTree find the tree object in the repository.
func (repo *Repository) GetTree(idStr string) (*Tree, error) {
if len(idStr) != repo.objectFormat.FullLength() {
objectFormat, err := repo.GetObjectFormat()
if err != nil {
return nil, err
}
if len(idStr) != objectFormat.FullLength() {
res, err := repo.GetRefCommitID(idStr)
if err != nil {
return nil, err

View File

@ -77,8 +77,11 @@ func (t *Tree) ListEntries() (Entries, error) {
return nil, runErr
}
var err error
t.entries, err = parseTreeEntries(t.repo.objectFormat, stdout, t)
objectFormat, err := t.repo.GetObjectFormat()
if err != nil {
return nil, err
}
t.entries, err = parseTreeEntries(objectFormat, stdout, t)
if err == nil {
t.entriesParsed = true
}
@ -101,8 +104,11 @@ func (t *Tree) listEntriesRecursive(extraArgs TrustedCmdArgs) (Entries, error) {
return nil, runErr
}
var err error
t.entriesRecursive, err = parseTreeEntries(t.repo.objectFormat, stdout, t)
objectFormat, err := t.repo.GetObjectFormat()
if err != nil {
return nil, err
}
t.entriesRecursive, err = parseTreeEntries(objectFormat, stdout, t)
if err == nil {
t.entriesRecursiveParsed = true
}

View File

@ -233,7 +233,10 @@ func (g *Manager) setStateTransition(old, new state) bool {
// At the moment the total number of servers (numberOfServersToCreate) is pre-defined as a const before global init,
// so this function MUST be called if a server is not used.
func (g *Manager) InformCleanup() {
g.createServerWaitGroup.Done()
g.createServerCond.L.Lock()
defer g.createServerCond.L.Unlock()
g.createdServer++
g.createServerCond.Signal()
}
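The WaitGroup is replaced by a sync.Cond guarding a plain counter, which avoids the reuse panic when fewer servers start than expected. A self-contained sketch of the same wait-until-N idiom, with illustrative names rather than the manager's own fields:
// counterCond lets one goroutine wait until signal has been called n times.
type counterCond struct {
	mu    sync.Mutex
	cond  *sync.Cond
	count int
}

func newCounterCond() *counterCond {
	c := &counterCond{}
	c.cond = sync.NewCond(&c.mu)
	return c
}

// signal increments the counter and wakes the waiter, mirroring InformCleanup above.
func (c *counterCond) signal() {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.count++
	c.cond.Signal()
}

// waitForN blocks until the counter reaches n, mirroring the wait loops added below.
func (c *counterCond) waitForN(n int) {
	c.mu.Lock()
	defer c.mu.Unlock()
	for c.count < n {
		c.cond.Wait()
	}
}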
// Done allows the manager to be viewed as a context.Context, it returns a channel that is closed when the server is finished terminating

View File

@ -42,8 +42,9 @@ type Manager struct {
terminateCtxCancel context.CancelFunc
managerCtxCancel context.CancelFunc
runningServerWaitGroup sync.WaitGroup
createServerWaitGroup sync.WaitGroup
terminateWaitGroup sync.WaitGroup
createServerCond sync.Cond
createdServer int
shutdownRequested chan struct{}
toRunAtShutdown []func()
@ -52,7 +53,7 @@ type Manager struct {
func newGracefulManager(ctx context.Context) *Manager {
manager := &Manager{ctx: ctx, shutdownRequested: make(chan struct{})}
manager.createServerWaitGroup.Add(numberOfServersToCreate)
manager.createServerCond.L = &sync.Mutex{}
manager.prepare(ctx)
manager.start()
return manager

View File

@ -57,20 +57,27 @@ func (g *Manager) start() {
// Handle clean up of unused provided listeners and delayed start-up
startupDone := make(chan struct{})
go func() {
defer close(startupDone)
// Wait till we're done getting all the listeners and then close the unused ones
func() {
// FIXME: there is a fundamental design problem of the "manager" and the "wait group".
// If nothing has started, the "Wait" just panics: sync: WaitGroup is reused before previous Wait has returned
// There is no clear solution besides a complete rewriting of the "manager"
defer func() {
_ = recover()
}()
g.createServerWaitGroup.Wait()
defer func() {
close(startupDone)
// Close the unused listeners
closeProvidedListeners()
}()
// Ignore the error here there's not much we can do with it, they're logged in the CloseProvidedListeners function
_ = CloseProvidedListeners()
g.notify(readyMsg)
// Wait for all servers to be created
g.createServerCond.L.Lock()
for {
if g.createdServer >= numberOfServersToCreate {
g.createServerCond.L.Unlock()
g.notify(readyMsg)
return
}
select {
case <-g.IsShutdown():
g.createServerCond.L.Unlock()
return
default:
}
g.createServerCond.Wait()
}
}()
if setting.StartupTimeout > 0 {
go func() {
@ -78,16 +85,7 @@ func (g *Manager) start() {
case <-startupDone:
return
case <-g.IsShutdown():
func() {
// When WaitGroup counter goes negative it will panic - we don't care about this so we can just ignore it.
defer func() {
_ = recover()
}()
// Ensure that the createServerWaitGroup stops waiting
for {
g.createServerWaitGroup.Done()
}
}()
g.createServerCond.Signal()
return
case <-time.After(setting.StartupTimeout):
log.Error("Startup took too long! Shutting down")

View File

@ -149,33 +149,35 @@ hammerLoop:
func (g *Manager) awaitServer(limit time.Duration) bool {
c := make(chan struct{})
go func() {
defer close(c)
func() {
// FIXME: there is a fundamental design problem of the "manager" and the "wait group".
// If nothing has started, the "Wait" just panics: sync: WaitGroup is reused before previous Wait has returned
// There is no clear solution besides a complete rewriting of the "manager"
defer func() {
_ = recover()
}()
g.createServerWaitGroup.Wait()
}()
g.createServerCond.L.Lock()
for {
if g.createdServer >= numberOfServersToCreate {
g.createServerCond.L.Unlock()
close(c)
return
}
select {
case <-g.IsShutdown():
g.createServerCond.L.Unlock()
return
default:
}
g.createServerCond.Wait()
}
}()
var tc <-chan time.Time
if limit > 0 {
select {
case <-c:
return true // completed normally
case <-time.After(limit):
return false // timed out
case <-g.IsShutdown():
return false
}
} else {
select {
case <-c:
return true // completed normally
case <-g.IsShutdown():
return false
}
tc = time.After(limit)
}
select {
case <-c:
return true // completed normally
case <-tc:
return false // timed out
case <-g.IsShutdown():
g.createServerCond.Signal()
return false
}
}

View File

@ -129,25 +129,17 @@ func getProvidedFDs() (savedErr error) {
return savedErr
}
// CloseProvidedListeners closes all unused provided listeners.
func CloseProvidedListeners() error {
// closeProvidedListeners closes all unused provided listeners.
func closeProvidedListeners() {
mutex.Lock()
defer mutex.Unlock()
var returnableError error
for _, l := range providedListeners {
err := l.Close()
if err != nil {
log.Error("Error in closing unused provided listener: %v", err)
if returnableError != nil {
returnableError = fmt.Errorf("%v & %w", returnableError, err)
} else {
returnableError = err
}
}
}
providedListeners = []net.Listener{}
return returnableError
}
// DefaultGetListener obtains a listener for the stream-oriented local network address:

View File

@ -8,20 +8,40 @@ import (
"strings"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
)
// IsRiskyRedirectURL returns true if the URL is considered risky for redirects
func IsRiskyRedirectURL(s string) bool {
func urlIsRelative(s string, u *url.URL) bool {
// Unfortunately browsers consider a redirect Location with preceding "//", "\\", "/\" and "\/" as meaning redirect to "http(s)://REST_OF_PATH"
// Therefore we should ignore these redirect locations to prevent open redirects
if len(s) > 1 && (s[0] == '/' || s[0] == '\\') && (s[1] == '/' || s[1] == '\\') {
return true
return false
}
u, err := url.Parse(s)
if err != nil || ((u.Scheme != "" || u.Host != "") && !strings.HasPrefix(strings.ToLower(s), strings.ToLower(setting.AppURL))) {
return true
}
return false
return u != nil && u.Scheme == "" && u.Host == ""
}
// IsRelativeURL detects if a URL is relative (no scheme or host)
func IsRelativeURL(s string) bool {
u, err := url.Parse(s)
return err == nil && urlIsRelative(s, u)
}
func IsCurrentGiteaSiteURL(s string) bool {
u, err := url.Parse(s)
if err != nil {
return false
}
if u.Path != "" {
u.Path = "/" + util.PathJoinRelX(u.Path)
if !strings.HasSuffix(u.Path, "/") {
u.Path += "/"
}
}
if urlIsRelative(s, u) {
return u.Path == "" || strings.HasPrefix(strings.ToLower(u.Path), strings.ToLower(setting.AppSubURL+"/"))
}
if u.Path == "" {
u.Path = "/"
}
return strings.HasPrefix(strings.ToLower(u.String()), strings.ToLower(setting.AppURL))
}

View File

@ -7,32 +7,65 @@ import (
"testing"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/test"
"github.com/stretchr/testify/assert"
)
func TestIsRiskyRedirectURL(t *testing.T) {
setting.AppURL = "http://localhost:3000/"
tests := []struct {
input string
want bool
}{
{"", false},
{"foo", false},
{"/", false},
{"/foo?k=%20#abc", false},
{"//", true},
{"\\\\", true},
{"/\\", true},
{"\\/", true},
{"mail:a@b.com", true},
{"https://test.com", true},
{setting.AppURL + "/foo", false},
func TestIsRelativeURL(t *testing.T) {
defer test.MockVariableValue(&setting.AppURL, "http://localhost:3000/sub/")()
defer test.MockVariableValue(&setting.AppSubURL, "/sub")()
rel := []string{
"",
"foo",
"/",
"/foo?k=%20#abc",
}
for _, tt := range tests {
t.Run(tt.input, func(t *testing.T) {
assert.Equal(t, tt.want, IsRiskyRedirectURL(tt.input))
})
for _, s := range rel {
assert.True(t, IsRelativeURL(s), "rel = %q", s)
}
abs := []string{
"//",
"\\\\",
"/\\",
"\\/",
"mailto:a@b.com",
"https://test.com",
}
for _, s := range abs {
assert.False(t, IsRelativeURL(s), "abs = %q", s)
}
}
func TestIsCurrentGiteaSiteURL(t *testing.T) {
defer test.MockVariableValue(&setting.AppURL, "http://localhost:3000/sub/")()
defer test.MockVariableValue(&setting.AppSubURL, "/sub")()
good := []string{
"?key=val",
"/sub",
"/sub/",
"/sub/foo",
"/sub/foo/",
"http://localhost:3000/sub?key=val",
"http://localhost:3000/sub/",
}
for _, s := range good {
assert.True(t, IsCurrentGiteaSiteURL(s), "good = %q", s)
}
bad := []string{
"/",
"//",
"\\\\",
"/foo",
"http://localhost:3000/sub/..",
"http://localhost:3000/other",
"http://other/",
}
for _, s := range bad {
assert.False(t, IsCurrentGiteaSiteURL(s), "bad = %q", s)
}
setting.AppURL = "http://localhost:3000/"
setting.AppSubURL = ""
assert.True(t, IsCurrentGiteaSiteURL("http://localhost:3000?key=val"))
}

View File

@ -142,7 +142,7 @@ func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserErro
return err
}
if size, err = strconv.ParseInt(strings.TrimSpace(stdout), 10, 64); err != nil {
return fmt.Errorf("Misformatted git cat-file output: %w", err)
return fmt.Errorf("misformatted git cat-file output: %w", err)
}
}
@ -233,26 +233,26 @@ func (b *Indexer) Delete(_ context.Context, repoID int64) error {
// Search searches for files in the specified repo.
// Returns the matching file-paths
func (b *Indexer) Search(ctx context.Context, repoIDs []int64, language, keyword string, page, pageSize int, isFuzzy bool) (int64, []*internal.SearchResult, []*internal.SearchResultLanguages, error) {
func (b *Indexer) Search(ctx context.Context, opts *internal.SearchOptions) (int64, []*internal.SearchResult, []*internal.SearchResultLanguages, error) {
var (
indexerQuery query.Query
keywordQuery query.Query
)
if isFuzzy {
phraseQuery := bleve.NewMatchPhraseQuery(keyword)
if opts.IsKeywordFuzzy {
phraseQuery := bleve.NewMatchPhraseQuery(opts.Keyword)
phraseQuery.FieldVal = "Content"
phraseQuery.Analyzer = repoIndexerAnalyzer
keywordQuery = phraseQuery
} else {
prefixQuery := bleve.NewPrefixQuery(keyword)
prefixQuery := bleve.NewPrefixQuery(opts.Keyword)
prefixQuery.FieldVal = "Content"
keywordQuery = prefixQuery
}
if len(repoIDs) > 0 {
repoQueries := make([]query.Query, 0, len(repoIDs))
for _, repoID := range repoIDs {
if len(opts.RepoIDs) > 0 {
repoQueries := make([]query.Query, 0, len(opts.RepoIDs))
for _, repoID := range opts.RepoIDs {
repoQueries = append(repoQueries, inner_bleve.NumericEqualityQuery(repoID, "RepoID"))
}
@ -266,8 +266,8 @@ func (b *Indexer) Search(ctx context.Context, repoIDs []int64, language, keyword
// Save for reuse without language filter
facetQuery := indexerQuery
if len(language) > 0 {
languageQuery := bleve.NewMatchQuery(language)
if len(opts.Language) > 0 {
languageQuery := bleve.NewMatchQuery(opts.Language)
languageQuery.FieldVal = "Language"
languageQuery.Analyzer = analyzer_keyword.Name
@ -277,12 +277,12 @@ func (b *Indexer) Search(ctx context.Context, repoIDs []int64, language, keyword
)
}
from := (page - 1) * pageSize
from, pageSize := opts.GetSkipTake()
searchRequest := bleve.NewSearchRequestOptions(indexerQuery, pageSize, from, false)
searchRequest.Fields = []string{"Content", "RepoID", "Language", "CommitID", "UpdatedAt"}
searchRequest.IncludeLocations = true
if len(language) == 0 {
if len(opts.Language) == 0 {
searchRequest.AddFacet("languages", bleve.NewFacetRequest("Language", 10))
}
@ -326,7 +326,7 @@ func (b *Indexer) Search(ctx context.Context, repoIDs []int64, language, keyword
}
searchResultLanguages := make([]*internal.SearchResultLanguages, 0, 10)
if len(language) > 0 {
if len(opts.Language) > 0 {
// Use separate query to go get all language counts
facetRequest := bleve.NewSearchRequestOptions(facetQuery, 1, 0, false)
facetRequest.Fields = []string{"Content", "RepoID", "Language", "CommitID", "UpdatedAt"}

View File

@ -281,18 +281,18 @@ func extractAggs(searchResult *elastic.SearchResult) []*internal.SearchResultLan
}
// Search searches for codes and language stats by given conditions.
func (b *Indexer) Search(ctx context.Context, repoIDs []int64, language, keyword string, page, pageSize int, isFuzzy bool) (int64, []*internal.SearchResult, []*internal.SearchResultLanguages, error) {
func (b *Indexer) Search(ctx context.Context, opts *internal.SearchOptions) (int64, []*internal.SearchResult, []*internal.SearchResultLanguages, error) {
searchType := esMultiMatchTypePhrasePrefix
if isFuzzy {
if opts.IsKeywordFuzzy {
searchType = esMultiMatchTypeBestFields
}
kwQuery := elastic.NewMultiMatchQuery(keyword, "content").Type(searchType)
kwQuery := elastic.NewMultiMatchQuery(opts.Keyword, "content").Type(searchType)
query := elastic.NewBoolQuery()
query = query.Must(kwQuery)
if len(repoIDs) > 0 {
repoStrs := make([]any, 0, len(repoIDs))
for _, repoID := range repoIDs {
if len(opts.RepoIDs) > 0 {
repoStrs := make([]any, 0, len(opts.RepoIDs))
for _, repoID := range opts.RepoIDs {
repoStrs = append(repoStrs, repoID)
}
repoQuery := elastic.NewTermsQuery("repo_id", repoStrs...)
@ -300,16 +300,12 @@ func (b *Indexer) Search(ctx context.Context, repoIDs []int64, language, keyword
}
var (
start int
kw = "<em>" + keyword + "</em>"
aggregation = elastic.NewTermsAggregation().Field("language").Size(10).OrderByCountDesc()
start, pageSize = opts.GetSkipTake()
kw = "<em>" + opts.Keyword + "</em>"
aggregation = elastic.NewTermsAggregation().Field("language").Size(10).OrderByCountDesc()
)
if page > 0 {
start = (page - 1) * pageSize
}
if len(language) == 0 {
if len(opts.Language) == 0 {
searchResult, err := b.inner.Client.Search().
Index(b.inner.VersionedIndexName()).
Aggregation("language", aggregation).
@ -330,7 +326,7 @@ func (b *Indexer) Search(ctx context.Context, repoIDs []int64, language, keyword
return convertResult(searchResult, kw, pageSize)
}
langQuery := elastic.NewMatchQuery("language", language)
langQuery := elastic.NewMatchQuery("language", opts.Language)
countResult, err := b.inner.Client.Search().
Index(b.inner.VersionedIndexName()).
Aggregation("language", aggregation).

View File

@ -32,7 +32,7 @@ func getRepoChanges(ctx context.Context, repo *repo_model.Repository, revision s
needGenesis := len(status.CommitSha) == 0
if !needGenesis {
hasAncestorCmd := git.NewCommand(ctx, "merge-base").AddDynamicArguments(repo.CodeIndexerStatus.CommitSha, revision)
hasAncestorCmd := git.NewCommand(ctx, "merge-base").AddDynamicArguments(status.CommitSha, revision)
stdout, _, _ := hasAncestorCmd.RunStdString(&git.RunOpts{Dir: repo.RepoPath()})
needGenesis = len(stdout) == 0
}
@ -91,11 +91,9 @@ func genesisChanges(ctx context.Context, repo *repo_model.Repository, revision s
return nil, runErr
}
objectFormat := git.ObjectFormatFromName(repo.ObjectFormatName)
var err error
objectFormat, err := git.GetObjectFormatOfRepo(ctx, repo.RepoPath())
if err != nil {
return nil, err
}
changes.Updates, err = parseGitLsTreeOutput(objectFormat, stdout)
return &changes, err
}
@ -174,10 +172,8 @@ func nonGenesisChanges(ctx context.Context, repo *repo_model.Repository, revisio
return nil, err
}
objectFormat, err := git.GetObjectFormatOfRepo(ctx, repo.RepoPath())
if err != nil {
return nil, err
}
objectFormat := git.ObjectFormatFromName(repo.ObjectFormatName)
changes.Updates, err = parseGitLsTreeOutput(objectFormat, lsTreeStdout)
return &changes, err
}

View File

@ -8,6 +8,7 @@ import (
"os"
"testing"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/unittest"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/indexer/code/bleve"
@ -70,7 +71,15 @@ func testIndexer(name string, t *testing.T, indexer internal.Indexer) {
for _, kw := range keywords {
t.Run(kw.Keyword, func(t *testing.T) {
total, res, langs, err := indexer.Search(context.TODO(), kw.RepoIDs, "", kw.Keyword, 1, 10, true)
total, res, langs, err := indexer.Search(context.TODO(), &internal.SearchOptions{
RepoIDs: kw.RepoIDs,
Keyword: kw.Keyword,
Paginator: &db.ListOptions{
Page: 1,
PageSize: 10,
},
IsKeywordFuzzy: true,
})
assert.NoError(t, err)
assert.Len(t, kw.IDs, int(total))
assert.Len(t, langs, kw.Langs)

View File

@ -7,6 +7,7 @@ import (
"context"
"fmt"
"code.gitea.io/gitea/models/db"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/indexer/internal"
)
@ -16,7 +17,17 @@ type Indexer interface {
internal.Indexer
Index(ctx context.Context, repo *repo_model.Repository, sha string, changes *RepoChanges) error
Delete(ctx context.Context, repoID int64) error
Search(ctx context.Context, repoIDs []int64, language, keyword string, page, pageSize int, isFuzzy bool) (int64, []*SearchResult, []*SearchResultLanguages, error)
Search(ctx context.Context, opts *SearchOptions) (int64, []*SearchResult, []*SearchResultLanguages, error)
}
type SearchOptions struct {
RepoIDs []int64
Keyword string
Language string
IsKeywordFuzzy bool
db.Paginator
}
// NewDummyIndexer returns a dummy indexer
@ -38,6 +49,6 @@ func (d *dummyIndexer) Delete(ctx context.Context, repoID int64) error {
return fmt.Errorf("indexer is not ready")
}
func (d *dummyIndexer) Search(ctx context.Context, repoIDs []int64, language, keyword string, page, pageSize int, isFuzzy bool) (int64, []*SearchResult, []*SearchResultLanguages, error) {
func (d *dummyIndexer) Search(ctx context.Context, opts *SearchOptions) (int64, []*SearchResult, []*SearchResultLanguages, error) {
return 0, nil, nil, fmt.Errorf("indexer is not ready")
}

View File

@ -32,6 +32,8 @@ type ResultLine struct {
type SearchResultLanguages = internal.SearchResultLanguages
type SearchOptions = internal.SearchOptions
func indices(content string, selectionStartIndex, selectionEndIndex int) (int, int) {
startIndex := selectionStartIndex
numLinesBefore := 0
@ -125,12 +127,12 @@ func searchResult(result *internal.SearchResult, startIndex, endIndex int) (*Res
// PerformSearch performs a search on a repository
// if isFuzzy is true set the Damerau-Levenshtein distance from 0 to 2
func PerformSearch(ctx context.Context, repoIDs []int64, language, keyword string, page, pageSize int, isFuzzy bool) (int, []*Result, []*internal.SearchResultLanguages, error) {
if len(keyword) == 0 {
func PerformSearch(ctx context.Context, opts *SearchOptions) (int, []*Result, []*SearchResultLanguages, error) {
if opts == nil || len(opts.Keyword) == 0 {
return 0, nil, nil, nil
}
total, results, resultLanguages, err := (*globalIndexer.Load()).Search(ctx, repoIDs, language, keyword, page, pageSize, isFuzzy)
total, results, resultLanguages, err := (*globalIndexer.Load()).Search(ctx, opts)
if err != nil {
return 0, nil, nil, err
}
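
A hedged sketch of how a caller builds the new options struct for PerformSearch. The SearchOptions fields and the db.ListOptions paginator are taken from the hunks above; the function wrapper, the code_indexer import alias, ctx, repo and the sample keyword are illustrative assumptions.

// Illustrative handler fragment, not code from this diff.
func searchRepoCode(ctx context.Context, repo *repo_model.Repository) error {
    total, results, langs, err := code_indexer.PerformSearch(ctx, &code_indexer.SearchOptions{
        RepoIDs:        []int64{repo.ID},
        Keyword:        "bufio.Scanner",
        Language:       "Go",
        IsKeywordFuzzy: true,
        Paginator: &db.ListOptions{
            Page:     1,
            PageSize: 20,
        },
    })
    if err != nil {
        return err
    }
    // total is the overall hit count, results the current page, langs the language facet.
    _, _, _ = total, results, langs
    return nil
}
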

View File

@ -4,6 +4,8 @@
package bleve
import (
"code.gitea.io/gitea/modules/optional"
"github.com/blevesearch/bleve/v2"
"github.com/blevesearch/bleve/v2/search/query"
)
@ -39,18 +41,18 @@ func BoolFieldQuery(value bool, field string) *query.BoolFieldQuery {
return q
}
func NumericRangeInclusiveQuery(min, max *int64, field string) *query.NumericRangeQuery {
func NumericRangeInclusiveQuery(min, max optional.Option[int64], field string) *query.NumericRangeQuery {
var minF, maxF *float64
var minI, maxI *bool
if min != nil {
if min.Has() {
minF = new(float64)
*minF = float64(*min)
*minF = float64(min.Value())
minI = new(bool)
*minI = true
}
if max != nil {
if max.Has() {
maxF = new(float64)
*maxF = float64(*max)
*maxF = float64(max.Value())
maxI = new(bool)
*maxI = true
}
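
The optional.Option[int64] values used here replace *int64 throughout this refactor. A short sketch of the semantics the call sites rely on: Some/None constructors, Has to test presence, and Value, which yields the zero value when nothing is set (the ToDBOptions hunk further down depends on exactly that when it passes UpdatedAfterUnix.Value() straight through).

none := optional.None[int64]()   // Has() == false, Value() == 0 (the zero value when unset)
some := optional.Some(int64(42)) // Has() == true,  Value() == 42

if some.Has() {
    // add the query clause only when a filter was actually supplied,
    // exactly as the ProjectID / PosterID / ... branches in the indexer hunks do
    _ = some.Value()
}
_ = none
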

View File

@ -224,38 +224,41 @@ func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) (
queries = append(queries, bleve.NewDisjunctionQuery(milestoneQueries...))
}
if options.ProjectID != nil {
queries = append(queries, inner_bleve.NumericEqualityQuery(*options.ProjectID, "project_id"))
if options.ProjectID.Has() {
queries = append(queries, inner_bleve.NumericEqualityQuery(options.ProjectID.Value(), "project_id"))
}
if options.ProjectBoardID != nil {
queries = append(queries, inner_bleve.NumericEqualityQuery(*options.ProjectBoardID, "project_board_id"))
if options.ProjectBoardID.Has() {
queries = append(queries, inner_bleve.NumericEqualityQuery(options.ProjectBoardID.Value(), "project_board_id"))
}
if options.PosterID != nil {
queries = append(queries, inner_bleve.NumericEqualityQuery(*options.PosterID, "poster_id"))
if options.PosterID.Has() {
queries = append(queries, inner_bleve.NumericEqualityQuery(options.PosterID.Value(), "poster_id"))
}
if options.AssigneeID != nil {
queries = append(queries, inner_bleve.NumericEqualityQuery(*options.AssigneeID, "assignee_id"))
if options.AssigneeID.Has() {
queries = append(queries, inner_bleve.NumericEqualityQuery(options.AssigneeID.Value(), "assignee_id"))
}
if options.MentionID != nil {
queries = append(queries, inner_bleve.NumericEqualityQuery(*options.MentionID, "mention_ids"))
if options.MentionID.Has() {
queries = append(queries, inner_bleve.NumericEqualityQuery(options.MentionID.Value(), "mention_ids"))
}
if options.ReviewedID != nil {
queries = append(queries, inner_bleve.NumericEqualityQuery(*options.ReviewedID, "reviewed_ids"))
if options.ReviewedID.Has() {
queries = append(queries, inner_bleve.NumericEqualityQuery(options.ReviewedID.Value(), "reviewed_ids"))
}
if options.ReviewRequestedID != nil {
queries = append(queries, inner_bleve.NumericEqualityQuery(*options.ReviewRequestedID, "review_requested_ids"))
if options.ReviewRequestedID.Has() {
queries = append(queries, inner_bleve.NumericEqualityQuery(options.ReviewRequestedID.Value(), "review_requested_ids"))
}
if options.SubscriberID != nil {
queries = append(queries, inner_bleve.NumericEqualityQuery(*options.SubscriberID, "subscriber_ids"))
if options.SubscriberID.Has() {
queries = append(queries, inner_bleve.NumericEqualityQuery(options.SubscriberID.Value(), "subscriber_ids"))
}
if options.UpdatedAfterUnix != nil || options.UpdatedBeforeUnix != nil {
queries = append(queries, inner_bleve.NumericRangeInclusiveQuery(options.UpdatedAfterUnix, options.UpdatedBeforeUnix, "updated_unix"))
if options.UpdatedAfterUnix.Has() || options.UpdatedBeforeUnix.Has() {
queries = append(queries, inner_bleve.NumericRangeInclusiveQuery(
options.UpdatedAfterUnix,
options.UpdatedBeforeUnix,
"updated_unix"))
}
var indexerQuery query.Query = bleve.NewConjunctionQuery(queries...)

View File

@ -15,22 +15,6 @@ import (
)
func ToDBOptions(ctx context.Context, options *internal.SearchOptions) (*issue_model.IssuesOptions, error) {
// See the comment of issues_model.SearchOptions for the reason why we need to convert
convertID := func(id *int64) int64 {
if id == nil {
return 0
}
if *id == 0 {
return db.NoConditionID
}
return *id
}
convertInt64 := func(i *int64) int64 {
if i == nil {
return 0
}
return *i
}
var sortType string
switch options.SortBy {
case internal.SortByCreatedAsc:
@ -53,6 +37,18 @@ func ToDBOptions(ctx context.Context, options *internal.SearchOptions) (*issue_m
sortType = "newest"
}
// See the comment of issues_model.SearchOptions for the reason why we need to convert
convertID := func(id optional.Option[int64]) int64 {
if !id.Has() {
return 0
}
value := id.Value()
if value == 0 {
return db.NoConditionID
}
return value
}
opts := &issue_model.IssuesOptions{
Paginator: options.Paginator,
RepoIDs: options.RepoIDs,
@ -73,8 +69,8 @@ func ToDBOptions(ctx context.Context, options *internal.SearchOptions) (*issue_m
IncludeMilestones: nil,
SortType: sortType,
IssueIDs: nil,
UpdatedAfterUnix: convertInt64(options.UpdatedAfterUnix),
UpdatedBeforeUnix: convertInt64(options.UpdatedBeforeUnix),
UpdatedAfterUnix: options.UpdatedAfterUnix.Value(),
UpdatedBeforeUnix: options.UpdatedBeforeUnix.Value(),
PriorityRepoID: 0,
IsArchived: optional.None[bool](),
Org: nil,
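
For readability, a shorthand reading of what the convertID helper above does when translating indexer-side options back into the DB-level encoding (see the issues_model.SearchOptions comment referenced in the code for why the sentinel exists):

// indexer-side option          DB-level id          effect
// optional.None[int64]()   ->  0                    no filter applied
// optional.Some(int64(0))  ->  db.NoConditionID     the DB layer's sentinel for an explicit zero
// optional.Some(v), v != 0 ->  v                    filter on that id
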

View File

@ -6,6 +6,7 @@ package issues
import (
"code.gitea.io/gitea/models/db"
issues_model "code.gitea.io/gitea/models/issues"
"code.gitea.io/gitea/modules/optional"
)
func ToSearchOptions(keyword string, opts *issues_model.IssuesOptions) *SearchOptions {
@ -38,13 +39,12 @@ func ToSearchOptions(keyword string, opts *issues_model.IssuesOptions) *SearchOp
}
// See the comment of issues_model.SearchOptions for the reason why we need to convert
convertID := func(id int64) *int64 {
convertID := func(id int64) optional.Option[int64] {
if id > 0 {
return &id
return optional.Some(id)
}
if id == db.NoConditionID {
var zero int64
return &zero
return optional.None[int64]()
}
return nil
}
@ -59,10 +59,10 @@ func ToSearchOptions(keyword string, opts *issues_model.IssuesOptions) *SearchOp
searchOpt.SubscriberID = convertID(opts.SubscriberID)
if opts.UpdatedAfterUnix > 0 {
searchOpt.UpdatedAfterUnix = &opts.UpdatedAfterUnix
searchOpt.UpdatedAfterUnix = optional.Some(opts.UpdatedAfterUnix)
}
if opts.UpdatedBeforeUnix > 0 {
searchOpt.UpdatedBeforeUnix = &opts.UpdatedBeforeUnix
searchOpt.UpdatedBeforeUnix = optional.Some(opts.UpdatedBeforeUnix)
}
searchOpt.Paginator = opts.Paginator

View File

@ -195,43 +195,43 @@ func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) (
query.Must(elastic.NewTermsQuery("milestone_id", toAnySlice(options.MilestoneIDs)...))
}
if options.ProjectID != nil {
query.Must(elastic.NewTermQuery("project_id", *options.ProjectID))
if options.ProjectID.Has() {
query.Must(elastic.NewTermQuery("project_id", options.ProjectID.Value()))
}
if options.ProjectBoardID != nil {
query.Must(elastic.NewTermQuery("project_board_id", *options.ProjectBoardID))
if options.ProjectBoardID.Has() {
query.Must(elastic.NewTermQuery("project_board_id", options.ProjectBoardID.Value()))
}
if options.PosterID != nil {
query.Must(elastic.NewTermQuery("poster_id", *options.PosterID))
if options.PosterID.Has() {
query.Must(elastic.NewTermQuery("poster_id", options.PosterID.Value()))
}
if options.AssigneeID != nil {
query.Must(elastic.NewTermQuery("assignee_id", *options.AssigneeID))
if options.AssigneeID.Has() {
query.Must(elastic.NewTermQuery("assignee_id", options.AssigneeID.Value()))
}
if options.MentionID != nil {
query.Must(elastic.NewTermQuery("mention_ids", *options.MentionID))
if options.MentionID.Has() {
query.Must(elastic.NewTermQuery("mention_ids", options.MentionID.Value()))
}
if options.ReviewedID != nil {
query.Must(elastic.NewTermQuery("reviewed_ids", *options.ReviewedID))
if options.ReviewedID.Has() {
query.Must(elastic.NewTermQuery("reviewed_ids", options.ReviewedID.Value()))
}
if options.ReviewRequestedID != nil {
query.Must(elastic.NewTermQuery("review_requested_ids", *options.ReviewRequestedID))
if options.ReviewRequestedID.Has() {
query.Must(elastic.NewTermQuery("review_requested_ids", options.ReviewRequestedID.Value()))
}
if options.SubscriberID != nil {
query.Must(elastic.NewTermQuery("subscriber_ids", *options.SubscriberID))
if options.SubscriberID.Has() {
query.Must(elastic.NewTermQuery("subscriber_ids", options.SubscriberID.Value()))
}
if options.UpdatedAfterUnix != nil || options.UpdatedBeforeUnix != nil {
if options.UpdatedAfterUnix.Has() || options.UpdatedBeforeUnix.Has() {
q := elastic.NewRangeQuery("updated_unix")
if options.UpdatedAfterUnix != nil {
q.Gte(*options.UpdatedAfterUnix)
if options.UpdatedAfterUnix.Has() {
q.Gte(options.UpdatedAfterUnix.Value())
}
if options.UpdatedBeforeUnix != nil {
q.Lte(*options.UpdatedBeforeUnix)
if options.UpdatedBeforeUnix.Has() {
q.Lte(options.UpdatedBeforeUnix.Value())
}
query.Must(q)
}

View File

@ -134,63 +134,60 @@ func searchIssueInRepo(t *testing.T) {
}
func searchIssueByID(t *testing.T) {
int64Pointer := func(x int64) *int64 {
return &x
}
tests := []struct {
opts SearchOptions
expectedIDs []int64
}{
{
SearchOptions{
PosterID: int64Pointer(1),
opts: SearchOptions{
PosterID: optional.Some(int64(1)),
},
[]int64{11, 6, 3, 2, 1},
expectedIDs: []int64{11, 6, 3, 2, 1},
},
{
SearchOptions{
AssigneeID: int64Pointer(1),
opts: SearchOptions{
AssigneeID: optional.Some(int64(1)),
},
[]int64{6, 1},
expectedIDs: []int64{6, 1},
},
{
SearchOptions{
MentionID: int64Pointer(4),
opts: SearchOptions{
MentionID: optional.Some(int64(4)),
},
[]int64{1},
expectedIDs: []int64{1},
},
{
SearchOptions{
ReviewedID: int64Pointer(1),
opts: SearchOptions{
ReviewedID: optional.Some(int64(1)),
},
[]int64{},
expectedIDs: []int64{},
},
{
SearchOptions{
ReviewRequestedID: int64Pointer(1),
opts: SearchOptions{
ReviewRequestedID: optional.Some(int64(1)),
},
[]int64{12},
expectedIDs: []int64{12},
},
{
SearchOptions{
SubscriberID: int64Pointer(1),
opts: SearchOptions{
SubscriberID: optional.Some(int64(1)),
},
[]int64{11, 6, 5, 3, 2, 1},
expectedIDs: []int64{11, 6, 5, 3, 2, 1},
},
{
// issue 20 requests review from user 15 and team 5, which user 15 belongs to,
// so the review request count for issue 20 should be 1
SearchOptions{
ReviewRequestedID: int64Pointer(15),
opts: SearchOptions{
ReviewRequestedID: optional.Some(int64(15)),
},
[]int64{12, 20},
expectedIDs: []int64{12, 20},
},
{
// user 20 approved the issue 20, so return nothing
SearchOptions{
ReviewRequestedID: int64Pointer(20),
opts: SearchOptions{
ReviewRequestedID: optional.Some(int64(20)),
},
[]int64{},
expectedIDs: []int64{},
},
}
@ -318,16 +315,13 @@ func searchIssueByLabelID(t *testing.T) {
}
func searchIssueByTime(t *testing.T) {
int64Pointer := func(i int64) *int64 {
return &i
}
tests := []struct {
opts SearchOptions
expectedIDs []int64
}{
{
SearchOptions{
UpdatedAfterUnix: int64Pointer(0),
UpdatedAfterUnix: optional.Some(int64(0)),
},
[]int64{22, 21, 17, 16, 15, 14, 13, 12, 11, 20, 6, 5, 19, 18, 10, 7, 4, 9, 8, 3, 2, 1},
},
@ -363,28 +357,25 @@ func searchIssueWithOrder(t *testing.T) {
}
func searchIssueInProject(t *testing.T) {
int64Pointer := func(i int64) *int64 {
return &i
}
tests := []struct {
opts SearchOptions
expectedIDs []int64
}{
{
SearchOptions{
ProjectID: int64Pointer(1),
ProjectID: optional.Some(int64(1)),
},
[]int64{5, 3, 2, 1},
},
{
SearchOptions{
ProjectBoardID: int64Pointer(1),
ProjectBoardID: optional.Some(int64(1)),
},
[]int64{1},
},
{
SearchOptions{
ProjectBoardID: int64Pointer(0), // issue with in default board
ProjectBoardID: optional.Some(int64(0)), // issues in the default board
},
[]int64{2},
},

View File

@ -89,22 +89,22 @@ type SearchOptions struct {
MilestoneIDs []int64 // milestones the issues have
ProjectID *int64 // project the issues belong to
ProjectBoardID *int64 // project board the issues belong to
ProjectID optional.Option[int64] // project the issues belong to
ProjectBoardID optional.Option[int64] // project board the issues belong to
PosterID *int64 // poster of the issues
PosterID optional.Option[int64] // poster of the issues
AssigneeID *int64 // assignee of the issues, zero means no assignee
AssigneeID optional.Option[int64] // assignee of the issues, zero means no assignee
MentionID *int64 // mentioned user of the issues
MentionID optional.Option[int64] // mentioned user of the issues
ReviewedID *int64 // reviewer of the issues
ReviewRequestedID *int64 // requested reviewer of the issues
ReviewedID optional.Option[int64] // reviewer of the issues
ReviewRequestedID optional.Option[int64] // requested reviewer of the issues
SubscriberID *int64 // subscriber of the issues
SubscriberID optional.Option[int64] // subscriber of the issues
UpdatedAfterUnix *int64
UpdatedBeforeUnix *int64
UpdatedAfterUnix optional.Option[int64]
UpdatedBeforeUnix optional.Option[int64]
db.Paginator

View File

@ -300,10 +300,7 @@ var cases = []*testIndexerCase{
Paginator: &db.ListOptions{
PageSize: 5,
},
ProjectID: func() *int64 {
id := int64(1)
return &id
}(),
ProjectID: optional.Some(int64(1)),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
@ -321,10 +318,7 @@ var cases = []*testIndexerCase{
Paginator: &db.ListOptions{
PageSize: 5,
},
ProjectID: func() *int64 {
id := int64(0)
return &id
}(),
ProjectID: optional.Some(int64(0)),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
@ -342,10 +336,7 @@ var cases = []*testIndexerCase{
Paginator: &db.ListOptions{
PageSize: 5,
},
ProjectBoardID: func() *int64 {
id := int64(1)
return &id
}(),
ProjectBoardID: optional.Some(int64(1)),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
@ -363,10 +354,7 @@ var cases = []*testIndexerCase{
Paginator: &db.ListOptions{
PageSize: 5,
},
ProjectBoardID: func() *int64 {
id := int64(0)
return &id
}(),
ProjectBoardID: optional.Some(int64(0)),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
@ -384,10 +372,7 @@ var cases = []*testIndexerCase{
Paginator: &db.ListOptions{
PageSize: 5,
},
PosterID: func() *int64 {
id := int64(1)
return &id
}(),
PosterID: optional.Some(int64(1)),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
@ -405,10 +390,7 @@ var cases = []*testIndexerCase{
Paginator: &db.ListOptions{
PageSize: 5,
},
AssigneeID: func() *int64 {
id := int64(1)
return &id
}(),
AssigneeID: optional.Some(int64(1)),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
@ -426,10 +408,7 @@ var cases = []*testIndexerCase{
Paginator: &db.ListOptions{
PageSize: 5,
},
AssigneeID: func() *int64 {
id := int64(0)
return &id
}(),
AssigneeID: optional.Some(int64(0)),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
@ -447,10 +426,7 @@ var cases = []*testIndexerCase{
Paginator: &db.ListOptions{
PageSize: 5,
},
MentionID: func() *int64 {
id := int64(1)
return &id
}(),
MentionID: optional.Some(int64(1)),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
@ -468,10 +444,7 @@ var cases = []*testIndexerCase{
Paginator: &db.ListOptions{
PageSize: 5,
},
ReviewedID: func() *int64 {
id := int64(1)
return &id
}(),
ReviewedID: optional.Some(int64(1)),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
@ -489,10 +462,7 @@ var cases = []*testIndexerCase{
Paginator: &db.ListOptions{
PageSize: 5,
},
ReviewRequestedID: func() *int64 {
id := int64(1)
return &id
}(),
ReviewRequestedID: optional.Some(int64(1)),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
@ -510,10 +480,7 @@ var cases = []*testIndexerCase{
Paginator: &db.ListOptions{
PageSize: 5,
},
SubscriberID: func() *int64 {
id := int64(1)
return &id
}(),
SubscriberID: optional.Some(int64(1)),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
@ -531,14 +498,8 @@ var cases = []*testIndexerCase{
Paginator: &db.ListOptions{
PageSize: 5,
},
UpdatedAfterUnix: func() *int64 {
var t int64 = 20
return &t
}(),
UpdatedBeforeUnix: func() *int64 {
var t int64 = 30
return &t
}(),
UpdatedAfterUnix: optional.Some(int64(20)),
UpdatedBeforeUnix: optional.Some(int64(30)),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))

View File

@ -6,6 +6,7 @@ package meilisearch
import (
"context"
"errors"
"fmt"
"strconv"
"strings"
@ -170,41 +171,41 @@ func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) (
query.And(inner_meilisearch.NewFilterIn("milestone_id", options.MilestoneIDs...))
}
if options.ProjectID != nil {
query.And(inner_meilisearch.NewFilterEq("project_id", *options.ProjectID))
if options.ProjectID.Has() {
query.And(inner_meilisearch.NewFilterEq("project_id", options.ProjectID.Value()))
}
if options.ProjectBoardID != nil {
query.And(inner_meilisearch.NewFilterEq("project_board_id", *options.ProjectBoardID))
if options.ProjectBoardID.Has() {
query.And(inner_meilisearch.NewFilterEq("project_board_id", options.ProjectBoardID.Value()))
}
if options.PosterID != nil {
query.And(inner_meilisearch.NewFilterEq("poster_id", *options.PosterID))
if options.PosterID.Has() {
query.And(inner_meilisearch.NewFilterEq("poster_id", options.PosterID.Value()))
}
if options.AssigneeID != nil {
query.And(inner_meilisearch.NewFilterEq("assignee_id", *options.AssigneeID))
if options.AssigneeID.Has() {
query.And(inner_meilisearch.NewFilterEq("assignee_id", options.AssigneeID.Value()))
}
if options.MentionID != nil {
query.And(inner_meilisearch.NewFilterEq("mention_ids", *options.MentionID))
if options.MentionID.Has() {
query.And(inner_meilisearch.NewFilterEq("mention_ids", options.MentionID.Value()))
}
if options.ReviewedID != nil {
query.And(inner_meilisearch.NewFilterEq("reviewed_ids", *options.ReviewedID))
if options.ReviewedID.Has() {
query.And(inner_meilisearch.NewFilterEq("reviewed_ids", options.ReviewedID.Value()))
}
if options.ReviewRequestedID != nil {
query.And(inner_meilisearch.NewFilterEq("review_requested_ids", *options.ReviewRequestedID))
if options.ReviewRequestedID.Has() {
query.And(inner_meilisearch.NewFilterEq("review_requested_ids", options.ReviewRequestedID.Value()))
}
if options.SubscriberID != nil {
query.And(inner_meilisearch.NewFilterEq("subscriber_ids", *options.SubscriberID))
if options.SubscriberID.Has() {
query.And(inner_meilisearch.NewFilterEq("subscriber_ids", options.SubscriberID.Value()))
}
if options.UpdatedAfterUnix != nil {
query.And(inner_meilisearch.NewFilterGte("updated_unix", *options.UpdatedAfterUnix))
if options.UpdatedAfterUnix.Has() {
query.And(inner_meilisearch.NewFilterGte("updated_unix", options.UpdatedAfterUnix.Value()))
}
if options.UpdatedBeforeUnix != nil {
query.And(inner_meilisearch.NewFilterLte("updated_unix", *options.UpdatedBeforeUnix))
if options.UpdatedBeforeUnix.Has() {
query.And(inner_meilisearch.NewFilterLte("updated_unix", options.UpdatedBeforeUnix.Value()))
}
if options.SortBy == "" {
@ -217,7 +218,14 @@ func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) (
skip, limit := indexer_internal.ParsePaginator(options.Paginator, maxTotalHits)
searchRes, err := b.inner.Client.Index(b.inner.VersionedIndexName()).Search(options.Keyword, &meilisearch.SearchRequest{
keyword := options.Keyword
if !options.IsFuzzyKeyword {
// to make it non-fuzzy ("typo tolerance" in meilisearch terms), we have to quote the keyword(s)
// https://www.meilisearch.com/docs/reference/api/search#phrase-search
keyword = doubleQuoteKeyword(keyword)
}
searchRes, err := b.inner.Client.Index(b.inner.VersionedIndexName()).Search(keyword, &meilisearch.SearchRequest{
Filter: query.Statement(),
Limit: int64(limit),
Offset: int64(skip),
@ -228,7 +236,7 @@ func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) (
return nil, err
}
hits, err := nonFuzzyWorkaround(searchRes, options.Keyword, options.IsFuzzyKeyword)
hits, err := convertHits(searchRes)
if err != nil {
return nil, err
}
@ -247,11 +255,20 @@ func parseSortBy(sortBy internal.SortBy) string {
return field + ":asc"
}
// nonFuzzyWorkaround is needed as meilisearch does not have an exact search
// and you can only change "typo tolerance" per index. So we have to post-filter the results
// https://www.meilisearch.com/docs/learn/configuration/typo_tolerance#configuring-typo-tolerance
// TODO: remove once https://github.com/orgs/meilisearch/discussions/377 is addressed
func nonFuzzyWorkaround(searchRes *meilisearch.SearchResponse, keyword string, isFuzzy bool) ([]internal.Match, error) {
func doubleQuoteKeyword(k string) string {
kp := strings.Split(k, " ")
parts := 0
for i := range kp {
part := strings.Trim(kp[i], "\"")
if part != "" {
kp[parts] = fmt.Sprintf(`"%s"`, part)
parts++
}
}
return strings.Join(kp[:parts], " ")
}
func convertHits(searchRes *meilisearch.SearchResponse) ([]internal.Match, error) {
hits := make([]internal.Match, 0, len(searchRes.Hits))
for _, hit := range searchRes.Hits {
hit, ok := hit.(map[string]any)
@ -259,61 +276,11 @@ func nonFuzzyWorkaround(searchRes *meilisearch.SearchResponse, keyword string, i
return nil, ErrMalformedResponse
}
if !isFuzzy {
keyword = strings.ToLower(keyword)
// declare a anon func to check if the title, content or at least one comment contains the keyword
found, err := func() (bool, error) {
// check if title match first
title, ok := hit["title"].(string)
if !ok {
return false, ErrMalformedResponse
} else if strings.Contains(strings.ToLower(title), keyword) {
return true, nil
}
// check if content has a match
content, ok := hit["content"].(string)
if !ok {
return false, ErrMalformedResponse
} else if strings.Contains(strings.ToLower(content), keyword) {
return true, nil
}
// now check for each comment if one has a match
// so we first try to cast and skip if there are no comments
comments, ok := hit["comments"].([]any)
if !ok {
return false, ErrMalformedResponse
} else if len(comments) == 0 {
return false, nil
}
// now we iterate over all and report as soon as we detect one match
for i := range comments {
comment, ok := comments[i].(string)
if !ok {
return false, ErrMalformedResponse
}
if strings.Contains(strings.ToLower(comment), keyword) {
return true, nil
}
}
// we got no match
return false, nil
}()
if err != nil {
return nil, err
} else if !found {
continue
}
}
issueID, ok := hit["id"].(float64)
if !ok {
return nil, ErrMalformedResponse
}
hits = append(hits, internal.Match{
ID: int64(issueID),
})

View File

@ -53,11 +53,10 @@ func TestMeilisearchIndexer(t *testing.T) {
tests.TestIndexer(t, indexer)
}
func TestNonFuzzyWorkaround(t *testing.T) {
// get unexpected return
_, err := nonFuzzyWorkaround(&meilisearch.SearchResponse{
func TestConvertHits(t *testing.T) {
_, err := convertHits(&meilisearch.SearchResponse{
Hits: []any{"aa", "bb", "cc", "dd"},
}, "bowling", false)
})
assert.ErrorIs(t, err, ErrMalformedResponse)
validResponse := &meilisearch.SearchResponse{
@ -82,14 +81,15 @@ func TestNonFuzzyWorkaround(t *testing.T) {
},
},
}
// nonFuzzy
hits, err := nonFuzzyWorkaround(validResponse, "bowling", false)
assert.NoError(t, err)
assert.EqualValues(t, []internal.Match{{ID: 11}, {ID: 22}}, hits)
// fuzzy
hits, err = nonFuzzyWorkaround(validResponse, "bowling", true)
hits, err := convertHits(validResponse)
assert.NoError(t, err)
assert.EqualValues(t, []internal.Match{{ID: 11}, {ID: 22}, {ID: 33}}, hits)
}
func TestDoubleQuoteKeyword(t *testing.T) {
assert.EqualValues(t, "", doubleQuoteKeyword(""))
assert.EqualValues(t, `"a" "b" "c"`, doubleQuoteKeyword("a b c"))
assert.EqualValues(t, `"a" "d" "g"`, doubleQuoteKeyword("a d g"))
assert.EqualValues(t, `"a" "d" "g"`, doubleQuoteKeyword("a d g"))
assert.EqualValues(t, `"a" "d" "g"`, doubleQuoteKeyword(`a "" "d" """g`))
}

View File

@ -6,6 +6,7 @@ package markup
import (
"bufio"
"bytes"
"fmt"
"html"
"io"
"regexp"
@ -77,29 +78,66 @@ func writeField(w io.Writer, element, class, field string) error {
}
// Render implements markup.Renderer
func (Renderer) Render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error {
func (r Renderer) Render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error {
tmpBlock := bufio.NewWriter(output)
maxSize := setting.UI.CSV.MaxFileSize
// FIXME: don't read all to memory
rawBytes, err := io.ReadAll(input)
if maxSize == 0 {
return r.tableRender(ctx, input, tmpBlock)
}
rawBytes, err := io.ReadAll(io.LimitReader(input, maxSize+1))
if err != nil {
return err
}
if setting.UI.CSV.MaxFileSize != 0 && setting.UI.CSV.MaxFileSize < int64(len(rawBytes)) {
if _, err := tmpBlock.WriteString("<pre>"); err != nil {
return err
}
if _, err := tmpBlock.WriteString(html.EscapeString(string(rawBytes))); err != nil {
return err
}
if _, err := tmpBlock.WriteString("</pre>"); err != nil {
return err
}
return tmpBlock.Flush()
if int64(len(rawBytes)) <= maxSize {
return r.tableRender(ctx, bytes.NewReader(rawBytes), tmpBlock)
}
return r.fallbackRender(io.MultiReader(bytes.NewReader(rawBytes), input), tmpBlock)
}
func (Renderer) fallbackRender(input io.Reader, tmpBlock *bufio.Writer) error {
_, err := tmpBlock.WriteString("<pre>")
if err != nil {
return err
}
rd, err := csv.CreateReaderAndDetermineDelimiter(ctx, bytes.NewReader(rawBytes))
scan := bufio.NewScanner(input)
scan.Split(bufio.ScanRunes)
for scan.Scan() {
switch scan.Text() {
case `&`:
_, err = tmpBlock.WriteString("&amp;")
case `'`:
_, err = tmpBlock.WriteString("&#39;") // "&#39;" is shorter than "&apos;" and apos was not in HTML until HTML5.
case `<`:
_, err = tmpBlock.WriteString("&lt;")
case `>`:
_, err = tmpBlock.WriteString("&gt;")
case `"`:
_, err = tmpBlock.WriteString("&#34;") // "&#34;" is shorter than "&quot;".
default:
_, err = tmpBlock.Write(scan.Bytes())
}
if err != nil {
return err
}
}
err = scan.Err()
if err != nil {
return fmt.Errorf("scan: %w", err)
}
_, err = tmpBlock.WriteString("</pre>")
if err != nil {
return err
}
return tmpBlock.Flush()
}
func (Renderer) tableRender(ctx *markup.RenderContext, input io.Reader, tmpBlock *bufio.Writer) error {
rd, err := csv.CreateReaderAndDetermineDelimiter(ctx, input)
if err != nil {
return err
}
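
The size check in the Render method above avoids buffering unbounded input: it reads at most maxSize+1 bytes, and only if that limit is exceeded does it stitch the already-consumed prefix back together with the unread remainder for the plain-text fallback. A self-contained sketch of that io.LimitReader + io.MultiReader idiom (names here are illustrative):

package main

import (
    "bytes"
    "fmt"
    "io"
    "strings"
)

// renderLimited renders small inputs fully and falls back to streaming for big ones.
func renderLimited(input io.Reader, maxSize int64) (string, error) {
    head, err := io.ReadAll(io.LimitReader(input, maxSize+1))
    if err != nil {
        return "", err
    }
    if int64(len(head)) <= maxSize {
        return fmt.Sprintf("rendered table from %d bytes", len(head)), nil
    }
    // Too big: re-join the consumed prefix with the unread remainder and stream it.
    rest, err := io.ReadAll(io.MultiReader(bytes.NewReader(head), input))
    if err != nil {
        return "", err
    }
    return fmt.Sprintf("fallback <pre> with %d bytes", len(rest)), nil
}

func main() {
    out, _ := renderLimited(strings.NewReader("a,b,c\n1,2,3\n"), 1024)
    fmt.Println(out)
}
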

View File

@ -4,6 +4,8 @@
package markup
import (
"bufio"
"bytes"
"strings"
"testing"
@ -29,4 +31,12 @@ func TestRenderCSV(t *testing.T) {
assert.NoError(t, err)
assert.EqualValues(t, v, buf.String())
}
t.Run("fallbackRender", func(t *testing.T) {
var buf bytes.Buffer
err := render.fallbackRender(strings.NewReader("1,<a>\n2,<b>"), bufio.NewWriter(&buf))
assert.NoError(t, err)
want := "<pre>1,&lt;a&gt;\n2,&lt;b&gt;</pre>"
assert.Equal(t, want, buf.String())
})
}

View File

@ -609,7 +609,7 @@ func mentionProcessor(ctx *RenderContext, node *html.Node) {
if ok && strings.Contains(mention, "/") {
mentionOrgAndTeam := strings.Split(mention, "/")
if mentionOrgAndTeam[0][1:] == ctx.Metas["org"] && strings.Contains(teams, ","+strings.ToLower(mentionOrgAndTeam[1])+",") {
replaceContent(node, loc.Start, loc.End, createLink(util.URLJoin(setting.AppURL, "org", ctx.Metas["org"], "teams", mentionOrgAndTeam[1]), mention, "mention"))
replaceContent(node, loc.Start, loc.End, createLink(util.URLJoin(ctx.Links.Prefix(), "org", ctx.Metas["org"], "teams", mentionOrgAndTeam[1]), mention, "mention"))
node = node.NextSibling.NextSibling
start = 0
continue
@ -620,7 +620,7 @@ func mentionProcessor(ctx *RenderContext, node *html.Node) {
mentionedUsername := mention[1:]
if DefaultProcessorHelper.IsUsernameMentionable != nil && DefaultProcessorHelper.IsUsernameMentionable(ctx.Ctx, mentionedUsername) {
replaceContent(node, loc.Start, loc.End, createLink(util.URLJoin(setting.AppURL, mentionedUsername), mention, "mention"))
replaceContent(node, loc.Start, loc.End, createLink(util.URLJoin(ctx.Links.Prefix(), mentionedUsername), mention, "mention"))
node = node.NextSibling.NextSibling
} else {
node = node.NextSibling
@ -898,9 +898,9 @@ func issueIndexPatternProcessor(ctx *RenderContext, node *html.Node) {
path = "pulls"
}
if ref.Owner == "" {
link = createLink(util.URLJoin(setting.AppURL, ctx.Metas["user"], ctx.Metas["repo"], path, ref.Issue), reftext, "ref-issue")
link = createLink(util.URLJoin(ctx.Links.Prefix(), ctx.Metas["user"], ctx.Metas["repo"], path, ref.Issue), reftext, "ref-issue")
} else {
link = createLink(util.URLJoin(setting.AppURL, ref.Owner, ref.Name, path, ref.Issue), reftext, "ref-issue")
link = createLink(util.URLJoin(ctx.Links.Prefix(), ref.Owner, ref.Name, path, ref.Issue), reftext, "ref-issue")
}
}
@ -939,7 +939,7 @@ func commitCrossReferencePatternProcessor(ctx *RenderContext, node *html.Node) {
}
reftext := ref.Owner + "/" + ref.Name + "@" + base.ShortSha(ref.CommitSha)
link := createLink(util.URLJoin(setting.AppSubURL, ref.Owner, ref.Name, "commit", ref.CommitSha), reftext, "commit")
link := createLink(util.URLJoin(ctx.Links.Prefix(), ref.Owner, ref.Name, "commit", ref.CommitSha), reftext, "commit")
replaceContent(node, ref.RefLocation.Start, ref.RefLocation.End, link)
node = node.NextSibling.NextSibling
@ -1166,7 +1166,7 @@ func hashCurrentPatternProcessor(ctx *RenderContext, node *html.Node) {
continue
}
link := util.URLJoin(setting.AppURL, ctx.Metas["user"], ctx.Metas["repo"], "commit", hash)
link := util.URLJoin(ctx.Links.Prefix(), ctx.Metas["user"], ctx.Metas["repo"], "commit", hash)
replaceContent(node, m[2], m[3], createCodeLink(link, base.ShortSha(hash), "commit"))
start = 0
node = node.NextSibling.NextSibling
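
The processors above switch from the global setting.AppURL to ctx.Links.Prefix(), so generated links follow the render context rather than always being absolute. The shape of that helper is not shown in this diff; a rough sketch of what it presumably looks like, inferred from the AbsolutePrefix flag set in the test hunks that follow:

// Hypothetical shape of the helper the processors rely on (an assumption, not code from this diff).
func (l Links) Prefix() string {
    if l.AbsolutePrefix {
        return setting.AppURL // e.g. "https://example.com/"
    }
    return setting.AppSubURL // e.g. "/sub"
}
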

View File

@ -287,6 +287,7 @@ func TestRender_IssueIndexPattern_Document(t *testing.T) {
}
func testRenderIssueIndexPattern(t *testing.T, input, expected string, ctx *RenderContext) {
ctx.Links.AbsolutePrefix = true
if ctx.Links.Base == "" {
ctx.Links.Base = TestRepoURL
}

View File

@ -43,7 +43,8 @@ func TestRender_Commits(t *testing.T) {
Ctx: git.DefaultContext,
RelativePath: ".md",
Links: markup.Links{
Base: markup.TestRepoURL,
AbsolutePrefix: true,
Base: markup.TestRepoURL,
},
Metas: localMetas,
}, input)
@ -96,7 +97,8 @@ func TestRender_CrossReferences(t *testing.T) {
Ctx: git.DefaultContext,
RelativePath: "a.md",
Links: markup.Links{
Base: setting.AppSubURL,
AbsolutePrefix: true,
Base: setting.AppSubURL,
},
Metas: localMetas,
}, input)
@ -588,7 +590,8 @@ func TestPostProcess_RenderDocument(t *testing.T) {
err := markup.PostProcess(&markup.RenderContext{
Ctx: git.DefaultContext,
Links: markup.Links{
Base: "https://example.com",
AbsolutePrefix: true,
Base: "https://example.com",
},
Metas: localMetas,
}, strings.NewReader(input), &res)

View File

@ -175,13 +175,6 @@ func NewColorPreview(color []byte) *ColorPreview {
}
}
// IsColorPreview returns true if the given node implements the ColorPreview interface,
// otherwise false.
func IsColorPreview(node ast.Node) bool {
_, ok := node.(*ColorPreview)
return ok
}
// Attention is an inline for an attention
type Attention struct {
ast.BaseInline

Some files were not shown because too many files have changed in this diff