Merge branch 'main' into lunny/automerge_support_delete_branch

Lunny Xiao 2024-12-16 16:20:10 -08:00 committed by GitHub
commit 80110bec30
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
424 changed files with 8537 additions and 5007 deletions

View File

@ -16,10 +16,10 @@ parserOptions:
parser: "@typescript-eslint/parser" # for vue plugin - https://eslint.vuejs.org/user-guide/#how-to-use-a-custom-parser
settings:
import/extensions: [".js", ".ts"]
import/parsers:
import-x/extensions: [".js", ".ts"]
import-x/parsers:
"@typescript-eslint/parser": [".js", ".ts"]
import/resolver:
import-x/resolver:
typescript: true
plugins:
@ -28,7 +28,7 @@ plugins:
- "@typescript-eslint/eslint-plugin"
- eslint-plugin-array-func
- eslint-plugin-github
- eslint-plugin-i
- eslint-plugin-import-x
- eslint-plugin-no-jquery
- eslint-plugin-no-use-extend-native
- eslint-plugin-regexp
@ -58,15 +58,15 @@ overrides:
no-restricted-globals: [2, addEventListener, blur, close, closed, confirm, defaultStatus, defaultstatus, error, event, external, find, focus, frameElement, frames, history, innerHeight, innerWidth, isFinite, isNaN, length, locationbar, menubar, moveBy, moveTo, name, onblur, onerror, onfocus, onload, onresize, onunload, open, opener, opera, outerHeight, outerWidth, pageXOffset, pageYOffset, parent, print, removeEventListener, resizeBy, resizeTo, screen, screenLeft, screenTop, screenX, screenY, scroll, scrollbars, scrollBy, scrollTo, scrollX, scrollY, status, statusbar, stop, toolbar, top]
- files: ["*.config.*"]
rules:
i/no-unused-modules: [0]
import-x/no-unused-modules: [0]
- files: ["**/*.d.ts"]
rules:
i/no-unused-modules: [0]
import-x/no-unused-modules: [0]
"@typescript-eslint/consistent-type-definitions": [0]
"@typescript-eslint/consistent-type-imports": [0]
- files: ["web_src/js/types.ts"]
rules:
i/no-unused-modules: [0]
import-x/no-unused-modules: [0]
- files: ["**/*.test.*", "web_src/js/test/setup.ts"]
env:
vitest-globals/env: true
@ -394,49 +394,49 @@ rules:
id-blacklist: [0]
id-length: [0]
id-match: [0]
i/consistent-type-specifier-style: [0]
i/default: [0]
i/dynamic-import-chunkname: [0]
i/export: [2]
i/exports-last: [0]
i/extensions: [2, always, {ignorePackages: true}]
i/first: [2]
i/group-exports: [0]
i/max-dependencies: [0]
i/named: [2]
i/namespace: [0]
i/newline-after-import: [0]
i/no-absolute-path: [0]
i/no-amd: [2]
i/no-anonymous-default-export: [0]
i/no-commonjs: [2]
i/no-cycle: [2, {ignoreExternal: true, maxDepth: 1}]
i/no-default-export: [0]
i/no-deprecated: [0]
i/no-dynamic-require: [0]
i/no-empty-named-blocks: [2]
i/no-extraneous-dependencies: [2]
i/no-import-module-exports: [0]
i/no-internal-modules: [0]
i/no-mutable-exports: [0]
i/no-named-as-default-member: [0]
i/no-named-as-default: [0]
i/no-named-default: [0]
i/no-named-export: [0]
i/no-namespace: [0]
i/no-nodejs-modules: [0]
i/no-relative-packages: [0]
i/no-relative-parent-imports: [0]
i/no-restricted-paths: [0]
i/no-self-import: [2]
i/no-unassigned-import: [0]
i/no-unresolved: [2, {commonjs: true, ignore: ["\\?.+$"]}]
i/no-unused-modules: [2, {unusedExports: true}]
i/no-useless-path-segments: [2, {commonjs: true}]
i/no-webpack-loader-syntax: [2]
i/order: [0]
i/prefer-default-export: [0]
i/unambiguous: [0]
import-x/consistent-type-specifier-style: [0]
import-x/default: [0]
import-x/dynamic-import-chunkname: [0]
import-x/export: [2]
import-x/exports-last: [0]
import-x/extensions: [2, always, {ignorePackages: true}]
import-x/first: [2]
import-x/group-exports: [0]
import-x/max-dependencies: [0]
import-x/named: [2]
import-x/namespace: [0]
import-x/newline-after-import: [0]
import-x/no-absolute-path: [0]
import-x/no-amd: [2]
import-x/no-anonymous-default-export: [0]
import-x/no-commonjs: [2]
import-x/no-cycle: [2, {ignoreExternal: true, maxDepth: 1}]
import-x/no-default-export: [0]
import-x/no-deprecated: [0]
import-x/no-dynamic-require: [0]
import-x/no-empty-named-blocks: [2]
import-x/no-extraneous-dependencies: [2]
import-x/no-import-module-exports: [0]
import-x/no-internal-modules: [0]
import-x/no-mutable-exports: [0]
import-x/no-named-as-default-member: [0]
import-x/no-named-as-default: [0]
import-x/no-named-default: [0]
import-x/no-named-export: [0]
import-x/no-namespace: [0]
import-x/no-nodejs-modules: [0]
import-x/no-relative-packages: [0]
import-x/no-relative-parent-imports: [0]
import-x/no-restricted-paths: [0]
import-x/no-self-import: [2]
import-x/no-unassigned-import: [0]
import-x/no-unresolved: [2, {commonjs: true, ignore: ["\\?.+$"]}]
import-x/no-unused-modules: [2, {unusedExports: true}]
import-x/no-useless-path-segments: [2, {commonjs: true}]
import-x/no-webpack-loader-syntax: [2]
import-x/order: [0]
import-x/prefer-default-export: [0]
import-x/unambiguous: [0]
init-declarations: [0]
line-comment-position: [0]
logical-assignment-operators: [0]
@ -818,7 +818,7 @@ rules:
unicorn/consistent-destructuring: [2]
unicorn/consistent-empty-array-spread: [2]
unicorn/consistent-existence-index-check: [0]
unicorn/consistent-function-scoping: [2]
unicorn/consistent-function-scoping: [0]
unicorn/custom-error-definition: [0]
unicorn/empty-brace-spaces: [2]
unicorn/error-message: [0]

View File

@ -3,3 +3,5 @@ self-hosted-runner:
- actuated-4cpu-8gb
- actuated-4cpu-16gb
- nscloud
- namespace-profile-gitea-release-docker
- namespace-profile-gitea-release-binary

View File

@ -10,7 +10,7 @@ concurrency:
jobs:
nightly-binary:
runs-on: nscloud
runs-on: namespace-profile-gitea-release-binary
steps:
- uses: actions/checkout@v4
# fetch all commits instead of only the last as some branches are long lived and could have many between versions
@ -58,7 +58,7 @@ jobs:
run: |
aws s3 sync dist/release s3://${{ secrets.AWS_S3_BUCKET }}/gitea/${{ steps.clean_name.outputs.branch }} --no-progress
nightly-docker-rootful:
runs-on: ubuntu-latest
runs-on: namespace-profile-gitea-release-docker
steps:
- uses: actions/checkout@v4
# fetch all commits instead of only the last as some branches are long lived and could have many between versions
@ -95,7 +95,7 @@ jobs:
push: true
tags: gitea/gitea:${{ steps.clean_name.outputs.branch }}
nightly-docker-rootless:
runs-on: ubuntu-latest
runs-on: namespace-profile-gitea-release-docker
steps:
- uses: actions/checkout@v4
# fetch all commits instead of only the last as some branches are long lived and could have many between versions

View File

@ -11,7 +11,7 @@ concurrency:
jobs:
binary:
runs-on: nscloud
runs-on: namespace-profile-gitea-release-binary
steps:
- uses: actions/checkout@v4
# fetch all commits instead of only the last as some branches are long lived and could have many between versions
@ -68,7 +68,7 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}
docker-rootful:
runs-on: ubuntu-latest
runs-on: namespace-profile-gitea-release-docker
steps:
- uses: actions/checkout@v4
# fetch all commits instead of only the last as some branches are long lived and could have many between versions
@ -99,7 +99,7 @@ jobs:
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
docker-rootless:
runs-on: ubuntu-latest
runs-on: namespace-profile-gitea-release-docker
steps:
- uses: actions/checkout@v4
# fetch all commits instead of only the last as some branches are long lived and could have many between versions

View File

@ -13,7 +13,7 @@ concurrency:
jobs:
binary:
runs-on: nscloud
runs-on: namespace-profile-gitea-release-binary
steps:
- uses: actions/checkout@v4
# fetch all commits instead of only the last as some branches are long lived and could have many between versions
@ -70,7 +70,7 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}
docker-rootful:
runs-on: ubuntu-latest
runs-on: namespace-profile-gitea-release-docker
steps:
- uses: actions/checkout@v4
# fetch all commits instead of only the last as some branches are long lived and could have many between versions
@ -105,7 +105,7 @@ jobs:
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
docker-rootless:
runs-on: ubuntu-latest
runs-on: namespace-profile-gitea-release-docker
steps:
- uses: actions/checkout@v4
# fetch all commits instead of only the last as some branches are long lived and could have many between versions

2
.gitignore vendored
View File

@ -28,7 +28,7 @@ _testmain.go
*.exe
*.test
*.prof
*.tsbuildInfo
*.tsbuildinfo
*coverage.out
coverage.all

View File

@ -19,6 +19,8 @@ linters:
- revive
- staticcheck
- stylecheck
- tenv
- testifylint
- typecheck
- unconvert
- unused
@ -34,6 +36,10 @@ output:
show-stats: true
linters-settings:
testifylint:
disable:
- go-require
- require-error
stylecheck:
checks: ["all", "-ST1005", "-ST1003"]
nakedret:
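
The newly enabled testifylint linter (with its go-require and require-error checks disabled here) is what motivates the assertion-argument reordering visible in the Go test diffs later in this commit. A minimal sketch of the expected-actual ordering it enforces, assuming the standard testify assert API; the double helper is purely illustrative:

package example

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

// double is a hypothetical function under test.
func double(x int) int { return x * 2 }

func TestDouble(t *testing.T) {
	got := double(2)
	// testifylint wants the expected value first and the actual value second:
	assert.Equal(t, 4, got) // ok
	// assert.Equal(t, got, 4) // flagged: expected and actual are swapped
}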

View File

@ -4,6 +4,399 @@ This changelog goes through the changes that have been made in each release
without substantial changes to our git log; to see the highlights of what has
been added to each release, please refer to the [blog](https://blog.gitea.com).
## [1.22.4](https://github.com/go-gitea/gitea/releases/tag/v1.22.4) - 2024-11-14
* SECURITY
* Fix basic auth with webauthn (#32531) (#32536)
* Refactor internal routers (partial backport, auth token const time comparing) (#32473) (#32479)
* PERFORMANCE
* Remove transaction for archive download (#32186) (#32520)
* BUGFIXES
* Fix `missing signature key` error when pulling Docker images with `SERVE_DIRECT` enabled (#32365) (#32397)
* Fix get reviewers fails when selecting user without pull request permissions unit (#32415) (#32616)
* Fix adding index files to tmp directory (#32360) (#32593)
* Fix PR creation on forked repositories via API (#31863) (#32591)
* Fix missing menu tabs in organization project view page (#32313) (#32592)
* Support HTTP POST requests to `/userinfo`, aligning to OpenID Core specification (#32578) (#32594)
* Fix debian package clean up cron job (#32351) (#32590)
* Fix GetInactiveUsers (#32540) (#32588)
* Allow the actions user to login via the jwt token (#32527) (#32580)
* Fix submodule parsing (#32571) (#32577)
* Refactor find forks and fix possible bugs that weaken permissions check (#32528) (#32547)
* Fix some places that don't respect org full name setting (#32243) (#32550)
* Refactor push mirror find and add check for updating push mirror (#32539) (#32549)
* Fix basic auth with webauthn (#32531) (#32536)
* Fix artifact v4 upload above 8MB (#31664) (#32523)
* Fix oauth2 error handle not return immediately (#32514) (#32516)
* Fix action not triggered when commit message is too long (#32498) (#32507)
* Fix `GetRepoLink` nil pointer dereference on dashboard feed page when repo is deleted with actions enabled (#32501) (#32502)
* Fix `missing signature key` error when pulling Docker images with `SERVE_DIRECT` enabled (#32397) (#32397)
* Fix the permission check for user search API and limit the number of returned users for `/user/search` (#32310)
* Fix SearchIssues swagger docs (#32208) (#32298)
* Fix dropdown content overflow (#31610) (#32250)
* Disable Oauth check if oauth disabled (#32368) (#32480)
* Respect renamed dependencies of Cargo registry (#32430) (#32478)
* Fix mermaid diagram height when initially hidden (#32457) (#32464)
* Fix broken releases when re-pushing tags (#32435) (#32449)
* Only provide the commit summary for Discord webhook push events (#32432) (#32447)
* Only query team tables if repository is under org when getting assignees (#32414) (#32426)
* Fix created_unix for mirroring (#32342) (#32406)
* Respect UI.ExploreDefaultSort setting again (#32357) (#32385)
* Fix broken image when editing comment with non-image attachments (#32319) (#32345)
* Fix disable 2fa bug (#32320) (#32330)
* Always update expiration time when creating an artifact (#32281) (#32285)
* Fix null errors on conversation holder (#32258) (#32266) (#32282)
* Only rename a user when they should receive a different name (#32247) (#32249)
* Fix checkbox bug on private/archive filter (#32236) (#32240)
* Add a doctor check to disable the "Actions" unit for mirrors (#32424) (#32497)
* Quick fix milestone deadline 9999 (#32423)
* Make `show stats` work when only one file changed (#32244) (#32268)
* Make `owner/repo/pulls` handlers use "PR reader" permission (#32254) (#32265)
* Update scheduled tasks even if changes are pushed by "ActionsUser" (#32246) (#32252)
* MISC
* Remove unnecessary code: `GetPushMirrorsByRepoID` called on all repo pages (#32560) (#32567)
* Improve some sanitizer rules (#32534)
* Update nix development environment for v1.22.x (#32495)
* Add warn log when deleting inactive users (#32318) (#32321)
* Update github.com/go-enry/go-enry to v2.9.1 (#32295) (#32296)
* Warn users when they try to use a non-root-url to sign in/up (#32272) (#32273)
## [1.22.3](https://github.com/go-gitea/gitea/releases/tag/v1.22.3) - 2024-10-08
* SECURITY
* Fix bug when a token is given public only (#32204) (#32218)
* PERFORMANCE
* Increase `cacheContextLifetime` to reduce false reports (#32011) (#32023)
* Don't join repository when loading action table data (#32127) (#32143)
* BUGFIXES
* Fix javascript error when an anonymous user visits migration page (#32144) (#32179)
* Don't init signing keys if oauth2 provider is disabled (#32177)
* Fix wrong status of `Set up Job` when first step is skipped (#32120) (#32125)
* Fix bug when deleting a migrated branch (#32075) (#32123)
* Truncate commit message during Discord webhook push events (#31970) (#32121)
* Allow to set branch protection in an empty repository (#32095) (#32119)
* Fix panic when cloning with wrong ssh format. (#32076) (#32118)
* Fix rename branch permission bug (#32066) (#32108)
* Fix: database not update release when using `git push --tags --force` (#32040) (#32074)
* Add missing comment reply handling (#32050) (#32065)
* Do not escape relative path in RPM primary index (#32038) (#32054)
* Fix `/repos/{owner}/{repo}/pulls/{index}/files` endpoint not populating `previous_filename` (#32017) (#32028)
* Support allowed hosts for migrations to work with proxy (#32025) (#32026)
* Fix the logic of finding the latest pull review commit ID (#32139) (#32165)
* Fix bug in getting merged pull request by commit (#32079) (#32117)
* Fix wrong last modify time (#32102) (#32104)
* Fix incorrect `/tokens` api (#32085) (#32092)
* Handle invalid target when creating releases using API (#31841) (#32043)
* Check if the `due_date` is nil when editing issues (#32035) (#32042)
* Fix container parallel upload bugs (#32022)
* Fixed race condition when deleting documents by repoId in ElasticSearch (#32185) (#32188)
* Refactor CSRF protector (#32057) (#32069)
* Fix Bug in Issue/pulls list (#32081) (#32115)
* Include collaboration repositories on dashboard source/forks/mirrors list (#31946) (#32122)
* Add null check for responseData.invalidTopics (#32212) (#32217)
* TESTING
* Fix mssql ci with a new mssql version on ci (#32094)
* MISC
* Upgrade some dependencies include minio-go (#32166)
* Add bin to Composer Metadata (#32099) (#32106)
* Lazy load avatar images (#32051) (#32063)
* Upgrade cache to v0.2.1 (#32003) (#32009)
## [1.22.2](https://github.com/go-gitea/gitea/releases/tag/v1.22.2) - 2024-08-28
* SECURITY
* Replace v-html with v-text in search inputbox (#31966) (#31973)
* Fix nuget/conan/container packages upload bugs (#31967) (#31982)
* PERFORMANCE
* Refactor the usage of batch catfile (#31754) (#31889)
* BUGFIXES
* Fix overflowing content in action run log (#31842) (#31853)
* Scroll images in project issues separately from the remaining issue (#31683) (#31823)
* Add `:focus-visible` style to buttons (#31799) (#31819)
* Fix the display of project type for deleted projects (#31732) (#31734)
* Fix API owner ID should be zero when created repo secret (#31715) (#31811)
* Set owner id to zero when GetRegistrationToken for repo (#31725) (#31729)
* Fix API endpoint for registration-token (#31722) (#31728)
* Add permission check when creating PR (#31033) (#31720)
* Don't return 500 if mirror url contains special chars (#31859) (#31895)
* Fix agit automerge (#31207) (#31881)
* Add CfTurnstileSitekey context data to all captcha templates (#31874) (#31876)
* Avoid returning without written ctx when posting PR (#31843) (#31848)
* Fix raw wiki links (#31825) (#31845)
* Fix panic of ssh public key page after deletion of auth source (#31829) (#31836)
* Fixes for unreachable project issues when transfer repository from organization (#31770) (#31828)
* Show lock owner instead of repo owner on LFS setting page (#31788) (#31817)
* Fix `IsObjectExist` with gogit (#31790) (#31806)
* Fix protected branch files detection on pre_receive hook (#31778) (#31796)
* Add `TAGS` to `TEST_TAGS` and fix bugs found with gogit (#31791) (#31795)
* Rename head branch of pull requests when renaming a branch (#31759) (#31774)
* Fix wiki revision pagination (#31760) (#31772)
* Bump vue-bar-graph (#31705) (#31753)
* Distinguish LFS object errors to ignore missing objects during migration (#31702) (#31745)
* Make GetRepositoryByName more safer (#31712) (#31718)
* Fix a branch divergence cache bug (#31659) (#31661)
* Allow org team names of length 255 in create team form (#31564) (#31603)
* Use old behavior for telegram webhook (#31588)
* Bug fix for translation in ru (#31892)
* Fix actions notify bug (#31866) (#31875)
* Fix the component of access token list not mounted (#31824) (#31868)
* Add missing repository type filter parameters to pager (#31832) (#31837)
* Fix dates displaying in a wrong manner when we're close to the end of… (#31750)
* Fix "Filter by commit" Dropdown (#31695) (#31696)
* Properly filter issue list given no assignees filter (#31522) (#31685)
* Prevent update pull refs manually and will not affect other refs update (#31931)(#31955)
* Fix sort order for organization home and user profile page (#31921) (#31922)
* Fix search team (#31923) (#31942)
* Fix 500 error when state params is set when editing issue/PR by API (#31880) (#31952)
* Fix index too many file names bug (#31903) (#31953)
* Add lock for parallel maven upload (#31851) (#31954)
* MISC
* Remove "dsa-1024" testcases from Test_SSHParsePublicKey and Test_calcFingerprint (#31905) (#31914)
* Upgrade bleve to 2.4.2 (#31894)
* Remove unnecessary uses of `word-break: break-all` (#31637) (#31652)
* Return an empty string when a repo has no avatar in the repo API (#31187) (#31567)
* Upgrade micromatch to 4.0.8 (#31944)
* Update webpack to 5.94.0 (#31941)
## [1.22.1](https://github.com/go-gitea/gitea/releases/tag/v1.22.1) - 2024-07-04
* SECURITY
* Add replacement module for `mholt/archiver` (#31267) (#31270)
* API
* Fix missing images in editor preview due to wrong links (#31299) (#31393)
* Fix duplicate sub-path for avatars (#31365) (#31368)
* Reduce memory usage for chunked artifact uploads to MinIO (#31325) (#31338)
* Remove sub-path from container registry realm (#31293) (#31300)
* Fix NuGet Package API for $filter with Id equality (#31188) (#31242)
* Add an immutable tarball link to archive download headers for Nix (#31139) (#31145)
* Add missed return after `ctx.ServerError` (#31130) (#31133)
* BUGFIXES
* Fix avatar radius problem on the new issue page (#31506) (#31508)
* Fix overflow menu flickering on mobile (#31484) (#31488)
* Fix poor table column width due to breaking words (#31473) (#31477)
* Support relative paths to videos from Wiki pages (#31061) (#31453)
* Fix new issue/pr avatar (#31419) (#31424)
* Increase max length of org team names from 30 to 255 characters (#31410) (#31421)
* Fix line number width in code preview (#31307) (#31316)
* Optimize runner-tags layout to enhance visual experience (#31258) (#31263)
* Fix overflow on push notification (#31179) (#31238)
* Fix overflow on notifications (#31178) (#31237)
* Fix overflow in issue card (#31203) (#31225)
* Split sanitizer functions and fine-tune some tests (#31192) (#31200)
* use correct l10n string (#31487) (#31490)
* Fix dropzone JS error when attachment is disabled (#31486)
* Fix web notification icon not updated once you read all notifications (#31447) (#31466)
* Switch to "Write" tab when edit comment again (#31445) (#31461)
* Fix the link for .git-blame-ignore-revs bypass (#31432) (#31442)
* Fix the wrong line number in the diff view page when expanded twice. (#31431) (#31440)
* Fix labels and projects menu overflow on issue page (#31435) (#31439)
* Fix Account Linking UpdateMigrationsByType (#31428) (#31434)
* Fix markdown math brackets render problem (#31420) (#31430)
* Fix rendered wiki page link (#31398) (#31407)
* Fix natural sort (#31384) (#31394)
* Allow downloading attachments of draft releases (#31369) (#31380)
* Fix repo graph JS (#31377)
* Fix incorrect localization `explorer.go` (#31348) (#31350)
* Fix hash render end with colon (#31319) (#31346)
* Fix line number widths (#31341) (#31343)
* Fix navbar `+` menu flashing on page load (#31281) (#31342)
* Fix adopt repository has empty object name in database (#31333) (#31335)
* Delete legacy cookie before setting new cookie (#31306) (#31317)
* Fix some URLs whose sub-path is missing (#31289) (#31292)
* Fix admin oauth2 custom URL settings (#31246) (#31247)
* Make pasted "img" tag has the same behavior as markdown image (#31235) (#31243)
* Fix agit checkout command line hint & fix ShowMergeInstructions checking (#31219) (#31222)
* Fix the possible migration failure on 286 with postgres 16 (#31209) (#31218)
* Fix branch order (#31174) (#31193)
* Fix markup preview (#31158) (#31166)
* Fix push multiple branches error with tests (#31151) (#31153)
* Fix API repository object format missed (#31118) (#31132)
* Fix missing memcache import (#31105) (#31109)
* Upgrade `github.com/hashicorp/go-retryablehttp` (#31499)
* Fix double border in system status table (#31363) (#31401)
* Fix bug filtering issues which have no project (#31337) (#31367)
* Fix #31185 try fix lfs download from bitbucket failed (#31201) (#31329)
* Add nix flake for dev shell (#30967) (#31310)
* Fix and clean up `ConfirmModal` (#31283) (#31291)
* Optimize repo-list layout to enhance visual experience (#31272) (#31276)
* fixed the dropdown menu for the top New button to expand to the left (#31273) (#31275)
* Fix Activity Page Contributors dropdown (#31264) (#31269)
* fix: allow actions artifacts storage migration to complete successfully (#31251) (#31257)
* Make blockquote attention recognize more syntaxes (#31240) (#31250)
* Remove .segment from .project-column (#31204) (#31239)
* Ignore FindRecentlyPushedNewBranches err (#31164) (#31171)
* Use vertical layout for multiple code expander buttons (#31122) (#31152)
* Remove duplicate `ProxyPreserveHost` in Apache httpd doc (#31143) (#31147)
* Improve mobile review ui (#31091) (#31136)
* Fix DashboardRepoList margin (#31121) (#31128)
* Update pip related commands for docker (#31106) (#31111)
## [1.22.0](https://github.com/go-gitea/gitea/releases/tag/v1.22.0) - 2024-05-27
This release stands as a monumental milestone in our development journey with a record-breaking incorporation of [1528](https://github.com/go-gitea/gitea/pulls?q=is%3Apr+milestone%3A1.22.0+is%3Amerged) pull requests. It marks the most extensive update in Gitea's history, showcasing a plethora of new features and infrastructure improvements.
Noteworthy advancements in this release include the introduction of `HTMX` and `Tailwind`, signaling a strategic shift as we gradually phase out `jquery` and `Fomantic UI`. These changes reflect our commitment to embracing modern technologies and enhancing the user experience.
Key highlights of this release encompass significant changes categorized under `BREAKING`, `FEATURES`, `ENHANCEMENTS`, and `PERFORMANCE`, each contributing to a more robust and efficient Gitea platform.
* BREAKING
* Improve reverse proxy documents and clarify the AppURL guessing behavior (#31003) (#31020)
* Remember log in for a month by default (#30150)
* Breaking summary for template refactoring (#29395)
* All custom templates need to follow these changes
* Recommend/convert to use case-sensitive collation for MySQL/MSSQL (#28662)
* Make offline mode as default to not connect external avatar service by default (#28548)
* Include public repos in the doer's dashboard for issue search (#28304)
* Use restricted sanitizer for repository description (#28141)
* Support storage base path as prefix (#27827)
* Enhanced auth token / remember me (#27606)
* Rename the default themes to `gitea-light`, `gitea-dark`, `gitea-auto` (#27419)
* If you didn't see the new themes, please remove the `[ui].THEMES` config option from `app.ini`
* Require MySQL 8.0, PostgreSQL 12, MSSQL 2012 (#27337)
* FEATURES
* Allow everyone to read or write a wiki by a repo unit setting (#30495)
* Use raw Wiki links for non-renderable Wiki files (#30273)
* Render embedded code preview by permalink in markdown (#30234) (#30249)
* Support repo code search without setting up an indexer (#29998)
* Support pasting URLs over markdown text (#29566)
* Allow to change primary email before account activation (#29412)
* Customizable "Open with" applications for repository clone (#29320)
* Allow options to disable user deletion from the interface on app.ini (#29275)
* Extend issue template YAML engine (#29274)
* Add support for `linguist-detectable` and `linguist-documentation` (#29267)
* Implement code frequency graph (#29191)
* Show commit status for releases (#29149)
* Add user blocking (#29028)
* Actions Artifacts v4 backend (#28965)
* Add merge style `fast-forward-only` (#28954)
* Retarget depending pulls when the parent branch is deleted (#28686)
* Add global setting on how timestamps should be rendered (#28657)
* Implement actions badge SVGs (#28102)
* Add skip ci functionality (#28075)
* Show latest commit for file (#28067)
* Allow to sync tags from the admin dashboard (#28045)
* Add Profile Readme for Organisations (#27955)
* Implement contributors graph (#27882)
* Artifact deletion in actions ui (#27172)
* Add API routes to get runner registration token (#27144)
* Add support for forking single branch (#25821)
* Add support for sha256 repositories (#23894)
* Add admin API route for managing user's badges (#23106)
* ENHANCEMENTS
* Make gitea webhooks openproject compatible (#28435) (#31081)
* Support using label names when changing issue labels (#30943) (#30958)
* Fix various problems around project board view (#30696) (#30902)
* Improve context popup rendering (#30824) (#30829)
* Allow to save empty comment (#30706)
* Prevent allow/reject reviews on merged/closed PRs (#30686)
* Initial support for colorblindness-friendly themes (#30625)
* Some NuGet package enhancements (#30280) (#30324)
* Markup color and font size fixes (#30282) (#30310)
* Show 12 lines in markup code preview (#30255) (#30257)
* Add `[other].SHOW_FOOTER_POWERED_BY` setting to hide `Powered by` (#30253)
* Pulse page improvements (#30149)
* Render code tags in commit messages (#30146)
* Prevent re-review and dismiss review actions on closed and merged PRs (#30065)
* Cancel previous runs of the same PR automatically (#29961)
* Drag-and-drop improvements for projects and issue pins (#29875)
* Add default board to new projects, remove uncategorized pseudo-board (#29874)
* Prevent layout shift in `<overflow-menu>` items (#29831)
* Add skip ci support for pull request title (#29774)
* Add more stats tables (#29730)
* Update API to return 'source_id' for users (#29718)
* Determine fuzziness of bleve indexer by keyword length (#29706)
* Expose fuzzy search for issues/pulls (#29701)
* Put an edit file button on pull request files to allow a quick operation (#29697)
* Fix action runner offline label padding (#29691)
* Update allowed attachment types (#29688)
* Completely style the webkit autofill (#29683)
* Highlight archived labels (#29680)
* Add a warning for disallowed email domains (#29658)
* Set user's 24h preference from their current OS locale (#29651)
* Add setting to disable user features when user login type is not plain (#29615)
* Improve natural sort (#29611)
* Make wiki default branch name changeable (#29603)
* Unify search boxes (#29530)
* Add support for API blob upload of release attachments (#29507)
* Detect broken git hooks (#29494)
* Sync branches to DB immediately when handling git hook calling (#29493)
* Allow options to disable user GPG key configuration from the interface on app.ini (#29486)
* Allow options to disable user SSH key configuration from the interface on app.ini (#29447)
* Use relative links for commits, mentions, and issues in markdown (#29427)
* Add `<overflow-menu>`, rename webcomponents (#29400)
* Include resource state events in Gitlab downloads (#29382)
* Properly migrate target branch change GitLab comment (#29340)
* Recolor dark theme to blue shade (#29283)
* Partially enable MSSQL case-sensitive collation support (#29238)
* Auto-update the system status in the admin dashboard (#29163)
* Integrate alpine `noarch` packages into other architectures index (#29137)
* Document how the TOC election process works (#29135)
* Tweak repo header (#29134)
* Make blockquote border size less aggressive (#29124)
* Downscale pasted PNG images based on metadata (#29123)
* Show `View at this point in history` for every commit (#29122)
* Add support for action artifact serve direct (#29120)
* Change webhook-type in create-view (#29114)
* Drop "@" from the email sender to avoid spam filters (#29109)
* Allow non-admin users to delete review requests (#29057)
* Improve user search display name (#29002)
* Include username in email headers (#28981)
* Show whether a PR is WIP inside popups (#28975)
* Also match weakly validated ETags (#28957)
* Support nuspec manifest download for Nuget packages (#28921)
* Fix hardcoded GitHub icon used as migrated release avatar (#28910)
* Propagate install_if and provider_priority to APKINDEX (#28899)
* Add artifacts v4 JWT to job message and accept it (#28885)
* Enable/disable owner and repo projects independently (#28805)
* Add non-JS fallback for reaction tooltips (#28785)
* Add the ability to see open and closed issues at the same time (#28757)
* Move sign-in labels to be above inputs (#28753)
* Display the latest sync time for pull mirrors on the repo page (#28712)
* Show in Web UI if the file is vendored and generated (#28620)
* Add orphaned topic consistency check (#28507)
* Add branch protection setting for ignoring stale approvals (#28498)
* Add option to set language in admin user view (#28449)
* Fix incorrect run order of action jobs (#28367)
* Add missing exclusive in advanced label options (#28322)
* Added instance-level variables (#28115)
* Add edit option for README.md (#28071)
* Fix link to `Code` tab on wiki commits (#28041)
* Allow to set explore page default sort (#27951)
* Improve PR diff view on mobile (#27883)
* Properly migrate automatic merge GitLab comments (#27873)
* Display issue task list on project cards (#27865)
* Add Index to pull_auto_merge.doer_id (#27811)
* Fix display member unit in the menu bar if there are no hidden members in public org (#27795)
* List all Debian package versions in `Packages` (#27786)
* Allow pull requests Manually Merged option to be used by non-admins (#27780)
* Only show diff file tree when more than one file changed (#27775)
* Show placeholder email in privacy popup (#27770)
* Revamp repo header (#27760)
* Add `must-change-password` command line parameter (#27626)
* Unify password changing and invalidate auth tokens (#27625)
* Add border to file tree 'sub-items' and add padding to 'item-file' (#27593)
* Add slow SQL query warning (#27545)
* Pre-register OAuth application for tea (#27509)
* Differentiate between `push` and `pull` `mirror sync in progress` (#27390)
* Link to file from its history (#27354)
* Add a shortcut to user's profile page to admin user details (#27299)
* Doctor: delete action entries without existing user (#27292)
* Show total TrackedTime on issue/pull/milestone lists (#26672)
* Don't show the new pull request button when the page is not compare pull (#26431)
* Add `Hide/Show all checks` button to commit status check (#26284)
* Improvements of releases list and tags list (#25859)
* PERFORMANCE
* Fix package list performance (#30520) (#30616)
* Add commit status summary table to reduce query from commit status table (#30223)
* Refactor markup/csv: don't read all to memory (#29760)
* Lazy load object format with command line and don't do it in OpenRepository (#29712)
* Add cache for branch divergence on branch list page (#29577)
* Do some performance optimization for issues list and view issue/pull (#29515)
* Cache repository default branch commit status to reduce query on commit status table (#29444)
* Use `crypto/sha256` (#29386)
* Some performance optimization on the dashboard and issues page (#29010)
* Add combined index for issue_user.uid and issue_id (#28080)
## [1.21.11](https://github.com/go-gitea/gitea/releases/tag/v1.21.11) - 2024-04-07
* SECURITY

View File

@ -46,7 +46,6 @@ Wim <wim@42.be> (@42wim)
Jason Song <i@wolfogre.com> (@wolfogre)
Yarden Shoham <git@yardenshoham.com> (@yardenshoham)
Yu Tian <zettat123@gmail.com> (@Zettat123)
Eddie Yang <576951401@qq.com> (@yp05327)
Dong Ge <gedong_1994@163.com> (@sillyguodong)
Xinyi Gong <hestergong@gmail.com> (@HesterG)
wxiaoguang <wxiaoguang@gmail.com> (@wxiaoguang)

View File

@ -28,7 +28,7 @@ XGO_VERSION := go-1.23.x
AIR_PACKAGE ?= github.com/air-verse/air@v1
EDITORCONFIG_CHECKER_PACKAGE ?= github.com/editorconfig-checker/editorconfig-checker/cmd/editorconfig-checker@2.7.0
GOFUMPT_PACKAGE ?= mvdan.cc/gofumpt@v0.7.0
GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/cmd/golangci-lint@v1.60.3
GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/cmd/golangci-lint@v1.62.2
GXZ_PACKAGE ?= github.com/ulikunitz/xz/cmd/gxz@v0.5.11
MISSPELL_PACKAGE ?= github.com/golangci/misspell/cmd/misspell@v0.5.1
SWAGGER_PACKAGE ?= github.com/go-swagger/go-swagger/cmd/swagger@v0.31.0
@ -377,12 +377,12 @@ lint-backend-fix: lint-go-fix lint-go-vet lint-editorconfig
.PHONY: lint-js
lint-js: node_modules
npx eslint --color --max-warnings=0 --ext js,ts,vue $(ESLINT_FILES)
# npx vue-tsc
npx vue-tsc
.PHONY: lint-js-fix
lint-js-fix: node_modules
npx eslint --color --max-warnings=0 --ext js,ts,vue $(ESLINT_FILES) --fix
# npx vue-tsc
npx vue-tsc
.PHONY: lint-css
lint-css: node_modules
@ -451,10 +451,6 @@ lint-templates: .venv node_modules
lint-yaml: .venv
@poetry run yamllint .
.PHONY: tsc
tsc:
npx vue-tsc
.PHONY: watch
watch:
@bash tools/watch.sh

View File

@ -1040,9 +1040,13 @@ LEVEL = Info
;; Don't allow download source archive files from UI
;DISABLE_DOWNLOAD_SOURCE_ARCHIVES = false
;; Allow fork repositories without maximum number limit
;; Allow to fork repositories without maximum number limit
;ALLOW_FORK_WITHOUT_MAXIMUM_LIMIT = true
;; Allow to fork repositories into the same owner (user or organization)
;; This feature is experimental, not fully tested, and may be changed in the future
;ALLOW_FORK_INTO_SAME_OWNER = false
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;[repository.editor]

10
go.mod
View File

@ -48,7 +48,7 @@ require (
github.com/ethantkoenig/rupture v1.0.1
github.com/felixge/fgprof v0.9.5
github.com/fsnotify/fsnotify v1.7.0
github.com/gliderlabs/ssh v0.3.7
github.com/gliderlabs/ssh v0.3.8
github.com/go-ap/activitypub v0.0.0-20240910141749-b4b8c8aa484c
github.com/go-ap/jsonld v0.0.0-20221030091449-f2a191312c73
github.com/go-chi/chi/v5 v5.1.0
@ -121,13 +121,13 @@ require (
github.com/yuin/goldmark v1.7.8
github.com/yuin/goldmark-highlighting/v2 v2.0.0-20230729083705-37449abec8cc
github.com/yuin/goldmark-meta v1.1.0
golang.org/x/crypto v0.28.0
golang.org/x/crypto v0.31.0
golang.org/x/image v0.21.0
golang.org/x/net v0.30.0
golang.org/x/oauth2 v0.23.0
golang.org/x/sync v0.8.0
golang.org/x/sys v0.26.0
golang.org/x/text v0.19.0
golang.org/x/sync v0.10.0
golang.org/x/sys v0.28.0
golang.org/x/text v0.21.0
golang.org/x/tools v0.26.0
google.golang.org/grpc v1.67.1
google.golang.org/protobuf v1.35.1

19
go.sum
View File

@ -293,8 +293,8 @@ github.com/fxamacker/cbor/v2 v2.7.0 h1:iM5WgngdRBanHcxugY4JySA0nk1wZorNOpTgCMedv
github.com/fxamacker/cbor/v2 v2.7.0/go.mod h1:pxXPTn3joSm21Gbwsv0w9OSA2y1HFR9qXEeXQVeNoDQ=
github.com/git-lfs/pktline v0.0.0-20230103162542-ca444d533ef1 h1:mtDjlmloH7ytdblogrMz1/8Hqua1y8B4ID+bh3rvod0=
github.com/git-lfs/pktline v0.0.0-20230103162542-ca444d533ef1/go.mod h1:fenKRzpXDjNpsIBhuhUzvjCKlDjKam0boRAenTE0Q6A=
github.com/gliderlabs/ssh v0.3.7 h1:iV3Bqi942d9huXnzEF2Mt+CY9gLu8DNM4Obd+8bODRE=
github.com/gliderlabs/ssh v0.3.7/go.mod h1:zpHEXBstFnQYtGnB8k8kQLol82umzn/2/snG7alWVD8=
github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c=
github.com/gliderlabs/ssh v0.3.8/go.mod h1:xYoytBv1sV0aL3CavoDuJIQNURXkkfPA/wxQ1pL1fAU=
github.com/glycerine/go-unsnap-stream v0.0.0-20181221182339-f9677308dec2/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE=
github.com/glycerine/goconvey v0.0.0-20190410193231-58a59202ab31/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24=
github.com/go-ap/activitypub v0.0.0-20240910141749-b4b8c8aa484c h1:82lzmsy5Nr6JA6HcLRVxGfbdSoWfW45C6jnY3zFS7Ks=
@ -893,8 +893,9 @@ golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliY
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw=
golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U=
golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY=
golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8=
golang.org/x/image v0.21.0 h1:c5qV36ajHpdj4Qi0GnE0jUc/yuo33OLFaa0d+crTD5s=
@ -946,8 +947,9 @@ golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ=
golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181221143128-b4a75ba826a6/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@ -982,8 +984,9 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo=
golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
@ -996,8 +999,9 @@ golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.25.0 h1:WtHI/ltw4NvSUig5KARz9h521QvRC8RmF/cuYqifU24=
golang.org/x/term v0.25.0/go.mod h1:RPyXicDX+6vLxogjjRxjgD2TKtmAO6NZBsBRfrOLu7M=
golang.org/x/term v0.27.0 h1:WP60Sv1nlK1T6SupCHbXzSaN0b9wUmsPoRS9b61A23Q=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
@ -1009,8 +1013,9 @@ golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM=
golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/time v0.7.0 h1:ntUhktv3OPE6TgYxXWv9vKvUSJyIFJlyohwbkEwPrKQ=
golang.org/x/time v0.7.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=

View File

@ -37,6 +37,7 @@ type ActionRun struct {
TriggerUser *user_model.User `xorm:"-"`
ScheduleID int64
Ref string `xorm:"index"` // the commit/tag/… that caused the run
IsRefDeleted bool `xorm:"-"`
CommitSHA string
IsForkPullRequest bool // If this is triggered by a PR from a forked repository or an untrusted user, we need to check if it is approved and limit permissions when running the workflow.
NeedApproval bool // may need approval if it's a fork pull request

View File

@ -137,7 +137,7 @@ func UpdateRunJob(ctx context.Context, job *ActionRunJob, cond builder.Cond, col
if err != nil {
return 0, err
}
run.Status = aggregateJobStatus(jobs)
run.Status = AggregateJobStatus(jobs)
if run.Started.IsZero() && run.Status.IsRunning() {
run.Started = timeutil.TimeStampNow()
}
@ -152,29 +152,35 @@ func UpdateRunJob(ctx context.Context, job *ActionRunJob, cond builder.Cond, col
return affected, nil
}
func aggregateJobStatus(jobs []*ActionRunJob) Status {
allDone := true
allWaiting := true
hasFailure := false
func AggregateJobStatus(jobs []*ActionRunJob) Status {
allSuccessOrSkipped := len(jobs) != 0
allSkipped := len(jobs) != 0
var hasFailure, hasCancelled, hasWaiting, hasRunning, hasBlocked bool
for _, job := range jobs {
if !job.Status.IsDone() {
allDone = false
}
if job.Status != StatusWaiting && !job.Status.IsDone() {
allWaiting = false
}
if job.Status == StatusFailure || job.Status == StatusCancelled {
hasFailure = true
}
}
if allDone {
if hasFailure {
return StatusFailure
allSuccessOrSkipped = allSuccessOrSkipped && (job.Status == StatusSuccess || job.Status == StatusSkipped)
allSkipped = allSkipped && job.Status == StatusSkipped
hasFailure = hasFailure || job.Status == StatusFailure
hasCancelled = hasCancelled || job.Status == StatusCancelled
hasWaiting = hasWaiting || job.Status == StatusWaiting
hasRunning = hasRunning || job.Status == StatusRunning
hasBlocked = hasBlocked || job.Status == StatusBlocked
}
switch {
case allSkipped:
return StatusSkipped
case allSuccessOrSkipped:
return StatusSuccess
}
if allWaiting {
return StatusWaiting
}
case hasCancelled:
return StatusCancelled
case hasFailure:
return StatusFailure
case hasRunning:
return StatusRunning
case hasWaiting:
return StatusWaiting
case hasBlocked:
return StatusBlocked
default:
return StatusUnknown // it shouldn't happen
}
}
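
A tiny hedged sketch (not part of the commit) of the precedence the rewritten switch gives the job statuses, written as if it lived in the same package as AggregateJobStatus:

// skipped jobs never change the outcome and nothing here is cancelled,
// so the single failure decides the aggregated run status: StatusFailure
func exampleAggregate() Status {
	jobs := []*ActionRunJob{
		{Status: StatusSkipped},
		{Status: StatusSuccess},
		{Status: StatusFailure},
	}
	return AggregateJobStatus(jobs)
}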

View File

@ -0,0 +1,85 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package actions
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestAggregateJobStatus(t *testing.T) {
testStatuses := func(expected Status, statuses []Status) {
t.Helper()
var jobs []*ActionRunJob
for _, v := range statuses {
jobs = append(jobs, &ActionRunJob{Status: v})
}
actual := AggregateJobStatus(jobs)
if !assert.Equal(t, expected, actual) {
var statusStrings []string
for _, s := range statuses {
statusStrings = append(statusStrings, s.String())
}
t.Errorf("AggregateJobStatus(%v) = %v, want %v", statusStrings, statusNames[actual], statusNames[expected])
}
}
cases := []struct {
statuses []Status
expected Status
}{
// unknown cases, maybe it shouldn't happen in real world
{[]Status{}, StatusUnknown},
{[]Status{StatusUnknown, StatusSuccess}, StatusUnknown},
{[]Status{StatusUnknown, StatusSkipped}, StatusUnknown},
{[]Status{StatusUnknown, StatusFailure}, StatusFailure},
{[]Status{StatusUnknown, StatusCancelled}, StatusCancelled},
{[]Status{StatusUnknown, StatusWaiting}, StatusWaiting},
{[]Status{StatusUnknown, StatusRunning}, StatusRunning},
{[]Status{StatusUnknown, StatusBlocked}, StatusBlocked},
// success with other status
{[]Status{StatusSuccess}, StatusSuccess},
{[]Status{StatusSuccess, StatusSkipped}, StatusSuccess}, // skipped doesn't affect success
{[]Status{StatusSuccess, StatusFailure}, StatusFailure},
{[]Status{StatusSuccess, StatusCancelled}, StatusCancelled},
{[]Status{StatusSuccess, StatusWaiting}, StatusWaiting},
{[]Status{StatusSuccess, StatusRunning}, StatusRunning},
{[]Status{StatusSuccess, StatusBlocked}, StatusBlocked},
// any cancelled, then cancelled
{[]Status{StatusCancelled}, StatusCancelled},
{[]Status{StatusCancelled, StatusSuccess}, StatusCancelled},
{[]Status{StatusCancelled, StatusSkipped}, StatusCancelled},
{[]Status{StatusCancelled, StatusFailure}, StatusCancelled},
{[]Status{StatusCancelled, StatusWaiting}, StatusCancelled},
{[]Status{StatusCancelled, StatusRunning}, StatusCancelled},
{[]Status{StatusCancelled, StatusBlocked}, StatusCancelled},
// failure with other status, fail fast
// Should "running" win? Maybe no: old code does make "running" win, but GitHub does fail fast.
{[]Status{StatusFailure}, StatusFailure},
{[]Status{StatusFailure, StatusSuccess}, StatusFailure},
{[]Status{StatusFailure, StatusSkipped}, StatusFailure},
{[]Status{StatusFailure, StatusCancelled}, StatusCancelled},
{[]Status{StatusFailure, StatusWaiting}, StatusFailure},
{[]Status{StatusFailure, StatusRunning}, StatusFailure},
{[]Status{StatusFailure, StatusBlocked}, StatusFailure},
// skipped with other status
// TODO: need to clarify whether a PR with "skipped" job status is considered as "mergeable" or not.
{[]Status{StatusSkipped}, StatusSkipped},
{[]Status{StatusSkipped, StatusSuccess}, StatusSuccess},
{[]Status{StatusSkipped, StatusFailure}, StatusFailure},
{[]Status{StatusSkipped, StatusCancelled}, StatusCancelled},
{[]Status{StatusSkipped, StatusWaiting}, StatusWaiting},
{[]Status{StatusSkipped, StatusRunning}, StatusRunning},
{[]Status{StatusSkipped, StatusBlocked}, StatusBlocked},
}
for _, c := range cases {
testStatuses(c.expected, c.statuses)
}
}

View File

@ -17,7 +17,7 @@ func TestGetLatestRunnerToken(t *testing.T) {
token := unittest.AssertExistsAndLoadBean(t, &ActionRunnerToken{ID: 3})
expectedToken, err := GetLatestRunnerToken(db.DefaultContext, 1, 0)
assert.NoError(t, err)
assert.EqualValues(t, token, expectedToken)
assert.EqualValues(t, expectedToken, token)
}
func TestNewRunnerToken(t *testing.T) {
@ -26,7 +26,7 @@ func TestNewRunnerToken(t *testing.T) {
assert.NoError(t, err)
expectedToken, err := GetLatestRunnerToken(db.DefaultContext, 1, 0)
assert.NoError(t, err)
assert.EqualValues(t, token, expectedToken)
assert.EqualValues(t, expectedToken, token)
}
func TestUpdateRunnerToken(t *testing.T) {
@ -36,5 +36,5 @@ func TestUpdateRunnerToken(t *testing.T) {
assert.NoError(t, UpdateRunnerToken(db.DefaultContext, token))
expectedToken, err := GetLatestRunnerToken(db.DefaultContext, 1, 0)
assert.NoError(t, err)
assert.EqualValues(t, token, expectedToken)
assert.EqualValues(t, expectedToken, token)
}

View File

@ -4,7 +4,6 @@
package activities_test
import (
"fmt"
"testing"
"time"
@ -91,11 +90,11 @@ func TestGetUserHeatmapDataByUser(t *testing.T) {
assert.NoError(t, err)
assert.Len(t, actions, contributions, "invalid action count: did the test data became too old?")
assert.Equal(t, count, int64(contributions))
assert.Equal(t, tc.CountResult, contributions, fmt.Sprintf("testcase '%s'", tc.desc))
assert.Equal(t, tc.CountResult, contributions, "testcase '%s'", tc.desc)
// Test JSON rendering
jsonData, err := json.Marshal(heatmap)
assert.NoError(t, err)
assert.Equal(t, tc.JSONResult, string(jsonData))
assert.JSONEq(t, tc.JSONResult, string(jsonData))
}
}

View File

@ -18,7 +18,7 @@ func TestOAuth2Application_GenerateClientSecret(t *testing.T) {
app := unittest.AssertExistsAndLoadBean(t, &auth_model.OAuth2Application{ID: 1})
secret, err := app.GenerateClientSecret(db.DefaultContext)
assert.NoError(t, err)
assert.True(t, len(secret) > 0)
assert.NotEmpty(t, secret)
unittest.AssertExistsAndLoadBean(t, &auth_model.OAuth2Application{ID: 1, ClientSecret: app.ClientSecret})
}
@ -165,7 +165,7 @@ func TestOAuth2Grant_GenerateNewAuthorizationCode(t *testing.T) {
code, err := grant.GenerateNewAuthorizationCode(db.DefaultContext, "https://example2.com/callback", "CjvyTLSdR47G5zYenDA-eDWW4lRrO8yvjcWwbD_deOg", "S256")
assert.NoError(t, err)
assert.NotNil(t, code)
assert.True(t, len(code.Code) > 32) // secret length > 32
assert.Greater(t, len(code.Code), 32) // secret length > 32
}
func TestOAuth2Grant_TableName(t *testing.T) {

View File

@ -4,7 +4,7 @@
package db // it's not db_test, because this file is for testing the private type halfCommitter
import (
"fmt"
"errors"
"testing"
"github.com/stretchr/testify/assert"
@ -80,7 +80,7 @@ func Test_halfCommitter(t *testing.T) {
testWithCommitter(mockCommitter, func(committer Committer) error {
defer committer.Close()
if true {
return fmt.Errorf("error")
return errors.New("error")
}
return committer.Commit()
})
@ -94,7 +94,7 @@ func Test_halfCommitter(t *testing.T) {
testWithCommitter(mockCommitter, func(committer Committer) error {
committer.Close()
committer.Commit()
return fmt.Errorf("error")
return errors.New("error")
})
mockCommitter.Assert(t)

View File

@ -38,8 +38,6 @@ func TestIterate(t *testing.T) {
if !has {
return db.ErrNotExist{Resource: "repo_unit", ID: repoUnit.ID}
}
assert.EqualValues(t, repoUnit.RepoID, repoUnit.RepoID)
assert.EqualValues(t, repoUnit.CreatedUnix, repoUnit.CreatedUnix)
return nil
})
assert.NoError(t, err)

View File

@ -26,8 +26,10 @@ const (
SearchOrderByForksReverse SearchOrderBy = "num_forks DESC"
)
const (
// Which means a condition to filter the records which don't match any id.
// It's different from zero which means the condition could be ignored.
NoConditionID = -1
)
// NoConditionID means a condition to filter the records which don't match any id.
// eg: "milestone_id=-1" means "find the items without any milestone.
const NoConditionID int64 = -1
// NonExistingID means a condition to match no result (eg: a non-existing user)
// It doesn't use -1 or -2 because they are used as builtin users.
const NonExistingID int64 = -1000000
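
A brief sketch of how the two sentinel values are meant to be read when building a filter; the milestoneCond helper and the column handling are illustrative, not the project's actual search code:

package db // assuming the same package as the constants above

import "xorm.io/builder"

// milestoneID == 0             -> ignore the milestone filter entirely
// milestoneID == NoConditionID -> only items that have no milestone at all
func milestoneCond(milestoneID int64) builder.Cond {
	switch milestoneID {
	case 0:
		return builder.NewCond() // empty condition, dropped when combined with others
	case NoConditionID:
		return builder.Eq{"milestone_id": 0}
	default:
		return builder.Eq{"milestone_id": milestoneID}
	}
}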

View File

@ -36,3 +36,41 @@
updated: 1683636626
need_approval: 0
approved_by: 0
-
id: 793
title: "job output"
repo_id: 4
owner_id: 1
workflow_id: "test.yaml"
index: 189
trigger_user_id: 1
ref: "refs/heads/master"
commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0"
event: "push"
is_fork_pull_request: 0
status: 1
started: 1683636528
stopped: 1683636626
created: 1683636108
updated: 1683636626
need_approval: 0
approved_by: 0
-
id: 794
title: "job output"
repo_id: 4
owner_id: 1
workflow_id: "test.yaml"
index: 190
trigger_user_id: 1
ref: "refs/heads/test"
commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0"
event: "push"
is_fork_pull_request: 0
status: 1
started: 1683636528
stopped: 1683636626
created: 1683636108
updated: 1683636626
need_approval: 0
approved_by: 0

View File

@ -26,3 +26,46 @@
status: 1
started: 1683636528
stopped: 1683636626
-
id: 194
run_id: 793
repo_id: 4
owner_id: 1
commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
is_fork_pull_request: 0
name: job1 (1)
attempt: 1
job_id: job1
task_id: 49
status: 1
started: 1683636528
stopped: 1683636626
-
id: 195
run_id: 793
repo_id: 4
owner_id: 1
commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
is_fork_pull_request: 0
name: job1 (2)
attempt: 1
job_id: job1
task_id: 50
status: 1
started: 1683636528
stopped: 1683636626
-
id: 196
run_id: 793
repo_id: 4
owner_id: 1
commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
is_fork_pull_request: 0
name: job2
attempt: 1
job_id: job2
needs: [job1]
task_id: 51
status: 5
started: 1683636528
stopped: 1683636626

View File

@ -57,3 +57,63 @@
log_length: 707
log_size: 90179
log_expired: 0
-
id: 49
job_id: 194
attempt: 1
runner_id: 1
status: 1 # success
started: 1683636528
stopped: 1683636626
repo_id: 4
owner_id: 1
commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
is_fork_pull_request: 0
token_hash: b8d3962425466b6709b9ac51446f93260c54afe8e7b6d3686e34f991fb8a8953822b0deed86fe41a103f34bc48dbc4784220
token_salt: ffffffffff
token_last_eight: ffffffff
log_filename: artifact-test2/2f/47.log
log_in_storage: 1
log_length: 707
log_size: 90179
log_expired: 0
-
id: 50
job_id: 195
attempt: 1
runner_id: 1
status: 1 # success
started: 1683636528
stopped: 1683636626
repo_id: 4
owner_id: 1
commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
is_fork_pull_request: 0
token_hash: b8d3962425466b6709b9ac51446f93260c54afe8e7b6d3686e34f991fb8a8953822b0deed86fe41a103f34bc48dbc4784221
token_salt: ffffffffff
token_last_eight: ffffffff
log_filename: artifact-test2/2f/47.log
log_in_storage: 1
log_length: 707
log_size: 90179
log_expired: 0
-
id: 51
job_id: 196
attempt: 1
runner_id: 1
status: 6 # running
started: 1683636528
stopped: 1683636626
repo_id: 4
owner_id: 1
commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
is_fork_pull_request: 0
token_hash: b8d3962425466b6709b9ac51446f93260c54afe8e7b6d3686e34f991fb8a8953822b0deed86fe41a103f34bc48dbc4784222
token_salt: ffffffffff
token_last_eight: ffffffff
log_filename: artifact-test2/2f/47.log
log_in_storage: 1
log_length: 707
log_size: 90179
log_expired: 0

View File

@ -0,0 +1,20 @@
-
id: 1
task_id: 49
output_key: output_a
output_value: abc
-
id: 2
task_id: 49
output_key: output_b
output_value: ''
-
id: 3
task_id: 50
output_key: output_a
output_value: ''
-
id: 4
task_id: 50
output_key: output_b
output_value: bbb

View File

@ -81,3 +81,15 @@
is_deleted: false
deleted_by_id: 0
deleted_unix: 0
-
id: 15
repo_id: 4
name: 'master'
commit_id: 'c7cd3cd144e6d23c9d6f3d07e52b2c1a956e0338'
commit_message: 'add Readme'
commit_time: 1588147171
pusher_id: 13
is_deleted: false
deleted_by_id: 0
deleted_unix: 0

View File

@ -4,6 +4,7 @@
reviewer_id: 1
issue_id: 2
content: "Demo Review"
original_author_id: 0
updated_unix: 946684810
created_unix: 946684810
-
@ -12,6 +13,7 @@
reviewer_id: 534543
issue_id: 534543
content: "Invalid Review #1"
original_author_id: 0
updated_unix: 946684810
created_unix: 946684810
-
@ -20,6 +22,7 @@
reviewer_id: 1
issue_id: 343545
content: "Invalid Review #2"
original_author_id: 0
updated_unix: 946684810
created_unix: 946684810
-
@ -28,6 +31,7 @@
reviewer_id: 1
issue_id: 2
content: "Pending Review"
original_author_id: 0
updated_unix: 946684810
created_unix: 946684810
-
@ -36,6 +40,7 @@
reviewer_id: 1
issue_id: 3
content: "New review 1"
original_author_id: 0
updated_unix: 946684810
created_unix: 946684810
-
@ -61,8 +66,8 @@
type: 1
reviewer_id: 4
issue_id: 3
original_author_id: 0
content: "New review 5"
original_author_id: 0
commit_id: 8091a55037cd59e47293aca02981b5a67076b364
stale: true
updated_unix: 946684813
@ -73,9 +78,9 @@
reviewer_id: 2
issue_id: 3
content: "New review 3 rejected"
original_author_id: 0
updated_unix: 946684814
created_unix: 946684814
original_author_id: 0
-
id: 10
@ -83,6 +88,7 @@
reviewer_id: 100
issue_id: 3
content: "a deleted user's review"
original_author_id: 0
official: true
updated_unix: 946684815
created_unix: 946684815
@ -112,6 +118,7 @@
reviewer_id: 5
issue_id: 11
content: "old review from user5"
original_author_id: 0
updated_unix: 946684820
created_unix: 946684820
@ -121,6 +128,7 @@
reviewer_id: 5
issue_id: 11
content: "duplicate review from user5 (latest)"
original_author_id: 0
updated_unix: 946684830
created_unix: 946684830
@ -130,6 +138,7 @@
reviewer_id: 6
issue_id: 11
content: "singular review from org6 and final review for this pr"
original_author_id: 0
updated_unix: 946684831
created_unix: 946684831
@ -139,6 +148,7 @@
reviewer_id: 20
issue_id: 20
content: "review request for user20"
original_author_id: 0
updated_unix: 946684832
created_unix: 946684832
@ -148,6 +158,7 @@
reviewer_id: 20
issue_id: 20
content: "review approved by user20"
original_author_id: 0
updated_unix: 946684833
created_unix: 946684833
@ -158,6 +169,7 @@
reviewer_team_id: 5
issue_id: 20
content: "review request for team5"
original_author_id: 0
updated_unix: 946684834
created_unix: 946684834
@ -168,6 +180,7 @@
reviewer_team_id: 0
issue_id: 20
content: "review request for user15"
original_author_id: 0
updated_unix: 946684835
created_unix: 946684835
@ -177,6 +190,7 @@
reviewer_id: 1
issue_id: 2
content: "Review Comment"
original_author_id: 0
updated_unix: 946684810
created_unix: 946684810
@ -186,6 +200,7 @@
reviewer_id: 5
issue_id: 3
content: "reviewed by user5"
original_author_id: 0
commit_id: 4a357436d925b5c974181ff12a994538ddc5a269
updated_unix: 946684816
created_unix: 946684816
@ -196,5 +211,6 @@
reviewer_id: 5
issue_id: 3
content: "review request for user5"
original_author_id: 0
updated_unix: 946684817
created_unix: 946684817

View File

@ -12,6 +12,7 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unit"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/optional"
@ -169,9 +170,22 @@ func GetBranch(ctx context.Context, repoID int64, branchName string) (*Branch, e
return &branch, nil
}
func GetBranches(ctx context.Context, repoID int64, branchNames []string) ([]*Branch, error) {
func GetBranches(ctx context.Context, repoID int64, branchNames []string, includeDeleted bool) ([]*Branch, error) {
branches := make([]*Branch, 0, len(branchNames))
return branches, db.GetEngine(ctx).Where("repo_id=?", repoID).In("name", branchNames).Find(&branches)
sess := db.GetEngine(ctx).Where("repo_id=?", repoID).In("name", branchNames)
if !includeDeleted {
sess.And("is_deleted=?", false)
}
return branches, sess.Find(&branches)
}
func BranchesToNamesSet(branches []*Branch) container.Set[string] {
names := make(container.Set[string], len(branches))
for _, branch := range branches {
names.Add(branch.Name)
}
return names
}
func AddBranches(ctx context.Context, branches []*Branch) error {
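For reference, a minimal sketch of how a caller might combine the new includeDeleted parameter with the BranchesToNamesSet helper; the package aliases, function name and branch list are illustrative assumptions, not part of this commit.

// example_branch_names.go — illustrative only
package example

import (
	"context"

	git_model "code.gitea.io/gitea/models/git"
	"code.gitea.io/gitea/modules/container"
)

// existingBranchNames returns the names of the requested branches that are not soft-deleted.
func existingBranchNames(ctx context.Context, repoID int64, names []string) (container.Set[string], error) {
	// includeDeleted=false keeps the previous behaviour of ignoring soft-deleted branches.
	branches, err := git_model.GetBranches(ctx, repoID, names, false)
	if err != nil {
		return nil, err
	}
	return git_model.BranchesToNamesSet(branches), nil
}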

View File

@ -34,7 +34,7 @@ func TestGetCommitStatuses(t *testing.T) {
SHA: sha1,
})
assert.NoError(t, err)
assert.Equal(t, int(maxResults), 5)
assert.Equal(t, 5, int(maxResults))
assert.Len(t, statuses, 5)
assert.Equal(t, "ci/awesomeness", statuses[0].Context)
@ -63,7 +63,7 @@ func TestGetCommitStatuses(t *testing.T) {
SHA: sha1,
})
assert.NoError(t, err)
assert.Equal(t, int(maxResults), 5)
assert.Equal(t, 5, int(maxResults))
assert.Empty(t, statuses)
}

View File

@ -4,7 +4,6 @@
package git
import (
"fmt"
"testing"
"code.gitea.io/gitea/models/db"
@ -76,7 +75,7 @@ func TestBranchRuleMatch(t *testing.T) {
infact = " not"
}
assert.EqualValues(t, kase.ExpectedMatch, pb.Match(kase.BranchName),
fmt.Sprintf("%s should%s match %s but it is%s", kase.BranchName, should, kase.Rule, infact),
"%s should%s match %s but it is%s", kase.BranchName, should, kase.Rule, infact,
)
}
}

View File

@ -64,7 +64,7 @@ func TestFetchCodeComments(t *testing.T) {
}
func TestAsCommentType(t *testing.T) {
assert.Equal(t, issues_model.CommentType(0), issues_model.CommentTypeComment)
assert.Equal(t, issues_model.CommentTypeComment, issues_model.CommentType(0))
assert.Equal(t, issues_model.CommentTypeUndefined, issues_model.AsCommentType(""))
assert.Equal(t, issues_model.CommentTypeUndefined, issues_model.AsCommentType("nonsense"))
assert.Equal(t, issues_model.CommentTypeComment, issues_model.AsCommentType("comment"))

View File

@ -125,7 +125,10 @@ type Issue struct {
IsPull bool `xorm:"INDEX"` // Indicates whether this is a pull request or not.
PullRequest *PullRequest `xorm:"-"`
NumComments int
// TODO: RemoveIssueRef: see "repo/issue/branch_selector_field.tmpl"
Ref string
PinOrder int `xorm:"DEFAULT 0"`
DeadlineUnix timeutil.TimeStamp `xorm:"INDEX"`

View File

@ -18,12 +18,12 @@ func RecalculateIssueIndexForRepo(ctx context.Context, repoID int64) error {
}
defer committer.Close()
var max int64
if _, err = db.GetEngine(ctx).Select(" MAX(`index`)").Table("issue").Where("repo_id=?", repoID).Get(&max); err != nil {
var maxIndex int64
if _, err = db.GetEngine(ctx).Select(" MAX(`index`)").Table("issue").Where("repo_id=?", repoID).Get(&maxIndex); err != nil {
return err
}
if err = db.SyncMaxResourceIndex(ctx, "issue_index", repoID, max); err != nil {
if err = db.SyncMaxResourceIndex(ctx, "issue_index", repoID, maxIndex); err != nil {
return err
}

View File

@ -27,8 +27,8 @@ type IssuesOptions struct { //nolint
RepoIDs []int64 // overwrites RepoCond if the length is not 0
AllPublic bool // include also all public repositories
RepoCond builder.Cond
AssigneeID int64
PosterID int64
AssigneeID optional.Option[int64]
PosterID optional.Option[int64]
MentionedID int64
ReviewRequestedID int64
ReviewedID int64
@ -231,15 +231,8 @@ func applyConditions(sess *xorm.Session, opts *IssuesOptions) {
sess.And("issue.is_closed=?", opts.IsClosed.Value())
}
if opts.AssigneeID > 0 {
applyAssigneeCondition(sess, opts.AssigneeID)
} else if opts.AssigneeID == db.NoConditionID {
sess.Where("issue.id NOT IN (SELECT issue_id FROM issue_assignees)")
}
if opts.PosterID > 0 {
applyPosterCondition(sess, opts.PosterID)
}
if opts.MentionedID > 0 {
applyMentionedCondition(sess, opts.MentionedID)
@ -359,13 +352,27 @@ func issuePullAccessibleRepoCond(repoIDstr string, userID int64, org *organizati
return cond
}
func applyAssigneeCondition(sess *xorm.Session, assigneeID int64) {
func applyAssigneeCondition(sess *xorm.Session, assigneeID optional.Option[int64]) {
// old logic: 0 is also treated as "not filtering assignee", because the "assignee" was read as FormInt64
if !assigneeID.Has() || assigneeID.Value() == 0 {
return
}
if assigneeID.Value() == db.NoConditionID {
sess.Where("issue.id NOT IN (SELECT issue_id FROM issue_assignees)")
} else {
sess.Join("INNER", "issue_assignees", "issue.id = issue_assignees.issue_id").
And("issue_assignees.assignee_id = ?", assigneeID)
And("issue_assignees.assignee_id = ?", assigneeID.Value())
}
}
func applyPosterCondition(sess *xorm.Session, posterID int64) {
sess.And("issue.poster_id=?", posterID)
func applyPosterCondition(sess *xorm.Session, posterID optional.Option[int64]) {
if !posterID.Has() {
return
}
// poster doesn't need to support db.NoConditionID(-1), so just use the value as-is
if posterID.Has() {
sess.And("issue.poster_id=?", posterID.Value())
}
}
func applyMentionedCondition(sess *xorm.Session, mentionedID int64) {
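To make the new filter semantics concrete, a hedged sketch of the three assignee states a caller can now express through IssuesOptions; the wrapper function is an assumption, while optional.Some/None and db.NoConditionID are the helpers already used elsewhere in this change.

// example_assignee_filters.go — illustrative only
package example

import (
	"code.gitea.io/gitea/models/db"
	issues_model "code.gitea.io/gitea/models/issues"
	"code.gitea.io/gitea/modules/optional"
)

// assigneeFilters shows the states the optional AssigneeID field can express.
func assigneeFilters() []issues_model.IssuesOptions {
	return []issues_model.IssuesOptions{
		{AssigneeID: optional.None[int64]()},          // no assignee filter (applyAssigneeCondition returns early)
		{AssigneeID: optional.Some(int64(5))},         // only issues assigned to user 5
		{AssigneeID: optional.Some(db.NoConditionID)}, // only issues with no assignee at all
	}
}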

View File

@ -151,15 +151,9 @@ func applyIssuesOptions(sess *xorm.Session, opts *IssuesOptions, issueIDs []int6
applyProjectCondition(sess, opts)
if opts.AssigneeID > 0 {
applyAssigneeCondition(sess, opts.AssigneeID)
} else if opts.AssigneeID == db.NoConditionID {
sess.Where("issue.id NOT IN (SELECT issue_id FROM issue_assignees)")
}
if opts.PosterID > 0 {
applyPosterCondition(sess, opts.PosterID)
}
if opts.MentionedID > 0 {
applyMentionedCondition(sess, opts.MentionedID)

View File

@ -16,6 +16,7 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/setting"
"github.com/stretchr/testify/assert"
@ -155,7 +156,7 @@ func TestIssues(t *testing.T) {
}{
{
issues_model.IssuesOptions{
AssigneeID: 1,
AssigneeID: optional.Some(int64(1)),
SortType: "oldest",
},
[]int64{1, 6},
@ -433,7 +434,7 @@ func assertCreateIssues(t *testing.T, isPull bool) {
owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: repo.OwnerID})
label := unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 1})
milestone := unittest.AssertExistsAndLoadBean(t, &issues_model.Milestone{ID: 1})
assert.EqualValues(t, milestone.ID, 1)
assert.EqualValues(t, 1, milestone.ID)
reaction := &issues_model.Reaction{
Type: "heart",
UserID: owner.ID,

View File

@ -48,17 +48,17 @@ func TestGetIssueWatchers(t *testing.T) {
iws, err := issues_model.GetIssueWatchers(db.DefaultContext, 1, db.ListOptions{})
assert.NoError(t, err)
// Watcher is inactive, thus 0
assert.Len(t, iws, 0)
assert.Empty(t, iws)
iws, err = issues_model.GetIssueWatchers(db.DefaultContext, 2, db.ListOptions{})
assert.NoError(t, err)
// Watcher is explicit not watching
assert.Len(t, iws, 0)
assert.Empty(t, iws)
iws, err = issues_model.GetIssueWatchers(db.DefaultContext, 5, db.ListOptions{})
assert.NoError(t, err)
// Issue has no Watchers
assert.Len(t, iws, 0)
assert.Empty(t, iws)
iws, err = issues_model.GetIssueWatchers(db.DefaultContext, 7, db.ListOptions{})
assert.NoError(t, err)

View File

@ -31,12 +31,12 @@ func TestLabel_LoadSelectedLabelsAfterClick(t *testing.T) {
// First test : with negative and scope
label.LoadSelectedLabelsAfterClick([]int64{1, -8}, []string{"", "scope"})
assert.Equal(t, "1", label.QueryString)
assert.Equal(t, true, label.IsSelected)
assert.True(t, label.IsSelected)
// Second test : with duplicates
label.LoadSelectedLabelsAfterClick([]int64{1, 7, 1, 7, 7}, []string{"", "scope", "", "scope", "scope"})
assert.Equal(t, "1,8", label.QueryString)
assert.Equal(t, false, label.IsSelected)
assert.False(t, label.IsSelected)
// Third test : empty set
label.LoadSelectedLabelsAfterClick([]int64{}, []string{})
@ -248,7 +248,7 @@ func TestGetLabelsByIssueID(t *testing.T) {
labels, err = issues_model.GetLabelsByIssueID(db.DefaultContext, unittest.NonexistentID)
assert.NoError(t, err)
assert.Len(t, labels, 0)
assert.Empty(t, labels)
}
func TestUpdateLabel(t *testing.T) {
@ -271,7 +271,7 @@ func TestUpdateLabel(t *testing.T) {
assert.EqualValues(t, label.Color, newLabel.Color)
assert.EqualValues(t, label.Name, newLabel.Name)
assert.EqualValues(t, label.Description, newLabel.Description)
assert.EqualValues(t, newLabel.ArchivedUnix, 0)
assert.EqualValues(t, 0, newLabel.ArchivedUnix)
unittest.CheckConsistencyFor(t, &issues_model.Label{}, &repo_model.Repository{})
}

View File

@ -87,7 +87,7 @@ func TestGetMilestonesByRepoID(t *testing.T) {
IsClosed: optional.Some(false),
})
assert.NoError(t, err)
assert.Len(t, milestones, 0)
assert.Empty(t, milestones)
}
func TestGetMilestones(t *testing.T) {

View File

@ -16,6 +16,7 @@ import (
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/util"
"xorm.io/builder"
"xorm.io/xorm"
)
@ -257,6 +258,64 @@ func (prs PullRequestList) GetIssueIDs() []int64 {
})
}
func (prs PullRequestList) LoadReviewCommentsCounts(ctx context.Context) (map[int64]int, error) {
issueIDs := prs.GetIssueIDs()
countsMap := make(map[int64]int, len(issueIDs))
counts := make([]struct {
IssueID int64
Count int
}, 0, len(issueIDs))
if err := db.GetEngine(ctx).Select("issue_id, count(*) as count").
Table("comment").In("issue_id", issueIDs).And("type = ?", CommentTypeReview).
GroupBy("issue_id").Find(&counts); err != nil {
return nil, err
}
for _, c := range counts {
countsMap[c.IssueID] = c.Count
}
return countsMap, nil
}
func (prs PullRequestList) LoadReviews(ctx context.Context) (ReviewList, error) {
issueIDs := prs.GetIssueIDs()
reviews := make([]*Review, 0, len(issueIDs))
subQuery := builder.Select("max(id) as id").
From("review").
Where(builder.In("issue_id", issueIDs)).
And(builder.In("`type`", ReviewTypeApprove, ReviewTypeReject, ReviewTypeRequest)).
And(builder.Eq{
"dismissed": false,
"original_author_id": 0,
"reviewer_team_id": 0,
}).
GroupBy("issue_id, reviewer_id")
// Get latest review of each reviewer, sorted in order they were made
if err := db.GetEngine(ctx).In("id", subQuery).OrderBy("review.updated_unix ASC").Find(&reviews); err != nil {
return nil, err
}
teamReviewRequests := make([]*Review, 0, 5)
subQueryTeam := builder.Select("max(id) as id").
From("review").
Where(builder.In("issue_id", issueIDs)).
And(builder.Eq{
"original_author_id": 0,
}).And(builder.Neq{
"reviewer_team_id": 0,
}).
GroupBy("issue_id, reviewer_team_id")
if err := db.GetEngine(ctx).In("id", subQueryTeam).OrderBy("review.updated_unix ASC").Find(&teamReviewRequests); err != nil {
return nil, err
}
if len(teamReviewRequests) > 0 {
reviews = append(reviews, teamReviewRequests...)
}
return reviews, nil
}
// HasMergedPullRequestInRepo returns whether the user(poster) has merged pull-request in the repo
func HasMergedPullRequestInRepo(ctx context.Context, repoID, posterID int64) (bool, error) {
return db.GetEngine(ctx).

View File

@ -0,0 +1,64 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package issues_test
import (
"testing"
"code.gitea.io/gitea/models/db"
issues_model "code.gitea.io/gitea/models/issues"
"code.gitea.io/gitea/models/unittest"
"github.com/stretchr/testify/assert"
)
func TestPullRequestList_LoadAttributes(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
prs := []*issues_model.PullRequest{
unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1}),
unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}),
}
assert.NoError(t, issues_model.PullRequestList(prs).LoadAttributes(db.DefaultContext))
for _, pr := range prs {
assert.NotNil(t, pr.Issue)
assert.Equal(t, pr.IssueID, pr.Issue.ID)
}
assert.NoError(t, issues_model.PullRequestList([]*issues_model.PullRequest{}).LoadAttributes(db.DefaultContext))
}
func TestPullRequestList_LoadReviewCommentsCounts(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
prs := []*issues_model.PullRequest{
unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1}),
unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}),
}
reviewComments, err := issues_model.PullRequestList(prs).LoadReviewCommentsCounts(db.DefaultContext)
assert.NoError(t, err)
assert.Len(t, reviewComments, 2)
for _, pr := range prs {
assert.EqualValues(t, 1, reviewComments[pr.IssueID])
}
}
func TestPullRequestList_LoadReviews(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
prs := []*issues_model.PullRequest{
unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1}),
unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}),
}
reviewList, err := issues_model.PullRequestList(prs).LoadReviews(db.DefaultContext)
assert.NoError(t, err)
// 1, 7, 8, 9, 10, 22
assert.Len(t, reviewList, 6)
assert.EqualValues(t, 1, reviewList[0].ID)
assert.EqualValues(t, 7, reviewList[1].ID)
assert.EqualValues(t, 8, reviewList[2].ID)
assert.EqualValues(t, 9, reviewList[3].ID)
assert.EqualValues(t, 10, reviewList[4].ID)
assert.EqualValues(t, 22, reviewList[5].ID)
}

View File

@ -79,11 +79,11 @@ func TestPullRequestsNewest(t *testing.T) {
func TestLoadRequestedReviewers(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
pull := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1})
pull := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2})
assert.NoError(t, pull.LoadIssue(db.DefaultContext))
issue := pull.Issue
assert.NoError(t, issue.LoadRepo(db.DefaultContext))
assert.Len(t, pull.RequestedReviewers, 0)
assert.Empty(t, pull.RequestedReviewers)
user1, err := user_model.GetUserByID(db.DefaultContext, 1)
assert.NoError(t, err)
@ -93,7 +93,7 @@ func TestLoadRequestedReviewers(t *testing.T) {
assert.NotNil(t, comment)
assert.NoError(t, pull.LoadRequestedReviewers(db.DefaultContext))
assert.Len(t, pull.RequestedReviewers, 1)
assert.Len(t, pull.RequestedReviewers, 6)
comment, err = issues_model.RemoveReviewRequest(db.DefaultContext, issue, user1, &user_model.User{})
assert.NoError(t, err)
@ -234,22 +234,6 @@ func TestPullRequest_UpdateCols(t *testing.T) {
unittest.CheckConsistencyFor(t, pr)
}
func TestPullRequestList_LoadAttributes(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
prs := []*issues_model.PullRequest{
unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1}),
unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}),
}
assert.NoError(t, issues_model.PullRequestList(prs).LoadAttributes(db.DefaultContext))
for _, pr := range prs {
assert.NotNil(t, pr.Issue)
assert.Equal(t, pr.IssueID, pr.Issue.ID)
}
assert.NoError(t, issues_model.PullRequestList([]*issues_model.PullRequest{}).LoadAttributes(db.DefaultContext))
}
// TODO TestAddTestPullRequestTask
func TestPullRequest_IsWorkInProgress(t *testing.T) {

View File

@ -47,16 +47,11 @@ func (reviews ReviewList) LoadReviewersTeams(ctx context.Context) error {
}
}
teamsMap := make(map[int64]*organization_model.Team, 0)
for _, teamID := range reviewersTeamsIDs {
team, err := organization_model.GetTeamByID(ctx, teamID)
teamsMap, err := organization_model.GetTeamsByIDs(ctx, reviewersTeamsIDs)
if err != nil {
return err
}
teamsMap[teamID] = team
}
for _, review := range reviews {
if review.ReviewerTeamID != 0 {
review.ReviewerTeam = teamsMap[review.ReviewerTeamID]

View File

@ -126,42 +126,48 @@ func TestGetReviewersByIssueID(t *testing.T) {
user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
org3 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3})
user4 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4})
user5 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 5})
expectedReviews := []*issues_model.Review{}
expectedReviews = append(expectedReviews,
&issues_model.Review{
ID: 7,
Reviewer: org3,
Type: issues_model.ReviewTypeReject,
UpdatedUnix: 946684812,
},
&issues_model.Review{
ID: 8,
Reviewer: user4,
Type: issues_model.ReviewTypeApprove,
UpdatedUnix: 946684813,
},
&issues_model.Review{
ID: 9,
Reviewer: user2,
Type: issues_model.ReviewTypeReject,
UpdatedUnix: 946684814,
})
},
&issues_model.Review{
ID: 10,
Reviewer: user_model.NewGhostUser(),
Type: issues_model.ReviewTypeReject,
UpdatedUnix: 946684815,
},
&issues_model.Review{
ID: 22,
Reviewer: user5,
Type: issues_model.ReviewTypeRequest,
UpdatedUnix: 946684817,
},
)
allReviews, err := issues_model.GetReviewsByIssueID(db.DefaultContext, issue.ID)
assert.NoError(t, err)
for _, review := range allReviews {
assert.NoError(t, review.LoadReviewer(db.DefaultContext))
}
if assert.Len(t, allReviews, 3) {
for i, review := range allReviews {
assert.Equal(t, expectedReviews[i].Reviewer, review.Reviewer)
assert.Equal(t, expectedReviews[i].Type, review.Type)
assert.Equal(t, expectedReviews[i].UpdatedUnix, review.UpdatedUnix)
}
}
allReviews, err = issues_model.GetReviewsByIssueID(db.DefaultContext, issue.ID)
assert.NoError(t, err)
assert.NoError(t, allReviews.LoadReviewers(db.DefaultContext))
if assert.Len(t, allReviews, 3) {
if assert.Len(t, allReviews, 5) {
for i, review := range allReviews {
assert.Equal(t, expectedReviews[i].Reviewer, review.Reviewer)
assert.Equal(t, expectedReviews[i].Type, review.Type)

View File

@ -32,7 +32,7 @@ func TestCancelStopwatch(t *testing.T) {
_ = unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{Type: issues_model.CommentTypeCancelTracking, PosterID: user1.ID, IssueID: issue1.ID})
assert.Nil(t, issues_model.CancelStopwatch(db.DefaultContext, user1, issue2))
assert.NoError(t, issues_model.CancelStopwatch(db.DefaultContext, user1, issue2))
}
func TestStopwatchExists(t *testing.T) {

View File

@ -50,7 +50,7 @@ func TestGetTrackedTimes(t *testing.T) {
times, err = issues_model.GetTrackedTimes(db.DefaultContext, &issues_model.FindTrackedTimesOptions{IssueID: -1})
assert.NoError(t, err)
assert.Len(t, times, 0)
assert.Empty(t, times)
// by User
times, err = issues_model.GetTrackedTimes(db.DefaultContext, &issues_model.FindTrackedTimesOptions{UserID: 1})
@ -60,7 +60,7 @@ func TestGetTrackedTimes(t *testing.T) {
times, err = issues_model.GetTrackedTimes(db.DefaultContext, &issues_model.FindTrackedTimesOptions{UserID: 3})
assert.NoError(t, err)
assert.Len(t, times, 0)
assert.Empty(t, times)
// by Repo
times, err = issues_model.GetTrackedTimes(db.DefaultContext, &issues_model.FindTrackedTimesOptions{RepositoryID: 2})
@ -69,7 +69,7 @@ func TestGetTrackedTimes(t *testing.T) {
assert.Equal(t, int64(1), times[0].Time)
issue, err := issues_model.GetIssueByID(db.DefaultContext, times[0].IssueID)
assert.NoError(t, err)
assert.Equal(t, issue.RepoID, int64(2))
assert.Equal(t, int64(2), issue.RepoID)
times, err = issues_model.GetTrackedTimes(db.DefaultContext, &issues_model.FindTrackedTimesOptions{RepositoryID: 1})
assert.NoError(t, err)
@ -77,7 +77,7 @@ func TestGetTrackedTimes(t *testing.T) {
times, err = issues_model.GetTrackedTimes(db.DefaultContext, &issues_model.FindTrackedTimesOptions{RepositoryID: 10})
assert.NoError(t, err)
assert.Len(t, times, 0)
assert.Empty(t, times)
}
func TestTotalTimesForEachUser(t *testing.T) {

View File

@ -56,8 +56,8 @@ func Test_AddRepoIDForAttachment(t *testing.T) {
err := x.Table("attachment").Where("issue_id > 0").Find(&issueAttachments)
assert.NoError(t, err)
for _, attach := range issueAttachments {
assert.Greater(t, attach.RepoID, int64(0))
assert.Greater(t, attach.IssueID, int64(0))
assert.Positive(t, attach.RepoID)
assert.Positive(t, attach.IssueID)
var issue Issue
has, err := x.ID(attach.IssueID).Get(&issue)
assert.NoError(t, err)
@ -69,8 +69,8 @@ func Test_AddRepoIDForAttachment(t *testing.T) {
err = x.Table("attachment").Where("release_id > 0").Find(&releaseAttachments)
assert.NoError(t, err)
for _, attach := range releaseAttachments {
assert.Greater(t, attach.RepoID, int64(0))
assert.Greater(t, attach.ReleaseID, int64(0))
assert.Positive(t, attach.RepoID)
assert.Positive(t, attach.ReleaseID)
var release Release
has, err := x.ID(attach.ReleaseID).Get(&release)
assert.NoError(t, err)

View File

@ -107,12 +107,12 @@ func Test_RepositoryFormat(t *testing.T) {
repo = new(Repository)
ok, err := x.ID(2).Get(repo)
assert.NoError(t, err)
assert.EqualValues(t, true, ok)
assert.True(t, ok)
assert.EqualValues(t, "sha1", repo.ObjectFormatName)
repo = new(Repository)
ok, err = x.ID(id).Get(repo)
assert.NoError(t, err)
assert.EqualValues(t, true, ok)
assert.True(t, ok)
assert.EqualValues(t, "sha256", repo.ObjectFormatName)
}

View File

@ -39,7 +39,7 @@ func Test_AddUniqueIndexForProjectIssue(t *testing.T) {
tables, err := x.DBMetas()
assert.NoError(t, err)
assert.EqualValues(t, 1, len(tables))
assert.Len(t, tables, 1)
found := false
for _, index := range tables[0].Indexes {
if index.Type == schemas.UniqueType {

View File

@ -40,7 +40,7 @@ func TestFindOrgs(t *testing.T) {
IncludePrivate: false,
})
assert.NoError(t, err)
assert.Len(t, orgs, 0)
assert.Empty(t, orgs)
total, err := db.Count[organization.Organization](db.DefaultContext, organization.FindOrgOptions{
UserID: 4,

View File

@ -283,7 +283,7 @@ func TestGetOrgUsersByOrgID(t *testing.T) {
OrgID: unittest.NonexistentID,
})
assert.NoError(t, err)
assert.Len(t, orgUsers, 0)
assert.Empty(t, orgUsers)
}
func TestChangeOrgUserStatus(t *testing.T) {

View File

@ -131,3 +131,8 @@ func GetTeamsByOrgIDs(ctx context.Context, orgIDs []int64) (TeamList, error) {
teams := make([]*Team, 0, 10)
return teams, db.GetEngine(ctx).Where(builder.In("org_id", orgIDs)).Find(&teams)
}
func GetTeamsByIDs(ctx context.Context, teamIDs []int64) (map[int64]*Team, error) {
teams := make(map[int64]*Team, len(teamIDs))
return teams, db.GetEngine(ctx).Where(builder.In("`id`", teamIDs)).Find(&teams)
}

View File

@ -0,0 +1,25 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package organization_test
import (
"testing"
"code.gitea.io/gitea/models/db"
org_model "code.gitea.io/gitea/models/organization"
"code.gitea.io/gitea/models/unittest"
"github.com/stretchr/testify/assert"
)
func Test_GetTeamsByIDs(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
	// team 1 is the Owners team, team 2 is a normal team
teams, err := org_model.GetTeamsByIDs(db.DefaultContext, []int64{1, 2})
assert.NoError(t, err)
assert.Len(t, teams, 2)
assert.Equal(t, "Owners", teams[1].Name)
assert.Equal(t, "team1", teams[2].Name)
}

View File

@ -15,7 +15,7 @@ func TestAccessMode(t *testing.T) {
m := ParseAccessMode(name)
assert.Equal(t, AccessMode(i), m)
}
assert.Equal(t, AccessMode(4), AccessModeOwner)
assert.Equal(t, AccessModeOwner, AccessMode(4))
assert.Equal(t, "owner", AccessModeOwner.ToString())
assert.Equal(t, AccessModeNone, ParseAccessMode("owner"))
assert.Equal(t, AccessModeNone, ParseAccessMode("invalid"))

View File

@ -5,7 +5,6 @@ package project
import (
"fmt"
"strings"
"testing"
"code.gitea.io/gitea/models/db"
@ -66,7 +65,7 @@ func Test_moveIssuesToAnotherColumn(t *testing.T) {
issues, err = column1.GetIssues(db.DefaultContext)
assert.NoError(t, err)
assert.Len(t, issues, 0)
assert.Empty(t, issues)
issues, err = column2.GetIssues(db.DefaultContext)
assert.NoError(t, err)
@ -123,5 +122,5 @@ func Test_NewColumn(t *testing.T) {
ProjectID: project1.ID,
})
assert.Error(t, err)
assert.True(t, strings.Contains(err.Error(), "maximum number of columns reached"))
assert.Contains(t, err.Error(), "maximum number of columns reached")
}

View File

@ -144,8 +144,8 @@ func TestGetRepositoryByURL(t *testing.T) {
assert.NotNil(t, repo)
assert.NoError(t, err)
assert.Equal(t, repo.ID, int64(2))
assert.Equal(t, repo.OwnerID, int64(2))
assert.Equal(t, int64(2), repo.ID)
assert.Equal(t, int64(2), repo.OwnerID)
}
test(t, "https://try.gitea.io/user2/repo2")
@ -159,8 +159,8 @@ func TestGetRepositoryByURL(t *testing.T) {
assert.NotNil(t, repo)
assert.NoError(t, err)
assert.Equal(t, repo.ID, int64(2))
assert.Equal(t, repo.OwnerID, int64(2))
assert.Equal(t, int64(2), repo.ID)
assert.Equal(t, int64(2), repo.OwnerID)
}
test(t, "git+ssh://sshuser@try.gitea.io/user2/repo2")
@ -177,8 +177,8 @@ func TestGetRepositoryByURL(t *testing.T) {
assert.NotNil(t, repo)
assert.NoError(t, err)
assert.Equal(t, repo.ID, int64(2))
assert.Equal(t, repo.OwnerID, int64(2))
assert.Equal(t, int64(2), repo.ID)
assert.Equal(t, int64(2), repo.OwnerID)
}
test(t, "sshuser@try.gitea.io:user2/repo2")

View File

@ -52,7 +52,7 @@ func TestRepository_GetStargazers2(t *testing.T) {
repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 3})
gazers, err := repo_model.GetStargazers(db.DefaultContext, repo, db.ListOptions{Page: 0})
assert.NoError(t, err)
assert.Len(t, gazers, 0)
assert.Empty(t, gazers)
}
func TestClearRepoStars(t *testing.T) {
@ -71,5 +71,5 @@ func TestClearRepoStars(t *testing.T) {
gazers, err := repo_model.GetStargazers(db.DefaultContext, repo, db.ListOptions{Page: 0})
assert.NoError(t, err)
assert.Len(t, gazers, 0)
assert.Empty(t, gazers)
}

View File

@ -21,7 +21,7 @@ func TestRepoAssignees(t *testing.T) {
users, err := repo_model.GetRepoAssignees(db.DefaultContext, repo2)
assert.NoError(t, err)
assert.Len(t, users, 1)
assert.Equal(t, users[0].ID, int64(2))
assert.Equal(t, int64(2), users[0].ID)
repo21 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 21})
users, err = repo_model.GetRepoAssignees(db.DefaultContext, repo21)

View File

@ -41,7 +41,7 @@ func TestGetWatchers(t *testing.T) {
watches, err = repo_model.GetWatchers(db.DefaultContext, unittest.NonexistentID)
assert.NoError(t, err)
assert.Len(t, watches, 0)
assert.Empty(t, watches)
}
func TestRepository_GetWatchers(t *testing.T) {
@ -58,7 +58,7 @@ func TestRepository_GetWatchers(t *testing.T) {
repo = unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 9})
watchers, err = repo_model.GetRepoWatchers(db.DefaultContext, repo.ID, db.ListOptions{Page: 1})
assert.NoError(t, err)
assert.Len(t, watchers, 0)
assert.Empty(t, watchers)
}
func TestWatchIfAuto(t *testing.T) {

View File

@ -79,7 +79,7 @@ func AssertExistsAndLoadMap(t assert.TestingT, table string, conditions ...any)
e := db.GetEngine(db.DefaultContext).Table(table)
res, err := whereOrderConditions(e, conditions).Query()
assert.NoError(t, err)
assert.True(t, len(res) == 1,
assert.Len(t, res, 1,
"Expected to find one row in %s (with conditions %+v), but found %d",
table, conditions, len(res),
)

View File

@ -97,8 +97,7 @@ func TestListEmails(t *testing.T) {
}
emails, count, err := user_model.SearchEmails(db.DefaultContext, opts)
assert.NoError(t, err)
assert.NotEqual(t, int64(0), count)
assert.True(t, count > 5)
assert.Greater(t, count, int64(5))
contains := func(match func(s *user_model.SearchEmailResult) bool) bool {
for _, v := range emails {

View File

@ -56,5 +56,5 @@ func TestSettings(t *testing.T) {
assert.NoError(t, err)
settings, err = user_model.GetUserAllSettings(db.DefaultContext, 99)
assert.NoError(t, err)
assert.Len(t, settings, 0)
assert.Empty(t, settings)
}

View File

@ -201,7 +201,7 @@ func TestNewGitSig(t *testing.T) {
assert.NotContains(t, sig.Name, "<")
assert.NotContains(t, sig.Name, ">")
assert.NotContains(t, sig.Name, "\n")
assert.NotEqual(t, len(strings.TrimSpace(sig.Name)), 0)
assert.NotEmpty(t, strings.TrimSpace(sig.Name))
}
}
@ -216,7 +216,7 @@ func TestDisplayName(t *testing.T) {
if len(strings.TrimSpace(user.FullName)) == 0 {
assert.Equal(t, user.Name, displayName)
}
assert.NotEqual(t, len(strings.TrimSpace(displayName)), 0)
assert.NotEmpty(t, strings.TrimSpace(displayName))
}
}
@ -322,15 +322,15 @@ func TestGetMaileableUsersByIDs(t *testing.T) {
assert.NoError(t, err)
assert.Len(t, results, 1)
if len(results) > 1 {
assert.Equal(t, results[0].ID, 1)
assert.Equal(t, 1, results[0].ID)
}
results, err = user_model.GetMaileableUsersByIDs(db.DefaultContext, []int64{1, 4}, true)
assert.NoError(t, err)
assert.Len(t, results, 2)
if len(results) > 2 {
assert.Equal(t, results[0].ID, 1)
assert.Equal(t, results[1].ID, 4)
assert.Equal(t, 1, results[0].ID)
assert.Equal(t, 4, results[1].ID)
}
}
@ -499,7 +499,7 @@ func Test_ValidateUser(t *testing.T) {
{ID: 2, Visibility: structs.VisibleTypePrivate}: true,
}
for kase, expected := range kases {
assert.EqualValues(t, expected, nil == user_model.ValidateUser(kase), fmt.Sprintf("case: %+v", kase))
assert.EqualValues(t, expected, nil == user_model.ValidateUser(kase), "case: %+v", kase)
}
}
@ -570,11 +570,11 @@ func TestDisabledUserFeatures(t *testing.T) {
user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
assert.Len(t, setting.Admin.UserDisabledFeatures.Values(), 0)
assert.Empty(t, setting.Admin.UserDisabledFeatures.Values())
// no features should be disabled with a plain login type
assert.LessOrEqual(t, user.LoginType, auth.Plain)
assert.Len(t, user_model.DisabledFeaturesWithLoginType(user).Values(), 0)
assert.Empty(t, user_model.DisabledFeaturesWithLoginType(user).Values())
for _, f := range testValues.Values() {
assert.False(t, user_model.IsFeatureDisabledWithLoginType(user, f))
}
@ -600,5 +600,5 @@ func TestGetInactiveUsers(t *testing.T) {
interval := time.Now().Unix() - 1730468968 + 3600*24
users, err = user_model.GetInactiveUsers(db.DefaultContext, time.Duration(interval*int64(time.Second)))
assert.NoError(t, err)
assert.Len(t, users, 0)
assert.Empty(t, users)
}

View File

@ -43,7 +43,7 @@ func TestWebhook_History(t *testing.T) {
webhook = unittest.AssertExistsAndLoadBean(t, &Webhook{ID: 2})
tasks, err = webhook.History(db.DefaultContext, 0)
assert.NoError(t, err)
assert.Len(t, tasks, 0)
assert.Empty(t, tasks)
}
func TestWebhook_UpdateEvent(t *testing.T) {
@ -206,7 +206,7 @@ func TestHookTasks(t *testing.T) {
hookTasks, err = HookTasks(db.DefaultContext, unittest.NonexistentID, 1)
assert.NoError(t, err)
assert.Len(t, hookTasks, 0)
assert.Empty(t, hookTasks)
}
func TestCreateHookTask(t *testing.T) {

View File

@ -8,7 +8,6 @@ import (
"io"
"net/http"
"net/http/httptest"
"regexp"
"testing"
"code.gitea.io/gitea/models/db"
@ -28,9 +27,9 @@ func TestActivityPubSignedPost(t *testing.T) {
expected := "BODY"
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
assert.Regexp(t, regexp.MustCompile("^"+setting.Federation.DigestAlgorithm), r.Header.Get("Digest"))
assert.Regexp(t, "^"+setting.Federation.DigestAlgorithm, r.Header.Get("Digest"))
assert.Contains(t, r.Header.Get("Signature"), pubID)
assert.Equal(t, r.Header.Get("Content-Type"), ActivityStreamsContentType)
assert.Equal(t, ActivityStreamsContentType, r.Header.Get("Content-Type"))
body, err := io.ReadAll(r.Body)
assert.NoError(t, err)
assert.Equal(t, expected, string(body))

View File

@ -58,7 +58,7 @@ func TestLayered(t *testing.T) {
assertRead := func(expected string, expectedErr error, elems ...string) {
bs, err := assets.ReadFile(elems...)
if err != nil {
assert.ErrorAs(t, err, &expectedErr)
assert.ErrorIs(t, err, expectedErr)
} else {
assert.NoError(t, err)
assert.Equal(t, expected, string(bs))

View File

@ -15,5 +15,5 @@ func TestPamAuth(t *testing.T) {
result, err := Auth("gitea", "user1", "false-pwd")
assert.Error(t, err)
assert.EqualError(t, err, "Authentication failure")
assert.Len(t, result, 0)
assert.Empty(t, result)
}

View File

@ -18,7 +18,7 @@ func TestDummyHasher(t *testing.T) {
password, salt := "password", "ZogKvWdyEx"
hash, err := dummy.Hash(password, salt)
assert.Nil(t, err)
assert.NoError(t, err)
assert.Equal(t, hash, salt+":"+password)
assert.True(t, dummy.VerifyPassword(password, hash, salt))

View File

@ -99,10 +99,10 @@ func IsComplexEnough(pwd string) bool {
func Generate(n int) (string, error) {
NewComplexity()
buffer := make([]byte, n)
max := big.NewInt(int64(len(validChars)))
maxInt := big.NewInt(int64(len(validChars)))
for {
for j := 0; j < n; j++ {
rnd, err := rand.Int(rand.Reader, max)
rnd, err := rand.Int(rand.Reader, maxInt)
if err != nil {
return "", err
}

View File

@ -6,7 +6,6 @@ package base
import (
"crypto/sha1"
"fmt"
"os"
"testing"
"time"
@ -157,7 +156,7 @@ func TestStringsToInt64s(t *testing.T) {
testSuccess([]string{"1", "4", "16", "64", "256"}, []int64{1, 4, 16, 64, 256})
ints, err := StringsToInt64s([]string{"-1", "a"})
assert.Len(t, ints, 0)
assert.Empty(t, ints)
assert.Error(t, err)
}
@ -172,9 +171,9 @@ func TestInt64sToStrings(t *testing.T) {
// TODO: Test EntryIcon
func TestSetupGiteaRoot(t *testing.T) {
_ = os.Setenv("GITEA_ROOT", "test")
t.Setenv("GITEA_ROOT", "test")
assert.Equal(t, "test", SetupGiteaRoot())
_ = os.Setenv("GITEA_ROOT", "")
t.Setenv("GITEA_ROOT", "")
assert.NotEqual(t, "test", SetupGiteaRoot())
}

View File

@ -25,7 +25,7 @@ func TestPrepareFileNameAndType(t *testing.T) {
assert.Equal(t,
fmt.Sprintf("outFile=%s, outType=%s", expFile, expType),
fmt.Sprintf("outFile=%s, outType=%s", outFile, outType),
fmt.Sprintf("argFile=%s, argType=%s", argFile, argType),
"argFile=%s, argType=%s", argFile, argType,
)
}

View File

@ -474,3 +474,17 @@ func (c *Commit) GetRepositoryDefaultPublicGPGKey(forceUpdate bool) (*GPGSetting
}
return c.repo.GetDefaultPublicGPGKey(forceUpdate)
}
func IsStringLikelyCommitID(objFmt ObjectFormat, s string, minLength ...int) bool {
minLen := util.OptionalArg(minLength, objFmt.FullLength())
if len(s) < minLen || len(s) > objFmt.FullLength() {
return false
}
for _, c := range s {
isHex := (c >= '0' && c <= '9') || (c >= 'a' && c <= 'f')
if !isHex {
return false
}
}
return true
}
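A short usage sketch for the new helper, assuming git.Sha1ObjectFormat is the package-level SHA-1 object format value; the wrapper name and the 7-character minimum are illustrative, not part of this commit.

// example_commit_id.go — illustrative only
package example

import "code.gitea.io/gitea/modules/git"

// looksLikeCommitID reports whether user input could plausibly be a
// (possibly abbreviated) SHA-1 commit ID of at least 7 hex characters.
func looksLikeCommitID(s string) bool {
	return git.IsStringLikelyCommitID(git.Sha1ObjectFormat, s, 7)
}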

View File

@ -146,7 +146,7 @@ func TestHasPreviousCommitSha256(t *testing.T) {
parentSHA := MustIDFromString("b0ec7af4547047f12d5093e37ef8f1b3b5415ed8ee17894d43a34d7d34212e9c")
notParentSHA := MustIDFromString("42e334efd04cd36eea6da0599913333c26116e1a537ca76e5b6e4af4dda00236")
assert.Equal(t, objectFormat, parentSHA.Type())
assert.Equal(t, objectFormat.Name(), "sha256")
assert.Equal(t, "sha256", objectFormat.Name())
haz, err := commit.HasPreviousCommit(parentSHA)
assert.NoError(t, err)

View File

@ -343,9 +343,9 @@ func TestGetCommitFileStatusMerges(t *testing.T) {
},
}
assert.Equal(t, commitFileStatus.Added, expected.Added)
assert.Equal(t, commitFileStatus.Removed, expected.Removed)
assert.Equal(t, commitFileStatus.Modified, expected.Modified)
assert.Equal(t, expected.Added, commitFileStatus.Added)
assert.Equal(t, expected.Removed, commitFileStatus.Removed)
assert.Equal(t, expected.Modified, commitFileStatus.Modified)
}
func Test_GetCommitBranchStart(t *testing.T) {

View File

@ -73,9 +73,9 @@ func TestGrepSearch(t *testing.T) {
res, err = GrepSearch(context.Background(), repo, "no-such-content", GrepOptions{})
assert.NoError(t, err)
assert.Len(t, res, 0)
assert.Empty(t, res)
res, err = GrepSearch(context.Background(), &Repository{Path: "no-such-git-repo"}, "no-such-content", GrepOptions{})
assert.Error(t, err)
assert.Len(t, res, 0)
assert.Empty(t, res)
}

View File

@ -100,5 +100,5 @@ func TestParseTreeEntriesInvalid(t *testing.T) {
// there was a panic: "runtime error: slice bounds out of range" when the input was invalid: #20315
entries, err := ParseTreeEntries([]byte("100644 blob ea0d83c9081af9500ac9f804101b3fd0a5c293af"))
assert.Error(t, err)
assert.Len(t, entries, 0)
assert.Empty(t, entries)
}

View File

@ -142,7 +142,6 @@ func (ref RefName) RemoteName() string {
// ShortName returns the short name of the reference name
func (ref RefName) ShortName() string {
refName := string(ref)
if ref.IsBranch() {
return ref.BranchName()
}
@ -158,8 +157,7 @@ func (ref RefName) ShortName() string {
if ref.IsFor() {
return ref.ForBranchName()
}
return refName
return string(ref) // usually it is a commit ID
}
// RefGroup returns the group type of the reference

View File

@ -223,7 +223,7 @@ func Push(ctx context.Context, repoPath string, opts PushOptions) error {
if err != nil {
if strings.Contains(stderr, "non-fast-forward") {
return &ErrPushOutOfDate{StdOut: stdout, StdErr: stderr, Err: err}
} else if strings.Contains(stderr, "! [remote rejected]") {
} else if strings.Contains(stderr, "! [remote rejected]") || strings.Contains(stderr, "! [rejected]") {
err := &ErrPushRejected{StdOut: stdout, StdErr: stderr, Err: err}
err.GenerateMessage()
return err

View File

@ -34,7 +34,7 @@ func TestRepository_GetBranches(t *testing.T) {
branches, countAll, err = bareRepo1.GetBranchNames(5, 1)
assert.NoError(t, err)
assert.Len(t, branches, 0)
assert.Empty(t, branches)
assert.EqualValues(t, 3, countAll)
assert.ElementsMatch(t, []string{}, branches)
}
@ -66,7 +66,7 @@ func TestGetRefsBySha(t *testing.T) {
// do not exist
branches, err := bareRepo5.GetRefsBySha("8006ff9adbf0cb94da7dad9e537e53817f9fa5c0", "")
assert.NoError(t, err)
assert.Len(t, branches, 0)
assert.Empty(t, branches)
// refs/pull/1/head
branches, err = bareRepo5.GetRefsBySha("c83380d7056593c51a699d12b9c00627bd5743e9", PullPrefix)

View File

@ -465,15 +465,15 @@ func (repo *Repository) getBranches(env []string, commitID string, limit int) ([
refs := strings.Split(stdout, "\n")
var max int
var maxNum int
if len(refs) > limit {
max = limit
maxNum = limit
} else {
max = len(refs) - 1
maxNum = len(refs) - 1
}
branches := make([]string, max)
for i, ref := range refs[:max] {
branches := make([]string, maxNum)
for i, ref := range refs[:maxNum] {
parts := strings.Fields(ref)
branches[i] = parts[len(parts)-1]

View File

@ -246,19 +246,41 @@ func (repo *Repository) GetDiffOrPatch(base, head string, w io.Writer, patch, bi
// GetDiff generates and returns patch data between given revisions, optimized for human readability
func (repo *Repository) GetDiff(base, head string, w io.Writer) error {
return NewCommand(repo.Ctx, "diff", "-p").AddDynamicArguments(base, head).Run(&RunOpts{
stderr := new(bytes.Buffer)
err := NewCommand(repo.Ctx, "diff", "-p").AddDynamicArguments(base + "..." + head).
Run(&RunOpts{
Dir: repo.Path,
Stdout: w,
Stderr: stderr,
})
if err != nil && bytes.Contains(stderr.Bytes(), []byte("no merge base")) {
return NewCommand(repo.Ctx, "diff", "-p").AddDynamicArguments(base, head).
Run(&RunOpts{
Dir: repo.Path,
Stdout: w,
})
}
return err
}
// GetDiffBinary generates and returns patch data between given revisions, including binary diffs.
func (repo *Repository) GetDiffBinary(base, head string, w io.Writer) error {
return NewCommand(repo.Ctx, "diff", "-p", "--binary", "--histogram").AddDynamicArguments(base, head).Run(&RunOpts{
stderr := new(bytes.Buffer)
err := NewCommand(repo.Ctx, "diff", "-p", "--binary", "--histogram").AddDynamicArguments(base + "..." + head).
Run(&RunOpts{
Dir: repo.Path,
Stdout: w,
Stderr: stderr,
})
if err != nil && bytes.Contains(stderr.Bytes(), []byte("no merge base")) {
return NewCommand(repo.Ctx, "diff", "-p", "--binary", "--histogram").AddDynamicArguments(base, head).
Run(&RunOpts{
Dir: repo.Path,
Stdout: w,
})
}
return err
}
// GetPatch generates and returns format-patch data between given revisions, able to be used with `git apply`
func (repo *Repository) GetPatch(base, head string, w io.Writer) error {
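The rewritten GetDiff/GetDiffBinary now try the three-dot form base...head first, which diffs head against the merge base of the two revisions (matching what a pull request view shows), and fall back to the plain two-dot comparison when no merge base exists. A hedged usage sketch, with the helper name assumed for illustration:

// example_diff.go — illustrative only
package example

import (
	"bytes"

	"code.gitea.io/gitea/modules/git"
)

// diffAgainstBase writes the human-readable diff between base and head into a buffer.
// With the change above this is the merge-base (base...head) diff when possible,
// and the direct two-revision diff otherwise.
func diffAgainstBase(repo *git.Repository, base, head string) (string, error) {
	var buf bytes.Buffer
	if err := repo.GetDiff(base, head, &buf); err != nil {
		return "", err
	}
	return buf.String(), nil
}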

View File

@ -72,7 +72,7 @@ func TestReadPatch(t *testing.T) {
assert.Empty(t, noFile)
assert.Empty(t, noCommit)
assert.Len(t, oldCommit, 40)
assert.True(t, oldCommit == "6e8e2a6f9efd71dbe6917816343ed8415ad696c3")
assert.Equal(t, "6e8e2a6f9efd71dbe6917816343ed8415ad696c3", oldCommit)
}
func TestReadWritePullHead(t *testing.T) {
@ -113,7 +113,7 @@ func TestReadWritePullHead(t *testing.T) {
}
assert.Len(t, headContents, 40)
assert.True(t, headContents == newCommit)
assert.Equal(t, headContents, newCommit)
// Remove file after the test
err = repo.RemoveReference(PullPrefix + "1/head")

View File

@ -61,3 +61,31 @@ func parseTags(refs []string) []string {
}
return results
}
// UnstableGuessRefByShortName does the best guess to see whether a "short name" provided by user is a branch, tag or commit.
// It could guess wrongly if the input is already ambiguous. For example:
// * "refs/heads/the-name" vs "refs/heads/refs/heads/the-name"
// * "refs/tags/1234567890" vs commit "1234567890"
// In most cases, callers SHOULD AVOID using this function, unless there is a compelling reason (eg: to make an API friendly to end users)
// If the function is used, the caller SHOULD CHECK the ref type carefully.
func (repo *Repository) UnstableGuessRefByShortName(shortName string) RefName {
if repo.IsBranchExist(shortName) {
return RefNameFromBranch(shortName)
}
if repo.IsTagExist(shortName) {
return RefNameFromTag(shortName)
}
if strings.HasPrefix(shortName, "refs/") {
if repo.IsReferenceExist(shortName) {
return RefName(shortName)
}
}
commit, err := repo.GetCommit(shortName)
if err == nil {
commitIDString := commit.ID.String()
if strings.HasPrefix(commitIDString, shortName) {
return RefName(commitIDString)
}
}
return ""
}
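Following the doc comment's advice to check the ref type after guessing, a hedged sketch of a caller; the helper name is an assumption, and RefName's IsTag/TagName accessors are assumed to exist alongside the branch accessors shown above.

// example_guess_ref.go — illustrative only
package example

import "code.gitea.io/gitea/modules/git"

// resolveUserRef guesses what a user-supplied short name refers to and then
// checks the resulting ref type before using it.
func resolveUserRef(repo *git.Repository, shortName string) (kind, name string) {
	ref := repo.UnstableGuessRefByShortName(shortName)
	switch {
	case ref == "":
		return "unknown", ""
	case ref.IsBranch():
		return "branch", ref.BranchName()
	case ref.IsTag():
		return "tag", ref.TagName()
	default:
		return "commit", string(ref)
	}
}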

View File

@ -64,7 +64,7 @@ func TestLockAndDo(t *testing.T) {
}
func testLockAndDo(t *testing.T) {
const concurrency = 1000
const concurrency = 50
ctx := context.Background()
count := 0

View File

@ -218,13 +218,13 @@ func (g *Manager) ServerDone() {
g.runningServerWaitGroup.Done()
}
func (g *Manager) setStateTransition(old, new state) bool {
func (g *Manager) setStateTransition(oldState, newState state) bool {
g.lock.Lock()
if g.state != old {
if g.state != oldState {
g.lock.Unlock()
return false
}
g.state = new
g.state = newState
g.lock.Unlock()
return true
}

View File

@ -35,18 +35,18 @@ func BoolFieldQuery(value bool, field string) *query.BoolFieldQuery {
return q
}
func NumericRangeInclusiveQuery(min, max optional.Option[int64], field string) *query.NumericRangeQuery {
func NumericRangeInclusiveQuery(minOption, maxOption optional.Option[int64], field string) *query.NumericRangeQuery {
var minF, maxF *float64
var minI, maxI *bool
if min.Has() {
if minOption.Has() {
minF = new(float64)
*minF = float64(min.Value())
*minF = float64(minOption.Value())
minI = new(bool)
*minI = true
}
if max.Has() {
if maxOption.Has() {
maxF = new(float64)
*maxF = float64(max.Value())
*maxF = float64(maxOption.Value())
maxI = new(bool)
*maxI = true
}

View File

@ -10,12 +10,12 @@ import (
)
// ParsePaginator parses a db.Paginator into a skip and limit
func ParsePaginator(paginator *db.ListOptions, max ...int) (int, int) {
func ParsePaginator(paginator *db.ListOptions, maxNums ...int) (int, int) {
// Use a very large number to indicate no limit
unlimited := math.MaxInt32
if len(max) > 0 {
if len(maxNums) > 0 {
// Some indexer engines have a limit on the page size, respect that
unlimited = max[0]
unlimited = maxNums[0]
}
if paginator == nil || paginator.IsListAll() {

View File

@ -23,7 +23,7 @@ import (
const (
issueIndexerAnalyzer = "issueIndexer"
issueIndexerDocType = "issueIndexerDocType"
issueIndexerLatestVersion = 4
issueIndexerLatestVersion = 5
)
const unicodeNormalizeName = "unicodeNormalize"
@ -75,6 +75,7 @@ func generateIssueIndexMapping() (mapping.IndexMapping, error) {
docMapping.AddFieldMappingsAt("is_pull", boolFieldMapping)
docMapping.AddFieldMappingsAt("is_closed", boolFieldMapping)
docMapping.AddFieldMappingsAt("is_archived", boolFieldMapping)
docMapping.AddFieldMappingsAt("label_ids", numberFieldMapping)
docMapping.AddFieldMappingsAt("no_label", boolFieldMapping)
docMapping.AddFieldMappingsAt("milestone_id", numberFieldMapping)
@ -185,6 +186,9 @@ func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) (
if options.IsClosed.Has() {
queries = append(queries, inner_bleve.BoolFieldQuery(options.IsClosed.Value(), "is_closed"))
}
if options.IsArchived.Has() {
queries = append(queries, inner_bleve.BoolFieldQuery(options.IsArchived.Value(), "is_archived"))
}
if options.NoLabelOnly {
queries = append(queries, inner_bleve.BoolFieldQuery(true, "no_label"))

View File

@ -54,8 +54,8 @@ func ToDBOptions(ctx context.Context, options *internal.SearchOptions) (*issue_m
RepoIDs: options.RepoIDs,
AllPublic: options.AllPublic,
RepoCond: nil,
AssigneeID: convertID(options.AssigneeID),
PosterID: convertID(options.PosterID),
AssigneeID: optional.Some(convertID(options.AssigneeID)),
PosterID: options.PosterID,
MentionedID: convertID(options.MentionID),
ReviewRequestedID: convertID(options.ReviewRequestedID),
ReviewedID: convertID(options.ReviewedID),
@ -72,7 +72,7 @@ func ToDBOptions(ctx context.Context, options *internal.SearchOptions) (*issue_m
UpdatedAfterUnix: options.UpdatedAfterUnix.Value(),
UpdatedBeforeUnix: options.UpdatedBeforeUnix.Value(),
PriorityRepoID: 0,
IsArchived: optional.None[bool](),
IsArchived: options.IsArchived,
Org: nil,
Team: nil,
User: nil,

View File

@ -16,6 +16,7 @@ func ToSearchOptions(keyword string, opts *issues_model.IssuesOptions) *SearchOp
AllPublic: opts.AllPublic,
IsPull: opts.IsPull,
IsClosed: opts.IsClosed,
IsArchived: opts.IsArchived,
}
if len(opts.LabelIDs) == 1 && opts.LabelIDs[0] == 0 {
@ -40,14 +41,14 @@ func ToSearchOptions(keyword string, opts *issues_model.IssuesOptions) *SearchOp
if opts.ProjectID > 0 {
searchOpt.ProjectID = optional.Some(opts.ProjectID)
} else if opts.ProjectID == -1 { // FIXME: this is inconsistent from other places
} else if opts.ProjectID == db.NoConditionID { // FIXME: this is inconsistent from other places
searchOpt.ProjectID = optional.Some[int64](0) // Those issues with no project(projectid==0)
}
if opts.AssigneeID > 0 {
searchOpt.AssigneeID = optional.Some(opts.AssigneeID)
} else if opts.AssigneeID == -1 { // FIXME: this is inconsistent from other places
searchOpt.AssigneeID = optional.Some[int64](0)
if opts.AssigneeID.Value() == db.NoConditionID {
searchOpt.AssigneeID = optional.Some[int64](0) // FIXME: this is inconsistent from other places, 0 means "no assignee"
} else if opts.AssigneeID.Value() != 0 {
searchOpt.AssigneeID = opts.AssigneeID
}
// See the comment of issues_model.SearchOptions for the reason why we need to convert
@ -62,7 +63,7 @@ func ToSearchOptions(keyword string, opts *issues_model.IssuesOptions) *SearchOp
}
searchOpt.ProjectColumnID = convertID(opts.ProjectColumnID)
searchOpt.PosterID = convertID(opts.PosterID)
searchOpt.PosterID = opts.PosterID
searchOpt.MentionID = convertID(opts.MentionedID)
searchOpt.ReviewedID = convertID(opts.ReviewedID)
searchOpt.ReviewRequestedID = convertID(opts.ReviewRequestedID)

View File

@ -18,7 +18,7 @@ import (
)
const (
issueIndexerLatestVersion = 1
issueIndexerLatestVersion = 2
// multi-match-types, currently only 2 types are used
// Reference: https://www.elastic.co/guide/en/elasticsearch/reference/7.0/query-dsl-multi-match-query.html#multi-match-types
esMultiMatchTypeBestFields = "best_fields"
@ -58,6 +58,7 @@ const (
"is_pull": { "type": "boolean", "index": true },
"is_closed": { "type": "boolean", "index": true },
"is_archived": { "type": "boolean", "index": true },
"label_ids": { "type": "integer", "index": true },
"no_label": { "type": "boolean", "index": true },
"milestone_id": { "type": "integer", "index": true },
@ -168,6 +169,9 @@ func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) (
if options.IsClosed.Has() {
query.Must(elastic.NewTermQuery("is_closed", options.IsClosed.Value()))
}
if options.IsArchived.Has() {
query.Must(elastic.NewTermQuery("is_archived", options.IsArchived.Value()))
}
if options.NoLabelOnly {
query.Must(elastic.NewTermQuery("no_label", true))

View File

@ -37,6 +37,7 @@ func TestDBSearchIssues(t *testing.T) {
t.Run("search issues by ID", searchIssueByID)
t.Run("search issues is pr", searchIssueIsPull)
t.Run("search issues is closed", searchIssueIsClosed)
t.Run("search issues is archived", searchIssueIsArchived)
t.Run("search issues by milestone", searchIssueByMilestoneID)
t.Run("search issues by label", searchIssueByLabelID)
t.Run("search issues by time", searchIssueByTime)
@ -191,7 +192,7 @@ func searchIssueByID(t *testing.T) {
},
{
// NOTE: This tests no assignees filtering and also ToSearchOptions() to ensure it will set AssigneeID to 0 when it is passed as -1.
opts: *ToSearchOptions("", &issues.IssuesOptions{AssigneeID: -1}),
opts: *ToSearchOptions("", &issues.IssuesOptions{AssigneeID: optional.Some(db.NoConditionID)}),
expectedIDs: []int64{22, 21, 16, 15, 14, 13, 12, 11, 20, 5, 19, 18, 10, 7, 4, 9, 8, 3, 2},
},
{
@ -298,6 +299,33 @@ func searchIssueIsClosed(t *testing.T) {
}
}
func searchIssueIsArchived(t *testing.T) {
tests := []struct {
opts SearchOptions
expectedIDs []int64
}{
{
SearchOptions{
IsArchived: optional.Some(false),
},
[]int64{22, 21, 17, 16, 15, 13, 12, 11, 20, 6, 5, 19, 18, 10, 7, 4, 9, 8, 3, 2, 1},
},
{
SearchOptions{
IsArchived: optional.Some(true),
},
[]int64{14},
},
}
for _, test := range tests {
issueIDs, _, err := SearchIssues(context.TODO(), &test.opts)
if !assert.NoError(t, err) {
return
}
assert.Equal(t, test.expectedIDs, issueIDs)
}
}
func searchIssueByMilestoneID(t *testing.T) {
tests := []struct {
opts SearchOptions

View File

@ -25,6 +25,7 @@ type IndexerData struct {
// Fields used for filtering
IsPull bool `json:"is_pull"`
IsClosed bool `json:"is_closed"`
IsArchived bool `json:"is_archived"`
LabelIDs []int64 `json:"label_ids"`
NoLabel bool `json:"no_label"` // True if LabelIDs is empty
MilestoneID int64 `json:"milestone_id"`
@ -83,6 +84,7 @@ type SearchOptions struct {
IsPull optional.Option[bool] // if the issues is a pull request
IsClosed optional.Option[bool] // if the issues is closed
IsArchived optional.Option[bool] // if the repo is archived
IncludedLabelIDs []int64 // labels the issues have
ExcludedLabelIDs []int64 // labels the issues don't have

View File

@ -113,7 +113,7 @@ var cases = []*testIndexerCase{
},
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
assert.Len(t, result.Hits, 5)
assert.Equal(t, len(data), int(result.Total))
},
},
@ -176,7 +176,7 @@ var cases = []*testIndexerCase{
IsPull: optional.Some(false),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
assert.Len(t, result.Hits, 5)
for _, v := range result.Hits {
assert.False(t, data[v.ID].IsPull)
}
@ -192,7 +192,7 @@ var cases = []*testIndexerCase{
IsPull: optional.Some(true),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
assert.Len(t, result.Hits, 5)
for _, v := range result.Hits {
assert.True(t, data[v.ID].IsPull)
}
@ -208,7 +208,7 @@ var cases = []*testIndexerCase{
IsClosed: optional.Some(false),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
assert.Len(t, result.Hits, 5)
for _, v := range result.Hits {
assert.False(t, data[v.ID].IsClosed)
}
@ -224,7 +224,7 @@ var cases = []*testIndexerCase{
IsClosed: optional.Some(true),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
assert.Len(t, result.Hits, 5)
for _, v := range result.Hits {
assert.True(t, data[v.ID].IsClosed)
}
@ -274,7 +274,7 @@ var cases = []*testIndexerCase{
MilestoneIDs: []int64{1, 2, 6},
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
assert.Len(t, result.Hits, 5)
for _, v := range result.Hits {
assert.Contains(t, []int64{1, 2, 6}, data[v.ID].MilestoneID)
}
@ -292,7 +292,7 @@ var cases = []*testIndexerCase{
MilestoneIDs: []int64{0},
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
assert.Len(t, result.Hits, 5)
for _, v := range result.Hits {
assert.Equal(t, int64(0), data[v.ID].MilestoneID)
}
@ -310,7 +310,7 @@ var cases = []*testIndexerCase{
ProjectID: optional.Some(int64(1)),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
assert.Len(t, result.Hits, 5)
for _, v := range result.Hits {
assert.Equal(t, int64(1), data[v.ID].ProjectID)
}
@ -328,7 +328,7 @@ var cases = []*testIndexerCase{
ProjectID: optional.Some(int64(0)),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
assert.Len(t, result.Hits, 5)
for _, v := range result.Hits {
assert.Equal(t, int64(0), data[v.ID].ProjectID)
}
@ -346,7 +346,7 @@ var cases = []*testIndexerCase{
ProjectColumnID: optional.Some(int64(1)),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
assert.Len(t, result.Hits, 5)
for _, v := range result.Hits {
assert.Equal(t, int64(1), data[v.ID].ProjectColumnID)
}
@ -364,7 +364,7 @@ var cases = []*testIndexerCase{
ProjectColumnID: optional.Some(int64(0)),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
assert.Len(t, result.Hits, 5)
for _, v := range result.Hits {
assert.Equal(t, int64(0), data[v.ID].ProjectColumnID)
}
@ -382,7 +382,7 @@ var cases = []*testIndexerCase{
PosterID: optional.Some(int64(1)),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
assert.Len(t, result.Hits, 5)
for _, v := range result.Hits {
assert.Equal(t, int64(1), data[v.ID].PosterID)
}
@ -400,7 +400,7 @@ var cases = []*testIndexerCase{
AssigneeID: optional.Some(int64(1)),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
assert.Len(t, result.Hits, 5)
for _, v := range result.Hits {
assert.Equal(t, int64(1), data[v.ID].AssigneeID)
}
@ -418,7 +418,7 @@ var cases = []*testIndexerCase{
AssigneeID: optional.Some(int64(0)),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
assert.Len(t, result.Hits, 5)
for _, v := range result.Hits {
assert.Equal(t, int64(0), data[v.ID].AssigneeID)
}
@ -436,7 +436,7 @@ var cases = []*testIndexerCase{
MentionID: optional.Some(int64(1)),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
assert.Len(t, result.Hits, 5)
for _, v := range result.Hits {
assert.Contains(t, data[v.ID].MentionIDs, int64(1))
}
@ -454,7 +454,7 @@ var cases = []*testIndexerCase{
ReviewedID: optional.Some(int64(1)),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
assert.Len(t, result.Hits, 5)
for _, v := range result.Hits {
assert.Contains(t, data[v.ID].ReviewedIDs, int64(1))
}
@ -472,7 +472,7 @@ var cases = []*testIndexerCase{
ReviewRequestedID: optional.Some(int64(1)),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
assert.Len(t, result.Hits, 5)
for _, v := range result.Hits {
assert.Contains(t, data[v.ID].ReviewRequestedIDs, int64(1))
}
@ -490,7 +490,7 @@ var cases = []*testIndexerCase{
SubscriberID: optional.Some(int64(1)),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
assert.Len(t, result.Hits, 5)
for _, v := range result.Hits {
assert.Contains(t, data[v.ID].SubscriberIDs, int64(1))
}
@ -509,7 +509,7 @@ var cases = []*testIndexerCase{
UpdatedBeforeUnix: optional.Some(int64(30)),
},
Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
assert.Equal(t, 5, len(result.Hits))
assert.Len(t, result.Hits, 5)
for _, v := range result.Hits {
assert.GreaterOrEqual(t, data[v.ID].UpdatedUnix, int64(20))
assert.LessOrEqual(t, data[v.ID].UpdatedUnix, int64(30))
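The assertions above are being migrated from assert.Equal(t, 5, len(result.Hits)) to assert.Len(t, result.Hits, 5). A minimal self-contained sketch of the difference, using made-up data rather than the indexer fixtures:

package indexer_test

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

// TestHitCountStyle only illustrates the assertion style changed above,
// not the indexer behaviour; the slice contents are hypothetical.
func TestHitCountStyle(t *testing.T) {
	hits := []int64{1, 2, 3, 4, 5}

	// Old style: a failure reports only the two mismatched integers.
	assert.Equal(t, 5, len(hits))

	// New style: a failure also prints the slice contents, and the call
	// reads as a single intent ("hits should have length 5").
	assert.Len(t, hits, 5)
}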

View File

@ -18,7 +18,7 @@ import (
)
const (
issueIndexerLatestVersion = 3
issueIndexerLatestVersion = 4
// TODO: make this configurable if necessary
maxTotalHits = 10000
@ -61,6 +61,7 @@ func NewIndexer(url, apiKey, indexerName string) *Indexer {
"is_public",
"is_pull",
"is_closed",
"is_archived",
"label_ids",
"no_label",
"milestone_id",
@ -145,6 +146,9 @@ func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) (
if options.IsClosed.Has() {
query.And(inner_meilisearch.NewFilterEq("is_closed", options.IsClosed.Value()))
}
if options.IsArchived.Has() {
query.And(inner_meilisearch.NewFilterEq("is_archived", options.IsArchived.Value()))
}
if options.NoLabelOnly {
query.And(inner_meilisearch.NewFilterEq("no_label", true))
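The new is_archived attribute follows the same pattern as existing flags such as is_closed: declare it filterable, bump issueIndexerLatestVersion so existing Meilisearch indexes get rebuilt with the new setting, and add the filter only when the option is set. A hedged caller-side fragment (IsClosed and IsArchived appear in this diff; the rest of the SearchOptions usage is assumed, and the fragment is not a complete program):

// With both options set, the Search code above composes a Meilisearch filter
// roughly equivalent to "is_closed = false AND is_archived = false".
opts := &internal.SearchOptions{
	IsClosed:   optional.Some(false), // open issues only
	IsArchived: optional.Some(false), // hide issues from archived repositories
}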

View File

@ -101,6 +101,7 @@ func getIssueIndexerData(ctx context.Context, issueID int64) (*internal.IndexerD
Comments: comments,
IsPull: issue.IsPull,
IsClosed: issue.IsClosed,
IsArchived: issue.Repo.IsArchived,
LabelIDs: labels,
NoLabel: len(labels) == 0,
MilestoneID: issue.MilestoneID,

View File

@ -72,7 +72,10 @@ func (c *HTTPClient) batch(ctx context.Context, operation string, objects []Poin
url := fmt.Sprintf("%s/objects/batch", c.endpoint)
request := &BatchRequest{operation, c.transferNames(), nil, objects}
// `ref` is an "optional object describing the server ref that the objects belong to"
// but some (non-compliant) LFS servers require it, so sending an empty ref here should satisfy them without breaking the compliant ones.
// https://github.com/git-lfs/git-lfs/blob/a32a02b44bf8a511aa14f047627c49e1a7fd5021/docs/api/batch.md?plain=1#L37
request := &BatchRequest{operation, c.transferNames(), &Reference{}, objects}
payload := new(bytes.Buffer)
err := json.NewEncoder(payload).Encode(request)
if err != nil {
@ -236,6 +239,7 @@ func createRequest(ctx context.Context, method, url string, headers map[string]s
req.Header.Set(key, value)
}
req.Header.Set("Accept", AcceptHeader)
req.Header.Set("User-Agent", UserAgentHeader)
return req, nil
}
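Per the linked batch API document the ref object is optional, so sending an empty one should be tolerated by spec-compliant servers while satisfying the stricter ones, and a non-nil pointer is still serialized even under omitempty. A self-contained sketch with hypothetical stand-in structs (the real BatchRequest/Reference definitions and their JSON tags are not shown in this diff):

package main

import (
	"encoding/json"
	"fmt"
)

// Stand-ins for illustration only; the real types live in the lfs package.
type reference struct {
	Name string `json:"name,omitempty"`
}

type batchRequest struct {
	Operation string     `json:"operation"`
	Transfers []string   `json:"transfers,omitempty"`
	Ref       *reference `json:"ref,omitempty"`
	Objects   []any      `json:"objects"`
}

func main() {
	req := &batchRequest{
		Operation: "download",
		Transfers: []string{"basic"},
		Ref:       &reference{}, // non-nil pointer: "ref":{} is emitted even with omitempty
		Objects:   []any{},
	}
	out, _ := json.Marshal(req)
	fmt.Println(string(out))
	// Output: {"operation":"download","transfers":["basic"],"ref":{},"objects":[]}
}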

View File

@ -14,8 +14,12 @@ import (
const (
// MediaType contains the media type for LFS server requests
MediaType = "application/vnd.git-lfs+json"
// Some LFS servers offer content with other types, so fallback to '*/*' if application/vnd.git-lfs+json cannot be served
// AcceptHeader prefers the LFS media type but falls back to '*/*', since some LFS servers offer content with other types and cannot serve application/vnd.git-lfs+json
AcceptHeader = "application/vnd.git-lfs+json;q=0.9, */*;q=0.8"
// UserAgentHeader is the User-Agent sent by Gitea's self-implemented LFS client.
// The version is kept in line with the latest git-lfs release to avoid incompatibilities,
// since some LFS servers check this header.
UserAgentHeader = "git-lfs/3.6.0 (Gitea)"
)
// BatchRequest contains multiple requests processed in one batch operation.

View File

@ -96,7 +96,7 @@ func TestBasicTransferAdapter(t *testing.T) {
for n, c := range cases {
_, err := a.Download(context.Background(), c.link)
if len(c.expectederror) > 0 {
assert.True(t, strings.Contains(err.Error(), c.expectederror), "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror)
assert.Contains(t, err.Error(), c.expectederror, "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror)
} else {
assert.NoError(t, err, "case %d", n)
}
@ -129,7 +129,7 @@ func TestBasicTransferAdapter(t *testing.T) {
for n, c := range cases {
err := a.Upload(context.Background(), c.link, p, bytes.NewBufferString("dummy"))
if len(c.expectederror) > 0 {
assert.True(t, strings.Contains(err.Error(), c.expectederror), "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror)
assert.Contains(t, err.Error(), c.expectederror, "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror)
} else {
assert.NoError(t, err, "case %d", n)
}
@ -162,7 +162,7 @@ func TestBasicTransferAdapter(t *testing.T) {
for n, c := range cases {
err := a.Verify(context.Background(), c.link, p)
if len(c.expectederror) > 0 {
assert.True(t, strings.Contains(err.Error(), c.expectederror), "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror)
assert.Contains(t, err.Error(), c.expectederror, "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror)
} else {
assert.NoError(t, err, "case %d", n)
}
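The same cleanup as in the indexer tests above: assert.True(t, strings.Contains(...)) becomes assert.Contains(...), which removes the strings dependency from the assertion and lets testify report both the haystack and the needle on failure. A minimal sketch with hypothetical values:

package lfs_test

import (
	"strings"
	"testing"

	"github.com/stretchr/testify/assert"
)

// TestContainsStyle only illustrates the assertion style changed above;
// the error message is made up.
func TestContainsStyle(t *testing.T) {
	errMsg := "Remote: Object not found"

	// Old style: a failure reports only "should be true".
	assert.True(t, strings.Contains(errMsg, "not found"))

	// New style: a failure reports both strings being compared.
	assert.Contains(t, errMsg, "not found")
}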

Some files were not shown because too many files have changed in this diff.