Merge branch 'main' into sync-issue-pr-and-more

harryzcy 2023-04-22 01:21:33 -04:00 committed by GitHub
commit a2df3f1b7f
460 changed files with 8983 additions and 6905 deletions


@ -12,7 +12,8 @@ trigger:
- pull_request
paths:
exclude:
- docs/**
- "docs/**"
- "*.md"
volumes:
- name: deps
@ -181,7 +182,7 @@ trigger:
- pull_request
paths:
exclude:
- docs/**
- "docs/**"
volumes:
- name: deps
@ -266,6 +267,35 @@ steps:
- name: deps
path: /go
---
kind: pipeline
type: docker
name: compliance-docs
platform:
os: linux
arch: amd64
trigger:
event:
- pull_request
paths:
include:
- "docs/**"
- "*.md"
steps:
- name: deps-frontend
image: node:18
pull: always
commands:
- make deps-frontend
- name: lint-md
image: node:18
commands:
- make lint-md
---
kind: pipeline
type: docker
@ -283,7 +313,7 @@ trigger:
- pull_request
paths:
exclude:
- docs/**
- "docs/**"
volumes:
- name: deps
@ -444,7 +474,7 @@ trigger:
- pull_request
paths:
exclude:
- docs/**
- "docs/**"
volumes:
- name: deps
@ -530,7 +560,7 @@ trigger:
- pull_request
paths:
exclude:
- docs/**
- "docs/**"
volumes:
- name: deps
@ -616,7 +646,7 @@ trigger:
- pull_request
paths:
exclude:
- docs/**
- "docs/**"
volumes:
- name: deps
@ -696,7 +726,7 @@ trigger:
- pull_request
paths:
exclude:
- docs/**
- "docs/**"
volumes:
- name: deps
@ -726,11 +756,11 @@ steps:
# TODO: We should probably build all dependencies into a test image
- name: test-e2e
image: mcr.microsoft.com/playwright:v1.32.1-focal
image: mcr.microsoft.com/playwright:v1.32.3-focal
commands:
- curl -sLO https://go.dev/dl/go1.20.linux-amd64.tar.gz && tar -C /usr/local -xzf go1.20.linux-amd64.tar.gz
- apt-get -qq update && apt-get -qqy install jq build-essential
- curl -fsSL "https://go.dev/dl/$(curl -s 'https://go.dev/dl/?mode=json' | jq -r '.[].version' | sort -Vr | head -1).linux-amd64.tar.gz" | tar -xz -C /usr/local
- groupadd --gid 1001 gitea && useradd -m --gid 1001 --uid 1001 gitea
- apt-get -qq update && apt-get -qqy install build-essential
- ./build/test-env-prepare.sh
- su gitea bash -c "export PATH=$PATH:/usr/local/go/bin && timeout -s ABRT 40m make test-e2e-sqlite"
environment:
@ -867,7 +897,7 @@ trigger:
- push
paths:
exclude:
- docs/**
- "docs/**"
depends_on:
- testing-mysql
@ -1125,7 +1155,7 @@ trigger:
- pull_request
paths:
include:
- docs/**
- "docs/**"
steps:
- name: build-docs
@ -1176,7 +1206,7 @@ trigger:
- cron
paths:
exclude:
- docs/**
- "docs/**"
steps:
- name: fetch-tags
@ -1253,7 +1283,7 @@ trigger:
- cron
paths:
exclude:
- docs/**
- "docs/**"
steps:
- name: fetch-tags
@ -1468,7 +1498,7 @@ trigger:
- pull_request
paths:
exclude:
- docs/**
- "docs/**"
steps:
- name: dryrun
@ -1515,7 +1545,7 @@ trigger:
- cron
paths:
exclude:
- docs/**
- "docs/**"
steps:
- name: fetch-tags
@ -1592,7 +1622,7 @@ trigger:
- cron
paths:
exclude:
- docs/**
- "docs/**"
steps:
- name: fetch-tags
@ -1667,7 +1697,7 @@ trigger:
- cron
paths:
exclude:
- docs/**
- "docs/**"
steps:
- name: fetch-tags
@ -1834,7 +1864,7 @@ trigger:
- cron
paths:
exclude:
- docs/**
- "docs/**"
depends_on:
- docker-linux-amd64-release-version
@ -1884,7 +1914,7 @@ trigger:
- cron
paths:
exclude:
- docs/**
- "docs/**"
depends_on:
- docker-linux-amd64-release


@ -12,6 +12,7 @@ plugins:
- eslint-plugin-unicorn
- eslint-plugin-import
- eslint-plugin-jquery
- eslint-plugin-no-jquery
- eslint-plugin-sonarjs
- eslint-plugin-custom-elements
@ -65,7 +66,7 @@ rules:
curly: [0]
custom-elements/expose-class-on-global: [0]
custom-elements/extends-correct-class: [2]
custom-elements/file-name-matches-element: [0]
custom-elements/file-name-matches-element: [2]
custom-elements/no-constructor: [2]
custom-elements/no-customized-built-in-elements: [2]
custom-elements/no-dom-traversal-in-attributechangedcallback: [2]
@ -192,6 +193,7 @@ rules:
jquery/no-val: [0]
jquery/no-when: [2]
jquery/no-wrap: [2]
no-jquery/no-event-shorthand: [2]
key-spacing: [2]
keyword-spacing: [2]
line-comment-position: [0]
@ -500,7 +502,7 @@ rules:
unicorn/prefer-default-parameters: [0]
unicorn/prefer-event-key: [2]
unicorn/prefer-event-target: [2]
unicorn/prefer-export-from: [2]
unicorn/prefer-export-from: [2, {ignoreUsedVariables: true}]
unicorn/prefer-includes: [2]
unicorn/prefer-json-parse-buffer: [0]
unicorn/prefer-logical-operator-over-ternary: [2]

.gitattributes

@ -1,6 +1,7 @@
* text=auto eol=lf
*.tmpl linguist-language=Handlebars
/assets/*.json linguist-generated
/public/img/svg/*.svg linguist-generated
/public/vendor/** -text -eol linguist-vendored
/vendor/** -text -eol linguist-vendored
/web_src/fomantic/build/** linguist-generated


@ -111,67 +111,16 @@ issues:
- linters:
- gocritic
text: "`ID' should not be capitalized"
- path: modules/templates/helper.go
linters:
- gocritic
- linters:
- unused
- deadcode
text: "swagger"
- path: contrib/pr/checkout.go
linters:
- errcheck
- path: models/issue.go
linters:
- errcheck
- path: models/migrations/
linters:
- errcheck
- path: modules/log/
linters:
- errcheck
- path: routers/api/v1/repo/issue_subscription.go
linters:
- dupl
- path: routers/repo/view.go
linters:
- dupl
- path: models/migrations/
linters:
- unused
- linters:
- staticcheck
text: "argument x is overwritten before first use"
- path: modules/httplib/httplib.go
linters:
- staticcheck
# Enabling this would require refactoring the methods and how they are called.
- path: models/issue_comment_list.go
linters:
- dupl
- path: models/update.go
linters:
- unused
- path: cmd/dump.go
linters:
- dupl
- path: services/webhook/webhook.go
linters:
- structcheck
- text: "commentFormatting: put a space between `//` and comment text"
linters:
- gocritic
- text: "exitAfterDefer:"
linters:
- gocritic
- path: modules/graceful/manager_windows.go
linters:
- staticcheck
text: "svc.IsAnInteractiveSession is deprecated: Use IsWindowsService instead."
- path: models/user/openid.go
linters:
- golint
- path: models/user/badge.go
linters:
- revive
text: "exported: type name will be used as user.UserBadge by other packages, and that stutters; consider calling this Badge"


@ -1,6 +1,7 @@
# GNU makefile proxy script for BSD make
#
# Written and maintained by Mahmoud Al-Qudsi <mqudsi@neosmart.net>
# Copyright NeoSmart Technologies <https://neosmart.net/> 2014-2018
# Copyright NeoSmart Technologies <https://neosmart.net/> 2014-2019
# Obtain updates from <https://github.com/neosmart/gmake-proxy>
#
# Redistribution and use in source and binary forms, with or without
@ -26,26 +27,32 @@
JARG =
GMAKE = "gmake"
#When gmake is called from another make instance, -w is automatically added
#which causes extraneous messages about directory changes to be emitted.
#--no-print-directory silences these messages.
# When gmake is called from another make instance, -w is automatically added
# which causes extraneous messages about directory changes to be emitted.
# Running with --no-print-directory silences these messages.
GARGS = "--no-print-directory"
.if "$(.MAKE.JOBS)" != ""
JARG = -j$(.MAKE.JOBS)
JARG = -j$(.MAKE.JOBS)
.endif
#by default bmake will cd into ./obj first
# bmake prefers out-of-source builds and tries to cd into ./obj (among others)
# where possible. GNU Make doesn't, so override that value.
.OBJDIR: ./
# The GNU convention is to use the lowercased `prefix` variable/macro to
# specify the installation directory. Humor them.
GPREFIX = ""
.if defined(PREFIX) && ! defined(prefix)
GPREFIX = 'prefix = "$(PREFIX)"'
.endif
.BEGIN: .SILENT
which $(GMAKE) || printf "Error: GNU Make is required!\n\n" 1>&2 && false
.PHONY: FRC
$(.TARGETS): FRC
$(GMAKE) $(GARGS) $(.TARGETS:S,.DONE,,) $(JARG)
$(GMAKE) $(GPREFIX) $(GARGS) $(.TARGETS:S,.DONE,,) $(JARG)
.DONE .DEFAULT: .SILENT
$(GMAKE) $(GARGS) $(.TARGETS:S,.DONE,,) $(JARG)
.ERROR: .SILENT
if ! which $(GMAKE) > /dev/null; then \
echo "GNU Make is required!"; \
fi
$(GMAKE) $(GPREFIX) $(GARGS) $(.TARGETS:S,.DONE,,) $(JARG)


@ -4,7 +4,96 @@ This changelog goes through all the changes that have been made in each release
without substantial changes to our git log; to see the highlights of what has
been added to each release, please refer to the [blog](https://blog.gitea.io).
## [1.19.0](https://github.com/go-gitea/gitea/releases/tag/1.19.0) - 2023-03-19
## [1.19.1](https://github.com/go-gitea/gitea/releases/tag/v1.19.1) - 2023-04-12
* BREAKING
* Rename actions unit to `repo.actions` and add docs for it (#23733) (#23881)
* ENHANCEMENTS
* Add card type to org/user level project on creation, edit and view (#24043) (#24066)
* Refactor commit status for Actions jobs (#23786) (#24060)
* Show errors for KaTeX and mermaid on the preview tab (#24009) (#24019)
* Show protected branch rule names again (#23907) (#24018)
* Adjust sticky PR header to cover background (#23956) (#23999)
* Discolor pull request tab labels (#23950) (#23987)
* Treat PRs with agit flow as fork PRs when triggering actions. (#23884) (#23967)
* Left-align review comments (#23937)
* Fix image border-radius (#23886) (#23930)
* Scroll collapsed file into view (#23702) (#23929)
* Fix code view (diff) broken layout (#23096) (#23918)
* Org pages style fixes (#23901) (#23914)
* Fix user profile description rendering (#23882) (#23902)
* Fix review box viewport overflow issue (#23800) (#23898)
* Prefill input values in OAuth settings as intended (#23829) (#23871)
* CSS color tweaks (#23828) (#23842)
* Fix incorrect visibility dropdown list in add/edit user page (#23804) (#23833)
* Add CSS rules for basic colored labels (#23774) (#23777)
* Add creation time in tag list page (#23693) (#23773)
* Fix br display for packages curls (#23737) (#23764)
* Fix issue due date edit toggle bug (#23723) (#23758)
* Improve commit graph page UI alignment (#23751) (#23754)
* Use GitHub Actions compatible globbing for `branches`, `tag`, `path` filter (#22804) (#23740)
* Redirect to project again after editing it (#23326) (#23739)
* Remove row clicking from notification table (#22695) (#23706)
* Remove conflicting CSS rules on notifications, improve notifications table (#23565) (#23621)
* Fix diff tree height and adjust target file style (#23616)
* BUGFIXES
* Improve error logging for LFS (#24072) (#24082)
* Fix custom mailer template on Windows platform (#24081)
* Update the value of `diffEnd` when clicking the `Show More` button in the DiffFileTree (#24069) (#24078)
* Make label templates have consistent behavior and priority (#23749)
* Fix accidental overwriting of LDAP team memberships (#24050) (#24065)
* Fix branch protection priority (#24045) (#24061)
* Use actions job link as commit status URL instead of run link (#24023) (#24032)
* Add actions support to package auth verification (#23729) (#24028)
* Fix protected branch for API (#24013) (#24027)
* Do not escape space between PyPI repository URL and package name… (#23981) (#24008)
* Fix redirect bug when creating issue from a project (#23971) (#23997)
* Set `ref` to fully-formed of the tag when trigger event is `release` (#23944) (#23989)
* Use Get/Set instead of Rename when regenerating session ID (#23975) (#23983)
* Ensure RSS icon is present on all repo tabs (#23904) (#23973)
* Remove `Repository.getFilesChanged` to fix Actions `paths` and `paths-ignore` filter (#23920) (#23969)
* Delete deleted release attachments immediately from storage (#23913) (#23958)
* Use ghost user if package creator does not exist (#23822) (#23915)
* User/Org Feed render description as per web (#23887) (#23906)
* Fix `cases.Title` crash for concurrency (#23885) (#23903)
* Convert .Source.SkipVerify to $cfg.SkipVerify (#23839) (#23899)
* Support "." char as user name for User/Orgs in RSS/ATOM/GPG/KEYS path ... (#23874) (#23878)
* Fix JS error when changing PR's target branch (#23862) (#23864)
* Fix 500 error if there is a name conflict when editing authentication source (#23832) (#23852)
* Fix closed PR also triggers Webhooks and actions (#23782) (#23834)
* Fix checks for `needs` in Actions (#23789) (#23831)
* Fix "Updating branch by merge" bug in "update_branch_by_merge.tmpl" (#23790) (#23825)
* Fix cancel button in the page of project edit not work (#23655) (#23813)
* Don't apply the group filter when listing LDAP group membership if it is empty (#23745) (#23788)
* Fix profile page email display, respect settings (#23747) (#23756)
* Fix project card preview select and template select (#23684) (#23731)
* Check LFS/Packages settings in dump and doctor command (#23631) (#23730)
* Add git dashes separator to some "log" and "diff" commands (#23606) (#23720)
* Create commit status when event is `pull_request_sync` (#23683) (#23691)
* Fix incorrect `HookEventType` of pull request review comments (#23650) (#23678)
* Fix incorrect `show-modal` and `show-panel` class (#23660) (#23663)
* Improve workflow event triggers (#23613) (#23648)
* Introduce path Clean/Join helper functions, partially backport&refactor (#23495) (#23607)
* Fix pagination on `/notifications/watching` (#23564) (#23603)
* Fix submodule is nil panic (#23588) (#23601)
* Polyfill the window.customElements (#23592) (#23595)
* Avoid too long names for actions (#23162) (#23190)
* TRANSLATION
* Backport locales (with manual fixes) (#23808, #23634, #24083)
* BUILD
* Hardcode the path to docker images (#23955) (#23968)
* DOCS
* Update documentation to explain which projects allow Gitea to host static pages (#23993) (#24058)
* Merge `push to create`, `open PR from push`, and `push options` docs articles into one (#23744) (#23959)
* Fix code blocks in the cheat sheet (#23664) (#23669)
* MISC
* Do not crash when parsing an invalid workflow file (#23972) (#23976)
* Remove assertion debug code for show/hide refactoring (#23576) (#23868)
* Add ONLY_SHOW_RELEVANT_REPOS back, fix explore page bug, make code more strict (#23766) (#23791)
* Make minio package support legacy MD5 checksum (#23768) (#23770)
* Improve template error reporting (#23396) (#23600)
## [1.19.0](https://github.com/go-gitea/gitea/releases/tag/v1.19.0) - 2023-03-19
* BREAKING
* Add loading yaml label template files (#22976) (#23232)


@ -41,7 +41,7 @@ Patrick Schratz <patrick.schratz@gmail.com> (@pat-s)
Janis Estelmann <admin@oldschoolhack.me> (@KN4CK3R)
Steven Kriegler <sk.bunsenbrenner@gmail.com> (@justusbunsi)
Jimmy Praet <jimmy.praet@telenet.be> (@jpraet)
Leon Hofmeister <dev.lh@web.de> (@delvh)
Leon Hofmeister <dev.lh@web.de> (@delvh)
Wim <wim@42.be> (@42wim)
Jason Song <i@wolfogre.com> (@wolfogre)
Yarden Shoham <git@yardenshoham.com> (@yardenshoham)
@ -49,3 +49,4 @@ Yu Tian <zettat123@gmail.com> (@Zettat123)
Eddie Yang <576951401@qq.com> (@yp05327)
Dong Ge <gedong_1994@163.com> (@sillyguodong)
Xinyi Gong <hestergong@gmail.com> (@HesterG)
wxiaoguang <wxiaoguang@gmail.com> (@wxiaoguang)


@ -20,7 +20,7 @@ IMPORT := code.gitea.io/gitea
GO ?= go
SHASUM ?= shasum -a 256
HAS_GO = $(shell hash $(GO) > /dev/null 2>&1 && echo "GO" || echo "NOGO" )
HAS_GO := $(shell hash $(GO) > /dev/null 2>&1 && echo yes)
COMMA := ,
XGO_VERSION := go-1.20.x
@ -41,7 +41,7 @@ DOCKER_IMAGE ?= gitea/gitea
DOCKER_TAG ?= latest
DOCKER_REF := $(DOCKER_IMAGE):$(DOCKER_TAG)
ifeq ($(HAS_GO), GO)
ifeq ($(HAS_GO), yes)
GOPATH ?= $(shell $(GO) env GOPATH)
export PATH := $(GOPATH)/bin:$(PATH)
@ -196,6 +196,7 @@ help:
@echo " - lint lint everything"
@echo " - lint-frontend lint frontend files"
@echo " - lint-backend lint backend files"
@echo " - lint-md lint markdown files"
@echo " - checks run various consistency checks"
@echo " - checks-frontend check frontend files"
@echo " - checks-backend check backend files"
@ -341,10 +342,13 @@ checks-backend: tidy-check swagger-check fmt-check misspell-check swagger-valida
lint: lint-frontend lint-backend
.PHONY: lint-frontend
lint-frontend: node_modules
lint-frontend: node_modules lint-md
npx eslint --color --max-warnings=0 --ext js,vue web_src/js build *.config.js docs/assets/js tests/e2e
npx stylelint --color --max-warnings=0 web_src/css
npx spectral lint -q -F hint $(SWAGGER_SPEC)
.PHONY: lint-md
lint-md: node_modules
npx markdownlint docs *.md
.PHONY: lint-backend

File diff suppressed because one or more lines are too long

cmd/actions.go (new file)

@ -0,0 +1,56 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package cmd
import (
"fmt"
"code.gitea.io/gitea/modules/private"
"code.gitea.io/gitea/modules/setting"
"github.com/urfave/cli"
)
var (
// CmdActions represents the available actions sub-commands.
CmdActions = cli.Command{
Name: "actions",
Usage: "",
Description: "Commands for managing Gitea Actions",
Subcommands: []cli.Command{
subcmdActionsGenRunnerToken,
},
}
subcmdActionsGenRunnerToken = cli.Command{
Name: "generate-runner-token",
Usage: "Generate a new token for a runner to use to register with the server",
Action: runGenerateActionsRunnerToken,
Aliases: []string{"grt"},
Flags: []cli.Flag{
cli.StringFlag{
Name: "scope, s",
Value: "",
Usage: "{owner}[/{repo}] - leave empty for a global runner",
},
},
}
)
func runGenerateActionsRunnerToken(c *cli.Context) error {
ctx, cancel := installSignals()
defer cancel()
setting.InitProviderFromExistingFile()
setting.LoadCommonSettings()
scope := c.String("scope")
respText, extra := private.GenerateActionsRunnerToken(ctx, scope)
if extra.HasError() {
return handleCliResponseExtra(extra)
}
_, _ = fmt.Printf("%s\n", respText)
return nil
}


@ -17,7 +17,7 @@ import (
var CmdConvert = cli.Command{
Name: "convert",
Usage: "Convert the database",
Description: "A command to convert an existing MySQL database from utf8 to utf8mb4",
Description: "A command to convert an existing MySQL database from utf8 to utf8mb4 or MSSQL database from varchar to nvarchar",
Action: runConvert,
}
@ -35,17 +35,22 @@ func runConvert(ctx *cli.Context) error {
log.Info("Log path: %s", setting.Log.RootPath)
log.Info("Configuration file: %s", setting.CustomConf)
if !setting.Database.Type.IsMySQL() {
fmt.Println("This command can only be used with a MySQL database")
return nil
switch {
case setting.Database.Type.IsMySQL():
if err := db.ConvertUtf8ToUtf8mb4(); err != nil {
log.Fatal("Failed to convert database from utf8 to utf8mb4: %v", err)
return err
}
fmt.Println("Converted successfully, please confirm your database's character set is now utf8mb4")
case setting.Database.Type.IsMSSQL():
if err := db.ConvertVarcharToNVarchar(); err != nil {
log.Fatal("Failed to convert database from varchar to nvarchar: %v", err)
return err
}
fmt.Println("Converted successfully, please confirm your database's all columns character is NVARCHAR now")
default:
fmt.Println("This command can only be used with a MySQL or MSSQL database")
}
if err := db.ConvertUtf8ToUtf8mb4(); err != nil {
log.Fatal("Failed to convert database from utf8 to utf8mb4: %v", err)
return err
}
fmt.Println("Converted successfully, please confirm your database's character set is now utf8mb4")
return nil
}
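Invocation is unchanged; a minimal usage sketch (run as the user Gitea runs under, adding the usual `--config` flag if the configuration file is in a non-default location):

```
gitea convert
```

On MySQL this converts the database from utf8 to utf8mb4, on MSSQL it converts VARCHAR columns to NVARCHAR, and on any other database it only prints a notice.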


@ -112,6 +112,10 @@ It can be used for backup and capture Gitea server image to send to maintainer`,
Name: "verbose, V",
Usage: "Show process details",
},
cli.BoolFlag{
Name: "quiet, q",
Usage: "Only display warnings and errors",
},
cli.StringFlag{
Name: "tempdir, t",
Value: os.TempDir(),
@ -192,12 +196,25 @@ func runDump(ctx *cli.Context) error {
if _, err := setting.CfgProvider.Section("log.console").NewKey("STDERR", "true"); err != nil {
fatal("Setting console logger to stderr failed: %v", err)
}
// Set loglevel to Warn if quiet-mode is requested
if ctx.Bool("quiet") {
if _, err := setting.CfgProvider.Section("log.console").NewKey("LEVEL", "Warn"); err != nil {
fatal("Setting console log-level failed: %v", err)
}
}
if !setting.InstallLock {
log.Error("Is '%s' really the right config path?\n", setting.CustomConf)
return fmt.Errorf("gitea is not initialized")
}
setting.LoadSettings() // cannot access session settings otherwise
verbose := ctx.Bool("verbose")
if verbose && ctx.Bool("quiet") {
return fmt.Errorf("--quiet and --verbose cannot both be set")
}
stdCtx, cancel := installSignals()
defer cancel()
@ -223,7 +240,6 @@ func runDump(ctx *cli.Context) error {
return err
}
verbose := ctx.Bool("verbose")
var iface interface{}
if fileName == "-" {
iface, err = archiver.ByExtension(fmt.Sprintf(".%s", outType))
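A usage sketch of the new flag (the output path below is just an example):

```
# Produce a dump while printing only warnings and errors to the console
gitea dump --quiet --file /tmp/gitea-dump.zip
```

Note that `--quiet` and `--verbose` are mutually exclusive; passing both returns an error before any work is done.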


@ -1,8 +1,6 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
//go:build bindata
package cmd
import (
@ -10,9 +8,9 @@ import (
"fmt"
"os"
"path/filepath"
"sort"
"strings"
"code.gitea.io/gitea/modules/assetfs"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/options"
"code.gitea.io/gitea/modules/public"
@ -89,24 +87,20 @@ var (
},
}
sections map[string]*section
assets []asset
matchedAssetFiles []assetFile
)
type section struct {
Path string
Names func() []string
IsDir func(string) (bool, error)
Asset func(string) ([]byte, error)
}
type asset struct {
Section *section
Name string
Path string
type assetFile struct {
fs *assetfs.LayeredFS
name string
path string
}
func initEmbeddedExtractor(c *cli.Context) error {
// FIXME: there is a bug, if the user runs `gitea embedded` with a different user or root,
// The setting.Init (loadRunModeFrom) will fail and do log.Fatal
// But the console logger has been deleted, so nothing is printed, the user sees nothing and Gitea just exits.
// Silence the console logger
log.DelNamedLogger("console")
log.DelNamedLogger(log.DEFAULT)
@ -115,24 +109,14 @@ func initEmbeddedExtractor(c *cli.Context) error {
setting.InitProviderAllowEmpty()
setting.LoadCommonSettings()
pats, err := getPatterns(c.Args())
patterns, err := compileCollectPatterns(c.Args())
if err != nil {
return err
}
sections := make(map[string]*section, 3)
sections["public"] = &section{Path: "public", Names: public.AssetNames, IsDir: public.AssetIsDir, Asset: public.Asset}
sections["options"] = &section{Path: "options", Names: options.AssetNames, IsDir: options.AssetIsDir, Asset: options.Asset}
sections["templates"] = &section{Path: "templates", Names: templates.BuiltinAssetNames, IsDir: templates.BuiltinAssetIsDir, Asset: templates.BuiltinAsset}
for _, sec := range sections {
assets = append(assets, buildAssetList(sec, pats, c)...)
}
// Sort assets
sort.SliceStable(assets, func(i, j int) bool {
return assets[i].Path < assets[j].Path
})
collectAssetFilesByPattern(c, patterns, "options", options.BuiltinAssets())
collectAssetFilesByPattern(c, patterns, "public", public.BuiltinAssets())
collectAssetFilesByPattern(c, patterns, "templates", templates.BuiltinAssets())
return nil
}
@ -166,8 +150,8 @@ func runListDo(c *cli.Context) error {
return err
}
for _, a := range assets {
fmt.Println(a.Path)
for _, a := range matchedAssetFiles {
fmt.Println(a.path)
}
return nil
@ -178,19 +162,19 @@ func runViewDo(c *cli.Context) error {
return err
}
if len(assets) == 0 {
return fmt.Errorf("No files matched the given pattern")
} else if len(assets) > 1 {
return fmt.Errorf("Too many files matched the given pattern; try to be more specific")
if len(matchedAssetFiles) == 0 {
return fmt.Errorf("no files matched the given pattern")
} else if len(matchedAssetFiles) > 1 {
return fmt.Errorf("too many files matched the given pattern, try to be more specific")
}
data, err := assets[0].Section.Asset(assets[0].Name)
data, err := matchedAssetFiles[0].fs.ReadFile(matchedAssetFiles[0].name)
if err != nil {
return fmt.Errorf("%s: %w", assets[0].Path, err)
return fmt.Errorf("%s: %w", matchedAssetFiles[0].path, err)
}
if _, err = os.Stdout.Write(data); err != nil {
return fmt.Errorf("%s: %w", assets[0].Path, err)
return fmt.Errorf("%s: %w", matchedAssetFiles[0].path, err)
}
return nil
@ -202,7 +186,7 @@ func runExtractDo(c *cli.Context) error {
}
if len(c.Args()) == 0 {
return fmt.Errorf("A list of pattern of files to extract is mandatory (e.g. '**' for all)")
return fmt.Errorf("a list of pattern of files to extract is mandatory (e.g. '**' for all)")
}
destdir := "."
@ -227,7 +211,7 @@ func runExtractDo(c *cli.Context) error {
if err != nil {
return fmt.Errorf("%s: %s", destdir, err)
} else if !fi.IsDir() {
return fmt.Errorf("%s is not a directory.", destdir)
return fmt.Errorf("destination %q is not a directory", destdir)
}
fmt.Printf("Extracting to %s:\n", destdir)
@ -235,23 +219,23 @@ func runExtractDo(c *cli.Context) error {
overwrite := c.Bool("overwrite")
rename := c.Bool("rename")
for _, a := range assets {
for _, a := range matchedAssetFiles {
if err := extractAsset(destdir, a, overwrite, rename); err != nil {
// Non-fatal error
fmt.Fprintf(os.Stderr, "%s: %v", a.Path, err)
fmt.Fprintf(os.Stderr, "%s: %v", a.path, err)
}
}
return nil
}
func extractAsset(d string, a asset, overwrite, rename bool) error {
dest := filepath.Join(d, filepath.FromSlash(a.Path))
func extractAsset(d string, a assetFile, overwrite, rename bool) error {
dest := filepath.Join(d, filepath.FromSlash(a.path))
dir := filepath.Dir(dest)
data, err := a.Section.Asset(a.Name)
data, err := a.fs.ReadFile(a.name)
if err != nil {
return fmt.Errorf("%s: %w", a.Path, err)
return fmt.Errorf("%s: %w", a.path, err)
}
if err := os.MkdirAll(dir, os.ModePerm); err != nil {
@ -272,7 +256,7 @@ func extractAsset(d string, a asset, overwrite, rename bool) error {
return fmt.Errorf("%s already exists, but it's not a regular file", dest)
} else if rename {
if err := util.Rename(dest, dest+".bak"); err != nil {
return fmt.Errorf("Error creating backup for %s: %w", dest, err)
return fmt.Errorf("error creating backup for %s: %w", dest, err)
}
// Attempt to respect file permissions mask (even if user:group will be set anew)
perms = fi.Mode()
@ -293,32 +277,30 @@ func extractAsset(d string, a asset, overwrite, rename bool) error {
return nil
}
func buildAssetList(sec *section, globs []glob.Glob, c *cli.Context) []asset {
results := make([]asset, 0, 64)
for _, name := range sec.Names() {
if isdir, err := sec.IsDir(name); !isdir && err == nil {
if sec.Path == "public" &&
strings.HasPrefix(name, "vendor/") &&
!c.Bool("include-vendored") {
continue
}
matchName := sec.Path + "/" + name
for _, g := range globs {
if g.Match(matchName) {
results = append(results, asset{
Section: sec,
Name: name,
Path: sec.Path + "/" + name,
})
break
}
func collectAssetFilesByPattern(c *cli.Context, globs []glob.Glob, path string, layer *assetfs.Layer) {
fs := assetfs.Layered(layer)
files, err := fs.ListAllFiles(".", true)
if err != nil {
log.Error("Error listing files in %q: %v", path, err)
return
}
for _, name := range files {
if path == "public" &&
strings.HasPrefix(name, "vendor/") &&
!c.Bool("include-vendored") {
continue
}
matchName := path + "/" + name
for _, g := range globs {
if g.Match(matchName) {
matchedAssetFiles = append(matchedAssetFiles, assetFile{fs: fs, name: name, path: path + "/" + name})
break
}
}
}
return results
}
func getPatterns(args []string) ([]glob.Glob, error) {
func compileCollectPatterns(args []string) ([]glob.Glob, error) {
if len(args) == 0 {
args = []string{"**"}
}
@ -326,7 +308,7 @@ func getPatterns(args []string) ([]glob.Glob, error) {
for i := range args {
if g, err := glob.Compile(args[i], '/'); err != nil {
return nil, fmt.Errorf("'%s': Invalid glob pattern: %w", args[i], err)
} else {
} else { //nolint:revive
pat[i] = g
}
}


@ -1,29 +0,0 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
//go:build !bindata
package cmd
import (
"fmt"
"os"
"github.com/urfave/cli"
)
// Cmdembedded represents the available extract sub-command.
var (
Cmdembedded = cli.Command{
Name: "embedded",
Usage: "Extract embedded resources",
Description: "A command for extracting embedded resources, like templates and images",
Action: extractorNotImplemented,
}
)
func extractorNotImplemented(c *cli.Context) error {
err := fmt.Errorf("Sorry: the 'embedded' subcommand is not available in builds without bindata")
fmt.Fprintf(os.Stderr, "%s\n", err)
return err
}


@ -186,8 +186,8 @@ RUN_MODE = ; prod
;; default is the system temporary directory.
;SSH_KEY_TEST_PATH =
;;
;; Path to ssh-keygen, default is 'ssh-keygen' which means the shell is responsible for finding out which one to call.
;SSH_KEYGEN_PATH = ssh-keygen
;; Use `ssh-keygen` to parse public SSH keys. The value is passed to the shell. By default, Gitea does the parsing itself.
;SSH_KEYGEN_PATH =
;;
;; Enable SSH Authorized Key Backup when rewriting all keys, default is true
;SSH_AUTHORIZED_KEYS_BACKUP = true
@ -603,7 +603,7 @@ ROUTER = console
;ACCESS = file
;;
;; Sets the template used to create the access log.
;ACCESS_LOG_TEMPLATE = {{.Ctx.RemoteAddr}} - {{.Identity}} {{.Start.Format "[02/Jan/2006:15:04:05 -0700]" }} "{{.Ctx.Req.Method}} {{.Ctx.Req.URL.RequestURI}} {{.Ctx.Req.Proto}}" {{.ResponseWriter.Status}} {{.ResponseWriter.Size}} "{{.Ctx.Req.Referer}}\" \"{{.Ctx.Req.UserAgent}}"
;ACCESS_LOG_TEMPLATE = {{.Ctx.RemoteHost}} - {{.Identity}} {{.Start.Format "[02/Jan/2006:15:04:05 -0700]" }} "{{.Ctx.Req.Method}} {{.Ctx.Req.URL.RequestURI}} {{.Ctx.Req.Proto}}" {{.ResponseWriter.Status}} {{.ResponseWriter.Size}} "{{.Ctx.Req.Referer}}" "{{.Ctx.Req.UserAgent}}"
;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;


@ -551,3 +551,28 @@ Restore-repo restore repository data from disk dir:
- `--owner_name lunny`: Restore destination owner name
- `--repo_name tango`: Restore destination repository name
- `--units <units>`: Which items will be restored; specify one or more units separated by commas. Allowed values are wiki, issues, labels, releases, release_assets, milestones, pull_requests, and comments. Empty means all units (see the example below).
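For example, to restore only the wiki and issues from a backup directory (paths and names here are illustrative):

```
gitea restore-repo --repo_dir ./backup --owner_name lunny --repo_name tango --units wiki,issues
```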
### actions generate-runner-token
Generate a new token for a runner to use to register with the server
- Options:
- `--scope {owner}[/{repo}]`, `-s {owner}[/{repo}]`: Limit the scope of the runner. With no scope, the runner can be used for all repos; it can also be limited to a specific owner or repo.
To register a global runner:
```
gitea actions generate-runner-token
```
To register a runner for a specific organization, in this case `org`:
```
gitea actions generate-runner-token -s org
```
To register a runner for a specific repo, in this case `username/test-repo`:
```
gitea actions generate-runner-token -s username/test-repo
```
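The printed token is then supplied to the runner when it registers against this instance; a hypothetical sketch (the exact flags depend on the act_runner version in use):

```
act_runner register --instance https://gitea.example.com --token <token-from-above> --no-interactive
```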


@ -345,7 +345,7 @@ The following configuration set `Content-Type: application/vnd.android.package-a
- `SSH_SERVER_MACS`: **hmac-sha2-256-etm@openssh.com, hmac-sha2-256, hmac-sha1**: For the built-in SSH server, choose the MACs to support for SSH connections, for system SSH this setting has no effect
- `SSH_SERVER_HOST_KEYS`: **ssh/gitea.rsa, ssh/gogs.rsa**: For the built-in SSH server, choose the keypairs to offer as the host key. The private key should be at `SSH_SERVER_HOST_KEY` and the public `SSH_SERVER_HOST_KEY.pub`. Relative paths are made absolute relative to the `APP_DATA_PATH`. If no key exists a 4096 bit RSA key will be created for you.
- `SSH_KEY_TEST_PATH`: **/tmp**: Directory to create temporary files in when testing public keys using ssh-keygen, default is the system temporary directory.
- `SSH_KEYGEN_PATH`: **ssh-keygen**: Path to ssh-keygen, default is 'ssh-keygen' which means the shell is responsible for finding out which one to call.
- `SSH_KEYGEN_PATH`: **\<empty\>**: Use `ssh-keygen` to parse public SSH keys. The value is passed to the shell. By default, Gitea does the parsing itself.
- `SSH_EXPOSE_ANONYMOUS`: **false**: Enable exposure of SSH clone URL to anonymous visitors, default is false.
- `SSH_PER_WRITE_TIMEOUT`: **30s**: Timeout for any write to the SSH connections. (Set to
-1 to disable all timeouts.)
@ -878,7 +878,7 @@ Default templates for project boards:
- `ENABLE_ACCESS_LOG`: **false**: Creates an access.log in NCSA common log format, or as per the following template
- `ACCESS`: **file**: Logging mode for the access logger, use a comma to separate values. Configure each mode in per mode log subsections `\[log.modename.access\]`. By default the file mode will log to `$ROOT_PATH/access.log`. (If you set this to `,` it will log to the default Gitea logger.)
- `ACCESS_LOG_TEMPLATE`: **`{{.Ctx.RemoteAddr}} - {{.Identity}} {{.Start.Format "[02/Jan/2006:15:04:05 -0700]" }} "{{.Ctx.Req.Method}} {{.Ctx.Req.URL.RequestURI}} {{.Ctx.Req.Proto}}" {{.ResponseWriter.Status}} {{.ResponseWriter.Size}} "{{.Ctx.Req.Referer}}\" \"{{.Ctx.Req.UserAgent}}"`**: Sets the template used to create the access log.
- `ACCESS_LOG_TEMPLATE`: **`{{.Ctx.RemoteHost}} - {{.Identity}} {{.Start.Format "[02/Jan/2006:15:04:05 -0700]" }} "{{.Ctx.Req.Method}} {{.Ctx.Req.URL.RequestURI}} {{.Ctx.Req.Proto}}" {{.ResponseWriter.Status}} {{.ResponseWriter.Size}} "{{.Ctx.Req.Referer}}" "{{.Ctx.Req.UserAgent}}"`**: Sets the template used to create the access log.
- The following variables are available:
- `Ctx`: the `context.Context` of the request.
- `Identity`: the SignedUserName or `"-"` if not logged in.


@ -265,7 +265,7 @@ test01.xls: application/vnd.ms-excel; charset=binary
- `LEVEL`: 日志级别,默认为 `Trace`
- `DISABLE_ROUTER_LOG`: 关闭日志中的路由日志。
- `ENABLE_ACCESS_LOG`: 是否开启 Access Log, 默认为 false。
- `ACCESS_LOG_TEMPLATE`: `access.log` 输出内容的模板,默认模板:**`{{.Ctx.RemoteAddr}} - {{.Identity}} {{.Start.Format "[02/Jan/2006:15:04:05 -0700]" }} "{{.Ctx.Req.Method}} {{.Ctx.Req.URL.RequestURI}} {{.Ctx.Req.Proto}}" {{.ResponseWriter.Status}} {{.ResponseWriter.Size}} "{{.Ctx.Req.Referer}}\" \"{{.Ctx.Req.UserAgent}}"`**
- `ACCESS_LOG_TEMPLATE`: `access.log` 输出内容的模板,默认模板:**`{{.Ctx.RemoteHost}} - {{.Identity}} {{.Start.Format "[02/Jan/2006:15:04:05 -0700]" }} "{{.Ctx.Req.Method}} {{.Ctx.Req.URL.RequestURI}} {{.Ctx.Req.Proto}}" {{.ResponseWriter.Status}} {{.ResponseWriter.Size}} "{{.Ctx.Req.Referer}}" "{{.Ctx.Req.UserAgent}}"`**
模板支持以下参数:
- `Ctx`: 请求上下文。
- `Identity`: 登录用户名,默认: “`-`”。


@ -0,0 +1,101 @@
---
date: "2023-04-09T11:00:00+02:00"
title: "使用: HTTPS配置"
slug: "https-setup"
weight: 12
toc: false
draft: false
menu:
sidebar:
parent: "administration"
name: "HTTPS setup"
weight: 12
identifier: "https-setup"
---
# HTTPS setup to encrypt connections to Gitea
**Table of Contents**
{{< toc >}}
## 使用内置服务器
在启用HTTPS之前确保您拥有有效的SSL/TLS证书。
建议在测试和评估情况下使用自签名证书,请运行 `gitea cert --host [HOST]` 以生成自签名证书
如果您在服务器上使用阿帕奇Apache或Nginx建议参考 [反向代理指南]({{< relref "doc/administration/reverse-proxies.zh-cn.md" >}})。
要使用Gitea内置HTTPS支持您必须编辑`app.ini`文件。
```ini
[server]
PROTOCOL = https
ROOT_URL = https://git.example.com:3000/
HTTP_PORT = 3000
CERT_FILE = cert.pem
KEY_FILE = key.pem
```
请注意,如果您的证书由第三方证书颁发机构签名(即不是自签名的),则 cert.pem 应包含证书链。服务器证书必须是 cert.pem 中的第一个条目,后跟中介(如果有)。不必包含根证书,因为连接客户端必须已经拥有根证书才能建立信任关系。要了解有关配置值的更多信息,请查看 [配置备忘单](../config-cheat-sheet#server-server)。
对于“CERT_FILE”或“KEY_FILE”字段当文件路径是相对路径时文件路径相对于“GITEA_CUSTOM”环境变量。它也可以是绝对路径。
### 设置HTTP重定向
Gitea服务器仅支持监听一个端口要重定向HTTP请求致HTTPS端口您需要启用HTTP重定向服务
```ini
[server]
REDIRECT_OTHER_PORT = true
; Port the redirection service should listen on
PORT_TO_REDIRECT = 3080
```
如果您使用Docker确保端口已配置在 `docker-compose.yml` 文件
## 使用 ACME (默认: Let's Encrypt)
[ACME](https://tools.ietf.org/html/rfc8555) 是一种证书颁发机构标准协议,允许您自动请求和续订 SSL/TLS 证书。[Let`s Encrypt](https://letsencrypt.org/) 是使用此标准的免费公开信任的证书颁发机构服务器。仅实施“HTTP-01”和“TLS-ALPN-01”挑战。为了使 ACME 质询通过并验证您的域所有权“80”端口“HTTP-01”或“443”端口“TLS-ALPN-01”上 gitea 域的外部流量必须由 gitea 实例提供服务。可能需要设置 [HTTP 重定向](#设置http重定向) 和端口转发才能正确路由外部流量。否则到端口“80”的正常流量将自动重定向到 HTTPS。**您必须同意**ACME提供商的服务条款默认为Let's Encrypt的 [服务条款](https://letsencrypt.org/documents/LE-SA-v1.2-2017年11月15日.pdf)。
使用默认 Let's Encrypt 的最小配置如下:
```ini
[server]
PROTOCOL=https
DOMAIN=git.example.com
ENABLE_ACME=true
ACME_ACCEPTTOS=true
ACME_DIRECTORY=https
;; Email can be omitted here and provided manually at first run, after which it is cached
ACME_EMAIL=email@example.com
```
小型配置请使用 [smallstep CA](https://github.com/smallstep/certificates), 点击 [教程](https://smallstep.com/docs/tutorials/acme-challenge) 了解更多信息。
```ini
[server]
PROTOCOL=https
DOMAIN=git.example.com
ENABLE_ACME=true
ACME_ACCEPTTOS=true
ACME_URL=https://ca.example.com/acme/acme/directory
;; Can be omitted if using the system's trust is preferred
;ACME_CA_ROOT=/path/to/root_ca.crt
ACME_DIRECTORY=https
ACME_EMAIL=email@example.com
```
要了解关于配置, 请访问 [配置备忘单](../config-cheat-sheet#server-server)获取更多信息
## 使用反向代理服务器
按照 [reverse proxy guide](../reverse-proxies) 的规则设置你的反向代理服务器
然后,按照下面的向导启用 HTTPS
- [nginx](https://nginx.org/en/docs/http/configuring_https_servers.html)
- [apache2/httpd](https://httpd.apache.org/docs/2.4/ssl/ssl_howto.html)
- [caddy](https://caddyserver.com/docs/tls)
注意:仅在代理层启用 HTTPS 被称为 [TLS 终止代理](https://en.wikipedia.org/wiki/TLS_termination_proxy)。代理服务器接受传入的 TLS 连接,解密内容,然后将现在未加密的内容传递给 Gitea。只要代理和 Gitea 实例在同一台计算机上或在私有网络中的不同计算机上(代理暴露给外部网络),这通常是可以接受的。如果您的 Gitea 实例与代理隔离在公共网络上,或者如果您想要全端到端的加密,您还可以直接在 Gitea 中 [启用内置服务器的 HTTPS 支持](#使用内置服务器),并将连接转发到 HTTPS 上。


@ -304,7 +304,7 @@ log using the value: `ACCESS = ,`
This value represents a Go template. Its default value is:
`{{.Ctx.RemoteAddr}} - {{.Identity}} {{.Start.Format "[02/Jan/2006:15:04:05 -0700]" }} "{{.Ctx.Req.Method}} {{.Ctx.Req.URL.RequestURI}} {{.Ctx.Req.Proto}}" {{.ResponseWriter.Status}} {{.ResponseWriter.Size}} "{{.Ctx.Req.Referer}}\" \"{{.Ctx.Req.UserAgent}}"`
`{{.Ctx.RemoteHost}} - {{.Identity}} {{.Start.Format "[02/Jan/2006:15:04:05 -0700]" }} "{{.Ctx.Req.Method}} {{.Ctx.Req.URL.RequestURI}} {{.Ctx.Req.Proto}}" {{.ResponseWriter.Status}} {{.ResponseWriter.Size}} "{{.Ctx.Req.Referer}}" "{{.Ctx.Req.UserAgent}}"`
The template is passed following options:


@ -118,6 +118,14 @@ The correct path for the template(s) will be relative to the `CustomPath`
2. If you are still unable to find a path, the default can be [calculated above](#where-does-gitea-store-what-file)
3. Once you have figured out the correct custom path, you can refer to the [customizing Gitea]({{< relref "doc/administration/customizing-gitea.en-us.md" >}}) page to add your template to the correct location.
## Does Gitea have a "GitHub/GitLab pages" feature?
Gitea doesn't provide a built-in Pages server. You need a dedicated domain to serve static pages in order to avoid CSRF security risks.
For simple use cases, a reverse proxy can rewrite and serve static content from Gitea's raw file URLs.
There are also third-party services, such as a standalone [pages server](https://codeberg.org/Codeberg/pages-server) or a [caddy plugin](https://github.com/42wim/caddy-gitea), that provide the required functionality.
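As a rough illustration of the raw-file approach: content pushed to, say, a `pages` branch is reachable through Gitea's raw endpoint, which a reverse proxy on a dedicated domain could rewrite to (owner, repo, and branch below are placeholders):

```
curl https://git.example.com/owner/repo/raw/branch/pages/index.html
```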
## Active user vs login prohibited user
In Gitea, an "active" user refers to a user that has activated their account via email.


@ -31,6 +31,8 @@ _Symbols used in table:_
- _✘ - unsupported_
- _⚙ - supported through third-party software_
## General Features
| Feature | Gitea | Gogs | GitHub EE | GitLab CE | GitLab EE | BitBucket | RhodeCode CE |
@ -51,7 +53,7 @@ _Symbols used in table:_
| Custom Theme Support | ✓ | ✓ | ✘ | ✘ | ✘ | ✓ | ✘ |
| Markdown support | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
| CSV support | ✓ | ✘ | ✓ | ✘ | ✘ | ✓ | ? |
| 'GitHub / GitLab pages' | [](https://github.com/go-gitea/gitea/issues/302) | ✘ | ✓ | ✓ | ✓ | ✘ | ✘ |
| 'GitHub / GitLab pages' | [⚙️][gitea-pages-server], [⚙️][gitea-caddy-plugin] | ✘ | ✓ | ✓ | ✓ | ✘ | ✘ |
| Repo-specific wiki (as a repo itself) | ✓ | ✓ | ✓ | ✓ | ✓ | / | ✘ |
| Deploy Tokens | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
| Repository Tokens with write rights | ✓ | ✘ | ✓ | ✓ | ✓ | ✓ | ✓ |
@ -144,3 +146,6 @@ _Symbols used in table:_
| Two factor authentication (2FA) | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✘ |
| Integration with the most common services | ✓ | / | | ✓ | ✓ | | ✓ |
| Incorporate external CI/CD | ✓ | ✘ | ✓ | ✓ | ✓ | ✓ | ✓ |
[gitea-caddy-plugin]: https://github.com/42wim/caddy-gitea
[gitea-pages-server]: https://codeberg.org/Codeberg/pages-server


@ -29,6 +29,8 @@ _表格中的符号含义:_
* _? - 不确定_
* _⚙ - 由第三方服务或插件支持_
#### 主要特性
| 特性 | Gitea | Gogs | GitHub EE | GitLab CE | GitLab EE | BitBucket | RhodeCode CE |
@ -42,7 +44,7 @@ _表格中的符号含义:_
| 支持 Orgmode | ✓ | ✘ | ✓ | ✘ | ✘ | ✘ | ? |
| 支持 CSV | ✓ | ✘ | ✓ | ✘ | ✘ | ✓ | ? |
| 支持第三方渲染工具 | ✓ | ✘ | ✘ | ✘ | ✘ | ✓ | ? |
| Git 驱动的静态 pages | [](https://github.com/go-gitea/gitea/issues/302) | ✘ | ✓ | ✓ | ✓ | ✘ | ✘ |
| Git 驱动的静态 pages | [⚙️][gitea-pages-server], [⚙️][gitea-caddy-plugin] | ✘ | ✓ | ✓ | ✓ | ✘ | ✘ |
| Git 驱动的集成化 wiki | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ (cloud only) | ✘ |
| 部署令牌 | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
| 仓库写权限令牌 | ✓ | ✘ | ✓ | ✓ | ✓ | ✓ | ✓ |
@ -129,3 +131,6 @@ _表格中的符号含义:_
| 集成 Discord | ✓ | ✓ | ✓ | ✓ | ✓ | ✘ | ✘ |
| 集成 Microsoft Teams | ✓ | ✘ | ✓ | ✓ | ✓ | ✓ | ✘ |
| 显示外部 CI/CD 的状态 | ✓ | ✘ | ✓ | ✓ | ✓ | ✓ | ✓ |
[gitea-caddy-plugin]: https://github.com/42wim/caddy-gitea
[gitea-pages-server]: https://codeberg.org/Codeberg/pages-server


@ -31,6 +31,8 @@ menu:
- ✘ - 不支援
- _⚙ - 由第三方服務或外掛程式支援_
## 一般功能
| 功能 | Gitea | Gogs | GitHub EE | GitLab CE | GitLab EE | BitBucket | RhodeCode CE |
@ -44,7 +46,7 @@ menu:
| 支援 Orgmode | ✓ | ✘ | ✓ | ✘ | ✘ | ✘ | ? |
| 支援 CSV | ✓ | ✘ | ✓ | ✘ | ✘ | ✓ | ? |
| 支援第三方渲染工具 | ✓ | ✘ | ✘ | ✘ | ✘ | ✓ | ? |
| Git 驅動的靜態頁面 | [](https://github.com/go-gitea/gitea/issues/302) | ✘ | ✓ | ✓ | ✓ | ✘ | ✘ |
| Git 驅動的靜態頁面 | [⚙️][gitea-pages-server], [⚙️][gitea-caddy-plugin] | ✘ | ✓ | ✓ | ✓ | ✘ | ✘ |
| Git 驅動的整合 wiki | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✘ |
| 部署 Token | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
| 有寫入權限的儲存庫 Token | ✓ | ✘ | ✓ | ✓ | ✓ | ✘ | ✓ |
@ -130,3 +132,6 @@ menu:
| 整合 Discord | ✓ | ✓ | ✓ | ✓ | ✓ | ✘ | ✘ |
| 整合 Microsoft Teams | ✓ | ✘ | ✓ | ✓ | ✓ | ✓ | ✘ |
| 顯示外部 CI/CD 狀態 | ✓ | ✘ | ✓ | ✓ | ✓ | ✓ | ✓ |
[gitea-caddy-plugin]: https://github.com/42wim/caddy-gitea
[gitea-pages-server]: https://codeberg.org/Codeberg/pages-server


@ -0,0 +1,91 @@
---
date: "2021-09-02T16:00:00+08:00"
title: "从旧版 Gitea 升级"
slug: "upgrade-from-gitea"
weight: 100
toc: false
draft: false
menu:
sidebar:
parent: "installation"
name: "从旧版 Gitea 升级"
weight: 100
identifier: "upgrade-from-gitea"
---
# 从旧版 Gitea 升级
**目录**
{{< toc >}}
想要升级 Gitea只需要下载新版停止运行旧版进行数据备份然后运行新版就好。
每次 Gitea 实例启动时,它都会检查是否要进行数据库迁移。
如果需要进行数据库迁移Gitea 会花一些时间完成升级然后继续服务。
## 为重大变更检查更新日志
为了让 Gitea 变得更好,进行重大变更是不可避免的,尤其是一些里程碑更新的发布。
在更新前,请 [在 Gitea 博客上阅读更新日志](https://blog.gitea.io/)
并检查重大变更是否会影响你的 Gitea 实例。
## 降级前的备份
Gitea 会保留首二位版本号相同的版本的兼容性 (`a.b.x` -> `a.b.y`)
这些版本拥有相同的数据库结构,可以自由升级或降级。
其他情况 (`a.b.?` -> `a.c.?`)下,
新版 Gitea 可能将会将数据库升级到与旧版数据库不同的结构。
举个例子:
| 当前 | 目标 | 结果 |
| --- | --- | --- |
| 1.4.0 | 1.4.1 | ✅ |
| 1.4.1 | 1.4.0 | ⚠️ 不建议,后果自负!尽管数据库结构可能不会变更,让它可以正常工作。我们强烈建议降级前进行完全的备份。 |
| 1.4.x | 1.5.y | ✅ 数据库会被自动升级。你可以直接从 1.4.x 升级到最新的 1.5.y。 |
| 1.5.y | 1.4.x | ❌ 数据库已被升级且不可被旧版 Gitea 利用,使用备份来进行降级。 |
**因为你不能基于升级后的数据库运行旧版 Gitea所以你应该在数据库升级前完成数据备份。**
如果你在生产环境下使用 Gitea你应该在升级前做好备份哪怕只是小版本的补丁更新。
备份步骤:
* 停止 Gitea 实例
* 备份数据库
* 备份 Gitea 配置文件
* 备份 Gitea 在 `APP_DATA_PATH` 中的数据文件
* 备份 Gitea 的外部存储 (例如: S3/MinIO 或被使用的其他存储)
如果你在使用云服务或拥有快照功能的文件系统,
最好对 Gitea 的数据盘及相关资料存储进行一次快照。
## 从 Docker 升级
* `docker pull` 拉取 Gitea 的最新发布版。
* 停止运行中的实例,备份数据。
* 使用 `docker``docker-compose` 启动更新的 Gitea Docker 容器.
## 从包升级
* 停止运行中的实例,备份数据。
* 使用你的包管理器更新 Gitea 到最新版本。
* 启动 Gitea 实例。
## 从二进制升级
* 下载最新的 Gitea 二进制文件到临时文件夹中。
* 停止运行中的实例,备份数据。
* 将旧的 Gitea 二进制文件覆盖成新的。
* 启动 Gitea 实例。
在 Linux 系统上自动执行以上步骤的脚本可在 [Gitea 的 source tree 中找到 `contrib/upgrade.sh` 来获取](https://github.com/go-gitea/gitea/blob/main/contrib/upgrade.sh).
## 小心你的自定义模板
Gitea 的模板结构与变量可能会随着各个版本的发布发生变化,如果你使用了自定义模板,
你得注意你的模板与你使用的 Gitea 版本的兼容性。
如果自定义模板与 Gitea 版本不兼容,你可能会得到:
`50x` 服务器错误,页面元素丢失或故障,莫名其妙的页面布局,等等…
移除或更新不兼容的模板Gitea Web 才可以正常工作。

go.mod

@ -44,7 +44,6 @@ require (
github.com/go-git/go-billy/v5 v5.4.1
github.com/go-git/go-git/v5 v5.5.2
github.com/go-ldap/ldap/v3 v3.4.4
github.com/go-redis/redis/v8 v8.11.5
github.com/go-sql-driver/mysql v1.7.0
github.com/go-swagger/go-swagger v0.30.4
github.com/go-testfixtures/testfixtures/v3 v3.8.1
@ -90,6 +89,7 @@ require (
github.com/pquerna/otp v1.4.0
github.com/prometheus/client_golang v1.14.0
github.com/quasoft/websspi v1.1.2
github.com/redis/go-redis/v9 v9.0.3
github.com/santhosh-tekuri/jsonschema/v5 v5.2.0
github.com/sergi/go-diff v1.3.1
github.com/shurcooL/vfsgen v0.0.0-20200824052919-0d455de96546
@ -104,10 +104,11 @@ require (
github.com/yuin/goldmark-highlighting/v2 v2.0.0-20220924101305-151362477c87
github.com/yuin/goldmark-meta v1.1.0
golang.org/x/crypto v0.7.0
golang.org/x/image v0.7.0
golang.org/x/net v0.8.0
golang.org/x/oauth2 v0.6.0
golang.org/x/sys v0.6.0
golang.org/x/text v0.8.0
golang.org/x/text v0.9.0
golang.org/x/tools v0.6.0
google.golang.org/grpc v1.53.0
google.golang.org/protobuf v1.28.1
@ -231,6 +232,8 @@ require (
github.com/nwaples/rardecode v1.1.3 // indirect
github.com/oklog/ulid v1.3.1 // indirect
github.com/olekukonko/tablewriter v0.0.5 // indirect
github.com/onsi/ginkgo v1.16.5 // indirect
github.com/onsi/gomega v1.18.1 // indirect
github.com/pelletier/go-toml v1.9.5 // indirect
github.com/pelletier/go-toml/v2 v2.0.5 // indirect
github.com/pierrec/lz4/v4 v4.1.17 // indirect

go.sum

@ -213,6 +213,8 @@ github.com/boombuler/barcode v1.0.1/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl
github.com/bradfitz/gomemcache v0.0.0-20190329173943-551aad21a668/go.mod h1:H0wQNHz2YrLsuXOZozoeDmnHXkNCRmMW0gwFWDfEZDA=
github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b h1:L/QXpzIa3pOvUGt1D1lA5KjYhPBAN/3iWdP7xeFS9F0=
github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b/go.mod h1:H0wQNHz2YrLsuXOZozoeDmnHXkNCRmMW0gwFWDfEZDA=
github.com/bsm/ginkgo/v2 v2.7.0 h1:ItPMPH90RbmZJt5GtkcNvIRuGEdwlBItdNVoyzaNQao=
github.com/bsm/gomega v1.26.0 h1:LhQm+AFcgV2M0WyKroMASzAzCAJVpAxQXv4SaI9a69Y=
github.com/bufbuild/connect-go v1.3.1 h1:doJP6Q8Ypg6haUT2IAZJPWHUN9rAUp+F9MfK7yhu1zs=
github.com/bufbuild/connect-go v1.3.1/go.mod h1:9iNvh/NOsfhNBUH5CtvXeVUskQO1xsrEviH7ZArwZ3I=
github.com/buildkite/terminal-to-html/v3 v3.7.0 h1:chdLUSpiOj/A4v3dzxyOqixXI6aw7IDA6Dk77FXsvNU=
@ -436,8 +438,6 @@ github.com/go-openapi/validate v0.22.0 h1:b0QecH6VslW/TxtpKgzpO1SNG7GU2FsaqKdP1E
github.com/go-openapi/validate v0.22.0/go.mod h1:rjnrwK57VJ7A8xqfpAOEKRH8yQSGUriMu5/zuPSQ1hg=
github.com/go-redis/redis v6.15.2+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA=
github.com/go-redis/redis/v8 v8.4.0/go.mod h1:A1tbYoHSa1fXwN+//ljcCYYJeLmVrwL9hbQN45Jdy0M=
github.com/go-redis/redis/v8 v8.11.5 h1:AcZZR7igkdvfVmQTPnu9WE37LRrO/YrBH5zWyjDC0oI=
github.com/go-redis/redis/v8 v8.11.5/go.mod h1:gREzHqY1hg6oD9ngVRbLStwAWKhA0FEgq8Jd4h5lpwo=
github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-sql-driver/mysql v1.4.1/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
@ -447,6 +447,7 @@ github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/me
github.com/go-swagger/go-swagger v0.30.4 h1:cPrWLSXY6ZdcgfRicOj0lANg72TkTHz6uv/OlUdzO5U=
github.com/go-swagger/go-swagger v0.30.4/go.mod h1:YM5D5kR9c1ft3ynMXvDk2uo/7UZHKFEqKXcAL9f4Phc=
github.com/go-swagger/scan-repo-boundary v0.0.0-20180623220736-973b3573c013 h1:l9rI6sNaZgNC0LnF3MiE+qTmyBA/tZAg1rtyrGbUMK0=
github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE=
github.com/go-test/deep v1.0.7 h1:/VSMRlnY/JSyqxQUzQLKVMAskpY/NZKFA5j2P+0pP2M=
github.com/go-test/deep v1.0.7/go.mod h1:QV8Hv/iy04NyLBxAdO9njL0iVPN1S4d/A3NVv1V36o8=
github.com/go-testfixtures/testfixtures/v3 v3.8.1 h1:uonwvepqRvSgddcrReZQhojTlWlmOlHkYAb9ZaOMWgU=
@ -583,6 +584,7 @@ github.com/google/pprof v0.0.0-20200905233945-acf8798be1f7/go.mod h1:ZgVRPoUq/hf
github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20211214055906-6f57359322fd/go.mod h1:KgnwoLYCZ8IQu3XUZ8Nc/bM9CCZFOyjUNOSygVozoDg=
github.com/google/pprof v0.0.0-20230222194610-99052d3372e7 h1:pNFnpaSXfibgW7aUbk9pwLmI7LNwh/iR46x/YwN/lNg=
github.com/google/pprof v0.0.0-20230222194610-99052d3372e7/go.mod h1:uglQLonpP8qtYCYyzA+8c/9qtqgA3qsXGYqCPKARAFg=
@ -939,6 +941,7 @@ github.com/nwaples/rardecode v1.1.3 h1:cWCaZwfM5H7nAD6PyEdcVnczzV8i/JtotnyW/dD9l
github.com/nwaples/rardecode v1.1.3/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0=
github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU=
github.com/oklog/oklog v0.3.2/go.mod h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs=
github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA=
github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4=
@ -955,13 +958,18 @@ github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+W
github.com/onsi/ginkgo v1.8.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk=
github.com/onsi/ginkgo v1.14.2/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY=
github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0=
github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE=
github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU=
github.com/onsi/ginkgo/v2 v2.0.0/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c=
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
github.com/onsi/gomega v1.10.3/go.mod h1:V9xEwhxec5O8UDM77eCW8vLymOMltsqPVYWrpDsH8xc=
github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY=
github.com/onsi/gomega v1.18.1 h1:M1GfJqGRrBrrGGsbxzV5dqM2U2ApXefZCQpkukxYRLE=
github.com/onsi/gomega v1.18.1/go.mod h1:0q+aL8jAiMXy9hbwj2mr5GziHiwhAIQpFmmtT5hitRs=
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk=
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
@ -1050,6 +1058,8 @@ github.com/quasoft/websspi v1.1.2 h1:/mA4w0LxWlE3novvsoEL6BBA1WnjJATbjkh1kFrTidw
github.com/quasoft/websspi v1.1.2/go.mod h1:HmVdl939dQ0WIXZhyik+ARdI03M6bQzaSEKcgpFmewk=
github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
github.com/rcrowley/go-metrics v0.0.0-20190826022208-cac0b30c2563/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
github.com/redis/go-redis/v9 v9.0.3 h1:+7mmR26M0IvyLxGZUHxu4GiBkJkVDid0Un+j4ScYu4k=
github.com/redis/go-redis/v9 v9.0.3/go.mod h1:WqMKv5vnQbRuZstUwxQI195wHy+t4PuXDOjzMvcuQHk=
github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0 h1:OdAsTTz6OkFY5QxjkYwrChwuRruF69c169dPK26NUlk=
github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rhysd/actionlint v1.6.23 h1:041VOXgZddfvSJa9Il+WT3Iwuo/j0Nmu4bhpAScrds4=
@ -1321,6 +1331,8 @@ golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EH
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.7.0 h1:gzS29xtG1J5ybQlv0PuyfE3nmc6R4qB73m6LUUmvFuw=
golang.org/x/image v0.7.0/go.mod h1:nd/q4ef1AKKYl/4kft7g+6UyGbdiqWqTP1ZAbRoV7Rg=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
@ -1391,6 +1403,7 @@ golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM=
golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk=
golang.org/x/net v0.0.0-20210501142056-aec3718b3fa0/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk=
golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210610132358-84b48f89b13b/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
@ -1404,6 +1417,7 @@ golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug
golang.org/x/net v0.0.0-20220826154423-83b083e8dc8b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk=
golang.org/x/net v0.0.0-20220906165146-f3363e06e74c/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk=
golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.8.0 h1:Zrh2ngAOFYneWTAIAPethzeaQLuHwhuBkuV6ZiRnUaQ=
golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
@ -1493,6 +1507,7 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20201126233918-771906719818/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -1521,6 +1536,7 @@ golang.org/x/sys v0.0.0-20220825204002-c680a09ffe64/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0 h1:MVltZSvRTcU2ljQOhs94SXPftV6DCNnZViHeQps87pQ=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
@ -1528,6 +1544,7 @@ golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9sn
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.0.0-20220722155259-a9ba230a4035/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.6.0 h1:clScbb1cHjoCkyRbWwBEUZ5H/tIFu5TAXIqaZD0Gcjw=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@ -1539,8 +1556,9 @@ golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.8.0 h1:57P1ETyNKtuIjB4SRd15iJxuhj8Gc416Y78H3qgMh68=
golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@ -1607,6 +1625,7 @@ golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4f
golang.org/x/tools v0.0.0-20201124115921-2c860bdd6e78/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=

View File

@ -75,6 +75,7 @@ arguments - which can alternatively be run by running the subcommand web.`
cmd.CmdDocs,
cmd.CmdDumpRepository,
cmd.CmdRestoreRepository,
cmd.CmdActions,
}
// Now adjust these commands to add our global configuration options

View File

@ -69,3 +69,12 @@ func getUserHeatmapData(user *user_model.User, team *organization.Team, doer *us
OrderBy("timestamp").
Find(&hdata)
}
// GetTotalContributionsInHeatmap returns the total number of contributions in a heatmap
func GetTotalContributionsInHeatmap(hdata []*UserHeatmapData) int64 {
var total int64
for _, v := range hdata {
total += v.Contributions
}
return total
}
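The new helper simply folds the Contributions counters of the heatmap rows into one total. Below is a minimal caller sketch, assuming the models/activities package path used by the surrounding code (it only builds inside the Gitea module); the slice is hand-built for illustration rather than loaded by the query above.

package main

import (
	"fmt"

	activities_model "code.gitea.io/gitea/models/activities"
)

func main() {
	// Hand-built rows standing in for the result of the heatmap query above.
	hdata := []*activities_model.UserHeatmapData{
		{Contributions: 3},
		{Contributions: 5},
	}
	// Sums the Contributions field across all rows.
	fmt.Println(activities_model.GetTotalContributionsInHeatmap(hdata)) // 8
}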

View File

@ -16,7 +16,6 @@ import (
"strconv"
"strings"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/process"
"code.gitea.io/gitea/modules/setting"
@ -158,10 +157,6 @@ func parseKeyString(content string) (string, error) {
// CheckPublicKeyString checks if the given public key string is recognized by SSH.
// It returns the actual public key line on success.
func CheckPublicKeyString(content string) (_ string, err error) {
if setting.SSH.Disabled {
return "", db.ErrSSHDisabled{}
}
content, err = parseKeyString(content)
if err != nil {
return "", err
@ -184,7 +179,7 @@ func CheckPublicKeyString(content string) (_ string, err error) {
keyType string
length int
)
if setting.SSH.StartBuiltinServer {
if len(setting.SSH.KeygenPath) == 0 {
fnName = "SSHNativeParsePublicKey"
keyType, length, err = SSHNativeParsePublicKey(content)
} else {
@ -290,7 +285,12 @@ func SSHKeyGenParsePublicKey(key string) (string, int, error) {
}
}()
stdout, stderr, err := process.GetManager().Exec("SSHKeyGenParsePublicKey", setting.SSH.KeygenPath, "-lf", tmpName)
keygenPath := setting.SSH.KeygenPath
if len(keygenPath) == 0 {
keygenPath = "ssh-keygen"
}
stdout, stderr, err := process.GetManager().Exec("SSHKeyGenParsePublicKey", keygenPath, "-lf", tmpName)
if err != nil {
return "", 0, fmt.Errorf("fail to parse public key: %s - %s", err, stderr)
}
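The net effect of this change is that key parsing no longer keys off SSH.StartBuiltinServer: an empty setting.SSH.KeygenPath selects the native Go parser, while a configured path (or the bare "ssh-keygen" fallback added above) shells out to the binary. As a rough sketch of the native branch, which is assumed to sit on top of golang.org/x/crypto/ssh, parsing an authorized_keys line in-process looks like this; the generated key is a throwaway purely for the demo.

package main

import (
	"crypto/ed25519"
	"crypto/rand"
	"fmt"

	"golang.org/x/crypto/ssh"
)

func main() {
	// Throwaway key generated only so the demo has a valid authorized_keys line.
	pub, _, err := ed25519.GenerateKey(rand.Reader)
	if err != nil {
		panic(err)
	}
	sshPub, err := ssh.NewPublicKey(pub)
	if err != nil {
		panic(err)
	}
	line := ssh.MarshalAuthorizedKey(sshPub)

	// This is the in-process path taken when setting.SSH.KeygenPath is empty.
	parsed, _, _, _, err := ssh.ParseAuthorizedKey(line)
	if err != nil {
		panic(err)
	}
	fmt.Println("key type:", parsed.Type()) // ssh-ed25519
}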

View File

@ -57,6 +57,14 @@ func Test_SSHParsePublicKey(t *testing.T) {
assert.Equal(t, tc.keyType, keyTypeK)
assert.EqualValues(t, tc.length, lengthK)
})
t.Run("SSHParseKeyNative", func(t *testing.T) {
keyTypeK, lengthK, err := SSHNativeParsePublicKey(tc.content)
if err != nil {
assert.Fail(t, "%v", err)
}
assert.Equal(t, tc.keyType, keyTypeK)
assert.EqualValues(t, tc.length, lengthK)
})
})
}
}

View File

@ -42,6 +42,33 @@ func ConvertUtf8ToUtf8mb4() error {
return nil
}
// ConvertVarcharToNVarchar converts database and tables from varchar to nvarchar if it's mssql
func ConvertVarcharToNVarchar() error {
if x.Dialect().URI().DBType != schemas.MSSQL {
return nil
}
sess := x.NewSession()
defer sess.Close()
res, err := sess.QuerySliceString(`SELECT 'ALTER TABLE ' + OBJECT_NAME(SC.object_id) + ' MODIFY SC.name NVARCHAR(' + CONVERT(VARCHAR(5),SC.max_length) + ')'
FROM SYS.columns SC
JOIN SYS.types ST
ON SC.system_type_id = ST.system_type_id
AND SC.user_type_id = ST.user_type_id
WHERE ST.name ='varchar'`)
if err != nil {
return err
}
for _, row := range res {
if len(row) == 1 {
if _, err = sess.Exec(row[0]); err != nil {
return err
}
}
}
return err
}
// Cell2Int64 converts a xorm.Cell type to int64,
// and handles possible irregular cases.
func Cell2Int64(val xorm.Cell) int64 {

View File

@ -19,13 +19,16 @@ func TestIterate(t *testing.T) {
xe := unittest.GetXORMEngine()
assert.NoError(t, xe.Sync(&repo_model.RepoUnit{}))
var repoCnt int
err := db.Iterate(db.DefaultContext, nil, func(ctx context.Context, repo *repo_model.RepoUnit) error {
repoCnt++
cnt, err := db.GetEngine(db.DefaultContext).Count(&repo_model.RepoUnit{})
assert.NoError(t, err)
var repoUnitCnt int
err = db.Iterate(db.DefaultContext, nil, func(ctx context.Context, repo *repo_model.RepoUnit) error {
repoUnitCnt++
return nil
})
assert.NoError(t, err)
assert.EqualValues(t, 84, repoCnt)
assert.EqualValues(t, cnt, repoUnitCnt)
err = db.Iterate(db.DefaultContext, nil, func(ctx context.Context, repoUnit *repo_model.RepoUnit) error {
reopUnit2 := repo_model.RepoUnit{ID: repoUnit.ID}

View File

@ -31,15 +31,20 @@ func TestFind(t *testing.T) {
xe := unittest.GetXORMEngine()
assert.NoError(t, xe.Sync(&repo_model.RepoUnit{}))
var repoUnitCount int
_, err := db.GetEngine(db.DefaultContext).SQL("SELECT COUNT(*) FROM repo_unit").Get(&repoUnitCount)
assert.NoError(t, err)
assert.NotEmpty(t, repoUnitCount)
opts := mockListOptions{}
var repoUnits []repo_model.RepoUnit
err := db.Find(db.DefaultContext, &opts, &repoUnits)
err = db.Find(db.DefaultContext, &opts, &repoUnits)
assert.NoError(t, err)
assert.EqualValues(t, 84, len(repoUnits))
assert.EqualValues(t, repoUnitCount, len(repoUnits))
cnt, err := db.Count(db.DefaultContext, &opts, new(repo_model.RepoUnit))
assert.NoError(t, err)
assert.EqualValues(t, 84, cnt)
assert.EqualValues(t, repoUnitCount, cnt)
repoUnits = make([]repo_model.RepoUnit, 0, 10)
newCnt, err := db.FindAndCount(db.DefaultContext, &opts, &repoUnits)

View File

@ -12,8 +12,6 @@ import (
"code.gitea.io/gitea/models/db"
"github.com/stretchr/testify/assert"
_ "github.com/mattn/go-sqlite3"
)
func changeDefaultFileBlockSize(n int64) (restore func()) {

View File

@ -66,3 +66,45 @@
is_prerelease: true
is_tag: false
created_unix: 946684800
- id: 6
repo_id: 57
publisher_id: 2
tag_name: "v1.0"
lower_tag_name: "v1.0"
target: "main"
title: "v1.0"
sha1: "a8a700e8c644c783ba2c6e742bb81bf91e244bff"
num_commits: 3
is_draft: false
is_prerelease: false
is_tag: false
created_unix: 946684801
- id: 7
repo_id: 57
publisher_id: 2
tag_name: "v1.1"
lower_tag_name: "v1.1"
target: "main"
title: "v1.1"
sha1: "cef06e48f2642cd0dc9597b4bea09f4b3f74aad6"
num_commits: 5
is_draft: false
is_prerelease: false
is_tag: false
created_unix: 946684802
- id: 8
repo_id: 57
publisher_id: 2
tag_name: "v2.0"
lower_tag_name: "v2.0"
target: "main"
title: "v2.0"
sha1: "7197b56fdc75b453f47c9110938cb46a303579fd"
num_commits: 6
is_draft: false
is_prerelease: false
is_tag: false
created_unix: 946684803

View File

@ -1,3 +1,4 @@
# See models/unit/unit.go for the meaning of the type
-
id: 1
repo_id: 1
@ -575,3 +576,34 @@
repo_id: 56
type: 1
created_unix: 946684810
-
id: 85
repo_id: 57
type: 1
created_unix: 946684810
-
id: 86
repo_id: 57
type: 2
created_unix: 946684810
-
id: 87
repo_id: 57
type: 3
created_unix: 946684810
-
id: 88
repo_id: 57
type: 4
created_unix: 946684810
-
id: 89
repo_id: 57
type: 5
created_unix: 946684810
-
id: 90
repo_id: 52
type: 1
created_unix: 946684810

View File

@ -1,3 +1,4 @@
# don't forget to add fixtures in repo_unit.yml
-
id: 1
owner_id: 2
@ -1559,6 +1560,7 @@
owner_name: user30
lower_name: empty
name: empty
default_branch: master
num_watches: 0
num_stars: 0
num_forks: 0
@ -1647,3 +1649,16 @@
is_private: true
status: 0
num_issues: 0
-
id: 57
owner_id: 2
owner_name: user2
lower_name: repo-release
name: repo-release
default_branch: main
is_empty: false
is_archived: false
is_private: false
status: 0
num_issues: 0

View File

@ -66,7 +66,7 @@
num_followers: 2
num_following: 1
num_stars: 2
num_repos: 12
num_repos: 13
num_teams: 0
num_members: 0
visibility: 0
@ -1091,7 +1091,7 @@
max_repo_creation: -1
is_active: true
is_admin: false
is_restricted: true
is_restricted: false
allow_git_hook: false
allow_import_local: false
allow_create_organization: true

View File

@ -0,0 +1,76 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package git
import (
"fmt"
"testing"
"github.com/stretchr/testify/assert"
)
func TestBranchRuleMatchPriority(t *testing.T) {
kases := []struct {
Rules []string
BranchName string
ExpectedMatchIdx int
}{
{
Rules: []string{"release/*", "release/v1.17"},
BranchName: "release/v1.17",
ExpectedMatchIdx: 1,
},
{
Rules: []string{"release/v1.17", "release/*"},
BranchName: "release/v1.17",
ExpectedMatchIdx: 0,
},
{
Rules: []string{"release/**/v1.17", "release/test/v1.17"},
BranchName: "release/test/v1.17",
ExpectedMatchIdx: 1,
},
{
Rules: []string{"release/test/v1.17", "release/**/v1.17"},
BranchName: "release/test/v1.17",
ExpectedMatchIdx: 0,
},
{
Rules: []string{"release/**", "release/v1.0.0"},
BranchName: "release/v1.0.0",
ExpectedMatchIdx: 1,
},
{
Rules: []string{"release/v1.0.0", "release/**"},
BranchName: "release/v1.0.0",
ExpectedMatchIdx: 0,
},
{
Rules: []string{"release/**", "release/v1.0.0"},
BranchName: "release/v2.0.0",
ExpectedMatchIdx: 0,
},
{
Rules: []string{"release/*", "release/v1.0.0"},
BranchName: "release/1/v2.0.0",
ExpectedMatchIdx: -1,
},
}
for _, kase := range kases {
var pbs ProtectedBranchRules
for _, rule := range kase.Rules {
pbs = append(pbs, &ProtectedBranch{RuleName: rule})
}
pbs.sort()
matchedPB := pbs.GetFirstMatched(kase.BranchName)
if matchedPB == nil {
if kase.ExpectedMatchIdx >= 0 {
assert.Error(t, fmt.Errorf("no matched rules but expected %s[%d]", kase.Rules[kase.ExpectedMatchIdx], kase.ExpectedMatchIdx))
}
} else {
assert.EqualValues(t, kase.Rules[kase.ExpectedMatchIdx], matchedPB.RuleName)
}
}
}

View File

@ -28,12 +28,8 @@ func (rules ProtectedBranchRules) sort() {
sort.Slice(rules, func(i, j int) bool {
rules[i].loadGlob()
rules[j].loadGlob()
if rules[i].isPlainName {
if !rules[j].isPlainName {
return true
}
} else if rules[j].isPlainName {
return true
if rules[i].isPlainName != rules[j].isPlainName {
return rules[i].isPlainName // plain name comes first, so plain name means "less"
}
return rules[i].CreatedUnix < rules[j].CreatedUnix
})
@ -46,7 +42,7 @@ func FindRepoProtectedBranchRules(ctx context.Context, repoID int64) (ProtectedB
if err != nil {
return nil, err
}
rules.sort()
rules.sort() // to make non-glob rules have higher priority, and for same glob/non-glob rules, first created rules have higher priority
return rules, nil
}
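The rewritten comparator states the priority rule directly: exact (plain-name) rules always sort ahead of glob rules, and within each group the earlier-created rule wins, which is what the GetFirstMatched test above exercises. A self-contained toy version of that ordering, using placeholder rule data rather than real ProtectedBranch rows:

package main

import (
	"fmt"
	"sort"
)

// Toy stand-in for ProtectedBranch: only the fields the comparator cares about.
type rule struct {
	name    string
	isPlain bool  // true when the rule name contains no glob characters
	created int64 // stand-in for CreatedUnix
}

func main() {
	rules := []rule{
		{"release/*", false, 1},
		{"release/v1.17", true, 3},
		{"main", true, 2},
	}
	sort.Slice(rules, func(i, j int) bool {
		if rules[i].isPlain != rules[j].isPlain {
			return rules[i].isPlain // plain name means "less", i.e. higher priority
		}
		return rules[i].created < rules[j].created
	})
	// main (plain, older), release/v1.17 (plain, newer), release/* (glob)
	fmt.Println(rules)
}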

View File

@ -63,8 +63,8 @@ func IsUserAssignedToIssue(ctx context.Context, issue *Issue, user *user_model.U
}
// ToggleIssueAssignee changes a user between assigned and not assigned for this issue, and make issue comment for it.
func ToggleIssueAssignee(issue *Issue, doer *user_model.User, assigneeID int64) (removed bool, comment *Comment, err error) {
ctx, committer, err := db.TxContext(db.DefaultContext)
func ToggleIssueAssignee(ctx context.Context, issue *Issue, doer *user_model.User, assigneeID int64) (removed bool, comment *Comment, err error) {
ctx, committer, err := db.TxContext(ctx)
if err != nil {
return false, nil, err
}

View File

@ -24,17 +24,17 @@ func TestUpdateAssignee(t *testing.T) {
// Assign multiple users
user2, err := user_model.GetUserByID(db.DefaultContext, 2)
assert.NoError(t, err)
_, _, err = issues_model.ToggleIssueAssignee(issue, &user_model.User{ID: 1}, user2.ID)
_, _, err = issues_model.ToggleIssueAssignee(db.DefaultContext, issue, &user_model.User{ID: 1}, user2.ID)
assert.NoError(t, err)
user3, err := user_model.GetUserByID(db.DefaultContext, 3)
assert.NoError(t, err)
_, _, err = issues_model.ToggleIssueAssignee(issue, &user_model.User{ID: 1}, user3.ID)
_, _, err = issues_model.ToggleIssueAssignee(db.DefaultContext, issue, &user_model.User{ID: 1}, user3.ID)
assert.NoError(t, err)
user1, err := user_model.GetUserByID(db.DefaultContext, 1) // This user is already assigned (see the definition in fixtures), so running UpdateAssignee should unassign him
assert.NoError(t, err)
_, _, err = issues_model.ToggleIssueAssignee(issue, &user_model.User{ID: 1}, user1.ID)
_, _, err = issues_model.ToggleIssueAssignee(db.DefaultContext, issue, &user_model.User{ID: 1}, user1.ID)
assert.NoError(t, err)
// Check if he got removed

View File

@ -52,84 +52,61 @@ func (err ErrCommentNotExist) Unwrap() error {
// CommentType defines whether a comment is just a simple comment, an action (like close) or a reference.
type CommentType int
// define unknown comment type
const (
CommentTypeUnknown CommentType = -1
)
// CommentTypeUndefined is used to search for comments of any type
const CommentTypeUndefined CommentType = -1
// Enumerate all the comment types
const (
// 0 Plain comment, can be associated with a commit (CommitID > 0) and a line (LineNum > 0)
CommentTypeComment CommentType = iota
CommentTypeReopen // 1
CommentTypeClose // 2
CommentTypeComment CommentType = iota // 0 Plain comment, can be associated with a commit (CommitID > 0) and a line (LineNum > 0)
CommentTypeReopen // 1
CommentTypeClose // 2
CommentTypeIssueRef // 3 References.
CommentTypeCommitRef // 4 Reference from a commit (not part of a pull request)
CommentTypeCommentRef // 5 Reference from a comment
CommentTypePullRef // 6 Reference from a pull request
CommentTypeLabel // 7 Labels changed
CommentTypeMilestone // 8 Milestone changed
CommentTypeAssignees // 9 Assignees changed
CommentTypeChangeTitle // 10 Change Title
CommentTypeDeleteBranch // 11 Delete Branch
CommentTypeStartTracking // 12 Start a stopwatch for time tracking
CommentTypeStopTracking // 13 Stop a stopwatch for time tracking
CommentTypeAddTimeManual // 14 Add time manual for time tracking
CommentTypeCancelTracking // 15 Cancel a stopwatch for time tracking
CommentTypeAddedDeadline // 16 Added a due date
CommentTypeModifiedDeadline // 17 Modified the due date
CommentTypeRemovedDeadline // 18 Removed a due date
CommentTypeAddDependency // 19 Dependency added
CommentTypeRemoveDependency // 20 Dependency removed
CommentTypeCode // 21 Comment a line of code
CommentTypeReview // 22 Reviews a pull request by giving general feedback
CommentTypeLock // 23 Lock an issue, giving only collaborators access
CommentTypeUnlock // 24 Unlocks a previously locked issue
CommentTypeChangeTargetBranch // 25 Change pull request's target branch
CommentTypeDeleteTimeManual // 26 Delete time manual for time tracking
CommentTypeReviewRequest // 27 add or remove Request from one
CommentTypeMergePull // 28 merge pull request
CommentTypePullRequestPush // 29 push to PR head branch
CommentTypeProject // 30 Project changed
CommentTypeProjectBoard // 31 Project board changed
CommentTypeDismissReview // 32 Dismiss Review
CommentTypeChangeIssueRef // 33 Change issue ref
CommentTypePRScheduledToAutoMerge // 34 pr was scheduled to auto merge when checks succeed
CommentTypePRUnScheduledToAutoMerge // 35 pr was un scheduled to auto merge when checks succeed
// 3 References.
CommentTypeIssueRef
// 4 Reference from a commit (not part of a pull request)
CommentTypeCommitRef
// 5 Reference from a comment
CommentTypeCommentRef
// 6 Reference from a pull request
CommentTypePullRef
// 7 Labels changed
CommentTypeLabel
// 8 Milestone changed
CommentTypeMilestone
// 9 Assignees changed
CommentTypeAssignees
// 10 Change Title
CommentTypeChangeTitle
// 11 Delete Branch
CommentTypeDeleteBranch
// 12 Start a stopwatch for time tracking
CommentTypeStartTracking
// 13 Stop a stopwatch for time tracking
CommentTypeStopTracking
// 14 Add time manual for time tracking
CommentTypeAddTimeManual
// 15 Cancel a stopwatch for time tracking
CommentTypeCancelTracking
// 16 Added a due date
CommentTypeAddedDeadline
// 17 Modified the due date
CommentTypeModifiedDeadline
// 18 Removed a due date
CommentTypeRemovedDeadline
// 19 Dependency added
CommentTypeAddDependency
// 20 Dependency removed
CommentTypeRemoveDependency
// 21 Comment a line of code
CommentTypeCode
// 22 Reviews a pull request by giving general feedback
CommentTypeReview
// 23 Lock an issue, giving only collaborators access
CommentTypeLock
// 24 Unlocks a previously locked issue
CommentTypeUnlock
// 25 Change pull request's target branch
CommentTypeChangeTargetBranch
// 26 Delete time manual for time tracking
CommentTypeDeleteTimeManual
// 27 add or remove Request from one
CommentTypeReviewRequest
// 28 merge pull request
CommentTypeMergePull
// 29 push to PR head branch
CommentTypePullRequestPush
// 30 Project changed
CommentTypeProject
// 31 Project board changed
CommentTypeProjectBoard
// 32 Dismiss Review
CommentTypeDismissReview
// 33 Change issue ref
CommentTypeChangeIssueRef
// 34 pr was scheduled to auto merge when checks succeed
CommentTypePRScheduledToAutoMerge
// 35 pr was un scheduled to auto merge when checks succeed
CommentTypePRUnScheduledToAutoMerge
)
var commentStrings = []string{
@ -181,7 +158,23 @@ func AsCommentType(typeName string) CommentType {
return CommentType(index)
}
}
return CommentTypeUnknown
return CommentTypeUndefined
}
func (t CommentType) HasContentSupport() bool {
switch t {
case CommentTypeComment, CommentTypeCode, CommentTypeReview:
return true
}
return false
}
func (t CommentType) HasAttachmentSupport() bool {
switch t {
case CommentTypeComment, CommentTypeCode, CommentTypeReview:
return true
}
return false
}
// RoleDescriptor defines comment tag type
@ -1039,7 +1032,7 @@ func (opts *FindCommentsOptions) ToConds() builder.Cond {
if opts.Before > 0 {
cond = cond.And(builder.Lte{"comment.updated_unix": opts.Before})
}
if opts.Type != CommentTypeUnknown {
if opts.Type != CommentTypeUndefined {
cond = cond.And(builder.Eq{"comment.type": opts.Type})
}
if opts.Line != 0 {
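Beyond the rename from CommentTypeUnknown to CommentTypeUndefined (which now reads as "no type filter" rather than a real type), this hunk adds HasContentSupport and HasAttachmentSupport so callers can ask whether a comment type may carry a body or attachments. A small sketch of how those pieces fit together, assuming the models/issues import path used in the tests below; it only builds inside the Gitea module.

package main

import (
	"fmt"

	issues_model "code.gitea.io/gitea/models/issues"
)

func main() {
	for _, name := range []string{"comment", "review", "nonsense"} {
		t := issues_model.AsCommentType(name)
		if t == issues_model.CommentTypeUndefined {
			// Undefined doubles as the "any type" filter in FindCommentsOptions.
			fmt.Printf("%s: not a known comment type\n", name)
			continue
		}
		fmt.Printf("%s: content=%v attachments=%v\n", name, t.HasContentSupport(), t.HasAttachmentSupport())
	}
}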

View File

@ -64,8 +64,9 @@ func TestFetchCodeComments(t *testing.T) {
}
func TestAsCommentType(t *testing.T) {
assert.Equal(t, issues_model.CommentTypeUnknown, issues_model.AsCommentType(""))
assert.Equal(t, issues_model.CommentTypeUnknown, issues_model.AsCommentType("nonsense"))
assert.Equal(t, issues_model.CommentType(0), issues_model.CommentTypeComment)
assert.Equal(t, issues_model.CommentTypeUndefined, issues_model.AsCommentType(""))
assert.Equal(t, issues_model.CommentTypeUndefined, issues_model.AsCommentType("nonsense"))
assert.Equal(t, issues_model.CommentTypeComment, issues_model.AsCommentType("comment"))
assert.Equal(t, issues_model.CommentTypePRUnScheduledToAutoMerge, issues_model.AsCommentType("pull_cancel_scheduled_merge"))
}

View File

@ -269,7 +269,7 @@ func (issue *Issue) LoadPullRequest(ctx context.Context) (err error) {
}
func (issue *Issue) loadComments(ctx context.Context) (err error) {
return issue.loadCommentsByType(ctx, CommentTypeUnknown)
return issue.loadCommentsByType(ctx, CommentTypeUndefined)
}
// LoadDiscussComments loads discuss comments
@ -743,8 +743,8 @@ func ChangeIssueStatus(ctx context.Context, issue *Issue, doer *user_model.User,
}
// ChangeIssueTitle changes the title of this issue, as the given user.
func ChangeIssueTitle(issue *Issue, doer *user_model.User, oldTitle string) (err error) {
ctx, committer, err := db.TxContext(db.DefaultContext)
func ChangeIssueTitle(ctx context.Context, issue *Issue, doer *user_model.User, oldTitle string) (err error) {
ctx, committer, err := db.TxContext(ctx)
if err != nil {
return err
}

View File

@ -83,7 +83,7 @@ func TestXRef_NeuterCrossReferences(t *testing.T) {
d := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
i.Title = "title2, no mentions"
assert.NoError(t, issues_model.ChangeIssueTitle(i, d, title))
assert.NoError(t, issues_model.ChangeIssueTitle(db.DefaultContext, i, d, title))
ref = unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: itarget.ID, RefIssueID: i.ID, RefCommentID: 0})
assert.Equal(t, issues_model.CommentTypeIssueRef, ref.Type)

View File

@ -7,7 +7,6 @@ import (
"context"
"fmt"
"strings"
"time"
"code.gitea.io/gitea/models/db"
repo_model "code.gitea.io/gitea/models/repo"
@ -65,7 +64,6 @@ type Milestone struct {
DeadlineString string `xorm:"-"`
TotalTrackedTime int64 `xorm:"-"`
TimeSinceUpdate int64 `xorm:"-"`
}
func init() {
@ -84,9 +82,6 @@ func (m *Milestone) BeforeUpdate() {
// AfterLoad is invoked from XORM after setting the value of a field of
// this object.
func (m *Milestone) AfterLoad() {
if !m.UpdatedUnix.IsZero() {
m.TimeSinceUpdate = time.Now().Unix() - m.UpdatedUnix.AsTime().Unix()
}
m.NumOpenIssues = m.NumIssues - m.NumClosedIssues
if m.DeadlineUnix.Year() == 9999 {
return

View File

@ -165,11 +165,6 @@ func (log *TestLogger) Init(config string) error {
return nil
}
// Content returns the content accumulated in the content provider
func (log *TestLogger) Content() (string, error) {
return "", fmt.Errorf("not supported")
}
// Flush when log should be flushed
func (log *TestLogger) Flush() {
}

View File

@ -481,6 +481,10 @@ var migrations = []Migration{
NewMigration("Change Container Metadata", v1_20.ChangeContainerMetadataMultiArch),
// v251 -> v252
NewMigration("Fix incorrect owner team unit access mode", v1_20.FixIncorrectOwnerTeamUnitAccessMode),
// v252 -> v253
NewMigration("Fix incorrect admin team unit access mode", v1_20.FixIncorrectAdminTeamUnitAccessMode),
// v253 -> v254
NewMigration("Fix ExternalTracker and ExternalWiki accessMode in owner and admin team", v1_20.FixExternalTrackerAndExternalWikiAccessModeInOwnerAndAdminTeam),
}
// GetCurrentDBVersion returns the current db version

View File

@ -22,10 +22,10 @@ func FixLanguageStatsToSaveSize(x *xorm.Engine) error {
type RepoIndexerType int
const (
// RepoIndexerTypeCode code indexer
RepoIndexerTypeCode RepoIndexerType = iota // 0
// RepoIndexerTypeStats repository stats indexer
RepoIndexerTypeStats // 1
// RepoIndexerTypeCode code indexer - 0
RepoIndexerTypeCode RepoIndexerType = iota //nolint:unused
// RepoIndexerTypeStats repository stats indexer - 1
RepoIndexerTypeStats
)
// RepoIndexerStatus see models/repo_indexer.go

View File

@ -8,13 +8,13 @@ import (
)
func FixRepoTopics(x *xorm.Engine) error {
type Topic struct {
type Topic struct { //nolint:unused
ID int64 `xorm:"pk autoincr"`
Name string `xorm:"UNIQUE VARCHAR(25)"`
RepoCount int
}
type RepoTopic struct {
type RepoTopic struct { //nolint:unused
RepoID int64 `xorm:"pk"`
TopicID int64 `xorm:"pk"`
}

View File

@ -16,7 +16,7 @@ func ConvertHookTaskTypeToVarcharAndTrim(x *xorm.Engine) error {
return nil
}
type HookTask struct {
type HookTask struct { //nolint:unused
Typ string `xorm:"VARCHAR(16) index"`
}
@ -42,7 +42,7 @@ func ConvertHookTaskTypeToVarcharAndTrim(x *xorm.Engine) error {
return err
}
type Webhook struct {
type Webhook struct { //nolint:unused
Type string `xorm:"VARCHAR(16) index"`
}

View File

@ -0,0 +1,47 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package v1_20 //nolint
import (
"code.gitea.io/gitea/modules/log"
"xorm.io/xorm"
)
func FixIncorrectAdminTeamUnitAccessMode(x *xorm.Engine) error {
type UnitType int
type AccessMode int
type TeamUnit struct {
ID int64 `xorm:"pk autoincr"`
OrgID int64 `xorm:"INDEX"`
TeamID int64 `xorm:"UNIQUE(s)"`
Type UnitType `xorm:"UNIQUE(s)"`
AccessMode AccessMode
}
const (
// AccessModeAdmin admin access
AccessModeAdmin = 3
)
sess := x.NewSession()
defer sess.Close()
if err := sess.Begin(); err != nil {
return err
}
count, err := sess.Table("team_unit").
Where("team_id IN (SELECT id FROM team WHERE authorize = ?)", AccessModeAdmin).
Update(&TeamUnit{
AccessMode: AccessModeAdmin,
})
if err != nil {
return err
}
log.Debug("Updated %d admin team unit access mode to belong to admin instead of none", count)
return sess.Commit()
}

View File

@ -0,0 +1,49 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package v1_20 //nolint
import (
"code.gitea.io/gitea/modules/log"
"xorm.io/xorm"
)
func FixExternalTrackerAndExternalWikiAccessModeInOwnerAndAdminTeam(x *xorm.Engine) error {
type UnitType int
type AccessMode int
type TeamUnit struct {
ID int64 `xorm:"pk autoincr"`
Type UnitType `xorm:"UNIQUE(s)"`
AccessMode AccessMode
}
const (
// AccessModeRead read access
AccessModeRead = 1
// Unit Type
TypeExternalWiki = 6
TypeExternalTracker = 7
)
sess := x.NewSession()
defer sess.Close()
if err := sess.Begin(); err != nil {
return err
}
count, err := sess.Table("team_unit").
Where("type IN (?, ?) AND access_mode > ?", TypeExternalWiki, TypeExternalTracker, AccessModeRead).
Update(&TeamUnit{
AccessMode: AccessModeRead,
})
if err != nil {
return err
}
log.Debug("Updated %d ExternalTracker and ExternalWiki access mode to belong to owner and admin", count)
return sess.Commit()
}

View File

@ -414,6 +414,7 @@ func DeleteTeam(t *organization.Team) error {
&organization.TeamUser{OrgID: t.OrgID, TeamID: t.ID},
&organization.TeamUnit{TeamID: t.ID},
&organization.TeamInvite{TeamID: t.ID},
&issues_model.Review{Type: issues_model.ReviewTypeRequest, ReviewerTeamID: t.ID}, // batch delete the binding relationship between team and PR (request review from team)
); err != nil {
return err
}

View File

@ -342,11 +342,15 @@ func CreateOrganization(org *Organization, owner *user_model.User) (err error) {
// insert units for team
units := make([]TeamUnit, 0, len(unit.AllRepoUnitTypes))
for _, tp := range unit.AllRepoUnitTypes {
up := perm.AccessModeOwner
if tp == unit.TypeExternalTracker || tp == unit.TypeExternalWiki {
up = perm.AccessModeRead
}
units = append(units, TeamUnit{
OrgID: org.ID,
TeamID: t.ID,
Type: tp,
AccessMode: perm.AccessModeOwner,
AccessMode: up,
})
}

View File

@ -229,14 +229,14 @@ func UpdateBoard(ctx context.Context, board *Board) error {
// GetBoards fetches all boards related to a project
// if no default board set, first board is a temporary "Uncategorized" board
func GetBoards(ctx context.Context, projectID int64) (BoardList, error) {
func (p *Project) GetBoards(ctx context.Context) (BoardList, error) {
boards := make([]*Board, 0, 5)
if err := db.GetEngine(ctx).Where("project_id=? AND `default`=?", projectID, false).OrderBy("Sorting").Find(&boards); err != nil {
if err := db.GetEngine(ctx).Where("project_id=? AND `default`=?", p.ID, false).OrderBy("Sorting").Find(&boards); err != nil {
return nil, err
}
defaultB, err := getDefaultBoard(ctx, projectID)
defaultB, err := p.getDefaultBoard(ctx)
if err != nil {
return nil, err
}
@ -245,9 +245,9 @@ func GetBoards(ctx context.Context, projectID int64) (BoardList, error) {
}
// getDefaultBoard return default board and create a dummy if none exist
func getDefaultBoard(ctx context.Context, projectID int64) (*Board, error) {
func (p *Project) getDefaultBoard(ctx context.Context) (*Board, error) {
var board Board
exist, err := db.GetEngine(ctx).Where("project_id=? AND `default`=?", projectID, true).Get(&board)
exist, err := db.GetEngine(ctx).Where("project_id=? AND `default`=?", p.ID, true).Get(&board)
if err != nil {
return nil, err
}
@ -257,7 +257,7 @@ func getDefaultBoard(ctx context.Context, projectID int64) (*Board, error) {
// represents a board for issues not assigned to one
return &Board{
ProjectID: projectID,
ProjectID: p.ID,
Title: "Uncategorized",
Default: true,
}, nil

View File

@ -18,17 +18,18 @@ import (
// Attachment represent a attachment of issue/comment/release.
type Attachment struct {
ID int64 `xorm:"pk autoincr"`
UUID string `xorm:"uuid UNIQUE"`
RepoID int64 `xorm:"INDEX"` // this should not be zero
IssueID int64 `xorm:"INDEX"` // maybe zero when creating
ReleaseID int64 `xorm:"INDEX"` // maybe zero when creating
UploaderID int64 `xorm:"INDEX DEFAULT 0"` // Notice: will be zero before this column added
CommentID int64
Name string
DownloadCount int64 `xorm:"DEFAULT 0"`
Size int64 `xorm:"DEFAULT 0"`
CreatedUnix timeutil.TimeStamp `xorm:"created"`
ID int64 `xorm:"pk autoincr"`
UUID string `xorm:"uuid UNIQUE"`
RepoID int64 `xorm:"INDEX"` // this should not be zero
IssueID int64 `xorm:"INDEX"` // maybe zero when creating
ReleaseID int64 `xorm:"INDEX"` // maybe zero when creating
UploaderID int64 `xorm:"INDEX DEFAULT 0"` // Notice: will be zero before this column added
CommentID int64
Name string
DownloadCount int64 `xorm:"DEFAULT 0"`
Size int64 `xorm:"DEFAULT 0"`
CreatedUnix timeutil.TimeStamp `xorm:"created"`
CustomDownloadURL string `xorm:"-"`
}
func init() {
@ -57,6 +58,10 @@ func (a *Attachment) RelativePath() string {
// DownloadURL returns the download url of the attached file
func (a *Attachment) DownloadURL() string {
if a.CustomDownloadURL != "" {
return a.CustomDownloadURL
}
return setting.AppURL + "attachments/" + url.PathEscape(a.UUID)
}

View File

@ -7,6 +7,7 @@ package repo
import (
"context"
"fmt"
"net/url"
"sort"
"strconv"
"strings"
@ -372,6 +373,34 @@ func GetReleaseAttachments(ctx context.Context, rels ...*Release) (err error) {
sortedRels.Rel[currentIndex].Attachments = append(sortedRels.Rel[currentIndex].Attachments, attachment)
}
// Make URLs predictable
for _, release := range rels {
// If we have no Repo, we don't need to execute this loop
if release.Repo == nil {
continue
}
// Check if there are two or more attachments with the same name
hasDuplicates := false
foundNames := make(map[string]bool)
for _, attachment := range release.Attachments {
_, found := foundNames[attachment.Name]
if found {
hasDuplicates = true
break
} else {
foundNames[attachment.Name] = true
}
}
// If the names are unique, use the URL with the Name instead of the UUID
if !hasDuplicates {
for _, attachment := range release.Attachments {
attachment.CustomDownloadURL = release.Repo.HTMLURL() + "/releases/download/" + url.PathEscape(release.TagName) + "/" + url.PathEscape(attachment.Name)
}
}
}
return err
}
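With CustomDownloadURL in place, GetReleaseAttachments rewrites the download link to a name-based form whenever every attachment name inside a release is unique, and DownloadURL falls back to the old UUID form otherwise. A self-contained sketch of the two URL shapes; the host, tag and file names below are placeholders, not values from the fixtures.

package main

import (
	"fmt"
	"net/url"
)

// The two URL shapes DownloadURL can return after this change. All arguments
// are placeholders; only the path layout mirrors the code above.
func downloadURL(appURL, repoHTMLURL, tag, name, uuid string, namesUnique bool) string {
	if namesUnique {
		// Predictable form written into CustomDownloadURL by GetReleaseAttachments.
		return repoHTMLURL + "/releases/download/" + url.PathEscape(tag) + "/" + url.PathEscape(name)
	}
	// Old UUID-based fallback, still used when two assets in a release share a name.
	return appURL + "attachments/" + url.PathEscape(uuid)
}

func main() {
	fmt.Println(downloadURL("https://gitea.example.com/", "https://gitea.example.com/user2/repo-release", "v1.0", "asset.tar.gz", "some-uuid", true))
	fmt.Println(downloadURL("https://gitea.example.com/", "https://gitea.example.com/user2/repo-release", "v1.0", "asset.tar.gz", "some-uuid", false))
}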

View File

@ -225,6 +225,12 @@ func (repo *Repository) IsBroken() bool {
return repo.Status == RepositoryBroken
}
// MarkAsBrokenEmpty marks the repo as broken and empty
func (repo *Repository) MarkAsBrokenEmpty() {
repo.Status = RepositoryBroken
repo.IsEmpty = true
}
// AfterLoad is invoked from XORM after setting the values of all fields of this object.
func (repo *Repository) AfterLoad() {
repo.NumOpenIssues = repo.NumIssues - repo.NumClosedIssues
@ -735,7 +741,7 @@ func IsRepositoryExist(ctx context.Context, u *user_model.User, repoName string)
return false, err
}
isDir, err := util.IsDir(RepoPath(u.Name, repoName))
return has && isDir, err
return has || isDir, err
}
// GetTemplateRepo populates repo.TemplateRepo for a generated repository and

View File

@ -235,12 +235,12 @@ func TestSearchRepository(t *testing.T) {
{
name: "AllPublic/PublicRepositoriesOfUserIncludingCollaborative",
opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, AllPublic: true, Template: util.OptionalBoolFalse},
count: 29,
count: 30,
},
{
name: "AllPublic/PublicAndPrivateRepositoriesOfUserIncludingCollaborative",
opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, Private: true, AllPublic: true, AllLimited: true, Template: util.OptionalBoolFalse},
count: 34,
count: 35,
},
{
name: "AllPublic/PublicAndPrivateRepositoriesOfUserIncludingCollaborativeByName",
@ -255,7 +255,7 @@ func TestSearchRepository(t *testing.T) {
{
name: "AllPublic/PublicRepositoriesOfOrganization",
opts: &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 17, AllPublic: true, Collaborate: util.OptionalBoolFalse, Template: util.OptionalBoolFalse},
count: 29,
count: 30,
},
{
name: "AllTemplates",

View File

@ -194,14 +194,16 @@ func (opts *FindTopicOptions) toConds() builder.Cond {
// FindTopics retrieves the topics via FindTopicOptions
func FindTopics(opts *FindTopicOptions) ([]*Topic, int64, error) {
sess := db.GetEngine(db.DefaultContext).Select("topic.*").Where(opts.toConds())
orderBy := "topic.repo_count DESC"
if opts.RepoID > 0 {
sess.Join("INNER", "repo_topic", "repo_topic.topic_id = topic.id")
orderBy = "topic.name" // when render topics for a repo, it's better to sort them by name, to get consistent result
}
if opts.PageSize != 0 && opts.Page != 0 {
sess = db.SetSessionPagination(sess, opts)
}
topics := make([]*Topic, 0, 10)
total, err := sess.Desc("topic.repo_count").FindAndCount(&topics)
total, err := sess.OrderBy(orderBy).FindAndCount(&topics)
return topics, total, err
}

View File

@ -17,7 +17,7 @@ import (
"xorm.io/xorm/schemas"
)
var fixtures *testfixtures.Loader
var fixturesLoader *testfixtures.Loader
// GetXORMEngine gets the XORM engine
func GetXORMEngine(engine ...*xorm.Engine) (x *xorm.Engine) {
@ -30,11 +30,11 @@ func GetXORMEngine(engine ...*xorm.Engine) (x *xorm.Engine) {
// InitFixtures initialize test fixtures for a test database
func InitFixtures(opts FixturesOptions, engine ...*xorm.Engine) (err error) {
e := GetXORMEngine(engine...)
var testfiles func(*testfixtures.Loader) error
var fixtureOptionFiles func(*testfixtures.Loader) error
if opts.Dir != "" {
testfiles = testfixtures.Directory(opts.Dir)
fixtureOptionFiles = testfixtures.Directory(opts.Dir)
} else {
testfiles = testfixtures.Files(opts.Files...)
fixtureOptionFiles = testfixtures.Files(opts.Files...)
}
dialect := "unknown"
switch e.Dialect().URI().DBType {
@ -54,14 +54,14 @@ func InitFixtures(opts FixturesOptions, engine ...*xorm.Engine) (err error) {
testfixtures.Database(e.DB().DB),
testfixtures.Dialect(dialect),
testfixtures.DangerousSkipTestDatabaseCheck(),
testfiles,
fixtureOptionFiles,
}
if e.Dialect().URI().DBType == schemas.POSTGRES {
loaderOptions = append(loaderOptions, testfixtures.SkipResetSequences())
}
fixtures, err = testfixtures.New(loaderOptions...)
fixturesLoader, err = testfixtures.New(loaderOptions...)
if err != nil {
return err
}
@ -78,11 +78,9 @@ func InitFixtures(opts FixturesOptions, engine ...*xorm.Engine) (err error) {
func LoadFixtures(engine ...*xorm.Engine) error {
e := GetXORMEngine(engine...)
var err error
// Database transaction conflicts could occur and result in ROLLBACK
// As a simple workaround, we just retry 20 times.
for i := 0; i < 20; i++ {
err = fixtures.Load()
if err == nil {
// (doubt) database transaction conflicts could occur and result in ROLLBACK? just try for a few times.
for i := 0; i < 5; i++ {
if err = fixturesLoader.Load(); err == nil {
break
}
time.Sleep(200 * time.Millisecond)

View File

@ -17,7 +17,7 @@ type Badge struct {
}
// UserBadge represents a user badge
type UserBadge struct {
type UserBadge struct { //nolint:revive
ID int64 `xorm:"pk autoincr"`
BadgeID int64
UserID int64 `xorm:"INDEX"`

View File

@ -41,6 +41,18 @@ const (
// UserTypeOrganization defines an organization
UserTypeOrganization
// UserTypeReserved reserves a (non-existing) user, i.e. to prevent a spam user from re-registering after being deleted, or to reserve the name until the user is actually created later on
UserTypeUserReserved
// UserTypeOrganizationReserved reserves a (non-existing) organization, to be used in combination with UserTypeUserReserved
UserTypeOrganizationReserved
// UserTypeBot defines a bot user
UserTypeBot
// UserTypeRemoteUser defines a remote user for federated users
UserTypeRemoteUser
)
const (
@ -50,7 +62,7 @@ const (
EmailNotificationsOnMention = "onmention"
// EmailNotificationsDisabled indicates that the user would not like to be notified via email.
EmailNotificationsDisabled = "disabled"
// EmailNotificationsEnabled indicates that the user would like to receive all email notifications and your own
// EmailNotificationsAndYourOwn indicates that the user would like to receive all email notifications and your own
EmailNotificationsAndYourOwn = "andyourown"
)
@ -312,6 +324,7 @@ func GetUserFollowers(ctx context.Context, u, viewer *User, listOptions db.ListO
Select("`user`.*").
Join("LEFT", "follow", "`user`.id=follow.user_id").
Where("follow.follow_id=?", u.ID).
And("`user`.type=?", UserTypeIndividual).
And(isUserVisibleToViewerCond(viewer))
if listOptions.Page != 0 {
@ -333,6 +346,7 @@ func GetUserFollowing(ctx context.Context, u, viewer *User, listOptions db.ListO
Select("`user`.*").
Join("LEFT", "follow", "`user`.id=follow.follow_id").
Where("follow.user_id=?", u.ID).
And("`user`.type=?", UserTypeIndividual).
And(isUserVisibleToViewerCond(viewer))
if listOptions.Page != 0 {
@ -537,7 +551,8 @@ var (
"gitea-actions",
}
reservedUserPatterns = []string{"*.keys", "*.gpg", "*.rss", "*.atom"}
// DON'T ADD ANY NEW STUFF, WE SOLVE THIS WITH `/user/{obj}` PATHS!
reservedUserPatterns = []string{"*.keys", "*.gpg", "*.rss", "*.atom", "*.png"}
)
// IsUsableUsername returns an error when a username is reserved
@ -958,7 +973,7 @@ func GetUserByName(ctx context.Context, name string) (*User, error) {
if len(name) == 0 {
return nil, ErrUserNotExist{0, name, 0}
}
u := &User{LowerName: strings.ToLower(name)}
u := &User{LowerName: strings.ToLower(name), Type: UserTypeIndividual}
has, err := db.GetEngine(ctx).Get(u)
if err != nil {
return nil, err

View File

@ -5,6 +5,7 @@ package user_test
import (
"context"
"fmt"
"math/rand"
"strings"
"testing"
@ -64,9 +65,10 @@ func TestSearchUsers(t *testing.T) {
testSuccess := func(opts *user_model.SearchUserOptions, expectedUserOrOrgIDs []int64) {
users, _, err := user_model.SearchUsers(opts)
assert.NoError(t, err)
if assert.Len(t, users, len(expectedUserOrOrgIDs), opts) {
cassText := fmt.Sprintf("ids: %v, opts: %v", expectedUserOrOrgIDs, opts)
if assert.Len(t, users, len(expectedUserOrOrgIDs), "case: %s", cassText) {
for i, expectedID := range expectedUserOrOrgIDs {
assert.EqualValues(t, expectedID, users[i].ID)
assert.EqualValues(t, expectedID, users[i].ID, "case: %s", cassText)
}
}
}
@ -118,7 +120,7 @@ func TestSearchUsers(t *testing.T) {
[]int64{1})
testUserSuccess(&user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsRestricted: util.OptionalBoolTrue},
[]int64{29, 30})
[]int64{29})
testUserSuccess(&user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsProhibitLogin: util.OptionalBoolTrue},
[]int64{30})

View File

@ -21,6 +21,7 @@ const (
githubEventIssueComment = "issue_comment"
githubEventRelease = "release"
githubEventPullRequestComment = "pull_request_comment"
githubEventGollum = "gollum"
)
// canGithubEventMatch check if the input Github event can match any Gitea event.
@ -29,6 +30,10 @@ func canGithubEventMatch(eventName string, triggedEvent webhook_module.HookEvent
case githubEventRegistryPackage:
return triggedEvent == webhook_module.HookEventPackage
// See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#gollum
case githubEventGollum:
return triggedEvent == webhook_module.HookEventWiki
case githubEventIssues:
switch triggedEvent {
case webhook_module.HookEventIssues,

View File

@ -115,40 +115,58 @@ func detectMatched(commit *git.Commit, triggedEvent webhook_module.HookEventType
}
switch triggedEvent {
case webhook_module.HookEventCreate,
case // events with no activity types
webhook_module.HookEventCreate,
webhook_module.HookEventDelete,
webhook_module.HookEventFork,
webhook_module.HookEventIssueAssign,
webhook_module.HookEventIssueLabel,
webhook_module.HookEventIssueMilestone,
webhook_module.HookEventPullRequestAssign,
webhook_module.HookEventPullRequestLabel,
webhook_module.HookEventPullRequestMilestone,
webhook_module.HookEventPullRequestComment,
webhook_module.HookEventPullRequestReviewApproved,
webhook_module.HookEventPullRequestReviewRejected,
webhook_module.HookEventPullRequestReviewComment,
webhook_module.HookEventWiki,
webhook_module.HookEventRepository,
webhook_module.HookEventRelease,
webhook_module.HookEventPackage:
webhook_module.HookEventWiki:
if len(evt.Acts()) != 0 {
log.Warn("Ignore unsupported %s event arguments %v", triggedEvent, evt.Acts())
}
// no special filter parameters for these events, just return true if name matched
return true
case webhook_module.HookEventPush:
case // push
webhook_module.HookEventPush:
return matchPushEvent(commit, payload.(*api.PushPayload), evt)
case webhook_module.HookEventIssues:
case // issues
webhook_module.HookEventIssues,
webhook_module.HookEventIssueAssign,
webhook_module.HookEventIssueLabel,
webhook_module.HookEventIssueMilestone:
return matchIssuesEvent(commit, payload.(*api.IssuePayload), evt)
case webhook_module.HookEventPullRequest, webhook_module.HookEventPullRequestSync:
case // issue_comment
webhook_module.HookEventIssueComment,
// `pull_request_comment` is the same as `issue_comment`
// See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_comment-use-issue_comment
webhook_module.HookEventPullRequestComment:
return matchIssueCommentEvent(commit, payload.(*api.IssueCommentPayload), evt)
case // pull_request
webhook_module.HookEventPullRequest,
webhook_module.HookEventPullRequestSync,
webhook_module.HookEventPullRequestAssign,
webhook_module.HookEventPullRequestLabel:
return matchPullRequestEvent(commit, payload.(*api.PullRequestPayload), evt)
case webhook_module.HookEventIssueComment:
return matchIssueCommentEvent(commit, payload.(*api.IssueCommentPayload), evt)
case // pull_request_review
webhook_module.HookEventPullRequestReviewApproved,
webhook_module.HookEventPullRequestReviewRejected:
return matchPullRequestReviewEvent(commit, payload.(*api.PullRequestPayload), evt)
case // pull_request_review_comment
webhook_module.HookEventPullRequestReviewComment:
return matchPullRequestReviewCommentEvent(commit, payload.(*api.PullRequestPayload), evt)
case // release
webhook_module.HookEventRelease:
return matchReleaseEvent(commit, payload.(*api.ReleasePayload), evt)
case // registry_package
webhook_module.HookEventPackage:
return matchPackageEvent(commit, payload.(*api.PackagePayload), evt)
default:
log.Warn("unsupported event %q", triggedEvent)
@ -265,8 +283,24 @@ func matchIssuesEvent(commit *git.Commit, issuePayload *api.IssuePayload, evt *j
for cond, vals := range evt.Acts() {
switch cond {
case "types":
// See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#issues
// Actions with the same name:
// opened, edited, closed, reopened, assigned, unassigned, milestoned, demilestoned
// Actions need to be converted:
// label_updated -> labeled
// label_cleared -> unlabeled
// Unsupported activity types:
// deleted, transferred, pinned, unpinned, locked, unlocked
action := issuePayload.Action
switch action {
case api.HookIssueLabelUpdated:
action = "labeled"
case api.HookIssueLabelCleared:
action = "unlabeled"
}
for _, val := range vals {
if glob.MustCompile(val, '/').Match(string(issuePayload.Action)) {
if glob.MustCompile(val, '/').Match(string(action)) {
matchTimes++
break
}
@ -281,8 +315,9 @@ func matchIssuesEvent(commit *git.Commit, issuePayload *api.IssuePayload, evt *j
func matchPullRequestEvent(commit *git.Commit, prPayload *api.PullRequestPayload, evt *jobparser.Event) bool {
// with no special filter parameters
if len(evt.Acts()) == 0 {
// defaultly, only pull request opened and synchronized will trigger workflow
return prPayload.Action == api.HookIssueSynchronized || prPayload.Action == api.HookIssueOpened
// by default, only the pull request `opened`, `reopened` and `synchronized` actions will trigger the workflow
// See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request
return prPayload.Action == api.HookIssueSynchronized || prPayload.Action == api.HookIssueOpened || prPayload.Action == api.HookIssueReOpened
}
matchTimes := 0
@ -290,9 +325,24 @@ func matchPullRequestEvent(commit *git.Commit, prPayload *api.PullRequestPayload
for cond, vals := range evt.Acts() {
switch cond {
case "types":
// See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request
// Actions with the same name:
// opened, edited, closed, reopened, assigned, unassigned
// Actions need to be converted:
// synchronized -> synchronize
// label_updated -> labeled
// label_cleared -> unlabeled
// Unsupported activity types:
// converted_to_draft, ready_for_review, locked, unlocked, review_requested, review_request_removed, auto_merge_enabled, auto_merge_disabled
action := prPayload.Action
if prPayload.Action == api.HookIssueSynchronized {
switch action {
case api.HookIssueSynchronized:
action = "synchronize"
case api.HookIssueLabelUpdated:
action = "labeled"
case api.HookIssueLabelCleared:
action = "unlabeled"
}
log.Trace("matching pull_request %s with %v", action, vals)
for _, val := range vals {
@ -363,6 +413,14 @@ func matchIssueCommentEvent(commit *git.Commit, issueCommentPayload *api.IssueCo
for cond, vals := range evt.Acts() {
switch cond {
case "types":
// See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#issue_comment
// Actions with the same name:
// created, edited, deleted
// Actions need to be converted:
// NONE
// Unsupported activity types:
// NONE
for _, val := range vals {
if glob.MustCompile(val, '/').Match(string(issueCommentPayload.Action)) {
matchTimes++
@ -370,7 +428,179 @@ func matchIssueCommentEvent(commit *git.Commit, issueCommentPayload *api.IssueCo
}
}
default:
log.Warn("issue comment unsupported condition %q", cond)
log.Warn("issue comment event unsupported condition %q", cond)
}
}
return matchTimes == len(evt.Acts())
}
func matchPullRequestReviewEvent(commit *git.Commit, prPayload *api.PullRequestPayload, evt *jobparser.Event) bool {
// with no special filter parameters
if len(evt.Acts()) == 0 {
return true
}
matchTimes := 0
// all acts conditions should be satisfied
for cond, vals := range evt.Acts() {
switch cond {
case "types":
// See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_review
// Activity types with the same name:
// NONE
// Activity types need to be converted:
// reviewed -> submitted
// reviewed -> edited
// Unsupported activity types:
// dismissed
actions := make([]string, 0)
if prPayload.Action == api.HookIssueReviewed {
// the `reviewed` HookIssueAction can match the two activity types: `submitted` and `edited`
// See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_review
actions = append(actions, "submitted", "edited")
}
matched := false
for _, val := range vals {
for _, action := range actions {
if glob.MustCompile(val, '/').Match(action) {
matched = true
break
}
}
if matched {
break
}
}
if matched {
matchTimes++
}
default:
log.Warn("pull request review event unsupported condition %q", cond)
}
}
return matchTimes == len(evt.Acts())
}
func matchPullRequestReviewCommentEvent(commit *git.Commit, prPayload *api.PullRequestPayload, evt *jobparser.Event) bool {
// with no special filter parameters
if len(evt.Acts()) == 0 {
return true
}
matchTimes := 0
// all acts conditions should be satisfied
for cond, vals := range evt.Acts() {
switch cond {
case "types":
// See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_review_comment
// Activity types with the same name:
// NONE
// Activity types need to be converted:
// reviewed -> created
// reviewed -> edited
// Unsupported activity types:
// deleted
actions := make([]string, 0)
if prPayload.Action == api.HookIssueReviewed {
// the `reviewed` HookIssueAction can match the two activity types: `created` and `edited`
// See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_review_comment
actions = append(actions, "created", "edited")
}
matched := false
for _, val := range vals {
for _, action := range actions {
if glob.MustCompile(val, '/').Match(action) {
matched = true
break
}
}
if matched {
break
}
}
if matched {
matchTimes++
}
default:
log.Warn("pull request review comment event unsupported condition %q", cond)
}
}
return matchTimes == len(evt.Acts())
}
func matchReleaseEvent(commit *git.Commit, payload *api.ReleasePayload, evt *jobparser.Event) bool {
// with no special filter parameters
if len(evt.Acts()) == 0 {
return true
}
matchTimes := 0
// all acts conditions should be satisfied
for cond, vals := range evt.Acts() {
switch cond {
case "types":
// See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#release
// Activity types with the same name:
// published
// Activity types need to be converted:
// updated -> edited
// Unsupported activity types:
// unpublished, created, deleted, prereleased, released
action := payload.Action
switch action {
case api.HookReleaseUpdated:
action = "edited"
}
for _, val := range vals {
if glob.MustCompile(val, '/').Match(string(action)) {
matchTimes++
break
}
}
default:
log.Warn("release event unsupported condition %q", cond)
}
}
return matchTimes == len(evt.Acts())
}
func matchPackageEvent(commit *git.Commit, payload *api.PackagePayload, evt *jobparser.Event) bool {
// with no special filter parameters
if len(evt.Acts()) == 0 {
return true
}
matchTimes := 0
// all acts conditions should be satisfied
for cond, vals := range evt.Acts() {
switch cond {
case "types":
// See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#registry_package
// Activity types with the same name:
// NONE
// Activity types need to be converted:
// created -> published
// Unsupported activity types:
// updated
action := payload.Action
switch action {
case api.HookPackageCreated:
action = "published"
}
for _, val := range vals {
if glob.MustCompile(val, '/').Match(string(action)) {
matchTimes++
break
}
}
default:
log.Warn("package event unsupported condition %q", cond)
}
}
return matchTimes == len(evt.Acts())

View File

@ -0,0 +1,112 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package actions
import (
"testing"
"code.gitea.io/gitea/modules/git"
api "code.gitea.io/gitea/modules/structs"
webhook_module "code.gitea.io/gitea/modules/webhook"
"github.com/stretchr/testify/assert"
)
func TestDetectMatched(t *testing.T) {
testCases := []struct {
desc string
commit *git.Commit
triggedEvent webhook_module.HookEventType
payload api.Payloader
yamlOn string
expected bool
}{
{
desc: "HookEventCreate(create) matches githubEventCreate(create)",
triggedEvent: webhook_module.HookEventCreate,
payload: nil,
yamlOn: "on: create",
expected: true,
},
{
desc: "HookEventIssues(issues) `opened` action matches githubEventIssues(issues)",
triggedEvent: webhook_module.HookEventIssues,
payload: &api.IssuePayload{Action: api.HookIssueOpened},
yamlOn: "on: issues",
expected: true,
},
{
desc: "HookEventIssues(issues) `milestoned` action matches githubEventIssues(issues)",
triggedEvent: webhook_module.HookEventIssues,
payload: &api.IssuePayload{Action: api.HookIssueMilestoned},
yamlOn: "on: issues",
expected: true,
},
{
desc: "HookEventPullRequestSync(pull_request_sync) matches githubEventPullRequest(pull_request)",
triggedEvent: webhook_module.HookEventPullRequestSync,
payload: &api.PullRequestPayload{Action: api.HookIssueSynchronized},
yamlOn: "on: pull_request",
expected: true,
},
{
desc: "HookEventPullRequest(pull_request) `label_updated` action doesn't match githubEventPullRequest(pull_request) with no activity type",
triggedEvent: webhook_module.HookEventPullRequest,
payload: &api.PullRequestPayload{Action: api.HookIssueLabelUpdated},
yamlOn: "on: pull_request",
expected: false,
},
{
desc: "HookEventPullRequest(pull_request) `label_updated` action matches githubEventPullRequest(pull_request) with `label` activity type",
triggedEvent: webhook_module.HookEventPullRequest,
payload: &api.PullRequestPayload{Action: api.HookIssueLabelUpdated},
yamlOn: "on:\n pull_request:\n types: [labeled]",
expected: true,
},
{
desc: "HookEventPullRequestReviewComment(pull_request_review_comment) matches githubEventPullRequestReviewComment(pull_request_review_comment)",
triggedEvent: webhook_module.HookEventPullRequestReviewComment,
payload: &api.PullRequestPayload{Action: api.HookIssueReviewed},
yamlOn: "on:\n pull_request_review_comment:\n types: [created]",
expected: true,
},
{
desc: "HookEventPullRequestReviewRejected(pull_request_review_rejected) doesn't match githubEventPullRequestReview(pull_request_review) with `dismissed` activity type (we don't support `dismissed` at present)",
triggedEvent: webhook_module.HookEventPullRequestReviewRejected,
payload: &api.PullRequestPayload{Action: api.HookIssueReviewed},
yamlOn: "on:\n pull_request_review:\n types: [dismissed]",
expected: false,
},
{
desc: "HookEventRelease(release) `published` action matches githubEventRelease(release) with `published` activity type",
triggedEvent: webhook_module.HookEventRelease,
payload: &api.ReleasePayload{Action: api.HookReleasePublished},
yamlOn: "on:\n release:\n types: [published]",
expected: true,
},
{
desc: "HookEventPackage(package) `created` action doesn't match githubEventRegistryPackage(registry_package) with `updated` activity type",
triggedEvent: webhook_module.HookEventPackage,
payload: &api.PackagePayload{Action: api.HookPackageCreated},
yamlOn: "on:\n registry_package:\n types: [updated]",
expected: false,
},
{
desc: "HookEventWiki(wiki) matches githubEventGollum(gollum)",
triggedEvent: webhook_module.HookEventWiki,
payload: nil,
yamlOn: "on: gollum",
expected: true,
},
}
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
evts, err := GetEventsFromContent([]byte(tc.yamlOn))
assert.NoError(t, err)
assert.Len(t, evts, 1)
assert.Equal(t, tc.expected, detectMatched(tc.commit, tc.triggedEvent, tc.payload, evts[0]))
})
}
}

modules/assetfs/layered.go
View File

@ -0,0 +1,260 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package assetfs
import (
"context"
"fmt"
"io"
"io/fs"
"net/http"
"os"
"path/filepath"
"sort"
"time"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/process"
"code.gitea.io/gitea/modules/util"
"github.com/fsnotify/fsnotify"
)
// Layer represents a layer in a layered asset file-system. It has a name and works like http.FileSystem
type Layer struct {
name string
fs http.FileSystem
localPath string
}
func (l *Layer) Name() string {
return l.name
}
// Open opens the named file. The caller is responsible for closing the file.
func (l *Layer) Open(name string) (http.File, error) {
return l.fs.Open(name)
}
// Local returns a new Layer with the given name, it serves files from the given local path.
func Local(name, base string, sub ...string) *Layer {
// TODO: the old behavior (StaticRootPath might not be absolute), not ideal, just keep the same as before
// Ideally, the caller should guarantee the base is absolute, guessing a relative path based on the current working directory is unreliable.
base, err := filepath.Abs(base)
if err != nil {
// This should never happen in a real system. If it happens, the user must have already been in trouble: the system is not able to resolve its own paths.
panic(fmt.Sprintf("Unable to get absolute path for %q: %v", base, err))
}
root := util.FilePathJoinAbs(base, sub...)
return &Layer{name: name, fs: http.Dir(root), localPath: root}
}
// Bindata returns a new Layer with the given name, it serves files from the given bindata asset.
func Bindata(name string, fs http.FileSystem) *Layer {
return &Layer{name: name, fs: fs}
}
// LayeredFS is a layered asset file-system. It works like http.FileSystem, but it can have multiple layers.
// The first layer is the top layer, and it will be used first.
// If the file is not found in the top layer, it will be searched in the next layer.
type LayeredFS struct {
layers []*Layer
}
// Layered returns a new LayeredFS with the given layers. The first layer is the top layer.
func Layered(layers ...*Layer) *LayeredFS {
return &LayeredFS{layers: layers}
}
// Open opens the named file. The caller is responsible for closing the file.
func (l *LayeredFS) Open(name string) (http.File, error) {
for _, layer := range l.layers {
f, err := layer.Open(name)
if err == nil || !os.IsNotExist(err) {
return f, err
}
}
return nil, fs.ErrNotExist
}
// ReadFile reads the named file.
func (l *LayeredFS) ReadFile(elems ...string) ([]byte, error) {
bs, _, err := l.ReadLayeredFile(elems...)
return bs, err
}
// ReadLayeredFile reads the named file, and returns the layer name.
func (l *LayeredFS) ReadLayeredFile(elems ...string) ([]byte, string, error) {
name := util.PathJoinRel(elems...)
for _, layer := range l.layers {
f, err := layer.Open(name)
if os.IsNotExist(err) {
continue
} else if err != nil {
return nil, layer.name, err
}
bs, err := io.ReadAll(f)
_ = f.Close()
return bs, layer.name, err
}
return nil, "", fs.ErrNotExist
}
func shouldInclude(info fs.FileInfo, fileMode ...bool) bool {
if util.CommonSkip(info.Name()) {
return false
}
if len(fileMode) == 0 {
return true
} else if len(fileMode) == 1 {
return fileMode[0] == !info.Mode().IsDir()
}
panic("too many arguments for fileMode in shouldInclude")
}
func readDir(layer *Layer, name string) ([]fs.FileInfo, error) {
f, err := layer.Open(name)
if os.IsNotExist(err) {
return nil, nil
} else if err != nil {
return nil, err
}
defer f.Close()
return f.Readdir(-1)
}
// ListFiles lists files/directories in the given directory. The fileMode controls the returned files.
// * omitted: all files and directories will be returned.
// * true: only files will be returned.
// * false: only directories will be returned.
// The returned files are sorted by name.
func (l *LayeredFS) ListFiles(name string, fileMode ...bool) ([]string, error) {
fileMap := map[string]bool{}
for _, layer := range l.layers {
infos, err := readDir(layer, name)
if err != nil {
return nil, err
}
for _, info := range infos {
if shouldInclude(info, fileMode...) {
fileMap[info.Name()] = true
}
}
}
files := make([]string, 0, len(fileMap))
for file := range fileMap {
files = append(files, file)
}
sort.Strings(files)
return files, nil
}
// ListAllFiles returns files/directories in the given directory, including subdirectories, recursively.
// The fileMode controls the returned files:
// * omitted: all files and directories will be returned.
// * true: only files will be returned.
// * false: only directories will be returned.
// The returned files are sorted by name.
func (l *LayeredFS) ListAllFiles(name string, fileMode ...bool) ([]string, error) {
return listAllFiles(l.layers, name, fileMode...)
}
func listAllFiles(layers []*Layer, name string, fileMode ...bool) ([]string, error) {
fileMap := map[string]bool{}
var list func(dir string) error
list = func(dir string) error {
for _, layer := range layers {
infos, err := readDir(layer, dir)
if err != nil {
return err
}
for _, info := range infos {
path := util.PathJoinRelX(dir, info.Name())
if shouldInclude(info, fileMode...) {
fileMap[path] = true
}
if info.IsDir() {
if err = list(path); err != nil {
return err
}
}
}
}
return nil
}
if err := list(name); err != nil {
return nil, err
}
var files []string
for file := range fileMap {
files = append(files, file)
}
sort.Strings(files)
return files, nil
}
// WatchLocalChanges watches local changes in the file-system. It's used to help to reload assets when the local file-system changes.
func (l *LayeredFS) WatchLocalChanges(ctx context.Context, callback func()) {
ctx, _, finished := process.GetManager().AddTypedContext(ctx, "Asset Local FileSystem Watcher", process.SystemProcessType, true)
defer finished()
watcher, err := fsnotify.NewWatcher()
if err != nil {
log.Error("Unable to create watcher for asset local file-system: %v", err)
return
}
defer watcher.Close()
for _, layer := range l.layers {
if layer.localPath == "" {
continue
}
layerDirs, err := listAllFiles([]*Layer{layer}, ".", false)
if err != nil {
log.Error("Unable to list directories for asset local file-system %q: %v", layer.localPath, err)
continue
}
for _, dir := range layerDirs {
if err = watcher.Add(util.FilePathJoinAbs(layer.localPath, dir)); err != nil {
log.Error("Unable to watch directory %s: %v", dir, err)
}
}
}
debounce := util.Debounce(100 * time.Millisecond)
for {
select {
case <-ctx.Done():
return
case event, ok := <-watcher.Events:
if !ok {
return
}
log.Trace("Watched asset local file-system had event: %v", event)
debounce(callback)
case err, ok := <-watcher.Errors:
if !ok {
return
}
log.Error("Watched asset local file-system had error: %v", err)
}
}
}
// GetFileLayerName returns the name of the first-seen layer that contains the given file.
func (l *LayeredFS) GetFileLayerName(elems ...string) string {
name := util.PathJoinRel(elems...)
for _, layer := range l.layers {
f, err := layer.Open(name)
if os.IsNotExist(err) {
continue
} else if err != nil {
return ""
}
_ = f.Close()
return layer.name
}
return ""
}
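
To make the layering concrete, here is a minimal usage sketch of the API added in this file. The layer names, directory paths, and the reload callback are illustrative only; the assetfs functions themselves (Layered, Local, ReadLayeredFile, WatchLocalChanges) are the ones defined above.

package main

import (
    "context"
    "fmt"

    "code.gitea.io/gitea/modules/assetfs"
)

func main() {
    // The first layer wins: files in "custom" shadow files in "builtin".
    assets := assetfs.Layered(
        assetfs.Local("custom", "/etc/gitea/custom", "templates"),   // hypothetical path
        assetfs.Local("builtin", "/usr/share/gitea", "templates"),   // hypothetical path
    )

    bs, layerName, err := assets.ReadLayeredFile("home.tmpl")
    if err != nil {
        fmt.Println("not found in any layer:", err)
        return
    }
    fmt.Printf("loaded %d bytes from layer %q\n", len(bs), layerName)

    // Blocks until the context is cancelled; the callback fires (debounced)
    // whenever a watched local directory changes.
    assets.WatchLocalChanges(context.Background(), func() {
        fmt.Println("assets changed, reload templates")
    })
}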

View File

@ -0,0 +1,109 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package assetfs
import (
"io"
"io/fs"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/assert"
)
func TestLayered(t *testing.T) {
dir := filepath.Join(t.TempDir(), "assetfs-layers")
dir1 := filepath.Join(dir, "l1")
dir2 := filepath.Join(dir, "l2")
mkdir := func(elems ...string) {
assert.NoError(t, os.MkdirAll(filepath.Join(elems...), 0o755))
}
write := func(content string, elems ...string) {
assert.NoError(t, os.WriteFile(filepath.Join(elems...), []byte(content), 0o644))
}
// d1 & f1: only in "l1"; d2 & f2: only in "l2"
// da & fa: in both "l1" and "l2"
mkdir(dir1, "d1")
mkdir(dir1, "da")
mkdir(dir1, "da/sub1")
mkdir(dir2, "d2")
mkdir(dir2, "da")
mkdir(dir2, "da/sub2")
write("dummy", dir1, ".DS_Store")
write("f1", dir1, "f1")
write("fa-1", dir1, "fa")
write("d1-f", dir1, "d1/f")
write("da-f-1", dir1, "da/f")
write("f2", dir2, "f2")
write("fa-2", dir2, "fa")
write("d2-f", dir2, "d2/f")
write("da-f-2", dir2, "da/f")
assets := Layered(Local("l1", dir1), Local("l2", dir2))
f, err := assets.Open("f1")
assert.NoError(t, err)
bs, err := io.ReadAll(f)
assert.NoError(t, err)
assert.EqualValues(t, "f1", string(bs))
_ = f.Close()
assertRead := func(expected string, expectedErr error, elems ...string) {
bs, err := assets.ReadFile(elems...)
if err != nil {
assert.ErrorAs(t, err, &expectedErr)
} else {
assert.NoError(t, err)
assert.Equal(t, expected, string(bs))
}
}
assertRead("f1", nil, "f1")
assertRead("f2", nil, "f2")
assertRead("fa-1", nil, "fa")
assertRead("d1-f", nil, "d1/f")
assertRead("d2-f", nil, "d2/f")
assertRead("da-f-1", nil, "da/f")
assertRead("", fs.ErrNotExist, "no-such")
files, err := assets.ListFiles(".", true)
assert.NoError(t, err)
assert.EqualValues(t, []string{"f1", "f2", "fa"}, files)
files, err = assets.ListFiles(".", false)
assert.NoError(t, err)
assert.EqualValues(t, []string{"d1", "d2", "da"}, files)
files, err = assets.ListFiles(".")
assert.NoError(t, err)
assert.EqualValues(t, []string{"d1", "d2", "da", "f1", "f2", "fa"}, files)
files, err = assets.ListAllFiles(".", true)
assert.NoError(t, err)
assert.EqualValues(t, []string{"d1/f", "d2/f", "da/f", "f1", "f2", "fa"}, files)
files, err = assets.ListAllFiles(".", false)
assert.NoError(t, err)
assert.EqualValues(t, []string{"d1", "d2", "da", "da/sub1", "da/sub2"}, files)
files, err = assets.ListAllFiles(".")
assert.NoError(t, err)
assert.EqualValues(t, []string{
"d1", "d1/f",
"d2", "d2/f",
"da", "da/f", "da/sub1", "da/sub2",
"f1", "f2", "fa",
}, files)
assert.Empty(t, assets.GetFileLayerName("no-such"))
assert.EqualValues(t, "l1", assets.GetFileLayerName("f1"))
assert.EqualValues(t, "l2", assets.GetFileLayerName("f2"))
}
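
WatchLocalChanges above coalesces bursts of fsnotify events through util.Debounce(100 * time.Millisecond) before invoking the reload callback. The following standalone sketch shows the general debounce pattern with only the standard library; it is a simplified stand-in, not Gitea's util.Debounce implementation.

package main

import (
    "fmt"
    "sync"
    "time"
)

// debouncer returns a function that schedules fn to run once after d of quiet
// time; each new call cancels the previously scheduled run.
func debouncer(d time.Duration) func(fn func()) {
    var mu sync.Mutex
    var timer *time.Timer
    return func(fn func()) {
        mu.Lock()
        defer mu.Unlock()
        if timer != nil {
            timer.Stop()
        }
        timer = time.AfterFunc(d, fn)
    }
}

func main() {
    debounce := debouncer(100 * time.Millisecond)
    for i := 0; i < 5; i++ {
        debounce(func() { fmt.Println("reload assets") }) // only one run fires after the burst
    }
    time.Sleep(300 * time.Millisecond) // give the debounced callback time to run
}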

View File

@ -14,5 +14,9 @@ var Supported = false
// Auth not supported lack of pam tag
func Auth(serviceName, userName, passwd string) (string, error) {
return "", errors.New("PAM not supported")
// bypass the lint on callers: SA4023: this comparison is always true (staticcheck)
if !Supported {
return "", errors.New("PAM not supported")
}
return "", nil
}

View File

@ -18,6 +18,8 @@ import (
"github.com/nfnt/resize"
"github.com/oliamb/cutter"
_ "golang.org/x/image/webp" // for processing webp images
)
// AvatarSize returns avatar's size

View File

@ -12,7 +12,7 @@ import (
"code.gitea.io/gitea/modules/nosql"
"gitea.com/go-chi/cache"
"github.com/go-redis/redis/v8"
"github.com/redis/go-redis/v9"
)
// RedisCacher represents a redis cache adapter implementation.

View File

@ -132,18 +132,10 @@ then resh (ר), and finally heh (ה) (which should appear leftmost).`,
},
}
type nullLocale struct{}
func (nullLocale) Language() string { return "" }
func (nullLocale) Tr(key string, _ ...interface{}) string { return key }
func (nullLocale) TrN(cnt interface{}, key1, keyN string, args ...interface{}) string { return "" }
var _ (translation.Locale) = nullLocale{}
func TestEscapeControlString(t *testing.T) {
for _, tt := range escapeControlTests {
t.Run(tt.name, func(t *testing.T) {
status, result := EscapeControlString(tt.text, nullLocale{})
status, result := EscapeControlString(tt.text, &translation.MockLocale{})
if !reflect.DeepEqual(*status, tt.status) {
t.Errorf("EscapeControlString() status = %v, wanted= %v", status, tt.status)
}
@ -179,7 +171,7 @@ func TestEscapeControlReader(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
input := strings.NewReader(tt.text)
output := &strings.Builder{}
status, err := EscapeControlReader(input, output, nullLocale{})
status, err := EscapeControlReader(input, output, &translation.MockLocale{})
result := output.String()
if err != nil {
t.Errorf("EscapeControlReader(): err = %v", err)
@ -201,5 +193,5 @@ func TestEscapeControlReader_panic(t *testing.T) {
for i := 0; i < 6826; i++ {
bs = append(bs, []byte("—")...)
}
_, _ = EscapeControlString(string(bs), nullLocale{})
_, _ = EscapeControlString(string(bs), &translation.MockLocale{})
}

View File

@ -7,6 +7,7 @@ import (
"bytes"
"context"
"fmt"
"net"
"net/http"
"strings"
"text/template"
@ -67,17 +68,23 @@ func AccessLogger() func(http.Handler) http.Handler {
requestID = parseRequestIDFromRequestHeader(req)
}
reqHost, _, err := net.SplitHostPort(req.RemoteAddr)
if err != nil {
reqHost = req.RemoteAddr
}
next.ServeHTTP(w, r)
rw := w.(ResponseWriter)
buf := bytes.NewBuffer([]byte{})
err := logTemplate.Execute(buf, routerLoggerOptions{
err = logTemplate.Execute(buf, routerLoggerOptions{
req: req,
Identity: &identity,
Start: &start,
ResponseWriter: rw,
Ctx: map[string]interface{}{
"RemoteAddr": req.RemoteAddr,
"RemoteHost": reqHost,
"Req": req,
},
RequestID: &requestID,
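
The access-log change above derives a RemoteHost value with net.SplitHostPort and falls back to the raw RemoteAddr when it carries no port (as can happen behind some proxies). A standalone sketch of that fallback pattern; the helper name is illustrative.

package main

import (
    "fmt"
    "net"
)

// remoteHost returns the host part of an http.Request.RemoteAddr value,
// falling back to the raw address when it is not in "host:port" form.
func remoteHost(remoteAddr string) string {
    host, _, err := net.SplitHostPort(remoteAddr)
    if err != nil {
        return remoteAddr
    }
    return host
}

func main() {
    fmt.Println(remoteHost("192.0.2.1:54321")) // 192.0.2.1
    fmt.Println(remoteHost("192.0.2.1"))       // 192.0.2.1 (fallback)
}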

View File

@ -67,7 +67,7 @@ func Toggle(options *ToggleOptions) func(ctx *Context) {
}
if !options.SignOutRequired && !options.DisableCSRF && ctx.Req.Method == "POST" {
ctx.csrf.Validate(ctx)
ctx.Csrf.Validate(ctx)
if ctx.Written() {
return
}
@ -89,7 +89,7 @@ func Toggle(options *ToggleOptions) func(ctx *Context) {
// Redirect to log in page if auto-signin info is provided and has not signed in.
if !options.SignOutRequired && !ctx.IsSigned &&
len(ctx.GetCookie(setting.CookieUserName)) > 0 {
len(ctx.GetSiteCookie(setting.CookieUserName)) > 0 {
if ctx.Req.URL.Path != "/user/events" {
middleware.SetRedirectToCookie(ctx.Resp, setting.AppSubURL+ctx.Req.URL.RequestURI())
}

View File

@ -16,10 +16,8 @@ import (
"net/http"
"net/url"
"path"
"regexp"
"strconv"
"strings"
texttemplate "text/template"
"time"
"code.gitea.io/gitea/models/db"
@ -45,9 +43,11 @@ import (
"golang.org/x/crypto/pbkdf2"
)
const CookieNameFlash = "gitea_flash"
// Render represents a template render
type Render interface {
TemplateLookup(tmpl string) (*template.Template, error)
TemplateLookup(tmpl string) (templates.TemplateExecutor, error)
HTML(w io.Writer, status int, name string, data interface{}) error
}
@ -60,7 +60,7 @@ type Context struct {
Render Render
translation.Locale
Cache cache.Cache
csrf CSRFProtector
Csrf CSRFProtector
Flash *middleware.Flash
Session session.Store
@ -214,7 +214,7 @@ func (ctx *Context) RedirectToFirst(location ...string) {
ctx.Redirect(setting.AppSubURL + "/")
}
var templateExecutingErr = regexp.MustCompile(`^template: (.*):([1-9][0-9]*):([1-9][0-9]*): executing (?:"(.*)" at <(.*)>: )?`)
const tplStatus500 base.TplName = "status/500"
// HTML calls Context.HTML and renders the template to HTTP response
func (ctx *Context) HTML(status int, name base.TplName) {
@ -227,38 +227,11 @@ func (ctx *Context) HTML(status int, name base.TplName) {
return strconv.FormatInt(time.Since(tmplStartTime).Nanoseconds()/1e6, 10) + "ms"
}
if err := ctx.Render.HTML(ctx.Resp, status, string(name), templates.BaseVars().Merge(ctx.Data)); err != nil {
if status == http.StatusInternalServerError && name == base.TplName("status/500") {
if status == http.StatusInternalServerError && name == tplStatus500 {
ctx.PlainText(http.StatusInternalServerError, "Unable to find HTML templates, the template system is not initialized, or Gitea can't find your template files.")
return
}
if execErr, ok := err.(texttemplate.ExecError); ok {
if groups := templateExecutingErr.FindStringSubmatch(err.Error()); len(groups) > 0 {
errorTemplateName, lineStr, posStr := groups[1], groups[2], groups[3]
target := ""
if len(groups) == 6 {
target = groups[5]
}
line, _ := strconv.Atoi(lineStr) // Cannot error out as groups[2] is [1-9][0-9]*
pos, _ := strconv.Atoi(posStr) // Cannot error out as groups[3] is [1-9][0-9]*
filename, filenameErr := templates.GetAssetFilename("templates/" + errorTemplateName + ".tmpl")
if filenameErr != nil {
filename = "(template) " + errorTemplateName
}
if errorTemplateName != string(name) {
filename += " (subtemplate of " + string(name) + ")"
}
err = fmt.Errorf("failed to render %s, error: %w:\n%s", filename, err, templates.GetLineFromTemplate(errorTemplateName, line, target, pos))
} else {
filename, filenameErr := templates.GetAssetFilename("templates/" + execErr.Name + ".tmpl")
if filenameErr != nil {
filename = "(template) " + execErr.Name
}
if execErr.Name != string(name) {
filename += " (subtemplate of " + string(name) + ")"
}
err = fmt.Errorf("failed to render %s, error: %w", filename, err)
}
}
err = fmt.Errorf("failed to render template: %s, error: %s", name, templates.HandleTemplateRenderingError(err))
ctx.ServerError("Render failed", err)
}
}
@ -326,24 +299,25 @@ func (ctx *Context) serverErrorInternal(logMsg string, logErr error) {
return
}
if !setting.IsProd {
ctx.Data["ErrorMsg"] = logErr
// it's safe to show internal error to admin users, and it helps
if !setting.IsProd || (ctx.Doer != nil && ctx.Doer.IsAdmin) {
ctx.Data["ErrorMsg"] = fmt.Sprintf("%s, %s", logMsg, logErr)
}
}
ctx.Data["Title"] = "Internal Server Error"
ctx.HTML(http.StatusInternalServerError, base.TplName("status/500"))
ctx.HTML(http.StatusInternalServerError, tplStatus500)
}
// NotFoundOrServerError uses an error check function to determine whether the error
// is a not-found error. It responds with a 404 status code for not-found errors,
// otherwise it logs the error context and responds with a 500 server error.
func (ctx *Context) NotFoundOrServerError(logMsg string, errCheck func(error) bool, err error) {
if errCheck(err) {
ctx.notFoundInternal(logMsg, err)
func (ctx *Context) NotFoundOrServerError(logMsg string, errCheck func(error) bool, logErr error) {
if errCheck(logErr) {
ctx.notFoundInternal(logMsg, logErr)
return
}
ctx.serverErrorInternal(logMsg, err)
ctx.serverErrorInternal(logMsg, logErr)
}
// PlainTextBytes renders bytes as plain text
@ -482,38 +456,26 @@ func (ctx *Context) Redirect(location string, status ...int) {
http.Redirect(ctx.Resp, ctx.Req, location, code)
}
// SetCookie convenience function to set most cookies consistently
// SetSiteCookie convenience function to set most cookies consistently
// CSRF and a few others are the exception here
func (ctx *Context) SetCookie(name, value string, expiry int) {
middleware.SetCookie(ctx.Resp, name, value,
expiry,
setting.AppSubURL,
setting.SessionConfig.Domain,
setting.SessionConfig.Secure,
true,
middleware.SameSite(setting.SessionConfig.SameSite))
func (ctx *Context) SetSiteCookie(name, value string, maxAge int) {
middleware.SetSiteCookie(ctx.Resp, name, value, maxAge)
}
// DeleteCookie convenience function to delete most cookies consistently
// DeleteSiteCookie convenience function to delete most cookies consistently
// CSRF and a few others are the exception here
func (ctx *Context) DeleteCookie(name string) {
middleware.SetCookie(ctx.Resp, name, "",
-1,
setting.AppSubURL,
setting.SessionConfig.Domain,
setting.SessionConfig.Secure,
true,
middleware.SameSite(setting.SessionConfig.SameSite))
func (ctx *Context) DeleteSiteCookie(name string) {
middleware.SetSiteCookie(ctx.Resp, name, "", -1)
}
// GetCookie returns given cookie value from request header.
func (ctx *Context) GetCookie(name string) string {
return middleware.GetCookie(ctx.Req, name)
// GetSiteCookie returns given cookie value from request header.
func (ctx *Context) GetSiteCookie(name string) string {
return middleware.GetSiteCookie(ctx.Req, name)
}
// GetSuperSecureCookie returns given cookie value from request header with secret string.
func (ctx *Context) GetSuperSecureCookie(secret, name string) (string, bool) {
val := ctx.GetCookie(name)
val := ctx.GetSiteCookie(name)
return ctx.CookieDecrypt(secret, val)
}
@ -534,10 +496,9 @@ func (ctx *Context) CookieDecrypt(secret, val string) (string, bool) {
}
// SetSuperSecureCookie sets given cookie value to response header with secret string.
func (ctx *Context) SetSuperSecureCookie(secret, name, value string, expiry int) {
func (ctx *Context) SetSuperSecureCookie(secret, name, value string, maxAge int) {
text := ctx.CookieEncrypt(secret, value)
ctx.SetCookie(name, text, expiry)
ctx.SetSiteCookie(name, text, maxAge)
}
// CookieEncrypt encrypts a given value using the provided secret
@ -553,19 +514,19 @@ func (ctx *Context) CookieEncrypt(secret, value string) string {
// GetCookieInt returns cookie result in int type.
func (ctx *Context) GetCookieInt(name string) int {
r, _ := strconv.Atoi(ctx.GetCookie(name))
r, _ := strconv.Atoi(ctx.GetSiteCookie(name))
return r
}
// GetCookieInt64 returns cookie result in int64 type.
func (ctx *Context) GetCookieInt64(name string) int64 {
r, _ := strconv.ParseInt(ctx.GetCookie(name), 10, 64)
r, _ := strconv.ParseInt(ctx.GetSiteCookie(name), 10, 64)
return r
}
// GetCookieFloat64 returns cookie result in float64 type.
func (ctx *Context) GetCookieFloat64(name string) float64 {
v, _ := strconv.ParseFloat(ctx.GetCookie(name), 64)
v, _ := strconv.ParseFloat(ctx.GetSiteCookie(name), 64)
return v
}
@ -663,7 +624,10 @@ func WithContext(req *http.Request, ctx *Context) *http.Request {
// GetContext retrieves install context from request
func GetContext(req *http.Request) *Context {
return req.Context().Value(contextKey).(*Context)
if ctx, ok := req.Context().Value(contextKey).(*Context); ok {
return ctx
}
return nil
}
// GetContextUser returns context user
@ -730,13 +694,13 @@ func Contexter(ctx context.Context) func(next http.Handler) http.Handler {
ctx.Data["Context"] = &ctx
ctx.Req = WithContext(req, &ctx)
ctx.csrf = PrepareCSRFProtector(csrfOpts, &ctx)
ctx.Csrf = PrepareCSRFProtector(csrfOpts, &ctx)
// Get flash.
flashCookie := ctx.GetCookie("macaron_flash")
vals, _ := url.ParseQuery(flashCookie)
if len(vals) > 0 {
f := &middleware.Flash{
// Get the last flash message from cookie
lastFlashCookie := middleware.GetSiteCookie(ctx.Req, CookieNameFlash)
if vals, _ := url.ParseQuery(lastFlashCookie); len(vals) > 0 {
// store last Flash message into the template data, to render it
ctx.Data["Flash"] = &middleware.Flash{
DataStore: &ctx,
Values: vals,
ErrorMsg: vals.Get("error"),
@ -744,40 +708,18 @@ func Contexter(ctx context.Context) func(next http.Handler) http.Handler {
InfoMsg: vals.Get("info"),
WarningMsg: vals.Get("warning"),
}
ctx.Data["Flash"] = f
}
f := &middleware.Flash{
DataStore: &ctx,
Values: url.Values{},
ErrorMsg: "",
WarningMsg: "",
InfoMsg: "",
SuccessMsg: "",
}
// prepare an empty Flash message for current request
ctx.Flash = &middleware.Flash{DataStore: &ctx, Values: url.Values{}}
ctx.Resp.Before(func(resp ResponseWriter) {
if flash := f.Encode(); len(flash) > 0 {
middleware.SetCookie(resp, "macaron_flash", flash, 0,
setting.SessionConfig.CookiePath,
middleware.Domain(setting.SessionConfig.Domain),
middleware.HTTPOnly(true),
middleware.Secure(setting.SessionConfig.Secure),
middleware.SameSite(setting.SessionConfig.SameSite),
)
return
if val := ctx.Flash.Encode(); val != "" {
middleware.SetSiteCookie(ctx.Resp, CookieNameFlash, val, 0)
} else if lastFlashCookie != "" {
middleware.SetSiteCookie(ctx.Resp, CookieNameFlash, "", -1)
}
middleware.SetCookie(ctx.Resp, "macaron_flash", "", -1,
setting.SessionConfig.CookiePath,
middleware.Domain(setting.SessionConfig.Domain),
middleware.HTTPOnly(true),
middleware.Secure(setting.SessionConfig.Secure),
middleware.SameSite(setting.SessionConfig.SameSite),
)
})
ctx.Flash = f
// If request sends files, parse them here otherwise the Query() can't be parsed and the CsrfToken will be invalid.
if ctx.Req.Method == "POST" && strings.Contains(ctx.Req.Header.Get("Content-Type"), "multipart/form-data") {
if err := ctx.Req.ParseMultipartForm(setting.Attachment.MaxSize << 20); err != nil && !strings.Contains(err.Error(), "EOF") { // 32MB max size
@ -789,7 +731,7 @@ func Contexter(ctx context.Context) func(next http.Handler) http.Handler {
httpcache.SetCacheControlInHeader(ctx.Resp.Header(), 0, "no-transform")
ctx.Resp.Header().Set(`X-Frame-Options`, setting.CORSConfig.XFrameOptions)
ctx.Data["CsrfToken"] = ctx.csrf.GetToken()
ctx.Data["CsrfToken"] = ctx.Csrf.GetToken()
ctx.Data["CsrfTokenHtml"] = template.HTML(`<input type="hidden" name="_csrf" value="` + ctx.Data["CsrfToken"].(string) + `">`)
// FIXME: do we really always need these settings? There should be some way to avoid having to always set these
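
The reworked flash handling above reads the previous request's flash from the gitea_flash cookie, exposes it to the template, and at response time either persists the current request's flash or clears the stale cookie. A minimal standalone sketch of that cookie round-trip using only the standard library; the handler and messages are illustrative and this is not Gitea's middleware code.

package main

import (
    "net/http"
    "net/url"
)

const cookieNameFlash = "gitea_flash" // same name as the constant introduced above

// writeFlash persists the flash values for the next request, or deletes a
// stale flash cookie when there is nothing left to show.
func writeFlash(w http.ResponseWriter, vals url.Values, hadPrevious bool) {
    if encoded := vals.Encode(); encoded != "" {
        http.SetCookie(w, &http.Cookie{Name: cookieNameFlash, Value: encoded, Path: "/", HttpOnly: true})
    } else if hadPrevious {
        http.SetCookie(w, &http.Cookie{Name: cookieNameFlash, Value: "", Path: "/", MaxAge: -1})
    }
}

// readFlash decodes the previous request's flash values from the cookie.
func readFlash(r *http.Request) url.Values {
    c, err := r.Cookie(cookieNameFlash)
    if err != nil {
        return url.Values{}
    }
    vals, _ := url.ParseQuery(c.Value)
    return vals
}

func main() {
    http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
        previous := readFlash(r)
        flash := url.Values{}
        flash.Set("error", "something went wrong")
        writeFlash(w, flash, len(previous) > 0)
    })
    _ = http.ListenAndServe("127.0.0.1:8080", nil)
}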

View File

@ -42,37 +42,26 @@ type CSRFProtector interface {
GetToken() string
// Validate validates the token in http context.
Validate(ctx *Context)
// DeleteCookie deletes the cookie
DeleteCookie(ctx *Context)
}
type csrfProtector struct {
// Header name value for setting and getting csrf token.
Header string
// Form name value for setting and getting csrf token.
Form string
// Cookie name value for setting and getting csrf token.
Cookie string
// Cookie domain
CookieDomain string
// Cookie path
CookiePath string
// Cookie HttpOnly flag value used for the csrf token.
CookieHTTPOnly bool
opt CsrfOptions
// Token generated to pass via header, cookie, or hidden form value.
Token string
// This value must be unique per user.
ID string
// Secret used along with the unique id above to generate the Token.
Secret string
}
// GetHeaderName returns the name of the HTTP header for csrf token.
func (c *csrfProtector) GetHeaderName() string {
return c.Header
return c.opt.Header
}
// GetFormName returns the name of the form value for csrf token.
func (c *csrfProtector) GetFormName() string {
return c.Form
return c.opt.Form
}
// GetToken returns the current token. This is typically used
@ -138,23 +127,32 @@ func prepareDefaultCsrfOptions(opt CsrfOptions) CsrfOptions {
if opt.SessionKey == "" {
opt.SessionKey = "uid"
}
if opt.CookieLifeTime == 0 {
opt.CookieLifeTime = int(CsrfTokenTimeout.Seconds())
}
opt.oldSessionKey = "_old_" + opt.SessionKey
return opt
}
func newCsrfCookie(c *csrfProtector, value string) *http.Cookie {
return &http.Cookie{
Name: c.opt.Cookie,
Value: value,
Path: c.opt.CookiePath,
Domain: c.opt.CookieDomain,
MaxAge: c.opt.CookieLifeTime,
Secure: c.opt.Secure,
HttpOnly: c.opt.CookieHTTPOnly,
SameSite: c.opt.SameSite,
}
}
// PrepareCSRFProtector returns a CSRFProtector to be used for every request.
// Additionally, depending on options set, generated tokens will be sent via Header and/or Cookie.
func PrepareCSRFProtector(opt CsrfOptions, ctx *Context) CSRFProtector {
opt = prepareDefaultCsrfOptions(opt)
x := &csrfProtector{
Secret: opt.Secret,
Header: opt.Header,
Form: opt.Form,
Cookie: opt.Cookie,
CookieDomain: opt.CookieDomain,
CookiePath: opt.CookiePath,
CookieHTTPOnly: opt.CookieHTTPOnly,
}
x := &csrfProtector{opt: opt}
if opt.Origin && len(ctx.Req.Header.Get("Origin")) > 0 {
return x
@ -175,7 +173,7 @@ func PrepareCSRFProtector(opt CsrfOptions, ctx *Context) CSRFProtector {
oldUID := ctx.Session.Get(opt.oldSessionKey)
uidChanged := oldUID == nil || oldUID.(string) != x.ID
cookieToken := ctx.GetCookie(opt.Cookie)
cookieToken := ctx.GetSiteCookie(opt.Cookie)
needsNew := true
if uidChanged {
@ -193,21 +191,10 @@ func PrepareCSRFProtector(opt CsrfOptions, ctx *Context) CSRFProtector {
if needsNew {
// FIXME: actionId.
x.Token = GenerateCsrfToken(x.Secret, x.ID, "POST", time.Now())
x.Token = GenerateCsrfToken(x.opt.Secret, x.ID, "POST", time.Now())
if opt.SetCookie {
var expires interface{}
if opt.CookieLifeTime == 0 {
expires = time.Now().Add(CsrfTokenTimeout)
}
middleware.SetCookie(ctx.Resp, opt.Cookie, x.Token,
opt.CookieLifeTime,
opt.CookiePath,
opt.CookieDomain,
opt.Secure,
opt.CookieHTTPOnly,
expires,
middleware.SameSite(opt.SameSite),
)
cookie := newCsrfCookie(x, x.Token)
ctx.Resp.Header().Add("Set-Cookie", cookie.String())
}
}
@ -218,8 +205,8 @@ func PrepareCSRFProtector(opt CsrfOptions, ctx *Context) CSRFProtector {
}
func (c *csrfProtector) validateToken(ctx *Context, token string) {
if !ValidCsrfToken(token, c.Secret, c.ID, "POST", time.Now()) {
middleware.DeleteCSRFCookie(ctx.Resp)
if !ValidCsrfToken(token, c.opt.Secret, c.ID, "POST", time.Now()) {
c.DeleteCookie(ctx)
if middleware.IsAPIPath(ctx.Req) {
// currently, there should be no access to the APIPath with CSRF token. because templates shouldn't use the `/api/` endpoints.
http.Error(ctx.Resp, "Invalid CSRF token.", http.StatusBadRequest)
@ -245,3 +232,11 @@ func (c *csrfProtector) Validate(ctx *Context) {
}
c.validateToken(ctx, "") // no csrf token, use an empty token to respond error
}
func (c *csrfProtector) DeleteCookie(ctx *Context) {
if c.opt.SetCookie {
cookie := newCsrfCookie(c, "")
cookie.MaxAge = -1
ctx.Resp.Header().Add("Set-Cookie", cookie.String())
}
}
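
newCsrfCookie and DeleteCookie above follow the standard expiry convention: to drop a cookie, re-send it with an empty value and MaxAge: -1 (serialized as Max-Age=0) so the browser discards it. A tiny illustration with net/http; the cookie name and handler are hypothetical.

package main

import "net/http"

// clearCookie tells the browser to delete the named cookie by re-sending it
// with an empty value and a negative MaxAge.
func clearCookie(w http.ResponseWriter, name string) {
    http.SetCookie(w, &http.Cookie{
        Name:     name,
        Value:    "",
        Path:     "/",
        MaxAge:   -1,
        HttpOnly: true,
    })
}

func main() {
    http.HandleFunc("/logout", func(w http.ResponseWriter, r *http.Request) {
        clearCookie(w, "_csrf") // hypothetical CSRF cookie name
        w.WriteHeader(http.StatusNoContent)
    })
    _ = http.ListenAndServe("127.0.0.1:8080", nil)
}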

View File

@ -184,6 +184,9 @@ func (r *Repository) CanCreateIssueDependencies(user *user_model.User, isPull bo
// GetCommitsCount returns cached commit count for current view
func (r *Repository) GetCommitsCount() (int64, error) {
if r.Commit == nil {
return 0, nil
}
var contextName string
if r.IsViewBranch {
contextName = r.BranchName
@ -642,8 +645,7 @@ func RepoAssignment(ctx *Context) (cancel context.CancelFunc) {
if err != nil {
if strings.Contains(err.Error(), "repository does not exist") || strings.Contains(err.Error(), "no such file or directory") {
log.Error("Repository %-v has a broken repository on the file system: %s Error: %v", ctx.Repo.Repository, ctx.Repo.Repository.RepoPath(), err)
ctx.Repo.Repository.Status = repo_model.RepositoryBroken
ctx.Repo.Repository.IsEmpty = true
ctx.Repo.Repository.MarkAsBrokenEmpty()
ctx.Data["BranchName"] = ctx.Repo.Repository.DefaultBranch
// Only allow access to base of repo or settings
if !isHomeOrSettings {
@ -689,7 +691,7 @@ func RepoAssignment(ctx *Context) (cancel context.CancelFunc) {
ctx.Data["BranchesCount"] = len(brs)
// If not branch selected, try default one.
// If default branch doesn't exists, fall back to some other branch.
// If default branch doesn't exist, fall back to some other branch.
if len(ctx.Repo.BranchName) == 0 {
if len(ctx.Repo.Repository.DefaultBranch) > 0 && gitRepo.IsBranchExist(ctx.Repo.Repository.DefaultBranch) {
ctx.Repo.BranchName = ctx.Repo.Repository.DefaultBranch
@ -879,6 +881,10 @@ func RepoRefByType(refType RepoRefType, ignoreNotExistErr ...bool) func(*Context
return func(ctx *Context) (cancel context.CancelFunc) {
// Empty repository does not have reference information.
if ctx.Repo.Repository.IsEmpty {
// assume the user is viewing the (non-existent) default branch
ctx.Repo.IsViewBranch = true
ctx.Repo.BranchName = ctx.Repo.Repository.DefaultBranch
ctx.Data["TreePath"] = ""
return
}
@ -908,27 +914,30 @@ func RepoRefByType(refType RepoRefType, ignoreNotExistErr ...bool) func(*Context
refName = ctx.Repo.Repository.DefaultBranch
if !ctx.Repo.GitRepo.IsBranchExist(refName) {
brs, _, err := ctx.Repo.GitRepo.GetBranchNames(0, 0)
if err != nil {
ctx.ServerError("GetBranches", err)
return
if err == nil && len(brs) != 0 {
refName = brs[0]
} else if len(brs) == 0 {
err = fmt.Errorf("No branches in non-empty repository %s",
ctx.Repo.GitRepo.Path)
ctx.ServerError("GetBranches", err)
return
log.Error("No branches in non-empty repository %s", ctx.Repo.GitRepo.Path)
ctx.Repo.Repository.MarkAsBrokenEmpty()
} else {
log.Error("GetBranches error: %v", err)
ctx.Repo.Repository.MarkAsBrokenEmpty()
}
refName = brs[0]
}
ctx.Repo.RefName = refName
ctx.Repo.BranchName = refName
ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetBranchCommit(refName)
if err != nil {
if err == nil {
ctx.Repo.CommitID = ctx.Repo.Commit.ID.String()
} else if strings.Contains(err.Error(), "fatal: not a git repository") || strings.Contains(err.Error(), "object does not exist") {
// if the repository is broken, we can continue to the handler code, to show "Settings -> Delete Repository" for end users
log.Error("GetBranchCommit: %v", err)
ctx.Repo.Repository.MarkAsBrokenEmpty()
} else {
ctx.ServerError("GetBranchCommit", err)
return
}
ctx.Repo.CommitID = ctx.Repo.Commit.ID.String()
ctx.Repo.IsViewBranch = true
} else {
refName = getRefName(ctx, refType)
ctx.Repo.RefName = refName

View File

@ -13,6 +13,7 @@ import (
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/markup"
"code.gitea.io/gitea/modules/translation"
"github.com/stretchr/testify/assert"
)
@ -550,20 +551,6 @@ a|"he said, ""here I am"""`,
}
}
type mockLocale struct{}
func (l mockLocale) Language() string {
return "en"
}
func (l mockLocale) Tr(s string, _ ...interface{}) string {
return s
}
func (l mockLocale) TrN(_cnt interface{}, key1, _keyN string, _args ...interface{}) string {
return key1
}
func TestFormatError(t *testing.T) {
cases := []struct {
err error
@ -591,7 +578,7 @@ func TestFormatError(t *testing.T) {
}
for n, c := range cases {
message, err := FormatError(c.err, mockLocale{})
message, err := FormatError(c.err, &translation.MockLocale{})
if c.expectsError {
assert.Error(t, err, "case %d: expected an error to be returned", n)
} else {

View File

@ -9,7 +9,6 @@ import (
"os"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/options"
"code.gitea.io/gitea/modules/setting"
)
@ -79,7 +78,7 @@ func checkConfigurationFiles(ctx context.Context, logger log.Logger, autofix boo
{"Log Root Path", setting.Log.RootPath, true, true, true},
}
if options.IsDynamic() {
if !setting.HasBuiltinBindata {
configurationFiles = append(configurationFiles, configurationFile{"Static File Root Path", setting.StaticRootPath, true, true, false})
}

View File

@ -40,7 +40,7 @@ const DefaultLocale = "C"
// Command represents a command with its subcommands or arguments.
type Command struct {
name string
prog string
args []string
parentContext context.Context
desc string
@ -49,10 +49,28 @@ type Command struct {
}
func (c *Command) String() string {
if len(c.args) == 0 {
return c.name
return c.toString(false)
}
func (c *Command) toString(sanitizing bool) string {
// WARNING: this function is for debugging purposes only. It's much better than the old code (which only joined args with a space),
// but it's impossible to make a simple and 100% correct implementation of argument quoting for different platforms.
debugQuote := func(s string) string {
if strings.ContainsAny(s, " `'\"\t\r\n") {
return fmt.Sprintf("%q", s)
}
return s
}
return fmt.Sprintf("%s %s", c.name, strings.Join(c.args, " "))
a := make([]string, 0, len(c.args)+1)
a = append(a, debugQuote(c.prog))
for _, arg := range c.args {
if sanitizing && (strings.Contains(arg, "://") && strings.Contains(arg, "@")) {
a = append(a, debugQuote(util.SanitizeCredentialURLs(arg)))
} else {
a = append(a, debugQuote(arg))
}
}
return strings.Join(a, " ")
}
// NewCommand creates and returns a new Git Command based on given command and arguments.
@ -67,7 +85,7 @@ func NewCommand(ctx context.Context, args ...internal.CmdArg) *Command {
cargs = append(cargs, string(arg))
}
return &Command{
name: GitExecutable,
prog: GitExecutable,
args: cargs,
parentContext: ctx,
globalArgsLength: len(globalCommandArgs),
@ -82,7 +100,7 @@ func NewCommandContextNoGlobals(ctx context.Context, args ...internal.CmdArg) *C
cargs = append(cargs, string(arg))
}
return &Command{
name: GitExecutable,
prog: GitExecutable,
args: cargs,
parentContext: ctx,
}
@ -193,10 +211,18 @@ type RunOpts struct {
Env []string
Timeout time.Duration
UseContextTimeout bool
Dir string
Stdout, Stderr io.Writer
Stdin io.Reader
PipelineFunc func(context.Context, context.CancelFunc) error
// Dir is the working dir for the git command, however:
// FIXME: this could be incorrect in many cases, for example:
// * /some/path/.git
// * /some/path/.git/gitea-data/data/repositories/user/repo.git
// If "user/repo.git" is invalid/broken, then running git command in it will use "/some/path/.git", and produce unexpected results
// The correct approach is to use the `--git-dir` global argument
Dir string
Stdout, Stderr io.Writer
Stdin io.Reader
PipelineFunc func(context.Context, context.CancelFunc) error
}
func commonBaseEnvs() []string {
@ -250,28 +276,18 @@ func (c *Command) Run(opts *RunOpts) error {
}
if len(opts.Dir) == 0 {
log.Debug("%s", c)
log.Debug("git.Command.Run: %s", c)
} else {
log.Debug("%s: %v", opts.Dir, c)
log.Debug("git.Command.RunDir(%s): %s", opts.Dir, c)
}
desc := c.desc
if desc == "" {
args := c.args[c.globalArgsLength:]
var argSensitiveURLIndexes []int
for i, arg := range c.args {
if strings.Contains(arg, "://") && strings.Contains(arg, "@") {
argSensitiveURLIndexes = append(argSensitiveURLIndexes, i)
}
if opts.Dir == "" {
desc = fmt.Sprintf("git: %s", c.toString(true))
} else {
desc = fmt.Sprintf("git(dir:%s): %s", opts.Dir, c.toString(true))
}
if len(argSensitiveURLIndexes) > 0 {
args = make([]string, len(c.args))
copy(args, c.args)
for _, urlArgIndex := range argSensitiveURLIndexes {
args[urlArgIndex] = util.SanitizeCredentialURLs(args[urlArgIndex])
}
}
desc = fmt.Sprintf("%s %s [repo_path: %s]", c.name, strings.Join(args, " "), opts.Dir)
}
var ctx context.Context
@ -285,7 +301,7 @@ func (c *Command) Run(opts *RunOpts) error {
}
defer finished()
cmd := exec.CommandContext(ctx, c.name, c.args...)
cmd := exec.CommandContext(ctx, c.prog, c.args...)
if opts.Env == nil {
cmd.Env = os.Environ()
} else {

View File

@ -52,3 +52,11 @@ func TestGitArgument(t *testing.T) {
assert.True(t, isSafeArgumentValue("x"))
assert.False(t, isSafeArgumentValue("-x"))
}
func TestCommandString(t *testing.T) {
cmd := NewCommandContextNoGlobals(context.Background(), "a", "-m msg", "it's a test", `say "hello"`)
assert.EqualValues(t, cmd.prog+` a "-m msg" "it's a test" "say \"hello\""`, cmd.String())
cmd = NewCommandContextNoGlobals(context.Background(), "url: https://a:b@c/")
assert.EqualValues(t, cmd.prog+` "url: https://sanitized-credential@c/"`, cmd.toString(true))
}
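
TestCommandString above exercises the new String()/toString() behaviour: arguments containing spaces or quotes are re-quoted with %q, and credential-bearing URLs are masked in the sanitized form. The quoting part is easy to show in isolation; the following sketch re-implements only that idea (util.SanitizeCredentialURLs is Gitea's helper and is not reproduced here).

package main

import (
    "fmt"
    "strings"
)

// debugQuoteArg mirrors the quoting idea above: only quote arguments that
// would be ambiguous when joined with spaces into a log line.
func debugQuoteArg(s string) string {
    if strings.ContainsAny(s, " `'\"\t\r\n") {
        return fmt.Sprintf("%q", s)
    }
    return s
}

func main() {
    args := []string{"commit", "-m", `fix: handle "quoted" args`}
    quoted := make([]string, 0, len(args))
    for _, a := range args {
        quoted = append(quoted, debugQuoteArg(a))
    }
    fmt.Println("git " + strings.Join(quoted, " "))
    // Output: git commit -m "fix: handle \"quoted\" args"
}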

View File

@ -80,7 +80,7 @@ func InitRepository(ctx context.Context, repoPath string, bare bool) error {
// IsEmpty Check if repository is empty.
func (repo *Repository) IsEmpty() (bool, error) {
var errbuf, output strings.Builder
if err := NewCommand(repo.Ctx, "show-ref", "--head", "^HEAD$").
if err := NewCommand(repo.Ctx).AddOptionFormat("--git-dir=%s", repo.Path).AddArguments("show-ref", "--head", "^HEAD$").
Run(&RunOpts{
Dir: repo.Path,
Stdout: &output,
@ -209,49 +209,22 @@ func Push(ctx context.Context, repoPath string, opts PushOptions) error {
} else {
cmd.SetDescription(fmt.Sprintf("push branch %s to %s (force: %t, mirror: %t)", opts.Branch, opts.Remote, opts.Force, opts.Mirror))
}
var outbuf, errbuf strings.Builder
if opts.Timeout == 0 {
opts.Timeout = -1
}
err := cmd.Run(&RunOpts{
Env: opts.Env,
Timeout: opts.Timeout,
Dir: repoPath,
Stdout: &outbuf,
Stderr: &errbuf,
})
stdout, stderr, err := cmd.RunStdString(&RunOpts{Env: opts.Env, Timeout: opts.Timeout, Dir: repoPath})
if err != nil {
if strings.Contains(errbuf.String(), "non-fast-forward") {
return &ErrPushOutOfDate{
StdOut: outbuf.String(),
StdErr: errbuf.String(),
Err: err,
}
} else if strings.Contains(errbuf.String(), "! [remote rejected]") {
err := &ErrPushRejected{
StdOut: outbuf.String(),
StdErr: errbuf.String(),
Err: err,
}
if strings.Contains(stderr, "non-fast-forward") {
return &ErrPushOutOfDate{StdOut: stdout, StdErr: stderr, Err: err}
} else if strings.Contains(stderr, "! [remote rejected]") {
err := &ErrPushRejected{StdOut: stdout, StdErr: stderr, Err: err}
err.GenerateMessage()
return err
} else if strings.Contains(errbuf.String(), "matches more than one") {
err := &ErrMoreThanOne{
StdOut: outbuf.String(),
StdErr: errbuf.String(),
Err: err,
}
return err
} else if strings.Contains(stderr, "matches more than one") {
return &ErrMoreThanOne{StdOut: stdout, StdErr: stderr, Err: err}
}
return fmt.Errorf("push failed: %w - %s\n%s", err, stderr, stdout)
}
if errbuf.Len() > 0 && err != nil {
return fmt.Errorf("%w - %s", err, errbuf.String())
}
return err
return nil
}
// GetLatestCommitTime returns time for latest commit in repository (across all branches)
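
The Push refactor above replaces the manual output buffers with RunStdString and classifies failures by matching well-known substrings in git's stderr. The same classification idea in isolation, with stand-in sentinel errors instead of Gitea's ErrPushOutOfDate/ErrPushRejected/ErrMoreThanOne types:

package main

import (
    "errors"
    "fmt"
    "strings"
)

var (
    errPushOutOfDate = errors.New("push out of date (non-fast-forward)")
    errPushRejected  = errors.New("push rejected by remote")
    errMoreThanOne   = errors.New("refspec matches more than one ref")
)

// classifyPushError maps git push stderr output to a coarse error category,
// mirroring the substring checks used above.
func classifyPushError(stderr string, err error) error {
    switch {
    case strings.Contains(stderr, "non-fast-forward"):
        return errPushOutOfDate
    case strings.Contains(stderr, "! [remote rejected]"):
        return errPushRejected
    case strings.Contains(stderr, "matches more than one"):
        return errMoreThanOne
    default:
        return fmt.Errorf("push failed: %w - %s", err, stderr)
    }
}

func main() {
    err := classifyPushError("error: failed to push some refs\nhint: non-fast-forward", errors.New("exit status 1"))
    fmt.Println(err) // push out of date (non-fast-forward)
}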

View File

@ -61,7 +61,7 @@ func OpenRepository(ctx context.Context, repoPath string) (*Repository, error) {
}
repo.batchWriter, repo.batchReader, repo.batchCancel = CatFileBatch(ctx, repoPath)
repo.checkWriter, repo.checkReader, repo.checkCancel = CatFileBatchCheck(ctx, repo.Path)
repo.checkWriter, repo.checkReader, repo.checkCancel = CatFileBatchCheck(ctx, repoPath)
return repo, nil
}

View File

@ -84,6 +84,9 @@ func (repo *Repository) GetCommitByPath(relpath string) (*Commit, error) {
if err != nil {
return nil, err
}
if len(commits) == 0 {
return nil, ErrNotExist{ID: relpath}
}
return commits[0], nil
}

View File

@ -96,7 +96,7 @@ func (g *Manager) start() {
run := svc.Run
//lint:ignore SA1019 We use IsAnInteractiveSession because IsWindowsService has a different permissions profile
isAnInteractiveSession, err := svc.IsAnInteractiveSession()
isAnInteractiveSession, err := svc.IsAnInteractiveSession() //nolint:staticcheck
if err != nil {
log.Error("Unable to ascertain if running as an Windows Service: %v", err)
return

View File

@ -154,7 +154,9 @@ func Init() {
log.Trace("IndexerData Process Repo: %d", indexerData.RepoID)
if err := index(ctx, indexer, indexerData.RepoID); err != nil {
log.Error("index: %v", err)
if !setting.IsInTesting {
log.Error("indexer index error for repo %v: %v", indexerData.RepoID, err)
}
if indexer.Ping() {
continue
}

View File

@ -6,6 +6,7 @@ package issues
import (
"context"
"strconv"
"strings"
"sync"
"time"
@ -120,10 +121,11 @@ func (b *MeilisearchIndexer) Delete(ids ...int64) error {
// Search searches for issues by given conditions.
// Returns the matching issue IDs
func (b *MeilisearchIndexer) Search(ctx context.Context, keyword string, repoIDs []int64, limit, start int) (*SearchResult, error) {
filter := make([][]string, 0, len(repoIDs))
repoFilters := make([]string, 0, len(repoIDs))
for _, repoID := range repoIDs {
filter = append(filter, []string{"repo_id = " + strconv.FormatInt(repoID, 10)})
repoFilters = append(repoFilters, "repo_id = "+strconv.FormatInt(repoID, 10))
}
filter := strings.Join(repoFilters, " OR ")
searchRes, err := b.client.Index(b.indexerName).Search(keyword, &meilisearch.SearchRequest{
Filter: filter,
Limit: int64(limit),
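
The Meilisearch change above flattens the filter from a nested [][]string into a single expression that ORs one repo_id clause per repository. A minimal sketch of building that filter string; the repository IDs are made up.

package main

import (
    "fmt"
    "strconv"
    "strings"
)

// buildRepoFilter joins per-repository clauses into one Meilisearch filter
// expression such as "repo_id = 1 OR repo_id = 5".
func buildRepoFilter(repoIDs []int64) string {
    clauses := make([]string, 0, len(repoIDs))
    for _, id := range repoIDs {
        clauses = append(clauses, "repo_id = "+strconv.FormatInt(id, 10))
    }
    return strings.Join(clauses, " OR ")
}

func main() {
    fmt.Println(buildRepoFilter([]int64{1, 5, 42})) // repo_id = 1 OR repo_id = 5 OR repo_id = 42
}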

View File

@ -11,6 +11,7 @@ import (
"code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/process"
"code.gitea.io/gitea/modules/setting"
)
// DBIndexer implements Indexer interface to use database's like search
@ -46,7 +47,7 @@ func (db *DBIndexer) Index(id int64) error {
// Get latest commit for default branch
commitID, err := gitRepo.GetBranchCommitID(repo.DefaultBranch)
if err != nil {
if git.IsErrBranchNotExist(err) || git.IsErrNotExist(err) {
if git.IsErrBranchNotExist(err) || git.IsErrNotExist(err) || setting.IsInTesting {
log.Debug("Unable to get commit ID for default branch %s in %s ... skipping this repository", repo.DefaultBranch, repo.RepoPath())
return nil
}
@ -62,7 +63,9 @@ func (db *DBIndexer) Index(id int64) error {
// Calculate and save language statistics to database
stats, err := gitRepo.GetLanguageStats(commitID)
if err != nil {
log.Error("Unable to get language stats for ID %s for default branch %s in %s. Error: %v", commitID, repo.DefaultBranch, repo.RepoPath(), err)
if !setting.IsInTesting {
log.Error("Unable to get language stats for ID %s for default branch %s in %s. Error: %v", commitID, repo.DefaultBranch, repo.RepoPath(), err)
}
return err
}
err = repo_model.UpdateLanguageStats(repo, commitID, stats)

View File

@ -10,6 +10,7 @@ import (
"code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/queue"
"code.gitea.io/gitea/modules/setting"
)
// statsQueue represents a queue to handle repository stats updates
@ -20,7 +21,9 @@ func handle(data ...queue.Data) []queue.Data {
for _, datum := range data {
opts := datum.(int64)
if err := indexer.Index(opts); err != nil {
log.Error("stats queue indexer.Index(%d) failed: %v", opts, err)
if !setting.IsInTesting {
log.Error("stats queue indexer.Index(%d) failed: %v", opts, err)
}
}
}
return nil

View File

@ -30,31 +30,23 @@ func IsErrTemplateLoad(err error) bool {
}
func (err ErrTemplateLoad) Error() string {
return fmt.Sprintf("Failed to load label template file '%s': %v", err.TemplateFile, err.OriginalError)
return fmt.Sprintf("failed to load label template file %q: %v", err.TemplateFile, err.OriginalError)
}
// GetTemplateFile loads the label template file by given name,
// then parses and returns a list of name-color pairs and optionally description.
func GetTemplateFile(name string) ([]*Label, error) {
data, err := options.Labels(name + ".yaml")
if err == nil && len(data) > 0 {
return parseYamlFormat(name+".yaml", data)
}
data, err = options.Labels(name + ".yml")
if err == nil && len(data) > 0 {
return parseYamlFormat(name+".yml", data)
}
data, err = options.Labels(name)
// LoadTemplateFile loads the label template file by given file name, returns a slice of Label structs.
func LoadTemplateFile(fileName string) ([]*Label, error) {
data, err := options.Labels(fileName)
if err != nil {
return nil, ErrTemplateLoad{name, fmt.Errorf("GetRepoInitFile: %w", err)}
return nil, ErrTemplateLoad{fileName, fmt.Errorf("LoadTemplateFile: %w", err)}
}
return parseLegacyFormat(name, data)
if strings.HasSuffix(fileName, ".yaml") || strings.HasSuffix(fileName, ".yml") {
return parseYamlFormat(fileName, data)
}
return parseLegacyFormat(fileName, data)
}
func parseYamlFormat(name string, data []byte) ([]*Label, error) {
func parseYamlFormat(fileName string, data []byte) ([]*Label, error) {
lf := &labelFile{}
if err := yaml.Unmarshal(data, lf); err != nil {
@ -65,11 +57,11 @@ func parseYamlFormat(name string, data []byte) ([]*Label, error) {
for _, l := range lf.Labels {
l.Color = strings.TrimSpace(l.Color)
if len(l.Name) == 0 || len(l.Color) == 0 {
return nil, ErrTemplateLoad{name, errors.New("label name and color are required fields")}
return nil, ErrTemplateLoad{fileName, errors.New("label name and color are required fields")}
}
color, err := NormalizeColor(l.Color)
if err != nil {
return nil, ErrTemplateLoad{name, fmt.Errorf("bad HTML color code '%s' in label: %s", l.Color, l.Name)}
return nil, ErrTemplateLoad{fileName, fmt.Errorf("bad HTML color code '%s' in label: %s", l.Color, l.Name)}
}
l.Color = color
}
@ -77,7 +69,7 @@ func parseYamlFormat(name string, data []byte) ([]*Label, error) {
return lf.Labels, nil
}
func parseLegacyFormat(name string, data []byte) ([]*Label, error) {
func parseLegacyFormat(fileName string, data []byte) ([]*Label, error) {
lines := strings.Split(string(data), "\n")
list := make([]*Label, 0, len(lines))
for i := 0; i < len(lines); i++ {
@ -88,18 +80,18 @@ func parseLegacyFormat(name string, data []byte) ([]*Label, error) {
parts, description, _ := strings.Cut(line, ";")
color, name, ok := strings.Cut(parts, " ")
color, labelName, ok := strings.Cut(parts, " ")
if !ok {
return nil, ErrTemplateLoad{name, fmt.Errorf("line is malformed: %s", line)}
return nil, ErrTemplateLoad{fileName, fmt.Errorf("line is malformed: %s", line)}
}
color, err := NormalizeColor(color)
if err != nil {
return nil, ErrTemplateLoad{name, fmt.Errorf("bad HTML color code '%s' in line: %s", color, line)}
return nil, ErrTemplateLoad{fileName, fmt.Errorf("bad HTML color code '%s' in line: %s", color, line)}
}
list = append(list, &Label{
Name: strings.TrimSpace(name),
Name: strings.TrimSpace(labelName),
Color: color,
Description: strings.TrimSpace(description),
})
@ -108,10 +100,10 @@ func parseLegacyFormat(name string, data []byte) ([]*Label, error) {
return list, nil
}
// LoadFormatted loads the labels' list of a template file as a string separated by comma
func LoadFormatted(name string) (string, error) {
// LoadTemplateDescription loads the labels from a template file, returns a description string by joining each Label.Name with comma
func LoadTemplateDescription(fileName string) (string, error) {
var buf strings.Builder
list, err := GetTemplateFile(name)
list, err := LoadTemplateFile(fileName)
if err != nil {
return "", err
}
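
LoadTemplateFile above now dispatches on the file extension: .yaml/.yml files go through the YAML parser, anything else through the legacy one-label-per-line format (color, name, and an optional description after a semicolon). A standalone sketch of parsing one legacy line with strings.Cut; the types are simplified and color normalization is omitted.

package main

import (
    "fmt"
    "strings"
)

type label struct {
    Name, Color, Description string
}

// parseLegacyLabelLine parses one "#color name ; description" line from a
// legacy label template, mirroring the strings.Cut logic above.
func parseLegacyLabelLine(line string) (label, error) {
    parts, description, _ := strings.Cut(line, ";")
    color, name, ok := strings.Cut(strings.TrimSpace(parts), " ")
    if !ok {
        return label{}, fmt.Errorf("line is malformed: %s", line)
    }
    return label{
        Name:        strings.TrimSpace(name),
        Color:       strings.TrimSpace(color),
        Description: strings.TrimSpace(description),
    }, nil
}

func main() {
    l, err := parseLegacyLabelLine("#ee0701 bug ; Something isn't working")
    fmt.Println(l, err) // {bug #ee0701 Something isn't working} <nil>
}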

View File

@ -1,71 +0,0 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package log
import (
"bytes"
"sync"
)
type bufferWriteCloser struct {
mu sync.Mutex
buffer bytes.Buffer
}
func (b *bufferWriteCloser) Write(p []byte) (int, error) {
b.mu.Lock()
defer b.mu.Unlock()
return b.buffer.Write(p)
}
func (b *bufferWriteCloser) Close() error {
return nil
}
func (b *bufferWriteCloser) String() string {
b.mu.Lock()
defer b.mu.Unlock()
return b.buffer.String()
}
// BufferLogger implements LoggerProvider and writes messages in a buffer.
type BufferLogger struct {
WriterLogger
}
// NewBufferLogger create BufferLogger returning as LoggerProvider.
func NewBufferLogger() LoggerProvider {
log := &BufferLogger{}
log.NewWriterLogger(&bufferWriteCloser{})
return log
}
// Init inits connection writer
func (log *BufferLogger) Init(string) error {
log.NewWriterLogger(log.out)
return nil
}
// Content returns the content accumulated in the content provider
func (log *BufferLogger) Content() (string, error) {
return log.out.(*bufferWriteCloser).String(), nil
}
// Flush when log should be flushed
func (log *BufferLogger) Flush() {
}
// ReleaseReopen does nothing
func (log *BufferLogger) ReleaseReopen() error {
return nil
}
// GetName returns the default name for this implementation
func (log *BufferLogger) GetName() string {
return "buffer"
}
func init() {
Register("buffer", NewBufferLogger)
}

View File

@ -1,63 +0,0 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package log
import (
"fmt"
"strings"
"testing"
"time"
"github.com/stretchr/testify/assert"
)
func TestBufferLogger(t *testing.T) {
logger := NewBufferLogger()
bufferLogger := logger.(*BufferLogger)
assert.NotNil(t, bufferLogger)
err := logger.Init("")
assert.NoError(t, err)
location, _ := time.LoadLocation("EST")
date := time.Date(2019, time.January, 13, 22, 3, 30, 15, location)
msg := "TEST MSG"
event := Event{
level: INFO,
msg: msg,
caller: "CALLER",
filename: "FULL/FILENAME",
line: 1,
time: date,
}
logger.LogEvent(&event)
content, err := bufferLogger.Content()
assert.NoError(t, err)
assert.Contains(t, content, msg)
logger.Close()
}
func TestBufferLoggerContent(t *testing.T) {
level := INFO
logger := NewLogger(0, "console", "console", fmt.Sprintf(`{"level":"%s"}`, level.String()))
logger.SetLogger("buffer", "buffer", "{}")
defer logger.DelLogger("buffer")
msg := "A UNIQUE MESSAGE"
Error(msg)
found := false
for i := 0; i < 30000; i++ {
content, err := logger.GetLoggerProviderContent("buffer")
assert.NoError(t, err)
if strings.Contains(content, msg) {
found = true
break
}
time.Sleep(1 * time.Millisecond)
}
assert.True(t, found)
}

View File

@ -118,11 +118,6 @@ func (log *ConnLogger) Init(jsonconfig string) error {
return nil
}
// Content returns the content accumulated in the content provider
func (log *ConnLogger) Content() (string, error) {
return "", fmt.Errorf("not supported")
}
// Flush does nothing for this implementation
func (log *ConnLogger) Flush() {
}

View File

@ -65,11 +65,6 @@ func (log *ConsoleLogger) Init(config string) error {
return nil
}
// Content returns the content accumulated in the content provider
func (log *ConsoleLogger) Content() (string, error) {
return "", fmt.Errorf("not supported")
}
// Flush when log should be flushed
func (log *ConsoleLogger) Flush() {
}

View File

@ -82,7 +82,7 @@ func (l *ChannelledLog) Start() {
l.closeLogger()
return
}
l.loggerProvider.LogEvent(event)
l.loggerProvider.LogEvent(event) //nolint:errcheck
case _, ok := <-l.flush:
if !ok {
l.closeLogger()
@ -119,7 +119,7 @@ func (l *ChannelledLog) emptyQueue() bool {
if !ok {
return false
}
l.loggerProvider.LogEvent(event)
l.loggerProvider.LogEvent(event) //nolint:errcheck
default:
return true
}
@ -247,12 +247,6 @@ func (m *MultiChannelledLog) GetEventLogger(name string) EventLogger {
return m.loggers[name]
}
// GetEventProvider returns a sub logger provider content from this MultiChannelledLog
func (m *MultiChannelledLog) GetLoggerProviderContent(name string) (string, error) {
channelledLogger := m.GetEventLogger(name).(*ChannelledLog)
return channelledLogger.loggerProvider.Content()
}
// GetEventLoggerNames returns a list of names
func (m *MultiChannelledLog) GetEventLoggerNames() []string {
m.rwmutex.RLock()
@ -460,3 +454,7 @@ func (m *MultiChannelledLog) ResetLevel() Level {
func (m *MultiChannelledLog) GetName() string {
return m.name
}
func (e *Event) GetMsg() string {
return e.msg
}

View File

@ -181,7 +181,7 @@ func (log *FileLogger) DoRotate() error {
}
if log.Compress {
go compressOldLogFile(fname, log.CompressionLevel)
go compressOldLogFile(fname, log.CompressionLevel) //nolint:errcheck
}
// re-start logger
@ -216,7 +216,7 @@ func compressOldLogFile(fname string, compressionLevel int) error {
if err != nil {
zw.Close()
fw.Close()
util.Remove(fname + ".gz")
util.Remove(fname + ".gz") //nolint:errcheck
return err
}
reader.Close()
@ -253,15 +253,6 @@ func (log *FileLogger) deleteOldLog() {
})
}
// Content returns the content accumulated in the content provider
func (log *FileLogger) Content() (string, error) {
b, err := os.ReadFile(log.Filename)
if err != nil {
return "", err
}
return string(b), nil
}
// Flush flush file logger.
// there are no buffering messages in file logger in memory.
// flush file means sync file from disk.

Some files were not shown because too many files have changed in this diff.