Merge branch 'main' into sync-issue-pr-and-more

This commit is contained in:
harryzcy 2023-04-22 18:58:18 -04:00 committed by GitHub
commit d9cfdb1ed3
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
91 changed files with 747 additions and 1145 deletions

View File

@ -5,6 +5,7 @@ tmp_dir = ".air"
cmd = "make backend"
bin = "gitea"
include_ext = ["go", "tmpl"]
exclude_dir = ["modules/git/tests", "services/gitdiff/testdata", "modules/avatar/testdata", "models/fixtures", "models/migrations/fixtures", "modules/migration/file_format_testdata", "modules/avatar/identicon/testdata"]
include_file = ["main.go"]
include_dir = ["cmd", "models", "modules", "options", "routers", "services"]
exclude_regex = ["_test.go$", "_gen.go$"]

View File

@ -59,7 +59,7 @@ steps:
- name: lint-backend-windows
image: gitea/test_env:linux-1.20-amd64 # https://gitea.com/gitea/test-env
commands:
- make golangci-lint-windows vet
- make lint-go-windows lint-go-vet
environment:
GOPROXY: https://goproxy.io # proxy.golang.org is blocked in China, this proxy is not
GOSUMDB: sum.golang.org

1
.gitignore vendored
View File

@ -71,6 +71,7 @@ cpu.out
/tests/e2e/test-artifacts
/tests/e2e/test-snapshots
/tests/*.ini
/tests/**/*.git/**/*.sample
/node_modules
/yarn.lock
/yarn-error.log

View File

@ -22,7 +22,7 @@ linters:
- unconvert
- unused
# - varcheck # deprecated - https://github.com/golangci/golangci-lint/issues/1841
# - wastedassign # disabled - https://github.com/golangci/golangci-lint/issues/2649
- wastedassign
enable-all: false
disable-all: true
fast: false

110
Makefile
View File

@ -25,16 +25,15 @@ COMMA := ,
XGO_VERSION := go-1.20.x
AIR_PACKAGE ?= github.com/cosmtrek/air@v1.40.4
EDITORCONFIG_CHECKER_PACKAGE ?= github.com/editorconfig-checker/editorconfig-checker/cmd/editorconfig-checker@2.6.0
ERRCHECK_PACKAGE ?= github.com/kisielk/errcheck@v1.6.2
GOFUMPT_PACKAGE ?= mvdan.cc/gofumpt@v0.4.0
GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/cmd/golangci-lint@v1.51.2
GXZ_PAGAGE ?= github.com/ulikunitz/xz/cmd/gxz@v0.5.10
AIR_PACKAGE ?= github.com/cosmtrek/air@v1.43.0
EDITORCONFIG_CHECKER_PACKAGE ?= github.com/editorconfig-checker/editorconfig-checker/cmd/editorconfig-checker@2.7.0
GOFUMPT_PACKAGE ?= mvdan.cc/gofumpt@v0.5.0
GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/cmd/golangci-lint@v1.52.2
GXZ_PAGAGE ?= github.com/ulikunitz/xz/cmd/gxz@v0.5.11
MISSPELL_PACKAGE ?= github.com/client9/misspell/cmd/misspell@v0.3.4
SWAGGER_PACKAGE ?= github.com/go-swagger/go-swagger/cmd/swagger@v0.30.4
XGO_PACKAGE ?= src.techknowlogick.com/xgo@latest
GO_LICENSES_PACKAGE ?= github.com/google/go-licenses@v1.5.0
GO_LICENSES_PACKAGE ?= github.com/google/go-licenses@v1.6.0
GOVULNCHECK_PACKAGE ?= golang.org/x/vuln/cmd/govulncheck@latest
DOCKER_IMAGE ?= gitea/gitea
@ -194,9 +193,20 @@ help:
@echo " - deps-backend install backend dependencies"
@echo " - deps-tools install tool dependencies"
@echo " - lint lint everything"
@echo " - lint-fix lint everything and fix issues"
@echo " - lint-frontend lint frontend files"
@echo " - lint-frontend-fix lint frontend files and fix issues"
@echo " - lint-backend lint backend files"
@echo " - lint-backend-fix lint backend files and fix issues"
@echo " - lint-go lint go files"
@echo " - lint-go-fix lint go files and fix issues"
@echo " - lint-go-vet lint go files with vet"
@echo " - lint-js lint js files"
@echo " - lint-js-fix lint js files and fix issues"
@echo " - lint-css lint css files"
@echo " - lint-css-fix lint css files and fix issues"
@echo " - lint-md lint markdown files"
@echo " - lint-swagger lint swagger files"
@echo " - checks run various consistency checks"
@echo " - checks-frontend check frontend files"
@echo " - checks-backend check backend files"
@ -214,9 +224,7 @@ help:
@echo " - generate-manpage generate manpage"
@echo " - generate-swagger generate the swagger spec from code comments"
@echo " - swagger-validate check if the swagger spec is valid"
@echo " - golangci-lint run golangci-lint linter"
@echo " - go-licenses regenerate go licenses"
@echo " - vet examines Go source code and reports suspicious constructs"
@echo " - tidy run go mod tidy"
@echo " - test[\#TestSpecificName] run unit test"
@echo " - test-sqlite[\#TestSpecificName] run integration test for sqlite"
@ -286,12 +294,6 @@ fmt-check: fmt
misspell-check:
go run $(MISSPELL_PACKAGE) -error $(GO_DIRS) $(WEB_DIRS)
.PHONY: vet
vet:
@echo "Running go vet..."
@GOOS= GOARCH= $(GO) build code.gitea.io/gitea-vet
@$(GO) vet -vettool=gitea-vet $(GO_PACKAGES)
.PHONY: $(TAGS_EVIDENCE)
$(TAGS_EVIDENCE):
@mkdir -p $(MAKE_EVIDENCE_DIR)
@ -324,11 +326,6 @@ swagger-validate:
$(GO) run $(SWAGGER_PACKAGE) validate './$(SWAGGER_SPEC)'
$(SED_INPLACE) '$(SWAGGER_SPEC_S_TMPL)' './$(SWAGGER_SPEC)'
.PHONY: errcheck
errcheck:
@echo "Running errcheck..."
$(GO) run $(ERRCHECK_PACKAGE) $(GO_PACKAGES)
.PHONY: checks
checks: checks-frontend checks-backend
@ -341,18 +338,69 @@ checks-backend: tidy-check swagger-check fmt-check misspell-check swagger-valida
.PHONY: lint
lint: lint-frontend lint-backend
.PHONY: lint-fix
lint-fix: lint-frontend-fix lint-backend-fix
.PHONY: lint-frontend
lint-frontend: node_modules lint-md
lint-frontend: lint-js lint-css lint-md lint-swagger
.PHONY: lint-frontend-fix
lint-frontend-fix: lint-js-fix lint-css-fix lint-md lint-swagger
.PHONY: lint-backend
lint-backend: lint-go lint-go-vet lint-editorconfig
.PHONY: lint-backend-fix
lint-backend-fix: lint-go-fix lint-go-vet lint-editorconfig
.PHONY: lint-js
lint-js: node_modules
npx eslint --color --max-warnings=0 --ext js,vue web_src/js build *.config.js docs/assets/js tests/e2e
.PHONY: lint-js-fix
lint-js-fix: node_modules
npx eslint --color --max-warnings=0 --ext js,vue web_src/js build *.config.js docs/assets/js tests/e2e --fix
.PHONY: lint-css
lint-css: node_modules
npx stylelint --color --max-warnings=0 web_src/css
.PHONY: lint-css-fix
lint-css-fix: node_modules
npx stylelint --color --max-warnings=0 web_src/css --fix
.PHONY: lint-swagger
lint-swagger: node_modules
npx spectral lint -q -F hint $(SWAGGER_SPEC)
.PHONY: lint-md
lint-md: node_modules
npx markdownlint docs *.md
.PHONY: lint-backend
lint-backend: golangci-lint vet editorconfig-checker
.PHONY: lint-go
lint-go:
$(GO) run $(GOLANGCI_LINT_PACKAGE) run
.PHONY: lint-go-fix
lint-go-fix:
$(GO) run $(GOLANGCI_LINT_PACKAGE) run --fix
# workaround step for the lint-backend-windows CI task because 'go run' cannot
# have distinct GOOS/GOARCH for its build and run steps
.PHONY: lint-go-windows
lint-go-windows:
@GOOS= GOARCH= $(GO) install $(GOLANGCI_LINT_PACKAGE)
golangci-lint run
.PHONY: lint-go-vet
lint-go-vet:
@echo "Running go vet..."
@GOOS= GOARCH= $(GO) build code.gitea.io/gitea-vet
@$(GO) vet -vettool=gitea-vet $(GO_PACKAGES)
.PHONY: lint-editorconfig
lint-editorconfig:
$(GO) run $(EDITORCONFIG_CHECKER_PACKAGE) templates
.PHONY: watch
watch:
@ -843,7 +891,6 @@ deps-backend:
deps-tools:
$(GO) install $(AIR_PACKAGE)
$(GO) install $(EDITORCONFIG_CHECKER_PACKAGE)
$(GO) install $(ERRCHECK_PACKAGE)
$(GO) install $(GOFUMPT_PACKAGE)
$(GO) install $(GOLANGCI_LINT_PACKAGE)
$(GO) install $(GXZ_PAGAGE)
@ -942,21 +989,6 @@ generate-manpage:
@gzip -9 man/man1/gitea.1 && echo man/man1/gitea.1.gz created
@#TODO A small script that formats config-cheat-sheet.en-us.md nicely for use as a config man page
.PHONY: golangci-lint
golangci-lint:
$(GO) run $(GOLANGCI_LINT_PACKAGE) run
# workaround step for the lint-backend-windows CI task because 'go run' can not
# have distinct GOOS/GOARCH for its build and run steps
.PHONY: golangci-lint-windows
golangci-lint-windows:
@GOOS= GOARCH= $(GO) install $(GOLANGCI_LINT_PACKAGE)
golangci-lint run
.PHONY: editorconfig-checker
editorconfig-checker:
$(GO) run $(EDITORCONFIG_CHECKER_PACKAGE) templates
.PHONY: docker
docker:
docker build --disable-content-trust=false -t $(DOCKER_REF) .

View File

@ -67,7 +67,7 @@ func TestMigratePackages(t *testing.T) {
entries, err := os.ReadDir(p)
assert.NoError(t, err)
assert.EqualValues(t, 2, len(entries))
assert.Len(t, entries, 2)
assert.EqualValues(t, "01", entries[0].Name())
assert.EqualValues(t, "tmp", entries[1].Name())
}

2
go.mod
View File

@ -3,7 +3,7 @@ module code.gitea.io/gitea
go 1.19
require (
code.gitea.io/actions-proto-go v0.2.0
code.gitea.io/actions-proto-go v0.2.1
code.gitea.io/gitea-vet v0.2.2
code.gitea.io/sdk/gitea v0.15.1
codeberg.org/gusted/mcaptcha v0.0.0-20220723083913-4f3072e1d570

4
go.sum
View File

@ -40,8 +40,8 @@ cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohl
cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo=
code.gitea.io/actions-proto-go v0.2.0 h1:nYh9nhhfk67YA4wVNLsCzd//RCvXnljwXClJ33+HPVk=
code.gitea.io/actions-proto-go v0.2.0/go.mod h1:00ys5QDo1iHN1tHNvvddAcy2W/g+425hQya1cCSvq9A=
code.gitea.io/actions-proto-go v0.2.1 h1:ToMN/8thz2q10TuCq8dL2d8mI+/pWpJcHCvG+TELwa0=
code.gitea.io/actions-proto-go v0.2.1/go.mod h1:00ys5QDo1iHN1tHNvvddAcy2W/g+425hQya1cCSvq9A=
code.gitea.io/gitea-vet v0.2.1/go.mod h1:zcNbT/aJEmivCAhfmkHOlT645KNOf9W2KnkLgFjGGfE=
code.gitea.io/gitea-vet v0.2.2 h1:TEOV/Glf38iGmKzKP0EB++Z5OSL4zGg3RrAvlwaMuvk=
code.gitea.io/gitea-vet v0.2.2/go.mod h1:zcNbT/aJEmivCAhfmkHOlT645KNOf9W2KnkLgFjGGfE=

View File

@ -0,0 +1,51 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package actions
import (
"context"
"code.gitea.io/gitea/models/db"
)
// ActionTaskOutput represents an output of ActionTask.
// Since outputs are bound to a task, when a completed job is rerun,
// the outputs of the job are reset because the task is new.
// This is by design, to avoid mixing the outputs of the old task with those of the new one.
type ActionTaskOutput struct {
ID int64
TaskID int64 `xorm:"INDEX UNIQUE(task_id_output_key)"`
OutputKey string `xorm:"VARCHAR(255) UNIQUE(task_id_output_key)"`
OutputValue string `xorm:"MEDIUMTEXT"`
}
// FindTaskOutputByTaskID returns the outputs of the task.
func FindTaskOutputByTaskID(ctx context.Context, taskID int64) ([]*ActionTaskOutput, error) {
var outputs []*ActionTaskOutput
return outputs, db.GetEngine(ctx).Where("task_id=?", taskID).Find(&outputs)
}
// FindTaskOutputKeyByTaskID returns the keys of the outputs of the task.
func FindTaskOutputKeyByTaskID(ctx context.Context, taskID int64) ([]string, error) {
var keys []string
return keys, db.GetEngine(ctx).Table(ActionTaskOutput{}).Where("task_id=?", taskID).Cols("output_key").Find(&keys)
}
// InsertTaskOutputIfNotExist inserts a new task output if it does not exist.
func InsertTaskOutputIfNotExist(ctx context.Context, taskID int64, key, value string) error {
return db.WithTx(ctx, func(ctx context.Context) error {
sess := db.GetEngine(ctx)
if exist, err := sess.Exist(&ActionTaskOutput{TaskID: taskID, OutputKey: key}); err != nil {
return err
} else if exist {
return nil
}
_, err := sess.Insert(&ActionTaskOutput{
TaskID: taskID,
OutputKey: key,
OutputValue: value,
})
return err
})
}
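
For illustration only, a caller could use these new helpers roughly as in the minimal sketch below; the task ID, key and value are hypothetical, and the imports "context", "code.gitea.io/gitea/models/actions" (as actions_model) and "code.gitea.io/gitea/modules/log" are assumed:

// sketch: persist one runner-reported output, then read all outputs of the task back
func demoTaskOutputs(ctx context.Context, taskID int64) error {
	// repeated calls with the same key are no-ops thanks to the existence check
	if err := actions_model.InsertTaskOutputIfNotExist(ctx, taskID, "image_tag", "v1.2.3"); err != nil {
		return err
	}
	outputs, err := actions_model.FindTaskOutputByTaskID(ctx, taskID)
	if err != nil {
		return err
	}
	for _, o := range outputs {
		log.Info("output %s=%s", o.OutputKey, o.OutputValue)
	}
	return nil
}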

View File

@ -40,7 +40,7 @@ func TestFind(t *testing.T) {
var repoUnits []repo_model.RepoUnit
err = db.Find(db.DefaultContext, &opts, &repoUnits)
assert.NoError(t, err)
assert.EqualValues(t, repoUnitCount, len(repoUnits))
assert.Len(t, repoUnits, repoUnitCount)
cnt, err := db.Count(db.DefaultContext, &opts, new(repo_model.RepoUnit))
assert.NoError(t, err)

View File

@ -88,12 +88,12 @@ func TestFindRenamedBranch(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
branch, exist, err := git_model.FindRenamedBranch(db.DefaultContext, 1, "dev")
assert.NoError(t, err)
assert.Equal(t, true, exist)
assert.True(t, exist)
assert.Equal(t, "master", branch.To)
_, exist, err = git_model.FindRenamedBranch(db.DefaultContext, 1, "unknow")
assert.NoError(t, err)
assert.Equal(t, false, exist)
assert.False(t, exist)
}
func TestRenameBranch(t *testing.T) {
@ -115,7 +115,7 @@ func TestRenameBranch(t *testing.T) {
return nil
}))
assert.Equal(t, true, _isDefault)
assert.True(t, _isDefault)
repo1 = unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
assert.Equal(t, "main", repo1.DefaultBranch)

View File

@ -109,11 +109,11 @@ func TestHasUnmergedPullRequestsByHeadInfo(t *testing.T) {
exist, err := issues_model.HasUnmergedPullRequestsByHeadInfo(db.DefaultContext, 1, "branch2")
assert.NoError(t, err)
assert.Equal(t, true, exist)
assert.True(t, exist)
exist, err = issues_model.HasUnmergedPullRequestsByHeadInfo(db.DefaultContext, 1, "not_exist_branch")
assert.NoError(t, err)
assert.Equal(t, false, exist)
assert.False(t, exist)
}
func TestGetUnmergedPullRequestsByHeadInfo(t *testing.T) {

View File

@ -35,7 +35,7 @@ func TestAddTime(t *testing.T) {
assert.Equal(t, int64(3661), tt.Time)
comment := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{Type: issues_model.CommentTypeAddTimeManual, PosterID: 3, IssueID: 1})
assert.Equal(t, comment.Content, "1 hour 1 minute")
assert.Equal(t, "1 hour 1 minute", comment.Content)
}
func TestGetTrackedTimes(t *testing.T) {

View File

@ -485,6 +485,8 @@ var migrations = []Migration{
NewMigration("Fix incorrect admin team unit access mode", v1_20.FixIncorrectAdminTeamUnitAccessMode),
// v253 -> v254
NewMigration("Fix ExternalTracker and ExternalWiki accessMode in owner and admin team", v1_20.FixExternalTrackerAndExternalWikiAccessModeInOwnerAndAdminTeam),
// v254 -> v255
NewMigration("Add ActionTaskOutput table", v1_20.AddActionTaskOutputTable),
}
// GetCurrentDBVersion returns the current db version

View File

@ -0,0 +1,18 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package v1_20 //nolint
import (
"xorm.io/xorm"
)
func AddActionTaskOutputTable(x *xorm.Engine) error {
type ActionTaskOutput struct {
ID int64
TaskID int64 `xorm:"INDEX UNIQUE(task_id_output_key)"`
OutputKey string `xorm:"VARCHAR(255) UNIQUE(task_id_output_key)"`
OutputValue string `xorm:"MEDIUMTEXT"`
}
return x.Sync(new(ActionTaskOutput))
}

View File

@ -152,7 +152,7 @@ func init() {
Query()
assert.NoError(t, err)
assert.EqualValues(t, label.int("NumIssues"), len(issueLabels), "Unexpected number of issue for label id: %d", label.int("ID"))
assert.Len(t, issueLabels, label.int("NumIssues"), "Unexpected number of issue for label id: %d", label.int("ID"))
issueIDs := make([]int, len(issueLabels))
for i, issueLabel := range issueLabels {

View File

@ -36,10 +36,10 @@ func TestGetUserEmailsByNames(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
// ignore non-active users' emails
assert.Equal(t, []string{"user8@example.com"}, user_model.GetUserEmailsByNames(db.DefaultContext, []string{"user8", "user9"}))
assert.Equal(t, []string{"user8@example.com", "user5@example.com"}, user_model.GetUserEmailsByNames(db.DefaultContext, []string{"user8", "user5"}))
assert.ElementsMatch(t, []string{"user8@example.com"}, user_model.GetUserEmailsByNames(db.DefaultContext, []string{"user8", "user9"}))
assert.ElementsMatch(t, []string{"user8@example.com", "user5@example.com"}, user_model.GetUserEmailsByNames(db.DefaultContext, []string{"user8", "user5"}))
assert.Equal(t, []string{"user8@example.com"}, user_model.GetUserEmailsByNames(db.DefaultContext, []string{"user8", "user7"}))
assert.ElementsMatch(t, []string{"user8@example.com"}, user_model.GetUserEmailsByNames(db.DefaultContext, []string{"user8", "user7"}))
}
func TestCanCreateOrganization(t *testing.T) {

View File

@ -47,7 +47,9 @@ func (e *escapeStreamer) EscapeStatus() *EscapeStatus {
// Text tells the next streamer there is a text
func (e *escapeStreamer) Text(data string) error {
sb := &strings.Builder{}
pos, until, next := 0, 0, 0
var until int
var next int
pos := 0
if len(data) > len(UTF8BOM) && data[:len(UTF8BOM)] == string(UTF8BOM) {
_, _ = sb.WriteString(data[:len(UTF8BOM)])
pos = len(UTF8BOM)

View File

@ -19,7 +19,7 @@ func Test_nulSeparatedAttributeWriter_ReadAttribute(t *testing.T) {
n, err := wr.Write([]byte(testStr))
assert.Equal(t, n, len(testStr))
assert.Len(t, testStr, n)
assert.NoError(t, err)
select {
case attr := <-wr.ReadAttribute():
@ -32,7 +32,7 @@ func Test_nulSeparatedAttributeWriter_ReadAttribute(t *testing.T) {
// Write a second attribute again
n, err = wr.Write([]byte(testStr))
assert.Equal(t, n, len(testStr))
assert.Len(t, testStr, n)
assert.NoError(t, err)
select {

View File

@ -97,6 +97,6 @@ func TestRepository_CommitsBetweenIDs(t *testing.T) {
for i, c := range cases {
commits, err := bareRepo1.CommitsBetweenIDs(c.NewID, c.OldID)
assert.NoError(t, err)
assert.Equal(t, c.ExpectedCommits, len(commits), "case %d", i)
assert.Len(t, commits, c.ExpectedCommits, "case %d", i)
}
}

View File

@ -26,7 +26,7 @@ func TestRepository_GetTags(t *testing.T) {
return
}
assert.Len(t, tags, 2)
assert.Equal(t, len(tags), total)
assert.Len(t, tags, total)
assert.EqualValues(t, "signed-tag", tags[0].Name)
assert.EqualValues(t, "36f97d9a96457e2bab511db30fe2db03893ebc64", tags[0].ID.String())
assert.EqualValues(t, "tag", tags[0].Type)

View File

@ -69,7 +69,7 @@ func testIndexer(name string, t *testing.T, indexer Indexer) {
t.Run(kw.Keyword, func(t *testing.T) {
total, res, langs, err := indexer.Search(context.TODO(), kw.RepoIDs, "", kw.Keyword, 1, 10, false)
assert.NoError(t, err)
assert.EqualValues(t, len(kw.IDs), total)
assert.Len(t, kw.IDs, int(total))
assert.Len(t, langs, kw.Langs)
ids := make([]int64, 0, len(res))

View File

@ -162,7 +162,7 @@ func TestHTTPClientDownload(t *testing.T) {
assert.NoError(t, err)
assert.Equal(t, "download", batchRequest.Operation)
assert.Equal(t, 1, len(batchRequest.Objects))
assert.Len(t, batchRequest.Objects, 1)
assert.Equal(t, p.Oid, batchRequest.Objects[0].Oid)
assert.Equal(t, p.Size, batchRequest.Objects[0].Size)
@ -269,7 +269,7 @@ func TestHTTPClientUpload(t *testing.T) {
assert.NoError(t, err)
assert.Equal(t, "upload", batchRequest.Operation)
assert.Equal(t, 1, len(batchRequest.Objects))
assert.Len(t, batchRequest.Objects, 1)
assert.Equal(t, p.Oid, batchRequest.Objects[0].Oid)
assert.Equal(t, p.Size, batchRequest.Objects[0].Size)

View File

@ -51,7 +51,7 @@ func TestFileLogger(t *testing.T) {
fileLogger := NewFileLogger()
realFileLogger, ok := fileLogger.(*FileLogger)
assert.Equal(t, true, ok)
assert.True(t, ok)
location, _ := time.LoadLocation("EST")

View File

@ -593,5 +593,5 @@ func TestIssue18471(t *testing.T) {
}, strings.NewReader(data), &res)
assert.NoError(t, err)
assert.Equal(t, res.String(), "<a href=\"http://domain/org/repo/compare/783b039...da951ce\" class=\"compare\"><code class=\"nohighlight\">783b039...da951ce</code></a>")
assert.Equal(t, "<a href=\"http://domain/org/repo/compare/783b039...da951ce\" class=\"compare\"><code class=\"nohighlight\">783b039...da951ce</code></a>", res.String())
}

View File

@ -33,7 +33,7 @@ func TestParseImageConfig(t *testing.T) {
assert.Equal(t, projectURL, metadata.ProjectURL)
assert.Equal(t, repositoryURL, metadata.RepositoryURL)
assert.Equal(t, documentationURL, metadata.DocumentationURL)
assert.Equal(t, []string{"do it 1", "do it 2"}, metadata.ImageLayers)
assert.ElementsMatch(t, []string{"do it 1", "do it 2"}, metadata.ImageLayers)
assert.Equal(
t,
map[string]string{

View File

@ -70,19 +70,19 @@ func TestPaginator(t *testing.T) {
t.Run("Generate pages", func(t *testing.T) {
p := New(0, 10, 1, 0)
pages := p.Pages()
assert.Equal(t, 0, len(pages))
assert.Empty(t, pages)
})
t.Run("Only current page", func(t *testing.T) {
p := New(0, 10, 1, 1)
pages := p.Pages()
assert.Equal(t, 1, len(pages))
assert.Len(t, pages, 1)
assert.Equal(t, 1, pages[0].Num())
assert.True(t, pages[0].IsCurrent())
p = New(1, 10, 1, 1)
pages = p.Pages()
assert.Equal(t, 1, len(pages))
assert.Len(t, pages, 1)
assert.Equal(t, 1, pages[0].Num())
assert.True(t, pages[0].IsCurrent())
})
@ -90,13 +90,13 @@ func TestPaginator(t *testing.T) {
t.Run("Total page number is less or equal", func(t *testing.T) {
p := New(1, 10, 1, 2)
pages := p.Pages()
assert.Equal(t, 1, len(pages))
assert.Len(t, pages, 1)
assert.Equal(t, 1, pages[0].Num())
assert.True(t, pages[0].IsCurrent())
p = New(11, 10, 1, 2)
pages = p.Pages()
assert.Equal(t, 2, len(pages))
assert.Len(t, pages, 2)
assert.Equal(t, 1, pages[0].Num())
assert.True(t, pages[0].IsCurrent())
assert.Equal(t, 2, pages[1].Num())
@ -104,7 +104,7 @@ func TestPaginator(t *testing.T) {
p = New(11, 10, 2, 2)
pages = p.Pages()
assert.Equal(t, 2, len(pages))
assert.Len(t, pages, 2)
assert.Equal(t, 1, pages[0].Num())
assert.False(t, pages[0].IsCurrent())
assert.Equal(t, 2, pages[1].Num())
@ -112,7 +112,7 @@ func TestPaginator(t *testing.T) {
p = New(25, 10, 2, 3)
pages = p.Pages()
assert.Equal(t, 3, len(pages))
assert.Len(t, pages, 3)
assert.Equal(t, 1, pages[0].Num())
assert.False(t, pages[0].IsCurrent())
assert.Equal(t, 2, pages[1].Num())
@ -125,7 +125,7 @@ func TestPaginator(t *testing.T) {
// ... 2
p := New(11, 10, 2, 1)
pages := p.Pages()
assert.Equal(t, 2, len(pages))
assert.Len(t, pages, 2)
assert.Equal(t, -1, pages[0].Num())
assert.False(t, pages[0].IsCurrent())
assert.Equal(t, 2, pages[1].Num())
@ -134,7 +134,7 @@ func TestPaginator(t *testing.T) {
// ... 2 3
p = New(21, 10, 2, 2)
pages = p.Pages()
assert.Equal(t, 3, len(pages))
assert.Len(t, pages, 3)
assert.Equal(t, -1, pages[0].Num())
assert.False(t, pages[0].IsCurrent())
assert.Equal(t, 2, pages[1].Num())
@ -145,7 +145,7 @@ func TestPaginator(t *testing.T) {
// ... 2 3 4
p = New(31, 10, 3, 3)
pages = p.Pages()
assert.Equal(t, 4, len(pages))
assert.Len(t, pages, 4)
assert.Equal(t, -1, pages[0].Num())
assert.False(t, pages[0].IsCurrent())
assert.Equal(t, 2, pages[1].Num())
@ -158,7 +158,7 @@ func TestPaginator(t *testing.T) {
// ... 3 4 5
p = New(41, 10, 4, 3)
pages = p.Pages()
assert.Equal(t, 4, len(pages))
assert.Len(t, pages, 4)
assert.Equal(t, -1, pages[0].Num())
assert.False(t, pages[0].IsCurrent())
assert.Equal(t, 3, pages[1].Num())
@ -171,7 +171,7 @@ func TestPaginator(t *testing.T) {
// ... 4 5 6 7 8 9 10
p = New(100, 10, 9, 7)
pages = p.Pages()
assert.Equal(t, 8, len(pages))
assert.Len(t, pages, 8)
assert.Equal(t, -1, pages[0].Num())
assert.False(t, pages[0].IsCurrent())
assert.Equal(t, 4, pages[1].Num())
@ -194,7 +194,7 @@ func TestPaginator(t *testing.T) {
// 1 ...
p := New(21, 10, 1, 1)
pages := p.Pages()
assert.Equal(t, 2, len(pages))
assert.Len(t, pages, 2)
assert.Equal(t, 1, pages[0].Num())
assert.True(t, pages[0].IsCurrent())
assert.Equal(t, -1, pages[1].Num())
@ -203,7 +203,7 @@ func TestPaginator(t *testing.T) {
// 1 2 ...
p = New(21, 10, 1, 2)
pages = p.Pages()
assert.Equal(t, 3, len(pages))
assert.Len(t, pages, 3)
assert.Equal(t, 1, pages[0].Num())
assert.True(t, pages[0].IsCurrent())
assert.Equal(t, 2, pages[1].Num())
@ -214,7 +214,7 @@ func TestPaginator(t *testing.T) {
// 1 2 3 ...
p = New(31, 10, 2, 3)
pages = p.Pages()
assert.Equal(t, 4, len(pages))
assert.Len(t, pages, 4)
assert.Equal(t, 1, pages[0].Num())
assert.False(t, pages[0].IsCurrent())
assert.Equal(t, 2, pages[1].Num())
@ -227,7 +227,7 @@ func TestPaginator(t *testing.T) {
// 1 2 3 ...
p = New(41, 10, 2, 3)
pages = p.Pages()
assert.Equal(t, 4, len(pages))
assert.Len(t, pages, 4)
assert.Equal(t, 1, pages[0].Num())
assert.False(t, pages[0].IsCurrent())
assert.Equal(t, 2, pages[1].Num())
@ -240,7 +240,7 @@ func TestPaginator(t *testing.T) {
// 1 2 3 4 5 6 7 ...
p = New(100, 10, 1, 7)
pages = p.Pages()
assert.Equal(t, 8, len(pages))
assert.Len(t, pages, 8)
assert.Equal(t, 1, pages[0].Num())
assert.True(t, pages[0].IsCurrent())
assert.Equal(t, 2, pages[1].Num())
@ -261,7 +261,7 @@ func TestPaginator(t *testing.T) {
// 1 2 3 4 5 6 7 ...
p = New(100, 10, 2, 7)
pages = p.Pages()
assert.Equal(t, 8, len(pages))
assert.Len(t, pages, 8)
assert.Equal(t, 1, pages[0].Num())
assert.False(t, pages[0].IsCurrent())
assert.Equal(t, 2, pages[1].Num())
@ -284,7 +284,7 @@ func TestPaginator(t *testing.T) {
// ... 2 3 ...
p := New(35, 10, 2, 2)
pages := p.Pages()
assert.Equal(t, 4, len(pages))
assert.Len(t, pages, 4)
assert.Equal(t, -1, pages[0].Num())
assert.False(t, pages[0].IsCurrent())
assert.Equal(t, 2, pages[1].Num())
@ -297,7 +297,7 @@ func TestPaginator(t *testing.T) {
// ... 2 3 4 ...
p = New(49, 10, 3, 3)
pages = p.Pages()
assert.Equal(t, 5, len(pages))
assert.Len(t, pages, 5)
assert.Equal(t, -1, pages[0].Num())
assert.False(t, pages[0].IsCurrent())
assert.Equal(t, 2, pages[1].Num())

View File

@ -208,7 +208,7 @@ func TestPersistableChannelUniqueQueue(t *testing.T) {
mapLock.Lock()
assert.Equal(t, 101, len(executedInitial[name])+len(executedEmpty[name]))
assert.Equal(t, 0, len(hasEmpty[name]))
assert.Empty(t, hasEmpty[name])
mapLock.Unlock()
})
close(done)

View File

@ -15,9 +15,9 @@ func TestEncryptDecrypt(t *testing.T) {
hex, _ = EncryptSecret("foo", "baz")
str, _ = DecryptSecret("foo", hex)
assert.Equal(t, str, "baz")
assert.Equal(t, "baz", str)
hex, _ = EncryptSecret("bar", "baz")
str, _ = DecryptSecret("foo", hex)
assert.NotEqual(t, str, "baz")
assert.NotEqual(t, "baz", str)
}

View File

@ -89,6 +89,6 @@ func TestLocalStorageIterator(t *testing.T) {
return nil
})
assert.NoError(t, err)
assert.Equal(t, count, len(expected))
assert.Len(t, expected, count)
}
}

View File

@ -15,7 +15,6 @@ import (
"mime"
"net/url"
"path/filepath"
"reflect"
"regexp"
"strings"
"time"
@ -68,12 +67,17 @@ func NewFuncMap() []template.FuncMap {
"PathEscape": url.PathEscape,
"PathEscapeSegments": util.PathEscapeSegments,
// utils
"StringUtils": NewStringUtils,
"SliceUtils": NewSliceUtils,
// -----------------------------------------------------------------
// string / json
// TODO: move string helper functions to StringUtils
"Join": strings.Join,
"DotEscape": DotEscape,
"HasPrefix": strings.HasPrefix,
"EllipsisString": base.EllipsisString,
"DumpVar": dumpVar,
"Json": func(in interface{}) string {
out, err := json.Marshal(in)
@ -143,35 +147,6 @@ func NewFuncMap() []template.FuncMap {
return fmt.Sprint(time.Since(startTime).Nanoseconds()/1e6) + "ms"
},
// -----------------------------------------------------------------
// slice
"containGeneric": func(arr, v interface{}) bool {
arrV := reflect.ValueOf(arr)
if arrV.Kind() == reflect.String && reflect.ValueOf(v).Kind() == reflect.String {
return strings.Contains(arr.(string), v.(string))
}
if arrV.Kind() == reflect.Slice {
for i := 0; i < arrV.Len(); i++ {
iV := arrV.Index(i)
if !iV.CanInterface() {
continue
}
if iV.Interface() == v {
return true
}
}
}
return false
},
"contain": func(s []int64, id int64) bool {
for i := 0; i < len(s); i++ {
if s[i] == id {
return true
}
}
return false
},
// -----------------------------------------------------------------
// setting
"AppName": func() string {

View File

@ -1,47 +0,0 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package templates
import (
"fmt"
"reflect"
)
func dictMerge(base map[string]any, arg any) bool {
if arg == nil {
return true
}
rv := reflect.ValueOf(arg)
if rv.Kind() == reflect.Map {
for _, k := range rv.MapKeys() {
base[k.String()] = rv.MapIndex(k).Interface()
}
return true
}
return false
}
// dict is a helper function for creating a map[string]any from a list of key-value pairs.
// If the key is dot ".", the value is merged into the base map, just like Golang template's dot syntax: dot means current
// The dot syntax is highly discouraged because it might cause unclear key conflicts. It's always good to use explicit keys.
func dict(args ...any) (map[string]any, error) {
if len(args)%2 != 0 {
return nil, fmt.Errorf("invalid dict constructor syntax: must have key-value pairs")
}
m := make(map[string]any, len(args)/2)
for i := 0; i < len(args); i += 2 {
key, ok := args[i].(string)
if !ok {
return nil, fmt.Errorf("invalid dict constructor syntax: unable to merge args[%d]", i)
}
if key == "." {
if ok = dictMerge(m, args[i+1]); !ok {
return nil, fmt.Errorf("invalid dict constructor syntax: dot arg[%d] must be followed by a dict", i)
}
} else {
m[key] = args[i+1]
}
}
return m, nil
}

View File

@ -0,0 +1,121 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package templates
import (
"fmt"
"html"
"html/template"
"reflect"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/setting"
)
func dictMerge(base map[string]any, arg any) bool {
if arg == nil {
return true
}
rv := reflect.ValueOf(arg)
if rv.Kind() == reflect.Map {
for _, k := range rv.MapKeys() {
base[k.String()] = rv.MapIndex(k).Interface()
}
return true
}
return false
}
// dict is a helper function for creating a map[string]any from a list of key-value pairs.
// If the key is dot ".", the value is merged into the base map, just like Golang template's dot syntax: dot means current
// The dot syntax is highly discouraged because it might cause unclear key conflicts. It's always good to use explicit keys.
func dict(args ...any) (map[string]any, error) {
if len(args)%2 != 0 {
return nil, fmt.Errorf("invalid dict constructor syntax: must have key-value pairs")
}
m := make(map[string]any, len(args)/2)
for i := 0; i < len(args); i += 2 {
key, ok := args[i].(string)
if !ok {
return nil, fmt.Errorf("invalid dict constructor syntax: unable to merge args[%d]", i)
}
if key == "." {
if ok = dictMerge(m, args[i+1]); !ok {
return nil, fmt.Errorf("invalid dict constructor syntax: dot arg[%d] must be followed by a dict", i)
}
} else {
m[key] = args[i+1]
}
}
return m, nil
}
func dumpVarMarshalable(v any, dumped map[uintptr]bool) (ret any, ok bool) {
if v == nil {
return nil, true
}
e := reflect.ValueOf(v)
for e.Kind() == reflect.Pointer {
e = e.Elem()
}
if e.CanAddr() {
addr := e.UnsafeAddr()
if dumped[addr] {
return "[dumped]", false
}
dumped[addr] = true
defer delete(dumped, addr)
}
switch e.Kind() {
case reflect.Bool, reflect.String,
reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64,
reflect.Float32, reflect.Float64:
return e.Interface(), true
case reflect.Struct:
m := map[string]any{}
for i := 0; i < e.NumField(); i++ {
k := e.Type().Field(i).Name
if !e.Type().Field(i).IsExported() {
continue
}
v := e.Field(i).Interface()
m[k], _ = dumpVarMarshalable(v, dumped)
}
return m, true
case reflect.Map:
m := map[string]any{}
for _, k := range e.MapKeys() {
m[k.String()], _ = dumpVarMarshalable(e.MapIndex(k).Interface(), dumped)
}
return m, true
case reflect.Array, reflect.Slice:
var m []any
for i := 0; i < e.Len(); i++ {
v, _ := dumpVarMarshalable(e.Index(i).Interface(), dumped)
m = append(m, v)
}
return m, true
default:
return "[" + reflect.TypeOf(v).String() + "]", false
}
}
// dumpVar helps to dump a variable in a template, to help debugging and development.
func dumpVar(v any) template.HTML {
if setting.IsProd {
return "<pre>dumpVar: only available in dev mode</pre>"
}
m, ok := dumpVarMarshalable(v, map[uintptr]bool{})
var dumpStr string
jsonBytes, err := json.MarshalIndent(m, "", " ")
if err != nil {
dumpStr = fmt.Sprintf("dumpVar: unable to marshal %T: %v", v, err)
} else if ok {
dumpStr = fmt.Sprintf("dumpVar: %T\n%s", v, string(jsonBytes))
} else {
dumpStr = fmt.Sprintf("dumpVar: unmarshalable %T\n%s", v, string(jsonBytes))
}
return template.HTML("<pre>" + html.EscapeString(dumpStr) + "</pre>")
}
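
As a rough illustration (assuming it runs inside this templates package, e.g. in a test; the template text and data are made up, and the "html/template" and "strings" imports are assumed), dict can be registered in a FuncMap and exercised, including the "." merge:

// sketch: register dict and exercise the dot-merge behaviour
tmpl := template.New("demo").Funcs(template.FuncMap{"dict": dict})
template.Must(tmpl.Parse(`{{$d := dict "Key" "value" "." .Base}}{{$d.Key}} {{$d.Existing}}`))
var sb strings.Builder
if err := tmpl.Execute(&sb, map[string]any{"Base": map[string]any{"Existing": "merged"}}); err != nil {
	panic(err)
}
// sb.String() is "value merged"

Since NewFuncMap (see the helper changes earlier in this diff) also registers this dump helper as "DumpVar", a template can call {{DumpVar .AnyValue}} to render a <pre> JSON dump of the value; the code above limits this to non-production mode.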

View File

@ -0,0 +1,35 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package templates
import (
"fmt"
"reflect"
)
type SliceUtils struct{}
func NewSliceUtils() *SliceUtils {
return &SliceUtils{}
}
func (su *SliceUtils) Contains(s, v any) bool {
if s == nil {
return false
}
sv := reflect.ValueOf(s)
if sv.Kind() != reflect.Slice && sv.Kind() != reflect.Array {
panic(fmt.Sprintf("invalid type, expected slice or array, but got: %T", s))
}
for i := 0; i < sv.Len(); i++ {
it := sv.Index(i)
if !it.CanInterface() {
continue
}
if it.Interface() == v {
return true
}
}
return false
}

View File

@ -0,0 +1,20 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package templates
import "strings"
type StringUtils struct{}
func NewStringUtils() *StringUtils {
return &StringUtils{}
}
func (su *StringUtils) HasPrefix(s, prefix string) bool {
return strings.HasPrefix(s, prefix)
}
func (su *StringUtils) Contains(s, substr string) bool {
return strings.Contains(s, substr)
}

View File

@ -4,6 +4,9 @@
package templates
import (
"html/template"
"io"
"strings"
"testing"
"github.com/stretchr/testify/assert"
@ -41,3 +44,36 @@ func TestDict(t *testing.T) {
assert.Error(t, err)
}
}
func TestUtils(t *testing.T) {
execTmpl := func(code string, data any) string {
tmpl := template.New("test")
tmpl.Funcs(template.FuncMap{"SliceUtils": NewSliceUtils, "StringUtils": NewStringUtils})
template.Must(tmpl.Parse(code))
w := &strings.Builder{}
assert.NoError(t, tmpl.Execute(w, data))
return w.String()
}
actual := execTmpl("{{SliceUtils.Contains .Slice .Value}}", map[string]any{"Slice": []string{"a", "b"}, "Value": "a"})
assert.Equal(t, "true", actual)
actual = execTmpl("{{SliceUtils.Contains .Slice .Value}}", map[string]any{"Slice": []string{"a", "b"}, "Value": "x"})
assert.Equal(t, "false", actual)
actual = execTmpl("{{SliceUtils.Contains .Slice .Value}}", map[string]any{"Slice": []int64{1, 2}, "Value": int64(2)})
assert.Equal(t, "true", actual)
actual = execTmpl("{{StringUtils.Contains .String .Value}}", map[string]any{"String": "abc", "Value": "b"})
assert.Equal(t, "true", actual)
actual = execTmpl("{{StringUtils.Contains .String .Value}}", map[string]any{"String": "abc", "Value": "x"})
assert.Equal(t, "false", actual)
tmpl := template.New("test")
tmpl.Funcs(template.FuncMap{"SliceUtils": NewSliceUtils, "StringUtils": NewStringUtils})
template.Must(tmpl.Parse("{{SliceUtils.Contains .Slice .Value}}"))
// error is like this: `template: test:1:12: executing "test" at <SliceUtils.Contains>: error calling Contains: ...`
err := tmpl.Execute(io.Discard, map[string]any{"Slice": struct{}{}})
assert.ErrorContains(t, err, "invalid type, expected slice or array")
}

View File

@ -22,26 +22,26 @@ func TestLogChecker(t *testing.T) {
log.Info("test")
filtered, stopped := lc.Check(100 * time.Millisecond)
assert.EqualValues(t, []bool{false, false}, filtered)
assert.EqualValues(t, false, stopped)
assert.ElementsMatch(t, []bool{false, false}, filtered)
assert.False(t, stopped)
log.Info("First")
filtered, stopped = lc.Check(100 * time.Millisecond)
assert.EqualValues(t, []bool{true, false}, filtered)
assert.EqualValues(t, false, stopped)
assert.ElementsMatch(t, []bool{true, false}, filtered)
assert.False(t, stopped)
log.Info("Second")
filtered, stopped = lc.Check(100 * time.Millisecond)
assert.EqualValues(t, []bool{true, false}, filtered)
assert.EqualValues(t, false, stopped)
assert.ElementsMatch(t, []bool{true, false}, filtered)
assert.False(t, stopped)
log.Info("Third")
filtered, stopped = lc.Check(100 * time.Millisecond)
assert.EqualValues(t, []bool{true, true}, filtered)
assert.EqualValues(t, false, stopped)
assert.ElementsMatch(t, []bool{true, true}, filtered)
assert.False(t, stopped)
log.Info("End")
filtered, stopped = lc.Check(100 * time.Millisecond)
assert.EqualValues(t, []bool{true, true}, filtered)
assert.EqualValues(t, true, stopped)
assert.ElementsMatch(t, []bool{true, true}, filtered)
assert.True(t, stopped)
}

View File

@ -23,7 +23,7 @@ const (
)
func computeTimeDiffFloor(diff int64, lang translation.Locale) (int64, string) {
diffStr := ""
var diffStr string
switch {
case diff <= 0:
diff = 0

View File

@ -51,8 +51,8 @@ sub = Changed Sub String
assert.Equal(t, `test value; <span style="color: red; background: none;">more text</span>`, result)
langs, descs := ls.ListLangNameDesc()
assert.Equal(t, []string{"lang1", "lang2"}, langs)
assert.Equal(t, []string{"Lang1", "Lang2"}, descs)
assert.ElementsMatch(t, []string{"lang1", "lang2"}, langs)
assert.ElementsMatch(t, []string{"Lang1", "Lang2"}, descs)
found := ls.Has("lang1", "no-such")
assert.False(t, found)

View File

@ -72,7 +72,6 @@ func (store *localeStore) AddLocaleByIni(langName, langDesc string, source, more
l.idxToMsgMap[idx] = key.Value()
}
}
iniFile = nil
return nil
}

View File

@ -74,15 +74,15 @@ func TestSliceEqual(t *testing.T) {
}
func TestSliceRemoveAll(t *testing.T) {
assert.Equal(t, SliceRemoveAll([]int{2, 0, 2, 3}, 0), []int{2, 2, 3})
assert.Equal(t, SliceRemoveAll([]int{2, 0, 2, 3}, 2), []int{0, 3})
assert.Equal(t, SliceRemoveAll([]int{0, 0, 0, 0}, 0), []int{})
assert.Equal(t, SliceRemoveAll([]int{2, 0, 2, 3}, 4), []int{2, 0, 2, 3})
assert.Equal(t, SliceRemoveAll([]int{}, 0), []int{})
assert.Equal(t, SliceRemoveAll([]int(nil), 0), []int(nil))
assert.Equal(t, SliceRemoveAll([]int{}, 0), []int{})
assert.ElementsMatch(t, []int{2, 2, 3}, SliceRemoveAll([]int{2, 0, 2, 3}, 0))
assert.ElementsMatch(t, []int{0, 3}, SliceRemoveAll([]int{2, 0, 2, 3}, 2))
assert.Empty(t, SliceRemoveAll([]int{0, 0, 0, 0}, 0))
assert.ElementsMatch(t, []int{2, 0, 2, 3}, SliceRemoveAll([]int{2, 0, 2, 3}, 4))
assert.Empty(t, SliceRemoveAll([]int{}, 0))
assert.ElementsMatch(t, []int(nil), SliceRemoveAll([]int(nil), 0))
assert.Empty(t, SliceRemoveAll([]int{}, 0))
assert.Equal(t, SliceRemoveAll([]string{"2", "0", "2", "3"}, "0"), []string{"2", "2", "3"})
assert.Equal(t, SliceRemoveAll([]float64{2, 0, 2, 3}, 0), []float64{2, 2, 3})
assert.Equal(t, SliceRemoveAll([]bool{false, true, false}, true), []bool{false, false})
assert.ElementsMatch(t, []string{"2", "2", "3"}, SliceRemoveAll([]string{"2", "0", "2", "3"}, "0"))
assert.ElementsMatch(t, []float64{2, 2, 3}, SliceRemoveAll([]float64{2, 0, 2, 3}, 0))
assert.ElementsMatch(t, []bool{false, false}, SliceRemoveAll([]bool{false, true, false}, true))
}

View File

@ -25,7 +25,7 @@ func TestSplitString(t *testing.T) {
assert.Equal(t, c.ellipsis+c.input[len(c.leftSub):], r, "test split %s at %d, expected rightSub: %q", c.input, c.n, c.input[len(c.leftSub):])
} else {
assert.Equal(t, c.leftSub, l, "test split %q at %d, expected leftSub: %q", c.input, c.n, c.leftSub)
assert.Equal(t, "", r, "test split %q at %d, expected rightSub: %q", c.input, c.n, "")
assert.Empty(t, r, "test split %q at %d, expected rightSub: %q", c.input, c.n, "")
}
}
}

View File

@ -97,7 +97,7 @@ func (s *Service) Register(
// FetchTask assigns a task to the runner
func (s *Service) FetchTask(
ctx context.Context,
req *connect.Request[runnerv1.FetchTaskRequest],
_ *connect.Request[runnerv1.FetchTaskRequest],
) (*connect.Response[runnerv1.FetchTaskResponse], error) {
runner := GetRunner(ctx)
@ -145,6 +145,31 @@ func (s *Service) UpdateTask(
return nil, status.Errorf(codes.Internal, "update task: %v", err)
}
for k, v := range req.Msg.Outputs {
if len(k) > 255 {
log.Warn("Ignore the output of task %d because the key is too long: %q", task.ID, k)
continue
}
// The value can be a maximum of 1 MB
if l := len(v); l > 1024*1024 {
log.Warn("Ignore the output %q of task %d because the value is too long: %v", k, task.ID, l)
continue
}
// There's another limitation on GitHub: the total of all outputs in a workflow run can be a maximum of 50 MB.
// We don't check the total size here because it's not easy to do, and it isn't really worth it.
// See https://docs.github.com/en/actions/using-jobs/defining-outputs-for-jobs
if err := actions_model.InsertTaskOutputIfNotExist(ctx, task.ID, k, v); err != nil {
log.Warn("Failed to insert the output %q of task %d: %v", k, task.ID, err)
// It's OK not to return an error here; the runner will resend the outputs.
}
}
sentOutputs, err := actions_model.FindTaskOutputKeyByTaskID(ctx, task.ID)
if err != nil {
log.Warn("Failed to find the sent outputs of task %d: %v", task.ID, err)
// It's OK not to return an error here; this can be handled when the runner resends the outputs.
}
if err := task.LoadJob(ctx); err != nil {
return nil, status.Errorf(codes.Internal, "load job: %v", err)
}
@ -162,6 +187,7 @@ func (s *Service) UpdateTask(
Id: req.Msg.State.Id,
Result: task.Status.AsResult(),
},
SentOutputs: sentOutputs,
}), nil
}

View File

@ -37,6 +37,17 @@ func pickTask(ctx context.Context, runner *actions_model.ActionRunner) (*runnerv
Context: generateTaskContext(t),
Secrets: getSecretsOfTask(ctx, t),
}
if needs, err := findTaskNeeds(ctx, t); err != nil {
log.Error("Cannot find needs for task %v: %v", t.ID, err)
// Go on with empty needs.
// If we returned an error, the task would be wild, which means the runner would never get it even though it has already been assigned to the runner.
// In contrast, missing needs is less serious: the task will fail and the runner will report the error in the logs.
} else {
task.Needs = needs
}
return task, true, nil
}
@ -124,3 +135,46 @@ func generateTaskContext(t *actions_model.ActionTask) *structpb.Struct {
return taskContext
}
func findTaskNeeds(ctx context.Context, task *actions_model.ActionTask) (map[string]*runnerv1.TaskNeed, error) {
if err := task.LoadAttributes(ctx); err != nil {
return nil, fmt.Errorf("LoadAttributes: %w", err)
}
if len(task.Job.Needs) == 0 {
return nil, nil
}
needs := map[string]struct{}{}
for _, v := range task.Job.Needs {
needs[v] = struct{}{}
}
jobs, _, err := actions_model.FindRunJobs(ctx, actions_model.FindRunJobOptions{RunID: task.Job.RunID})
if err != nil {
return nil, fmt.Errorf("FindRunJobs: %w", err)
}
ret := make(map[string]*runnerv1.TaskNeed, len(needs))
for _, job := range jobs {
if _, ok := needs[job.JobID]; !ok {
continue
}
if job.TaskID == 0 || !job.Status.IsDone() {
// it shouldn't happen unless the job has been rerun
continue
}
outputs := make(map[string]string)
got, err := actions_model.FindTaskOutputByTaskID(ctx, job.TaskID)
if err != nil {
return nil, fmt.Errorf("FindTaskOutputByTaskID: %w", err)
}
for _, v := range got {
outputs[v.OutputKey] = v.OutputValue
}
ret[job.JobID] = &runnerv1.TaskNeed{
Outputs: outputs,
Result: runnerv1.Result(job.Status),
}
}
return ret, nil
}

View File

@ -20,7 +20,7 @@ import (
// RenderMarkup renders markup text for the /markup and /markdown endpoints
func RenderMarkup(ctx *context.Context, mode, text, urlPrefix, filePath string, wiki bool) {
markupType := ""
var markupType string
relativePath := ""
if len(text) == 0 {

View File

@ -520,7 +520,7 @@ index 0000000..6bb8f39
Docker Pulls
+ cut off
+ cut off`
result, err = ParsePatch(setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, strings.NewReader(diff), "")
_, err = ParsePatch(setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, strings.NewReader(diff), "")
if err != nil {
t.Errorf("ParsePatch failed: %s", err)
}
@ -536,11 +536,10 @@ index 0000000..6bb8f39
Docker Pulls
+ cut off
+ cut off`
result, err = ParsePatch(setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, strings.NewReader(diff2), "")
_, err = ParsePatch(setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, strings.NewReader(diff2), "")
if err != nil {
t.Errorf("ParsePatch failed: %s", err)
}
println(result)
diff2a := `diff --git "a/A \\ B" b/A/B
--- "a/A \\ B"
@ -553,11 +552,10 @@ index 0000000..6bb8f39
Docker Pulls
+ cut off
+ cut off`
result, err = ParsePatch(setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, strings.NewReader(diff2a), "")
_, err = ParsePatch(setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, strings.NewReader(diff2a), "")
if err != nil {
t.Errorf("ParsePatch failed: %s", err)
}
println(result)
diff3 := `diff --git a/README.md b/README.md
--- a/README.md
@ -570,11 +568,10 @@ index 0000000..6bb8f39
Docker Pulls
+ cut off
+ cut off`
result, err = ParsePatch(setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, strings.NewReader(diff3), "")
_, err = ParsePatch(setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, strings.NewReader(diff3), "")
if err != nil {
t.Errorf("ParsePatch failed: %s", err)
}
println(result)
}
func setupDefaultDiff() *Diff {

View File

@ -20,7 +20,7 @@ func TestDeleteNotPassedAssignee(t *testing.T) {
// Fake issue with assignees
issue, err := issues_model.GetIssueWithAttrsByID(1)
assert.NoError(t, err)
assert.EqualValues(t, 1, len(issue.Assignees))
assert.Len(t, issue.Assignees, 1)
user1, err := user_model.GetUserByID(db.DefaultContext, 1) // This user is already assigned (see the definition in fixtures), so running UpdateAssignee should unassign him
assert.NoError(t, err)
@ -33,10 +33,10 @@ func TestDeleteNotPassedAssignee(t *testing.T) {
// Clean everyone
err = DeleteNotPassedAssignee(db.DefaultContext, issue, user1, []*user_model.User{})
assert.NoError(t, err)
assert.EqualValues(t, 0, len(issue.Assignees))
assert.Empty(t, issue.Assignees)
// Check they're gone
assert.NoError(t, issue.LoadAssignees(db.DefaultContext))
assert.EqualValues(t, 0, len(issue.Assignees))
assert.Empty(t, issue.Assignees)
assert.Empty(t, issue.Assignee)
}

View File

@ -37,7 +37,7 @@ func TestIssue_DeleteIssue(t *testing.T) {
issueIDs, err := issues_model.GetIssueIDsByRepoID(db.DefaultContext, 1)
assert.NoError(t, err)
assert.EqualValues(t, 5, len(issueIDs))
assert.Len(t, issueIDs, 5)
issue := &issues_model.Issue{
RepoID: 1,
@ -48,7 +48,7 @@ func TestIssue_DeleteIssue(t *testing.T) {
assert.NoError(t, err)
issueIDs, err = issues_model.GetIssueIDsByRepoID(db.DefaultContext, 1)
assert.NoError(t, err)
assert.EqualValues(t, 4, len(issueIDs))
assert.Len(t, issueIDs, 4)
// check attachment removal
attachments, err := repo_model.GetAttachmentsByIssueID(db.DefaultContext, 4)
@ -57,7 +57,7 @@ func TestIssue_DeleteIssue(t *testing.T) {
assert.NoError(t, err)
err = deleteIssue(db.DefaultContext, issue)
assert.NoError(t, err)
assert.EqualValues(t, 2, len(attachments))
assert.Len(t, attachments, 2)
for i := range attachments {
attachment, err := repo_model.GetAttachmentByUUID(db.DefaultContext, attachments[i].UUID)
assert.Error(t, err)

View File

@ -30,7 +30,7 @@ func TestCheckUnadoptedRepositories_Add(t *testing.T) {
}
assert.Equal(t, total, unadopted.index)
assert.Equal(t, end-start, len(unadopted.repositories))
assert.Len(t, unadopted.repositories, end-start)
}
func TestCheckUnadoptedRepositories(t *testing.T) {
@ -41,7 +41,7 @@ func TestCheckUnadoptedRepositories(t *testing.T) {
unadopted := &unadoptedRepositories{start: 0, end: 100}
err := checkUnadoptedRepositories(db.DefaultContext, "notauser", []string{"repo"}, unadopted)
assert.NoError(t, err)
assert.Equal(t, 0, len(unadopted.repositories))
assert.Empty(t, unadopted.repositories)
//
// Unadopted repository is returned
// Existing (adopted) repository is not returned
@ -59,7 +59,7 @@ func TestCheckUnadoptedRepositories(t *testing.T) {
unadopted = &unadoptedRepositories{start: 0, end: 100}
err = checkUnadoptedRepositories(db.DefaultContext, userName, []string{repoName}, unadopted)
assert.NoError(t, err)
assert.Equal(t, 0, len(unadopted.repositories))
assert.Empty(t, unadopted.repositories)
assert.Equal(t, 0, unadopted.index)
}

View File

@ -1,35 +1,33 @@
<footer role="group" aria-label="{{.locale.Tr "aria.footer"}}">
<div class="ui container">
<div class="ui left" role="contentinfo" aria-label="{{.locale.Tr "aria.footer.software"}}">
<a target="_blank" rel="noopener noreferrer" href="https://gitea.io">{{.locale.Tr "powered_by" "Gitea"}}</a>
{{if (or .ShowFooterVersion .PageIsAdmin)}}
{{.locale.Tr "version"}}:
{{if .IsAdmin}}
<a href="{{AppSubUrl}}/admin/config">{{AppVer}}</a>
{{else}}
{{AppVer}}
<div class="ui left" role="contentinfo" aria-label="{{.locale.Tr "aria.footer.software"}}">
<a target="_blank" rel="noopener noreferrer" href="https://gitea.io">{{.locale.Tr "powered_by" "Gitea"}}</a>
{{if (or .ShowFooterVersion .PageIsAdmin)}}
{{.locale.Tr "version"}}:
{{if .IsAdmin}}
<a href="{{AppSubUrl}}/admin/config">{{AppVer}}</a>
{{else}}
{{AppVer}}
{{end}}
{{end}}
{{if and .TemplateLoadTimes ShowFooterTemplateLoadTime}}
{{.locale.Tr "page"}}: <strong>{{LoadTimes .PageStartTime}}</strong>
{{.locale.Tr "template"}}{{if .TemplateName}} {{.TemplateName}}{{end}}: <strong>{{call .TemplateLoadTimes}}</strong>
{{end}}
</div>
<div class="ui right links" role="group" aria-label="{{.locale.Tr "aria.footer.links"}}">
{{if .ShowFooterBranding}}
<a target="_blank" rel="noopener noreferrer" href="https://github.com/go-gitea/gitea">{{svg "octicon-mark-github"}}<span class="sr-only">GitHub</span></a>
{{end}}
<div class="ui dropdown upward language">
<span>{{svg "octicon-globe"}} {{.locale.LangName}}</span>
<div class="menu language-menu">
{{range .AllLangs}}
<a lang="{{.Lang}}" data-url="{{AppSubUrl}}/?lang={{.Lang}}" class="item {{if eq $.locale.Lang .Lang}}active selected{{end}}">{{.Name}}</a>
{{end}}
{{end}}
{{if and .TemplateLoadTimes ShowFooterTemplateLoadTime}}
{{.locale.Tr "page"}}: <strong>{{LoadTimes .PageStartTime}}</strong>
{{.locale.Tr "template"}}{{if .TemplateName}} {{.TemplateName}}{{end}}: <strong>{{call .TemplateLoadTimes}}</strong>
{{end}}
</div>
<div class="ui right links" role="group" aria-label="{{.locale.Tr "aria.footer.links"}}">
{{if .ShowFooterBranding}}
<a target="_blank" rel="noopener noreferrer" href="https://github.com/go-gitea/gitea">{{svg "octicon-mark-github"}}<span class="sr-only">GitHub</span></a>
{{end}}
<div class="ui dropdown upward language">
<span>{{svg "octicon-globe"}} {{.locale.LangName}}</span>
<div class="menu language-menu">
{{range .AllLangs}}
<a lang="{{.Lang}}" data-url="{{AppSubUrl}}/?lang={{.Lang}}" class="item {{if eq $.locale.Lang .Lang}}active selected{{end}}">{{.Name}}</a>
{{end}}
</div>
</div>
<a href="{{AssetUrlPrefix}}/js/licenses.txt">{{.locale.Tr "licenses"}}</a>
{{if .EnableSwagger}}<a href="{{AppSubUrl}}/api/swagger">API</a>{{end}}
{{template "custom/extra_links_footer" .}}
</div>
<a href="{{AssetUrlPrefix}}/js/licenses.txt">{{.locale.Tr "licenses"}}</a>
{{if .EnableSwagger}}<a href="{{AppSubUrl}}/api/swagger">API</a>{{end}}
{{template "custom/extra_links_footer" .}}
</div>
</footer>

View File

@ -35,7 +35,7 @@
{{range $commit.Refs}}
{{$refGroup := .RefGroup}}
{{if eq $refGroup "pull"}}
{{if or (not $.HidePRRefs) (containGeneric $.SelectedBranches .Name)}}
{{if or (not $.HidePRRefs) (SliceUtils.Contains $.SelectedBranches .Name)}}
<!-- it is intentional to use the issues path rather than pulls; if it is a pull request you will get redirected -->
<a class="ui labelled icon button basic tiny gt-mr-2" href="{{$.RepoLink}}/{{if $.Repository.UnitEnabled $.Context $.UnitTypePullRequests}}pulls{{else}}issues{{end}}/{{.ShortName|PathEscape}}">
{{svg "octicon-git-pull-request" 16 "gt-mr-2"}}#{{.ShortName}}

View File

@ -217,7 +217,7 @@
{{end}}
{{if or (.Permission.CanRead $.UnitTypeWiki) (.Permission.CanRead $.UnitTypeExternalWiki)}}
<a class="{{if .PageIsWiki}}active {{end}}item" href="{{.RepoLink}}/wiki" {{if and (.Permission.CanRead $.UnitTypeExternalWiki) (not (HasPrefix ((.Repository.MustGetUnit $.Context $.UnitTypeExternalWiki).ExternalWikiConfig.ExternalWikiURL) (.Repository.Link)))}} target="_blank" rel="noopener noreferrer" {{end}}>
<a class="{{if .PageIsWiki}}active {{end}}item" href="{{.RepoLink}}/wiki" {{if and (.Permission.CanRead $.UnitTypeExternalWiki) (not (StringUtils.HasPrefix ((.Repository.MustGetUnit $.Context $.UnitTypeExternalWiki).ExternalWikiConfig.ExternalWikiURL) (.Repository.Link)))}} target="_blank" rel="noopener noreferrer" {{end}}>
{{svg "octicon-book"}} {{.locale.Tr "repo.wiki"}}
</a>
{{end}}

View File

@ -227,7 +227,7 @@
{{end}}
{{$previousExclusiveScope = $exclusiveScope}}
<div class="item issue-action" data-action="toggle" data-element-id="{{.ID}}" data-url="{{$.RepoLink}}/issues/labels">
{{if contain $.SelLabelIDs .ID}}{{if $exclusiveScope}}{{svg "octicon-dot-fill"}}{{else}}{{svg "octicon-check"}}{{end}}{{end}} {{RenderLabel $.Context .}}
{{if SliceUtils.Contains $.SelLabelIDs .ID}}{{if $exclusiveScope}}{{svg "octicon-dot-fill"}}{{else}}{{svg "octicon-check"}}{{end}}{{end}} {{RenderLabel $.Context .}}
</div>
{{end}}
</div>

View File

@ -65,7 +65,7 @@
<span class="info">{{.locale.Tr "repo.issues.filter_label_exclude" | Safe}}</span>
<a class="item" href="{{$.Link}}?q={{$.Keyword}}&type={{$.ViewType}}&sort={{$.SortType}}&state={{$.State}}&assignee={{$.AssigneeID}}&poster={{$.PosterID}}">{{.locale.Tr "repo.issues.filter_label_no_select"}}</a>
{{range .Labels}}
<a class="item label-filter-item" href="{{$.Link}}?q={{$.Keyword}}&type={{$.ViewType}}&sort={{$.SortType}}&state={{$.State}}&labels={{.QueryString}}&assignee={{$.AssigneeID}}&poster={{$.PosterID}}" data-label-id="{{.ID}}">{{if .IsExcluded}}{{svg "octicon-circle-slash"}}{{else if contain $.SelLabelIDs .ID}}{{svg "octicon-check"}}{{end}} {{RenderLabel $.Context .}}</a>
<a class="item label-filter-item" href="{{$.Link}}?q={{$.Keyword}}&type={{$.ViewType}}&sort={{$.SortType}}&state={{$.State}}&labels={{.QueryString}}&assignee={{$.AssigneeID}}&poster={{$.PosterID}}" data-label-id="{{.ID}}">{{if .IsExcluded}}{{svg "octicon-circle-slash"}}{{else if SliceUtils.Contains $.SelLabelIDs .ID}}{{svg "octicon-check"}}{{end}} {{RenderLabel $.Context .}}</a>
{{end}}
</div>
</div>
@ -171,7 +171,7 @@
<div class="menu">
{{range .Labels}}
<div class="item issue-action" data-action="toggle" data-element-id="{{.ID}}" data-url="{{$.RepoLink}}/issues/labels">
{{if contain $.SelLabelIDs .ID}}{{svg "octicon-check"}}{{end}} {{RenderLabel $.Context .}}
{{if SliceUtils.Contains $.SelLabelIDs .ID}}{{svg "octicon-check"}}{{end}} {{RenderLabel $.Context .}}
</div>
{{end}}
</div>

View File

@ -8,7 +8,7 @@
<div class="gt-f1 gt-p-3">
<a target="_blank" rel="noopener noreferrer" href="{{.DownloadURL}}" title='{{$.ctxData.locale.Tr "repo.issues.attachment.open_tab" .Name}}'>
{{if FilenameIsImage .Name}}
{{if not (containGeneric $.Content .UUID)}}
{{if not (StringUtils.Contains $.Content .UUID)}}
{{$hasThumbnails = true}}
{{end}}
{{svg "octicon-file"}}
@ -29,7 +29,7 @@
<div class="ui small thumbnails">
{{- range .Attachments -}}
{{if FilenameIsImage .Name}}
{{if not (containGeneric $.Content .UUID)}}
{{if not (StringUtils.Contains $.Content .UUID)}}
<a target="_blank" rel="noopener noreferrer" href="{{.DownloadURL}}">
<img alt="{{.Name}}" src="{{.DownloadURL}}" title='{{$.ctxData.locale.Tr "repo.issues.attachment.open_tab" .Name}}'>
</a>

View File

@ -92,14 +92,14 @@
{{if or .AllowlistUserIDs (and $.Owner.IsOrganization .AllowlistTeamIDs)}}
{{$userIDs := .AllowlistUserIDs}}
{{range $.Users}}
{{if contain $userIDs .ID}}
{{if SliceUtils.Contains $userIDs .ID}}
<a class="ui basic label" href="{{.HomeLink}}">{{avatar $.Context . 26}} {{.GetDisplayName}}</a>
{{end}}
{{end}}
{{if $.Owner.IsOrganization}}
{{$teamIDs := .AllowlistTeamIDs}}
{{range $.Teams}}
{{if contain $teamIDs .ID}}
{{if SliceUtils.Contains $teamIDs .ID}}
<a class="ui basic label" href="{{$.Owner.OrganisationLink}}/teams/{{PathEscape .LowerName}}">{{.Name}}</a>
{{end}}
{{end}}

View File

@ -1,15 +0,0 @@
#!/bin/sh
#
# An example hook script to check the commit log message taken by
# applypatch from an e-mail message.
#
# The hook should exit with non-zero status after issuing an
# appropriate message if it wants to stop the commit. The hook is
# allowed to edit the commit message file.
#
# To enable this hook, rename this file to "applypatch-msg".
. git-sh-setup
commitmsg="$(git rev-parse --git-path hooks/commit-msg)"
test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"}
:

View File

@ -1,24 +0,0 @@
#!/bin/sh
#
# An example hook script to check the commit log message.
# Called by "git commit" with one argument, the name of the file
# that has the commit message. The hook should exit with non-zero
# status after issuing an appropriate message if it wants to stop the
# commit. The hook is allowed to edit the commit message file.
#
# To enable this hook, rename this file to "commit-msg".
# Uncomment the below to add a Signed-off-by line to the message.
# Doing this in a hook is a bad idea in general, but the prepare-commit-msg
# hook is more suited to it.
#
# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1"
# This example catches duplicate Signed-off-by lines.
test "" = "$(grep '^Signed-off-by: ' "$1" |
sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || {
echo >&2 Duplicate Signed-off-by lines.
exit 1
}

View File

@ -1,174 +0,0 @@
#!/usr/bin/perl
use strict;
use warnings;
use IPC::Open2;
# An example hook script to integrate Watchman
# (https://facebook.github.io/watchman/) with git to speed up detecting
# new and modified files.
#
# The hook is passed a version (currently 2) and last update token
# formatted as a string and outputs to stdout a new update token and
# all files that have been modified since the update token. Paths must
# be relative to the root of the working tree and separated by a single NUL.
#
# To enable this hook, rename this file to "query-watchman" and set
# 'git config core.fsmonitor .git/hooks/query-watchman'
#
my ($version, $last_update_token) = @ARGV;
# Uncomment for debugging
# print STDERR "$0 $version $last_update_token\n";
# Check the hook interface version
if ($version ne 2) {
die "Unsupported query-fsmonitor hook version '$version'.\n" .
"Falling back to scanning...\n";
}
my $git_work_tree = get_working_dir();
my $retry = 1;
my $json_pkg;
eval {
require JSON::XS;
$json_pkg = "JSON::XS";
1;
} or do {
require JSON::PP;
$json_pkg = "JSON::PP";
};
launch_watchman();
sub launch_watchman {
my $o = watchman_query();
if (is_work_tree_watched($o)) {
output_result($o->{clock}, @{$o->{files}});
}
}
sub output_result {
my ($clockid, @files) = @_;
# Uncomment for debugging watchman output
# open (my $fh, ">", ".git/watchman-output.out");
# binmode $fh, ":utf8";
# print $fh "$clockid\n@files\n";
# close $fh;
binmode STDOUT, ":utf8";
print $clockid;
print "\0";
local $, = "\0";
print @files;
}
sub watchman_clock {
my $response = qx/watchman clock "$git_work_tree"/;
die "Failed to get clock id on '$git_work_tree'.\n" .
"Falling back to scanning...\n" if $? != 0;
return $json_pkg->new->utf8->decode($response);
}
sub watchman_query {
my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty')
or die "open2() failed: $!\n" .
"Falling back to scanning...\n";
# In the query expression below we're asking for names of files that
# changed since $last_update_token but not from the .git folder.
#
# To accomplish this, we're using the "since" generator to use the
# recency index to select candidate nodes and "fields" to limit the
# output to file names only. Then we're using the "expression" term to
# further constrain the results.
my $last_update_line = "";
if (substr($last_update_token, 0, 1) eq "c") {
$last_update_token = "\"$last_update_token\"";
$last_update_line = qq[\n"since": $last_update_token,];
}
my $query = <<" END";
["query", "$git_work_tree", {$last_update_line
"fields": ["name"],
"expression": ["not", ["dirname", ".git"]]
}]
END
# Uncomment for debugging the watchman query
# open (my $fh, ">", ".git/watchman-query.json");
# print $fh $query;
# close $fh;
print CHLD_IN $query;
close CHLD_IN;
my $response = do {local $/; <CHLD_OUT>};
# Uncomment for debugging the watch response
# open ($fh, ">", ".git/watchman-response.json");
# print $fh $response;
# close $fh;
die "Watchman: command returned no output.\n" .
"Falling back to scanning...\n" if $response eq "";
die "Watchman: command returned invalid output: $response\n" .
"Falling back to scanning...\n" unless $response =~ /^\{/;
return $json_pkg->new->utf8->decode($response);
}
sub is_work_tree_watched {
my ($output) = @_;
my $error = $output->{error};
if ($retry > 0 and $error and $error =~ m/unable to resolve root .* directory (.*) is not watched/) {
$retry--;
my $response = qx/watchman watch "$git_work_tree"/;
die "Failed to make watchman watch '$git_work_tree'.\n" .
"Falling back to scanning...\n" if $? != 0;
$output = $json_pkg->new->utf8->decode($response);
$error = $output->{error};
die "Watchman: $error.\n" .
"Falling back to scanning...\n" if $error;
# Uncomment for debugging watchman output
# open (my $fh, ">", ".git/watchman-output.out");
# close $fh;
# Watchman will always return all files on the first query so
# return the fast "everything is dirty" flag to git and do the
# Watchman query just to get it over with now so we won't pay
# the cost in git to look up each individual file.
my $o = watchman_clock();
$error = $output->{error};
die "Watchman: $error.\n" .
"Falling back to scanning...\n" if $error;
output_result($o->{clock}, ("/"));
$last_update_token = $o->{clock};
eval { launch_watchman() };
return 0;
}
die "Watchman: $error.\n" .
"Falling back to scanning...\n" if $error;
return 1;
}
sub get_working_dir {
my $working_dir;
if ($^O =~ 'msys' || $^O =~ 'cygwin') {
$working_dir = Win32::GetCwd();
$working_dir =~ tr/\\/\//;
} else {
require Cwd;
$working_dir = Cwd::cwd();
}
return $working_dir;
}

View File

@ -1,8 +0,0 @@
#!/bin/sh
#
# An example hook script to prepare a packed repository for use over
# dumb transports.
#
# To enable this hook, rename this file to "post-update".
exec git update-server-info

View File

@ -1,14 +0,0 @@
#!/bin/sh
#
# An example hook script to verify what is about to be committed
# by applypatch from an e-mail message.
#
# The hook should exit with non-zero status after issuing an
# appropriate message if it wants to stop the commit.
#
# To enable this hook, rename this file to "pre-applypatch".
. git-sh-setup
precommit="$(git rev-parse --git-path hooks/pre-commit)"
test -x "$precommit" && exec "$precommit" ${1+"$@"}
:

View File

@ -1,49 +0,0 @@
#!/bin/sh
#
# An example hook script to verify what is about to be committed.
# Called by "git commit" with no arguments. The hook should
# exit with non-zero status after issuing an appropriate message if
# it wants to stop the commit.
#
# To enable this hook, rename this file to "pre-commit".
if git rev-parse --verify HEAD >/dev/null 2>&1
then
against=HEAD
else
# Initial commit: diff against an empty tree object
against=$(git hash-object -t tree /dev/null)
fi
# If you want to allow non-ASCII filenames set this variable to true.
allownonascii=$(git config --type=bool hooks.allownonascii)
# Redirect output to stderr.
exec 1>&2
# Cross platform projects tend to avoid non-ASCII filenames; prevent
# them from being added to the repository. We exploit the fact that the
# printable range starts at the space character and ends with tilde.
if [ "$allownonascii" != "true" ] &&
# Note that the use of brackets around a tr range is ok here, (it's
# even required, for portability to Solaris 10's /usr/bin/tr), since
# the square bracket bytes happen to fall in the designated range.
test $(git diff --cached --name-only --diff-filter=A -z $against |
LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0
then
cat <<\EOF
Error: Attempt to add a non-ASCII file name.
This can cause problems if you want to work with people on other platforms.
To be portable it is advisable to rename the file.
If you know what you are doing you can disable this check using:
git config hooks.allownonascii true
EOF
exit 1
fi
# If there are whitespace errors, print the offending file names and fail.
exec git diff-index --check --cached $against --

View File

@ -1,13 +0,0 @@
#!/bin/sh
#
# An example hook script to verify what is about to be committed.
# Called by "git merge" with no arguments. The hook should
# exit with non-zero status after issuing an appropriate message to
# stderr if it wants to stop the merge commit.
#
# To enable this hook, rename this file to "pre-merge-commit".
. git-sh-setup
test -x "$GIT_DIR/hooks/pre-commit" &&
exec "$GIT_DIR/hooks/pre-commit"
:

View File

@ -1,53 +0,0 @@
#!/bin/sh
# An example hook script to verify what is about to be pushed. Called by "git
# push" after it has checked the remote status, but before anything has been
# pushed. If this script exits with a non-zero status nothing will be pushed.
#
# This hook is called with the following parameters:
#
# $1 -- Name of the remote to which the push is being done
# $2 -- URL to which the push is being done
#
# If pushing without using a named remote those arguments will be equal.
#
# Information about the commits which are being pushed is supplied as lines to
# the standard input in the form:
#
# <local ref> <local oid> <remote ref> <remote oid>
#
# This sample shows how to prevent push of commits where the log message starts
# with "WIP" (work in progress).
remote="$1"
url="$2"
zero=$(git hash-object --stdin </dev/null | tr '[0-9a-f]' '0')
while read local_ref local_oid remote_ref remote_oid
do
if test "$local_oid" = "$zero"
then
# Handle delete
:
else
if test "$remote_oid" = "$zero"
then
# New branch, examine all commits
range="$local_oid"
else
# Update to existing branch, examine new commits
range="$remote_oid..$local_oid"
fi
# Check for WIP commit
commit=$(git rev-list -n 1 --grep '^WIP' "$range")
if test -n "$commit"
then
echo >&2 "Found WIP commit in $local_ref, not pushing"
exit 1
fi
fi
done
exit 0

View File

@ -1,169 +0,0 @@
#!/bin/sh
#
# Copyright (c) 2006, 2008 Junio C Hamano
#
# The "pre-rebase" hook is run just before "git rebase" starts doing
# its job, and can prevent the command from running by exiting with
# non-zero status.
#
# The hook is called with the following parameters:
#
# $1 -- the upstream the series was forked from.
# $2 -- the branch being rebased (or empty when rebasing the current branch).
#
# This sample shows how to prevent topic branches that are already
# merged to 'next' branch from getting rebased, because allowing it
# would result in rebasing already published history.
publish=next
basebranch="$1"
if test "$#" = 2
then
topic="refs/heads/$2"
else
topic=`git symbolic-ref HEAD` ||
exit 0 ;# we do not interrupt rebasing detached HEAD
fi
case "$topic" in
refs/heads/??/*)
;;
*)
exit 0 ;# we do not interrupt others.
;;
esac
# Now we are dealing with a topic branch being rebased
# on top of master. Is it OK to rebase it?
# Does the topic really exist?
git show-ref -q "$topic" || {
echo >&2 "No such branch $topic"
exit 1
}
# Is topic fully merged to master?
not_in_master=`git rev-list --pretty=oneline ^master "$topic"`
if test -z "$not_in_master"
then
echo >&2 "$topic is fully merged to master; better remove it."
exit 1 ;# we could allow it, but there is no point.
fi
# Is topic ever merged to next? If so you should not be rebasing it.
only_next_1=`git rev-list ^master "^$topic" ${publish} | sort`
only_next_2=`git rev-list ^master ${publish} | sort`
if test "$only_next_1" = "$only_next_2"
then
not_in_topic=`git rev-list "^$topic" master`
if test -z "$not_in_topic"
then
echo >&2 "$topic is already up to date with master"
exit 1 ;# we could allow it, but there is no point.
else
exit 0
fi
else
not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"`
/usr/bin/perl -e '
my $topic = $ARGV[0];
my $msg = "* $topic has commits already merged to public branch:\n";
my (%not_in_next) = map {
/^([0-9a-f]+) /;
($1 => 1);
} split(/\n/, $ARGV[1]);
for my $elem (map {
/^([0-9a-f]+) (.*)$/;
[$1 => $2];
} split(/\n/, $ARGV[2])) {
if (!exists $not_in_next{$elem->[0]}) {
if ($msg) {
print STDERR $msg;
undef $msg;
}
print STDERR " $elem->[1]\n";
}
}
' "$topic" "$not_in_next" "$not_in_master"
exit 1
fi
<<\DOC_END
This sample hook safeguards topic branches that have been
published from being rewound.
The workflow assumed here is:
* Once a topic branch forks from "master", "master" is never
merged into it again (either directly or indirectly).
* Once a topic branch is fully cooked and merged into "master",
it is deleted. If you need to build on top of it to correct
earlier mistakes, a new topic branch is created by forking at
the tip of the "master". This is not strictly necessary, but
it makes it easier to keep your history simple.
* Whenever you need to test or publish your changes to topic
branches, merge them into "next" branch.
The script, being an example, hardcodes the publish branch name
to be "next", but it is trivial to make it configurable via
$GIT_DIR/config mechanism.
With this workflow, you would want to know:
(1) ... if a topic branch has ever been merged to "next". Young
topic branches can have stupid mistakes you would rather
clean up before publishing, and things that have not been
merged into other branches can be easily rebased without
affecting other people. But once it is published, you would
not want to rewind it.
(2) ... if a topic branch has been fully merged to "master".
Then you can delete it. More importantly, you should not
build on top of it -- other people may already want to
change things related to the topic as patches against your
"master", so if you need further changes, it is better to
fork the topic (perhaps with the same name) afresh from the
tip of "master".
Let's look at this example:
o---o---o---o---o---o---o---o---o---o "next"
/ / / /
/ a---a---b A / /
/ / / /
/ / c---c---c---c B /
/ / / \ /
/ / / b---b C \ /
/ / / / \ /
---o---o---o---o---o---o---o---o---o---o---o "master"
A, B and C are topic branches.
* A has one fix since it was merged up to "next".
* B has finished. It has been fully merged up to "master" and "next",
and is ready to be deleted.
* C has not merged to "next" at all.
We would want to allow C to be rebased, refuse A, and encourage
B to be deleted.
To compute (1):
git rev-list ^master ^topic next
git rev-list ^master next
if these match, topic has not merged in next at all.
To compute (2):
git rev-list master..topic
if this is empty, it is fully merged to "master".
DOC_END

View File

@ -1,24 +0,0 @@
#!/bin/sh
#
# An example hook script to make use of push options.
# The example simply echoes all push options that start with 'echoback='
# and rejects all pushes when the "reject" push option is used.
#
# To enable this hook, rename this file to "pre-receive".
if test -n "$GIT_PUSH_OPTION_COUNT"
then
i=0
while test "$i" -lt "$GIT_PUSH_OPTION_COUNT"
do
eval "value=\$GIT_PUSH_OPTION_$i"
case "$value" in
echoback=*)
echo "echo from the pre-receive-hook: ${value#*=}" >&2
;;
reject)
exit 1
esac
i=$((i + 1))
done
fi
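
The sample pre-receive hook above reads push options from the GIT_PUSH_OPTION_COUNT and GIT_PUSH_OPTION_<n> environment variables that Git exports to the hook. A minimal Go sketch of the same loop, for illustration only (the variable names come from the script; everything else is an assumption):

// Sketch only: iterate over Git push options the way the sample pre-receive hook does.
package main

import (
	"fmt"
	"os"
	"strconv"
	"strings"
)

func main() {
	count, err := strconv.Atoi(os.Getenv("GIT_PUSH_OPTION_COUNT"))
	if err != nil || count == 0 {
		return // no push options supplied
	}
	for i := 0; i < count; i++ {
		value := os.Getenv(fmt.Sprintf("GIT_PUSH_OPTION_%d", i))
		switch {
		case strings.HasPrefix(value, "echoback="):
			fmt.Fprintf(os.Stderr, "echo from the pre-receive-hook: %s\n", strings.TrimPrefix(value, "echoback="))
		case value == "reject":
			os.Exit(1)
		}
	}
}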

View File

@ -1,42 +0,0 @@
#!/bin/sh
#
# An example hook script to prepare the commit log message.
# Called by "git commit" with the name of the file that has the
# commit message, followed by the description of the commit
# message's source. The hook's purpose is to edit the commit
# message file. If the hook fails with a non-zero status,
# the commit is aborted.
#
# To enable this hook, rename this file to "prepare-commit-msg".
# This hook includes three examples. The first one removes the
# "# Please enter the commit message..." help message.
#
# The second includes the output of "git diff --name-status -r"
# into the message, just before the "git status" output. It is
# commented because it doesn't cope with --amend or with squashed
# commits.
#
# The third example adds a Signed-off-by line to the message, that can
# still be edited. This is rarely a good idea.
COMMIT_MSG_FILE=$1
COMMIT_SOURCE=$2
SHA1=$3
/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE"
# case "$COMMIT_SOURCE,$SHA1" in
# ,|template,)
# /usr/bin/perl -i.bak -pe '
# print "\n" . `git diff --cached --name-status -r`
# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;;
# *) ;;
# esac
# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE"
# if test -z "$COMMIT_SOURCE"
# then
# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE"
# fi

View File

@ -1,78 +0,0 @@
#!/bin/sh
# An example hook script to update a checked-out tree on a git push.
#
# This hook is invoked by git-receive-pack(1) when it reacts to git
# push and updates reference(s) in its repository, and when the push
# tries to update the branch that is currently checked out and the
# receive.denyCurrentBranch configuration variable is set to
# updateInstead.
#
# By default, such a push is refused if the working tree and the index
# of the remote repository has any difference from the currently
# checked out commit; when both the working tree and the index match
# the current commit, they are updated to match the newly pushed tip
# of the branch. This hook is to be used to override the default
# behaviour; however the code below reimplements the default behaviour
# as a starting point for convenient modification.
#
# The hook receives the commit with which the tip of the current
# branch is going to be updated:
commit=$1
# It can exit with a non-zero status to refuse the push (when it does
# so, it must not modify the index or the working tree).
die () {
echo >&2 "$*"
exit 1
}
# Or it can make any necessary changes to the working tree and to the
# index to bring them to the desired state when the tip of the current
# branch is updated to the new commit, and exit with a zero status.
#
# For example, the hook can simply run git read-tree -u -m HEAD "$1"
# in order to emulate git fetch that is run in the reverse direction
# with git push, as the two-tree form of git read-tree -u -m is
# essentially the same as git switch or git checkout that switches
# branches while keeping the local changes in the working tree that do
# not interfere with the difference between the branches.
# The below is a more-or-less exact translation to shell of the C code
# for the default behaviour for git's push-to-checkout hook defined in
# the push_to_deploy() function in builtin/receive-pack.c.
#
# Note that the hook will be executed from the repository directory,
# not from the working tree, so if you want to perform operations on
# the working tree, you will have to adapt your code accordingly, e.g.
# by adding "cd .." or using relative paths.
if ! git update-index -q --ignore-submodules --refresh
then
die "Up-to-date check failed"
fi
if ! git diff-files --quiet --ignore-submodules --
then
die "Working directory has unstaged changes"
fi
# This is a rough translation of:
#
# head_has_history() ? "HEAD" : EMPTY_TREE_SHA1_HEX
if git cat-file -e HEAD 2>/dev/null
then
head=HEAD
else
head=$(git hash-object -t tree --stdin </dev/null)
fi
if ! git diff-index --quiet --cached --ignore-submodules $head --
then
die "Working directory has staged changes"
fi
if ! git read-tree -u -m "$commit"
then
die "Could not update working tree to new HEAD"
fi

View File

@ -1,128 +0,0 @@
#!/bin/sh
#
# An example hook script to block unannotated tags from entering.
# Called by "git receive-pack" with arguments: refname sha1-old sha1-new
#
# To enable this hook, rename this file to "update".
#
# Config
# ------
# hooks.allowunannotated
# This boolean sets whether unannotated tags will be allowed into the
# repository. By default they won't be.
# hooks.allowdeletetag
# This boolean sets whether deleting tags will be allowed in the
# repository. By default they won't be.
# hooks.allowmodifytag
# This boolean sets whether a tag may be modified after creation. By default
# it won't be.
# hooks.allowdeletebranch
# This boolean sets whether deleting branches will be allowed in the
# repository. By default they won't be.
# hooks.denycreatebranch
# This boolean sets whether remotely creating branches will be denied
# in the repository. By default this is allowed.
#
# --- Command line
refname="$1"
oldrev="$2"
newrev="$3"
# --- Safety check
if [ -z "$GIT_DIR" ]; then
echo "Don't run this script from the command line." >&2
echo " (if you want, you could supply GIT_DIR then run" >&2
echo " $0 <ref> <oldrev> <newrev>)" >&2
exit 1
fi
if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then
echo "usage: $0 <ref> <oldrev> <newrev>" >&2
exit 1
fi
# --- Config
allowunannotated=$(git config --type=bool hooks.allowunannotated)
allowdeletebranch=$(git config --type=bool hooks.allowdeletebranch)
denycreatebranch=$(git config --type=bool hooks.denycreatebranch)
allowdeletetag=$(git config --type=bool hooks.allowdeletetag)
allowmodifytag=$(git config --type=bool hooks.allowmodifytag)
# check for no description
projectdesc=$(sed -e '1q' "$GIT_DIR/description")
case "$projectdesc" in
"Unnamed repository"* | "")
echo "*** Project description file hasn't been set" >&2
exit 1
;;
esac
# --- Check types
# if $newrev is 0000...0000, it's a commit to delete a ref.
zero=$(git hash-object --stdin </dev/null | tr '[0-9a-f]' '0')
if [ "$newrev" = "$zero" ]; then
newrev_type=delete
else
newrev_type=$(git cat-file -t $newrev)
fi
case "$refname","$newrev_type" in
refs/tags/*,commit)
# un-annotated tag
short_refname=${refname##refs/tags/}
if [ "$allowunannotated" != "true" ]; then
echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2
echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2
exit 1
fi
;;
refs/tags/*,delete)
# delete tag
if [ "$allowdeletetag" != "true" ]; then
echo "*** Deleting a tag is not allowed in this repository" >&2
exit 1
fi
;;
refs/tags/*,tag)
# annotated tag
if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1
then
echo "*** Tag '$refname' already exists." >&2
echo "*** Modifying a tag is not allowed in this repository." >&2
exit 1
fi
;;
refs/heads/*,commit)
# branch
if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then
echo "*** Creating a branch is not allowed in this repository" >&2
exit 1
fi
;;
refs/heads/*,delete)
# delete branch
if [ "$allowdeletebranch" != "true" ]; then
echo "*** Deleting a branch is not allowed in this repository" >&2
exit 1
fi
;;
refs/remotes/*,commit)
# tracking branch
;;
refs/remotes/*,delete)
# delete tracking branch
if [ "$allowdeletebranch" != "true" ]; then
echo "*** Deleting a tracking branch is not allowed in this repository" >&2
exit 1
fi
;;
*)
# Anything else (is there anything else?)
echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2
exit 1
;;
esac
# --- Finished
exit 0

View File

@ -123,7 +123,7 @@ func TestAPIListUsers(t *testing.T) {
}
assert.True(t, found)
numberOfUsers := unittest.GetCount(t, &user_model.User{}, "type = 0")
assert.Equal(t, numberOfUsers, len(users))
assert.Len(t, users, numberOfUsers)
}
func TestAPIListUsersNotLoggedIn(t *testing.T) {

View File

@ -68,7 +68,7 @@ func TestAPIListCommentAttachments(t *testing.T) {
var apiAttachments []*api.Attachment
DecodeJSON(t, resp, &apiAttachments)
expectedCount := unittest.GetCount(t, &repo_model.Attachment{CommentID: comment.ID})
assert.EqualValues(t, expectedCount, len(apiAttachments))
assert.Len(t, apiAttachments, expectedCount)
unittest.AssertExistsAndLoadBean(t, &repo_model.Attachment{ID: apiAttachments[0].ID, CommentID: comment.ID})
}

View File

@ -85,7 +85,7 @@ func TestAPIListIssueComments(t *testing.T) {
DecodeJSON(t, resp, &comments)
expectedCount := unittest.GetCount(t, &issues_model.Comment{IssueID: issue.ID},
unittest.Cond("type = ?", issues_model.CommentTypeComment))
assert.EqualValues(t, expectedCount, len(comments))
assert.Len(t, comments, expectedCount)
}
func TestAPICreateComment(t *testing.T) {
@ -196,5 +196,5 @@ func TestAPIListIssueTimeline(t *testing.T) {
var comments []*api.TimelineComment
DecodeJSON(t, resp, &comments)
expectedCount := unittest.GetCount(t, &issues_model.Comment{IssueID: issue.ID})
assert.EqualValues(t, expectedCount, len(comments))
assert.Len(t, comments, expectedCount)
}

View File

@ -48,5 +48,5 @@ func TestAPIReposValidateDefaultIssueConfig(t *testing.T) {
DecodeJSON(t, resp, &issueConfigValidation)
assert.True(t, issueConfigValidation.Valid)
assert.Equal(t, issueConfigValidation.Message, "")
assert.Empty(t, issueConfigValidation.Message)
}
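
Several hunks in this commit replace hand-rolled equality checks with testify's dedicated assertions, which read better and produce clearer failure output. A small sketch of the idiom, assuming the github.com/stretchr/testify/assert package the tests above already use:

package demo

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestAssertionIdioms(t *testing.T) {
	users := []string{"a", "b", "c"}
	message := ""
	merged := false

	assert.Len(t, users, 3)  // instead of assert.Equal(t, 3, len(users))
	assert.Empty(t, message) // instead of assert.Equal(t, message, "")
	assert.False(t, merged)  // instead of assert.Equal(t, false, merged)
}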

View File

@ -120,7 +120,7 @@ description: ` + packageDescription
assert.NoError(t, err)
assert.Equal(t, int64(len(content)), pb.Size)
resp = uploadFile(t, result.URL, content, http.StatusBadRequest)
_ = uploadFile(t, result.URL, content, http.StatusBadRequest)
})
t.Run("Download", func(t *testing.T) {

View File

@ -170,8 +170,8 @@ func TestAPIPullReview(t *testing.T) {
DecodeJSON(t, resp, &commentReview)
assert.EqualValues(t, "COMMENT", commentReview.State)
assert.EqualValues(t, 2, commentReview.CodeCommentsCount)
assert.EqualValues(t, "", commentReview.Body)
assert.EqualValues(t, false, commentReview.Dismissed)
assert.Empty(t, commentReview.Body)
assert.False(t, commentReview.Dismissed)
// test CreatePullReview Comment with body but without comments
commentBody := "This is a body of the comment."
@ -186,7 +186,7 @@ func TestAPIPullReview(t *testing.T) {
assert.EqualValues(t, "COMMENT", commentReview.State)
assert.EqualValues(t, 0, commentReview.CodeCommentsCount)
assert.EqualValues(t, commentBody, commentReview.Body)
assert.EqualValues(t, false, commentReview.Dismissed)
assert.False(t, commentReview.Dismissed)
// test CreatePullReview Comment without body and no comments
req = NewRequestWithJSON(t, http.MethodPost, fmt.Sprintf("/api/v1/repos/%s/%s/pulls/%d/reviews?token=%s", repo.OwnerName, repo.Name, pullIssue.Index, token), &api.CreatePullReviewOptions{

View File

@ -49,12 +49,12 @@ func TestAPIViewPulls(t *testing.T) {
t.Run(fmt.Sprintf("APIGetPullFiles_%d", pull.ID),
doAPIGetPullFiles(ctx, pull, func(t *testing.T, files []*api.ChangedFile) {
if assert.Len(t, files, 1) {
assert.EqualValues(t, "File-WoW", files[0].Filename)
assert.EqualValues(t, "", files[0].PreviousFilename)
assert.Equal(t, "File-WoW", files[0].Filename)
assert.Empty(t, files[0].PreviousFilename)
assert.EqualValues(t, 1, files[0].Additions)
assert.EqualValues(t, 1, files[0].Changes)
assert.EqualValues(t, 0, files[0].Deletions)
assert.EqualValues(t, "added", files[0].Status)
assert.Equal(t, "added", files[0].Status)
}
}))
}

View File

@ -32,21 +32,21 @@ func TestAPIDownloadArchive(t *testing.T) {
resp := MakeRequest(t, NewRequest(t, "GET", link.String()), http.StatusOK)
bs, err := io.ReadAll(resp.Body)
assert.NoError(t, err)
assert.EqualValues(t, 320, len(bs))
assert.Len(t, bs, 320)
link, _ = url.Parse(fmt.Sprintf("/api/v1/repos/%s/%s/archive/master.tar.gz", user2.Name, repo.Name))
link.RawQuery = url.Values{"token": {token}}.Encode()
resp = MakeRequest(t, NewRequest(t, "GET", link.String()), http.StatusOK)
bs, err = io.ReadAll(resp.Body)
assert.NoError(t, err)
assert.EqualValues(t, 266, len(bs))
assert.Len(t, bs, 266)
link, _ = url.Parse(fmt.Sprintf("/api/v1/repos/%s/%s/archive/master.bundle", user2.Name, repo.Name))
link.RawQuery = url.Values{"token": {token}}.Encode()
resp = MakeRequest(t, NewRequest(t, "GET", link.String()), http.StatusOK)
bs, err = io.ReadAll(resp.Body)
assert.NoError(t, err)
assert.EqualValues(t, 382, len(bs))
assert.Len(t, bs, 382)
link, _ = url.Parse(fmt.Sprintf("/api/v1/repos/%s/%s/archive/master", user2.Name, repo.Name))
link.RawQuery = url.Values{"token": {token}}.Encode()

View File

@ -194,10 +194,10 @@ func TestAPIRepoEdit(t *testing.T) {
// check repo1 was written to database
repo1edited = unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
repo1editedOption = getRepoEditOptionFromRepo(repo1edited)
assert.Equal(t, *repo1editedOption.HasIssues, true)
assert.True(t, *repo1editedOption.HasIssues)
assert.Nil(t, repo1editedOption.ExternalTracker)
assert.Equal(t, *repo1editedOption.InternalTracker, *repoEditOption.InternalTracker)
assert.Equal(t, *repo1editedOption.HasWiki, true)
assert.True(t, *repo1editedOption.HasWiki)
assert.Nil(t, repo1editedOption.ExternalWiki)
// Test editing repo1 to use external issue and wiki
@ -216,9 +216,9 @@ func TestAPIRepoEdit(t *testing.T) {
// check repo1 was written to database
repo1edited = unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
repo1editedOption = getRepoEditOptionFromRepo(repo1edited)
assert.Equal(t, *repo1editedOption.HasIssues, true)
assert.True(t, *repo1editedOption.HasIssues)
assert.Equal(t, *repo1editedOption.ExternalTracker, *repoEditOption.ExternalTracker)
assert.Equal(t, *repo1editedOption.HasWiki, true)
assert.True(t, *repo1editedOption.HasWiki)
assert.Equal(t, *repo1editedOption.ExternalWiki, *repoEditOption.ExternalWiki)
repoEditOption.ExternalTracker.ExternalTrackerStyle = "regexp"
@ -229,7 +229,7 @@ func TestAPIRepoEdit(t *testing.T) {
assert.NotNil(t, repo)
repo1edited = unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
repo1editedOption = getRepoEditOptionFromRepo(repo1edited)
assert.Equal(t, *repo1editedOption.HasIssues, true)
assert.True(t, *repo1editedOption.HasIssues)
assert.Equal(t, *repo1editedOption.ExternalTracker, *repoEditOption.ExternalTracker)
// Do some tests with invalid URL for external tracker and wiki
@ -259,9 +259,9 @@ func TestAPIRepoEdit(t *testing.T) {
repo1edited = unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
repo1editedOption = getRepoEditOptionFromRepo(repo1edited)
assert.Equal(t, *repo1editedOption.Description, *repoEditOption.Description)
assert.Equal(t, *repo1editedOption.HasIssues, true)
assert.True(t, *repo1editedOption.HasIssues)
assert.NotNil(t, *repo1editedOption.ExternalTracker)
assert.Equal(t, *repo1editedOption.HasWiki, true)
assert.True(t, *repo1editedOption.HasWiki)
assert.NotNil(t, *repo1editedOption.ExternalWiki)
// reset repo in db

View File

@ -183,18 +183,17 @@ func TestAPISearchRepo(t *testing.T) {
for _, testCase := range testCases {
t.Run(testCase.name, func(t *testing.T) {
for userToLogin, expected := range testCase.expectedResults {
var session *TestSession
var testName string
var userID int64
var token string
if userToLogin != nil && userToLogin.ID > 0 {
testName = fmt.Sprintf("LoggedUser%d", userToLogin.ID)
session = loginUser(t, userToLogin.Name)
session := loginUser(t, userToLogin.Name)
token = getTokenForLoggedInUser(t, session)
userID = userToLogin.ID
} else {
testName = "AnonymousUser"
session = emptyTestSession(t)
_ = emptyTestSession(t)
}
t.Run(testName, func(t *testing.T) {
@ -413,7 +412,7 @@ func TestAPIMirrorSyncNonMirrorRepo(t *testing.T) {
req := NewRequest(t, "GET", "/api/v1/repos/user2/repo1")
resp := MakeRequest(t, req, http.StatusOK)
DecodeJSON(t, resp, &repo)
assert.EqualValues(t, false, repo.Mirror)
assert.False(t, repo.Mirror)
req = NewRequestf(t, "POST", "/api/v1/repos/user2/repo1/mirror-sync?token=%s", token)
resp = MakeRequest(t, req, http.StatusBadRequest)
@ -470,7 +469,7 @@ func testAPIRepoCreateConflict(t *testing.T, u *url.URL) {
resp := httpContext.Session.MakeRequest(t, req, http.StatusConflict)
respJSON := map[string]string{}
DecodeJSON(t, resp, &respJSON)
assert.Equal(t, respJSON["message"], "The repository with the same name already exists.")
assert.Equal(t, "The repository with the same name already exists.", respJSON["message"])
})
}
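
Other hunks above also reorder the arguments of assert.Equal so the expected value comes first; testify labels the first argument "expected" and the second "actual" in its failure output, so the order matters for readable diffs. A short sketch with a hypothetical helper:

package demo

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

// fetchMessage stands in for whatever the test actually queried (hypothetical helper).
func fetchMessage() string {
	return "The repository with the same name already exists."
}

func TestExpectedFirst(t *testing.T) {
	got := fetchMessage()
	// assert.Equal(t, expected, actual): put the value you expect first.
	assert.Equal(t, "The repository with the same name already exists.", got)
}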

View File

@ -203,7 +203,7 @@ func TestLDAPAuthChange(t *testing.T) {
host, _ := doc.Find(`input[name="host"]`).Attr("value")
assert.Equal(t, host, getLDAPServerHost())
binddn, _ := doc.Find(`input[name="bind_dn"]`).Attr("value")
assert.Equal(t, binddn, "uid=gitea,ou=service,dc=planetexpress,dc=com")
assert.Equal(t, "uid=gitea,ou=service,dc=planetexpress,dc=com", binddn)
req = NewRequestWithValues(t, "POST", href, buildAuthSourceLDAPPayload(csrf, "", "", "", "off"))
session.MakeRequest(t, req, http.StatusSeeOther)
@ -214,7 +214,7 @@ func TestLDAPAuthChange(t *testing.T) {
host, _ = doc.Find(`input[name="host"]`).Attr("value")
assert.Equal(t, host, getLDAPServerHost())
binddn, _ = doc.Find(`input[name="bind_dn"]`).Attr("value")
assert.Equal(t, binddn, "uid=gitea,ou=service,dc=planetexpress,dc=com")
assert.Equal(t, "uid=gitea,ou=service,dc=planetexpress,dc=com", binddn)
}
func TestLDAPUserSync(t *testing.T) {
@ -397,8 +397,8 @@ func TestLDAPGroupTeamSyncAddMember(t *testing.T) {
assert.NoError(t, err)
if user.Name == "fry" || user.Name == "leela" || user.Name == "bender" {
// assert members of LDAP group "cn=ship_crew" are added to mapped teams
assert.Equal(t, len(usersOrgs), 1, "User [%s] should be member of one organization", user.Name)
assert.Equal(t, usersOrgs[0].Name, "org26", "Membership should be added to the right organization")
assert.Len(t, usersOrgs, 1, "User [%s] should be member of one organization", user.Name)
assert.Equal(t, "org26", usersOrgs[0].Name, "Membership should be added to the right organization")
isMember, err := organization.IsTeamMember(db.DefaultContext, usersOrgs[0].ID, team.ID, user.ID)
assert.NoError(t, err)
assert.True(t, isMember, "Membership should be added to the right team")

View File

@ -19,5 +19,5 @@ func TestCORSNotSet(t *testing.T) {
resp := session.MakeRequest(t, req, http.StatusOK)
assert.Equal(t, resp.Code, http.StatusOK)
corsHeader := resp.Header().Get("Access-Control-Allow-Origin")
assert.Equal(t, corsHeader, "", "Access-Control-Allow-Origin: generated header should match") // header not set
assert.Empty(t, corsHeader, "Access-Control-Allow-Origin: generated header should match") // header not set
}

View File

@ -49,7 +49,7 @@ func TestEmptyRepoAddFile(t *testing.T) {
req := NewRequest(t, "GET", "/user30/empty/_new/"+setting.Repository.DefaultBranch)
resp := session.MakeRequest(t, req, http.StatusOK)
doc := NewHTMLParser(t, resp.Body).Find(`input[name="commit_choice"]`)
assert.Equal(t, "", doc.AttrOr("checked", "_no_"))
assert.Empty(t, doc.AttrOr("checked", "_no_"))
req = NewRequestWithValues(t, "POST", "/user30/empty/_new/"+setting.Repository.DefaultBranch, map[string]string{
"_csrf": GetCSRF(t, session, "/user/settings"),
"commit_choice": "direct",
@ -76,7 +76,7 @@ func TestEmptyRepoUploadFile(t *testing.T) {
req := NewRequest(t, "GET", "/user30/empty/_new/"+setting.Repository.DefaultBranch)
resp := session.MakeRequest(t, req, http.StatusOK)
doc := NewHTMLParser(t, resp.Body).Find(`input[name="commit_choice"]`)
assert.Equal(t, "", doc.AttrOr("checked", "_no_"))
assert.Empty(t, doc.AttrOr("checked", "_no_"))
body := &bytes.Buffer{}
mpForm := multipart.NewWriter(body)

View File

@ -763,7 +763,7 @@ func doCreateAgitFlowPull(dstPath string, ctx *APITestContext, baseBranch, headB
return
}
assert.Equal(t, "user2/"+headBranch, pr1.HeadBranch)
assert.Equal(t, false, prMsg.HasMerged)
assert.False(t, prMsg.HasMerged)
assert.Contains(t, "Testing commit 1", prMsg.Body)
assert.Equal(t, commit, prMsg.Head.Sha)
@ -785,7 +785,7 @@ func doCreateAgitFlowPull(dstPath string, ctx *APITestContext, baseBranch, headB
return
}
assert.Equal(t, "user2/test/"+headBranch, pr2.HeadBranch)
assert.Equal(t, false, prMsg.HasMerged)
assert.False(t, prMsg.HasMerged)
})
if pr1 == nil || pr2 == nil {
@ -829,7 +829,7 @@ func doCreateAgitFlowPull(dstPath string, ctx *APITestContext, baseBranch, headB
if !assert.NoError(t, err) {
return
}
assert.Equal(t, false, prMsg.HasMerged)
assert.False(t, prMsg.HasMerged)
assert.Equal(t, commit, prMsg.Head.Sha)
_, _, err = git.NewCommand(git.DefaultContext, "push", "origin").AddDynamicArguments("HEAD:refs/for/master/test/" + headBranch).RunStdString(&git.RunOpts{Dir: dstPath})
@ -841,7 +841,7 @@ func doCreateAgitFlowPull(dstPath string, ctx *APITestContext, baseBranch, headB
if !assert.NoError(t, err) {
return
}
assert.Equal(t, false, prMsg.HasMerged)
assert.False(t, prMsg.HasMerged)
assert.Equal(t, commit, prMsg.Head.Sha)
})
t.Run("Merge", doAPIMergePullRequest(*ctx, ctx.Username, ctx.Reponame, pr1.Index))

View File

@ -414,7 +414,7 @@ func TestConflictChecking(t *testing.T) {
assert.NoError(t, err)
// Ensure conflictedFiles is populated.
assert.Equal(t, 1, len(conflictingPR.ConflictedFiles))
assert.Len(t, conflictingPR.ConflictedFiles, 1)
// Check if status is correct.
assert.Equal(t, issues_model.PullRequestStatusConflict, conflictingPR.Status)
// Ensure that mergeable returns false

View File

@ -376,7 +376,7 @@ func TestMarkDownReadmeImage(t *testing.T) {
htmlDoc := NewHTMLParser(t, resp.Body)
src, exists := htmlDoc.doc.Find(`.markdown img`).Attr("src")
assert.True(t, exists, "Image not found in README")
assert.Equal(t, src, "/user2/repo1/media/branch/home-md-img-check/test-fake-img.jpg")
assert.Equal(t, "/user2/repo1/media/branch/home-md-img-check/test-fake-img.jpg", src)
req = NewRequest(t, "GET", "/user2/repo1/src/branch/home-md-img-check/README.md")
resp = session.MakeRequest(t, req, http.StatusOK)
@ -384,7 +384,7 @@ func TestMarkDownReadmeImage(t *testing.T) {
htmlDoc = NewHTMLParser(t, resp.Body)
src, exists = htmlDoc.doc.Find(`.markdown img`).Attr("src")
assert.True(t, exists, "Image not found in markdown file")
assert.Equal(t, src, "/user2/repo1/media/branch/home-md-img-check/test-fake-img.jpg")
assert.Equal(t, "/user2/repo1/media/branch/home-md-img-check/test-fake-img.jpg", src)
}
func TestMarkDownReadmeImageSubfolder(t *testing.T) {
@ -399,7 +399,7 @@ func TestMarkDownReadmeImageSubfolder(t *testing.T) {
htmlDoc := NewHTMLParser(t, resp.Body)
src, exists := htmlDoc.doc.Find(`.markdown img`).Attr("src")
assert.True(t, exists, "Image not found in README")
assert.Equal(t, src, "/user2/repo1/media/branch/sub-home-md-img-check/docs/test-fake-img.jpg")
assert.Equal(t, "/user2/repo1/media/branch/sub-home-md-img-check/docs/test-fake-img.jpg", src)
req = NewRequest(t, "GET", "/user2/repo1/src/branch/sub-home-md-img-check/docs/README.md")
resp = session.MakeRequest(t, req, http.StatusOK)
@ -407,5 +407,5 @@ func TestMarkDownReadmeImageSubfolder(t *testing.T) {
htmlDoc = NewHTMLParser(t, resp.Body)
src, exists = htmlDoc.doc.Find(`.markdown img`).Attr("src")
assert.True(t, exists, "Image not found in markdown file")
assert.Equal(t, src, "/user2/repo1/media/branch/sub-home-md-img-check/docs/test-fake-img.jpg")
assert.Equal(t, "/user2/repo1/media/branch/sub-home-md-img-check/docs/test-fake-img.jpg", src)
}

View File

@ -1805,46 +1805,6 @@ img.ui.avatar,
color: var(--color-warning-text);
}
footer {
background-color: var(--color-footer);
border-top: 1px solid var(--color-secondary);
width: 100%;
flex-basis: 40px;
color: var(--color-text-light);
}
footer .container {
padding: 0 0.5rem;
max-width: 100%;
}
footer .container .links > * {
border-left: 1px solid var(--color-secondary-dark-1);
padding-left: 8px;
margin-left: 5px;
}
footer .container .links > *:first-child {
border-left: 0;
}
footer .ui.language .menu {
height: 500px;
max-height: calc(100vh - 60px);
overflow-y: auto;
margin-bottom: 7px;
}
footer .ui.language .svg {
margin-right: 0.15em;
margin-top: 1px;
}
footer .ui.left,
footer .ui.right {
line-height: 39px; /* there is a border-top on the footer, so make the line-height 1px less */
}
.center:not(.popup) {
text-align: center;
}

View File

@ -43,9 +43,37 @@
color: var(--color-green);
}
footer {
background-color: var(--color-footer);
border-top: 1px solid var(--color-secondary);
line-height: 39px;
flex-basis: 40px;
color: var(--color-text-light);
padding: 0 20px;
}
footer .right.links {
min-width: 180px; /* make sure the menu dropdown doesn't overflow horizontally when language name is short */
}
footer .right.links > a {
border-left: 1px solid var(--color-secondary-dark-1);
padding-left: 8px;
margin-left: 5px;
}
footer .ui.dropdown.language .menu {
height: 500px;
max-height: calc(100vh - 60px);
overflow-y: auto;
margin-bottom: 10px;
}
@media (max-width: 880px) {
footer .ui.container .left,
footer .ui.container .right {
footer .ui.left,
footer .ui.right {
width: 100%;
display: block;
text-align: center;
float: none;

View File

@ -5,11 +5,11 @@ import {attachTribute} from '../tribute.js';
import {hideElem, showElem, autosize} from '../../utils/dom.js';
import {initEasyMDEImagePaste, initTextareaImagePaste} from './ImagePaste.js';
import {handleGlobalEnterQuickSubmit} from './QuickSubmit.js';
import {emojiKeys, emojiString} from '../emoji.js';
import {emojiString} from '../emoji.js';
import {renderPreviewPanelContent} from '../repo-editor.js';
import {matchEmoji, matchMention} from '../../utils/match.js';
let elementIdCounter = 0;
const maxExpanderMatches = 6;
/**
* validate if the given textarea is non-empty.
@ -106,14 +106,7 @@ class ComboMarkdownEditor {
const expander = this.container.querySelector('text-expander');
expander?.addEventListener('text-expander-change', ({detail: {key, provide, text}}) => {
if (key === ':') {
const matches = [];
const textLowerCase = text.toLowerCase();
for (const name of emojiKeys) {
if (name.toLowerCase().includes(textLowerCase)) {
matches.push(name);
if (matches.length >= maxExpanderMatches) break;
}
}
const matches = matchEmoji(text);
if (!matches.length) return provide({matched: false});
const ul = document.createElement('ul');
@ -129,14 +122,7 @@ class ComboMarkdownEditor {
provide({matched: true, fragment: ul});
} else if (key === '@') {
const matches = [];
const textLowerCase = text.toLowerCase();
for (const obj of window.config.tributeValues) {
if (obj.key.toLowerCase().includes(textLowerCase)) {
matches.push(obj);
if (matches.length >= maxExpanderMatches) break;
}
}
const matches = matchMention(text);
if (!matches.length) return provide({matched: false});
const ul = document.createElement('ul');

View File

@ -3,4 +3,13 @@ window.config = {
pageData: {},
i18n: {},
appSubUrl: '',
tributeValues: [
{key: 'user1 User 1', value: 'user1', name: 'user1', fullname: 'User 1', avatar: 'https://avatar1.com'},
{key: 'user2 User 2', value: 'user2', name: 'user2', fullname: 'User 2', avatar: 'https://avatar2.com'},
{key: 'user3 User 3', value: 'user3', name: 'user3', fullname: 'User 3', avatar: 'https://avatar3.com'},
{key: 'user4 User 4', value: 'user4', name: 'user4', fullname: 'User 4', avatar: 'https://avatar4.com'},
{key: 'user5 User 5', value: 'user5', name: 'user5', fullname: 'User 5', avatar: 'https://avatar5.com'},
{key: 'user6 User 6', value: 'user6', name: 'user6', fullname: 'User 6', avatar: 'https://avatar6.com'},
{key: 'user7 User 7', value: 'user7', name: 'user7', fullname: 'User 7', avatar: 'https://avatar7.com'},
],
};

43
web_src/js/utils/match.js Normal file
View File

@ -0,0 +1,43 @@
import emojis from '../../../assets/emoji.json';
const maxMatches = 6;
function sortAndReduce(map) {
const sortedMap = new Map([...map.entries()].sort((a, b) => a[1] - b[1]));
return Array.from(sortedMap.keys()).slice(0, maxMatches);
}
export function matchEmoji(queryText) {
const query = queryText.toLowerCase().replaceAll('_', ' ');
if (!query) return emojis.slice(0, maxMatches).map((e) => e.aliases[0]);
// results is a map of weights, lower is better
const results = new Map();
for (const {aliases} of emojis) {
const mainAlias = aliases[0];
for (const [aliasIndex, alias] of aliases.entries()) {
const index = alias.replaceAll('_', ' ').indexOf(query);
if (index === -1) continue;
const existing = results.get(mainAlias);
const rankedIndex = index + aliasIndex;
results.set(mainAlias, existing ? existing - rankedIndex : rankedIndex);
}
}
return sortAndReduce(results);
}
export function matchMention(queryText) {
const query = queryText.toLowerCase();
// results is a map of weights, lower is better
const results = new Map();
for (const obj of window.config.tributeValues) {
const index = obj.key.toLowerCase().indexOf(query);
if (index === -1) continue;
const existing = results.get(obj);
results.set(obj, existing ? existing - index : index);
}
return sortAndReduce(results);
}
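
The new match.js above ranks candidates with a map of weights, where a lower weight (earlier match position, earlier alias) wins, and then keeps at most six results. The same idea in a short Go sketch, purely for illustration; the data and names here are made up:

// Sketch only: weight-based ranking similar to matchEmoji above (lower weight = better).
package main

import (
	"fmt"
	"sort"
	"strings"
)

const maxMatches = 6

func rank(candidates []string, query string) []string {
	query = strings.ToLower(query)
	weights := map[string]int{}
	for _, c := range candidates {
		idx := strings.Index(strings.ToLower(c), query)
		if idx == -1 {
			continue // not a match at all
		}
		weights[c] = idx // an earlier match position gets a lower (better) weight
	}
	keys := make([]string, 0, len(weights))
	for k := range weights {
		keys = append(keys, k)
	}
	sort.Slice(keys, func(i, j int) bool { return weights[keys[i]] < weights[keys[j]] })
	if len(keys) > maxMatches {
		keys = keys[:maxMatches]
	}
	return keys
}

func main() {
	fmt.Println(rank([]string{"heart", "headphones", "hear_no_evil", "spoon"}, "hea"))
}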

View File

@ -0,0 +1,47 @@
import {test, expect} from 'vitest';
import {matchEmoji, matchMention} from './match.js';
test('matchEmoji', () => {
expect(matchEmoji('')).toEqual([
'+1',
'-1',
'100',
'1234',
'1st_place_medal',
'2nd_place_medal',
]);
expect(matchEmoji('hea')).toEqual([
'headphones',
'headstone',
'health_worker',
'hear_no_evil',
'heard_mcdonald_islands',
'heart',
]);
expect(matchEmoji('hear')).toEqual([
'hear_no_evil',
'heard_mcdonald_islands',
'heart',
'heart_decoration',
'heart_eyes',
'heart_eyes_cat',
]);
expect(matchEmoji('poo')).toEqual([
'poodle',
'hankey',
'spoon',
'bowl_with_spoon',
]);
expect(matchEmoji('1st_')).toEqual([
'1st_place_medal',
]);
});
test('matchMention', () => {
expect(matchMention('')).toEqual(window.config.tributeValues.slice(0, 6));
expect(matchMention('user4')).toEqual([window.config.tributeValues[3]]);
});