Mirror of https://github.com/go-gitea/gitea (synced 2024-12-21 18:07:51 +01:00)

Merge branch 'main' into feat-32257-add-comments-unchanged-lines-and-show

Commit: d8eeb08d2a
@@ -19,6 +19,8 @@ linters:
     - revive
     - staticcheck
     - stylecheck
+    - tenv
+    - testifylint
     - typecheck
     - unconvert
     - unused

@@ -34,6 +36,10 @@ output:
   show-stats: true

 linters-settings:
+  testifylint:
+    disable:
+      - go-require
+      - require-error
   stylecheck:
     checks: ["all", "-ST1005", "-ST1003"]
   nakedret:
@@ -46,7 +46,6 @@ Wim <wim@42.be> (@42wim)
 Jason Song <i@wolfogre.com> (@wolfogre)
 Yarden Shoham <git@yardenshoham.com> (@yardenshoham)
 Yu Tian <zettat123@gmail.com> (@Zettat123)
-Eddie Yang <576951401@qq.com> (@yp05327)
 Dong Ge <gedong_1994@163.com> (@sillyguodong)
 Xinyi Gong <hestergong@gmail.com> (@HesterG)
 wxiaoguang <wxiaoguang@gmail.com> (@wxiaoguang)
Makefile (10 changed lines)

@@ -28,7 +28,7 @@ XGO_VERSION := go-1.23.x
 AIR_PACKAGE ?= github.com/air-verse/air@v1
 EDITORCONFIG_CHECKER_PACKAGE ?= github.com/editorconfig-checker/editorconfig-checker/cmd/editorconfig-checker@2.7.0
 GOFUMPT_PACKAGE ?= mvdan.cc/gofumpt@v0.7.0
-GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/cmd/golangci-lint@v1.60.3
+GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/cmd/golangci-lint@v1.62.2
 GXZ_PACKAGE ?= github.com/ulikunitz/xz/cmd/gxz@v0.5.11
 MISSPELL_PACKAGE ?= github.com/golangci/misspell/cmd/misspell@v0.5.1
 SWAGGER_PACKAGE ?= github.com/go-swagger/go-swagger/cmd/swagger@v0.31.0

@@ -377,12 +377,12 @@ lint-backend-fix: lint-go-fix lint-go-vet lint-editorconfig
 .PHONY: lint-js
 lint-js: node_modules
 	npx eslint --color --max-warnings=0 --ext js,ts,vue $(ESLINT_FILES)
-	# npx vue-tsc
+	npx vue-tsc

 .PHONY: lint-js-fix
 lint-js-fix: node_modules
 	npx eslint --color --max-warnings=0 --ext js,ts,vue $(ESLINT_FILES) --fix
-	# npx vue-tsc
+	npx vue-tsc

 .PHONY: lint-css
 lint-css: node_modules

@@ -451,10 +451,6 @@ lint-templates: .venv node_modules
 lint-yaml: .venv
 	@poetry run yamllint .

-.PHONY: tsc
-tsc:
-	npx vue-tsc
-
 .PHONY: watch
 watch:
 	@bash tools/watch.sh
@@ -1040,9 +1040,13 @@ LEVEL = Info
 ;; Don't allow download source archive files from UI
 ;DISABLE_DOWNLOAD_SOURCE_ARCHIVES = false

-;; Allow fork repositories without maximum number limit
+;; Allow to fork repositories without maximum number limit
 ;ALLOW_FORK_WITHOUT_MAXIMUM_LIMIT = true

+;; Allow to fork repositories into the same owner (user or organization)
+;; This feature is experimental, not fully tested, and may be changed in the future
+;ALLOW_FORK_INTO_SAME_OWNER = false
+
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 ;[repository.editor]
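For illustration only, enabling the new experimental option in a local app.ini could look like the sketch below. Only the two keys are taken from the hunk above; the [repository] section name is an assumption, since the example config shows only the nearby ;[repository.editor] header.

[repository]
;; existing option, shown above with its new comment wording
ALLOW_FORK_WITHOUT_MAXIMUM_LIMIT = true
;; new option added by this commit (experimental, default false)
ALLOW_FORK_INTO_SAME_OWNER = true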
@@ -137,7 +137,7 @@ func UpdateRunJob(ctx context.Context, job *ActionRunJob, cond builder.Cond, col
 	if err != nil {
 		return 0, err
 	}
-	run.Status = aggregateJobStatus(jobs)
+	run.Status = AggregateJobStatus(jobs)
 	if run.Started.IsZero() && run.Status.IsRunning() {
 		run.Started = timeutil.TimeStampNow()
 	}

@@ -152,29 +152,34 @@ func UpdateRunJob(ctx context.Context, job *ActionRunJob, cond builder.Cond, col
 	return affected, nil
 }

-func aggregateJobStatus(jobs []*ActionRunJob) Status {
-	allDone := true
-	allWaiting := true
-	hasFailure := false
-	for _, job := range jobs {
-		if !job.Status.IsDone() {
-			allDone = false
-		}
-		if job.Status != StatusWaiting && !job.Status.IsDone() {
-			allWaiting = false
-		}
-		if job.Status == StatusFailure || job.Status == StatusCancelled {
-			hasFailure = true
-		}
-	}
-	if allDone {
-		if hasFailure {
-			return StatusFailure
-		}
-		return StatusSuccess
-	}
-	if allWaiting {
-		return StatusWaiting
-	}
-	return StatusRunning
-}
+func AggregateJobStatus(jobs []*ActionRunJob) Status {
+	allSuccessOrSkipped := true
+	var hasFailure, hasCancelled, hasSkipped, hasWaiting, hasRunning, hasBlocked bool
+	for _, job := range jobs {
+		allSuccessOrSkipped = allSuccessOrSkipped && (job.Status == StatusSuccess || job.Status == StatusSkipped)
+		hasFailure = hasFailure || job.Status == StatusFailure
+		hasCancelled = hasCancelled || job.Status == StatusCancelled
+		hasSkipped = hasSkipped || job.Status == StatusSkipped
+		hasWaiting = hasWaiting || job.Status == StatusWaiting
+		hasRunning = hasRunning || job.Status == StatusRunning
+		hasBlocked = hasBlocked || job.Status == StatusBlocked
+	}
+	switch {
+	case allSuccessOrSkipped:
+		return StatusSuccess
+	case hasFailure:
+		return StatusFailure
+	case hasRunning:
+		return StatusRunning
+	case hasWaiting:
+		return StatusWaiting
+	case hasBlocked:
+		return StatusBlocked
+	case hasCancelled:
+		return StatusCancelled
+	case hasSkipped:
+		return StatusSkipped
+	default:
+		return StatusUnknown // it shouldn't happen
+	}
+}
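Note: the renamed AggregateJobStatus (now exported so it can be unit-tested) resolves a run's overall status by priority: all jobs success or skipped, then failure, running, waiting, blocked, cancelled, skipped. A minimal sketch of calling it from inside the same actions package, using only the types and status constants that appear in the hunk above; the new test file below exercises the same behaviour.

	// Sketch (package actions): with the new switch, a failed job makes the whole
	// run failed even if another job is still running, matching the fail-fast cases
	// in the table-driven test below.
	jobs := []*ActionRunJob{
		{Status: StatusRunning},
		{Status: StatusFailure},
	}
	status := AggregateJobStatus(jobs) // yields StatusFailure
	_ = status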
models/actions/run_job_status_test.go (new file, 64 lines)

@@ -0,0 +1,64 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestAggregateJobStatus(t *testing.T) {
+	testStatuses := func(expected Status, statuses []Status) {
+		var jobs []*ActionRunJob
+		for _, v := range statuses {
+			jobs = append(jobs, &ActionRunJob{Status: v})
+		}
+		actual := AggregateJobStatus(jobs)
+		if !assert.Equal(t, expected, actual) {
+			var statusStrings []string
+			for _, s := range statuses {
+				statusStrings = append(statusStrings, s.String())
+			}
+			t.Errorf("AggregateJobStatus(%v) = %v, want %v", statusStrings, statusNames[actual], statusNames[expected])
+		}
+	}
+
+	cases := []struct {
+		statuses []Status
+		expected Status
+	}{
+		// success with other status
+		{[]Status{StatusSuccess}, StatusSuccess},
+		{[]Status{StatusSuccess, StatusSkipped}, StatusSuccess}, // skipped doesn't affect success
+		{[]Status{StatusSuccess, StatusFailure}, StatusFailure},
+		{[]Status{StatusSuccess, StatusCancelled}, StatusCancelled},
+		{[]Status{StatusSuccess, StatusWaiting}, StatusWaiting},
+		{[]Status{StatusSuccess, StatusRunning}, StatusRunning},
+		{[]Status{StatusSuccess, StatusBlocked}, StatusBlocked},
+
+		// failure with other status, fail fast
+		// Should "running" win? Maybe no: old code does make "running" win, but GitHub does fail fast.
+		{[]Status{StatusFailure}, StatusFailure},
+		{[]Status{StatusFailure, StatusSuccess}, StatusFailure},
+		{[]Status{StatusFailure, StatusSkipped}, StatusFailure},
+		{[]Status{StatusFailure, StatusCancelled}, StatusFailure},
+		{[]Status{StatusFailure, StatusWaiting}, StatusFailure},
+		{[]Status{StatusFailure, StatusRunning}, StatusFailure},
+		{[]Status{StatusFailure, StatusBlocked}, StatusFailure},
+
+		// skipped with other status
+		{[]Status{StatusSkipped}, StatusSuccess},
+		{[]Status{StatusSkipped, StatusSuccess}, StatusSuccess},
+		{[]Status{StatusSkipped, StatusFailure}, StatusFailure},
+		{[]Status{StatusSkipped, StatusCancelled}, StatusCancelled},
+		{[]Status{StatusSkipped, StatusWaiting}, StatusWaiting},
+		{[]Status{StatusSkipped, StatusRunning}, StatusRunning},
+		{[]Status{StatusSkipped, StatusBlocked}, StatusBlocked},
+	}
+
+	for _, c := range cases {
+		testStatuses(c.expected, c.statuses)
+	}
+}
@@ -17,7 +17,7 @@ func TestGetLatestRunnerToken(t *testing.T) {
 	token := unittest.AssertExistsAndLoadBean(t, &ActionRunnerToken{ID: 3})
 	expectedToken, err := GetLatestRunnerToken(db.DefaultContext, 1, 0)
 	assert.NoError(t, err)
-	assert.EqualValues(t, token, expectedToken)
+	assert.EqualValues(t, expectedToken, token)
 }

 func TestNewRunnerToken(t *testing.T) {

@@ -26,7 +26,7 @@ func TestNewRunnerToken(t *testing.T) {
 	assert.NoError(t, err)
 	expectedToken, err := GetLatestRunnerToken(db.DefaultContext, 1, 0)
 	assert.NoError(t, err)
-	assert.EqualValues(t, token, expectedToken)
+	assert.EqualValues(t, expectedToken, token)
 }

 func TestUpdateRunnerToken(t *testing.T) {

@@ -36,5 +36,5 @@ func TestUpdateRunnerToken(t *testing.T) {
 	assert.NoError(t, UpdateRunnerToken(db.DefaultContext, token))
 	expectedToken, err := GetLatestRunnerToken(db.DefaultContext, 1, 0)
 	assert.NoError(t, err)
-	assert.EqualValues(t, token, expectedToken)
+	assert.EqualValues(t, expectedToken, token)
 }
@@ -4,7 +4,6 @@
 package activities_test

 import (
-	"fmt"
 	"testing"
 	"time"

@@ -91,11 +90,11 @@ func TestGetUserHeatmapDataByUser(t *testing.T) {
 		assert.NoError(t, err)
 		assert.Len(t, actions, contributions, "invalid action count: did the test data became too old?")
 		assert.Equal(t, count, int64(contributions))
-		assert.Equal(t, tc.CountResult, contributions, fmt.Sprintf("testcase '%s'", tc.desc))
+		assert.Equal(t, tc.CountResult, contributions, "testcase '%s'", tc.desc)

 		// Test JSON rendering
 		jsonData, err := json.Marshal(heatmap)
 		assert.NoError(t, err)
-		assert.Equal(t, tc.JSONResult, string(jsonData))
+		assert.JSONEq(t, tc.JSONResult, string(jsonData))
 	}
 }
@@ -18,7 +18,7 @@ func TestOAuth2Application_GenerateClientSecret(t *testing.T) {
 	app := unittest.AssertExistsAndLoadBean(t, &auth_model.OAuth2Application{ID: 1})
 	secret, err := app.GenerateClientSecret(db.DefaultContext)
 	assert.NoError(t, err)
-	assert.True(t, len(secret) > 0)
+	assert.NotEmpty(t, secret)
 	unittest.AssertExistsAndLoadBean(t, &auth_model.OAuth2Application{ID: 1, ClientSecret: app.ClientSecret})
 }

@@ -165,7 +165,7 @@ func TestOAuth2Grant_GenerateNewAuthorizationCode(t *testing.T) {
 	code, err := grant.GenerateNewAuthorizationCode(db.DefaultContext, "https://example2.com/callback", "CjvyTLSdR47G5zYenDA-eDWW4lRrO8yvjcWwbD_deOg", "S256")
 	assert.NoError(t, err)
 	assert.NotNil(t, code)
-	assert.True(t, len(code.Code) > 32) // secret length > 32
+	assert.Greater(t, len(code.Code), 32) // secret length > 32
 }

 func TestOAuth2Grant_TableName(t *testing.T) {
@@ -38,8 +38,6 @@ func TestIterate(t *testing.T) {
 		if !has {
 			return db.ErrNotExist{Resource: "repo_unit", ID: repoUnit.ID}
 		}
-		assert.EqualValues(t, repoUnit.RepoID, repoUnit.RepoID)
-		assert.EqualValues(t, repoUnit.CreatedUnix, repoUnit.CreatedUnix)
 		return nil
 	})
 	assert.NoError(t, err)
@@ -36,3 +36,41 @@
   updated: 1683636626
   need_approval: 0
   approved_by: 0
+-
+  id: 793
+  title: "job output"
+  repo_id: 4
+  owner_id: 1
+  workflow_id: "test.yaml"
+  index: 189
+  trigger_user_id: 1
+  ref: "refs/heads/master"
+  commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0"
+  event: "push"
+  is_fork_pull_request: 0
+  status: 1
+  started: 1683636528
+  stopped: 1683636626
+  created: 1683636108
+  updated: 1683636626
+  need_approval: 0
+  approved_by: 0
+-
+  id: 794
+  title: "job output"
+  repo_id: 4
+  owner_id: 1
+  workflow_id: "test.yaml"
+  index: 190
+  trigger_user_id: 1
+  ref: "refs/heads/test"
+  commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0"
+  event: "push"
+  is_fork_pull_request: 0
+  status: 1
+  started: 1683636528
+  stopped: 1683636626
+  created: 1683636108
+  updated: 1683636626
+  need_approval: 0
+  approved_by: 0
@@ -26,3 +26,46 @@
   status: 1
   started: 1683636528
   stopped: 1683636626
+-
+  id: 194
+  run_id: 793
+  repo_id: 4
+  owner_id: 1
+  commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
+  is_fork_pull_request: 0
+  name: job1 (1)
+  attempt: 1
+  job_id: job1
+  task_id: 49
+  status: 1
+  started: 1683636528
+  stopped: 1683636626
+-
+  id: 195
+  run_id: 793
+  repo_id: 4
+  owner_id: 1
+  commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
+  is_fork_pull_request: 0
+  name: job1 (2)
+  attempt: 1
+  job_id: job1
+  task_id: 50
+  status: 1
+  started: 1683636528
+  stopped: 1683636626
+-
+  id: 196
+  run_id: 793
+  repo_id: 4
+  owner_id: 1
+  commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
+  is_fork_pull_request: 0
+  name: job2
+  attempt: 1
+  job_id: job2
+  needs: [job1]
+  task_id: 51
+  status: 5
+  started: 1683636528
+  stopped: 1683636626
@@ -57,3 +57,63 @@
   log_length: 707
   log_size: 90179
   log_expired: 0
+-
+  id: 49
+  job_id: 194
+  attempt: 1
+  runner_id: 1
+  status: 1 # success
+  started: 1683636528
+  stopped: 1683636626
+  repo_id: 4
+  owner_id: 1
+  commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
+  is_fork_pull_request: 0
+  token_hash: b8d3962425466b6709b9ac51446f93260c54afe8e7b6d3686e34f991fb8a8953822b0deed86fe41a103f34bc48dbc4784220
+  token_salt: ffffffffff
+  token_last_eight: ffffffff
+  log_filename: artifact-test2/2f/47.log
+  log_in_storage: 1
+  log_length: 707
+  log_size: 90179
+  log_expired: 0
+-
+  id: 50
+  job_id: 195
+  attempt: 1
+  runner_id: 1
+  status: 1 # success
+  started: 1683636528
+  stopped: 1683636626
+  repo_id: 4
+  owner_id: 1
+  commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
+  is_fork_pull_request: 0
+  token_hash: b8d3962425466b6709b9ac51446f93260c54afe8e7b6d3686e34f991fb8a8953822b0deed86fe41a103f34bc48dbc4784221
+  token_salt: ffffffffff
+  token_last_eight: ffffffff
+  log_filename: artifact-test2/2f/47.log
+  log_in_storage: 1
+  log_length: 707
+  log_size: 90179
+  log_expired: 0
+-
+  id: 51
+  job_id: 196
+  attempt: 1
+  runner_id: 1
+  status: 6 # running
+  started: 1683636528
+  stopped: 1683636626
+  repo_id: 4
+  owner_id: 1
+  commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
+  is_fork_pull_request: 0
+  token_hash: b8d3962425466b6709b9ac51446f93260c54afe8e7b6d3686e34f991fb8a8953822b0deed86fe41a103f34bc48dbc4784222
+  token_salt: ffffffffff
+  token_last_eight: ffffffff
+  log_filename: artifact-test2/2f/47.log
+  log_in_storage: 1
+  log_length: 707
+  log_size: 90179
+  log_expired: 0

models/fixtures/action_task_output.yml (new file, 20 lines)

@@ -0,0 +1,20 @@
+-
+  id: 1
+  task_id: 49
+  output_key: output_a
+  output_value: abc
+-
+  id: 2
+  task_id: 49
+  output_key: output_b
+  output_value: ''
+-
+  id: 3
+  task_id: 50
+  output_key: output_a
+  output_value: ''
+-
+  id: 4
+  task_id: 50
+  output_key: output_b
+  output_value: bbb
@@ -81,3 +81,15 @@
   is_deleted: false
   deleted_by_id: 0
   deleted_unix: 0
+
+-
+  id: 15
+  repo_id: 4
+  name: 'master'
+  commit_id: 'c7cd3cd144e6d23c9d6f3d07e52b2c1a956e0338'
+  commit_message: 'add Readme'
+  commit_time: 1588147171
+  pusher_id: 13
+  is_deleted: false
+  deleted_by_id: 0
+  deleted_unix: 0
@@ -34,7 +34,7 @@ func TestGetCommitStatuses(t *testing.T) {
 		SHA: sha1,
 	})
 	assert.NoError(t, err)
-	assert.Equal(t, int(maxResults), 5)
+	assert.Equal(t, 5, int(maxResults))
 	assert.Len(t, statuses, 5)

 	assert.Equal(t, "ci/awesomeness", statuses[0].Context)

@@ -63,7 +63,7 @@ func TestGetCommitStatuses(t *testing.T) {
 		SHA: sha1,
 	})
 	assert.NoError(t, err)
-	assert.Equal(t, int(maxResults), 5)
+	assert.Equal(t, 5, int(maxResults))
 	assert.Empty(t, statuses)
 }
@@ -4,7 +4,6 @@
 package git

 import (
-	"fmt"
 	"testing"

 	"code.gitea.io/gitea/models/db"

@@ -76,7 +75,7 @@ func TestBranchRuleMatch(t *testing.T) {
 			infact = " not"
 		}
 		assert.EqualValues(t, kase.ExpectedMatch, pb.Match(kase.BranchName),
-			fmt.Sprintf("%s should%s match %s but it is%s", kase.BranchName, should, kase.Rule, infact),
+			"%s should%s match %s but it is%s", kase.BranchName, should, kase.Rule, infact,
 		)
 	}
 }
@@ -64,7 +64,7 @@ func TestFetchCodeComments(t *testing.T) {
 }

 func TestAsCommentType(t *testing.T) {
-	assert.Equal(t, issues_model.CommentType(0), issues_model.CommentTypeComment)
+	assert.Equal(t, issues_model.CommentTypeComment, issues_model.CommentType(0))
 	assert.Equal(t, issues_model.CommentTypeUndefined, issues_model.AsCommentType(""))
 	assert.Equal(t, issues_model.CommentTypeUndefined, issues_model.AsCommentType("nonsense"))
 	assert.Equal(t, issues_model.CommentTypeComment, issues_model.AsCommentType("comment"))
@@ -18,12 +18,12 @@ func RecalculateIssueIndexForRepo(ctx context.Context, repoID int64) error {
 	}
 	defer committer.Close()

-	var max int64
-	if _, err = db.GetEngine(ctx).Select(" MAX(`index`)").Table("issue").Where("repo_id=?", repoID).Get(&max); err != nil {
+	var maxIndex int64
+	if _, err = db.GetEngine(ctx).Select(" MAX(`index`)").Table("issue").Where("repo_id=?", repoID).Get(&maxIndex); err != nil {
 		return err
 	}

-	if err = db.SyncMaxResourceIndex(ctx, "issue_index", repoID, max); err != nil {
+	if err = db.SyncMaxResourceIndex(ctx, "issue_index", repoID, maxIndex); err != nil {
 		return err
 	}
@@ -434,7 +434,7 @@ func assertCreateIssues(t *testing.T, isPull bool) {
 	owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: repo.OwnerID})
 	label := unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 1})
 	milestone := unittest.AssertExistsAndLoadBean(t, &issues_model.Milestone{ID: 1})
-	assert.EqualValues(t, milestone.ID, 1)
+	assert.EqualValues(t, 1, milestone.ID)
 	reaction := &issues_model.Reaction{
 		Type:   "heart",
 		UserID: owner.ID,
@@ -48,17 +48,17 @@ func TestGetIssueWatchers(t *testing.T) {
 	iws, err := issues_model.GetIssueWatchers(db.DefaultContext, 1, db.ListOptions{})
 	assert.NoError(t, err)
 	// Watcher is inactive, thus 0
-	assert.Len(t, iws, 0)
+	assert.Empty(t, iws)

 	iws, err = issues_model.GetIssueWatchers(db.DefaultContext, 2, db.ListOptions{})
 	assert.NoError(t, err)
 	// Watcher is explicit not watching
-	assert.Len(t, iws, 0)
+	assert.Empty(t, iws)

 	iws, err = issues_model.GetIssueWatchers(db.DefaultContext, 5, db.ListOptions{})
 	assert.NoError(t, err)
 	// Issue has no Watchers
-	assert.Len(t, iws, 0)
+	assert.Empty(t, iws)

 	iws, err = issues_model.GetIssueWatchers(db.DefaultContext, 7, db.ListOptions{})
 	assert.NoError(t, err)
@@ -31,12 +31,12 @@ func TestLabel_LoadSelectedLabelsAfterClick(t *testing.T) {
 	// First test : with negative and scope
 	label.LoadSelectedLabelsAfterClick([]int64{1, -8}, []string{"", "scope"})
 	assert.Equal(t, "1", label.QueryString)
-	assert.Equal(t, true, label.IsSelected)
+	assert.True(t, label.IsSelected)

 	// Second test : with duplicates
 	label.LoadSelectedLabelsAfterClick([]int64{1, 7, 1, 7, 7}, []string{"", "scope", "", "scope", "scope"})
 	assert.Equal(t, "1,8", label.QueryString)
-	assert.Equal(t, false, label.IsSelected)
+	assert.False(t, label.IsSelected)

 	// Third test : empty set
 	label.LoadSelectedLabelsAfterClick([]int64{}, []string{})

@@ -248,7 +248,7 @@ func TestGetLabelsByIssueID(t *testing.T) {

 	labels, err = issues_model.GetLabelsByIssueID(db.DefaultContext, unittest.NonexistentID)
 	assert.NoError(t, err)
-	assert.Len(t, labels, 0)
+	assert.Empty(t, labels)
 }

 func TestUpdateLabel(t *testing.T) {

@@ -271,7 +271,7 @@ func TestUpdateLabel(t *testing.T) {
 	assert.EqualValues(t, label.Color, newLabel.Color)
 	assert.EqualValues(t, label.Name, newLabel.Name)
 	assert.EqualValues(t, label.Description, newLabel.Description)
-	assert.EqualValues(t, newLabel.ArchivedUnix, 0)
+	assert.EqualValues(t, 0, newLabel.ArchivedUnix)
 	unittest.CheckConsistencyFor(t, &issues_model.Label{}, &repo_model.Repository{})
 }
@@ -87,7 +87,7 @@ func TestGetMilestonesByRepoID(t *testing.T) {
 		IsClosed: optional.Some(false),
 	})
 	assert.NoError(t, err)
-	assert.Len(t, milestones, 0)
+	assert.Empty(t, milestones)
 }

 func TestGetMilestones(t *testing.T) {
@@ -40,7 +40,7 @@ func TestPullRequestList_LoadReviewCommentsCounts(t *testing.T) {
 	assert.NoError(t, err)
 	assert.Len(t, reviewComments, 2)
 	for _, pr := range prs {
-		assert.EqualValues(t, reviewComments[pr.IssueID], 1)
+		assert.EqualValues(t, 1, reviewComments[pr.IssueID])
 	}
 }

@@ -83,7 +83,7 @@ func TestLoadRequestedReviewers(t *testing.T) {
 	assert.NoError(t, pull.LoadIssue(db.DefaultContext))
 	issue := pull.Issue
 	assert.NoError(t, issue.LoadRepo(db.DefaultContext))
-	assert.Len(t, pull.RequestedReviewers, 0)
+	assert.Empty(t, pull.RequestedReviewers)

 	user1, err := user_model.GetUserByID(db.DefaultContext, 1)
 	assert.NoError(t, err)
@@ -32,7 +32,7 @@ func TestCancelStopwatch(t *testing.T) {

 	_ = unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{Type: issues_model.CommentTypeCancelTracking, PosterID: user1.ID, IssueID: issue1.ID})

-	assert.Nil(t, issues_model.CancelStopwatch(db.DefaultContext, user1, issue2))
+	assert.NoError(t, issues_model.CancelStopwatch(db.DefaultContext, user1, issue2))
 }

 func TestStopwatchExists(t *testing.T) {
@@ -50,7 +50,7 @@ func TestGetTrackedTimes(t *testing.T) {

 	times, err = issues_model.GetTrackedTimes(db.DefaultContext, &issues_model.FindTrackedTimesOptions{IssueID: -1})
 	assert.NoError(t, err)
-	assert.Len(t, times, 0)
+	assert.Empty(t, times)

 	// by User
 	times, err = issues_model.GetTrackedTimes(db.DefaultContext, &issues_model.FindTrackedTimesOptions{UserID: 1})

@@ -60,7 +60,7 @@ func TestGetTrackedTimes(t *testing.T) {

 	times, err = issues_model.GetTrackedTimes(db.DefaultContext, &issues_model.FindTrackedTimesOptions{UserID: 3})
 	assert.NoError(t, err)
-	assert.Len(t, times, 0)
+	assert.Empty(t, times)

 	// by Repo
 	times, err = issues_model.GetTrackedTimes(db.DefaultContext, &issues_model.FindTrackedTimesOptions{RepositoryID: 2})

@@ -69,7 +69,7 @@ func TestGetTrackedTimes(t *testing.T) {
 	assert.Equal(t, int64(1), times[0].Time)
 	issue, err := issues_model.GetIssueByID(db.DefaultContext, times[0].IssueID)
 	assert.NoError(t, err)
-	assert.Equal(t, issue.RepoID, int64(2))
+	assert.Equal(t, int64(2), issue.RepoID)

 	times, err = issues_model.GetTrackedTimes(db.DefaultContext, &issues_model.FindTrackedTimesOptions{RepositoryID: 1})
 	assert.NoError(t, err)

@@ -77,7 +77,7 @@ func TestGetTrackedTimes(t *testing.T) {

 	times, err = issues_model.GetTrackedTimes(db.DefaultContext, &issues_model.FindTrackedTimesOptions{RepositoryID: 10})
 	assert.NoError(t, err)
-	assert.Len(t, times, 0)
+	assert.Empty(t, times)
 }

 func TestTotalTimesForEachUser(t *testing.T) {
@@ -56,8 +56,8 @@ func Test_AddRepoIDForAttachment(t *testing.T) {
 	err := x.Table("attachment").Where("issue_id > 0").Find(&issueAttachments)
 	assert.NoError(t, err)
 	for _, attach := range issueAttachments {
-		assert.Greater(t, attach.RepoID, int64(0))
-		assert.Greater(t, attach.IssueID, int64(0))
+		assert.Positive(t, attach.RepoID)
+		assert.Positive(t, attach.IssueID)
 		var issue Issue
 		has, err := x.ID(attach.IssueID).Get(&issue)
 		assert.NoError(t, err)

@@ -69,8 +69,8 @@ func Test_AddRepoIDForAttachment(t *testing.T) {
 	err = x.Table("attachment").Where("release_id > 0").Find(&releaseAttachments)
 	assert.NoError(t, err)
 	for _, attach := range releaseAttachments {
-		assert.Greater(t, attach.RepoID, int64(0))
-		assert.Greater(t, attach.ReleaseID, int64(0))
+		assert.Positive(t, attach.RepoID)
+		assert.Positive(t, attach.ReleaseID)
 		var release Release
 		has, err := x.ID(attach.ReleaseID).Get(&release)
 		assert.NoError(t, err)
@@ -107,12 +107,12 @@ func Test_RepositoryFormat(t *testing.T) {
 	repo = new(Repository)
 	ok, err := x.ID(2).Get(repo)
 	assert.NoError(t, err)
-	assert.EqualValues(t, true, ok)
+	assert.True(t, ok)
 	assert.EqualValues(t, "sha1", repo.ObjectFormatName)

 	repo = new(Repository)
 	ok, err = x.ID(id).Get(repo)
 	assert.NoError(t, err)
-	assert.EqualValues(t, true, ok)
+	assert.True(t, ok)
 	assert.EqualValues(t, "sha256", repo.ObjectFormatName)
 }
@@ -39,7 +39,7 @@ func Test_AddUniqueIndexForProjectIssue(t *testing.T) {

 	tables, err := x.DBMetas()
 	assert.NoError(t, err)
-	assert.EqualValues(t, 1, len(tables))
+	assert.Len(t, tables, 1)
 	found := false
 	for _, index := range tables[0].Indexes {
 		if index.Type == schemas.UniqueType {
@@ -40,7 +40,7 @@ func TestFindOrgs(t *testing.T) {
 		IncludePrivate: false,
 	})
 	assert.NoError(t, err)
-	assert.Len(t, orgs, 0)
+	assert.Empty(t, orgs)

 	total, err := db.Count[organization.Organization](db.DefaultContext, organization.FindOrgOptions{
 		UserID: 4,

@@ -283,7 +283,7 @@ func TestGetOrgUsersByOrgID(t *testing.T) {
 		OrgID: unittest.NonexistentID,
 	})
 	assert.NoError(t, err)
-	assert.Len(t, orgUsers, 0)
+	assert.Empty(t, orgUsers)
 }

 func TestChangeOrgUserStatus(t *testing.T) {
@@ -15,7 +15,7 @@ func TestAccessMode(t *testing.T) {
 		m := ParseAccessMode(name)
 		assert.Equal(t, AccessMode(i), m)
 	}
-	assert.Equal(t, AccessMode(4), AccessModeOwner)
+	assert.Equal(t, AccessModeOwner, AccessMode(4))
 	assert.Equal(t, "owner", AccessModeOwner.ToString())
 	assert.Equal(t, AccessModeNone, ParseAccessMode("owner"))
 	assert.Equal(t, AccessModeNone, ParseAccessMode("invalid"))
@@ -5,7 +5,6 @@ package project

 import (
 	"fmt"
-	"strings"
 	"testing"

 	"code.gitea.io/gitea/models/db"

@@ -66,7 +65,7 @@ func Test_moveIssuesToAnotherColumn(t *testing.T) {

 	issues, err = column1.GetIssues(db.DefaultContext)
 	assert.NoError(t, err)
-	assert.Len(t, issues, 0)
+	assert.Empty(t, issues)

 	issues, err = column2.GetIssues(db.DefaultContext)
 	assert.NoError(t, err)

@@ -123,5 +122,5 @@ func Test_NewColumn(t *testing.T) {
 		ProjectID: project1.ID,
 	})
 	assert.Error(t, err)
-	assert.True(t, strings.Contains(err.Error(), "maximum number of columns reached"))
+	assert.Contains(t, err.Error(), "maximum number of columns reached")
 }
@@ -144,8 +144,8 @@ func TestGetRepositoryByURL(t *testing.T) {
 		assert.NotNil(t, repo)
 		assert.NoError(t, err)

-		assert.Equal(t, repo.ID, int64(2))
-		assert.Equal(t, repo.OwnerID, int64(2))
+		assert.Equal(t, int64(2), repo.ID)
+		assert.Equal(t, int64(2), repo.OwnerID)
 	}

 	test(t, "https://try.gitea.io/user2/repo2")

@@ -159,8 +159,8 @@ func TestGetRepositoryByURL(t *testing.T) {
 		assert.NotNil(t, repo)
 		assert.NoError(t, err)

-		assert.Equal(t, repo.ID, int64(2))
-		assert.Equal(t, repo.OwnerID, int64(2))
+		assert.Equal(t, int64(2), repo.ID)
+		assert.Equal(t, int64(2), repo.OwnerID)
 	}

 	test(t, "git+ssh://sshuser@try.gitea.io/user2/repo2")

@@ -177,8 +177,8 @@ func TestGetRepositoryByURL(t *testing.T) {
 		assert.NotNil(t, repo)
 		assert.NoError(t, err)

-		assert.Equal(t, repo.ID, int64(2))
-		assert.Equal(t, repo.OwnerID, int64(2))
+		assert.Equal(t, int64(2), repo.ID)
+		assert.Equal(t, int64(2), repo.OwnerID)
 	}

 	test(t, "sshuser@try.gitea.io:user2/repo2")

@@ -52,7 +52,7 @@ func TestRepository_GetStargazers2(t *testing.T) {
 	repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 3})
 	gazers, err := repo_model.GetStargazers(db.DefaultContext, repo, db.ListOptions{Page: 0})
 	assert.NoError(t, err)
-	assert.Len(t, gazers, 0)
+	assert.Empty(t, gazers)
 }

 func TestClearRepoStars(t *testing.T) {

@@ -71,5 +71,5 @@ func TestClearRepoStars(t *testing.T) {

 	gazers, err := repo_model.GetStargazers(db.DefaultContext, repo, db.ListOptions{Page: 0})
 	assert.NoError(t, err)
-	assert.Len(t, gazers, 0)
+	assert.Empty(t, gazers)
 }
@@ -21,7 +21,7 @@ func TestRepoAssignees(t *testing.T) {
 	users, err := repo_model.GetRepoAssignees(db.DefaultContext, repo2)
 	assert.NoError(t, err)
 	assert.Len(t, users, 1)
-	assert.Equal(t, users[0].ID, int64(2))
+	assert.Equal(t, int64(2), users[0].ID)

 	repo21 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 21})
 	users, err = repo_model.GetRepoAssignees(db.DefaultContext, repo21)
@@ -41,7 +41,7 @@ func TestGetWatchers(t *testing.T) {

 	watches, err = repo_model.GetWatchers(db.DefaultContext, unittest.NonexistentID)
 	assert.NoError(t, err)
-	assert.Len(t, watches, 0)
+	assert.Empty(t, watches)
 }

 func TestRepository_GetWatchers(t *testing.T) {

@@ -58,7 +58,7 @@ func TestRepository_GetWatchers(t *testing.T) {
 	repo = unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 9})
 	watchers, err = repo_model.GetRepoWatchers(db.DefaultContext, repo.ID, db.ListOptions{Page: 1})
 	assert.NoError(t, err)
-	assert.Len(t, watchers, 0)
+	assert.Empty(t, watchers)
 }

 func TestWatchIfAuto(t *testing.T) {
@@ -79,7 +79,7 @@ func AssertExistsAndLoadMap(t assert.TestingT, table string, conditions ...any)
 	e := db.GetEngine(db.DefaultContext).Table(table)
 	res, err := whereOrderConditions(e, conditions).Query()
 	assert.NoError(t, err)
-	assert.True(t, len(res) == 1,
+	assert.Len(t, res, 1,
 		"Expected to find one row in %s (with conditions %+v), but found %d",
 		table, conditions, len(res),
 	)
@@ -97,8 +97,7 @@ func TestListEmails(t *testing.T) {
 	}
 	emails, count, err := user_model.SearchEmails(db.DefaultContext, opts)
 	assert.NoError(t, err)
-	assert.NotEqual(t, int64(0), count)
-	assert.True(t, count > 5)
+	assert.Greater(t, count, int64(5))

 	contains := func(match func(s *user_model.SearchEmailResult) bool) bool {
 		for _, v := range emails {
@@ -56,5 +56,5 @@ func TestSettings(t *testing.T) {
 	assert.NoError(t, err)
 	settings, err = user_model.GetUserAllSettings(db.DefaultContext, 99)
 	assert.NoError(t, err)
-	assert.Len(t, settings, 0)
+	assert.Empty(t, settings)
 }
@@ -201,7 +201,7 @@ func TestNewGitSig(t *testing.T) {
 		assert.NotContains(t, sig.Name, "<")
 		assert.NotContains(t, sig.Name, ">")
 		assert.NotContains(t, sig.Name, "\n")
-		assert.NotEqual(t, len(strings.TrimSpace(sig.Name)), 0)
+		assert.NotEmpty(t, strings.TrimSpace(sig.Name))
 	}
 }

@@ -216,7 +216,7 @@ func TestDisplayName(t *testing.T) {
 		if len(strings.TrimSpace(user.FullName)) == 0 {
 			assert.Equal(t, user.Name, displayName)
 		}
-		assert.NotEqual(t, len(strings.TrimSpace(displayName)), 0)
+		assert.NotEmpty(t, strings.TrimSpace(displayName))
 	}
 }

@@ -322,15 +322,15 @@ func TestGetMaileableUsersByIDs(t *testing.T) {
 	assert.NoError(t, err)
 	assert.Len(t, results, 1)
 	if len(results) > 1 {
-		assert.Equal(t, results[0].ID, 1)
+		assert.Equal(t, 1, results[0].ID)
 	}

 	results, err = user_model.GetMaileableUsersByIDs(db.DefaultContext, []int64{1, 4}, true)
 	assert.NoError(t, err)
 	assert.Len(t, results, 2)
 	if len(results) > 2 {
-		assert.Equal(t, results[0].ID, 1)
-		assert.Equal(t, results[1].ID, 4)
+		assert.Equal(t, 1, results[0].ID)
+		assert.Equal(t, 4, results[1].ID)
 	}
 }
@@ -499,7 +499,7 @@ func Test_ValidateUser(t *testing.T) {
 		{ID: 2, Visibility: structs.VisibleTypePrivate}: true,
 	}
 	for kase, expected := range kases {
-		assert.EqualValues(t, expected, nil == user_model.ValidateUser(kase), fmt.Sprintf("case: %+v", kase))
+		assert.EqualValues(t, expected, nil == user_model.ValidateUser(kase), "case: %+v", kase)
 	}
 }

@@ -570,11 +570,11 @@ func TestDisabledUserFeatures(t *testing.T) {

 	user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})

-	assert.Len(t, setting.Admin.UserDisabledFeatures.Values(), 0)
+	assert.Empty(t, setting.Admin.UserDisabledFeatures.Values())

 	// no features should be disabled with a plain login type
 	assert.LessOrEqual(t, user.LoginType, auth.Plain)
-	assert.Len(t, user_model.DisabledFeaturesWithLoginType(user).Values(), 0)
+	assert.Empty(t, user_model.DisabledFeaturesWithLoginType(user).Values())
 	for _, f := range testValues.Values() {
 		assert.False(t, user_model.IsFeatureDisabledWithLoginType(user, f))
 	}

@@ -600,5 +600,5 @@ func TestGetInactiveUsers(t *testing.T) {
 	interval := time.Now().Unix() - 1730468968 + 3600*24
 	users, err = user_model.GetInactiveUsers(db.DefaultContext, time.Duration(interval*int64(time.Second)))
 	assert.NoError(t, err)
-	assert.Len(t, users, 0)
+	assert.Empty(t, users)
 }
@@ -43,7 +43,7 @@ func TestWebhook_History(t *testing.T) {
 	webhook = unittest.AssertExistsAndLoadBean(t, &Webhook{ID: 2})
 	tasks, err = webhook.History(db.DefaultContext, 0)
 	assert.NoError(t, err)
-	assert.Len(t, tasks, 0)
+	assert.Empty(t, tasks)
 }

 func TestWebhook_UpdateEvent(t *testing.T) {

@@ -206,7 +206,7 @@ func TestHookTasks(t *testing.T) {

 	hookTasks, err = HookTasks(db.DefaultContext, unittest.NonexistentID, 1)
 	assert.NoError(t, err)
-	assert.Len(t, hookTasks, 0)
+	assert.Empty(t, hookTasks)
 }

 func TestCreateHookTask(t *testing.T) {
@@ -8,7 +8,6 @@ import (
 	"io"
 	"net/http"
 	"net/http/httptest"
-	"regexp"
 	"testing"

 	"code.gitea.io/gitea/models/db"

@@ -28,9 +27,9 @@ func TestActivityPubSignedPost(t *testing.T) {

 	expected := "BODY"
 	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
-		assert.Regexp(t, regexp.MustCompile("^"+setting.Federation.DigestAlgorithm), r.Header.Get("Digest"))
+		assert.Regexp(t, "^"+setting.Federation.DigestAlgorithm, r.Header.Get("Digest"))
 		assert.Contains(t, r.Header.Get("Signature"), pubID)
-		assert.Equal(t, r.Header.Get("Content-Type"), ActivityStreamsContentType)
+		assert.Equal(t, ActivityStreamsContentType, r.Header.Get("Content-Type"))
 		body, err := io.ReadAll(r.Body)
 		assert.NoError(t, err)
 		assert.Equal(t, expected, string(body))
@@ -58,7 +58,7 @@ func TestLayered(t *testing.T) {
 	assertRead := func(expected string, expectedErr error, elems ...string) {
 		bs, err := assets.ReadFile(elems...)
 		if err != nil {
-			assert.ErrorAs(t, err, &expectedErr)
+			assert.ErrorIs(t, err, expectedErr)
 		} else {
 			assert.NoError(t, err)
 			assert.Equal(t, expected, string(bs))
@@ -15,5 +15,5 @@ func TestPamAuth(t *testing.T) {
 	result, err := Auth("gitea", "user1", "false-pwd")
 	assert.Error(t, err)
 	assert.EqualError(t, err, "Authentication failure")
-	assert.Len(t, result, 0)
+	assert.Empty(t, result)
 }
@@ -18,7 +18,7 @@ func TestDummyHasher(t *testing.T) {
 	password, salt := "password", "ZogKvWdyEx"

 	hash, err := dummy.Hash(password, salt)
-	assert.Nil(t, err)
+	assert.NoError(t, err)
 	assert.Equal(t, hash, salt+":"+password)

 	assert.True(t, dummy.VerifyPassword(password, hash, salt))
@@ -99,10 +99,10 @@ func IsComplexEnough(pwd string) bool {
 func Generate(n int) (string, error) {
 	NewComplexity()
 	buffer := make([]byte, n)
-	max := big.NewInt(int64(len(validChars)))
+	maxInt := big.NewInt(int64(len(validChars)))
 	for {
 		for j := 0; j < n; j++ {
-			rnd, err := rand.Int(rand.Reader, max)
+			rnd, err := rand.Int(rand.Reader, maxInt)
 			if err != nil {
 				return "", err
 			}
@@ -6,7 +6,6 @@ package base
 import (
 	"crypto/sha1"
 	"fmt"
-	"os"
 	"testing"
 	"time"

@@ -157,7 +156,7 @@ func TestStringsToInt64s(t *testing.T) {
 	testSuccess([]string{"1", "4", "16", "64", "256"}, []int64{1, 4, 16, 64, 256})

 	ints, err := StringsToInt64s([]string{"-1", "a"})
-	assert.Len(t, ints, 0)
+	assert.Empty(t, ints)
 	assert.Error(t, err)
 }

@@ -172,9 +171,9 @@ func TestInt64sToStrings(t *testing.T) {
 // TODO: Test EntryIcon

 func TestSetupGiteaRoot(t *testing.T) {
-	_ = os.Setenv("GITEA_ROOT", "test")
+	t.Setenv("GITEA_ROOT", "test")
 	assert.Equal(t, "test", SetupGiteaRoot())
-	_ = os.Setenv("GITEA_ROOT", "")
+	t.Setenv("GITEA_ROOT", "")
 	assert.NotEqual(t, "test", SetupGiteaRoot())
 }
@@ -25,7 +25,7 @@ func TestPrepareFileNameAndType(t *testing.T) {
 		assert.Equal(t,
 			fmt.Sprintf("outFile=%s, outType=%s", expFile, expType),
 			fmt.Sprintf("outFile=%s, outType=%s", outFile, outType),
-			fmt.Sprintf("argFile=%s, argType=%s", argFile, argType),
+			"argFile=%s, argType=%s", argFile, argType,
 		)
 	}
@@ -146,7 +146,7 @@ func TestHasPreviousCommitSha256(t *testing.T) {
 	parentSHA := MustIDFromString("b0ec7af4547047f12d5093e37ef8f1b3b5415ed8ee17894d43a34d7d34212e9c")
 	notParentSHA := MustIDFromString("42e334efd04cd36eea6da0599913333c26116e1a537ca76e5b6e4af4dda00236")
 	assert.Equal(t, objectFormat, parentSHA.Type())
-	assert.Equal(t, objectFormat.Name(), "sha256")
+	assert.Equal(t, "sha256", objectFormat.Name())

 	haz, err := commit.HasPreviousCommit(parentSHA)
 	assert.NoError(t, err)
@@ -343,9 +343,9 @@ func TestGetCommitFileStatusMerges(t *testing.T) {
 		},
 	}

-	assert.Equal(t, commitFileStatus.Added, expected.Added)
+	assert.Equal(t, expected.Added, commitFileStatus.Added)
-	assert.Equal(t, commitFileStatus.Removed, expected.Removed)
+	assert.Equal(t, expected.Removed, commitFileStatus.Removed)
-	assert.Equal(t, commitFileStatus.Modified, expected.Modified)
+	assert.Equal(t, expected.Modified, commitFileStatus.Modified)
 }

 func Test_GetCommitBranchStart(t *testing.T) {
@@ -73,9 +73,9 @@ func TestGrepSearch(t *testing.T) {

 	res, err = GrepSearch(context.Background(), repo, "no-such-content", GrepOptions{})
 	assert.NoError(t, err)
-	assert.Len(t, res, 0)
+	assert.Empty(t, res)

 	res, err = GrepSearch(context.Background(), &Repository{Path: "no-such-git-repo"}, "no-such-content", GrepOptions{})
 	assert.Error(t, err)
-	assert.Len(t, res, 0)
+	assert.Empty(t, res)
 }
@@ -100,5 +100,5 @@ func TestParseTreeEntriesInvalid(t *testing.T) {
 	// there was a panic: "runtime error: slice bounds out of range" when the input was invalid: #20315
 	entries, err := ParseTreeEntries([]byte("100644 blob ea0d83c9081af9500ac9f804101b3fd0a5c293af"))
 	assert.Error(t, err)
-	assert.Len(t, entries, 0)
+	assert.Empty(t, entries)
 }
@@ -34,7 +34,7 @@ func TestRepository_GetBranches(t *testing.T) {
 	branches, countAll, err = bareRepo1.GetBranchNames(5, 1)

 	assert.NoError(t, err)
-	assert.Len(t, branches, 0)
+	assert.Empty(t, branches)
 	assert.EqualValues(t, 3, countAll)
 	assert.ElementsMatch(t, []string{}, branches)
 }
@@ -66,7 +66,7 @@ func TestGetRefsBySha(t *testing.T) {
 	// do not exist
 	branches, err := bareRepo5.GetRefsBySha("8006ff9adbf0cb94da7dad9e537e53817f9fa5c0", "")
 	assert.NoError(t, err)
-	assert.Len(t, branches, 0)
+	assert.Empty(t, branches)

 	// refs/pull/1/head
 	branches, err = bareRepo5.GetRefsBySha("c83380d7056593c51a699d12b9c00627bd5743e9", PullPrefix)
@@ -465,15 +465,15 @@ func (repo *Repository) getBranches(env []string, commitID string, limit int) ([

 	refs := strings.Split(stdout, "\n")

-	var max int
+	var maxNum int
 	if len(refs) > limit {
-		max = limit
+		maxNum = limit
 	} else {
-		max = len(refs) - 1
+		maxNum = len(refs) - 1
 	}

-	branches := make([]string, max)
-	for i, ref := range refs[:max] {
+	branches := make([]string, maxNum)
+	for i, ref := range refs[:maxNum] {
 		parts := strings.Fields(ref)

 		branches[i] = parts[len(parts)-1]
@@ -72,7 +72,7 @@ func TestReadPatch(t *testing.T) {
 	assert.Empty(t, noFile)
 	assert.Empty(t, noCommit)
 	assert.Len(t, oldCommit, 40)
-	assert.True(t, oldCommit == "6e8e2a6f9efd71dbe6917816343ed8415ad696c3")
+	assert.Equal(t, "6e8e2a6f9efd71dbe6917816343ed8415ad696c3", oldCommit)
 }

 func TestReadWritePullHead(t *testing.T) {
@@ -113,7 +113,7 @@ func TestReadWritePullHead(t *testing.T) {
 	}

 	assert.Len(t, headContents, 40)
-	assert.True(t, headContents == newCommit)
+	assert.Equal(t, headContents, newCommit)

 	// Remove file after the test
 	err = repo.RemoveReference(PullPrefix + "1/head")
@@ -218,13 +218,13 @@ func (g *Manager) ServerDone() {
 	g.runningServerWaitGroup.Done()
 }

-func (g *Manager) setStateTransition(old, new state) bool {
+func (g *Manager) setStateTransition(oldState, newState state) bool {
 	g.lock.Lock()
-	if g.state != old {
+	if g.state != oldState {
 		g.lock.Unlock()
 		return false
 	}
-	g.state = new
+	g.state = newState
 	g.lock.Unlock()
 	return true
 }
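Several renames in this commit (old/new, max/min and friends) avoid shadowing Go predeclared identifiers, which golangci-lint now flags. A hedged sketch of why the rename helps; the type and function names below are illustrative only:

package example

type state int

// Shadowing works, but inside this function the builtin new() (and the
// Go 1.21 max() builtin) can no longer be used, and readers easily
// misread the parameters as the builtins.
func setStateTransitionShadowed(old, new state) bool {
	return old != new
}

// The renamed version keeps the builtins available and reads unambiguously.
func setStateTransition(oldState, newState state) bool {
	return oldState != newState
}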
@@ -35,18 +35,18 @@ func BoolFieldQuery(value bool, field string) *query.BoolFieldQuery {
 	return q
 }

-func NumericRangeInclusiveQuery(min, max optional.Option[int64], field string) *query.NumericRangeQuery {
+func NumericRangeInclusiveQuery(minOption, maxOption optional.Option[int64], field string) *query.NumericRangeQuery {
 	var minF, maxF *float64
 	var minI, maxI *bool
-	if min.Has() {
+	if minOption.Has() {
 		minF = new(float64)
-		*minF = float64(min.Value())
+		*minF = float64(minOption.Value())
 		minI = new(bool)
 		*minI = true
 	}
-	if max.Has() {
+	if maxOption.Has() {
 		maxF = new(float64)
-		*maxF = float64(max.Value())
+		*maxF = float64(maxOption.Value())
 		maxI = new(bool)
 		*maxI = true
 	}
@@ -10,12 +10,12 @@ import (
 )

 // ParsePaginator parses a db.Paginator into a skip and limit
-func ParsePaginator(paginator *db.ListOptions, max ...int) (int, int) {
+func ParsePaginator(paginator *db.ListOptions, maxNums ...int) (int, int) {
 	// Use a very large number to indicate no limit
 	unlimited := math.MaxInt32
-	if len(max) > 0 {
+	if len(maxNums) > 0 {
 		// Some indexer engines have a limit on the page size, respect that
-		unlimited = max[0]
+		unlimited = maxNums[0]
 	}

 	if paginator == nil || paginator.IsListAll() {
@@ -113,7 +113,7 @@ var cases = []*testIndexerCase{
 			},
 		},
 		Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
-			assert.Equal(t, 5, len(result.Hits))
+			assert.Len(t, result.Hits, 5)
 			assert.Equal(t, len(data), int(result.Total))
 		},
 	},
@@ -176,7 +176,7 @@ var cases = []*testIndexerCase{
 			IsPull: optional.Some(false),
 		},
 		Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
-			assert.Equal(t, 5, len(result.Hits))
+			assert.Len(t, result.Hits, 5)
 			for _, v := range result.Hits {
 				assert.False(t, data[v.ID].IsPull)
 			}
@@ -192,7 +192,7 @@ var cases = []*testIndexerCase{
 			IsPull: optional.Some(true),
 		},
 		Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
-			assert.Equal(t, 5, len(result.Hits))
+			assert.Len(t, result.Hits, 5)
 			for _, v := range result.Hits {
 				assert.True(t, data[v.ID].IsPull)
 			}
@@ -208,7 +208,7 @@ var cases = []*testIndexerCase{
 			IsClosed: optional.Some(false),
 		},
 		Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
-			assert.Equal(t, 5, len(result.Hits))
+			assert.Len(t, result.Hits, 5)
 			for _, v := range result.Hits {
 				assert.False(t, data[v.ID].IsClosed)
 			}
@@ -224,7 +224,7 @@ var cases = []*testIndexerCase{
 			IsClosed: optional.Some(true),
 		},
 		Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
-			assert.Equal(t, 5, len(result.Hits))
+			assert.Len(t, result.Hits, 5)
 			for _, v := range result.Hits {
 				assert.True(t, data[v.ID].IsClosed)
 			}
@@ -274,7 +274,7 @@ var cases = []*testIndexerCase{
 			MilestoneIDs: []int64{1, 2, 6},
 		},
 		Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
-			assert.Equal(t, 5, len(result.Hits))
+			assert.Len(t, result.Hits, 5)
 			for _, v := range result.Hits {
 				assert.Contains(t, []int64{1, 2, 6}, data[v.ID].MilestoneID)
 			}
@@ -292,7 +292,7 @@ var cases = []*testIndexerCase{
 			MilestoneIDs: []int64{0},
 		},
 		Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
-			assert.Equal(t, 5, len(result.Hits))
+			assert.Len(t, result.Hits, 5)
 			for _, v := range result.Hits {
 				assert.Equal(t, int64(0), data[v.ID].MilestoneID)
 			}
@@ -310,7 +310,7 @@ var cases = []*testIndexerCase{
 			ProjectID: optional.Some(int64(1)),
 		},
 		Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
-			assert.Equal(t, 5, len(result.Hits))
+			assert.Len(t, result.Hits, 5)
 			for _, v := range result.Hits {
 				assert.Equal(t, int64(1), data[v.ID].ProjectID)
 			}
@@ -328,7 +328,7 @@ var cases = []*testIndexerCase{
 			ProjectID: optional.Some(int64(0)),
 		},
 		Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
-			assert.Equal(t, 5, len(result.Hits))
+			assert.Len(t, result.Hits, 5)
 			for _, v := range result.Hits {
 				assert.Equal(t, int64(0), data[v.ID].ProjectID)
 			}
@@ -346,7 +346,7 @@ var cases = []*testIndexerCase{
 			ProjectColumnID: optional.Some(int64(1)),
 		},
 		Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
-			assert.Equal(t, 5, len(result.Hits))
+			assert.Len(t, result.Hits, 5)
 			for _, v := range result.Hits {
 				assert.Equal(t, int64(1), data[v.ID].ProjectColumnID)
 			}
@@ -364,7 +364,7 @@ var cases = []*testIndexerCase{
 			ProjectColumnID: optional.Some(int64(0)),
 		},
 		Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
-			assert.Equal(t, 5, len(result.Hits))
+			assert.Len(t, result.Hits, 5)
 			for _, v := range result.Hits {
 				assert.Equal(t, int64(0), data[v.ID].ProjectColumnID)
 			}
@@ -382,7 +382,7 @@ var cases = []*testIndexerCase{
 			PosterID: optional.Some(int64(1)),
 		},
 		Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
-			assert.Equal(t, 5, len(result.Hits))
+			assert.Len(t, result.Hits, 5)
 			for _, v := range result.Hits {
 				assert.Equal(t, int64(1), data[v.ID].PosterID)
 			}
@@ -400,7 +400,7 @@ var cases = []*testIndexerCase{
 			AssigneeID: optional.Some(int64(1)),
 		},
 		Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
-			assert.Equal(t, 5, len(result.Hits))
+			assert.Len(t, result.Hits, 5)
 			for _, v := range result.Hits {
 				assert.Equal(t, int64(1), data[v.ID].AssigneeID)
 			}
@@ -418,7 +418,7 @@ var cases = []*testIndexerCase{
 			AssigneeID: optional.Some(int64(0)),
 		},
 		Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
-			assert.Equal(t, 5, len(result.Hits))
+			assert.Len(t, result.Hits, 5)
 			for _, v := range result.Hits {
 				assert.Equal(t, int64(0), data[v.ID].AssigneeID)
 			}
@@ -436,7 +436,7 @@ var cases = []*testIndexerCase{
 			MentionID: optional.Some(int64(1)),
 		},
 		Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
-			assert.Equal(t, 5, len(result.Hits))
+			assert.Len(t, result.Hits, 5)
 			for _, v := range result.Hits {
 				assert.Contains(t, data[v.ID].MentionIDs, int64(1))
 			}
@@ -454,7 +454,7 @@ var cases = []*testIndexerCase{
 			ReviewedID: optional.Some(int64(1)),
 		},
 		Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
-			assert.Equal(t, 5, len(result.Hits))
+			assert.Len(t, result.Hits, 5)
 			for _, v := range result.Hits {
 				assert.Contains(t, data[v.ID].ReviewedIDs, int64(1))
 			}
@@ -472,7 +472,7 @@ var cases = []*testIndexerCase{
 			ReviewRequestedID: optional.Some(int64(1)),
 		},
 		Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
-			assert.Equal(t, 5, len(result.Hits))
+			assert.Len(t, result.Hits, 5)
 			for _, v := range result.Hits {
 				assert.Contains(t, data[v.ID].ReviewRequestedIDs, int64(1))
 			}
@@ -490,7 +490,7 @@ var cases = []*testIndexerCase{
 			SubscriberID: optional.Some(int64(1)),
 		},
 		Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
-			assert.Equal(t, 5, len(result.Hits))
+			assert.Len(t, result.Hits, 5)
 			for _, v := range result.Hits {
 				assert.Contains(t, data[v.ID].SubscriberIDs, int64(1))
 			}
@@ -509,7 +509,7 @@ var cases = []*testIndexerCase{
 			UpdatedBeforeUnix: optional.Some(int64(30)),
 		},
 		Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
-			assert.Equal(t, 5, len(result.Hits))
+			assert.Len(t, result.Hits, 5)
 			for _, v := range result.Hits {
 				assert.GreaterOrEqual(t, data[v.ID].UpdatedUnix, int64(20))
 				assert.LessOrEqual(t, data[v.ID].UpdatedUnix, int64(30))
@@ -72,7 +72,10 @@ func (c *HTTPClient) batch(ctx context.Context, operation string, objects []Poin

 	url := fmt.Sprintf("%s/objects/batch", c.endpoint)

-	request := &BatchRequest{operation, c.transferNames(), nil, objects}
+	// `ref` is an "optional object describing the server ref that the objects belong to"
+	// but some (incorrect) lfs servers require it, so maybe adding an empty ref here doesn't break the correct ones.
+	// https://github.com/git-lfs/git-lfs/blob/a32a02b44bf8a511aa14f047627c49e1a7fd5021/docs/api/batch.md?plain=1#L37
+	request := &BatchRequest{operation, c.transferNames(), &Reference{}, objects}
 	payload := new(bytes.Buffer)
 	err := json.NewEncoder(payload).Encode(request)
 	if err != nil {
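The practical effect of passing &Reference{} instead of nil is that the batch payload now carries an empty ref object rather than omitting the key. A hedged, self-contained sketch with simplified stand-in types (the real field names and JSON tags live in modules/lfs and may differ):

package main

import (
	"encoding/json"
	"fmt"
)

// Hypothetical, simplified shapes of the batch request types.
type reference struct {
	Name string `json:"name"`
}

type batchRequest struct {
	Operation string     `json:"operation"`
	Ref       *reference `json:"ref,omitempty"`
}

func main() {
	withNil, _ := json.Marshal(batchRequest{Operation: "download"})
	withEmpty, _ := json.Marshal(batchRequest{Operation: "download", Ref: &reference{}})
	fmt.Println(string(withNil))   // {"operation":"download"} - no "ref" key at all
	fmt.Println(string(withEmpty)) // {"operation":"download","ref":{"name":""}}
}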
@@ -14,9 +14,12 @@ import (
 const (
 	// MediaType contains the media type for LFS server requests
 	MediaType = "application/vnd.git-lfs+json"
-	// Some LFS servers offer content with other types, so fallback to '*/*' if application/vnd.git-lfs+json cannot be served
+	// AcceptHeader Some LFS servers offer content with other types, so fallback to '*/*' if application/vnd.git-lfs+json cannot be served
 	AcceptHeader = "application/vnd.git-lfs+json;q=0.9, */*;q=0.8"
-	UserAgentHeader = "git-lfs"
+	// UserAgentHeader Add User-Agent for gitea's self-implemented lfs client,
+	// and the version is consistent with the latest version of git lfs can be avoided incompatibilities.
+	// Some lfs servers will check this
+	UserAgentHeader = "git-lfs/3.6.0 (Gitea)"
 )

 // BatchRequest contains multiple requests processed in one batch operation.
@@ -96,7 +96,7 @@ func TestBasicTransferAdapter(t *testing.T) {
 	for n, c := range cases {
 		_, err := a.Download(context.Background(), c.link)
 		if len(c.expectederror) > 0 {
-			assert.True(t, strings.Contains(err.Error(), c.expectederror), "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror)
+			assert.Contains(t, err.Error(), c.expectederror, "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror)
 		} else {
 			assert.NoError(t, err, "case %d", n)
 		}
@@ -129,7 +129,7 @@ func TestBasicTransferAdapter(t *testing.T) {
 	for n, c := range cases {
 		err := a.Upload(context.Background(), c.link, p, bytes.NewBufferString("dummy"))
 		if len(c.expectederror) > 0 {
-			assert.True(t, strings.Contains(err.Error(), c.expectederror), "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror)
+			assert.Contains(t, err.Error(), c.expectederror, "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror)
 		} else {
 			assert.NoError(t, err, "case %d", n)
 		}
@@ -162,7 +162,7 @@ func TestBasicTransferAdapter(t *testing.T) {
 	for n, c := range cases {
 		err := a.Verify(context.Background(), c.link, p)
 		if len(c.expectederror) > 0 {
-			assert.True(t, strings.Contains(err.Error(), c.expectederror), "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror)
+			assert.Contains(t, err.Error(), c.expectederror, "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror)
 		} else {
 			assert.NoError(t, err, "case %d", n)
 		}
@@ -110,10 +110,10 @@ func EventFormatTextMessage(mode *WriterMode, event *Event, msgFormat string, ms
 		buf = append(buf, ' ')
 	}
 	if flags&(Ltime|Lmicroseconds) != 0 {
-		hour, min, sec := t.Clock()
+		hour, minNum, sec := t.Clock()
 		buf = itoa(buf, hour, 2)
 		buf = append(buf, ':')
-		buf = itoa(buf, min, 2)
+		buf = itoa(buf, minNum, 2)
 		buf = append(buf, ':')
 		buf = itoa(buf, sec, 2)
 		if flags&Lmicroseconds != 0 {
@@ -56,7 +56,7 @@ func TestLogger(t *testing.T) {
 	logger := NewLoggerWithWriters(context.Background(), "test")

 	dump := logger.DumpWriters()
-	assert.EqualValues(t, 0, len(dump))
+	assert.Empty(t, dump)
 	assert.EqualValues(t, NONE, logger.GetLevel())
 	assert.False(t, logger.IsEnabled())

@@ -69,7 +69,7 @@ func TestLogger(t *testing.T) {
 	assert.EqualValues(t, DEBUG, logger.GetLevel())

 	dump = logger.DumpWriters()
-	assert.EqualValues(t, 2, len(dump))
+	assert.Len(t, dump, 2)

 	logger.Trace("trace-level") // this level is not logged
 	logger.Debug("debug-level")
@@ -278,12 +278,12 @@ func TestRender_AutoLink(t *testing.T) {
 	test := func(input, expected string) {
 		var buffer strings.Builder
 		err := PostProcessDefault(NewTestRenderContext(localMetas), strings.NewReader(input), &buffer)
-		assert.Equal(t, err, nil)
+		assert.NoError(t, err)
 		assert.Equal(t, strings.TrimSpace(expected), strings.TrimSpace(buffer.String()))

 		buffer.Reset()
 		err = PostProcessDefault(NewTestRenderContext(localMetas), strings.NewReader(input), &buffer)
-		assert.Equal(t, err, nil)
+		assert.NoError(t, err)
 		assert.Equal(t, strings.TrimSpace(expected), strings.TrimSpace(buffer.String()))
 	}

@@ -78,26 +78,23 @@ func (r *GlodmarkRender) Renderer() renderer.Renderer {

 func (r *GlodmarkRender) highlightingRenderer(w util.BufWriter, c highlighting.CodeBlockContext, entering bool) {
 	if entering {
-		language, _ := c.Language()
-		if language == nil {
-			language = []byte("text")
-		}
-
-		languageStr := string(language)
-
-		preClasses := []string{"code-block"}
+		languageBytes, _ := c.Language()
+		languageStr := giteautil.IfZero(string(languageBytes), "text")
+
+		preClasses := "code-block"
 		if languageStr == "mermaid" || languageStr == "math" {
-			preClasses = append(preClasses, "is-loading")
+			preClasses += " is-loading"
 		}

-		err := r.ctx.RenderInternal.FormatWithSafeAttrs(w, `<pre class="%s">`, strings.Join(preClasses, " "))
+		err := r.ctx.RenderInternal.FormatWithSafeAttrs(w, `<pre class="%s">`, preClasses)
 		if err != nil {
 			return
 		}

-		// include language-x class as part of commonmark spec
-		// the "display" class is used by "js/markup/math.js" to render the code element as a block
-		err = r.ctx.RenderInternal.FormatWithSafeAttrs(w, `<code class="chroma language-%s display">`, string(language))
+		// include language-x class as part of commonmark spec, "chroma" class is used to highlight the code
+		// the "display" class is used by "js/markup/math.ts" to render the code element as a block
+		// the "math.ts" strictly depends on the structure: <pre class="code-block is-loading"><code class="language-math display">...</code></pre>
+		err = r.ctx.RenderInternal.FormatWithSafeAttrs(w, `<code class="chroma language-%s display">`, languageStr)
 		if err != nil {
 			return
 		}
|
|||||||
),
|
),
|
||||||
highlighting.WithWrapperRenderer(r.highlightingRenderer),
|
highlighting.WithWrapperRenderer(r.highlightingRenderer),
|
||||||
),
|
),
|
||||||
math.NewExtension(&ctx.RenderInternal, math.Enabled(setting.Markdown.EnableMath)),
|
math.NewExtension(&ctx.RenderInternal, math.Options{
|
||||||
|
Enabled: setting.Markdown.EnableMath,
|
||||||
|
ParseDollarInline: true,
|
||||||
|
ParseDollarBlock: true,
|
||||||
|
ParseSquareBlock: true, // TODO: this is a bad syntax, it should be deprecated in the future (by some config options)
|
||||||
|
}),
|
||||||
meta.Meta,
|
meta.Meta,
|
||||||
),
|
),
|
||||||
goldmark.WithParserOptions(
|
goldmark.WithParserOptions(
|
||||||
|
@@ -12,8 +12,9 @@ import (
 	"github.com/stretchr/testify/assert"
 )

+const nl = "\n"
+
 func TestMathRender(t *testing.T) {
-	const nl = "\n"
 	testcases := []struct {
 		testcase string
 		expected string
|
|||||||
`$\text{$b$}$`,
|
`$\text{$b$}$`,
|
||||||
`<p><code class="language-math">\text{$b$}</code></p>` + nl,
|
`<p><code class="language-math">\text{$b$}</code></p>` + nl,
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"a$`b`$c",
|
||||||
|
`<p>a<code class="language-math">b</code>c</p>` + nl,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"a $`b`$ c",
|
||||||
|
`<p>a <code class="language-math">b</code> c</p>` + nl,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"a$``b``$c x$```y```$z",
|
||||||
|
`<p>a<code class="language-math">b</code>c x<code class="language-math">y</code>z</p>` + nl,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, test := range testcases {
|
for _, test := range testcases {
|
||||||
@ -215,6 +228,11 @@ x
|
|||||||
</ol>
|
</ol>
|
||||||
`,
|
`,
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"inline-non-math",
|
||||||
|
`\[x]`,
|
||||||
|
`<p>[x]</p>` + nl,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, test := range testcases {
|
for _, test := range testcases {
|
||||||
|
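These new cases exercise the reworked inline parser that appears later in this diff. Worked through by hand on the code shown below: for the input a$``b``$c the trigger is the single '$', the run of backticks after it gives a three-byte opening marker ("$``"), the stop marker is computed as "``$", and the surrounding-character checks are skipped (checkSurrounding = false), so the math span ends exactly at the matching "``$" even though it is glued to ordinary text on both sides.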
@@ -16,16 +16,18 @@ import (

 type blockParser struct {
 	parseDollars    bool
+	parseSquare     bool
 	endBytesDollars []byte
-	endBytesBracket []byte
+	endBytesSquare  []byte
 }

 // NewBlockParser creates a new math BlockParser
-func NewBlockParser(parseDollarBlocks bool) parser.BlockParser {
+func NewBlockParser(parseDollars, parseSquare bool) parser.BlockParser {
 	return &blockParser{
-		parseDollars:    parseDollarBlocks,
+		parseDollars:    parseDollars,
+		parseSquare:     parseSquare,
 		endBytesDollars: []byte{'$', '$'},
-		endBytesBracket: []byte{'\\', ']'},
+		endBytesSquare:  []byte{'\\', ']'},
 	}
 }

|
|||||||
var dollars bool
|
var dollars bool
|
||||||
if b.parseDollars && line[pos] == '$' && line[pos+1] == '$' {
|
if b.parseDollars && line[pos] == '$' && line[pos+1] == '$' {
|
||||||
dollars = true
|
dollars = true
|
||||||
} else if line[pos] == '\\' && line[pos+1] == '[' {
|
} else if b.parseSquare && line[pos] == '\\' && line[pos+1] == '[' {
|
||||||
if len(line[pos:]) >= 3 && line[pos+2] == '!' && bytes.Contains(line[pos:], []byte(`\]`)) {
|
if len(line[pos:]) >= 3 && line[pos+2] == '!' && bytes.Contains(line[pos:], []byte(`\]`)) {
|
||||||
// do not process escaped attention block: "> \[!NOTE\]"
|
// do not process escaped attention block: "> \[!NOTE\]"
|
||||||
return nil, parser.NoChildren
|
return nil, parser.NoChildren
|
||||||
@ -53,10 +55,10 @@ func (b *blockParser) Open(parent ast.Node, reader text.Reader, pc parser.Contex
|
|||||||
node := NewBlock(dollars, pos)
|
node := NewBlock(dollars, pos)
|
||||||
|
|
||||||
// Now we need to check if the ending block is on the segment...
|
// Now we need to check if the ending block is on the segment...
|
||||||
endBytes := giteaUtil.Iif(dollars, b.endBytesDollars, b.endBytesBracket)
|
endBytes := giteaUtil.Iif(dollars, b.endBytesDollars, b.endBytesSquare)
|
||||||
idx := bytes.Index(line[pos+2:], endBytes)
|
idx := bytes.Index(line[pos+2:], endBytes)
|
||||||
if idx >= 0 {
|
if idx >= 0 {
|
||||||
// for case $$ ... $$ any other text
|
// for case: "$$ ... $$ any other text" (this case will be handled by the inline parser)
|
||||||
for i := pos + 2 + idx + 2; i < len(line); i++ {
|
for i := pos + 2 + idx + 2; i < len(line); i++ {
|
||||||
if line[i] != ' ' && line[i] != '\n' {
|
if line[i] != ' ' && line[i] != '\n' {
|
||||||
return nil, parser.NoChildren
|
return nil, parser.NoChildren
|
||||||
@ -70,6 +72,13 @@ func (b *blockParser) Open(parent ast.Node, reader text.Reader, pc parser.Contex
|
|||||||
return node, parser.Close | parser.NoChildren
|
return node, parser.Close | parser.NoChildren
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// for case "\[ ... ]" (no close marker on the same line)
|
||||||
|
for i := pos + 2 + idx + 2; i < len(line); i++ {
|
||||||
|
if line[i] != ' ' && line[i] != '\n' {
|
||||||
|
return nil, parser.NoChildren
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
segment.Start += pos + 2
|
segment.Start += pos + 2
|
||||||
node.Lines().Append(segment)
|
node.Lines().Append(segment)
|
||||||
return node, parser.NoChildren
|
return node, parser.NoChildren
|
||||||
@ -85,7 +94,7 @@ func (b *blockParser) Continue(node ast.Node, reader text.Reader, pc parser.Cont
|
|||||||
line, segment := reader.PeekLine()
|
line, segment := reader.PeekLine()
|
||||||
w, pos := util.IndentWidth(line, reader.LineOffset())
|
w, pos := util.IndentWidth(line, reader.LineOffset())
|
||||||
if w < 4 {
|
if w < 4 {
|
||||||
endBytes := giteaUtil.Iif(block.Dollars, b.endBytesDollars, b.endBytesBracket)
|
endBytes := giteaUtil.Iif(block.Dollars, b.endBytesDollars, b.endBytesSquare)
|
||||||
if bytes.HasPrefix(line[pos:], endBytes) && util.IsBlank(line[pos+len(endBytes):]) {
|
if bytes.HasPrefix(line[pos:], endBytes) && util.IsBlank(line[pos+len(endBytes):]) {
|
||||||
if util.IsBlank(line[pos+len(endBytes):]) {
|
if util.IsBlank(line[pos+len(endBytes):]) {
|
||||||
newline := giteaUtil.Iif(line[len(line)-1] != '\n', 0, 1)
|
newline := giteaUtil.Iif(line[len(line)-1] != '\n', 0, 1)
|
||||||
|
@@ -1,31 +0,0 @@
-// Copyright 2024 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package math
-
-import (
-	"github.com/yuin/goldmark/ast"
-)
-
-// InlineBlock represents inline math e.g. $$...$$
-type InlineBlock struct {
-	Inline
-}
-
-// InlineBlock implements InlineBlock.
-func (n *InlineBlock) InlineBlock() {}
-
-// KindInlineBlock is the kind for math inline block
-var KindInlineBlock = ast.NewNodeKind("MathInlineBlock")
-
-// Kind returns KindInlineBlock
-func (n *InlineBlock) Kind() ast.NodeKind {
-	return KindInlineBlock
-}
-
-// NewInlineBlock creates a new ast math inline block node
-func NewInlineBlock() *InlineBlock {
-	return &InlineBlock{
-		Inline{},
-	}
-}
@@ -8,7 +8,7 @@ import (
 	"github.com/yuin/goldmark/util"
 )

-// Inline represents inline math e.g. $...$ or \(...\)
+// Inline struct represents inline math e.g. $...$ or \(...\)
 type Inline struct {
 	ast.BaseInline
 }
@@ -12,31 +12,25 @@ import (
 )

 type inlineParser struct {
-	start []byte
-	end   []byte
+	trigger              []byte
+	endBytesSingleDollar []byte
+	endBytesDoubleDollar []byte
+	endBytesBracket      []byte
 }

 var defaultInlineDollarParser = &inlineParser{
-	start: []byte{'$'},
-	end:   []byte{'$'},
-}
-
-var defaultDualDollarParser = &inlineParser{
-	start: []byte{'$', '$'},
-	end:   []byte{'$', '$'},
+	trigger:              []byte{'$'},
+	endBytesSingleDollar: []byte{'$'},
+	endBytesDoubleDollar: []byte{'$', '$'},
 }

 func NewInlineDollarParser() parser.InlineParser {
 	return defaultInlineDollarParser
 }

-func NewInlineDualDollarParser() parser.InlineParser {
-	return defaultDualDollarParser
-}
-
 var defaultInlineBracketParser = &inlineParser{
-	start: []byte{'\\', '('},
-	end:   []byte{'\\', ')'},
+	trigger:         []byte{'\\', '('},
+	endBytesBracket: []byte{'\\', ')'},
 }

 func NewInlineBracketParser() parser.InlineParser {
@@ -45,7 +39,7 @@ func NewInlineBracketParser() parser.InlineParser {

 // Trigger triggers this parser on $ or \
 func (parser *inlineParser) Trigger() []byte {
-	return parser.start
+	return parser.trigger
 }

 func isPunctuation(b byte) bool {
|
|||||||
func (parser *inlineParser) Parse(parent ast.Node, block text.Reader, pc parser.Context) ast.Node {
|
func (parser *inlineParser) Parse(parent ast.Node, block text.Reader, pc parser.Context) ast.Node {
|
||||||
line, _ := block.PeekLine()
|
line, _ := block.PeekLine()
|
||||||
|
|
||||||
if !bytes.HasPrefix(line, parser.start) {
|
if !bytes.HasPrefix(line, parser.trigger) {
|
||||||
// We'll catch this one on the next time round
|
// We'll catch this one on the next time round
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
precedingCharacter := block.PrecendingCharacter()
|
var startMarkLen int
|
||||||
if precedingCharacter < 256 && (isAlphanumeric(byte(precedingCharacter)) || isPunctuation(byte(precedingCharacter))) {
|
var stopMark []byte
|
||||||
// need to exclude things like `a$` from being considered a start
|
checkSurrounding := true
|
||||||
return nil
|
if line[0] == '$' {
|
||||||
|
startMarkLen = 1
|
||||||
|
stopMark = parser.endBytesSingleDollar
|
||||||
|
if len(line) > 1 {
|
||||||
|
if line[1] == '$' {
|
||||||
|
startMarkLen = 2
|
||||||
|
stopMark = parser.endBytesDoubleDollar
|
||||||
|
} else if line[1] == '`' {
|
||||||
|
pos := 1
|
||||||
|
for ; pos < len(line) && line[pos] == '`'; pos++ {
|
||||||
|
}
|
||||||
|
startMarkLen = pos
|
||||||
|
stopMark = bytes.Repeat([]byte{'`'}, pos)
|
||||||
|
stopMark[len(stopMark)-1] = '$'
|
||||||
|
checkSurrounding = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
startMarkLen = 2
|
||||||
|
stopMark = parser.endBytesBracket
|
||||||
|
}
|
||||||
|
|
||||||
|
if checkSurrounding {
|
||||||
|
precedingCharacter := block.PrecendingCharacter()
|
||||||
|
if precedingCharacter < 256 && (isAlphanumeric(byte(precedingCharacter)) || isPunctuation(byte(precedingCharacter))) {
|
||||||
|
// need to exclude things like `a$` from being considered a start
|
||||||
|
return nil
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// move the opener marker point at the start of the text
|
// move the opener marker point at the start of the text
|
||||||
opener := len(parser.start)
|
opener := startMarkLen
|
||||||
|
|
||||||
// Now look for an ending line
|
// Now look for an ending line
|
||||||
depth := 0
|
depth := 0
|
||||||
ender := -1
|
ender := -1
|
||||||
for i := opener; i < len(line); i++ {
|
for i := opener; i < len(line); i++ {
|
||||||
if depth == 0 && bytes.HasPrefix(line[i:], parser.end) {
|
if depth == 0 && bytes.HasPrefix(line[i:], stopMark) {
|
||||||
succeedingCharacter := byte(0)
|
succeedingCharacter := byte(0)
|
||||||
if i+len(parser.end) < len(line) {
|
if i+len(stopMark) < len(line) {
|
||||||
succeedingCharacter = line[i+len(parser.end)]
|
succeedingCharacter = line[i+len(stopMark)]
|
||||||
}
|
}
|
||||||
// check valid ending character
|
// check valid ending character
|
||||||
isValidEndingChar := isPunctuation(succeedingCharacter) || isBracket(succeedingCharacter) ||
|
isValidEndingChar := isPunctuation(succeedingCharacter) || isBracket(succeedingCharacter) ||
|
||||||
succeedingCharacter == ' ' || succeedingCharacter == '\n' || succeedingCharacter == 0
|
succeedingCharacter == ' ' || succeedingCharacter == '\n' || succeedingCharacter == 0
|
||||||
if !isValidEndingChar {
|
if checkSurrounding && !isValidEndingChar {
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
ender = i
|
ender = i
|
||||||
@ -112,21 +133,12 @@ func (parser *inlineParser) Parse(parent ast.Node, block text.Reader, pc parser.
|
|||||||
|
|
||||||
block.Advance(opener)
|
block.Advance(opener)
|
||||||
_, pos := block.Position()
|
_, pos := block.Position()
|
||||||
var node ast.Node
|
node := NewInline()
|
||||||
if parser == defaultDualDollarParser {
|
|
||||||
node = NewInlineBlock()
|
|
||||||
} else {
|
|
||||||
node = NewInline()
|
|
||||||
}
|
|
||||||
segment := pos.WithStop(pos.Start + ender - opener)
|
segment := pos.WithStop(pos.Start + ender - opener)
|
||||||
node.AppendChild(node, ast.NewRawTextSegment(segment))
|
node.AppendChild(node, ast.NewRawTextSegment(segment))
|
||||||
block.Advance(ender - opener + len(parser.end))
|
block.Advance(ender - opener + len(stopMark))
|
||||||
|
trimBlock(node, block)
|
||||||
if parser == defaultDualDollarParser {
|
|
||||||
trimBlock(&(node.(*InlineBlock)).Inline, block)
|
|
||||||
} else {
|
|
||||||
trimBlock(node.(*Inline), block)
|
|
||||||
}
|
|
||||||
return node
|
return node
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -50,5 +50,4 @@ func (r *InlineRenderer) renderInline(w util.BufWriter, source []byte, n ast.Nod
|
|||||||
// RegisterFuncs registers the renderer for inline math nodes
|
// RegisterFuncs registers the renderer for inline math nodes
|
||||||
func (r *InlineRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
|
func (r *InlineRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
|
||||||
reg.Register(KindInline, r.renderInline)
|
reg.Register(KindInline, r.renderInline)
|
||||||
reg.Register(KindInlineBlock, r.renderInline)
|
|
||||||
}
|
}
|
||||||
|
@ -5,6 +5,7 @@ package math
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"code.gitea.io/gitea/modules/markup/internal"
|
"code.gitea.io/gitea/modules/markup/internal"
|
||||||
|
giteaUtil "code.gitea.io/gitea/modules/util"
|
||||||
|
|
||||||
"github.com/yuin/goldmark"
|
"github.com/yuin/goldmark"
|
||||||
"github.com/yuin/goldmark/parser"
|
"github.com/yuin/goldmark/parser"
|
||||||
@ -12,70 +13,45 @@ import (
|
|||||||
"github.com/yuin/goldmark/util"
|
"github.com/yuin/goldmark/util"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
type Options struct {
|
||||||
|
Enabled bool
|
||||||
|
ParseDollarInline bool
|
||||||
|
ParseDollarBlock bool
|
||||||
|
ParseSquareBlock bool
|
||||||
|
}
|
||||||
|
|
||||||
// Extension is a math extension
|
// Extension is a math extension
|
||||||
type Extension struct {
|
type Extension struct {
|
||||||
renderInternal *internal.RenderInternal
|
renderInternal *internal.RenderInternal
|
||||||
enabled bool
|
options Options
|
||||||
parseDollarInline bool
|
|
||||||
parseDollarBlock bool
|
|
||||||
}
|
|
||||||
|
|
||||||
// Option is the interface Options should implement
|
|
||||||
type Option interface {
|
|
||||||
SetOption(e *Extension)
|
|
||||||
}
|
|
||||||
|
|
||||||
type extensionFunc func(e *Extension)
|
|
||||||
|
|
||||||
func (fn extensionFunc) SetOption(e *Extension) {
|
|
||||||
fn(e)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Enabled enables or disables this extension
|
|
||||||
func Enabled(enable ...bool) Option {
|
|
||||||
value := true
|
|
||||||
if len(enable) > 0 {
|
|
||||||
value = enable[0]
|
|
||||||
}
|
|
||||||
return extensionFunc(func(e *Extension) {
|
|
||||||
e.enabled = value
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewExtension creates a new math extension with the provided options
|
// NewExtension creates a new math extension with the provided options
|
||||||
func NewExtension(renderInternal *internal.RenderInternal, opts ...Option) *Extension {
|
func NewExtension(renderInternal *internal.RenderInternal, opts ...Options) *Extension {
|
||||||
|
opt := giteaUtil.OptionalArg(opts)
|
||||||
r := &Extension{
|
r := &Extension{
|
||||||
renderInternal: renderInternal,
|
renderInternal: renderInternal,
|
||||||
enabled: true,
|
options: opt,
|
||||||
parseDollarBlock: true,
|
|
||||||
parseDollarInline: true,
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, o := range opts {
|
|
||||||
o.SetOption(r)
|
|
||||||
}
|
}
|
||||||
return r
|
return r
|
||||||
}
|
}
|
||||||
|
|
||||||
// Extend extends goldmark with our parsers and renderers
|
// Extend extends goldmark with our parsers and renderers
|
||||||
func (e *Extension) Extend(m goldmark.Markdown) {
|
func (e *Extension) Extend(m goldmark.Markdown) {
|
||||||
if !e.enabled {
|
if !e.options.Enabled {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
m.Parser().AddOptions(parser.WithBlockParsers(
|
inlines := []util.PrioritizedValue{util.Prioritized(NewInlineBracketParser(), 501)}
|
||||||
util.Prioritized(NewBlockParser(e.parseDollarBlock), 701),
|
if e.options.ParseDollarInline {
|
||||||
))
|
inlines = append(inlines, util.Prioritized(NewInlineDollarParser(), 502))
|
||||||
|
|
||||||
inlines := []util.PrioritizedValue{
|
|
||||||
util.Prioritized(NewInlineBracketParser(), 501),
|
|
||||||
}
|
|
||||||
if e.parseDollarInline {
|
|
||||||
inlines = append(inlines, util.Prioritized(NewInlineDollarParser(), 503),
|
|
||||||
util.Prioritized(NewInlineDualDollarParser(), 502))
|
|
||||||
}
|
}
|
||||||
m.Parser().AddOptions(parser.WithInlineParsers(inlines...))
|
m.Parser().AddOptions(parser.WithInlineParsers(inlines...))
|
||||||
|
|
||||||
|
m.Parser().AddOptions(parser.WithBlockParsers(
|
||||||
|
util.Prioritized(NewBlockParser(e.options.ParseDollarBlock, e.options.ParseSquareBlock), 701),
|
||||||
|
))
|
||||||
|
|
||||||
m.Renderer().AddOptions(renderer.WithNodeRenderers(
|
m.Renderer().AddOptions(renderer.WithNodeRenderers(
|
||||||
util.Prioritized(NewBlockRenderer(e.renderInternal), 501),
|
util.Prioritized(NewBlockRenderer(e.renderInternal), 501),
|
||||||
util.Prioritized(NewInlineRenderer(e.renderInternal), 502),
|
util.Prioritized(NewInlineRenderer(e.renderInternal), 502),
|
||||||
|
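The functional-option API (math.Enabled(...)) is replaced by a plain Options struct passed variadically and collapsed with OptionalArg, which is assumed here to return the first element of the slice or the zero value when none is given. A hedged, self-contained sketch of that call-site pattern; the helper below is illustrative, the real one lives in code.gitea.io/gitea/modules/util:

package example

// optionalArg mimics the helper used above: first value wins, otherwise
// the zero value of T is used as the default.
func optionalArg[T any](opts []T) (v T) {
	if len(opts) > 0 {
		v = opts[0]
	}
	return v
}

type Options struct {
	Enabled           bool
	ParseDollarInline bool
	ParseDollarBlock  bool
	ParseSquareBlock  bool
}

// Callers now pass the whole option set as one literal instead of
// composing functional options.
func newExtension(opts ...Options) Options {
	return optionalArg(opts)
}

var _ = newExtension(Options{Enabled: true, ParseDollarInline: true, ParseDollarBlock: true})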
@@ -40,7 +40,7 @@ class ConanPackageConan(ConanFile):

 func TestParseConanfile(t *testing.T) {
 	metadata, err := ParseConanfile(strings.NewReader(contentConanfile))
-	assert.Nil(t, err)
+	assert.NoError(t, err)
 	assert.Equal(t, license, metadata.License)
 	assert.Equal(t, author, metadata.Author)
 	assert.Equal(t, homepage, metadata.ProjectURL)
@@ -50,7 +50,7 @@ const (
 func TestParseConaninfo(t *testing.T) {
 	info, err := ParseConaninfo(strings.NewReader(contentConaninfo))
 	assert.NotNil(t, info)
-	assert.Nil(t, err)
+	assert.NoError(t, err)
 	assert.Equal(
 		t,
 		map[string]string{
@@ -46,10 +46,10 @@ func testQueueBasic(t *testing.T, newFn func(cfg *BaseConfig) (baseQueue, error)
 	assert.NoError(t, err)
 	if !isUnique {
 		assert.EqualValues(t, 2, cnt)
-		assert.EqualValues(t, false, has) // non-unique queues don't check for duplicates
+		assert.False(t, has) // non-unique queues don't check for duplicates
 	} else {
 		assert.EqualValues(t, 1, cnt)
-		assert.EqualValues(t, true, has)
+		assert.True(t, has)
 	}

 	// push another item
@@ -101,7 +101,7 @@ func testQueueBasic(t *testing.T, newFn func(cfg *BaseConfig) (baseQueue, error)
 	pushBlockTime = 30 * time.Millisecond
 	err = q.PushItem(ctx, []byte("item-full"))
 	assert.ErrorIs(t, err, context.DeadlineExceeded)
-	assert.True(t, time.Since(timeStart) >= pushBlockTime*2/3)
+	assert.GreaterOrEqual(t, time.Since(timeStart), pushBlockTime*2/3)
 	pushBlockTime = oldPushBlockTime

 	// remove all
@@ -172,8 +172,8 @@ func testWorkerPoolQueuePersistence(t *testing.T, queueSetting setting.QueueSett

 	q2() // restart the queue to continue to execute the tasks in it

-	assert.NotZero(t, len(tasksQ1))
-	assert.NotZero(t, len(tasksQ2))
+	assert.NotEmpty(t, tasksQ1)
+	assert.NotEmpty(t, tasksQ2)
 	assert.EqualValues(t, testCount, len(tasksQ1)+len(tasksQ2))
 }

@@ -164,9 +164,9 @@ func newKeywords() {
 	})
 }

-func doNewKeywords(close, reopen []string) {
+func doNewKeywords(closeKeywords, reopenKeywords []string) {
-	issueCloseKeywordsPat = makeKeywordsPat(close)
+	issueCloseKeywordsPat = makeKeywordsPat(closeKeywords)
-	issueReopenKeywordsPat = makeKeywordsPat(reopen)
+	issueReopenKeywordsPat = makeKeywordsPat(reopenKeywords)
 }

 // getGiteaHostName returns a normalized string with the local host name, with no scheme or port information
@@ -526,7 +526,7 @@ func TestCustomizeCloseKeywords(t *testing.T) {

 func TestParseCloseKeywords(t *testing.T) {
 	// Test parsing of CloseKeywords and ReopenKeywords
-	assert.Len(t, parseKeywords([]string{""}), 0)
+	assert.Empty(t, parseKeywords([]string{""}))
 	assert.Len(t, parseKeywords([]string{" aa ", " bb ", "99", "#", "", "this is", "cc"}), 3)

 	for _, test := range []struct {
@@ -9,14 +9,22 @@ import (
 	"code.gitea.io/gitea/models/organization"
 	repo_model "code.gitea.io/gitea/models/repo"
 	user_model "code.gitea.io/gitea/models/user"
+	"code.gitea.io/gitea/modules/setting"
 )

+func CanUserForkBetweenOwners(id1, id2 int64) bool {
+	if id1 != id2 {
+		return true
+	}
+	return setting.Repository.AllowForkIntoSameOwner
+}
+
 // CanUserForkRepo returns true if specified user can fork repository.
 func CanUserForkRepo(ctx context.Context, user *user_model.User, repo *repo_model.Repository) (bool, error) {
 	if user == nil {
 		return false, nil
 	}
-	if repo.OwnerID != user.ID && !repo_model.HasForkedRepo(ctx, user.ID, repo.ID) {
+	if CanUserForkBetweenOwners(repo.OwnerID, user.ID) && !repo_model.HasForkedRepo(ctx, user.ID, repo.ID) {
 		return true, nil
 	}
 	ownedOrgs, err := organization.GetOrgsCanCreateRepoByUserID(ctx, user.ID)
modules/repository/fork_test.go (new file, 25 lines)
@@ -0,0 +1,25 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+	"testing"
+
+	"code.gitea.io/gitea/modules/setting"
+	"code.gitea.io/gitea/modules/test"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestCanUserForkBetweenOwners(t *testing.T) {
+	defer test.MockVariableValue(&setting.Repository.AllowForkIntoSameOwner)
+
+	setting.Repository.AllowForkIntoSameOwner = true
+	assert.True(t, CanUserForkBetweenOwners(1, 1))
+	assert.True(t, CanUserForkBetweenOwners(1, 2))
+
+	setting.Repository.AllowForkIntoSameOwner = false
+	assert.False(t, CanUserForkBetweenOwners(1, 1))
+	assert.True(t, CanUserForkBetweenOwners(1, 2))
+}
@@ -62,15 +62,15 @@ func Test_calcSync(t *testing.T) {
 	}

 	inserts, deletes, updates := calcSync(gitTags, dbReleases)
-	if assert.EqualValues(t, 1, len(inserts), "inserts") {
+	if assert.Len(t, inserts, 1, "inserts") {
 		assert.EqualValues(t, *gitTags[2], *inserts[0], "inserts equal")
 	}

-	if assert.EqualValues(t, 1, len(deletes), "deletes") {
+	if assert.Len(t, deletes, 1, "deletes") {
 		assert.EqualValues(t, 1, deletes[0], "deletes equal")
 	}

-	if assert.EqualValues(t, 1, len(updates), "updates") {
+	if assert.Len(t, updates, 1, "updates") {
 		assert.EqualValues(t, *gitTags[1], *updates[0], "updates equal")
 	}
 }
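Most hunks in this area are mechanical testifylint rewrites. The length checks above switch to the dedicated assertion, which reports the slice contents on failure instead of two bare integers. A small illustrative test, not part of the diff:

package demo

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestLenStyle(t *testing.T) {
	inserts := []string{"v1.2.0"}

	assert.EqualValues(t, 1, len(inserts), "inserts") // old style, flagged by testifylint
	assert.Len(t, inserts, 1, "inserts")              // preferred style used in the hunk above
}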
@@ -38,6 +38,6 @@ EXTEND = true
 	_, err = getCronSettings(cfg, "test", extended)
 	assert.NoError(t, err)
 	assert.True(t, extended.Base)
-	assert.EqualValues(t, extended.Second, "white rabbit")
+	assert.EqualValues(t, "white rabbit", extended.Second)
 	assert.True(t, extended.Extend)
 }
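The swap above is about argument order: testify treats the second argument as the expected value and the third as the actual one, so failure messages only read correctly when the literal comes first. Illustrative only:

package demo

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestExpectedComesFirst(t *testing.T) {
	got := "white rabbit"

	// expected first, actual second -- a failure prints `expected: "white rabbit"`
	assert.EqualValues(t, "white rabbit", got)
}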
@@ -74,5 +74,5 @@ DEFAULT_APPLICATIONS = tea
 DEFAULT_APPLICATIONS =
 `)
 	loadOAuth2From(cfg)
-	assert.Nil(t, nil, OAuth2.DefaultApplications)
+	assert.Nil(t, OAuth2.DefaultApplications)
 }
@@ -53,6 +53,7 @@ var (
 		AllowDeleteOfUnadoptedRepositories bool
 		DisableDownloadSourceArchives      bool
 		AllowForkWithoutMaximumLimit       bool
+		AllowForkIntoSameOwner             bool

 		// Repository editor settings
 		Editor struct {
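The new field is presumably filled from the [repository] section of app.ini by the same reflection-based loading as its siblings, which would make the key ALLOW_FORK_INTO_SAME_OWNER. Neither the key name nor the loader call below is shown by this diff, so treat this test-style fragment as an assumption:

package setting

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

// Hypothetical: the ini key name and loadRepositoryFrom are inferred from how
// the sibling settings in this diff are loaded, not confirmed by the diff itself.
func TestAllowForkIntoSameOwner(t *testing.T) {
	cfg, err := NewConfigProviderFromData(`
[repository]
ALLOW_FORK_INTO_SAME_OWNER = true
`)
	assert.NoError(t, err)
	assert.NoError(t, loadRepositoryFrom(cfg))
	assert.True(t, Repository.AllowForkIntoSameOwner)
}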
@@ -447,7 +447,7 @@ MINIO_USE_SSL = true
 	assert.NoError(t, loadRepoArchiveFrom(cfg))
 	assert.EqualValues(t, "my_access_key", RepoArchive.Storage.MinioConfig.AccessKeyID)
 	assert.EqualValues(t, "my_secret_key", RepoArchive.Storage.MinioConfig.SecretAccessKey)
-	assert.EqualValues(t, true, RepoArchive.Storage.MinioConfig.UseSSL)
+	assert.True(t, RepoArchive.Storage.MinioConfig.UseSSL)
 	assert.EqualValues(t, "repo-archive/", RepoArchive.Storage.MinioConfig.BasePath)
 }

@@ -464,7 +464,7 @@ MINIO_BASE_PATH = /prefix
 	assert.NoError(t, loadRepoArchiveFrom(cfg))
 	assert.EqualValues(t, "my_access_key", RepoArchive.Storage.MinioConfig.AccessKeyID)
 	assert.EqualValues(t, "my_secret_key", RepoArchive.Storage.MinioConfig.SecretAccessKey)
-	assert.EqualValues(t, true, RepoArchive.Storage.MinioConfig.UseSSL)
+	assert.True(t, RepoArchive.Storage.MinioConfig.UseSSL)
 	assert.EqualValues(t, "/prefix/repo-archive/", RepoArchive.Storage.MinioConfig.BasePath)

 	cfg, err = NewConfigProviderFromData(`
@@ -477,7 +477,7 @@ MINIO_BASE_PATH = /prefix
 	assert.NoError(t, err)
 	assert.NoError(t, loadRepoArchiveFrom(cfg))
 	assert.EqualValues(t, "127.0.0.1", RepoArchive.Storage.MinioConfig.IamEndpoint)
-	assert.EqualValues(t, true, RepoArchive.Storage.MinioConfig.UseSSL)
+	assert.True(t, RepoArchive.Storage.MinioConfig.UseSSL)
 	assert.EqualValues(t, "/prefix/repo-archive/", RepoArchive.Storage.MinioConfig.BasePath)

 	cfg, err = NewConfigProviderFromData(`
@@ -495,7 +495,7 @@ MINIO_BASE_PATH = /lfs
 	assert.NoError(t, loadLFSFrom(cfg))
 	assert.EqualValues(t, "my_access_key", LFS.Storage.MinioConfig.AccessKeyID)
 	assert.EqualValues(t, "my_secret_key", LFS.Storage.MinioConfig.SecretAccessKey)
-	assert.EqualValues(t, true, LFS.Storage.MinioConfig.UseSSL)
+	assert.True(t, LFS.Storage.MinioConfig.UseSSL)
 	assert.EqualValues(t, "/lfs", LFS.Storage.MinioConfig.BasePath)

 	cfg, err = NewConfigProviderFromData(`
@@ -513,7 +513,7 @@ MINIO_BASE_PATH = /lfs
 	assert.NoError(t, loadLFSFrom(cfg))
 	assert.EqualValues(t, "my_access_key", LFS.Storage.MinioConfig.AccessKeyID)
 	assert.EqualValues(t, "my_secret_key", LFS.Storage.MinioConfig.SecretAccessKey)
-	assert.EqualValues(t, true, LFS.Storage.MinioConfig.UseSSL)
+	assert.True(t, LFS.Storage.MinioConfig.UseSSL)
 	assert.EqualValues(t, "/lfs", LFS.Storage.MinioConfig.BasePath)
 }

@@ -13,7 +13,6 @@ import (
 	"errors"
 	"fmt"
 	"io"
-	"maps"
 	"net"
 	"os"
 	"os/exec"
@@ -49,6 +48,10 @@ import (
 // Then sessionHandler should only use the "verified keyID" from the original ssh conn, but not the ctx one.
 // Otherwise, if a user provides 2 keys A (a correct one) and B (public key matches but no private key),
 // then only A succeeds to authenticate, sessionHandler will see B's keyID
+//
+// After x/crypto >= 0.31.0 (fix CVE-2024-45337), the PublicKeyCallback will be called again for the verified key,
+// it mitigates the misuse for most cases, it's still good for us to make sure we don't rely on that mitigation
+// and do not misuse the PublicKeyCallback: we should only use the verified keyID from the verified ssh conn.

 const giteaPermissionExtensionKeyID = "gitea-perm-ext-key-id"

@@ -100,8 +103,8 @@ func ptr[T any](intf any) *T {
 func sessionHandler(session ssh.Session) {
 	// here can't use session.Permissions() because it only uses the value from ctx, which might not be the authenticated one.
 	// so we must use the original ssh conn, which always contains the correct (verified) keyID.
-	sshConn := ptr[sessionPartial](session)
-	keyID := sshConn.conn.Permissions.Extensions[giteaPermissionExtensionKeyID]
+	sshSession := ptr[sessionPartial](session)
+	keyID := sshSession.conn.Permissions.Extensions[giteaPermissionExtensionKeyID]

 	command := session.RawCommand()

@@ -210,10 +213,7 @@ func publicKeyHandler(ctx ssh.Context, key ssh.PublicKey) bool {

 	// first, reset the ctx permissions (just like https://github.com/gliderlabs/ssh/pull/243 does)
 	// it shouldn't be reused across different ssh conn (sessions), each pub key should have its own "Permissions"
-	oldCtxPerm := ctx.Permissions().Permissions
 	ctx.Permissions().Permissions = &gossh.Permissions{}
-	ctx.Permissions().Permissions.CriticalOptions = maps.Clone(oldCtxPerm.CriticalOptions)
-
 	setPermExt := func(keyID int64) {
 		ctx.Permissions().Permissions.Extensions = map[string]string{
 			giteaPermissionExtensionKeyID: fmt.Sprint(keyID),
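The three ssh hunks above all serve the point made in the comment block: per-attempt ("ctx") permissions can belong to a key that never finished authenticating, so the handler must read the key ID from the permissions attached to the established connection. A toy model of that pitfall, with no real ssh library involved (all names illustrative):

package main

import "fmt"

type permissions struct{ keyID string }

// publicKeyCallback runs once per offered key, even for keys whose owner never
// proves possession of the private key; it mutates the shared per-connection
// ("ctx") state every time.
func publicKeyCallback(ctxPerm *permissions, offeredKeyID string) {
	ctxPerm.keyID = offeredKeyID
}

func main() {
	ctxPerm := &permissions{}
	var verified *permissions // only set for the key that actually authenticated

	publicKeyCallback(ctxPerm, "key-A") // key A authenticates...
	verified = &permissions{keyID: ctxPerm.keyID}
	publicKeyCallback(ctxPerm, "key-B") // ...key B only matched a public key

	fmt.Println("ctx keyID:     ", ctxPerm.keyID)  // key-B: stale, untrusted
	fmt.Println("verified keyID:", verified.keyID) // key-A: what sessionHandler should use
}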
@@ -53,8 +53,8 @@ func parseLegacy(datetime string) time.Time {
 	return t
 }

-func anyToTime(any any) (t time.Time, isZero bool) {
-	switch v := any.(type) {
+func anyToTime(value any) (t time.Time, isZero bool) {
+	switch v := value.(type) {
 	case nil:
 		// it is zero
 	case *time.Time:
@@ -72,7 +72,7 @@ func anyToTime(any any) (t time.Time, isZero bool) {
 	case int64:
 		t = timeutil.TimeStamp(v).AsTime()
 	default:
-		panic(fmt.Sprintf("Unsupported time type %T", any))
+		panic(fmt.Sprintf("Unsupported time type %T", value))
 	}
 	return t, t.IsZero() || t.Unix() == 0
 }
@@ -53,10 +53,14 @@ func (su *StringUtils) Cut(s, sep string) []any {
 	return []any{before, after, found}
 }

-func (su *StringUtils) EllipsisString(s string, max int) string {
-	return base.EllipsisString(s, max)
+func (su *StringUtils) EllipsisString(s string, maxLength int) string {
+	return base.EllipsisString(s, maxLength)
 }

 func (su *StringUtils) ToUpper(s string) string {
 	return strings.ToUpper(s)
 }
+
+func (su *StringUtils) TrimPrefix(s, prefix string) string {
+	return strings.TrimPrefix(s, prefix)
+}
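TrimPrefix joins the other StringUtils helpers that templates can call. A self-contained sketch of how such a helper becomes reachable from a template; the wiring and names are illustrative, not Gitea's actual helper registration:

package main

import (
	"os"
	"strings"
	"text/template"
)

type stringUtils struct{}

func (stringUtils) TrimPrefix(s, prefix string) string { return strings.TrimPrefix(s, prefix) }

func main() {
	// prints "main": the template strips the refs/heads/ prefix via the helper
	tmpl := template.Must(template.New("demo").Parse(`{{.StringUtils.TrimPrefix .Ref "refs/heads/"}}`))
	data := struct {
		StringUtils stringUtils
		Ref         string
	}{stringUtils{}, "refs/heads/main"}
	_ = tmpl.Execute(os.Stdout, data)
}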
@@ -4,7 +4,6 @@
 package user

 import (
-	"os"
 	"os/exec"
 	"runtime"
 	"strings"
@@ -36,7 +35,7 @@ func TestCurrentUsername(t *testing.T) {
 	if user != whoami {
 		t.Errorf("expected %s as user, got: %s", whoami, user)
 	}
-	os.Setenv("USER", "spoofed")
+	t.Setenv("USER", "spoofed")
 	user = CurrentUsername()
 	if user != whoami {
 		t.Errorf("expected %s as user, got: %s", whoami, user)
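The os.Setenv to t.Setenv change matters because t.Setenv registers a cleanup that restores the previous value when the test ends, so the spoofed USER no longer leaks into later tests. Illustrative only:

package demo

import (
	"os"
	"testing"
)

func TestSetenvIsRestoredAutomatically(t *testing.T) {
	t.Setenv("USER", "spoofed") // restored by the testing framework on cleanup
	if os.Getenv("USER") != "spoofed" {
		t.Fatal("expected the mocked value inside the test")
	}
}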
@@ -27,9 +27,9 @@ func Test_HexToRBGColor(t *testing.T) {
 	}
 	for n, c := range cases {
 		r, g, b := HexToRBGColor(c.colorString)
-		assert.Equal(t, c.expectedR, r, "case %d: error R should match: expected %f, but get %f", n, c.expectedR, r)
-		assert.Equal(t, c.expectedG, g, "case %d: error G should match: expected %f, but get %f", n, c.expectedG, g)
-		assert.Equal(t, c.expectedB, b, "case %d: error B should match: expected %f, but get %f", n, c.expectedB, b)
+		assert.InDelta(t, c.expectedR, r, 0, "case %d: error R should match: expected %f, but get %f", n, c.expectedR, r)
+		assert.InDelta(t, c.expectedG, g, 0, "case %d: error G should match: expected %f, but get %f", n, c.expectedG, g)
+		assert.InDelta(t, c.expectedB, b, 0, "case %d: error B should match: expected %f, but get %f", n, c.expectedB, b)
 	}
 }

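assert.Equal on floating-point values is flagged by testifylint because it relies on exact == comparison; assert.InDelta makes the tolerance explicit, and the hunk above keeps it at 0 so behaviour is unchanged. Illustrative only:

package demo

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestFloatAssertStyle(t *testing.T) {
	r := 255.0

	assert.InDelta(t, 255.0, r, 0)    // exact match, same as before
	assert.InDelta(t, 255.0, r, 1e-9) // or an explicit tolerance
}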
@@ -10,7 +10,6 @@ import (
 	"crypto/sha256"
 	"crypto/x509"
 	"encoding/pem"
-	"regexp"
 	"testing"

 	"github.com/stretchr/testify/assert"
@@ -23,8 +22,8 @@ func TestKeygen(t *testing.T) {
 	assert.NotEmpty(t, priv)
 	assert.NotEmpty(t, pub)

-	assert.Regexp(t, regexp.MustCompile("^-----BEGIN RSA PRIVATE KEY-----.*"), priv)
-	assert.Regexp(t, regexp.MustCompile("^-----BEGIN PUBLIC KEY-----.*"), pub)
+	assert.Regexp(t, "^-----BEGIN RSA PRIVATE KEY-----.*", priv)
+	assert.Regexp(t, "^-----BEGIN PUBLIC KEY-----.*", pub)
 }

 func TestSignUsingKeys(t *testing.T) {
@@ -27,9 +27,9 @@ func TestTimeStr(t *testing.T) {
 		t.Run(test.input, func(t *testing.T) {
 			output, err := TimeEstimateParse(test.input)
 			if test.err {
-				assert.NotNil(t, err)
+				assert.Error(t, err)
 			} else {
-				assert.Nil(t, err)
+				assert.NoError(t, err)
 			}
 			assert.Equal(t, test.output, output)
 		})
@@ -122,8 +122,8 @@ func Test_NormalizeEOL(t *testing.T) {

 func Test_RandomInt(t *testing.T) {
 	randInt, err := CryptoRandomInt(255)
-	assert.True(t, randInt >= 0)
-	assert.True(t, randInt <= 255)
+	assert.GreaterOrEqual(t, randInt, int64(0))
+	assert.LessOrEqual(t, randInt, int64(255))
 	assert.NoError(t, err)
 }

@@ -223,22 +223,22 @@ func BenchmarkToUpper(b *testing.B) {
 }

 func TestToTitleCase(t *testing.T) {
-	assert.Equal(t, ToTitleCase(`foo bar baz`), `Foo Bar Baz`)
-	assert.Equal(t, ToTitleCase(`FOO BAR BAZ`), `Foo Bar Baz`)
+	assert.Equal(t, `Foo Bar Baz`, ToTitleCase(`foo bar baz`))
+	assert.Equal(t, `Foo Bar Baz`, ToTitleCase(`FOO BAR BAZ`))
 }

 func TestToPointer(t *testing.T) {
 	assert.Equal(t, "abc", *ToPointer("abc"))
 	assert.Equal(t, 123, *ToPointer(123))
 	abc := "abc"
-	assert.False(t, &abc == ToPointer(abc))
+	assert.NotSame(t, &abc, ToPointer(abc))
 	val123 := 123
-	assert.False(t, &val123 == ToPointer(val123))
+	assert.NotSame(t, &val123, ToPointer(val123))
 }

 func TestReserveLineBreakForTextarea(t *testing.T) {
-	assert.Equal(t, ReserveLineBreakForTextarea("test\r\ndata"), "test\ndata")
-	assert.Equal(t, ReserveLineBreakForTextarea("test\r\ndata\r\n"), "test\ndata\n")
+	assert.Equal(t, "test\ndata", ReserveLineBreakForTextarea("test\r\ndata"))
+	assert.Equal(t, "test\ndata\n", ReserveLineBreakForTextarea("test\r\ndata\r\n"))
 }

 func TestOptionalArg(t *testing.T) {
options/license/MIPS (new file, 4 lines)
@@ -0,0 +1,4 @@
+Copyright (c) 1992, 1991, 1990 MIPS Computer Systems, Inc.
+MIPS Computer Systems, Inc. grants reproduction and use
+rights to all parties, PROVIDED that this comment is
+maintained in the copy.
options/license/ThirdEye (new file, 7 lines)
@@ -0,0 +1,7 @@
+(C) Copyright 1984 by Third Eye Software, Inc.
+
+Third Eye Software, Inc. grants reproduction and use rights to
+all parties, PROVIDED that this comment is maintained in the copy.
+
+Third Eye makes no claims about the applicability of this
+symbol table to a particular use.
@@ -1680,7 +1680,6 @@ issues.timetracker_timer_stop = Stop timer
 issues.timetracker_timer_discard = Discard timer
 issues.timetracker_timer_manually_add = Add Time

-issues.time_estimate_placeholder = 1h 2m
 issues.time_estimate_set = Set estimated time
 issues.time_estimate_display = Estimate: %s
 issues.change_time_estimate_at = changed time estimate to <b>%s</b> %s
@@ -2633,6 +2632,7 @@ release.new_release = New Release
 release.draft = Draft
 release.prerelease = Pre-Release
 release.stable = Stable
+release.latest = Latest
 release.compare = Compare
 release.edit = edit
 release.ahead.commits = <strong>%d</strong> commits
package-lock.json (generated, 70 lines changed)
@@ -30,7 +30,7 @@
 		"esbuild-loader": "4.2.2",
 		"escape-goat": "4.0.0",
 		"fast-glob": "3.3.2",
-		"htmx.org": "2.0.3",
+		"htmx.org": "2.0.4",
 		"idiomorph": "0.3.0",
 		"jquery": "3.7.1",
 		"katex": "0.16.11",
@@ -67,6 +67,7 @@
 	"devDependencies": {
 		"@eslint-community/eslint-plugin-eslint-comments": "4.4.1",
 		"@playwright/test": "1.49.0",
+		"@silverwind/vue-tsc": "2.1.13",
 		"@stoplight/spectral-cli": "6.14.2",
 		"@stylistic/eslint-plugin-js": "2.11.0",
 		"@stylistic/stylelint-plugin": "3.1.1",
@@ -111,8 +112,7 @@
 		"type-fest": "4.30.0",
 		"updates": "16.4.0",
 		"vite-string-plugin": "1.3.4",
-		"vitest": "2.1.8",
-		"vue-tsc": "2.1.10"
+		"vitest": "2.1.8"
 	},
 	"engines": {
 		"node": ">= 18.0.0"
@@ -3833,6 +3833,24 @@
 			"hasInstallScript": true,
 			"license": "Apache-2.0"
 		},
+		"node_modules/@silverwind/vue-tsc": {
+			"version": "2.1.13",
+			"resolved": "https://registry.npmjs.org/@silverwind/vue-tsc/-/vue-tsc-2.1.13.tgz",
+			"integrity": "sha512-ejFxz1KZiUGAESbC+eURnjqt0N95qkU9eZU7W15wgF9zV+v2FEu3ZLduuXTC7D/Sg6lL1R/QjPfUbxbAbBQOsw==",
+			"dev": true,
+			"license": "MIT",
+			"dependencies": {
+				"@volar/typescript": "~2.4.11",
+				"@vue/language-core": "2.1.10",
+				"semver": "^7.5.4"
+			},
+			"bin": {
+				"vue-tsc": "bin/vue-tsc.js"
+			},
+			"peerDependencies": {
+				"typescript": ">=5.0.0"
+			}
+		},
 		"node_modules/@silverwind/vue3-calendar-heatmap": {
 			"version": "2.0.6",
 			"resolved": "https://registry.npmjs.org/@silverwind/vue3-calendar-heatmap/-/vue3-calendar-heatmap-2.0.6.tgz",
@@ -5335,30 +5353,30 @@
 			}
 		},
 		"node_modules/@volar/language-core": {
-			"version": "2.4.10",
-			"resolved": "https://registry.npmjs.org/@volar/language-core/-/language-core-2.4.10.tgz",
-			"integrity": "sha512-hG3Z13+nJmGaT+fnQzAkS0hjJRa2FCeqZt6Bd+oGNhUkQ+mTFsDETg5rqUTxyzIh5pSOGY7FHCWUS8G82AzLCA==",
+			"version": "2.4.11",
+			"resolved": "https://registry.npmjs.org/@volar/language-core/-/language-core-2.4.11.tgz",
+			"integrity": "sha512-lN2C1+ByfW9/JRPpqScuZt/4OrUUse57GLI6TbLgTIqBVemdl1wNcZ1qYGEo2+Gw8coYLgCy7SuKqn6IrQcQgg==",
 			"dev": true,
 			"license": "MIT",
 			"dependencies": {
-				"@volar/source-map": "2.4.10"
+				"@volar/source-map": "2.4.11"
 			}
 		},
 		"node_modules/@volar/source-map": {
-			"version": "2.4.10",
-			"resolved": "https://registry.npmjs.org/@volar/source-map/-/source-map-2.4.10.tgz",
-			"integrity": "sha512-OCV+b5ihV0RF3A7vEvNyHPi4G4kFa6ukPmyVocmqm5QzOd8r5yAtiNvaPEjl8dNvgC/lj4JPryeeHLdXd62rWA==",
+			"version": "2.4.11",
+			"resolved": "https://registry.npmjs.org/@volar/source-map/-/source-map-2.4.11.tgz",
+			"integrity": "sha512-ZQpmafIGvaZMn/8iuvCFGrW3smeqkq/IIh9F1SdSx9aUl0J4Iurzd6/FhmjNO5g2ejF3rT45dKskgXWiofqlZQ==",
 			"dev": true,
 			"license": "MIT"
 		},
 		"node_modules/@volar/typescript": {
-			"version": "2.4.10",
-			"resolved": "https://registry.npmjs.org/@volar/typescript/-/typescript-2.4.10.tgz",
-			"integrity": "sha512-F8ZtBMhSXyYKuBfGpYwqA5rsONnOwAVvjyE7KPYJ7wgZqo2roASqNWUnianOomJX5u1cxeRooHV59N0PhvEOgw==",
+			"version": "2.4.11",
+			"resolved": "https://registry.npmjs.org/@volar/typescript/-/typescript-2.4.11.tgz",
+			"integrity": "sha512-2DT+Tdh88Spp5PyPbqhyoYavYCPDsqbHLFwcUI9K1NlY1YgUJvujGdrqUp0zWxnW7KWNTr3xSpMuv2WnaTKDAw==",
 			"dev": true,
 			"license": "MIT",
 			"dependencies": {
-				"@volar/language-core": "2.4.10",
+				"@volar/language-core": "2.4.11",
 				"path-browserify": "^1.0.1",
 				"vscode-uri": "^3.0.8"
 			}
@@ -10557,9 +10575,9 @@
 			}
 		},
 		"node_modules/htmx.org": {
-			"version": "2.0.3",
-			"resolved": "https://registry.npmjs.org/htmx.org/-/htmx.org-2.0.3.tgz",
-			"integrity": "sha512-AeoJUAjkCVVajbfKX+3sVQBTCt8Ct4lif1T+z/tptTXo8+8yyq3QIMQQe/IT+R8ssfrO1I0DeX4CAronzCL6oA==",
+			"version": "2.0.4",
+			"resolved": "https://registry.npmjs.org/htmx.org/-/htmx.org-2.0.4.tgz",
+			"integrity": "sha512-HLxMCdfXDOJirs3vBZl/ZLoY+c7PfM4Ahr2Ad4YXh6d22T5ltbTXFFkpx9Tgb2vvmWFMbIc3LqN2ToNkZJvyYQ==",
 			"license": "0BSD"
 		},
 		"node_modules/iconv-lite": {
@@ -15780,24 +15798,6 @@
 				}
 			}
 		},
-		"node_modules/vue-tsc": {
-			"version": "2.1.10",
-			"resolved": "https://registry.npmjs.org/vue-tsc/-/vue-tsc-2.1.10.tgz",
-			"integrity": "sha512-RBNSfaaRHcN5uqVqJSZh++Gy/YUzryuv9u1aFWhsammDJXNtUiJMNoJ747lZcQ68wUQFx6E73y4FY3D8E7FGMA==",
-			"dev": true,
-			"license": "MIT",
-			"dependencies": {
-				"@volar/typescript": "~2.4.8",
-				"@vue/language-core": "2.1.10",
-				"semver": "^7.5.4"
-			},
-			"bin": {
-				"vue-tsc": "bin/vue-tsc.js"
-			},
-			"peerDependencies": {
-				"typescript": ">=5.0.0"
-			}
-		},
 		"node_modules/watchpack": {
 			"version": "2.4.2",
 			"resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.2.tgz",
Some files were not shown because too many files have changed in this diff.