Resolve lint for unused parameter and unnecessary type arguments (#30750)

Resolve all cases of the `unused parameter` and `unnecessary type
arguments` lints.

Related: #30729
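For context, a minimal standalone sketch of the two lint categories this commit resolves; the helper names below are illustrative only and do not appear in the Gitea codebase:

```go
// Illustrative sketch only: "unused parameter" and "unnecessary type
// arguments" shown on hypothetical helpers, not on Gitea's own APIs.
package main

import "fmt"

// Some mimics the shape of a generic constructor such as optional.Some.
func Some[T any](v T) T { return v }

// Before: func describe(prefix string, n int) string — `prefix` was never
// read, so the lint flags it. After: the parameter is removed and every
// caller is updated, which is the pattern applied throughout this commit.
func describe(n int) string {
	return fmt.Sprintf("value=%d", n)
}

func main() {
	// Before: Some[int](1) — the explicit type argument is unnecessary
	// because T is inferred from the argument.
	v := Some(1)
	fmt.Println(describe(v))
}
```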

---------

Co-authored-by: Giteabot <teabot@gitea.io>
Chongyi Zheng 2024-04-29 04:47:56 -04:00 committed by GitHub
parent ad4e902d5a
commit e80466f734
42 changed files with 112 additions and 130 deletions

View File

@@ -34,7 +34,7 @@ func TestXRef_AddCrossReferences(t *testing.T) {
 // Comment on PR to reopen issue #1
 content = fmt.Sprintf("content2, reopens #%d", itarget.Index)
-c := testCreateComment(t, 1, 2, pr.ID, content)
+c := testCreateComment(t, 2, pr.ID, content)
 ref = unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: itarget.ID, RefIssueID: pr.ID, RefCommentID: c.ID})
 assert.Equal(t, issues_model.CommentTypeCommentRef, ref.Type)
 assert.Equal(t, pr.RepoID, ref.RefRepoID)
@@ -104,18 +104,18 @@ func TestXRef_ResolveCrossReferences(t *testing.T) {
 pr := testCreatePR(t, 1, 2, "titlepr", fmt.Sprintf("closes #%d", i1.Index))
 rp := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: i1.ID, RefIssueID: pr.Issue.ID, RefCommentID: 0})
-c1 := testCreateComment(t, 1, 2, pr.Issue.ID, fmt.Sprintf("closes #%d", i2.Index))
+c1 := testCreateComment(t, 2, pr.Issue.ID, fmt.Sprintf("closes #%d", i2.Index))
 r1 := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: i2.ID, RefIssueID: pr.Issue.ID, RefCommentID: c1.ID})
 // Must be ignored
-c2 := testCreateComment(t, 1, 2, pr.Issue.ID, fmt.Sprintf("mentions #%d", i2.Index))
+c2 := testCreateComment(t, 2, pr.Issue.ID, fmt.Sprintf("mentions #%d", i2.Index))
 unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: i2.ID, RefIssueID: pr.Issue.ID, RefCommentID: c2.ID})
 // Must be superseded by c4/r4
-c3 := testCreateComment(t, 1, 2, pr.Issue.ID, fmt.Sprintf("reopens #%d", i3.Index))
+c3 := testCreateComment(t, 2, pr.Issue.ID, fmt.Sprintf("reopens #%d", i3.Index))
 unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: i3.ID, RefIssueID: pr.Issue.ID, RefCommentID: c3.ID})
-c4 := testCreateComment(t, 1, 2, pr.Issue.ID, fmt.Sprintf("closes #%d", i3.Index))
+c4 := testCreateComment(t, 2, pr.Issue.ID, fmt.Sprintf("closes #%d", i3.Index))
 r4 := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: i3.ID, RefIssueID: pr.Issue.ID, RefCommentID: c4.ID})
 refs, err := pr.ResolveCrossReferences(db.DefaultContext)
@@ -168,7 +168,7 @@ func testCreatePR(t *testing.T, repo, doer int64, title, content string) *issues
 return pr
 }
-func testCreateComment(t *testing.T, repo, doer, issue int64, content string) *issues_model.Comment {
+func testCreateComment(t *testing.T, doer, issue int64, content string) *issues_model.Comment {
 d := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: doer})
 i := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: issue})
 c := &issues_model.Comment{Type: issues_model.CommentTypeComment, PosterID: doer, Poster: d, IssueID: issue, Issue: i, Content: content}

View File

@@ -291,15 +291,15 @@ func TestAccessibleReposEnv_CountRepos(t *testing.T) {
 func TestAccessibleReposEnv_RepoIDs(t *testing.T) {
 assert.NoError(t, unittest.PrepareTestDatabase())
 org := unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 3})
-testSuccess := func(userID, _, pageSize int64, expectedRepoIDs []int64) {
+testSuccess := func(userID int64, expectedRepoIDs []int64) {
 env, err := organization.AccessibleReposEnv(db.DefaultContext, org, userID)
 assert.NoError(t, err)
 repoIDs, err := env.RepoIDs(1, 100)
 assert.NoError(t, err)
 assert.Equal(t, expectedRepoIDs, repoIDs)
 }
-testSuccess(2, 1, 100, []int64{3, 5, 32})
+testSuccess(2, []int64{3, 5, 32})
-testSuccess(4, 0, 100, []int64{3, 32})
+testSuccess(4, []int64{3, 32})
 }
 func TestAccessibleReposEnv_Repos(t *testing.T) {

View File

@@ -208,14 +208,14 @@ func detectMatched(gitRepo *git.Repository, commit *git.Commit, triggedEvent web
 webhook_module.HookEventIssueAssign,
 webhook_module.HookEventIssueLabel,
 webhook_module.HookEventIssueMilestone:
-return matchIssuesEvent(commit, payload.(*api.IssuePayload), evt)
+return matchIssuesEvent(payload.(*api.IssuePayload), evt)
 case // issue_comment
 webhook_module.HookEventIssueComment,
 // `pull_request_comment` is same as `issue_comment`
 // See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_comment-use-issue_comment
 webhook_module.HookEventPullRequestComment:
-return matchIssueCommentEvent(commit, payload.(*api.IssueCommentPayload), evt)
+return matchIssueCommentEvent(payload.(*api.IssueCommentPayload), evt)
 case // pull_request
 webhook_module.HookEventPullRequest,
@@ -229,19 +229,19 @@ func detectMatched(gitRepo *git.Repository, commit *git.Commit, triggedEvent web
 case // pull_request_review
 webhook_module.HookEventPullRequestReviewApproved,
 webhook_module.HookEventPullRequestReviewRejected:
-return matchPullRequestReviewEvent(commit, payload.(*api.PullRequestPayload), evt)
+return matchPullRequestReviewEvent(payload.(*api.PullRequestPayload), evt)
 case // pull_request_review_comment
 webhook_module.HookEventPullRequestReviewComment:
-return matchPullRequestReviewCommentEvent(commit, payload.(*api.PullRequestPayload), evt)
+return matchPullRequestReviewCommentEvent(payload.(*api.PullRequestPayload), evt)
 case // release
 webhook_module.HookEventRelease:
-return matchReleaseEvent(commit, payload.(*api.ReleasePayload), evt)
+return matchReleaseEvent(payload.(*api.ReleasePayload), evt)
 case // registry_package
 webhook_module.HookEventPackage:
-return matchPackageEvent(commit, payload.(*api.PackagePayload), evt)
+return matchPackageEvent(payload.(*api.PackagePayload), evt)
 default:
 log.Warn("unsupported event %q", triggedEvent)
@@ -347,7 +347,7 @@ func matchPushEvent(commit *git.Commit, pushPayload *api.PushPayload, evt *jobpa
 return matchTimes == len(evt.Acts())
 }
-func matchIssuesEvent(commit *git.Commit, issuePayload *api.IssuePayload, evt *jobparser.Event) bool {
+func matchIssuesEvent(issuePayload *api.IssuePayload, evt *jobparser.Event) bool {
 // with no special filter parameters
 if len(evt.Acts()) == 0 {
 return true
@@ -495,7 +495,7 @@ func matchPullRequestEvent(gitRepo *git.Repository, commit *git.Commit, prPayloa
 return activityTypeMatched && matchTimes == len(evt.Acts())
 }
-func matchIssueCommentEvent(commit *git.Commit, issueCommentPayload *api.IssueCommentPayload, evt *jobparser.Event) bool {
+func matchIssueCommentEvent(issueCommentPayload *api.IssueCommentPayload, evt *jobparser.Event) bool {
 // with no special filter parameters
 if len(evt.Acts()) == 0 {
 return true
@@ -527,7 +527,7 @@ func matchIssueCommentEvent(commit *git.Commit, issueCommentPayload *api.IssueCo
 return matchTimes == len(evt.Acts())
 }
-func matchPullRequestReviewEvent(commit *git.Commit, prPayload *api.PullRequestPayload, evt *jobparser.Event) bool {
+func matchPullRequestReviewEvent(prPayload *api.PullRequestPayload, evt *jobparser.Event) bool {
 // with no special filter parameters
 if len(evt.Acts()) == 0 {
 return true
@@ -576,7 +576,7 @@ func matchPullRequestReviewEvent(commit *git.Commit, prPayload *api.PullRequestP
 return matchTimes == len(evt.Acts())
 }
-func matchPullRequestReviewCommentEvent(commit *git.Commit, prPayload *api.PullRequestPayload, evt *jobparser.Event) bool {
+func matchPullRequestReviewCommentEvent(prPayload *api.PullRequestPayload, evt *jobparser.Event) bool {
 // with no special filter parameters
 if len(evt.Acts()) == 0 {
 return true
@@ -625,7 +625,7 @@ func matchPullRequestReviewCommentEvent(commit *git.Commit, prPayload *api.PullR
 return matchTimes == len(evt.Acts())
 }
-func matchReleaseEvent(commit *git.Commit, payload *api.ReleasePayload, evt *jobparser.Event) bool {
+func matchReleaseEvent(payload *api.ReleasePayload, evt *jobparser.Event) bool {
 // with no special filter parameters
 if len(evt.Acts()) == 0 {
 return true
@@ -662,7 +662,7 @@ func matchReleaseEvent(commit *git.Commit, payload *api.ReleasePayload, evt *job
 return matchTimes == len(evt.Acts())
 }
-func matchPackageEvent(commit *git.Commit, payload *api.PackagePayload, evt *jobparser.Event) bool {
+func matchPackageEvent(payload *api.PackagePayload, evt *jobparser.Event) bool {
 // with no special filter parameters
 if len(evt.Acts()) == 0 {
 return true

View File

@@ -29,7 +29,7 @@ func (tes Entries) GetCommitsInfo(ctx context.Context, commit *Commit, treePath
 var revs map[string]*Commit
 if commit.repo.LastCommitCache != nil {
 var unHitPaths []string
-revs, unHitPaths, err = getLastCommitForPathsByCache(ctx, commit.ID.String(), treePath, entryPaths, commit.repo.LastCommitCache)
+revs, unHitPaths, err = getLastCommitForPathsByCache(commit.ID.String(), treePath, entryPaths, commit.repo.LastCommitCache)
 if err != nil {
 return nil, nil, err
 }
@@ -97,7 +97,7 @@ func (tes Entries) GetCommitsInfo(ctx context.Context, commit *Commit, treePath
 return commitsInfo, treeCommit, nil
 }
-func getLastCommitForPathsByCache(ctx context.Context, commitID, treePath string, paths []string, cache *LastCommitCache) (map[string]*Commit, []string, error) {
+func getLastCommitForPathsByCache(commitID, treePath string, paths []string, cache *LastCommitCache) (map[string]*Commit, []string, error) {
 var unHitEntryPaths []string
 results := make(map[string]*Commit)
 for _, p := range paths {

View File

@@ -18,7 +18,7 @@ import (
 )
 // ParseTreeEntries parses the output of a `git ls-tree -l` command.
-func ParseTreeEntries(h ObjectFormat, data []byte) ([]*TreeEntry, error) {
+func ParseTreeEntries(data []byte) ([]*TreeEntry, error) {
 return parseTreeEntries(data, nil)
 }

View File

@@ -67,7 +67,7 @@ func TestParseTreeEntries(t *testing.T) {
 }
 for _, testCase := range testCases {
-entries, err := ParseTreeEntries(Sha1ObjectFormat, []byte(testCase.Input))
+entries, err := ParseTreeEntries([]byte(testCase.Input))
 assert.NoError(t, err)
 if len(entries) > 1 {
 fmt.Println(testCase.Expected[0].ID)

View File

@@ -17,13 +17,13 @@ import (
 )
 // ParseTreeEntries parses the output of a `git ls-tree -l` command.
-func ParseTreeEntries(objectFormat ObjectFormat, data []byte) ([]*TreeEntry, error) {
+func ParseTreeEntries(data []byte) ([]*TreeEntry, error) {
-return parseTreeEntries(objectFormat, data, nil)
+return parseTreeEntries(data, nil)
 }
 var sepSpace = []byte{' '}
-func parseTreeEntries(objectFormat ObjectFormat, data []byte, ptree *Tree) ([]*TreeEntry, error) {
+func parseTreeEntries(data []byte, ptree *Tree) ([]*TreeEntry, error) {
 var err error
 entries := make([]*TreeEntry, 0, bytes.Count(data, []byte{'\n'})+1)
 for pos := 0; pos < len(data); {

View File

@@ -12,8 +12,6 @@ import (
 )
 func TestParseTreeEntriesLong(t *testing.T) {
-objectFormat := Sha1ObjectFormat
 testCases := []struct {
 Input string
 Expected []*TreeEntry
@@ -56,7 +54,7 @@ func TestParseTreeEntriesLong(t *testing.T) {
 },
 }
 for _, testCase := range testCases {
-entries, err := ParseTreeEntries(objectFormat, []byte(testCase.Input))
+entries, err := ParseTreeEntries([]byte(testCase.Input))
 assert.NoError(t, err)
 assert.Len(t, entries, len(testCase.Expected))
 for i, entry := range entries {
@@ -66,8 +64,6 @@ func TestParseTreeEntriesLong(t *testing.T) {
 }
 func TestParseTreeEntriesShort(t *testing.T) {
-objectFormat := Sha1ObjectFormat
 testCases := []struct {
 Input string
 Expected []*TreeEntry
@@ -91,7 +87,7 @@ func TestParseTreeEntriesShort(t *testing.T) {
 },
 }
 for _, testCase := range testCases {
-entries, err := ParseTreeEntries(objectFormat, []byte(testCase.Input))
+entries, err := ParseTreeEntries([]byte(testCase.Input))
 assert.NoError(t, err)
 assert.Len(t, entries, len(testCase.Expected))
 for i, entry := range entries {
@@ -102,7 +98,7 @@ func TestParseTreeEntriesShort(t *testing.T) {
 func TestParseTreeEntriesInvalid(t *testing.T) {
 // there was a panic: "runtime error: slice bounds out of range" when the input was invalid: #20315
-entries, err := ParseTreeEntries(Sha1ObjectFormat, []byte("100644 blob ea0d83c9081af9500ac9f804101b3fd0a5c293af"))
+entries, err := ParseTreeEntries([]byte("100644 blob ea0d83c9081af9500ac9f804101b3fd0a5c293af"))
 assert.Error(t, err)
 assert.Len(t, entries, 0)
 }

View File

@@ -77,11 +77,8 @@ func (t *Tree) ListEntries() (Entries, error) {
 return nil, runErr
 }
-objectFormat, err := t.repo.GetObjectFormat()
-if err != nil {
-return nil, err
-}
-t.entries, err = parseTreeEntries(objectFormat, stdout, t)
+var err error
+t.entries, err = parseTreeEntries(stdout, t)
 if err == nil {
 t.entriesParsed = true
 }
@@ -104,11 +101,8 @@ func (t *Tree) listEntriesRecursive(extraArgs TrustedCmdArgs) (Entries, error) {
 return nil, runErr
 }
-objectFormat, err := t.repo.GetObjectFormat()
-if err != nil {
-return nil, err
-}
-t.entriesRecursive, err = parseTreeEntries(objectFormat, stdout, t)
+var err error
+t.entriesRecursive, err = parseTreeEntries(stdout, t)
 if err == nil {
 t.entriesRecursiveParsed = true
 }

View File

@@ -62,8 +62,8 @@ func isIndexable(entry *git.TreeEntry) bool {
 }
 // parseGitLsTreeOutput parses the output of a `git ls-tree -r --full-name` command
-func parseGitLsTreeOutput(objectFormat git.ObjectFormat, stdout []byte) ([]internal.FileUpdate, error) {
+func parseGitLsTreeOutput(stdout []byte) ([]internal.FileUpdate, error) {
-entries, err := git.ParseTreeEntries(objectFormat, stdout)
+entries, err := git.ParseTreeEntries(stdout)
 if err != nil {
 return nil, err
 }
@@ -91,10 +91,8 @@ func genesisChanges(ctx context.Context, repo *repo_model.Repository, revision s
 return nil, runErr
 }
-objectFormat := git.ObjectFormatFromName(repo.ObjectFormatName)
 var err error
-changes.Updates, err = parseGitLsTreeOutput(objectFormat, stdout)
+changes.Updates, err = parseGitLsTreeOutput(stdout)
 return &changes, err
 }
@@ -172,8 +170,6 @@ func nonGenesisChanges(ctx context.Context, repo *repo_model.Repository, revisio
 return nil, err
 }
-objectFormat := git.ObjectFormatFromName(repo.ObjectFormatName)
-changes.Updates, err = parseGitLsTreeOutput(objectFormat, lsTreeStdout)
+changes.Updates, err = parseGitLsTreeOutput(lsTreeStdout)
 return &changes, err
 }

View File

@@ -65,11 +65,11 @@ func (g *ASTTransformer) Transform(node *ast.Document, reader text.Reader, pc pa
 case *ast.Paragraph:
 g.applyElementDir(v)
 case *ast.Image:
-g.transformImage(ctx, v, reader)
+g.transformImage(ctx, v)
 case *ast.Link:
-g.transformLink(ctx, v, reader)
+g.transformLink(ctx, v)
 case *ast.List:
-g.transformList(ctx, v, reader, rc)
+g.transformList(ctx, v, rc)
 case *ast.Text:
 if v.SoftLineBreak() && !v.HardLineBreak() {
 if ctx.Metas["mode"] != "document" {

View File

@@ -68,7 +68,7 @@ func cssColorHandler(value string) bool {
 return css.HSLA.MatchString(value)
 }
-func (g *ASTTransformer) transformCodeSpan(ctx *markup.RenderContext, v *ast.CodeSpan, reader text.Reader) {
+func (g *ASTTransformer) transformCodeSpan(_ *markup.RenderContext, v *ast.CodeSpan, reader text.Reader) {
 colorContent := v.Text(reader.Source())
 if cssColorHandler(string(colorContent)) {
 v.AppendChild(v, NewColorPreview(colorContent))

View File

@@ -13,7 +13,7 @@ import (
 "github.com/yuin/goldmark/util"
 )
-func (g *ASTTransformer) transformHeading(ctx *markup.RenderContext, v *ast.Heading, reader text.Reader, tocList *[]markup.Header) {
+func (g *ASTTransformer) transformHeading(_ *markup.RenderContext, v *ast.Heading, reader text.Reader, tocList *[]markup.Header) {
 for _, attr := range v.Attributes() {
 if _, ok := attr.Value.([]byte); !ok {
 v.SetAttribute(attr.Name, []byte(fmt.Sprintf("%v", attr.Value)))

View File

@@ -10,10 +10,9 @@ import (
 giteautil "code.gitea.io/gitea/modules/util"
 "github.com/yuin/goldmark/ast"
-"github.com/yuin/goldmark/text"
 )
-func (g *ASTTransformer) transformImage(ctx *markup.RenderContext, v *ast.Image, reader text.Reader) {
+func (g *ASTTransformer) transformImage(ctx *markup.RenderContext, v *ast.Image) {
 // Images need two things:
 //
 // 1. Their src needs to munged to be a real value

View File

@@ -10,10 +10,9 @@ import (
 giteautil "code.gitea.io/gitea/modules/util"
 "github.com/yuin/goldmark/ast"
-"github.com/yuin/goldmark/text"
 )
-func (g *ASTTransformer) transformLink(ctx *markup.RenderContext, v *ast.Link, reader text.Reader) {
+func (g *ASTTransformer) transformLink(ctx *markup.RenderContext, v *ast.Link) {
 // Links need their href to munged to be a real value
 link := v.Destination
 isAnchorFragment := len(link) > 0 && link[0] == '#'

View File

@@ -11,7 +11,6 @@ import (
 "github.com/yuin/goldmark/ast"
 east "github.com/yuin/goldmark/extension/ast"
 "github.com/yuin/goldmark/renderer/html"
-"github.com/yuin/goldmark/text"
 "github.com/yuin/goldmark/util"
 )
@@ -50,7 +49,7 @@ func (r *HTMLRenderer) renderTaskCheckBox(w util.BufWriter, source []byte, node
 return ast.WalkContinue, nil
 }
-func (g *ASTTransformer) transformList(ctx *markup.RenderContext, v *ast.List, reader text.Reader, rc *RenderConfig) {
+func (g *ASTTransformer) transformList(_ *markup.RenderContext, v *ast.List, rc *RenderConfig) {
 if v.HasChildren() {
 children := make([]ast.Node, 0, v.ChildCount())
 child := v.FirstChild()

View File

@@ -54,7 +54,7 @@ func (r *stripRenderer) Render(w io.Writer, source []byte, doc ast.Node) error {
 }
 return ast.WalkContinue, nil
 case *ast.Link:
-r.processLink(w, v.Destination)
+r.processLink(v.Destination)
 return ast.WalkSkipChildren, nil
 case *ast.AutoLink:
 // This could be a reference to an issue or pull - if so convert it
@@ -124,7 +124,7 @@ func (r *stripRenderer) processAutoLink(w io.Writer, link []byte) {
 _, _ = w.Write([]byte(parts[4]))
 }
-func (r *stripRenderer) processLink(w io.Writer, link []byte) {
+func (r *stripRenderer) processLink(link []byte) {
 // Links are processed out of band
 r.links = append(r.links, string(link))
 }

View File

@@ -22,7 +22,7 @@ func TestOption(t *testing.T) {
 assert.Equal(t, int(0), none.Value())
 assert.Equal(t, int(1), none.ValueOrDefault(1))
-some := optional.Some[int](1)
+some := optional.Some(1)
 assert.True(t, some.Has())
 assert.Equal(t, int(1), some.Value())
 assert.Equal(t, int(1), some.ValueOrDefault(2))

View File

@@ -38,12 +38,12 @@ func loadIncomingEmailFrom(rootCfg ConfigProvider) {
 return
 }
-if err := checkReplyToAddress(IncomingEmail.ReplyToAddress); err != nil {
+if err := checkReplyToAddress(); err != nil {
 log.Fatal("Invalid incoming_mail.REPLY_TO_ADDRESS (%s): %v", IncomingEmail.ReplyToAddress, err)
 }
 }
-func checkReplyToAddress(address string) error {
+func checkReplyToAddress() error {
 parsed, err := mail.ParseAddress(IncomingEmail.ReplyToAddress)
 if err != nil {
 return err

View File

@@ -97,7 +97,7 @@ func getStorage(rootCfg ConfigProvider, name, typ string, sec ConfigSection) (*S
 return nil, err
 }
-overrideSec := getStorageOverrideSection(rootCfg, targetSec, sec, tp, name)
+overrideSec := getStorageOverrideSection(rootCfg, sec, tp, name)
 targetType := targetSec.Key("STORAGE_TYPE").String()
 switch targetType {
@@ -189,7 +189,7 @@ func getStorageTargetSection(rootCfg ConfigProvider, name, typ string, sec Confi
 }
 // getStorageOverrideSection override section will be read SERVE_DIRECT, PATH, MINIO_BASE_PATH, MINIO_BUCKET to override the targetsec when possible
-func getStorageOverrideSection(rootConfig ConfigProvider, targetSec, sec ConfigSection, targetSecType targetSecType, name string) ConfigSection {
+func getStorageOverrideSection(rootConfig ConfigProvider, sec ConfigSection, targetSecType targetSecType, name string) ConfigSection {
 if targetSecType == targetSecIsSec {
 return nil
 }

View File

@@ -67,7 +67,7 @@ func AddUserBadges(ctx *context.APIContext) {
 // "$ref": "#/responses/forbidden"
 form := web.GetForm(ctx).(*api.UserBadgeOption)
-badges := prepareBadgesForReplaceOrAdd(ctx, *form)
+badges := prepareBadgesForReplaceOrAdd(*form)
 if err := user_model.AddUserBadges(ctx, ctx.ContextUser, badges); err != nil {
 ctx.Error(http.StatusInternalServerError, "ReplaceUserBadges", err)
@@ -103,7 +103,7 @@ func DeleteUserBadges(ctx *context.APIContext) {
 // "$ref": "#/responses/validationError"
 form := web.GetForm(ctx).(*api.UserBadgeOption)
-badges := prepareBadgesForReplaceOrAdd(ctx, *form)
+badges := prepareBadgesForReplaceOrAdd(*form)
 if err := user_model.RemoveUserBadges(ctx, ctx.ContextUser, badges); err != nil {
 ctx.Error(http.StatusInternalServerError, "ReplaceUserBadges", err)
@@ -113,7 +113,7 @@ func DeleteUserBadges(ctx *context.APIContext) {
 ctx.Status(http.StatusNoContent)
 }
-func prepareBadgesForReplaceOrAdd(ctx *context.APIContext, form api.UserBadgeOption) []*user_model.Badge {
+func prepareBadgesForReplaceOrAdd(form api.UserBadgeOption) []*user_model.Badge {
 badges := make([]*user_model.Badge, len(form.BadgeSlugs))
 for i, badge := range form.BadgeSlugs {
 badges[i] = &user_model.Badge{

View File

@@ -180,7 +180,7 @@ func Migrate(ctx *context.APIContext) {
 Status: repo_model.RepositoryBeingMigrated,
 })
 if err != nil {
-handleMigrateError(ctx, repoOwner, remoteAddr, err)
+handleMigrateError(ctx, repoOwner, err)
 return
 }
@@ -207,7 +207,7 @@ func Migrate(ctx *context.APIContext) {
 }()
 if repo, err = migrations.MigrateRepository(graceful.GetManager().HammerContext(), ctx.Doer, repoOwner.Name, opts, nil); err != nil {
-handleMigrateError(ctx, repoOwner, remoteAddr, err)
+handleMigrateError(ctx, repoOwner, err)
 return
 }
@@ -215,7 +215,7 @@ func Migrate(ctx *context.APIContext) {
 ctx.JSON(http.StatusCreated, convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeAdmin}))
 }
-func handleMigrateError(ctx *context.APIContext, repoOwner *user_model.User, remoteAddr string, err error) {
+func handleMigrateError(ctx *context.APIContext, repoOwner *user_model.User, err error) {
 switch {
 case repo_model.IsErrRepoAlreadyExist(err):
 ctx.Error(http.StatusConflict, "", "The repository with the same name already exists.")

View File

@@ -121,9 +121,9 @@ func HookPreReceive(ctx *gitea_context.PrivateContext) {
 case refFullName.IsBranch():
 preReceiveBranch(ourCtx, oldCommitID, newCommitID, refFullName)
 case refFullName.IsTag():
-preReceiveTag(ourCtx, oldCommitID, newCommitID, refFullName)
+preReceiveTag(ourCtx, refFullName)
 case git.DefaultFeatures.SupportProcReceive && refFullName.IsFor():
-preReceiveFor(ourCtx, oldCommitID, newCommitID, refFullName)
+preReceiveFor(ourCtx, refFullName)
 default:
 ourCtx.AssertCanWriteCode()
 }
@@ -368,7 +368,7 @@ func preReceiveBranch(ctx *preReceiveContext, oldCommitID, newCommitID string, r
 }
 }
-func preReceiveTag(ctx *preReceiveContext, oldCommitID, newCommitID string, refFullName git.RefName) {
+func preReceiveTag(ctx *preReceiveContext, refFullName git.RefName) {
 if !ctx.AssertCanWriteCode() {
 return
 }
@@ -404,7 +404,7 @@ func preReceiveTag(ctx *preReceiveContext, oldCommitID, newCommitID string, refF
 }
 }
-func preReceiveFor(ctx *preReceiveContext, oldCommitID, newCommitID string, refFullName git.RefName) {
+func preReceiveFor(ctx *preReceiveContext, refFullName git.RefName) {
 if !ctx.AssertCreatePullRequest() {
 return
 }

View File

@@ -159,7 +159,7 @@ func DashboardPost(ctx *context.Context) {
 switch form.Op {
 case "sync_repo_branches":
 go func() {
-if err := repo_service.AddAllRepoBranchesToSyncQueue(graceful.GetManager().ShutdownContext(), ctx.Doer.ID); err != nil {
+if err := repo_service.AddAllRepoBranchesToSyncQueue(graceful.GetManager().ShutdownContext()); err != nil {
 log.Error("AddAllRepoBranchesToSyncQueue: %v: %v", ctx.Doer.ID, err)
 }
 }()

View File

@@ -279,7 +279,7 @@ func GetFeedType(name string, req *http.Request) (bool, string, string) {
 }
 // feedActionsToFeedItems convert gitea's Repo's Releases to feeds Item
-func releasesToFeedItems(ctx *context.Context, releases []*repo_model.Release, isReleasesOnly bool) (items []*feeds.Item, err error) {
+func releasesToFeedItems(ctx *context.Context, releases []*repo_model.Release) (items []*feeds.Item, err error) {
 for _, rel := range releases {
 err := rel.LoadAttributes(ctx)
 if err != nil {

View File

@@ -42,7 +42,7 @@ func ShowReleaseFeed(ctx *context.Context, repo *repo_model.Repository, isReleas
 Created: time.Now(),
 }
-feed.Items, err = releasesToFeedItems(ctx, releases, isReleasesOnly)
+feed.Items, err = releasesToFeedItems(ctx, releases)
 if err != nil {
 ctx.ServerError("releasesToFeedItems", err)
 return

View File

@@ -787,7 +787,7 @@ func (rt RepoRefType) RefTypeIncludesTags() bool {
 return false
 }
-func getRefNameFromPath(ctx *Base, repo *Repository, path string, isExist func(string) bool) string {
+func getRefNameFromPath(repo *Repository, path string, isExist func(string) bool) string {
 refName := ""
 parts := strings.Split(path, "/")
 for i, part := range parts {
@@ -823,7 +823,7 @@ func getRefName(ctx *Base, repo *Repository, pathType RepoRefType) string {
 repo.TreePath = path
 return repo.Repository.DefaultBranch
 case RepoRefBranch:
-ref := getRefNameFromPath(ctx, repo, path, repo.GitRepo.IsBranchExist)
+ref := getRefNameFromPath(repo, path, repo.GitRepo.IsBranchExist)
 if len(ref) == 0 {
 // check if ref is HEAD
 parts := strings.Split(path, "/")
@@ -833,7 +833,7 @@ func getRefName(ctx *Base, repo *Repository, pathType RepoRefType) string {
 }
 // maybe it's a renamed branch
-return getRefNameFromPath(ctx, repo, path, func(s string) bool {
+return getRefNameFromPath(repo, path, func(s string) bool {
 b, exist, err := git_model.FindRenamedBranch(ctx, repo.Repository.ID, s)
 if err != nil {
 log.Error("FindRenamedBranch: %v", err)
@@ -853,7 +853,7 @@ func getRefName(ctx *Base, repo *Repository, pathType RepoRefType) string {
 return ref
 case RepoRefTag:
-return getRefNameFromPath(ctx, repo, path, repo.GitRepo.IsTagExist)
+return getRefNameFromPath(repo, path, repo.GitRepo.IsTagExist)
 case RepoRefCommit:
 parts := strings.Split(path, "/")

View File

@@ -27,7 +27,7 @@ type commonStorageCheckOptions struct {
 name string
 }
-func commonCheckStorage(ctx context.Context, logger log.Logger, autofix bool, opts *commonStorageCheckOptions) error {
+func commonCheckStorage(logger log.Logger, autofix bool, opts *commonStorageCheckOptions) error {
 totalCount, orphanedCount := 0, 0
 totalSize, orphanedSize := int64(0), int64(0)
@@ -98,7 +98,7 @@ func checkStorage(opts *checkStorageOptions) func(ctx context.Context, logger lo
 }
 if opts.Attachments || opts.All {
-if err := commonCheckStorage(ctx, logger, autofix,
+if err := commonCheckStorage(logger, autofix,
 &commonStorageCheckOptions{
 storer: storage.Attachments,
 isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) {
@@ -116,7 +116,7 @@ func checkStorage(opts *checkStorageOptions) func(ctx context.Context, logger lo
 logger.Info("LFS isn't enabled (skipped)")
 return nil
 }
-if err := commonCheckStorage(ctx, logger, autofix,
+if err := commonCheckStorage(logger, autofix,
 &commonStorageCheckOptions{
 storer: storage.LFS,
 isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) {
@@ -132,7 +132,7 @@ func checkStorage(opts *checkStorageOptions) func(ctx context.Context, logger lo
 }
 if opts.Avatars || opts.All {
-if err := commonCheckStorage(ctx, logger, autofix,
+if err := commonCheckStorage(logger, autofix,
 &commonStorageCheckOptions{
 storer: storage.Avatars,
 isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) {
@@ -146,7 +146,7 @@ func checkStorage(opts *checkStorageOptions) func(ctx context.Context, logger lo
 }
 if opts.RepoAvatars || opts.All {
-if err := commonCheckStorage(ctx, logger, autofix,
+if err := commonCheckStorage(logger, autofix,
 &commonStorageCheckOptions{
 storer: storage.RepoAvatars,
 isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) {
@@ -160,7 +160,7 @@ func checkStorage(opts *checkStorageOptions) func(ctx context.Context, logger lo
 }
 if opts.RepoArchives || opts.All {
-if err := commonCheckStorage(ctx, logger, autofix,
+if err := commonCheckStorage(logger, autofix,
 &commonStorageCheckOptions{
 storer: storage.RepoArchives,
 isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) {
@@ -182,7 +182,7 @@ func checkStorage(opts *checkStorageOptions) func(ctx context.Context, logger lo
 logger.Info("Packages isn't enabled (skipped)")
 return nil
 }
-if err := commonCheckStorage(ctx, logger, autofix,
+if err := commonCheckStorage(logger, autofix,
 &commonStorageCheckOptions{
 storer: storage.Packages,
 isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) {

View File

@@ -977,25 +977,24 @@ func (g *GiteaLocalUploader) Finish() error {
 }
 func (g *GiteaLocalUploader) remapUser(source user_model.ExternalUserMigrated, target user_model.ExternalUserRemappable) error {
-var userid int64
+var userID int64
 var err error
 if g.sameApp {
-userid, err = g.remapLocalUser(source, target)
+userID, err = g.remapLocalUser(source)
 } else {
-userid, err = g.remapExternalUser(source, target)
+userID, err = g.remapExternalUser(source)
 }
 if err != nil {
 return err
 }
-if userid > 0 {
+if userID > 0 {
-return target.RemapExternalUser("", 0, userid)
+return target.RemapExternalUser("", 0, userID)
 }
 return target.RemapExternalUser(source.GetExternalName(), source.GetExternalID(), g.doer.ID)
 }
-func (g *GiteaLocalUploader) remapLocalUser(source user_model.ExternalUserMigrated, target user_model.ExternalUserRemappable) (int64, error) {
+func (g *GiteaLocalUploader) remapLocalUser(source user_model.ExternalUserMigrated) (int64, error) {
 userid, ok := g.userMap[source.GetExternalID()]
 if !ok {
 name, err := user_model.GetUserNameByID(g.ctx, source.GetExternalID())
@@ -1013,7 +1012,7 @@ func (g *GiteaLocalUploader) remapLocalUser(source user_model.ExternalUserMigrat
 return userid, nil
 }
-func (g *GiteaLocalUploader) remapExternalUser(source user_model.ExternalUserMigrated, target user_model.ExternalUserRemappable) (userid int64, err error) {
+func (g *GiteaLocalUploader) remapExternalUser(source user_model.ExternalUserMigrated) (userid int64, err error) {
 userid, ok := g.userMap[source.GetExternalID()]
 if !ok {
 userid, err = user_model.GetUserIDByExternalUserID(g.ctx, g.gitServiceType.Name(), fmt.Sprintf("%d", source.GetExternalID()))

View File

@@ -90,7 +90,7 @@ func Update(ctx context.Context, pullLimit, pushLimit int) error {
 pullMirrorsRequested := 0
 if pullLimit != 0 {
-if err := repo_model.MirrorsIterate(ctx, pullLimit, func(idx int, bean any) error {
+if err := repo_model.MirrorsIterate(ctx, pullLimit, func(_ int, bean any) error {
 if err := handler(bean); err != nil {
 return err
 }

View File

@@ -49,7 +49,7 @@ var ErrSubmitReviewOnClosedPR = errors.New("can't submit review for a closed or
 // checkInvalidation checks if the line of code comment got changed by another commit.
 // If the line got changed the comment is going to be invalidated.
-func checkInvalidation(ctx context.Context, c *issues_model.Comment, doer *user_model.User, repo *git.Repository, branch string) error {
+func checkInvalidation(ctx context.Context, c *issues_model.Comment, repo *git.Repository, branch string) error {
 // FIXME differentiate between previous and proposed line
 commit, err := repo.LineBlame(branch, repo.Path, c.TreePath, uint(c.UnsignedLine()))
 if err != nil && (strings.Contains(err.Error(), "fatal: no such path") || notEnoughLines.MatchString(err.Error())) {
@@ -83,7 +83,7 @@ func InvalidateCodeComments(ctx context.Context, prs issues_model.PullRequestLis
 return fmt.Errorf("find code comments: %v", err)
 }
 for _, comment := range codeComments {
-if err := checkInvalidation(ctx, comment, doer, repo, branch); err != nil {
+if err := checkInvalidation(ctx, comment, repo, branch); err != nil {
 return err
 }
 }

View File

@@ -39,7 +39,7 @@ func Update(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.
 go AddTestPullRequestTask(doer, pr.BaseRepo.ID, pr.BaseBranch, false, "", "")
 }()
-return updateHeadByRebaseOnToBase(ctx, pr, doer, message)
+return updateHeadByRebaseOnToBase(ctx, pr, doer)
 }
 if err := pr.LoadBaseRepo(ctx); err != nil {

View File

@@ -18,7 +18,7 @@ import (
 )
 // updateHeadByRebaseOnToBase handles updating a PR's head branch by rebasing it on the PR current base branch
-func updateHeadByRebaseOnToBase(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, message string) error {
+func updateHeadByRebaseOnToBase(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User) error {
 // "Clone" base repo and add the cache headers for the head repo and branch
 mergeCtx, cancel, err := createTemporaryRepoForMerge(ctx, pr, doer, "")
 if err != nil {

View File

@@ -80,7 +80,7 @@ func AdoptRepository(ctx context.Context, doer, u *user_model.User, opts CreateR
 return fmt.Errorf("getRepositoryByID: %w", err)
 }
-if err := adoptRepository(ctx, repoPath, doer, repo, opts.DefaultBranch); err != nil {
+if err := adoptRepository(ctx, repoPath, repo, opts.DefaultBranch); err != nil {
 return fmt.Errorf("createDelegateHooks: %w", err)
 }
@@ -111,7 +111,7 @@ func AdoptRepository(ctx context.Context, doer, u *user_model.User, opts CreateR
 return repo, nil
 }
-func adoptRepository(ctx context.Context, repoPath string, u *user_model.User, repo *repo_model.Repository, defaultBranch string) (err error) {
+func adoptRepository(ctx context.Context, repoPath string, repo *repo_model.Repository, defaultBranch string) (err error) {
 isExist, err := util.IsExist(repoPath)
 if err != nil {
 log.Error("Unable to check if %s exists. Error: %v", repoPath, err)

View File

@@ -527,7 +527,7 @@ func handlerBranchSync(items ...*BranchSyncOptions) []*BranchSyncOptions {
 return nil
 }
-func addRepoToBranchSyncQueue(repoID, doerID int64) error {
+func addRepoToBranchSyncQueue(repoID int64) error {
 return branchSyncQueue.Push(&BranchSyncOptions{
 RepoID: repoID,
 })
@@ -543,9 +543,9 @@ func initBranchSyncQueue(ctx context.Context) error {
 return nil
 }
-func AddAllRepoBranchesToSyncQueue(ctx context.Context, doerID int64) error {
+func AddAllRepoBranchesToSyncQueue(ctx context.Context) error {
 if err := db.Iterate(ctx, builder.Eq{"is_empty": false}, func(ctx context.Context, repo *repo_model.Repository) error {
-return addRepoToBranchSyncQueue(repo.ID, doerID)
+return addRepoToBranchSyncQueue(repo.ID)
 }); err != nil {
 return fmt.Errorf("run sync all branches failed: %v", err)
 }

View File

@@ -211,7 +211,7 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use
 }
 for _, file := range opts.Files {
-if err := handleCheckErrors(file, commit, opts, repo); err != nil {
+if err := handleCheckErrors(file, commit, opts); err != nil {
 return nil, err
 }
 }
@@ -277,7 +277,7 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use
 }
 // handles the check for various issues for ChangeRepoFiles
-func handleCheckErrors(file *ChangeRepoFile, commit *git.Commit, opts *ChangeRepoFilesOptions, repo *repo_model.Repository) error {
+func handleCheckErrors(file *ChangeRepoFile, commit *git.Commit, opts *ChangeRepoFilesOptions) error {
 if file.Operation == "update" || file.Operation == "delete" {
 fromEntry, err := commit.GetTreeEntryByPath(file.Options.fromTreePath)
 if err != nil {

View File

@@ -35,7 +35,7 @@ func TestUpdateUser(t *testing.T) {
 Description: optional.Some("description"),
 AllowGitHook: optional.Some(true),
 AllowImportLocal: optional.Some(true),
-MaxRepoCreation: optional.Some[int](10),
+MaxRepoCreation: optional.Some(10),
 IsRestricted: optional.Some(true),
 IsActive: optional.Some(false),
 IsAdmin: optional.Some(true),

View File

@@ -169,7 +169,7 @@ nwIDAQAB
 assert.Nil(t, u)
 assert.Error(t, err)
-signRequest := func(t *testing.T, rw *RequestWrapper, version string) {
+signRequest := func(rw *RequestWrapper, version string) {
 req := rw.Request
 username := req.Header.Get("X-Ops-Userid")
 if version != "1.0" && version != "1.3" {
@@ -255,7 +255,7 @@ nwIDAQAB
 t.Run(v, func(t *testing.T) {
 defer tests.PrintCurrentTest(t)()
-signRequest(t, req, v)
+signRequest(req, v)
 u, err = auth.Verify(req.Request, nil, nil, nil)
 assert.NotNil(t, u)
 assert.NoError(t, err)

View File

@@ -77,7 +77,7 @@ func TestAPIListReleases(t *testing.T) {
 testFilterByLen(true, url.Values{"draft": {"true"}, "pre-release": {"true"}}, 0, "there is no pre-release draft")
 }
-func createNewReleaseUsingAPI(t *testing.T, session *TestSession, token string, owner *user_model.User, repo *repo_model.Repository, name, target, title, desc string) *api.Release {
+func createNewReleaseUsingAPI(t *testing.T, token string, owner *user_model.User, repo *repo_model.Repository, name, target, title, desc string) *api.Release {
 urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/releases", owner.Name, repo.Name)
 req := NewRequestWithJSON(t, "POST", urlStr, &api.CreateReleaseOption{
 TagName: name,
@@ -120,7 +120,7 @@ func TestAPICreateAndUpdateRelease(t *testing.T) {
 target, err := gitRepo.GetTagCommitID("v0.0.1")
 assert.NoError(t, err)
-newRelease := createNewReleaseUsingAPI(t, session, token, owner, repo, "v0.0.1", target, "v0.0.1", "test")
+newRelease := createNewReleaseUsingAPI(t, token, owner, repo, "v0.0.1", target, "v0.0.1", "test")
 urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/releases/%d", owner.Name, repo.Name, newRelease.ID)
 req := NewRequest(t, "GET", urlStr).
@@ -162,7 +162,7 @@ func TestAPICreateReleaseToDefaultBranch(t *testing.T) {
 session := loginUser(t, owner.LowerName)
 token := getTokenForLoggedInUser(t, session, auth_model.AccessTokenScopeWriteRepository)
-createNewReleaseUsingAPI(t, session, token, owner, repo, "v0.0.1", "", "v0.0.1", "test")
+createNewReleaseUsingAPI(t, token, owner, repo, "v0.0.1", "", "v0.0.1", "test")
 }
 func TestAPICreateReleaseToDefaultBranchOnExistingTag(t *testing.T) {
@@ -180,7 +180,7 @@ func TestAPICreateReleaseToDefaultBranchOnExistingTag(t *testing.T) {
 err = gitRepo.CreateTag("v0.0.1", "master")
 assert.NoError(t, err)
-createNewReleaseUsingAPI(t, session, token, owner, repo, "v0.0.1", "", "v0.0.1", "test")
+createNewReleaseUsingAPI(t, token, owner, repo, "v0.0.1", "", "v0.0.1", "test")
 }
 func TestAPIGetLatestRelease(t *testing.T) {
@@ -232,7 +232,7 @@ func TestAPIDeleteReleaseByTagName(t *testing.T) {
 session := loginUser(t, owner.LowerName)
 token := getTokenForLoggedInUser(t, session, auth_model.AccessTokenScopeWriteRepository)
-createNewReleaseUsingAPI(t, session, token, owner, repo, "release-tag", "", "Release Tag", "test")
+createNewReleaseUsingAPI(t, token, owner, repo, "release-tag", "", "Release Tag", "test")
 // delete release
 req := NewRequestf(t, http.MethodDelete, fmt.Sprintf("/api/v1/repos/%s/%s/releases/tags/release-tag", owner.Name, repo.Name)).
@@ -258,7 +258,7 @@ func TestAPIUploadAssetRelease(t *testing.T) {
 session := loginUser(t, owner.LowerName)
 token := getTokenForLoggedInUser(t, session, auth_model.AccessTokenScopeWriteRepository)
-r := createNewReleaseUsingAPI(t, session, token, owner, repo, "release-tag", "", "Release Tag", "test")
+r := createNewReleaseUsingAPI(t, token, owner, repo, "release-tag", "", "Release Tag", "test")
 filename := "image.png"
 buff := generateImg()

View File

@@ -80,7 +80,7 @@ func TestAPIDeleteTagByName(t *testing.T) {
 _ = MakeRequest(t, req, http.StatusNoContent)
 // Make sure that actual releases can't be deleted outright
-createNewReleaseUsingAPI(t, session, token, owner, repo, "release-tag", "", "Release Tag", "test")
+createNewReleaseUsingAPI(t, token, owner, repo, "release-tag", "", "Release Tag", "test")
 req = NewRequest(t, http.MethodDelete, fmt.Sprintf("/api/v1/repos/%s/%s/tags/release-tag", owner.Name, repo.Name)).
 AddTokenAuth(token)

View File

@@ -17,7 +17,7 @@ import (
 "github.com/stretchr/testify/assert"
 )
-func resultFilenames(t testing.TB, doc *HTMLDoc) []string {
+func resultFilenames(doc *HTMLDoc) []string {
 filenameSelections := doc.doc.Find(".repository.search").Find(".repo-search-result").Find(".header").Find("span.file")
 result := make([]string, filenameSelections.Length())
 filenameSelections.Each(func(i int, selection *goquery.Selection) {
@@ -56,6 +56,6 @@ func testSearch(t *testing.T, url string, expected []string) {
 req := NewRequest(t, "GET", url)
 resp := MakeRequest(t, req, http.StatusOK)
-filenames := resultFilenames(t, NewHTMLParser(t, resp.Body))
+filenames := resultFilenames(NewHTMLParser(t, resp.Body))
 assert.EqualValues(t, expected, filenames)
 }

View File

@@ -78,7 +78,7 @@ func getDeleteRepoFilesOptions(repo *repo_model.Repository) *files_service.Chang
 }
 }
-func getExpectedFileResponseForRepofilesDelete(u *url.URL) *api.FileResponse {
+func getExpectedFileResponseForRepofilesDelete() *api.FileResponse {
 // Just returns fields that don't change, i.e. fields with commit SHAs and dates can't be determined
 return &api.FileResponse{
 Content: nil,
@@ -418,7 +418,7 @@ func testDeleteRepoFiles(t *testing.T, u *url.URL) {
 t.Run("Delete README.md file", func(t *testing.T) {
 filesResponse, err := files_service.ChangeRepoFiles(git.DefaultContext, repo, doer, opts)
 assert.NoError(t, err)
-expectedFileResponse := getExpectedFileResponseForRepofilesDelete(u)
+expectedFileResponse := getExpectedFileResponseForRepofilesDelete()
 assert.NotNil(t, filesResponse)
 assert.Nil(t, filesResponse.Files[0])
 assert.EqualValues(t, expectedFileResponse.Commit.Message, filesResponse.Commit.Message)
@@ -460,7 +460,7 @@ func testDeleteRepoFilesWithoutBranchNames(t *testing.T, u *url.URL) {
 t.Run("Delete README.md without Branch Name", func(t *testing.T) {
 filesResponse, err := files_service.ChangeRepoFiles(git.DefaultContext, repo, doer, opts)
 assert.NoError(t, err)
-expectedFileResponse := getExpectedFileResponseForRepofilesDelete(u)
+expectedFileResponse := getExpectedFileResponseForRepofilesDelete()
 assert.NotNil(t, filesResponse)
 assert.Nil(t, filesResponse.Files[0])
 assert.EqualValues(t, expectedFileResponse.Commit.Message, filesResponse.Commit.Message)