Merge branch 'main' into lunny/fix_edit_team

Lunny Xiao 2024-10-30 10:59:20 -07:00
commit c2ff613582
71 changed files with 1929 additions and 1640 deletions

View File

@@ -324,6 +324,10 @@ RUN_USER = ; git
 ;; Maximum number of locks returned per page
 ;LFS_LOCKS_PAGING_NUM = 50
 ;;
+;; When clients make lfs batch requests, reject them if there are more pointers than this number
+;; zero means 'unlimited'
+;LFS_MAX_BATCH_SIZE = 0
+;;
 ;; Allow graceful restarts using SIGHUP to fork
 ;ALLOW_GRACEFUL_RESTARTS = true
 ;;
@@ -2638,6 +2642,10 @@ LEVEL = Info
 ;; override the azure blob base path if storage type is azureblob
 ;AZURE_BLOB_BASE_PATH = lfs/
+
+;[lfs_client]
+;; When mirroring an upstream lfs endpoint, limit the number of pointers in each batch request to this number
+;BATCH_SIZE = 20
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 ;; settings for packages, will override storage setting
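Taken together, the two new keys bound LFS batch traffic from both ends: LFS_MAX_BATCH_SIZE lets the server reject a batch request that carries too many pointers (0 keeps the previous unlimited behaviour), while [lfs_client] BATCH_SIZE caps how many pointers Gitea itself packs into one batch request when mirroring an upstream LFS endpoint. A minimal sketch of those two semantics, using hypothetical helper names rather than Gitea's actual internals:

package main

import "fmt"

type pointer struct{ Oid string }

// Server side (LFS_MAX_BATCH_SIZE): reject oversized batch requests; 0 means unlimited.
func checkBatchSize(pointers []pointer, maxBatchSize int) error {
	if maxBatchSize > 0 && len(pointers) > maxBatchSize {
		return fmt.Errorf("batch contains %d pointers, exceeding the limit of %d", len(pointers), maxBatchSize)
	}
	return nil
}

// Client side ([lfs_client] BATCH_SIZE): split pointers into batches of at most batchSize.
func splitBatches(pointers []pointer, batchSize int) [][]pointer {
	if batchSize <= 0 {
		return [][]pointer{pointers}
	}
	var batches [][]pointer
	for len(pointers) > batchSize {
		batches = append(batches, pointers[:batchSize])
		pointers = pointers[batchSize:]
	}
	return append(batches, pointers)
}

func main() {
	ps := make([]pointer, 45)
	fmt.Println(checkBatchSize(ps, 0))     // <nil>: 0 disables the server-side limit
	fmt.Println(len(splitBatches(ps, 20))) // 3: batches of 20 + 20 + 5
}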

View File

@@ -6,6 +6,12 @@ package db
 import (
 	"context"
 	"database/sql"
+	"errors"
+	"runtime"
+	"slices"
+	"sync"
+
+	"code.gitea.io/gitea/modules/setting"
 
 	"xorm.io/builder"
 	"xorm.io/xorm"
@@ -15,45 +21,23 @@ import (
 // will be overwritten by Init with HammerContext
 var DefaultContext context.Context
 
-// contextKey is a value for use with context.WithValue.
-type contextKey struct {
-	name string
-}
+type engineContextKeyType struct{}
 
-// enginedContextKey is a context key. It is used with context.Value() to get the current Engined for the context
-var (
-	enginedContextKey = &contextKey{"engined"}
-	_ Engined = &Context{}
-)
+var engineContextKey = engineContextKeyType{}
 
 // Context represents a db context
 type Context struct {
 	context.Context
-	e           Engine
-	transaction bool
+	engine Engine
 }
 
-func newContext(ctx context.Context, e Engine, transaction bool) *Context {
-	return &Context{
-		Context:     ctx,
-		e:           e,
-		transaction: transaction,
-	}
-}
-
-// InTransaction if context is in a transaction
-func (ctx *Context) InTransaction() bool {
-	return ctx.transaction
-}
-
-// Engine returns db engine
-func (ctx *Context) Engine() Engine {
-	return ctx.e
+func newContext(ctx context.Context, e Engine) *Context {
+	return &Context{Context: ctx, engine: e}
 }
 
 // Value shadows Value for context.Context but allows us to get ourselves and an Engined object
 func (ctx *Context) Value(key any) any {
-	if key == enginedContextKey {
+	if key == engineContextKey {
 		return ctx
 	}
 	return ctx.Context.Value(key)
@@ -61,30 +45,66 @@ func (ctx *Context) Value(key any) any {
 // WithContext returns this engine tied to this context
 func (ctx *Context) WithContext(other context.Context) *Context {
-	return newContext(ctx, ctx.e.Context(other), ctx.transaction)
+	return newContext(ctx, ctx.engine.Context(other))
 }
 
-// Engined structs provide an Engine
-type Engined interface {
-	Engine() Engine
-}
+var (
+	contextSafetyOnce          sync.Once
+	contextSafetyDeniedFuncPCs []uintptr
+)
+
+func contextSafetyCheck(e Engine) {
+	if setting.IsProd && !setting.IsInTesting {
+		return
+	}
+	if e == nil {
+		return
+	}
+	// Only do this check for non-end-users. If the problem could be fixed in the future, this code could be removed.
+	contextSafetyOnce.Do(func() {
+		// try to figure out the bad functions to deny
+		type m struct{}
+		_ = e.SQL("SELECT 1").Iterate(&m{}, func(int, any) error {
+			callers := make([]uintptr, 32)
+			callerNum := runtime.Callers(1, callers)
+			for i := 0; i < callerNum; i++ {
+				if funcName := runtime.FuncForPC(callers[i]).Name(); funcName == "xorm.io/xorm.(*Session).Iterate" {
+					contextSafetyDeniedFuncPCs = append(contextSafetyDeniedFuncPCs, callers[i])
+				}
+			}
+			return nil
+		})
+		if len(contextSafetyDeniedFuncPCs) != 1 {
+			panic(errors.New("unable to determine the functions to deny"))
+		}
+	})
+
+	// it should be very fast: xxxx ns/op
+	callers := make([]uintptr, 32)
+	callerNum := runtime.Callers(3, callers) // skip 3: runtime.Callers, contextSafetyCheck, GetEngine
+	for i := 0; i < callerNum; i++ {
+		if slices.Contains(contextSafetyDeniedFuncPCs, callers[i]) {
+			panic(errors.New("using database context in an iterator would cause corrupted results"))
+		}
+	}
+}
 
-// GetEngine will get a db Engine from this context or return an Engine restricted to this context
+// GetEngine gets an existing db Engine/Statement or creates a new Session
 func GetEngine(ctx context.Context) Engine {
-	if e := getEngine(ctx); e != nil {
+	if e := getExistingEngine(ctx); e != nil {
 		return e
 	}
 	return x.Context(ctx)
 }
 
-// getEngine will get a db Engine from this context or return nil
-func getEngine(ctx context.Context) Engine {
-	if engined, ok := ctx.(Engined); ok {
-		return engined.Engine()
-	}
-	enginedInterface := ctx.Value(enginedContextKey)
-	if enginedInterface != nil {
-		return enginedInterface.(Engined).Engine()
+// getExistingEngine gets an existing db Engine/Statement from this context or returns nil
+func getExistingEngine(ctx context.Context) (e Engine) {
+	defer func() { contextSafetyCheck(e) }()
+	if engined, ok := ctx.(*Context); ok {
+		return engined.engine
+	}
+	if engined, ok := ctx.Value(engineContextKey).(*Context); ok {
+		return engined.engine
 	}
 	return nil
 }
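contextSafetyCheck above learns the program counter of xorm.io/xorm.(*Session).Iterate once, by walking the stack from inside a throwaway Iterate callback, and then panics whenever getExistingEngine is reached with that PC on the call stack, i.e. from inside an Iterate callback. A standalone sketch of the same caller-PC denylist technique, with generic names instead of Gitea's:

package main

import (
	"fmt"
	"runtime"
	"slices"
	"sync"
)

var (
	denyOnce  sync.Once
	deniedPCs []uintptr
)

// withIterator stands in for the function we never want check() to run under.
//
//go:noinline
func withIterator(f func()) { f() }

// check panics when it is reached from inside withIterator's callback.
func check() {
	denyOnce.Do(func() {
		// learn withIterator's return PC by walking the stack from inside one of its callbacks
		withIterator(func() {
			pcs := make([]uintptr, 32)
			n := runtime.Callers(1, pcs)
			for i := 0; i < n; i++ {
				if fn := runtime.FuncForPC(pcs[i]); fn != nil && fn.Name() == "main.withIterator" {
					deniedPCs = append(deniedPCs, pcs[i])
				}
			}
		})
	})

	pcs := make([]uintptr, 32)
	n := runtime.Callers(2, pcs) // skip runtime.Callers and check itself
	for i := 0; i < n; i++ {
		if slices.Contains(deniedPCs, pcs[i]) {
			panic("check() must not be called from inside withIterator")
		}
	}
}

func main() {
	check() // fine: not running under withIterator
	defer func() { fmt.Println("recovered:", recover()) }()
	withIterator(func() { check() }) // panics, caught by the deferred recover
}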
@@ -132,23 +152,23 @@ func (c *halfCommitter) Close() error {
 // d. It doesn't mean rollback is forbidden, but always do it only when there is an error, and you do want to rollback.
 func TxContext(parentCtx context.Context) (*Context, Committer, error) {
 	if sess, ok := inTransaction(parentCtx); ok {
-		return newContext(parentCtx, sess, true), &halfCommitter{committer: sess}, nil
+		return newContext(parentCtx, sess), &halfCommitter{committer: sess}, nil
 	}
 
 	sess := x.NewSession()
 	if err := sess.Begin(); err != nil {
-		sess.Close()
+		_ = sess.Close()
 		return nil, nil, err
 	}
 
-	return newContext(DefaultContext, sess, true), sess, nil
+	return newContext(DefaultContext, sess), sess, nil
 }
 
 // WithTx represents executing database operations on a transaction, if the transaction exist,
 // this function will reuse it otherwise will create a new one and close it when finished.
 func WithTx(parentCtx context.Context, f func(ctx context.Context) error) error {
 	if sess, ok := inTransaction(parentCtx); ok {
-		err := f(newContext(parentCtx, sess, true))
+		err := f(newContext(parentCtx, sess))
 		if err != nil {
 			// rollback immediately, in case the caller ignores returned error and tries to commit the transaction.
 			_ = sess.Close()
@@ -165,7 +185,7 @@ func txWithNoCheck(parentCtx context.Context, f func(ctx context.Context) error)
 		return err
 	}
 
-	if err := f(newContext(parentCtx, sess, true)); err != nil {
+	if err := f(newContext(parentCtx, sess)); err != nil {
 		return err
 	}
 
@@ -312,7 +332,7 @@ func InTransaction(ctx context.Context) bool {
 }
 
 func inTransaction(ctx context.Context) (*xorm.Session, bool) {
-	e := getEngine(ctx)
+	e := getExistingEngine(ctx)
 	if e == nil {
 		return nil, false
 	}
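With the transaction flag gone from newContext, callers lean entirely on WithTx/TxContext to scope a transaction and on GetEngine to pick up whichever session the context carries. A usage sketch built only from the signatures shown above (Bean, BeanLog and the column name are made up):

// renameBean runs both statements in one transaction; if ctx already carries a
// transaction db.WithTx reuses it, otherwise it opens a new session and commits it.
func renameBean(ctx context.Context, id int64, name string) error {
	return db.WithTx(ctx, func(ctx context.Context) error {
		if _, err := db.GetEngine(ctx).ID(id).Cols("name").Update(&Bean{Name: name}); err != nil {
			return err
		}
		_, err := db.GetEngine(ctx).Insert(&BeanLog{BeanID: id, NewName: name})
		return err // a non-nil error rolls the whole transaction back
	})
}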

View File

@@ -84,3 +84,47 @@ func TestTxContext(t *testing.T) {
 		}))
 	}
 }
+
+func TestContextSafety(t *testing.T) {
+	type TestModel1 struct {
+		ID int64
+	}
+	type TestModel2 struct {
+		ID int64
+	}
+	assert.NoError(t, unittest.GetXORMEngine().Sync(&TestModel1{}, &TestModel2{}))
+	assert.NoError(t, db.TruncateBeans(db.DefaultContext, &TestModel1{}, &TestModel2{}))
+	testCount := 10
+	for i := 1; i <= testCount; i++ {
+		assert.NoError(t, db.Insert(db.DefaultContext, &TestModel1{ID: int64(i)}))
+		assert.NoError(t, db.Insert(db.DefaultContext, &TestModel2{ID: int64(-i)}))
+	}
+
+	actualCount := 0
+	// here: db.GetEngine(db.DefaultContext) is a new *Session created from *Engine
+	_ = db.WithTx(db.DefaultContext, func(ctx context.Context) error {
+		_ = db.GetEngine(ctx).Iterate(&TestModel1{}, func(i int, bean any) error {
+			// here: db.GetEngine(ctx) is always the unclosed "Iterate" *Session with autoResetStatement=false,
+			// and the internal states (including "cond" and others) are always there and not be reset in this callback.
+			m1 := bean.(*TestModel1)
+			assert.EqualValues(t, i+1, m1.ID)
+
+			// here: XORM bug, it fails because the SQL becomes "WHERE id=-1", "WHERE id=-1 AND id=-2", "WHERE id=-1 AND id=-2 AND id=-3" ...
+			// and it conflicts with the "Iterate"'s internal states.
+			// has, err := db.GetEngine(ctx).Get(&TestModel2{ID: -m1.ID})
+
+			actualCount++
+			return nil
+		})
+		return nil
+	})
+	assert.EqualValues(t, testCount, actualCount)
+
+	// deny the bad usages
+	assert.PanicsWithError(t, "using database context in an iterator would cause corrupted results", func() {
+		_ = unittest.GetXORMEngine().Iterate(&TestModel1{}, func(i int, bean any) error {
+			_ = db.GetEngine(db.DefaultContext)
+			return nil
+		})
+	})
+}

View File

@@ -161,10 +161,7 @@ func InitEngine(ctx context.Context) error {
 // SetDefaultEngine sets the default engine for db
 func SetDefaultEngine(ctx context.Context, eng *xorm.Engine) {
 	x = eng
-	DefaultContext = &Context{
-		Context: ctx,
-		e:       x,
-	}
+	DefaultContext = &Context{Context: ctx, engine: x}
 }
 
 // UnsetDefaultEngine closes and unsets the default engine

View File

@@ -11,7 +11,7 @@ import (
 )
 
 func getXORMEngine() *xorm.Engine {
-	return db.DefaultContext.(*db.Context).Engine().(*xorm.Engine)
+	return db.GetEngine(db.DefaultContext).(*xorm.Engine)
 }
 
 // CheckDatabaseConnection checks the database connection

View File

@@ -11,7 +11,7 @@ import (
 	"xorm.io/builder"
 )
 
-// Iterate iterate all the Bean object
+// Iterate iterates all the Bean object
 func Iterate[Bean any](ctx context.Context, cond builder.Cond, f func(ctx context.Context, bean *Bean) error) error {
 	var start int
 	batchSize := setting.Database.IterateBufferSize
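The generic helper keeps its signature; only the doc comment changes. A usage sketch against that signature (Repo and the condition are illustrative, not from this patch):

// Visits archived repositories in batches of setting.Database.IterateBufferSize rows,
// opening a fresh statement per batch instead of holding one long-lived Iterate session.
err := db.Iterate(ctx, builder.Eq{"is_archived": true}, func(ctx context.Context, repo *Repo) error {
	return processRepo(ctx, repo)
})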

View File

@@ -63,6 +63,7 @@ type ProtectedBranch struct
 	RequireSignedCommits          bool   `xorm:"NOT NULL DEFAULT false"`
 	ProtectedFilePatterns         string `xorm:"TEXT"`
 	UnprotectedFilePatterns       string `xorm:"TEXT"`
+	BlockAdminMergeOverride       bool   `xorm:"NOT NULL DEFAULT false"`
 
 	CreatedUnix timeutil.TimeStamp `xorm:"created"`
 	UpdatedUnix timeutil.TimeStamp `xorm:"updated"`
@@ -83,14 +84,20 @@ func IsRuleNameSpecial(ruleName string) bool {
 }
 
 func (protectBranch *ProtectedBranch) loadGlob() {
-	if protectBranch.globRule == nil {
-		var err error
-		protectBranch.globRule, err = glob.Compile(protectBranch.RuleName, '/')
-		if err != nil {
-			log.Warn("Invalid glob rule for ProtectedBranch[%d]: %s %v", protectBranch.ID, protectBranch.RuleName, err)
-			protectBranch.globRule = glob.MustCompile(glob.QuoteMeta(protectBranch.RuleName), '/')
-		}
-		protectBranch.isPlainName = !IsRuleNameSpecial(protectBranch.RuleName)
+	if protectBranch.isPlainName || protectBranch.globRule != nil {
+		return
+	}
+	// detect if it is not glob
+	if !IsRuleNameSpecial(protectBranch.RuleName) {
+		protectBranch.isPlainName = true
+		return
+	}
+	// now we load the glob
+	var err error
+	protectBranch.globRule, err = glob.Compile(protectBranch.RuleName, '/')
+	if err != nil {
+		log.Warn("Invalid glob rule for ProtectedBranch[%d]: %s %v", protectBranch.ID, protectBranch.RuleName, err)
+		protectBranch.globRule = glob.MustCompile(glob.QuoteMeta(protectBranch.RuleName), '/')
 	}
 }
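loadGlob now marks plain branch names before it ever compiles a pattern, so only rules that contain glob syntax pay for compilation. A standalone sketch of the underlying matching behaviour, assuming these glob.Compile/QuoteMeta calls come from github.com/gobwas/glob:

package main

import (
	"fmt"

	"github.com/gobwas/glob" // assumed import path for the calls shown above
)

func main() {
	// With '/' registered as a separator, '*' does not cross path segments:
	// "release/*" matches "release/v1" but not "release/v1/hotfix".
	g := glob.MustCompile("release/*", '/')
	fmt.Println(g.Match("release/v1"))        // true
	fmt.Println(g.Match("release/v1/hotfix")) // false

	// QuoteMeta escapes metacharacters, which is the fallback used above when
	// glob.Compile rejects an invalid rule: the rule then only matches itself.
	lit := glob.MustCompile(glob.QuoteMeta("release/[v1"), '/')
	fmt.Println(lit.Match("release/[v1")) // true
}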

View File

@@ -74,3 +74,32 @@ func TestBranchRuleMatchPriority(t *testing.T) {
 		}
 	}
 }
+
+func TestBranchRuleSort(t *testing.T) {
+	in := []*ProtectedBranch{{
+		RuleName:    "b",
+		CreatedUnix: 1,
+	}, {
+		RuleName:    "b/*",
+		CreatedUnix: 3,
+	}, {
+		RuleName:    "a/*",
+		CreatedUnix: 2,
+	}, {
+		RuleName:    "c",
+		CreatedUnix: 0,
+	}, {
+		RuleName:    "a",
+		CreatedUnix: 4,
+	}}
+	expect := []string{"c", "b", "a", "a/*", "b/*"}
+
+	pbr := ProtectedBranchRules(in)
+	pbr.sort()
+
+	var got []string
+	for i := range pbr {
+		got = append(got, pbr[i].RuleName)
+	}
+	assert.Equal(t, expect, got)
+}
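The expected order here is plain branch names first, then glob rules, and within each group the older rule wins. A self-contained comparator consistent with that ordering (a sketch mirroring the test data; the real sort() may differ in detail):

package main

import (
	"fmt"
	"sort"
)

type rule struct {
	Name        string
	IsPlain     bool // no glob metacharacters in Name
	CreatedUnix int64
}

func main() {
	rules := []rule{
		{"b", true, 1}, {"b/*", false, 3}, {"a/*", false, 2}, {"c", true, 0}, {"a", true, 4},
	}
	sort.Slice(rules, func(i, j int) bool {
		if rules[i].IsPlain != rules[j].IsPlain {
			return rules[i].IsPlain // exact names are matched before glob patterns
		}
		return rules[i].CreatedUnix < rules[j].CreatedUnix // then oldest rule first
	})
	for _, r := range rules {
		fmt.Print(r.Name, " ") // c b a a/* b/*
	}
	fmt.Println()
}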

View File

@@ -36,25 +36,15 @@ import (
 
 const minDBVersion = 70 // Gitea 1.5.3
 
-// Migration describes on migration from lower version to high version
-type Migration interface {
-	Description() string
-	Migrate(*xorm.Engine) error
-}
-
 type migration struct {
+	idNumber    int64 // DB version is "the last migration's idNumber" + 1
 	description string
 	migrate     func(*xorm.Engine) error
 }
 
-// NewMigration creates a new migration
-func NewMigration(desc string, fn func(*xorm.Engine) error) Migration {
-	return &migration{desc, fn}
-}
-
-// Description returns the migration's description
-func (m *migration) Description() string {
-	return m.description
+// newMigration creates a new migration
+func newMigration(idNumber int64, desc string, fn func(*xorm.Engine) error) *migration {
+	return &migration{idNumber, desc, fn}
 }
 
 // Migrate executes the migration
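Because each migration now records its idNumber explicitly, the target database version can be read straight off the prepared list instead of being derived from a minimum version plus a slice index. A sketch of the invariant spelled out in the comments above (the helper name is illustrative, not part of this patch):

// expectedDBVersion returns what the `version` table should hold once every
// prepared migration has run: the last migration's idNumber + 1.
func expectedDBVersion(migrations []*migration) int64 {
	// assumes the list is non-empty and ordered by idNumber
	return migrations[len(migrations)-1].idNumber + 1
}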
@@ -65,544 +55,317 @@ func (m *migration) Migrate(x *xorm.Engine) error {
// Version describes the version table. Should have only one row with id==1 // Version describes the version table. Should have only one row with id==1
type Version struct { type Version struct {
ID int64 `xorm:"pk autoincr"` ID int64 `xorm:"pk autoincr"`
Version int64 Version int64 // DB version is "the last migration's idNumber" + 1
} }
// Use noopMigration when there is a migration that has been no-oped // Use noopMigration when there is a migration that has been no-oped
var noopMigration = func(_ *xorm.Engine) error { return nil } var noopMigration = func(_ *xorm.Engine) error { return nil }
var preparedMigrations []*migration
// This is a sequence of migrations. Add new migrations to the bottom of the list. // This is a sequence of migrations. Add new migrations to the bottom of the list.
// If you want to "retire" a migration, remove it from the top of the list and // If you want to "retire" a migration, remove it from the top of the list and
// update minDBVersion accordingly // update minDBVersion accordingly
var migrations = []Migration{ func prepareMigrationTasks() []*migration {
// Gitea 1.5.0 ends at v69 if preparedMigrations != nil {
return preparedMigrations
}
preparedMigrations = []*migration{
// Gitea 1.5.0 ends at database version 69
// v70 -> v71 newMigration(70, "add issue_dependencies", v1_6.AddIssueDependencies),
NewMigration("add issue_dependencies", v1_6.AddIssueDependencies), newMigration(71, "protect each scratch token", v1_6.AddScratchHash),
// v71 -> v72 newMigration(72, "add review", v1_6.AddReview),
NewMigration("protect each scratch token", v1_6.AddScratchHash),
// v72 -> v73
NewMigration("add review", v1_6.AddReview),
// Gitea 1.6.0 ends at v73 // Gitea 1.6.0 ends at database version 73
// v73 -> v74 newMigration(73, "add must_change_password column for users table", v1_7.AddMustChangePassword),
NewMigration("add must_change_password column for users table", v1_7.AddMustChangePassword), newMigration(74, "add approval whitelists to protected branches", v1_7.AddApprovalWhitelistsToProtectedBranches),
// v74 -> v75 newMigration(75, "clear nonused data which not deleted when user was deleted", v1_7.ClearNonusedData),
NewMigration("add approval whitelists to protected branches", v1_7.AddApprovalWhitelistsToProtectedBranches),
// v75 -> v76
NewMigration("clear nonused data which not deleted when user was deleted", v1_7.ClearNonusedData),
// Gitea 1.7.0 ends at v76 // Gitea 1.7.0 ends at database version 76
// v76 -> v77 newMigration(76, "add pull request rebase with merge commit", v1_8.AddPullRequestRebaseWithMerge),
NewMigration("add pull request rebase with merge commit", v1_8.AddPullRequestRebaseWithMerge), newMigration(77, "add theme to users", v1_8.AddUserDefaultTheme),
// v77 -> v78 newMigration(78, "rename repo is_bare to repo is_empty", v1_8.RenameRepoIsBareToIsEmpty),
NewMigration("add theme to users", v1_8.AddUserDefaultTheme), newMigration(79, "add can close issues via commit in any branch", v1_8.AddCanCloseIssuesViaCommitInAnyBranch),
// v78 -> v79 newMigration(80, "add is locked to issues", v1_8.AddIsLockedToIssues),
NewMigration("rename repo is_bare to repo is_empty", v1_8.RenameRepoIsBareToIsEmpty), newMigration(81, "update U2F counter type", v1_8.ChangeU2FCounterType),
// v79 -> v80
NewMigration("add can close issues via commit in any branch", v1_8.AddCanCloseIssuesViaCommitInAnyBranch),
// v80 -> v81
NewMigration("add is locked to issues", v1_8.AddIsLockedToIssues),
// v81 -> v82
NewMigration("update U2F counter type", v1_8.ChangeU2FCounterType),
// Gitea 1.8.0 ends at v82 // Gitea 1.8.0 ends at database version 82
// v82 -> v83 newMigration(82, "hot fix for wrong release sha1 on release table", v1_9.FixReleaseSha1OnReleaseTable),
NewMigration("hot fix for wrong release sha1 on release table", v1_9.FixReleaseSha1OnReleaseTable), newMigration(83, "add uploader id for table attachment", v1_9.AddUploaderIDForAttachment),
// v83 -> v84 newMigration(84, "add table to store original imported gpg keys", v1_9.AddGPGKeyImport),
NewMigration("add uploader id for table attachment", v1_9.AddUploaderIDForAttachment), newMigration(85, "hash application token", v1_9.HashAppToken),
// v84 -> v85 newMigration(86, "add http method to webhook", v1_9.AddHTTPMethodToWebhook),
NewMigration("add table to store original imported gpg keys", v1_9.AddGPGKeyImport), newMigration(87, "add avatar field to repository", v1_9.AddAvatarFieldToRepository),
// v85 -> v86
NewMigration("hash application token", v1_9.HashAppToken),
// v86 -> v87
NewMigration("add http method to webhook", v1_9.AddHTTPMethodToWebhook),
// v87 -> v88
NewMigration("add avatar field to repository", v1_9.AddAvatarFieldToRepository),
// Gitea 1.9.0 ends at v88 // Gitea 1.9.0 ends at database version 88
// v88 -> v89 newMigration(88, "add commit status context field to commit_status", v1_10.AddCommitStatusContext),
NewMigration("add commit status context field to commit_status", v1_10.AddCommitStatusContext), newMigration(89, "add original author/url migration info to issues, comments, and repo ", v1_10.AddOriginalMigrationInfo),
// v89 -> v90 newMigration(90, "change length of some repository columns", v1_10.ChangeSomeColumnsLengthOfRepo),
NewMigration("add original author/url migration info to issues, comments, and repo ", v1_10.AddOriginalMigrationInfo), newMigration(91, "add index on owner_id of repository and type, review_id of comment", v1_10.AddIndexOnRepositoryAndComment),
// v90 -> v91 newMigration(92, "remove orphaned repository index statuses", v1_10.RemoveLingeringIndexStatus),
NewMigration("change length of some repository columns", v1_10.ChangeSomeColumnsLengthOfRepo), newMigration(93, "add email notification enabled preference to user", v1_10.AddEmailNotificationEnabledToUser),
// v91 -> v92 newMigration(94, "add enable_status_check, status_check_contexts to protected_branch", v1_10.AddStatusCheckColumnsForProtectedBranches),
NewMigration("add index on owner_id of repository and type, review_id of comment", v1_10.AddIndexOnRepositoryAndComment), newMigration(95, "add table columns for cross referencing issues", v1_10.AddCrossReferenceColumns),
// v92 -> v93 newMigration(96, "delete orphaned attachments", v1_10.DeleteOrphanedAttachments),
NewMigration("remove orphaned repository index statuses", v1_10.RemoveLingeringIndexStatus), newMigration(97, "add repo_admin_change_team_access to user", v1_10.AddRepoAdminChangeTeamAccessColumnForUser),
// v93 -> v94 newMigration(98, "add original author name and id on migrated release", v1_10.AddOriginalAuthorOnMigratedReleases),
NewMigration("add email notification enabled preference to user", v1_10.AddEmailNotificationEnabledToUser), newMigration(99, "add task table and status column for repository table", v1_10.AddTaskTable),
// v94 -> v95 newMigration(100, "update migration repositories' service type", v1_10.UpdateMigrationServiceTypes),
NewMigration("add enable_status_check, status_check_contexts to protected_branch", v1_10.AddStatusCheckColumnsForProtectedBranches), newMigration(101, "change length of some external login users columns", v1_10.ChangeSomeColumnsLengthOfExternalLoginUser),
// v95 -> v96
NewMigration("add table columns for cross referencing issues", v1_10.AddCrossReferenceColumns),
// v96 -> v97
NewMigration("delete orphaned attachments", v1_10.DeleteOrphanedAttachments),
// v97 -> v98
NewMigration("add repo_admin_change_team_access to user", v1_10.AddRepoAdminChangeTeamAccessColumnForUser),
// v98 -> v99
NewMigration("add original author name and id on migrated release", v1_10.AddOriginalAuthorOnMigratedReleases),
// v99 -> v100
NewMigration("add task table and status column for repository table", v1_10.AddTaskTable),
// v100 -> v101
NewMigration("update migration repositories' service type", v1_10.UpdateMigrationServiceTypes),
// v101 -> v102
NewMigration("change length of some external login users columns", v1_10.ChangeSomeColumnsLengthOfExternalLoginUser),
// Gitea 1.10.0 ends at v102 // Gitea 1.10.0 ends at database version 102
// v102 -> v103 newMigration(102, "update migration repositories' service type", v1_11.DropColumnHeadUserNameOnPullRequest),
NewMigration("update migration repositories' service type", v1_11.DropColumnHeadUserNameOnPullRequest), newMigration(103, "Add WhitelistDeployKeys to protected branch", v1_11.AddWhitelistDeployKeysToBranches),
// v103 -> v104 newMigration(104, "remove unnecessary columns from label", v1_11.RemoveLabelUneededCols),
NewMigration("Add WhitelistDeployKeys to protected branch", v1_11.AddWhitelistDeployKeysToBranches), newMigration(105, "add includes_all_repositories to teams", v1_11.AddTeamIncludesAllRepositories),
// v104 -> v105 newMigration(106, "add column `mode` to table watch", v1_11.AddModeColumnToWatch),
NewMigration("remove unnecessary columns from label", v1_11.RemoveLabelUneededCols), newMigration(107, "Add template options to repository", v1_11.AddTemplateToRepo),
// v105 -> v106 newMigration(108, "Add comment_id on table notification", v1_11.AddCommentIDOnNotification),
NewMigration("add includes_all_repositories to teams", v1_11.AddTeamIncludesAllRepositories), newMigration(109, "add can_create_org_repo to team", v1_11.AddCanCreateOrgRepoColumnForTeam),
// v106 -> v107 newMigration(110, "change review content type to text", v1_11.ChangeReviewContentToText),
NewMigration("add column `mode` to table watch", v1_11.AddModeColumnToWatch), newMigration(111, "update branch protection for can push and whitelist enable", v1_11.AddBranchProtectionCanPushAndEnableWhitelist),
// v107 -> v108 newMigration(112, "remove release attachments which repository deleted", v1_11.RemoveAttachmentMissedRepo),
NewMigration("Add template options to repository", v1_11.AddTemplateToRepo), newMigration(113, "new feature: change target branch of pull requests", v1_11.FeatureChangeTargetBranch),
// v108 -> v109 newMigration(114, "Remove authentication credentials from stored URL", v1_11.SanitizeOriginalURL),
NewMigration("Add comment_id on table notification", v1_11.AddCommentIDOnNotification), newMigration(115, "add user_id prefix to existing user avatar name", v1_11.RenameExistingUserAvatarName),
// v109 -> v110 newMigration(116, "Extend TrackedTimes", v1_11.ExtendTrackedTimes),
NewMigration("add can_create_org_repo to team", v1_11.AddCanCreateOrgRepoColumnForTeam),
// v110 -> v111
NewMigration("change review content type to text", v1_11.ChangeReviewContentToText),
// v111 -> v112
NewMigration("update branch protection for can push and whitelist enable", v1_11.AddBranchProtectionCanPushAndEnableWhitelist),
// v112 -> v113
NewMigration("remove release attachments which repository deleted", v1_11.RemoveAttachmentMissedRepo),
// v113 -> v114
NewMigration("new feature: change target branch of pull requests", v1_11.FeatureChangeTargetBranch),
// v114 -> v115
NewMigration("Remove authentication credentials from stored URL", v1_11.SanitizeOriginalURL),
// v115 -> v116
NewMigration("add user_id prefix to existing user avatar name", v1_11.RenameExistingUserAvatarName),
// v116 -> v117
NewMigration("Extend TrackedTimes", v1_11.ExtendTrackedTimes),
// Gitea 1.11.0 ends at v117 // Gitea 1.11.0 ends at database version 117
// v117 -> v118 newMigration(117, "Add block on rejected reviews branch protection", v1_12.AddBlockOnRejectedReviews),
NewMigration("Add block on rejected reviews branch protection", v1_12.AddBlockOnRejectedReviews), newMigration(118, "Add commit id and stale to reviews", v1_12.AddReviewCommitAndStale),
// v118 -> v119 newMigration(119, "Fix migrated repositories' git service type", v1_12.FixMigratedRepositoryServiceType),
NewMigration("Add commit id and stale to reviews", v1_12.AddReviewCommitAndStale), newMigration(120, "Add owner_name on table repository", v1_12.AddOwnerNameOnRepository),
// v119 -> v120 newMigration(121, "add is_restricted column for users table", v1_12.AddIsRestricted),
NewMigration("Fix migrated repositories' git service type", v1_12.FixMigratedRepositoryServiceType), newMigration(122, "Add Require Signed Commits to ProtectedBranch", v1_12.AddRequireSignedCommits),
// v120 -> v121 newMigration(123, "Add original information for reactions", v1_12.AddReactionOriginals),
NewMigration("Add owner_name on table repository", v1_12.AddOwnerNameOnRepository), newMigration(124, "Add columns to user and repository", v1_12.AddUserRepoMissingColumns),
// v121 -> v122 newMigration(125, "Add some columns on review for migration", v1_12.AddReviewMigrateInfo),
NewMigration("add is_restricted column for users table", v1_12.AddIsRestricted), newMigration(126, "Fix topic repository count", v1_12.FixTopicRepositoryCount),
// v122 -> v123 newMigration(127, "add repository code language statistics", v1_12.AddLanguageStats),
NewMigration("Add Require Signed Commits to ProtectedBranch", v1_12.AddRequireSignedCommits), newMigration(128, "fix merge base for pull requests", v1_12.FixMergeBase),
// v123 -> v124 newMigration(129, "remove dependencies from deleted repositories", v1_12.PurgeUnusedDependencies),
NewMigration("Add original information for reactions", v1_12.AddReactionOriginals), newMigration(130, "Expand webhooks for more granularity", v1_12.ExpandWebhooks),
// v124 -> v125 newMigration(131, "Add IsSystemWebhook column to webhooks table", v1_12.AddSystemWebhookColumn),
NewMigration("Add columns to user and repository", v1_12.AddUserRepoMissingColumns), newMigration(132, "Add Branch Protection Protected Files Column", v1_12.AddBranchProtectionProtectedFilesColumn),
// v125 -> v126 newMigration(133, "Add EmailHash Table", v1_12.AddEmailHashTable),
NewMigration("Add some columns on review for migration", v1_12.AddReviewMigrateInfo), newMigration(134, "Refix merge base for merged pull requests", v1_12.RefixMergeBase),
// v126 -> v127 newMigration(135, "Add OrgID column to Labels table", v1_12.AddOrgIDLabelColumn),
NewMigration("Fix topic repository count", v1_12.FixTopicRepositoryCount), newMigration(136, "Add CommitsAhead and CommitsBehind Column to PullRequest Table", v1_12.AddCommitDivergenceToPulls),
// v127 -> v128 newMigration(137, "Add Branch Protection Block Outdated Branch", v1_12.AddBlockOnOutdatedBranch),
NewMigration("add repository code language statistics", v1_12.AddLanguageStats), newMigration(138, "Add ResolveDoerID to Comment table", v1_12.AddResolveDoerIDCommentColumn),
// v128 -> v129 newMigration(139, "prepend refs/heads/ to issue refs", v1_12.PrependRefsHeadsToIssueRefs),
NewMigration("fix merge base for pull requests", v1_12.FixMergeBase),
// v129 -> v130
NewMigration("remove dependencies from deleted repositories", v1_12.PurgeUnusedDependencies),
// v130 -> v131
NewMigration("Expand webhooks for more granularity", v1_12.ExpandWebhooks),
// v131 -> v132
NewMigration("Add IsSystemWebhook column to webhooks table", v1_12.AddSystemWebhookColumn),
// v132 -> v133
NewMigration("Add Branch Protection Protected Files Column", v1_12.AddBranchProtectionProtectedFilesColumn),
// v133 -> v134
NewMigration("Add EmailHash Table", v1_12.AddEmailHashTable),
// v134 -> v135
NewMigration("Refix merge base for merged pull requests", v1_12.RefixMergeBase),
// v135 -> v136
NewMigration("Add OrgID column to Labels table", v1_12.AddOrgIDLabelColumn),
// v136 -> v137
NewMigration("Add CommitsAhead and CommitsBehind Column to PullRequest Table", v1_12.AddCommitDivergenceToPulls),
// v137 -> v138
NewMigration("Add Branch Protection Block Outdated Branch", v1_12.AddBlockOnOutdatedBranch),
// v138 -> v139
NewMigration("Add ResolveDoerID to Comment table", v1_12.AddResolveDoerIDCommentColumn),
// v139 -> v140
NewMigration("prepend refs/heads/ to issue refs", v1_12.PrependRefsHeadsToIssueRefs),
// Gitea 1.12.0 ends at v140 // Gitea 1.12.0 ends at database version 140
// v140 -> v141 newMigration(140, "Save detected language file size to database instead of percent", v1_13.FixLanguageStatsToSaveSize),
NewMigration("Save detected language file size to database instead of percent", v1_13.FixLanguageStatsToSaveSize), newMigration(141, "Add KeepActivityPrivate to User table", v1_13.AddKeepActivityPrivateUserColumn),
// v141 -> v142 newMigration(142, "Ensure Repository.IsArchived is not null", v1_13.SetIsArchivedToFalse),
NewMigration("Add KeepActivityPrivate to User table", v1_13.AddKeepActivityPrivateUserColumn), newMigration(143, "recalculate Stars number for all user", v1_13.RecalculateStars),
// v142 -> v143 newMigration(144, "update Matrix Webhook http method to 'PUT'", v1_13.UpdateMatrixWebhookHTTPMethod),
NewMigration("Ensure Repository.IsArchived is not null", v1_13.SetIsArchivedToFalse), newMigration(145, "Increase Language field to 50 in LanguageStats", v1_13.IncreaseLanguageField),
// v143 -> v144 newMigration(146, "Add projects info to repository table", v1_13.AddProjectsInfo),
NewMigration("recalculate Stars number for all user", v1_13.RecalculateStars), newMigration(147, "create review for 0 review id code comments", v1_13.CreateReviewsForCodeComments),
// v144 -> v145 newMigration(148, "remove issue dependency comments who refer to non existing issues", v1_13.PurgeInvalidDependenciesComments),
NewMigration("update Matrix Webhook http method to 'PUT'", v1_13.UpdateMatrixWebhookHTTPMethod), newMigration(149, "Add Created and Updated to Milestone table", v1_13.AddCreatedAndUpdatedToMilestones),
// v145 -> v146 newMigration(150, "add primary key to repo_topic", v1_13.AddPrimaryKeyToRepoTopic),
NewMigration("Increase Language field to 50 in LanguageStats", v1_13.IncreaseLanguageField), newMigration(151, "set default password algorithm to Argon2", v1_13.SetDefaultPasswordToArgon2),
// v146 -> v147 newMigration(152, "add TrustModel field to Repository", v1_13.AddTrustModelToRepository),
NewMigration("Add projects info to repository table", v1_13.AddProjectsInfo), newMigration(153, "add Team review request support", v1_13.AddTeamReviewRequestSupport),
// v147 -> v148 newMigration(154, "add timestamps to Star, Label, Follow, Watch and Collaboration", v1_13.AddTimeStamps),
NewMigration("create review for 0 review id code comments", v1_13.CreateReviewsForCodeComments),
// v148 -> v149
NewMigration("remove issue dependency comments who refer to non existing issues", v1_13.PurgeInvalidDependenciesComments),
// v149 -> v150
NewMigration("Add Created and Updated to Milestone table", v1_13.AddCreatedAndUpdatedToMilestones),
// v150 -> v151
NewMigration("add primary key to repo_topic", v1_13.AddPrimaryKeyToRepoTopic),
// v151 -> v152
NewMigration("set default password algorithm to Argon2", v1_13.SetDefaultPasswordToArgon2),
// v152 -> v153
NewMigration("add TrustModel field to Repository", v1_13.AddTrustModelToRepository),
// v153 > v154
NewMigration("add Team review request support", v1_13.AddTeamReviewRequestSupport),
// v154 > v155
NewMigration("add timestamps to Star, Label, Follow, Watch and Collaboration", v1_13.AddTimeStamps),
// Gitea 1.13.0 ends at v155 // Gitea 1.13.0 ends at database version 155
// v155 -> v156 newMigration(155, "add changed_protected_files column for pull_request table", v1_14.AddChangedProtectedFilesPullRequestColumn),
NewMigration("add changed_protected_files column for pull_request table", v1_14.AddChangedProtectedFilesPullRequestColumn), newMigration(156, "fix publisher ID for tag releases", v1_14.FixPublisherIDforTagReleases),
// v156 -> v157 newMigration(157, "ensure repo topics are up-to-date", v1_14.FixRepoTopics),
NewMigration("fix publisher ID for tag releases", v1_14.FixPublisherIDforTagReleases), newMigration(158, "code comment replies should have the commitID of the review they are replying to", v1_14.UpdateCodeCommentReplies),
// v157 -> v158 newMigration(159, "update reactions constraint", v1_14.UpdateReactionConstraint),
NewMigration("ensure repo topics are up-to-date", v1_14.FixRepoTopics), newMigration(160, "Add block on official review requests branch protection", v1_14.AddBlockOnOfficialReviewRequests),
// v158 -> v159 newMigration(161, "Convert task type from int to string", v1_14.ConvertTaskTypeToString),
NewMigration("code comment replies should have the commitID of the review they are replying to", v1_14.UpdateCodeCommentReplies), newMigration(162, "Convert webhook task type from int to string", v1_14.ConvertWebhookTaskTypeToString),
// v159 -> v160 newMigration(163, "Convert topic name from 25 to 50", v1_14.ConvertTopicNameFrom25To50),
NewMigration("update reactions constraint", v1_14.UpdateReactionConstraint), newMigration(164, "Add scope and nonce columns to oauth2_grant table", v1_14.AddScopeAndNonceColumnsToOAuth2Grant),
// v160 -> v161 newMigration(165, "Convert hook task type from char(16) to varchar(16) and trim the column", v1_14.ConvertHookTaskTypeToVarcharAndTrim),
NewMigration("Add block on official review requests branch protection", v1_14.AddBlockOnOfficialReviewRequests), newMigration(166, "Where Password is Valid with Empty String delete it", v1_14.RecalculateUserEmptyPWD),
// v161 -> v162 newMigration(167, "Add user redirect", v1_14.AddUserRedirect),
NewMigration("Convert task type from int to string", v1_14.ConvertTaskTypeToString), newMigration(168, "Recreate user table to fix default values", v1_14.RecreateUserTableToFixDefaultValues),
// v162 -> v163 newMigration(169, "Update DeleteBranch comments to set the old_ref to the commit_sha", v1_14.CommentTypeDeleteBranchUseOldRef),
NewMigration("Convert webhook task type from int to string", v1_14.ConvertWebhookTaskTypeToString), newMigration(170, "Add Dismissed to Review table", v1_14.AddDismissedReviewColumn),
// v163 -> v164 newMigration(171, "Add Sorting to ProjectBoard table", v1_14.AddSortingColToProjectBoard),
NewMigration("Convert topic name from 25 to 50", v1_14.ConvertTopicNameFrom25To50), newMigration(172, "Add sessions table for go-chi/session", v1_14.AddSessionTable),
// v164 -> v165 newMigration(173, "Add time_id column to Comment", v1_14.AddTimeIDCommentColumn),
NewMigration("Add scope and nonce columns to oauth2_grant table", v1_14.AddScopeAndNonceColumnsToOAuth2Grant), newMigration(174, "Create repo transfer table", v1_14.AddRepoTransfer),
// v165 -> v166 newMigration(175, "Fix Postgres ID Sequences broken by recreate-table", v1_14.FixPostgresIDSequences),
NewMigration("Convert hook task type from char(16) to varchar(16) and trim the column", v1_14.ConvertHookTaskTypeToVarcharAndTrim), newMigration(176, "Remove invalid labels from comments", v1_14.RemoveInvalidLabels),
// v166 -> v167 newMigration(177, "Delete orphaned IssueLabels", v1_14.DeleteOrphanedIssueLabels),
NewMigration("Where Password is Valid with Empty String delete it", v1_14.RecalculateUserEmptyPWD),
// v167 -> v168
NewMigration("Add user redirect", v1_14.AddUserRedirect),
// v168 -> v169
NewMigration("Recreate user table to fix default values", v1_14.RecreateUserTableToFixDefaultValues),
// v169 -> v170
NewMigration("Update DeleteBranch comments to set the old_ref to the commit_sha", v1_14.CommentTypeDeleteBranchUseOldRef),
// v170 -> v171
NewMigration("Add Dismissed to Review table", v1_14.AddDismissedReviewColumn),
// v171 -> v172
NewMigration("Add Sorting to ProjectBoard table", v1_14.AddSortingColToProjectBoard),
// v172 -> v173
NewMigration("Add sessions table for go-chi/session", v1_14.AddSessionTable),
// v173 -> v174
NewMigration("Add time_id column to Comment", v1_14.AddTimeIDCommentColumn),
// v174 -> v175
NewMigration("Create repo transfer table", v1_14.AddRepoTransfer),
// v175 -> v176
NewMigration("Fix Postgres ID Sequences broken by recreate-table", v1_14.FixPostgresIDSequences),
// v176 -> v177
NewMigration("Remove invalid labels from comments", v1_14.RemoveInvalidLabels),
// v177 -> v178
NewMigration("Delete orphaned IssueLabels", v1_14.DeleteOrphanedIssueLabels),
// Gitea 1.14.0 ends at v178 // Gitea 1.14.0 ends at database version 178
// v178 -> v179 newMigration(178, "Add LFS columns to Mirror", v1_15.AddLFSMirrorColumns),
NewMigration("Add LFS columns to Mirror", v1_15.AddLFSMirrorColumns), newMigration(179, "Convert avatar url to text", v1_15.ConvertAvatarURLToText),
// v179 -> v180 newMigration(180, "Delete credentials from past migrations", v1_15.DeleteMigrationCredentials),
NewMigration("Convert avatar url to text", v1_15.ConvertAvatarURLToText), newMigration(181, "Always save primary email on email address table", v1_15.AddPrimaryEmail2EmailAddress),
// v180 -> v181 newMigration(182, "Add issue resource index table", v1_15.AddIssueResourceIndexTable),
NewMigration("Delete credentials from past migrations", v1_15.DeleteMigrationCredentials), newMigration(183, "Create PushMirror table", v1_15.CreatePushMirrorTable),
// v181 -> v182 newMigration(184, "Rename Task errors to message", v1_15.RenameTaskErrorsToMessage),
NewMigration("Always save primary email on email address table", v1_15.AddPrimaryEmail2EmailAddress), newMigration(185, "Add new table repo_archiver", v1_15.AddRepoArchiver),
// v182 -> v183 newMigration(186, "Create protected tag table", v1_15.CreateProtectedTagTable),
NewMigration("Add issue resource index table", v1_15.AddIssueResourceIndexTable), newMigration(187, "Drop unneeded webhook related columns", v1_15.DropWebhookColumns),
// v183 -> v184 newMigration(188, "Add key is verified to gpg key", v1_15.AddKeyIsVerified),
NewMigration("Create PushMirror table", v1_15.CreatePushMirrorTable),
// v184 -> v185
NewMigration("Rename Task errors to message", v1_15.RenameTaskErrorsToMessage),
// v185 -> v186
NewMigration("Add new table repo_archiver", v1_15.AddRepoArchiver),
// v186 -> v187
NewMigration("Create protected tag table", v1_15.CreateProtectedTagTable),
// v187 -> v188
NewMigration("Drop unneeded webhook related columns", v1_15.DropWebhookColumns),
// v188 -> v189
NewMigration("Add key is verified to gpg key", v1_15.AddKeyIsVerified),
// Gitea 1.15.0 ends at v189 // Gitea 1.15.0 ends at database version 189
// v189 -> v190 newMigration(189, "Unwrap ldap.Sources", v1_16.UnwrapLDAPSourceCfg),
NewMigration("Unwrap ldap.Sources", v1_16.UnwrapLDAPSourceCfg), newMigration(190, "Add agit flow pull request support", v1_16.AddAgitFlowPullRequest),
// v190 -> v191 newMigration(191, "Alter issue/comment table TEXT fields to LONGTEXT", v1_16.AlterIssueAndCommentTextFieldsToLongText),
NewMigration("Add agit flow pull request support", v1_16.AddAgitFlowPullRequest), newMigration(192, "RecreateIssueResourceIndexTable to have a primary key instead of an unique index", v1_16.RecreateIssueResourceIndexTable),
// v191 -> v192 newMigration(193, "Add repo id column for attachment table", v1_16.AddRepoIDForAttachment),
NewMigration("Alter issue/comment table TEXT fields to LONGTEXT", v1_16.AlterIssueAndCommentTextFieldsToLongText), newMigration(194, "Add Branch Protection Unprotected Files Column", v1_16.AddBranchProtectionUnprotectedFilesColumn),
// v192 -> v193 newMigration(195, "Add table commit_status_index", v1_16.AddTableCommitStatusIndex),
NewMigration("RecreateIssueResourceIndexTable to have a primary key instead of an unique index", v1_16.RecreateIssueResourceIndexTable), newMigration(196, "Add Color to ProjectBoard table", v1_16.AddColorColToProjectBoard),
// v193 -> v194 newMigration(197, "Add renamed_branch table", v1_16.AddRenamedBranchTable),
NewMigration("Add repo id column for attachment table", v1_16.AddRepoIDForAttachment), newMigration(198, "Add issue content history table", v1_16.AddTableIssueContentHistory),
// v194 -> v195 newMigration(199, "No-op (remote version is using AppState now)", noopMigration),
NewMigration("Add Branch Protection Unprotected Files Column", v1_16.AddBranchProtectionUnprotectedFilesColumn), newMigration(200, "Add table app_state", v1_16.AddTableAppState),
// v195 -> v196 newMigration(201, "Drop table remote_version (if exists)", v1_16.DropTableRemoteVersion),
NewMigration("Add table commit_status_index", v1_16.AddTableCommitStatusIndex), newMigration(202, "Create key/value table for user settings", v1_16.CreateUserSettingsTable),
// v196 -> v197 newMigration(203, "Add Sorting to ProjectIssue table", v1_16.AddProjectIssueSorting),
NewMigration("Add Color to ProjectBoard table", v1_16.AddColorColToProjectBoard), newMigration(204, "Add key is verified to ssh key", v1_16.AddSSHKeyIsVerified),
// v197 -> v198 newMigration(205, "Migrate to higher varchar on user struct", v1_16.MigrateUserPasswordSalt),
NewMigration("Add renamed_branch table", v1_16.AddRenamedBranchTable), newMigration(206, "Add authorize column to team_unit table", v1_16.AddAuthorizeColForTeamUnit),
// v198 -> v199 newMigration(207, "Add webauthn table and migrate u2f data to webauthn - NO-OPED", v1_16.AddWebAuthnCred),
NewMigration("Add issue content history table", v1_16.AddTableIssueContentHistory), newMigration(208, "Use base32.HexEncoding instead of base64 encoding for cred ID as it is case insensitive - NO-OPED", v1_16.UseBase32HexForCredIDInWebAuthnCredential),
// v199 -> v200 newMigration(209, "Increase WebAuthentication CredentialID size to 410 - NO-OPED", v1_16.IncreaseCredentialIDTo410),
NewMigration("No-op (remote version is using AppState now)", noopMigration), newMigration(210, "v208 was completely broken - remigrate", v1_16.RemigrateU2FCredentials),
// v200 -> v201
NewMigration("Add table app_state", v1_16.AddTableAppState),
// v201 -> v202
NewMigration("Drop table remote_version (if exists)", v1_16.DropTableRemoteVersion),
// v202 -> v203
NewMigration("Create key/value table for user settings", v1_16.CreateUserSettingsTable),
// v203 -> v204
NewMigration("Add Sorting to ProjectIssue table", v1_16.AddProjectIssueSorting),
// v204 -> v205
NewMigration("Add key is verified to ssh key", v1_16.AddSSHKeyIsVerified),
// v205 -> v206
NewMigration("Migrate to higher varchar on user struct", v1_16.MigrateUserPasswordSalt),
// v206 -> v207
NewMigration("Add authorize column to team_unit table", v1_16.AddAuthorizeColForTeamUnit),
// v207 -> v208
NewMigration("Add webauthn table and migrate u2f data to webauthn - NO-OPED", v1_16.AddWebAuthnCred),
// v208 -> v209
NewMigration("Use base32.HexEncoding instead of base64 encoding for cred ID as it is case insensitive - NO-OPED", v1_16.UseBase32HexForCredIDInWebAuthnCredential),
// v209 -> v210
NewMigration("Increase WebAuthentication CredentialID size to 410 - NO-OPED", v1_16.IncreaseCredentialIDTo410),
// v210 -> v211
NewMigration("v208 was completely broken - remigrate", v1_16.RemigrateU2FCredentials),
// Gitea 1.16.2 ends at v211 // Gitea 1.16.2 ends at database version 211
// v211 -> v212 newMigration(211, "Create ForeignReference table", v1_17.CreateForeignReferenceTable),
NewMigration("Create ForeignReference table", v1_17.CreateForeignReferenceTable), newMigration(212, "Add package tables", v1_17.AddPackageTables),
// v212 -> v213 newMigration(213, "Add allow edits from maintainers to PullRequest table", v1_17.AddAllowMaintainerEdit),
NewMigration("Add package tables", v1_17.AddPackageTables), newMigration(214, "Add auto merge table", v1_17.AddAutoMergeTable),
// v213 -> v214 newMigration(215, "allow to view files in PRs", v1_17.AddReviewViewedFiles),
NewMigration("Add allow edits from maintainers to PullRequest table", v1_17.AddAllowMaintainerEdit), newMigration(216, "No-op (Improve Action table indices v1)", noopMigration),
// v214 -> v215 newMigration(217, "Alter hook_task table TEXT fields to LONGTEXT", v1_17.AlterHookTaskTextFieldsToLongText),
NewMigration("Add auto merge table", v1_17.AddAutoMergeTable), newMigration(218, "Improve Action table indices v2", v1_17.ImproveActionTableIndices),
// v215 -> v216 newMigration(219, "Add sync_on_commit column to push_mirror table", v1_17.AddSyncOnCommitColForPushMirror),
NewMigration("allow to view files in PRs", v1_17.AddReviewViewedFiles), newMigration(220, "Add container repository property", v1_17.AddContainerRepositoryProperty),
// v216 -> v217 newMigration(221, "Store WebAuthentication CredentialID as bytes and increase size to at least 1024", v1_17.StoreWebauthnCredentialIDAsBytes),
NewMigration("No-op (Improve Action table indices v1)", noopMigration), newMigration(222, "Drop old CredentialID column", v1_17.DropOldCredentialIDColumn),
// v217 -> v218 newMigration(223, "Rename CredentialIDBytes column to CredentialID", v1_17.RenameCredentialIDBytes),
NewMigration("Alter hook_task table TEXT fields to LONGTEXT", v1_17.AlterHookTaskTextFieldsToLongText),
// v218 -> v219
NewMigration("Improve Action table indices v2", v1_17.ImproveActionTableIndices),
// v219 -> v220
NewMigration("Add sync_on_commit column to push_mirror table", v1_17.AddSyncOnCommitColForPushMirror),
// v220 -> v221
NewMigration("Add container repository property", v1_17.AddContainerRepositoryProperty),
// v221 -> v222
NewMigration("Store WebAuthentication CredentialID as bytes and increase size to at least 1024", v1_17.StoreWebauthnCredentialIDAsBytes),
// v222 -> v223
NewMigration("Drop old CredentialID column", v1_17.DropOldCredentialIDColumn),
// v223 -> v224
NewMigration("Rename CredentialIDBytes column to CredentialID", v1_17.RenameCredentialIDBytes),
// Gitea 1.17.0 ends at v224 // Gitea 1.17.0 ends at database version 224
// v224 -> v225 newMigration(224, "Add badges to users", v1_18.CreateUserBadgesTable),
NewMigration("Add badges to users", v1_18.CreateUserBadgesTable), newMigration(225, "Alter gpg_key/public_key content TEXT fields to MEDIUMTEXT", v1_18.AlterPublicGPGKeyContentFieldsToMediumText),
// v225 -> v226 newMigration(226, "Conan and generic packages do not need to be semantically versioned", v1_18.FixPackageSemverField),
NewMigration("Alter gpg_key/public_key content TEXT fields to MEDIUMTEXT", v1_18.AlterPublicGPGKeyContentFieldsToMediumText), newMigration(227, "Create key/value table for system settings", v1_18.CreateSystemSettingsTable),
// v226 -> v227 newMigration(228, "Add TeamInvite table", v1_18.AddTeamInviteTable),
NewMigration("Conan and generic packages do not need to be semantically versioned", v1_18.FixPackageSemverField), newMigration(229, "Update counts of all open milestones", v1_18.UpdateOpenMilestoneCounts),
// v227 -> v228 newMigration(230, "Add ConfidentialClient column (default true) to OAuth2Application table", v1_18.AddConfidentialClientColumnToOAuth2ApplicationTable),
NewMigration("Create key/value table for system settings", v1_18.CreateSystemSettingsTable),
// v228 -> v229
NewMigration("Add TeamInvite table", v1_18.AddTeamInviteTable),
// v229 -> v230
NewMigration("Update counts of all open milestones", v1_18.UpdateOpenMilestoneCounts),
// v230 -> v231
NewMigration("Add ConfidentialClient column (default true) to OAuth2Application table", v1_18.AddConfidentialClientColumnToOAuth2ApplicationTable),
// Gitea 1.18.0 ends at v231 // Gitea 1.18.0 ends at database version 231
// v231 -> v232 newMigration(231, "Add index for hook_task", v1_19.AddIndexForHookTask),
NewMigration("Add index for hook_task", v1_19.AddIndexForHookTask), newMigration(232, "Alter package_version.metadata_json to LONGTEXT", v1_19.AlterPackageVersionMetadataToLongText),
// v232 -> v233 newMigration(233, "Add header_authorization_encrypted column to webhook table", v1_19.AddHeaderAuthorizationEncryptedColWebhook),
NewMigration("Alter package_version.metadata_json to LONGTEXT", v1_19.AlterPackageVersionMetadataToLongText), newMigration(234, "Add package cleanup rule table", v1_19.CreatePackageCleanupRuleTable),
// v233 -> v234 newMigration(235, "Add index for access_token", v1_19.AddIndexForAccessToken),
NewMigration("Add header_authorization_encrypted column to webhook table", v1_19.AddHeaderAuthorizationEncryptedColWebhook), newMigration(236, "Create secrets table", v1_19.CreateSecretsTable),
// v234 -> v235 newMigration(237, "Drop ForeignReference table", v1_19.DropForeignReferenceTable),
NewMigration("Add package cleanup rule table", v1_19.CreatePackageCleanupRuleTable), newMigration(238, "Add updated unix to LFSMetaObject", v1_19.AddUpdatedUnixToLFSMetaObject),
// v235 -> v236 newMigration(239, "Add scope for access_token", v1_19.AddScopeForAccessTokens),
NewMigration("Add index for access_token", v1_19.AddIndexForAccessToken), newMigration(240, "Add actions tables", v1_19.AddActionsTables),
// v236 -> v237 newMigration(241, "Add card_type column to project table", v1_19.AddCardTypeToProjectTable),
NewMigration("Create secrets table", v1_19.CreateSecretsTable), newMigration(242, "Alter gpg_key_import content TEXT field to MEDIUMTEXT", v1_19.AlterPublicGPGKeyImportContentFieldToMediumText),
// v237 -> v238 newMigration(243, "Add exclusive label", v1_19.AddExclusiveLabel),
NewMigration("Drop ForeignReference table", v1_19.DropForeignReferenceTable),
// v238 -> v239
NewMigration("Add updated unix to LFSMetaObject", v1_19.AddUpdatedUnixToLFSMetaObject),
// v239 -> v240
NewMigration("Add scope for access_token", v1_19.AddScopeForAccessTokens),
// v240 -> v241
NewMigration("Add actions tables", v1_19.AddActionsTables),
// v241 -> v242
NewMigration("Add card_type column to project table", v1_19.AddCardTypeToProjectTable),
// v242 -> v243
NewMigration("Alter gpg_key_import content TEXT field to MEDIUMTEXT", v1_19.AlterPublicGPGKeyImportContentFieldToMediumText),
// v243 -> v244
NewMigration("Add exclusive label", v1_19.AddExclusiveLabel),
// Gitea 1.19.0 ends at v244 // Gitea 1.19.0 ends at database version 244
// v244 -> v245 newMigration(244, "Add NeedApproval to actions tables", v1_20.AddNeedApprovalToActionRun),
NewMigration("Add NeedApproval to actions tables", v1_20.AddNeedApprovalToActionRun), newMigration(245, "Rename Webhook org_id to owner_id", v1_20.RenameWebhookOrgToOwner),
// v245 -> v246 newMigration(246, "Add missed column owner_id for project table", v1_20.AddNewColumnForProject),
NewMigration("Rename Webhook org_id to owner_id", v1_20.RenameWebhookOrgToOwner), newMigration(247, "Fix incorrect project type", v1_20.FixIncorrectProjectType),
// v246 -> v247 newMigration(248, "Add version column to action_runner table", v1_20.AddVersionToActionRunner),
NewMigration("Add missed column owner_id for project table", v1_20.AddNewColumnForProject), newMigration(249, "Improve Action table indices v3", v1_20.ImproveActionTableIndices),
// v247 -> v248 newMigration(250, "Change Container Metadata", v1_20.ChangeContainerMetadataMultiArch),
NewMigration("Fix incorrect project type", v1_20.FixIncorrectProjectType), newMigration(251, "Fix incorrect owner team unit access mode", v1_20.FixIncorrectOwnerTeamUnitAccessMode),
// v248 -> v249 newMigration(252, "Fix incorrect admin team unit access mode", v1_20.FixIncorrectAdminTeamUnitAccessMode),
NewMigration("Add version column to action_runner table", v1_20.AddVersionToActionRunner), newMigration(253, "Fix ExternalTracker and ExternalWiki accessMode in owner and admin team", v1_20.FixExternalTrackerAndExternalWikiAccessModeInOwnerAndAdminTeam),
// v249 -> v250 newMigration(254, "Add ActionTaskOutput table", v1_20.AddActionTaskOutputTable),
NewMigration("Improve Action table indices v3", v1_20.ImproveActionTableIndices), newMigration(255, "Add ArchivedUnix Column", v1_20.AddArchivedUnixToRepository),
// v250 -> v251 newMigration(256, "Add is_internal column to package", v1_20.AddIsInternalColumnToPackage),
NewMigration("Change Container Metadata", v1_20.ChangeContainerMetadataMultiArch), newMigration(257, "Add Actions Artifact table", v1_20.CreateActionArtifactTable),
// v251 -> v252 newMigration(258, "Add PinOrder Column", v1_20.AddPinOrderToIssue),
NewMigration("Fix incorrect owner team unit access mode", v1_20.FixIncorrectOwnerTeamUnitAccessMode), newMigration(259, "Convert scoped access tokens", v1_20.ConvertScopedAccessTokens),
// v252 -> v253
NewMigration("Fix incorrect admin team unit access mode", v1_20.FixIncorrectAdminTeamUnitAccessMode),
// v253 -> v254
NewMigration("Fix ExternalTracker and ExternalWiki accessMode in owner and admin team", v1_20.FixExternalTrackerAndExternalWikiAccessModeInOwnerAndAdminTeam),
// v254 -> v255
NewMigration("Add ActionTaskOutput table", v1_20.AddActionTaskOutputTable),
// v255 -> v256
NewMigration("Add ArchivedUnix Column", v1_20.AddArchivedUnixToRepository),
// v256 -> v257
NewMigration("Add is_internal column to package", v1_20.AddIsInternalColumnToPackage),
// v257 -> v258
NewMigration("Add Actions Artifact table", v1_20.CreateActionArtifactTable),
// v258 -> v259
NewMigration("Add PinOrder Column", v1_20.AddPinOrderToIssue),
// v259 -> v260
NewMigration("Convert scoped access tokens", v1_20.ConvertScopedAccessTokens),
// Gitea 1.20.0 ends at v260 // Gitea 1.20.0 ends at database version 260
// v260 -> v261 newMigration(260, "Drop custom_labels column of action_runner table", v1_21.DropCustomLabelsColumnOfActionRunner),
NewMigration("Drop custom_labels column of action_runner table", v1_21.DropCustomLabelsColumnOfActionRunner), newMigration(261, "Add variable table", v1_21.CreateVariableTable),
// v261 -> v262 newMigration(262, "Add TriggerEvent to action_run table", v1_21.AddTriggerEventToActionRun),
NewMigration("Add variable table", v1_21.CreateVariableTable), newMigration(263, "Add git_size and lfs_size columns to repository table", v1_21.AddGitSizeAndLFSSizeToRepositoryTable),
// v262 -> v263 newMigration(264, "Add branch table", v1_21.AddBranchTable),
NewMigration("Add TriggerEvent to action_run table", v1_21.AddTriggerEventToActionRun), newMigration(265, "Alter Actions Artifact table", v1_21.AlterActionArtifactTable),
// v263 -> v264 newMigration(266, "Reduce commit status", v1_21.ReduceCommitStatus),
NewMigration("Add git_size and lfs_size columns to repository table", v1_21.AddGitSizeAndLFSSizeToRepositoryTable), newMigration(267, "Add action_tasks_version table", v1_21.CreateActionTasksVersionTable),
// v264 -> v265 newMigration(268, "Update Action Ref", v1_21.UpdateActionsRefIndex),
NewMigration("Add branch table", v1_21.AddBranchTable), newMigration(269, "Drop deleted branch table", v1_21.DropDeletedBranchTable),
// v265 -> v266 newMigration(270, "Fix PackageProperty typo", v1_21.FixPackagePropertyTypo),
NewMigration("Alter Actions Artifact table", v1_21.AlterActionArtifactTable), newMigration(271, "Allow archiving labels", v1_21.AddArchivedUnixColumInLabelTable),
// v266 -> v267 newMigration(272, "Add Version to ActionRun table", v1_21.AddVersionToActionRunTable),
NewMigration("Reduce commit status", v1_21.ReduceCommitStatus), newMigration(273, "Add Action Schedule Table", v1_21.AddActionScheduleTable),
// v267 -> v268 newMigration(274, "Add Actions artifacts expiration date", v1_21.AddExpiredUnixColumnInActionArtifactTable),
NewMigration("Add action_tasks_version table", v1_21.CreateActionTasksVersionTable), newMigration(275, "Add ScheduleID for ActionRun", v1_21.AddScheduleIDForActionRun),
// v268 -> v269 newMigration(276, "Add RemoteAddress to mirrors", v1_21.AddRemoteAddressToMirrors),
NewMigration("Update Action Ref", v1_21.UpdateActionsRefIndex), newMigration(277, "Add Index to issue_user.issue_id", v1_21.AddIndexToIssueUserIssueID),
// v269 -> v270 newMigration(278, "Add Index to comment.dependent_issue_id", v1_21.AddIndexToCommentDependentIssueID),
NewMigration("Drop deleted branch table", v1_21.DropDeletedBranchTable), newMigration(279, "Add Index to action.user_id", v1_21.AddIndexToActionUserID),
// v270 -> v271
NewMigration("Fix PackageProperty typo", v1_21.FixPackagePropertyTypo),
// v271 -> v272
NewMigration("Allow archiving labels", v1_21.AddArchivedUnixColumInLabelTable),
// v272 -> v273
NewMigration("Add Version to ActionRun table", v1_21.AddVersionToActionRunTable),
// v273 -> v274
NewMigration("Add Action Schedule Table", v1_21.AddActionScheduleTable),
// v274 -> v275
NewMigration("Add Actions artifacts expiration date", v1_21.AddExpiredUnixColumnInActionArtifactTable),
// v275 -> v276
NewMigration("Add ScheduleID for ActionRun", v1_21.AddScheduleIDForActionRun),
// v276 -> v277
NewMigration("Add RemoteAddress to mirrors", v1_21.AddRemoteAddressToMirrors),
// v277 -> v278
NewMigration("Add Index to issue_user.issue_id", v1_21.AddIndexToIssueUserIssueID),
// v278 -> v279
NewMigration("Add Index to comment.dependent_issue_id", v1_21.AddIndexToCommentDependentIssueID),
// v279 -> v280
NewMigration("Add Index to action.user_id", v1_21.AddIndexToActionUserID),
// Gitea 1.21.0 ends at 280 // Gitea 1.21.0 ends at database version 280
// v280 -> v281 newMigration(280, "Rename user themes", v1_22.RenameUserThemes),
NewMigration("Rename user themes", v1_22.RenameUserThemes), newMigration(281, "Add auth_token table", v1_22.CreateAuthTokenTable),
// v281 -> v282 newMigration(282, "Add Index to pull_auto_merge.doer_id", v1_22.AddIndexToPullAutoMergeDoerID),
NewMigration("Add auth_token table", v1_22.CreateAuthTokenTable), newMigration(283, "Add combined Index to issue_user.uid and issue_id", v1_22.AddCombinedIndexToIssueUser),
// v282 -> v283 newMigration(284, "Add ignore stale approval column on branch table", v1_22.AddIgnoreStaleApprovalsColumnToProtectedBranchTable),
NewMigration("Add Index to pull_auto_merge.doer_id", v1_22.AddIndexToPullAutoMergeDoerID), newMigration(285, "Add PreviousDuration to ActionRun", v1_22.AddPreviousDurationToActionRun),
// v283 -> v284 newMigration(286, "Add support for SHA256 git repositories", v1_22.AdjustDBForSha256),
NewMigration("Add combined Index to issue_user.uid and issue_id", v1_22.AddCombinedIndexToIssueUser), newMigration(287, "Use Slug instead of ID for Badges", v1_22.UseSlugInsteadOfIDForBadges),
// v284 -> v285 newMigration(288, "Add user_blocking table", v1_22.AddUserBlockingTable),
NewMigration("Add ignore stale approval column on branch table", v1_22.AddIgnoreStaleApprovalsColumnToProtectedBranchTable), newMigration(289, "Add default_wiki_branch to repository table", v1_22.AddDefaultWikiBranch),
// v285 -> v286 newMigration(290, "Add PayloadVersion to HookTask", v1_22.AddPayloadVersionToHookTaskTable),
NewMigration("Add PreviousDuration to ActionRun", v1_22.AddPreviousDurationToActionRun), newMigration(291, "Add Index to attachment.comment_id", v1_22.AddCommentIDIndexofAttachment),
// v286 -> v287 newMigration(292, "Ensure every project has exactly one default column - No Op", noopMigration),
NewMigration("Add support for SHA256 git repositories", v1_22.AdjustDBForSha256), newMigration(293, "Ensure every project has exactly one default column", v1_22.CheckProjectColumnsConsistency),
// v287 -> v288
NewMigration("Use Slug instead of ID for Badges", v1_22.UseSlugInsteadOfIDForBadges),
// v288 -> v289
NewMigration("Add user_blocking table", v1_22.AddUserBlockingTable),
// v289 -> v290
NewMigration("Add default_wiki_branch to repository table", v1_22.AddDefaultWikiBranch),
// v290 -> v291
NewMigration("Add PayloadVersion to HookTask", v1_22.AddPayloadVersionToHookTaskTable),
// v291 -> v292
NewMigration("Add Index to attachment.comment_id", v1_22.AddCommentIDIndexofAttachment),
// v292 -> v293
NewMigration("Ensure every project has exactly one default column - No Op", noopMigration),
// v293 -> v294
NewMigration("Ensure every project has exactly one default column", v1_22.CheckProjectColumnsConsistency),
// Gitea 1.22.0-rc0 ends at 294 // Gitea 1.22.0-rc0 ends at database version 294
// v294 -> v295 newMigration(294, "Add unique index for project issue table", v1_22.AddUniqueIndexForProjectIssue),
NewMigration("Add unique index for project issue table", v1_22.AddUniqueIndexForProjectIssue), newMigration(295, "Add commit status summary table", v1_22.AddCommitStatusSummary),
// v295 -> v296 newMigration(296, "Add missing field of commit status summary table", v1_22.AddCommitStatusSummary2),
NewMigration("Add commit status summary table", v1_22.AddCommitStatusSummary), newMigration(297, "Add everyone_access_mode for repo_unit", v1_22.AddRepoUnitEveryoneAccessMode),
// v296 -> v297 newMigration(298, "Drop wrongly created table o_auth2_application", v1_22.DropWronglyCreatedTable),
NewMigration("Add missing field of commit status summary table", v1_22.AddCommitStatusSummary2),
// v297 -> v298
NewMigration("Add everyone_access_mode for repo_unit", v1_22.AddRepoUnitEveryoneAccessMode),
// v298 -> v299
NewMigration("Drop wrongly created table o_auth2_application", v1_22.DropWronglyCreatedTable),
// Gitea 1.22.0-rc1 ends at 299 // Gitea 1.22.0-rc1 ends at migration ID number 298 (database version 299)
// v299 -> v300 newMigration(299, "Add content version to issue and comment table", v1_23.AddContentVersionToIssueAndComment),
NewMigration("Add content version to issue and comment table", v1_23.AddContentVersionToIssueAndComment), newMigration(300, "Add force-push branch protection support", v1_23.AddForcePushBranchProtection),
// v300 -> v301 newMigration(301, "Add skip_secondary_authorization option to oauth2 application table", v1_23.AddSkipSecondaryAuthColumnToOAuth2ApplicationTable),
NewMigration("Add force-push branch protection support", v1_23.AddForcePushBranchProtection), newMigration(302, "Add index to action_task stopped log_expired", v1_23.AddIndexToActionTaskStoppedLogExpired),
// v301 -> v302 newMigration(303, "Add metadata column for comment table", v1_23.AddCommentMetaDataColumn),
NewMigration("Add skip_secondary_authorization option to oauth2 application table", v1_23.AddSkipSecondaryAuthColumnToOAuth2ApplicationTable), newMigration(304, "Add index for release sha1", v1_23.AddIndexForReleaseSha1),
// v302 -> v303 newMigration(305, "Add Repository Licenses", v1_23.AddRepositoryLicenses),
NewMigration("Add index to action_task stopped log_expired", v1_23.AddIndexToActionTaskStoppedLogExpired), newMigration(306, "Add BlockAdminMergeOverride to ProtectedBranch", v1_23.AddBlockAdminMergeOverrideBranchProtection),
// v303 -> v304 }
NewMigration("Add metadata column for comment table", v1_23.AddCommentMetaDataColumn), return preparedMigrations
// v304 -> v305
NewMigration("Add index for release sha1", v1_23.AddIndexForReleaseSha1),
// v305 -> v306
NewMigration("Add Repository Licenses", v1_23.AddRepositoryLicenses),
} }
// GetCurrentDBVersion returns the current db version // GetCurrentDBVersion returns the current db version
@ -622,9 +385,20 @@ func GetCurrentDBVersion(x *xorm.Engine) (int64, error) {
return currentVersion.Version, nil return currentVersion.Version, nil
} }
// ExpectedVersion returns the expected db version func calcDBVersion(migrations []*migration) int64 {
func ExpectedVersion() int64 { dbVer := int64(minDBVersion + len(migrations))
return int64(minDBVersion + len(migrations)) if migrations[0].idNumber != minDBVersion {
panic("migrations should start at minDBVersion")
}
if dbVer != migrations[len(migrations)-1].idNumber+1 {
panic("migrations are not in order")
}
return dbVer
}
// ExpectedDBVersion returns the expected db version
func ExpectedDBVersion() int64 {
return calcDBVersion(prepareMigrationTasks())
} }
// EnsureUpToDate will check if the db is at the correct version // EnsureUpToDate will check if the db is at the correct version
@ -635,24 +409,35 @@ func EnsureUpToDate(x *xorm.Engine) error {
} }
if currentDB < 0 { if currentDB < 0 {
return fmt.Errorf("Database has not been initialized") return fmt.Errorf("database has not been initialized")
} }
if minDBVersion > currentDB { if minDBVersion > currentDB {
return fmt.Errorf("DB version %d (<= %d) is too old for auto-migration. Upgrade to Gitea 1.6.4 first then upgrade to this version", currentDB, minDBVersion) return fmt.Errorf("DB version %d (<= %d) is too old for auto-migration. Upgrade to Gitea 1.6.4 first then upgrade to this version", currentDB, minDBVersion)
} }
expected := ExpectedVersion() expectedDB := ExpectedDBVersion()
if currentDB != expected { if currentDB != expectedDB {
return fmt.Errorf(`Current database version %d is not equal to the expected version %d. Please run "gitea [--config /path/to/app.ini] migrate" to update the database version`, currentDB, expected) return fmt.Errorf(`current database version %d is not equal to the expected version %d. Please run "gitea [--config /path/to/app.ini] migrate" to update the database version`, currentDB, expectedDB)
} }
return nil return nil
} }
func getPendingMigrations(curDBVer int64, migrations []*migration) []*migration {
return migrations[curDBVer-minDBVersion:]
}
func migrationIDNumberToDBVersion(idNumber int64) int64 {
return idNumber + 1
}
// Migrate database to current version // Migrate database to current version
func Migrate(x *xorm.Engine) error { func Migrate(x *xorm.Engine) error {
migrations := prepareMigrationTasks()
maxDBVer := calcDBVersion(migrations)
// Set a new clean default mapper to GonicMapper as that is the default for Gitea. // Set a new clean default mapper to GonicMapper as that is the default for Gitea.
x.SetMapper(names.GonicMapper{}) x.SetMapper(names.GonicMapper{})
if err := x.Sync(new(Version)); err != nil { if err := x.Sync(new(Version)); err != nil {
@ -664,29 +449,29 @@ func Migrate(x *xorm.Engine) error {
if err != nil { if err != nil {
return fmt.Errorf("get: %w", err) return fmt.Errorf("get: %w", err)
} else if !has { } else if !has {
// If the version record does not exist we think // If the version record does not exist, it is a fresh installation, and we can skip all migrations.
// it is a fresh installation and we can skip all migrations. // The XORM model framework will create all tables when initializing.
currentVersion.ID = 0 currentVersion.ID = 0
currentVersion.Version = int64(minDBVersion + len(migrations)) currentVersion.Version = maxDBVer
if _, err = x.InsertOne(currentVersion); err != nil { if _, err = x.InsertOne(currentVersion); err != nil {
return fmt.Errorf("insert: %w", err) return fmt.Errorf("insert: %w", err)
} }
} }
v := currentVersion.Version curDBVer := currentVersion.Version
if minDBVersion > v { // Outdated Gitea database version is not supported
if curDBVer < minDBVersion {
log.Fatal(`Gitea no longer supports auto-migration from your previously installed version. log.Fatal(`Gitea no longer supports auto-migration from your previously installed version.
Please try upgrading to a lower version first (suggested v1.6.4), then upgrade to this version.`) Please try upgrading to a lower version first (suggested v1.6.4), then upgrade to this version.`)
return nil return nil
} }
// Downgrading Gitea's database version is not supported // Downgrading Gitea's database version is not supported
if int(v-minDBVersion) > len(migrations) { if maxDBVer < curDBVer {
msg := fmt.Sprintf("Your database (migration version: %d) is for a newer Gitea, you can not use the newer database for this old Gitea release (%d).", v, minDBVersion+len(migrations)) msg := fmt.Sprintf("Your database (migration version: %d) is for a newer Gitea, you can not use the newer database for this old Gitea release (%d).", curDBVer, maxDBVer)
msg += "\nGitea will exit to keep your database safe and unchanged. Please use the correct Gitea release, do not change the migration version manually (incorrect manual operation may lose data)." msg += "\nGitea will exit to keep your database safe and unchanged. Please use the correct Gitea release, do not change the migration version manually (incorrect manual operation may lose data)."
if !setting.IsProd { if !setting.IsProd {
msg += fmt.Sprintf("\nIf you are in development and really know what you're doing, you can force changing the migration version by executing: UPDATE version SET version=%d WHERE id=1;", minDBVersion+len(migrations)) msg += fmt.Sprintf("\nIf you are in development and really know what you're doing, you can force changing the migration version by executing: UPDATE version SET version=%d WHERE id=1;", maxDBVer)
} }
log.Fatal("Migration Error: %s", msg) log.Fatal("Migration Error: %s", msg)
return nil return nil
@ -700,14 +485,14 @@ Please try upgrading to a lower version first (suggested v1.6.4), then upgrade t
} }
// Migrate // Migrate
for i, m := range migrations[v-minDBVersion:] { for _, m := range getPendingMigrations(curDBVer, migrations) {
log.Info("Migration[%d]: %s", v+int64(i), m.Description()) log.Info("Migration[%d]: %s", m.idNumber, m.description)
// Reset the mapper between each migration - migrations are not supposed to depend on each other // Reset the mapper between each migration - migrations are not supposed to depend on each other
x.SetMapper(names.GonicMapper{}) x.SetMapper(names.GonicMapper{})
if err = m.Migrate(x); err != nil { if err = m.Migrate(x); err != nil {
return fmt.Errorf("migration[%d]: %s failed: %w", v+int64(i), m.Description(), err) return fmt.Errorf("migration[%d]: %s failed: %w", m.idNumber, m.description, err)
} }
currentVersion.Version = v + int64(i) + 1 currentVersion.Version = migrationIDNumberToDBVersion(m.idNumber)
if _, err = x.ID(1).Update(currentVersion); err != nil { if _, err = x.ID(1).Update(currentVersion); err != nil {
return err return err
} }
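For orientation while reading this diff, here is a minimal, illustrative sketch (not part of the change) of how the renumbered helpers fit together inside the migrations package. The current DB version used below (300) is an assumption for the example; the helper and field names follow the diff above.

// Sketch only: relation between migration ID numbers and stored DB versions.
migrations := prepareMigrationTasks()      // entries are built with newMigration(idNumber, description, fn)
expectedDBVer := calcDBVersion(migrations) // last idNumber + 1 == minDBVersion + len(migrations)

curDBVer := int64(300) // assumed value read from the version table
for _, m := range getPendingMigrations(curDBVer, migrations) {
	// applying the migration with idNumber N moves the recorded DB version to N+1
	_ = migrationIDNumberToDBVersion(m.idNumber)
}
_ = expectedDBVer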

View File

@ -0,0 +1,28 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package migrations
import (
"testing"
"code.gitea.io/gitea/modules/test"
"github.com/stretchr/testify/assert"
)
func TestMigrations(t *testing.T) {
defer test.MockVariableValue(&preparedMigrations)()
preparedMigrations = []*migration{
{idNumber: 70},
{idNumber: 71},
}
assert.EqualValues(t, 72, calcDBVersion(preparedMigrations))
assert.EqualValues(t, 72, ExpectedDBVersion())
assert.EqualValues(t, 71, migrationIDNumberToDBVersion(70))
assert.EqualValues(t, []*migration{{idNumber: 70}, {idNumber: 71}}, getPendingMigrations(70, preparedMigrations))
assert.EqualValues(t, []*migration{{idNumber: 71}}, getPendingMigrations(71, preparedMigrations))
assert.EqualValues(t, []*migration{}, getPendingMigrations(72, preparedMigrations))
}

View File

@ -0,0 +1,13 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package v1_23 //nolint
import "xorm.io/xorm"
func AddBlockAdminMergeOverrideBranchProtection(x *xorm.Engine) error {
type ProtectedBranch struct {
BlockAdminMergeOverride bool `xorm:"NOT NULL DEFAULT false"`
}
return x.Sync(new(ProtectedBranch))
}

View File

@ -75,26 +75,27 @@ func ExistPackages(ctx context.Context, opts *PackageSearchOptions) (bool, error
} }
// SearchPackages gets the packages matching the search options // SearchPackages gets the packages matching the search options
func SearchPackages(ctx context.Context, opts *PackageSearchOptions, iter func(*packages.PackageFileDescriptor)) error { func SearchPackages(ctx context.Context, opts *PackageSearchOptions) ([]*packages.PackageFileDescriptor, error) {
return db.GetEngine(ctx). var pkgFiles []*packages.PackageFile
err := db.GetEngine(ctx).
Table("package_file"). Table("package_file").
Select("package_file.*"). Select("package_file.*").
Join("INNER", "package_version", "package_version.id = package_file.version_id"). Join("INNER", "package_version", "package_version.id = package_file.version_id").
Join("INNER", "package", "package.id = package_version.package_id"). Join("INNER", "package", "package.id = package_version.package_id").
Where(opts.toCond()). Where(opts.toCond()).
Asc("package.lower_name", "package_version.created_unix"). Asc("package.lower_name", "package_version.created_unix").Find(&pkgFiles)
Iterate(new(packages.PackageFile), func(_ int, bean any) error { if err != nil {
pf := bean.(*packages.PackageFile) return nil, err
}
pfd, err := packages.GetPackageFileDescriptor(ctx, pf) pfds := make([]*packages.PackageFileDescriptor, 0, len(pkgFiles))
if err != nil { for _, pf := range pkgFiles {
return err pfd, err := packages.GetPackageFileDescriptor(ctx, pf)
} if err != nil {
return nil, err
iter(pfd) }
pfds = append(pfds, pfd)
return nil }
}) return pfds, nil
} }
// GetDistributions gets all available distributions // GetDistributions gets all available distributions
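A hedged, caller-side sketch of what the new SearchPackages signature means in practice; ctx, opts and the loop body are placeholders rather than code from this commit.

// Before: matches were streamed through a callback passed as the third argument.
// After: the matching package file descriptors are returned as a slice.
pfds, err := SearchPackages(ctx, opts)
if err != nil {
	return err
}
for _, pfd := range pfds {
	// use pfd, e.g. write its metadata into the repository index
}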

View File

@ -25,7 +25,7 @@ func GetXORMEngine(engine ...*xorm.Engine) (x *xorm.Engine) {
if len(engine) == 1 { if len(engine) == 1 {
return engine[0] return engine[0]
} }
return db.DefaultContext.(*db.Context).Engine().(*xorm.Engine) return db.GetEngine(db.DefaultContext).(*xorm.Engine)
} }
// InitFixtures initialize test fixtures for a test database // InitFixtures initialize test fixtures for a test database

View File

@ -50,25 +50,35 @@ func (repo *Repository) readTreeToIndex(id ObjectID, indexFilename ...string) er
} }
// ReadTreeToTemporaryIndex reads a treeish to a temporary index file // ReadTreeToTemporaryIndex reads a treeish to a temporary index file
func (repo *Repository) ReadTreeToTemporaryIndex(treeish string) (filename, tmpDir string, cancel context.CancelFunc, err error) { func (repo *Repository) ReadTreeToTemporaryIndex(treeish string) (tmpIndexFilename, tmpDir string, cancel context.CancelFunc, err error) {
tmpDir, err = os.MkdirTemp("", "index") defer func() {
if err != nil { // if an error happens and there is a cancel function, do the cleanup
return filename, tmpDir, cancel, err if err != nil && cancel != nil {
} cancel()
cancel = nil
}
}()
filename = filepath.Join(tmpDir, ".tmp-index") removeDirFn := func(dir string) func() { // we can't use the named return value "tmpDir" directly because it is empty when an error occurs
cancel = func() { return func() {
err := util.RemoveAll(tmpDir) if err := util.RemoveAll(dir); err != nil {
if err != nil { log.Error("failed to remove tmp index dir: %v", err)
log.Error("failed to remove tmp index file: %v", err) }
} }
} }
err = repo.ReadTreeToIndex(treeish, filename)
tmpDir, err = os.MkdirTemp("", "index")
if err != nil { if err != nil {
defer cancel() return "", "", nil, err
return "", "", func() {}, err
} }
return filename, tmpDir, cancel, err
tmpIndexFilename = filepath.Join(tmpDir, ".tmp-index")
cancel = removeDirFn(tmpDir)
err = repo.ReadTreeToIndex(treeish, tmpIndexFilename)
if err != nil {
return "", "", cancel, err
}
return tmpIndexFilename, tmpDir, cancel, err
} }
// EmptyIndex empties the index // EmptyIndex empties the index
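A sketch of the caller pattern this refactor is aiming for (repo and the treeish are placeholders). Because of the deferred cleanup added above, the returned cancel is only non-nil on success, so callers check the error before deferring it.

tmpIndexFilename, _, cancel, err := repo.ReadTreeToTemporaryIndex("HEAD")
if err != nil {
	return err // on failure the function has already removed its temporary directory
}
defer cancel() // removes the temporary directory containing the index file
// run git commands against the temporary index, e.g. with GIT_INDEX_FILE=tmpIndexFilename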

View File

@ -16,10 +16,9 @@ import (
"code.gitea.io/gitea/modules/json" "code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/proxy" "code.gitea.io/gitea/modules/proxy"
"code.gitea.io/gitea/modules/setting"
) )
const httpBatchSize = 20
// HTTPClient is used to communicate with the LFS server // HTTPClient is used to communicate with the LFS server
// https://github.com/git-lfs/git-lfs/blob/main/docs/api/batch.md // https://github.com/git-lfs/git-lfs/blob/main/docs/api/batch.md
type HTTPClient struct { type HTTPClient struct {
@ -30,7 +29,7 @@ type HTTPClient struct {
// BatchSize returns the preferred size of batches to process // BatchSize returns the preferred size of batches to process
func (c *HTTPClient) BatchSize() int { func (c *HTTPClient) BatchSize() int {
return httpBatchSize return setting.LFSClient.BatchSize
} }
func newHTTPClient(endpoint *url.URL, httpTransport *http.Transport) *HTTPClient { func newHTTPClient(endpoint *url.URL, httpTransport *http.Transport) *HTTPClient {
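A minimal sketch of how mirror code might honour the now-configurable client batch size; the pointers slice and the per-batch request are placeholders, not actual Gitea calls.

batchSize := client.BatchSize() // now read from [lfs_client].BATCH_SIZE, defaulting to 20
for len(pointers) > 0 {
	n := min(batchSize, len(pointers)) // min builtin, Go 1.21+
	batch, rest := pointers[:n], pointers[n:]
	pointers = rest
	_ = batch // issue one batch request for these pointers against the upstream endpoint
}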

View File

@ -10,7 +10,10 @@ import (
"code.gitea.io/gitea/modules/generate" "code.gitea.io/gitea/modules/generate"
) )
// LFS represents the configuration for Git LFS // LFS represents the server-side configuration for Git LFS.
// Ideally these options should be in a section like "[lfs_server]",
// but they are kept in the "[server]" section for historical reasons.
// They could be refactored in the future while keeping backwards compatibility.
var LFS = struct { var LFS = struct {
StartServer bool `ini:"LFS_START_SERVER"` StartServer bool `ini:"LFS_START_SERVER"`
AllowPureSSH bool `ini:"LFS_ALLOW_PURE_SSH"` AllowPureSSH bool `ini:"LFS_ALLOW_PURE_SSH"`
@ -18,15 +21,21 @@ var LFS = struct {
HTTPAuthExpiry time.Duration `ini:"LFS_HTTP_AUTH_EXPIRY"` HTTPAuthExpiry time.Duration `ini:"LFS_HTTP_AUTH_EXPIRY"`
MaxFileSize int64 `ini:"LFS_MAX_FILE_SIZE"` MaxFileSize int64 `ini:"LFS_MAX_FILE_SIZE"`
LocksPagingNum int `ini:"LFS_LOCKS_PAGING_NUM"` LocksPagingNum int `ini:"LFS_LOCKS_PAGING_NUM"`
MaxBatchSize int `ini:"LFS_MAX_BATCH_SIZE"`
Storage *Storage Storage *Storage
}{} }{}
// LFSClient represents the configuration for Gitea's LFS clients, for example when mirroring an upstream Git LFS endpoint
var LFSClient = struct {
BatchSize int `ini:"BATCH_SIZE"`
}{}
func loadLFSFrom(rootCfg ConfigProvider) error { func loadLFSFrom(rootCfg ConfigProvider) error {
mustMapSetting(rootCfg, "lfs_client", &LFSClient)
mustMapSetting(rootCfg, "server", &LFS)
sec := rootCfg.Section("server") sec := rootCfg.Section("server")
if err := sec.MapTo(&LFS); err != nil {
return fmt.Errorf("failed to map LFS settings: %v", err)
}
lfsSec, _ := rootCfg.GetSection("lfs") lfsSec, _ := rootCfg.GetSection("lfs")
@ -53,6 +62,10 @@ func loadLFSFrom(rootCfg ConfigProvider) error {
LFS.LocksPagingNum = 50 LFS.LocksPagingNum = 50
} }
if LFSClient.BatchSize < 1 {
LFSClient.BatchSize = 20
}
LFS.HTTPAuthExpiry = sec.Key("LFS_HTTP_AUTH_EXPIRY").MustDuration(24 * time.Hour) LFS.HTTPAuthExpiry = sec.Key("LFS_HTTP_AUTH_EXPIRY").MustDuration(24 * time.Hour)
if !LFS.StartServer || !InstallLock { if !LFS.StartServer || !InstallLock {

View File

@ -99,3 +99,19 @@ STORAGE_TYPE = minio
assert.EqualValues(t, "gitea", LFS.Storage.MinioConfig.Bucket) assert.EqualValues(t, "gitea", LFS.Storage.MinioConfig.Bucket)
assert.EqualValues(t, "lfs/", LFS.Storage.MinioConfig.BasePath) assert.EqualValues(t, "lfs/", LFS.Storage.MinioConfig.BasePath)
} }
func Test_LFSClientServerConfigs(t *testing.T) {
iniStr := `
[server]
LFS_MAX_BATCH_SIZE = 100
[lfs_client]
# will default to 20
BATCH_SIZE = 0
`
cfg, err := NewConfigProviderFromData(iniStr)
assert.NoError(t, err)
assert.NoError(t, loadLFSFrom(cfg))
assert.EqualValues(t, 100, LFS.MaxBatchSize)
assert.EqualValues(t, 20, LFSClient.BatchSize)
}

View File

@ -52,6 +52,7 @@ type BranchProtection struct {
RequireSignedCommits bool `json:"require_signed_commits"` RequireSignedCommits bool `json:"require_signed_commits"`
ProtectedFilePatterns string `json:"protected_file_patterns"` ProtectedFilePatterns string `json:"protected_file_patterns"`
UnprotectedFilePatterns string `json:"unprotected_file_patterns"` UnprotectedFilePatterns string `json:"unprotected_file_patterns"`
BlockAdminMergeOverride bool `json:"block_admin_merge_override"`
// swagger:strfmt date-time // swagger:strfmt date-time
Created time.Time `json:"created_at"` Created time.Time `json:"created_at"`
// swagger:strfmt date-time // swagger:strfmt date-time
@ -90,6 +91,7 @@ type CreateBranchProtectionOption struct {
RequireSignedCommits bool `json:"require_signed_commits"` RequireSignedCommits bool `json:"require_signed_commits"`
ProtectedFilePatterns string `json:"protected_file_patterns"` ProtectedFilePatterns string `json:"protected_file_patterns"`
UnprotectedFilePatterns string `json:"unprotected_file_patterns"` UnprotectedFilePatterns string `json:"unprotected_file_patterns"`
BlockAdminMergeOverride bool `json:"block_admin_merge_override"`
} }
// EditBranchProtectionOption options for editing a branch protection // EditBranchProtectionOption options for editing a branch protection
@ -121,4 +123,5 @@ type EditBranchProtectionOption struct {
RequireSignedCommits *bool `json:"require_signed_commits"` RequireSignedCommits *bool `json:"require_signed_commits"`
ProtectedFilePatterns *string `json:"protected_file_patterns"` ProtectedFilePatterns *string `json:"protected_file_patterns"`
UnprotectedFilePatterns *string `json:"unprotected_file_patterns"` UnprotectedFilePatterns *string `json:"unprotected_file_patterns"`
BlockAdminMergeOverride *bool `json:"block_admin_merge_override"`
} }
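A short sketch of how the new field is used through the API option types above; values are illustrative. On create it is a plain bool (omitted means false), while on edit it is a pointer so that omitting it leaves the current setting unchanged.

blockOverride := true
createOpt := CreateBranchProtectionOption{ // other fields omitted for brevity
	BlockAdminMergeOverride: true,
}
editOpt := EditBranchProtectionOption{
	BlockAdminMergeOverride: &blockOverride, // nil (field omitted in JSON) keeps the existing value
}
_, _ = createOpt, editOpt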

View File

@ -2461,6 +2461,8 @@ settings.block_on_official_review_requests = Block merge on official review requ
settings.block_on_official_review_requests_desc = Merging will not be possible when it has official review requests, even if there are enough approvals. settings.block_on_official_review_requests_desc = Merging will not be possible when it has official review requests, even if there are enough approvals.
settings.block_outdated_branch = Block merge if pull request is outdated settings.block_outdated_branch = Block merge if pull request is outdated
settings.block_outdated_branch_desc = Merging will not be possible when head branch is behind base branch. settings.block_outdated_branch_desc = Merging will not be possible when head branch is behind base branch.
settings.block_admin_merge_override = Administrators must follow branch protection rules
settings.block_admin_merge_override_desc = Administrators must follow branch protection rules and cannot circumvent them.
settings.default_branch_desc = Select a default repository branch for pull requests and code commits: settings.default_branch_desc = Select a default repository branch for pull requests and code commits:
settings.merge_style_desc = Merge Styles settings.merge_style_desc = Merge Styles
settings.default_merge_style_desc = Default Merge Style settings.default_merge_style_desc = Default Merge Style

package-lock.json generated
View File

@ -11,7 +11,7 @@
"@citation-js/plugin-software-formats": "0.6.1", "@citation-js/plugin-software-formats": "0.6.1",
"@github/markdown-toolbar-element": "2.2.3", "@github/markdown-toolbar-element": "2.2.3",
"@github/relative-time-element": "4.4.3", "@github/relative-time-element": "4.4.3",
"@github/text-expander-element": "2.7.1", "@github/text-expander-element": "2.8.0",
"@mcaptcha/vanilla-glue": "0.1.0-alpha-3", "@mcaptcha/vanilla-glue": "0.1.0-alpha-3",
"@primer/octicons": "19.11.0", "@primer/octicons": "19.11.0",
"@silverwind/vue3-calendar-heatmap": "2.0.6", "@silverwind/vue3-calendar-heatmap": "2.0.6",
@ -40,13 +40,13 @@
"monaco-editor": "0.51.0", "monaco-editor": "0.51.0",
"monaco-editor-webpack-plugin": "7.1.0", "monaco-editor-webpack-plugin": "7.1.0",
"pdfobject": "2.3.0", "pdfobject": "2.3.0",
"perfect-debounce": "1.0.0",
"postcss": "8.4.41", "postcss": "8.4.41",
"postcss-loader": "8.1.1", "postcss-loader": "8.1.1",
"postcss-nesting": "13.0.0", "postcss-nesting": "13.0.0",
"sortablejs": "1.15.2", "sortablejs": "1.15.2",
"swagger-ui-dist": "5.17.14", "swagger-ui-dist": "5.17.14",
"tailwindcss": "3.4.10", "tailwindcss": "3.4.10",
"temporal-polyfill": "0.2.5",
"throttle-debounce": "5.0.2", "throttle-debounce": "5.0.2",
"tinycolor2": "1.6.0", "tinycolor2": "1.6.0",
"tippy.js": "6.3.7", "tippy.js": "6.3.7",
@ -55,7 +55,7 @@
"typescript": "5.5.4", "typescript": "5.5.4",
"uint8-to-base64": "0.2.0", "uint8-to-base64": "0.2.0",
"vanilla-colorful": "0.7.2", "vanilla-colorful": "0.7.2",
"vue": "3.4.38", "vue": "3.5.12",
"vue-bar-graph": "2.1.0", "vue-bar-graph": "2.1.0",
"vue-chartjs": "5.3.1", "vue-chartjs": "5.3.1",
"vue-loader": "17.4.2", "vue-loader": "17.4.2",
@ -3115,13 +3115,13 @@
"license": "MIT" "license": "MIT"
}, },
"node_modules/@github/text-expander-element": { "node_modules/@github/text-expander-element": {
"version": "2.7.1", "version": "2.8.0",
"resolved": "https://registry.npmjs.org/@github/text-expander-element/-/text-expander-element-2.7.1.tgz", "resolved": "https://registry.npmjs.org/@github/text-expander-element/-/text-expander-element-2.8.0.tgz",
"integrity": "sha512-CWxfYxJRkeWVCUhJveproLs6pHsPrWtK8TsjL8ByYVcSCs8CJmNzF8b7ZawrUgfai0F2jb4aIdw2FoBTykj9XA==", "integrity": "sha512-kkS2rZ/CG8HGKblpLDQ8vcK/K7l/Jsvzi/N4ovwPAsFSOImcIbJh2MgCv9tzqE3wAm/qXlscvh3Ms4Hh1vtZvw==",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@github/combobox-nav": "^2.0.2", "@github/combobox-nav": "^2.0.2",
"dom-input-range": "^1.1.6" "dom-input-range": "^1.2.0"
} }
}, },
"node_modules/@humanwhocodes/config-array": { "node_modules/@humanwhocodes/config-array": {
@ -3618,224 +3618,208 @@
"license": "MIT" "license": "MIT"
}, },
"node_modules/@rollup/rollup-android-arm-eabi": { "node_modules/@rollup/rollup-android-arm-eabi": {
"version": "4.21.1", "version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.21.1.tgz", "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.24.0.tgz",
"integrity": "sha512-2thheikVEuU7ZxFXubPDOtspKn1x0yqaYQwvALVtEcvFhMifPADBrgRPyHV0TF3b+9BgvgjgagVyvA/UqPZHmg==", "integrity": "sha512-Q6HJd7Y6xdB48x8ZNVDOqsbh2uByBhgK8PiQgPhwkIw/HC/YX5Ghq2mQY5sRMZWHb3VsFkWooUVOZHKr7DmDIA==",
"cpu": [ "cpu": [
"arm" "arm"
], ],
"dev": true, "dev": true,
"license": "MIT",
"optional": true, "optional": true,
"os": [ "os": [
"android" "android"
] ]
}, },
"node_modules/@rollup/rollup-android-arm64": { "node_modules/@rollup/rollup-android-arm64": {
"version": "4.21.1", "version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.21.1.tgz", "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.24.0.tgz",
"integrity": "sha512-t1lLYn4V9WgnIFHXy1d2Di/7gyzBWS8G5pQSXdZqfrdCGTwi1VasRMSS81DTYb+avDs/Zz4A6dzERki5oRYz1g==", "integrity": "sha512-ijLnS1qFId8xhKjT81uBHuuJp2lU4x2yxa4ctFPtG+MqEE6+C5f/+X/bStmxapgmwLwiL3ih122xv8kVARNAZA==",
"cpu": [ "cpu": [
"arm64" "arm64"
], ],
"dev": true, "dev": true,
"license": "MIT",
"optional": true, "optional": true,
"os": [ "os": [
"android" "android"
] ]
}, },
"node_modules/@rollup/rollup-darwin-arm64": { "node_modules/@rollup/rollup-darwin-arm64": {
"version": "4.21.1", "version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.21.1.tgz", "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.24.0.tgz",
"integrity": "sha512-AH/wNWSEEHvs6t4iJ3RANxW5ZCK3fUnmf0gyMxWCesY1AlUj8jY7GC+rQE4wd3gwmZ9XDOpL0kcFnCjtN7FXlA==", "integrity": "sha512-bIv+X9xeSs1XCk6DVvkO+S/z8/2AMt/2lMqdQbMrmVpgFvXlmde9mLcbQpztXm1tajC3raFDqegsH18HQPMYtA==",
"cpu": [ "cpu": [
"arm64" "arm64"
], ],
"dev": true, "dev": true,
"license": "MIT",
"optional": true, "optional": true,
"os": [ "os": [
"darwin" "darwin"
] ]
}, },
"node_modules/@rollup/rollup-darwin-x64": { "node_modules/@rollup/rollup-darwin-x64": {
"version": "4.21.1", "version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.21.1.tgz", "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.24.0.tgz",
"integrity": "sha512-dO0BIz/+5ZdkLZrVgQrDdW7m2RkrLwYTh2YMFG9IpBtlC1x1NPNSXkfczhZieOlOLEqgXOFH3wYHB7PmBtf+Bg==", "integrity": "sha512-X6/nOwoFN7RT2svEQWUsW/5C/fYMBe4fnLK9DQk4SX4mgVBiTA9h64kjUYPvGQ0F/9xwJ5U5UfTbl6BEjaQdBQ==",
"cpu": [ "cpu": [
"x64" "x64"
], ],
"dev": true, "dev": true,
"license": "MIT",
"optional": true, "optional": true,
"os": [ "os": [
"darwin" "darwin"
] ]
}, },
"node_modules/@rollup/rollup-linux-arm-gnueabihf": { "node_modules/@rollup/rollup-linux-arm-gnueabihf": {
"version": "4.21.1", "version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.21.1.tgz", "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.24.0.tgz",
"integrity": "sha512-sWWgdQ1fq+XKrlda8PsMCfut8caFwZBmhYeoehJ05FdI0YZXk6ZyUjWLrIgbR/VgiGycrFKMMgp7eJ69HOF2pQ==", "integrity": "sha512-0KXvIJQMOImLCVCz9uvvdPgfyWo93aHHp8ui3FrtOP57svqrF/roSSR5pjqL2hcMp0ljeGlU4q9o/rQaAQ3AYA==",
"cpu": [ "cpu": [
"arm" "arm"
], ],
"dev": true, "dev": true,
"license": "MIT",
"optional": true, "optional": true,
"os": [ "os": [
"linux" "linux"
] ]
}, },
"node_modules/@rollup/rollup-linux-arm-musleabihf": { "node_modules/@rollup/rollup-linux-arm-musleabihf": {
"version": "4.21.1", "version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.21.1.tgz", "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.24.0.tgz",
"integrity": "sha512-9OIiSuj5EsYQlmwhmFRA0LRO0dRRjdCVZA3hnmZe1rEwRk11Jy3ECGGq3a7RrVEZ0/pCsYWx8jG3IvcrJ6RCew==", "integrity": "sha512-it2BW6kKFVh8xk/BnHfakEeoLPv8STIISekpoF+nBgWM4d55CZKc7T4Dx1pEbTnYm/xEKMgy1MNtYuoA8RFIWw==",
"cpu": [ "cpu": [
"arm" "arm"
], ],
"dev": true, "dev": true,
"license": "MIT",
"optional": true, "optional": true,
"os": [ "os": [
"linux" "linux"
] ]
}, },
"node_modules/@rollup/rollup-linux-arm64-gnu": { "node_modules/@rollup/rollup-linux-arm64-gnu": {
"version": "4.21.1", "version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.21.1.tgz", "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.24.0.tgz",
"integrity": "sha512-0kuAkRK4MeIUbzQYu63NrJmfoUVicajoRAL1bpwdYIYRcs57iyIV9NLcuyDyDXE2GiZCL4uhKSYAnyWpjZkWow==", "integrity": "sha512-i0xTLXjqap2eRfulFVlSnM5dEbTVque/3Pi4g2y7cxrs7+a9De42z4XxKLYJ7+OhE3IgxvfQM7vQc43bwTgPwA==",
"cpu": [ "cpu": [
"arm64" "arm64"
], ],
"dev": true, "dev": true,
"license": "MIT",
"optional": true, "optional": true,
"os": [ "os": [
"linux" "linux"
] ]
}, },
"node_modules/@rollup/rollup-linux-arm64-musl": { "node_modules/@rollup/rollup-linux-arm64-musl": {
"version": "4.21.1", "version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.21.1.tgz", "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.24.0.tgz",
"integrity": "sha512-/6dYC9fZtfEY0vozpc5bx1RP4VrtEOhNQGb0HwvYNwXD1BBbwQ5cKIbUVVU7G2d5WRE90NfB922elN8ASXAJEA==", "integrity": "sha512-9E6MKUJhDuDh604Qco5yP/3qn3y7SLXYuiC0Rpr89aMScS2UAmK1wHP2b7KAa1nSjWJc/f/Lc0Wl1L47qjiyQw==",
"cpu": [ "cpu": [
"arm64" "arm64"
], ],
"dev": true, "dev": true,
"license": "MIT",
"optional": true, "optional": true,
"os": [ "os": [
"linux" "linux"
] ]
}, },
"node_modules/@rollup/rollup-linux-powerpc64le-gnu": { "node_modules/@rollup/rollup-linux-powerpc64le-gnu": {
"version": "4.21.1", "version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.21.1.tgz", "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.24.0.tgz",
"integrity": "sha512-ltUWy+sHeAh3YZ91NUsV4Xg3uBXAlscQe8ZOXRCVAKLsivGuJsrkawYPUEyCV3DYa9urgJugMLn8Z3Z/6CeyRQ==", "integrity": "sha512-2XFFPJ2XMEiF5Zi2EBf4h73oR1V/lycirxZxHZNc93SqDN/IWhYYSYj8I9381ikUFXZrz2v7r2tOVk2NBwxrWw==",
"cpu": [ "cpu": [
"ppc64" "ppc64"
], ],
"dev": true, "dev": true,
"license": "MIT",
"optional": true, "optional": true,
"os": [ "os": [
"linux" "linux"
] ]
}, },
"node_modules/@rollup/rollup-linux-riscv64-gnu": { "node_modules/@rollup/rollup-linux-riscv64-gnu": {
"version": "4.21.1", "version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.21.1.tgz", "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.24.0.tgz",
"integrity": "sha512-BggMndzI7Tlv4/abrgLwa/dxNEMn2gC61DCLrTzw8LkpSKel4o+O+gtjbnkevZ18SKkeN3ihRGPuBxjaetWzWg==", "integrity": "sha512-M3Dg4hlwuntUCdzU7KjYqbbd+BLq3JMAOhCKdBE3TcMGMZbKkDdJ5ivNdehOssMCIokNHFOsv7DO4rlEOfyKpg==",
"cpu": [ "cpu": [
"riscv64" "riscv64"
], ],
"dev": true, "dev": true,
"license": "MIT",
"optional": true, "optional": true,
"os": [ "os": [
"linux" "linux"
] ]
}, },
"node_modules/@rollup/rollup-linux-s390x-gnu": { "node_modules/@rollup/rollup-linux-s390x-gnu": {
"version": "4.21.1", "version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.21.1.tgz", "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.24.0.tgz",
"integrity": "sha512-z/9rtlGd/OMv+gb1mNSjElasMf9yXusAxnRDrBaYB+eS1shFm6/4/xDH1SAISO5729fFKUkJ88TkGPRUh8WSAA==", "integrity": "sha512-mjBaoo4ocxJppTorZVKWFpy1bfFj9FeCMJqzlMQGjpNPY9JwQi7OuS1axzNIk0nMX6jSgy6ZURDZ2w0QW6D56g==",
"cpu": [ "cpu": [
"s390x" "s390x"
], ],
"dev": true, "dev": true,
"license": "MIT",
"optional": true, "optional": true,
"os": [ "os": [
"linux" "linux"
] ]
}, },
"node_modules/@rollup/rollup-linux-x64-gnu": { "node_modules/@rollup/rollup-linux-x64-gnu": {
"version": "4.21.1", "version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.21.1.tgz", "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.24.0.tgz",
"integrity": "sha512-kXQVcWqDcDKw0S2E0TmhlTLlUgAmMVqPrJZR+KpH/1ZaZhLSl23GZpQVmawBQGVhyP5WXIsIQ/zqbDBBYmxm5w==", "integrity": "sha512-ZXFk7M72R0YYFN5q13niV0B7G8/5dcQ9JDp8keJSfr3GoZeXEoMHP/HlvqROA3OMbMdfr19IjCeNAnPUG93b6A==",
"cpu": [ "cpu": [
"x64" "x64"
], ],
"dev": true, "dev": true,
"license": "MIT",
"optional": true, "optional": true,
"os": [ "os": [
"linux" "linux"
] ]
}, },
"node_modules/@rollup/rollup-linux-x64-musl": { "node_modules/@rollup/rollup-linux-x64-musl": {
"version": "4.21.1", "version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.21.1.tgz", "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.24.0.tgz",
"integrity": "sha512-CbFv/WMQsSdl+bpX6rVbzR4kAjSSBuDgCqb1l4J68UYsQNalz5wOqLGYj4ZI0thGpyX5kc+LLZ9CL+kpqDovZA==", "integrity": "sha512-w1i+L7kAXZNdYl+vFvzSZy8Y1arS7vMgIy8wusXJzRrPyof5LAb02KGr1PD2EkRcl73kHulIID0M501lN+vobQ==",
"cpu": [ "cpu": [
"x64" "x64"
], ],
"dev": true, "dev": true,
"license": "MIT",
"optional": true, "optional": true,
"os": [ "os": [
"linux" "linux"
] ]
}, },
"node_modules/@rollup/rollup-win32-arm64-msvc": { "node_modules/@rollup/rollup-win32-arm64-msvc": {
"version": "4.21.1", "version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.21.1.tgz", "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.24.0.tgz",
"integrity": "sha512-3Q3brDgA86gHXWHklrwdREKIrIbxC0ZgU8lwpj0eEKGBQH+31uPqr0P2v11pn0tSIxHvcdOWxa4j+YvLNx1i6g==", "integrity": "sha512-VXBrnPWgBpVDCVY6XF3LEW0pOU51KbaHhccHw6AS6vBWIC60eqsH19DAeeObl+g8nKAz04QFdl/Cefta0xQtUQ==",
"cpu": [ "cpu": [
"arm64" "arm64"
], ],
"dev": true, "dev": true,
"license": "MIT",
"optional": true, "optional": true,
"os": [ "os": [
"win32" "win32"
] ]
}, },
"node_modules/@rollup/rollup-win32-ia32-msvc": { "node_modules/@rollup/rollup-win32-ia32-msvc": {
"version": "4.21.1", "version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.21.1.tgz", "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.24.0.tgz",
"integrity": "sha512-tNg+jJcKR3Uwe4L0/wY3Ro0H+u3nrb04+tcq1GSYzBEmKLeOQF2emk1whxlzNqb6MMrQ2JOcQEpuuiPLyRcSIw==", "integrity": "sha512-xrNcGDU0OxVcPTH/8n/ShH4UevZxKIO6HJFK0e15XItZP2UcaiLFd5kiX7hJnqCbSztUF8Qot+JWBC/QXRPYWQ==",
"cpu": [ "cpu": [
"ia32" "ia32"
], ],
"dev": true, "dev": true,
"license": "MIT",
"optional": true, "optional": true,
"os": [ "os": [
"win32" "win32"
] ]
}, },
"node_modules/@rollup/rollup-win32-x64-msvc": { "node_modules/@rollup/rollup-win32-x64-msvc": {
"version": "4.21.1", "version": "4.24.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.21.1.tgz", "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.24.0.tgz",
"integrity": "sha512-xGiIH95H1zU7naUyTKEyOA/I0aexNMUdO9qRv0bLKN3qu25bBdrxZHqA3PTJ24YNN/GdMzG4xkDcd/GvjuhfLg==", "integrity": "sha512-fbMkAF7fufku0N2dE5TBXcNlg0pt0cJue4xBRE2Qc5Vqikxr4VCgKj/ht6SMdFcOacVA9rqF70APJ8RN/4vMJw==",
"cpu": [ "cpu": [
"x64" "x64"
], ],
"dev": true, "dev": true,
"license": "MIT",
"optional": true, "optional": true,
"os": [ "os": [
"win32" "win32"
@ -5202,113 +5186,130 @@
} }
}, },
"node_modules/@vue/compiler-core": { "node_modules/@vue/compiler-core": {
"version": "3.4.38", "version": "3.5.12",
"resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.4.38.tgz", "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.5.12.tgz",
"integrity": "sha512-8IQOTCWnLFqfHzOGm9+P8OPSEDukgg3Huc92qSG49if/xI2SAwLHQO2qaPQbjCWPBcQoO1WYfXfTACUrWV3c5A==", "integrity": "sha512-ISyBTRMmMYagUxhcpyEH0hpXRd/KqDU4ymofPgl2XAkY9ZhQ+h0ovEZJIiPop13UmR/54oA2cgMDjgroRelaEw==",
"license": "MIT",
"dependencies": { "dependencies": {
"@babel/parser": "^7.24.7", "@babel/parser": "^7.25.3",
"@vue/shared": "3.4.38", "@vue/shared": "3.5.12",
"entities": "^4.5.0", "entities": "^4.5.0",
"estree-walker": "^2.0.2", "estree-walker": "^2.0.2",
"source-map-js": "^1.2.0" "source-map-js": "^1.2.0"
} }
}, },
"node_modules/@vue/compiler-dom": { "node_modules/@vue/compiler-dom": {
"version": "3.4.38", "version": "3.5.12",
"resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.4.38.tgz", "resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.5.12.tgz",
"integrity": "sha512-Osc/c7ABsHXTsETLgykcOwIxFktHfGSUDkb05V61rocEfsFDcjDLH/IHJSNJP+/Sv9KeN2Lx1V6McZzlSb9EhQ==", "integrity": "sha512-9G6PbJ03uwxLHKQ3P42cMTi85lDRvGLB2rSGOiQqtXELat6uI4n8cNz9yjfVHRPIu+MsK6TE418Giruvgptckg==",
"license": "MIT",
"dependencies": { "dependencies": {
"@vue/compiler-core": "3.4.38", "@vue/compiler-core": "3.5.12",
"@vue/shared": "3.4.38" "@vue/shared": "3.5.12"
} }
}, },
"node_modules/@vue/compiler-sfc": { "node_modules/@vue/compiler-sfc": {
"version": "3.4.38", "version": "3.5.12",
"resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.4.38.tgz", "resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.5.12.tgz",
"integrity": "sha512-s5QfZ+9PzPh3T5H4hsQDJtI8x7zdJaew/dCGgqZ2630XdzaZ3AD8xGZfBqpT8oaD/p2eedd+pL8tD5vvt5ZYJQ==", "integrity": "sha512-2k973OGo2JuAa5+ZlekuQJtitI5CgLMOwgl94BzMCsKZCX/xiqzJYzapl4opFogKHqwJk34vfsaKpfEhd1k5nw==",
"license": "MIT",
"dependencies": { "dependencies": {
"@babel/parser": "^7.24.7", "@babel/parser": "^7.25.3",
"@vue/compiler-core": "3.4.38", "@vue/compiler-core": "3.5.12",
"@vue/compiler-dom": "3.4.38", "@vue/compiler-dom": "3.5.12",
"@vue/compiler-ssr": "3.4.38", "@vue/compiler-ssr": "3.5.12",
"@vue/shared": "3.4.38", "@vue/shared": "3.5.12",
"estree-walker": "^2.0.2", "estree-walker": "^2.0.2",
"magic-string": "^0.30.10", "magic-string": "^0.30.11",
"postcss": "^8.4.40", "postcss": "^8.4.47",
"source-map-js": "^1.2.0" "source-map-js": "^1.2.0"
} }
}, },
"node_modules/@vue/compiler-sfc/node_modules/magic-string": { "node_modules/@vue/compiler-sfc/node_modules/magic-string": {
"version": "0.30.11", "version": "0.30.12",
"resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.11.tgz", "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.12.tgz",
"integrity": "sha512-+Wri9p0QHMy+545hKww7YAu5NyzF8iomPL/RQazugQ9+Ez4Ic3mERMd8ZTX5rfK944j+560ZJi8iAwgak1Ac7A==", "integrity": "sha512-Ea8I3sQMVXr8JhN4z+H/d8zwo+tYDgHE9+5G4Wnrwhs0gaK9fXTKx0Tw5Xwsd/bCPTTZNRAdpyzvoeORe9LYpw==",
"license": "MIT",
"dependencies": { "dependencies": {
"@jridgewell/sourcemap-codec": "^1.5.0" "@jridgewell/sourcemap-codec": "^1.5.0"
} }
}, },
"node_modules/@vue/compiler-ssr": { "node_modules/@vue/compiler-sfc/node_modules/postcss": {
"version": "3.4.38", "version": "8.4.47",
"resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.4.38.tgz", "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.47.tgz",
"integrity": "sha512-YXznKFQ8dxYpAz9zLuVvfcXhc31FSPFDcqr0kyujbOwNhlmaNvL2QfIy+RZeJgSn5Fk54CWoEUeW+NVBAogGaw==", "integrity": "sha512-56rxCq7G/XfB4EkXq9Egn5GCqugWvDFjafDOThIdMBsI15iqPqR5r15TfSr1YPYeEI19YeaXMCbY6u88Y76GLQ==",
"license": "MIT", "funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/postcss/"
},
{
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/postcss"
},
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"dependencies": { "dependencies": {
"@vue/compiler-dom": "3.4.38", "nanoid": "^3.3.7",
"@vue/shared": "3.4.38" "picocolors": "^1.1.0",
"source-map-js": "^1.2.1"
},
"engines": {
"node": "^10 || ^12 || >=14"
}
},
"node_modules/@vue/compiler-ssr": {
"version": "3.5.12",
"resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.5.12.tgz",
"integrity": "sha512-eLwc7v6bfGBSM7wZOGPmRavSWzNFF6+PdRhE+VFJhNCgHiF8AM7ccoqcv5kBXA2eWUfigD7byekvf/JsOfKvPA==",
"dependencies": {
"@vue/compiler-dom": "3.5.12",
"@vue/shared": "3.5.12"
} }
}, },
"node_modules/@vue/reactivity": { "node_modules/@vue/reactivity": {
"version": "3.4.38", "version": "3.5.12",
"resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.4.38.tgz", "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.5.12.tgz",
"integrity": "sha512-4vl4wMMVniLsSYYeldAKzbk72+D3hUnkw9z8lDeJacTxAkXeDAP1uE9xr2+aKIN0ipOL8EG2GPouVTH6yF7Gnw==", "integrity": "sha512-UzaN3Da7xnJXdz4Okb/BGbAaomRHc3RdoWqTzlvd9+WBR5m3J39J1fGcHes7U3za0ruYn/iYy/a1euhMEHvTAg==",
"license": "MIT",
"dependencies": { "dependencies": {
"@vue/shared": "3.4.38" "@vue/shared": "3.5.12"
} }
}, },
"node_modules/@vue/runtime-core": { "node_modules/@vue/runtime-core": {
"version": "3.4.38", "version": "3.5.12",
"resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.4.38.tgz", "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.5.12.tgz",
"integrity": "sha512-21z3wA99EABtuf+O3IhdxP0iHgkBs1vuoCAsCKLVJPEjpVqvblwBnTj42vzHRlWDCyxu9ptDm7sI2ZMcWrQqlA==", "integrity": "sha512-hrMUYV6tpocr3TL3Ad8DqxOdpDe4zuQY4HPY3X/VRh+L2myQO8MFXPAMarIOSGNu0bFAjh1yBkMPXZBqCk62Uw==",
"license": "MIT",
"dependencies": { "dependencies": {
"@vue/reactivity": "3.4.38", "@vue/reactivity": "3.5.12",
"@vue/shared": "3.4.38" "@vue/shared": "3.5.12"
} }
}, },
"node_modules/@vue/runtime-dom": { "node_modules/@vue/runtime-dom": {
"version": "3.4.38", "version": "3.5.12",
"resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.4.38.tgz", "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.5.12.tgz",
"integrity": "sha512-afZzmUreU7vKwKsV17H1NDThEEmdYI+GCAK/KY1U957Ig2NATPVjCROv61R19fjZNzMmiU03n79OMnXyJVN0UA==", "integrity": "sha512-q8VFxR9A2MRfBr6/55Q3umyoN7ya836FzRXajPB6/Vvuv0zOPL+qltd9rIMzG/DbRLAIlREmnLsplEF/kotXKA==",
"license": "MIT",
"dependencies": { "dependencies": {
"@vue/reactivity": "3.4.38", "@vue/reactivity": "3.5.12",
"@vue/runtime-core": "3.4.38", "@vue/runtime-core": "3.5.12",
"@vue/shared": "3.4.38", "@vue/shared": "3.5.12",
"csstype": "^3.1.3" "csstype": "^3.1.3"
} }
}, },
"node_modules/@vue/server-renderer": { "node_modules/@vue/server-renderer": {
"version": "3.4.38", "version": "3.5.12",
"resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.4.38.tgz", "resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.5.12.tgz",
"integrity": "sha512-NggOTr82FbPEkkUvBm4fTGcwUY8UuTsnWC/L2YZBmvaQ4C4Jl/Ao4HHTB+l7WnFCt5M/dN3l0XLuyjzswGYVCA==", "integrity": "sha512-I3QoeDDeEPZm8yR28JtY+rk880Oqmj43hreIBVTicisFTx/Dl7JpG72g/X7YF8hnQD3IFhkky5i2bPonwrTVPg==",
"license": "MIT",
"dependencies": { "dependencies": {
"@vue/compiler-ssr": "3.4.38", "@vue/compiler-ssr": "3.5.12",
"@vue/shared": "3.4.38" "@vue/shared": "3.5.12"
}, },
"peerDependencies": { "peerDependencies": {
"vue": "3.4.38" "vue": "3.5.12"
} }
}, },
"node_modules/@vue/shared": { "node_modules/@vue/shared": {
"version": "3.4.38", "version": "3.5.12",
"resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.4.38.tgz", "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.5.12.tgz",
"integrity": "sha512-q0xCiLkuWWQLzVrecPb0RMsNWyxICOjPrcrwxTUEHb1fsnvni4dcuyG7RT/Ie7VPTvnjzIaWzRMUBsrqNj/hhw==", "integrity": "sha512-L2RPSAwUFbgZH20etwrXyVyCBu9OxRSi8T/38QsvnkJyvq2LufW2lDCOzm7t/U9C1mkhJGWYfCuFBCmIuNivrg=="
"license": "MIT"
}, },
"node_modules/@webassemblyjs/ast": { "node_modules/@webassemblyjs/ast": {
"version": "1.12.1", "version": "1.12.1",
@ -7408,9 +7409,9 @@
} }
}, },
"node_modules/dom-input-range": { "node_modules/dom-input-range": {
"version": "1.1.6", "version": "1.2.0",
"resolved": "https://registry.npmjs.org/dom-input-range/-/dom-input-range-1.1.6.tgz", "resolved": "https://registry.npmjs.org/dom-input-range/-/dom-input-range-1.2.0.tgz",
"integrity": "sha512-4o/SkTpscD0n81BeErrrtmE58lG8vTks++92vk//ld0NmkQTb4AVJ2rexh2yor6rtBf5IMte26u+fF3EgCppPQ==", "integrity": "sha512-8HVA5Oy5Vt872S7IXsjjp6/5Hqsm5YZLhurxwwQXp80T9qVsj8/mEUH3sQlFujLLUoWfxiaThHHuJ3/q1MHVuA==",
"license": "MIT", "license": "MIT",
"workspaces": [ "workspaces": [
"demos" "demos"
@ -12459,11 +12460,16 @@
"integrity": "sha512-w/9pXDXTDs3IDmOri/w8lM/w6LHR0/F4fcBLLzH+4csSoyshQ5su0TE7k0FLHZO7aOjVLDGecqd1M89+PVpVAA==", "integrity": "sha512-w/9pXDXTDs3IDmOri/w8lM/w6LHR0/F4fcBLLzH+4csSoyshQ5su0TE7k0FLHZO7aOjVLDGecqd1M89+PVpVAA==",
"license": "MIT" "license": "MIT"
}, },
"node_modules/perfect-debounce": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/perfect-debounce/-/perfect-debounce-1.0.0.tgz",
"integrity": "sha512-xCy9V055GLEqoFaHoC1SoLIaLmWctgCUaBaWxDZ7/Zx4CTyX7cJQLJOok/orfjZAh9kEYpjJa4d0KcJmCbctZA==",
"license": "MIT"
},
"node_modules/picocolors": { "node_modules/picocolors": {
"version": "1.0.1", "version": "1.1.1",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
"integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==", "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="
"license": "ISC"
}, },
"node_modules/picomatch": { "node_modules/picomatch": {
"version": "2.3.1", "version": "2.3.1",
@ -13512,11 +13518,10 @@
"license": "Unlicense" "license": "Unlicense"
}, },
"node_modules/rollup": { "node_modules/rollup": {
"version": "2.79.1", "version": "2.79.2",
"resolved": "https://registry.npmjs.org/rollup/-/rollup-2.79.1.tgz", "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.79.2.tgz",
"integrity": "sha512-uKxbd0IhMZOhjAiD5oAFp7BqvkA4Dv47qpOCtaNvng4HBwdbWtdOh8f5nZNuk2rp51PMGk3bzfWu5oayNEuYnw==", "integrity": "sha512-fS6iqSPZDs3dr/y7Od6y5nha8dW1YnbgtsyotCVvoFGKbERG++CVRFv1meyGDE1SNItQA8BrnCw7ScdAhRJ3XQ==",
"dev": true, "dev": true,
"license": "MIT",
"bin": { "bin": {
"rollup": "dist/bin/rollup" "rollup": "dist/bin/rollup"
}, },
@ -13847,10 +13852,9 @@
} }
}, },
"node_modules/source-map-js": { "node_modules/source-map-js": {
"version": "1.2.0", "version": "1.2.1",
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
"integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==", "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==",
"license": "BSD-3-Clause",
"engines": { "engines": {
"node": ">=0.10.0" "node": ">=0.10.0"
} }
@ -14743,21 +14747,6 @@
"node": ">=6" "node": ">=6"
} }
}, },
"node_modules/temporal-polyfill": {
"version": "0.2.5",
"resolved": "https://registry.npmjs.org/temporal-polyfill/-/temporal-polyfill-0.2.5.tgz",
"integrity": "sha512-ye47xp8Cb0nDguAhrrDS1JT1SzwEV9e26sSsrWzVu+yPZ7LzceEcH0i2gci9jWfOfSCCgM3Qv5nOYShVUUFUXA==",
"license": "MIT",
"dependencies": {
"temporal-spec": "^0.2.4"
}
},
"node_modules/temporal-spec": {
"version": "0.2.4",
"resolved": "https://registry.npmjs.org/temporal-spec/-/temporal-spec-0.2.4.tgz",
"integrity": "sha512-lDMFv4nKQrSjlkHKAlHVqKrBG4DyFfa9F74cmBZ3Iy3ed8yvWnlWSIdi4IKfSqwmazAohBNwiN64qGx4y5Q3IQ==",
"license": "ISC"
},
"node_modules/terser": { "node_modules/terser": {
"version": "5.31.6", "version": "5.31.6",
"resolved": "https://registry.npmjs.org/terser/-/terser-5.31.6.tgz", "resolved": "https://registry.npmjs.org/terser/-/terser-5.31.6.tgz",
@ -15378,11 +15367,10 @@
"license": "BSD-2-Clause" "license": "BSD-2-Clause"
}, },
"node_modules/vite/node_modules/@types/estree": { "node_modules/vite/node_modules/@types/estree": {
"version": "1.0.5", "version": "1.0.6",
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz", "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz",
"integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==", "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==",
"dev": true, "dev": true
"license": "MIT"
}, },
"node_modules/vite/node_modules/fsevents": { "node_modules/vite/node_modules/fsevents": {
"version": "2.3.3", "version": "2.3.3",
@ -15400,13 +15388,12 @@
} }
}, },
"node_modules/vite/node_modules/rollup": { "node_modules/vite/node_modules/rollup": {
"version": "4.21.1", "version": "4.24.0",
"resolved": "https://registry.npmjs.org/rollup/-/rollup-4.21.1.tgz", "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.24.0.tgz",
"integrity": "sha512-ZnYyKvscThhgd3M5+Qt3pmhO4jIRR5RGzaSovB6Q7rGNrK5cUncrtLmcTTJVSdcKXyZjW8X8MB0JMSuH9bcAJg==", "integrity": "sha512-DOmrlGSXNk1DM0ljiQA+i+o0rSLhtii1je5wgk60j49d1jHT5YYttBv1iWOnYSTG+fZZESUOSNiAl89SIet+Cg==",
"dev": true, "dev": true,
"license": "MIT",
"dependencies": { "dependencies": {
"@types/estree": "1.0.5" "@types/estree": "1.0.6"
}, },
"bin": { "bin": {
"rollup": "dist/bin/rollup" "rollup": "dist/bin/rollup"
@ -15416,22 +15403,22 @@
"npm": ">=8.0.0" "npm": ">=8.0.0"
}, },
"optionalDependencies": { "optionalDependencies": {
"@rollup/rollup-android-arm-eabi": "4.21.1", "@rollup/rollup-android-arm-eabi": "4.24.0",
"@rollup/rollup-android-arm64": "4.21.1", "@rollup/rollup-android-arm64": "4.24.0",
"@rollup/rollup-darwin-arm64": "4.21.1", "@rollup/rollup-darwin-arm64": "4.24.0",
"@rollup/rollup-darwin-x64": "4.21.1", "@rollup/rollup-darwin-x64": "4.24.0",
"@rollup/rollup-linux-arm-gnueabihf": "4.21.1", "@rollup/rollup-linux-arm-gnueabihf": "4.24.0",
"@rollup/rollup-linux-arm-musleabihf": "4.21.1", "@rollup/rollup-linux-arm-musleabihf": "4.24.0",
"@rollup/rollup-linux-arm64-gnu": "4.21.1", "@rollup/rollup-linux-arm64-gnu": "4.24.0",
"@rollup/rollup-linux-arm64-musl": "4.21.1", "@rollup/rollup-linux-arm64-musl": "4.24.0",
"@rollup/rollup-linux-powerpc64le-gnu": "4.21.1", "@rollup/rollup-linux-powerpc64le-gnu": "4.24.0",
"@rollup/rollup-linux-riscv64-gnu": "4.21.1", "@rollup/rollup-linux-riscv64-gnu": "4.24.0",
"@rollup/rollup-linux-s390x-gnu": "4.21.1", "@rollup/rollup-linux-s390x-gnu": "4.24.0",
"@rollup/rollup-linux-x64-gnu": "4.21.1", "@rollup/rollup-linux-x64-gnu": "4.24.0",
"@rollup/rollup-linux-x64-musl": "4.21.1", "@rollup/rollup-linux-x64-musl": "4.24.0",
"@rollup/rollup-win32-arm64-msvc": "4.21.1", "@rollup/rollup-win32-arm64-msvc": "4.24.0",
"@rollup/rollup-win32-ia32-msvc": "4.21.1", "@rollup/rollup-win32-ia32-msvc": "4.24.0",
"@rollup/rollup-win32-x64-msvc": "4.21.1", "@rollup/rollup-win32-x64-msvc": "4.24.0",
"fsevents": "~2.3.2" "fsevents": "~2.3.2"
} }
}, },
@ -15560,16 +15547,15 @@
"license": "MIT" "license": "MIT"
}, },
"node_modules/vue": { "node_modules/vue": {
"version": "3.4.38", "version": "3.5.12",
"resolved": "https://registry.npmjs.org/vue/-/vue-3.4.38.tgz", "resolved": "https://registry.npmjs.org/vue/-/vue-3.5.12.tgz",
"integrity": "sha512-f0ZgN+mZ5KFgVv9wz0f4OgVKukoXtS3nwET4c2vLBGQR50aI8G0cqbFtLlX9Yiyg3LFGBitruPHt2PxwTduJEw==", "integrity": "sha512-CLVZtXtn2ItBIi/zHZ0Sg1Xkb7+PU32bJJ8Bmy7ts3jxXTcbfsEfBivFYYWz1Hur+lalqGAh65Coin0r+HRUfg==",
"license": "MIT",
"dependencies": { "dependencies": {
"@vue/compiler-dom": "3.4.38", "@vue/compiler-dom": "3.5.12",
"@vue/compiler-sfc": "3.4.38", "@vue/compiler-sfc": "3.5.12",
"@vue/runtime-dom": "3.4.38", "@vue/runtime-dom": "3.5.12",
"@vue/server-renderer": "3.4.38", "@vue/server-renderer": "3.5.12",
"@vue/shared": "3.4.38" "@vue/shared": "3.5.12"
}, },
"peerDependencies": { "peerDependencies": {
"typescript": "*" "typescript": "*"

View File

@ -10,7 +10,7 @@
"@citation-js/plugin-software-formats": "0.6.1", "@citation-js/plugin-software-formats": "0.6.1",
"@github/markdown-toolbar-element": "2.2.3", "@github/markdown-toolbar-element": "2.2.3",
"@github/relative-time-element": "4.4.3", "@github/relative-time-element": "4.4.3",
"@github/text-expander-element": "2.7.1", "@github/text-expander-element": "2.8.0",
"@mcaptcha/vanilla-glue": "0.1.0-alpha-3", "@mcaptcha/vanilla-glue": "0.1.0-alpha-3",
"@primer/octicons": "19.11.0", "@primer/octicons": "19.11.0",
"@silverwind/vue3-calendar-heatmap": "2.0.6", "@silverwind/vue3-calendar-heatmap": "2.0.6",
@ -39,13 +39,13 @@
"monaco-editor": "0.51.0", "monaco-editor": "0.51.0",
"monaco-editor-webpack-plugin": "7.1.0", "monaco-editor-webpack-plugin": "7.1.0",
"pdfobject": "2.3.0", "pdfobject": "2.3.0",
"perfect-debounce": "1.0.0",
"postcss": "8.4.41", "postcss": "8.4.41",
"postcss-loader": "8.1.1", "postcss-loader": "8.1.1",
"postcss-nesting": "13.0.0", "postcss-nesting": "13.0.0",
"sortablejs": "1.15.2", "sortablejs": "1.15.2",
"swagger-ui-dist": "5.17.14", "swagger-ui-dist": "5.17.14",
"tailwindcss": "3.4.10", "tailwindcss": "3.4.10",
"temporal-polyfill": "0.2.5",
"throttle-debounce": "5.0.2", "throttle-debounce": "5.0.2",
"tinycolor2": "1.6.0", "tinycolor2": "1.6.0",
"tippy.js": "6.3.7", "tippy.js": "6.3.7",
@ -54,7 +54,7 @@
"typescript": "5.5.4", "typescript": "5.5.4",
"uint8-to-base64": "0.2.0", "uint8-to-base64": "0.2.0",
"vanilla-colorful": "0.7.2", "vanilla-colorful": "0.7.2",
"vue": "3.4.38", "vue": "3.5.12",
"vue-bar-graph": "2.1.0", "vue-bar-graph": "2.1.0",
"vue-chartjs": "5.3.1", "vue-chartjs": "5.3.1",
"vue-loader": "17.4.2", "vue-loader": "17.4.2",

View File

@ -642,6 +642,7 @@ func CreateBranchProtection(ctx *context.APIContext) {
ProtectedFilePatterns: form.ProtectedFilePatterns, ProtectedFilePatterns: form.ProtectedFilePatterns,
UnprotectedFilePatterns: form.UnprotectedFilePatterns, UnprotectedFilePatterns: form.UnprotectedFilePatterns,
BlockOnOutdatedBranch: form.BlockOnOutdatedBranch, BlockOnOutdatedBranch: form.BlockOnOutdatedBranch,
BlockAdminMergeOverride: form.BlockAdminMergeOverride,
} }
err = git_model.UpdateProtectBranch(ctx, ctx.Repo.Repository, protectBranch, git_model.WhitelistOptions{ err = git_model.UpdateProtectBranch(ctx, ctx.Repo.Repository, protectBranch, git_model.WhitelistOptions{
@ -852,6 +853,10 @@ func EditBranchProtection(ctx *context.APIContext) {
protectBranch.BlockOnOutdatedBranch = *form.BlockOnOutdatedBranch protectBranch.BlockOnOutdatedBranch = *form.BlockOnOutdatedBranch
} }
if form.BlockAdminMergeOverride != nil {
protectBranch.BlockAdminMergeOverride = *form.BlockAdminMergeOverride
}
var whitelistUsers, forcePushAllowlistUsers, mergeWhitelistUsers, approvalsWhitelistUsers []int64 var whitelistUsers, forcePushAllowlistUsers, mergeWhitelistUsers, approvalsWhitelistUsers []int64
if form.PushWhitelistUsernames != nil { if form.PushWhitelistUsernames != nil {
whitelistUsers, err = user_model.GetUserIDsByNames(ctx, form.PushWhitelistUsernames, false) whitelistUsers, err = user_model.GetUserIDsByNames(ctx, form.PushWhitelistUsernames, false)
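For context, the field added above can also be toggled over the REST API once this change ships; a minimal Go sketch, assuming the usual branch protection endpoint and token auth (the host, repository and token are placeholders, and the JSON property name comes from the swagger changes later in this diff):

// Sketch: enable the new "block admin merge override" flag on an existing
// protection rule. The endpoint path and auth style are assumptions, not part
// of this commit.
package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func main() {
	payload := bytes.NewBufferString(`{"block_admin_merge_override": true}`)
	req, err := http.NewRequest(http.MethodPatch,
		"https://gitea.example.com/api/v1/repos/user2/repo1/branch_protections/master", payload)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "token <access-token>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status) // expect 200 OK with the updated rule as JSON
}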

View File

@ -51,7 +51,7 @@ func migrateWithSetting(x *xorm.Engine) error {
} else if current < 0 { } else if current < 0 {
// execute migrations when the database isn't initialized even if AutoMigration is false // execute migrations when the database isn't initialized even if AutoMigration is false
return migrations.Migrate(x) return migrations.Migrate(x)
} else if expected := migrations.ExpectedVersion(); current != expected { } else if expected := migrations.ExpectedDBVersion(); current != expected {
log.Fatal(`"database.AUTO_MIGRATION" is disabled, but current database version %d is not equal to the expected version %d.`+ log.Fatal(`"database.AUTO_MIGRATION" is disabled, but current database version %d is not equal to the expected version %d.`+
`You can set "database.AUTO_MIGRATION" to true or migrate manually by running "gitea [--config /path/to/app.ini] migrate"`, current, expected) `You can set "database.AUTO_MIGRATION" to true or migrate manually by running "gitea [--config /path/to/app.ini] migrate"`, current, expected)
} }

View File

@ -9,6 +9,7 @@ import (
"code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/context" "code.gitea.io/gitea/services/context"
) )
@ -39,7 +40,7 @@ func Organizations(ctx *context.Context) {
) )
sortOrder := ctx.FormString("sort") sortOrder := ctx.FormString("sort")
if sortOrder == "" { if sortOrder == "" {
sortOrder = "newest" sortOrder = util.Iif(supportedSortOrders.Contains(setting.UI.ExploreDefaultSort), setting.UI.ExploreDefaultSort, "newest")
ctx.SetFormString("sort", sortOrder) ctx.SetFormString("sort", sortOrder)
} }
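The new default-sort fallback is a plain conditional; a self-contained sketch of the same pattern, with a local Iif helper standing in for the one from code.gitea.io/gitea/modules/util (its exact signature is assumed here) and placeholder sort names:

// Illustrative only: a generic ternary helper and the explore default-sort fallback.
package main

import "fmt"

func Iif[T any](condition bool, trueVal, falseVal T) T {
	if condition {
		return trueVal
	}
	return falseVal
}

func main() {
	supported := map[string]bool{"newest": true, "oldest": true, "alphabetically": true}
	configuredDefault := "alphabetically" // stand-in for setting.UI.ExploreDefaultSort

	sortOrder := Iif(supported[configuredDefault], configuredDefault, "newest")
	fmt.Println(sortOrder) // prints "alphabetically"; an unsupported value falls back to "newest"
}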

View File

@ -16,6 +16,7 @@ import (
"code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/sitemap" "code.gitea.io/gitea/modules/sitemap"
"code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/context" "code.gitea.io/gitea/services/context"
) )
@ -149,7 +150,7 @@ func Users(ctx *context.Context) {
) )
sortOrder := ctx.FormString("sort") sortOrder := ctx.FormString("sort")
if sortOrder == "" { if sortOrder == "" {
sortOrder = "newest" sortOrder = util.Iif(supportedSortOrders.Contains(setting.UI.ExploreDefaultSort), setting.UI.ExploreDefaultSort, "newest")
ctx.SetFormString("sort", sortOrder) ctx.SetFormString("sort", sortOrder)
} }

View File

@ -0,0 +1,93 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package repo
import (
"net/http"
"code.gitea.io/gitea/models/db"
issues_model "code.gitea.io/gitea/models/issues"
"code.gitea.io/gitea/models/unit"
issue_indexer "code.gitea.io/gitea/modules/indexer/issues"
"code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/services/context"
)
type issueSuggestion struct {
ID int64 `json:"id"`
Title string `json:"title"`
State string `json:"state"`
PullRequest *struct {
Merged bool `json:"merged"`
Draft bool `json:"draft"`
} `json:"pull_request,omitempty"`
}
// IssueSuggestions returns a list of issue suggestions
func IssueSuggestions(ctx *context.Context) {
keyword := ctx.Req.FormValue("q")
canReadIssues := ctx.Repo.CanRead(unit.TypeIssues)
canReadPulls := ctx.Repo.CanRead(unit.TypePullRequests)
var isPull optional.Option[bool]
if canReadPulls && !canReadIssues {
isPull = optional.Some(true)
} else if canReadIssues && !canReadPulls {
isPull = optional.Some(false)
}
searchOpt := &issue_indexer.SearchOptions{
Paginator: &db.ListOptions{
Page: 0,
PageSize: 5,
},
Keyword: keyword,
RepoIDs: []int64{ctx.Repo.Repository.ID},
IsPull: isPull,
IsClosed: nil,
SortBy: issue_indexer.SortByUpdatedDesc,
}
ids, _, err := issue_indexer.SearchIssues(ctx, searchOpt)
if err != nil {
ctx.ServerError("SearchIssues", err)
return
}
issues, err := issues_model.GetIssuesByIDs(ctx, ids, true)
if err != nil {
ctx.ServerError("FindIssuesByIDs", err)
return
}
suggestions := make([]*issueSuggestion, 0, len(issues))
for _, issue := range issues {
suggestion := &issueSuggestion{
ID: issue.ID,
Title: issue.Title,
State: string(issue.State()),
}
if issue.IsPull {
if err := issue.LoadPullRequest(ctx); err != nil {
ctx.ServerError("LoadPullRequest", err)
return
}
if issue.PullRequest != nil {
suggestion.PullRequest = &struct {
Merged bool `json:"merged"`
Draft bool `json:"draft"`
}{
Merged: issue.PullRequest.HasMerged,
Draft: issue.PullRequest.IsWorkInProgress(ctx),
}
}
}
suggestions = append(suggestions, suggestion)
}
ctx.JSON(http.StatusOK, suggestions)
}
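A rough sketch of consuming the handler above once its route is registered (see the web route added later in this diff); the host and repository are placeholders, and depending on repository visibility the request may need a signed-in session:

// Sketch: call the suggestions endpoint and decode the JSON array it returns.
// The struct fields mirror issueSuggestion above; everything else is illustrative.
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

type suggestion struct {
	ID          int64  `json:"id"`
	Title       string `json:"title"`
	State       string `json:"state"`
	PullRequest *struct {
		Merged bool `json:"merged"`
		Draft  bool `json:"draft"`
	} `json:"pull_request,omitempty"`
}

func main() {
	endpoint := "https://gitea.example.com/user2/repo1/issues/suggestions?q=" + url.QueryEscape("crash")
	resp, err := http.Get(endpoint)
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()

	var suggestions []suggestion // at most 5 entries, given the PageSize above
	if err := json.NewDecoder(resp.Body).Decode(&suggestions); err != nil {
		fmt.Println("decode failed:", err)
		return
	}
	for _, s := range suggestions {
		fmt.Printf("%d [%s] %s\n", s.ID, s.State, s.Title)
	}
}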

View File

@ -256,6 +256,7 @@ func SettingsProtectedBranchPost(ctx *context.Context) {
protectBranch.ProtectedFilePatterns = f.ProtectedFilePatterns protectBranch.ProtectedFilePatterns = f.ProtectedFilePatterns
protectBranch.UnprotectedFilePatterns = f.UnprotectedFilePatterns protectBranch.UnprotectedFilePatterns = f.UnprotectedFilePatterns
protectBranch.BlockOnOutdatedBranch = f.BlockOnOutdatedBranch protectBranch.BlockOnOutdatedBranch = f.BlockOnOutdatedBranch
protectBranch.BlockAdminMergeOverride = f.BlockAdminMergeOverride
err = git_model.UpdateProtectBranch(ctx, ctx.Repo.Repository, protectBranch, git_model.WhitelistOptions{ err = git_model.UpdateProtectBranch(ctx, ctx.Repo.Repository, protectBranch, git_model.WhitelistOptions{
UserIDs: whitelistUsers, UserIDs: whitelistUsers,

View File

@ -39,8 +39,9 @@ func RegenerateScratchTwoFactor(ctx *context.Context) {
if auth.IsErrTwoFactorNotEnrolled(err) { if auth.IsErrTwoFactorNotEnrolled(err) {
ctx.Flash.Error(ctx.Tr("settings.twofa_not_enrolled")) ctx.Flash.Error(ctx.Tr("settings.twofa_not_enrolled"))
ctx.Redirect(setting.AppSubURL + "/user/settings/security") ctx.Redirect(setting.AppSubURL + "/user/settings/security")
} else {
ctx.ServerError("SettingsTwoFactor: Failed to GetTwoFactorByUID", err)
} }
ctx.ServerError("SettingsTwoFactor: Failed to GetTwoFactorByUID", err)
return return
} }
@ -74,8 +75,9 @@ func DisableTwoFactor(ctx *context.Context) {
if auth.IsErrTwoFactorNotEnrolled(err) { if auth.IsErrTwoFactorNotEnrolled(err) {
ctx.Flash.Error(ctx.Tr("settings.twofa_not_enrolled")) ctx.Flash.Error(ctx.Tr("settings.twofa_not_enrolled"))
ctx.Redirect(setting.AppSubURL + "/user/settings/security") ctx.Redirect(setting.AppSubURL + "/user/settings/security")
} else {
ctx.ServerError("SettingsTwoFactor: Failed to GetTwoFactorByUID", err)
} }
ctx.ServerError("SettingsTwoFactor: Failed to GetTwoFactorByUID", err)
return return
} }
@ -84,8 +86,9 @@ func DisableTwoFactor(ctx *context.Context) {
// There is a potential DB race here - we must have been disabled by another request in the intervening period // There is a potential DB race here - we must have been disabled by another request in the intervening period
ctx.Flash.Success(ctx.Tr("settings.twofa_disabled")) ctx.Flash.Success(ctx.Tr("settings.twofa_disabled"))
ctx.Redirect(setting.AppSubURL + "/user/settings/security") ctx.Redirect(setting.AppSubURL + "/user/settings/security")
} else {
ctx.ServerError("SettingsTwoFactor: Failed to DeleteTwoFactorByID", err)
} }
ctx.ServerError("SettingsTwoFactor: Failed to DeleteTwoFactorByID", err)
return return
} }

View File

@ -1178,6 +1178,7 @@ func registerRoutes(m *web.Router) {
}) })
}) })
}, context.RepoRef()) }, context.RepoRef())
m.Get("/issues/suggestions", repo.IssueSuggestions)
}, ignSignIn, context.RepoAssignment, reqRepoIssuesOrPullsReader) }, ignSignIn, context.RepoAssignment, reqRepoIssuesOrPullsReader)
// end "/{username}/{reponame}": view milestone, label, issue, pull, etc // end "/{username}/{reponame}": view milestone, label, issue, pull, etc

View File

@ -185,6 +185,7 @@ func ToBranchProtection(ctx context.Context, bp *git_model.ProtectedBranch, repo
RequireSignedCommits: bp.RequireSignedCommits, RequireSignedCommits: bp.RequireSignedCommits,
ProtectedFilePatterns: bp.ProtectedFilePatterns, ProtectedFilePatterns: bp.ProtectedFilePatterns,
UnprotectedFilePatterns: bp.UnprotectedFilePatterns, UnprotectedFilePatterns: bp.UnprotectedFilePatterns,
BlockAdminMergeOverride: bp.BlockAdminMergeOverride,
Created: bp.CreatedUnix.AsTime(), Created: bp.CreatedUnix.AsTime(),
Updated: bp.UpdatedUnix.AsTime(), Updated: bp.UpdatedUnix.AsTime(),
} }

View File

@ -12,7 +12,7 @@ import (
) )
func checkDBVersion(ctx context.Context, logger log.Logger, autofix bool) error { func checkDBVersion(ctx context.Context, logger log.Logger, autofix bool) error {
logger.Info("Expected database version: %d", migrations.ExpectedVersion()) logger.Info("Expected database version: %d", migrations.ExpectedDBVersion())
if err := db.InitEngineWithMigration(ctx, migrations.EnsureUpToDate); err != nil { if err := db.InitEngineWithMigration(ctx, migrations.EnsureUpToDate); err != nil {
if !autofix { if !autofix {
logger.Critical("Error: %v during ensure up to date", err) logger.Critical("Error: %v during ensure up to date", err)

View File

@ -219,6 +219,7 @@ type ProtectBranchForm struct {
RequireSignedCommits bool RequireSignedCommits bool
ProtectedFilePatterns string ProtectedFilePatterns string
UnprotectedFilePatterns string UnprotectedFilePatterns string
BlockAdminMergeOverride bool
} }
// Validate validates the fields // Validate validates the fields

View File

@ -179,6 +179,11 @@ func BatchHandler(ctx *context.Context) {
return return
} }
if setting.LFS.MaxBatchSize != 0 && len(br.Objects) > setting.LFS.MaxBatchSize {
writeStatus(ctx, http.StatusRequestEntityTooLarge)
return
}
contentStore := lfs_module.NewContentStore() contentStore := lfs_module.NewContentStore()
var responseObjects []*lfs_module.ObjectResponse var responseObjects []*lfs_module.ObjectResponse

View File

@ -22,7 +22,7 @@ import (
rpm_service "code.gitea.io/gitea/services/packages/rpm" rpm_service "code.gitea.io/gitea/services/packages/rpm"
) )
// Task method to execute cleanup rules and cleanup expired package data // CleanupTask executes cleanup rules and cleanup expired package data
func CleanupTask(ctx context.Context, olderThan time.Duration) error { func CleanupTask(ctx context.Context, olderThan time.Duration) error {
if err := ExecuteCleanupRules(ctx); err != nil { if err := ExecuteCleanupRules(ctx); err != nil {
return err return err

View File

@ -206,7 +206,11 @@ func buildPackagesIndices(ctx context.Context, ownerID int64, repoVersion *packa
w := io.MultiWriter(packagesContent, gzw, xzw) w := io.MultiWriter(packagesContent, gzw, xzw)
addSeparator := false addSeparator := false
if err := debian_model.SearchPackages(ctx, opts, func(pfd *packages_model.PackageFileDescriptor) { pfds, err := debian_model.SearchPackages(ctx, opts)
if err != nil {
return err
}
for _, pfd := range pfds {
if addSeparator { if addSeparator {
fmt.Fprintln(w) fmt.Fprintln(w)
} }
@ -220,10 +224,7 @@ func buildPackagesIndices(ctx context.Context, ownerID int64, repoVersion *packa
fmt.Fprintf(w, "SHA1: %s\n", pfd.Blob.HashSHA1) fmt.Fprintf(w, "SHA1: %s\n", pfd.Blob.HashSHA1)
fmt.Fprintf(w, "SHA256: %s\n", pfd.Blob.HashSHA256) fmt.Fprintf(w, "SHA256: %s\n", pfd.Blob.HashSHA256)
fmt.Fprintf(w, "SHA512: %s\n", pfd.Blob.HashSHA512) fmt.Fprintf(w, "SHA512: %s\n", pfd.Blob.HashSHA512)
}); err != nil {
return err
} }
gzw.Close() gzw.Close()
xzw.Close() xzw.Close()
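The change above swaps a callback-style SearchPackages for one that returns the whole result set before anything is written. A simplified sketch with hypothetical names of why that ordering matters: when rows are streamed through a callback, the underlying query session is still open, so statements issued from inside the callback reuse that dangling session (the integration test later in this diff mentions the same "Iterate" problem).

// Simplified sketch, hypothetical names: materialize the query results first,
// then iterate freely; nothing inside the loop can touch a still-open cursor.
package main

import "fmt"

type packageFile struct{ Name, SHA256 string }

// new shape: return everything up front instead of streaming via a callback
func searchAll(all []packageFile) []packageFile {
	out := make([]packageFile, len(all))
	copy(out, all)
	return out
}

func main() {
	files := []packageFile{
		{"pkg_1.0_all.deb", "abc123"},
		{"pkg_1.1_all.deb", "def456"},
	}
	for _, pfd := range searchAll(files) { // the "query" has already finished here
		fmt.Printf("Filename: %s\nSHA256: %s\n\n", pfd.Name, pfd.SHA256)
	}
}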

View File

@ -68,7 +68,7 @@ const (
) )
// CheckPullMergeable check if the pull mergeable based on all conditions (branch protection, merge options, ...) // CheckPullMergeable check if the pull mergeable based on all conditions (branch protection, merge options, ...)
func CheckPullMergeable(stdCtx context.Context, doer *user_model.User, perm *access_model.Permission, pr *issues_model.PullRequest, mergeCheckType MergeCheckType, adminSkipProtectionCheck bool) error { func CheckPullMergeable(stdCtx context.Context, doer *user_model.User, perm *access_model.Permission, pr *issues_model.PullRequest, mergeCheckType MergeCheckType, adminForceMerge bool) error {
return db.WithTx(stdCtx, func(ctx context.Context) error { return db.WithTx(stdCtx, func(ctx context.Context) error {
if pr.HasMerged { if pr.HasMerged {
return ErrHasMerged return ErrHasMerged
@ -118,13 +118,22 @@ func CheckPullMergeable(stdCtx context.Context, doer *user_model.User, perm *acc
err = nil err = nil
} }
// * if the doer is admin, they could skip the branch protection check // * if admin tries to "Force Merge", they could sometimes skip the branch protection check
if adminSkipProtectionCheck { if adminForceMerge {
if isRepoAdmin, errCheckAdmin := access_model.IsUserRepoAdmin(ctx, pr.BaseRepo, doer); errCheckAdmin != nil { isRepoAdmin, errForceMerge := access_model.IsUserRepoAdmin(ctx, pr.BaseRepo, doer)
log.Error("Unable to check if %-v is a repo admin in %-v: %v", doer, pr.BaseRepo, errCheckAdmin) if errForceMerge != nil {
return errCheckAdmin return fmt.Errorf("IsUserRepoAdmin failed, repo: %v, doer: %v, err: %w", pr.BaseRepoID, doer.ID, errForceMerge)
} else if isRepoAdmin { }
err = nil // repo admin can skip the check, so clear the error
protectedBranchRule, errForceMerge := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
if errForceMerge != nil {
return fmt.Errorf("GetFirstMatchProtectedBranchRule failed, repo: %v, base branch: %v, err: %w", pr.BaseRepoID, pr.BaseBranch, errForceMerge)
}
// if doer is admin and the "Force Merge" is not blocked, then clear the branch protection check error
blockAdminForceMerge := protectedBranchRule != nil && protectedBranchRule.BlockAdminMergeOverride
if isRepoAdmin && !blockAdminForceMerge {
err = nil
} }
} }
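Distilled, the rewritten force-merge branch only clears the protection error when the doer is a repo admin and the matching rule does not set BlockAdminMergeOverride; a condensed sketch of that decision with the model lookups reduced to booleans:

// Condensed decision logic for admin "Force Merge"; ruleBlocksOverride stands
// in for protectedBranchRule != nil && protectedBranchRule.BlockAdminMergeOverride.
package main

import "fmt"

func resolveProtectionError(err error, adminForceMerge, isRepoAdmin, ruleBlocksOverride bool) error {
	if adminForceMerge && isRepoAdmin && !ruleBlocksOverride {
		return nil // admin may force-merge, so the branch protection error is dropped
	}
	return err
}

func main() {
	protectionErr := fmt.Errorf("required status checks have not passed")
	fmt.Println(resolveProtectionError(protectionErr, true, true, false)) // <nil>: override allowed
	fmt.Println(resolveProtectionError(protectionErr, true, true, true))  // error kept: override blocked
}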

View File

@ -164,7 +164,7 @@
{{$notAllOverridableChecksOk := or .IsBlockedByApprovals .IsBlockedByRejection .IsBlockedByOfficialReviewRequests .IsBlockedByOutdatedBranch .IsBlockedByChangedProtectedFiles (and .EnableStatusCheck (not .RequiredStatusCheckState.IsSuccess))}} {{$notAllOverridableChecksOk := or .IsBlockedByApprovals .IsBlockedByRejection .IsBlockedByOfficialReviewRequests .IsBlockedByOutdatedBranch .IsBlockedByChangedProtectedFiles (and .EnableStatusCheck (not .RequiredStatusCheckState.IsSuccess))}}
{{/* admin can merge without checks, writer can merge when checks succeed */}} {{/* admin can merge without checks, writer can merge when checks succeed */}}
{{$canMergeNow := and (or $.IsRepoAdmin (not $notAllOverridableChecksOk)) (or (not .AllowMerge) (not .RequireSigned) .WillSign)}} {{$canMergeNow := and (or (and (not $.ProtectedBranch.BlockAdminMergeOverride) $.IsRepoAdmin) (not $notAllOverridableChecksOk)) (or (not .AllowMerge) (not .RequireSigned) .WillSign)}}
{{/* admin and writer both can make an auto merge schedule */}} {{/* admin and writer both can make an auto merge schedule */}}
{{if $canMergeNow}} {{if $canMergeNow}}
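The updated $canMergeNow expression is dense; transliterated into plain boolean logic (Go-style stand-ins for the template values), it reads:

// The $canMergeNow template condition as a boolean function; the parameter
// names are stand-ins for the corresponding template variables.
package main

import "fmt"

func canMergeNow(isRepoAdmin, blockAdminMergeOverride, notAllOverridableChecksOk, allowMerge, requireSigned, willSign bool) bool {
	adminMayOverride := isRepoAdmin && !blockAdminMergeOverride
	checksOk := !notAllOverridableChecksOk
	signingOk := !allowMerge || !requireSigned || willSign
	return (adminMayOverride || checksOk) && signingOk
}

func main() {
	// repo admin, an overridable check is failing, and the rule blocks the override
	fmt.Println(canMergeNow(true, true, true, true, false, false)) // false
	// same situation once BlockAdminMergeOverride is unset
	fmt.Println(canMergeNow(true, false, true, true, false, false)) // true
}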

View File

@ -102,7 +102,7 @@
{{$sameBase := ne $.BaseName $.HeadUserName}} {{$sameBase := ne $.BaseName $.HeadUserName}}
{{$differentBranch := ne . $.HeadBranch}} {{$differentBranch := ne . $.HeadBranch}}
{{if or $sameBase $differentBranch}} {{if or $sameBase $differentBranch}}
<div class="item {{if eq $.BaseBranch .}}selected{{end}}" data-branch="{{.}}">{{$.BaseName}}{{if $.HeadRepo}}/{{$.HeadRepo}}{{end}}:{{.}}</div> <div class="item {{if eq $.BaseBranch .}}selected{{end}}" data-branch="{{.}}">{{$.BaseName}}:{{.}}</div>
{{end}} {{end}}
{{end}} {{end}}
</div> </div>

View File

@ -323,6 +323,13 @@
<p class="help">{{ctx.Locale.Tr "repo.settings.block_outdated_branch_desc"}}</p> <p class="help">{{ctx.Locale.Tr "repo.settings.block_outdated_branch_desc"}}</p>
</div> </div>
</div> </div>
<div class="field">
<div class="ui checkbox">
<input name="block_admin_merge_override" type="checkbox" {{if .Rule.BlockAdminMergeOverride}}checked{{end}}>
<label>{{ctx.Locale.Tr "repo.settings.block_admin_merge_override"}}</label>
<p class="help">{{ctx.Locale.Tr "repo.settings.block_admin_merge_override_desc"}}</p>
</div>
</div>
<div class="divider"></div> <div class="divider"></div>
<div class="field"> <div class="field">

View File

@ -44,7 +44,7 @@ Template Attributes:
<button class="markdown-toolbar-button markdown-switch-easymde" data-tooltip-content="{{ctx.Locale.Tr "editor.buttons.switch_to_legacy.tooltip"}}">{{svg "octicon-arrow-switch"}}</button> <button class="markdown-toolbar-button markdown-switch-easymde" data-tooltip-content="{{ctx.Locale.Tr "editor.buttons.switch_to_legacy.tooltip"}}">{{svg "octicon-arrow-switch"}}</button>
</div> </div>
</markdown-toolbar> </markdown-toolbar>
<text-expander keys=": @" suffix=""> <text-expander keys=": @ #" multiword="#" suffix="">
<textarea class="markdown-text-editor"{{if .TextareaName}} name="{{.TextareaName}}"{{end}}{{if .TextareaPlaceholder}} placeholder="{{.TextareaPlaceholder}}"{{end}}{{if .TextareaAriaLabel}} aria-label="{{.TextareaAriaLabel}}"{{end}}{{if .DisableAutosize}} data-disable-autosize="{{.DisableAutosize}}"{{end}}>{{.TextareaContent}}</textarea> <textarea class="markdown-text-editor"{{if .TextareaName}} name="{{.TextareaName}}"{{end}}{{if .TextareaPlaceholder}} placeholder="{{.TextareaPlaceholder}}"{{end}}{{if .TextareaAriaLabel}} aria-label="{{.TextareaAriaLabel}}"{{end}}{{if .DisableAutosize}} data-disable-autosize="{{.DisableAutosize}}"{{end}}>{{.TextareaContent}}</textarea>
</text-expander> </text-expander>
<script> <script>

View File

@ -18771,6 +18771,10 @@
}, },
"x-go-name": "ApprovalsWhitelistUsernames" "x-go-name": "ApprovalsWhitelistUsernames"
}, },
"block_admin_merge_override": {
"type": "boolean",
"x-go-name": "BlockAdminMergeOverride"
},
"block_on_official_review_requests": { "block_on_official_review_requests": {
"type": "boolean", "type": "boolean",
"x-go-name": "BlockOnOfficialReviewRequests" "x-go-name": "BlockOnOfficialReviewRequests"
@ -19466,6 +19470,10 @@
}, },
"x-go-name": "ApprovalsWhitelistUsernames" "x-go-name": "ApprovalsWhitelistUsernames"
}, },
"block_admin_merge_override": {
"type": "boolean",
"x-go-name": "BlockAdminMergeOverride"
},
"block_on_official_review_requests": { "block_on_official_review_requests": {
"type": "boolean", "type": "boolean",
"x-go-name": "BlockOnOfficialReviewRequests" "x-go-name": "BlockOnOfficialReviewRequests"
@ -20685,6 +20693,10 @@
}, },
"x-go-name": "ApprovalsWhitelistUsernames" "x-go-name": "ApprovalsWhitelistUsernames"
}, },
"block_admin_merge_override": {
"type": "boolean",
"x-go-name": "BlockAdminMergeOverride"
},
"block_on_official_review_requests": { "block_on_official_review_requests": {
"type": "boolean", "type": "boolean",
"x-go-name": "BlockOnOfficialReviewRequests" "x-go-name": "BlockOnOfficialReviewRequests"

View File

@ -10,6 +10,7 @@ import (
"fmt" "fmt"
"io" "io"
"net/http" "net/http"
"strconv"
"strings" "strings"
"testing" "testing"
@ -19,6 +20,7 @@ import (
user_model "code.gitea.io/gitea/models/user" user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/base"
debian_module "code.gitea.io/gitea/modules/packages/debian" debian_module "code.gitea.io/gitea/modules/packages/debian"
packages_cleanup_service "code.gitea.io/gitea/services/packages/cleanup"
"code.gitea.io/gitea/tests" "code.gitea.io/gitea/tests"
"github.com/blakesmith/ar" "github.com/blakesmith/ar"
@ -263,4 +265,37 @@ func TestPackageDebian(t *testing.T) {
assert.Contains(t, body, "Components: "+strings.Join(components, " ")+"\n") assert.Contains(t, body, "Components: "+strings.Join(components, " ")+"\n")
assert.Contains(t, body, "Architectures: "+architectures[1]+"\n") assert.Contains(t, body, "Architectures: "+architectures[1]+"\n")
}) })
t.Run("Cleanup", func(t *testing.T) {
defer tests.PrintCurrentTest(t)()
rule := &packages.PackageCleanupRule{
Enabled: true,
RemovePattern: `.*`,
MatchFullName: true,
OwnerID: user.ID,
Type: packages.TypeDebian,
}
_, err := packages.InsertCleanupRule(db.DefaultContext, rule)
assert.NoError(t, err)
// When there were a lot of packages (> 50 or 100) and the code used "Iterate" to get all packages, it used to cause bugs,
// because "Iterate" keeps a dangling SQL session open while the callback function uses that same session to execute statements.
// The "Iterate" problem is now covered by TestContextSafety, so here we only need to check the cleanup logic with a small number of packages.
packagesCount := 2
for i := 0; i < packagesCount; i++ {
uploadURL := fmt.Sprintf("%s/pool/%s/%s/upload", rootURL, "test", "main")
req := NewRequestWithBody(t, "PUT", uploadURL, createArchive(packageName, "1.0."+strconv.Itoa(i), "all")).AddBasicAuth(user.Name)
MakeRequest(t, req, http.StatusCreated)
}
req := NewRequest(t, "GET", fmt.Sprintf("%s/dists/%s/Release", rootURL, "test"))
MakeRequest(t, req, http.StatusOK)
err = packages_cleanup_service.CleanupTask(db.DefaultContext, 0)
assert.NoError(t, err)
req = NewRequest(t, "GET", fmt.Sprintf("%s/dists/%s/Release", rootURL, "test"))
MakeRequest(t, req, http.StatusNotFound)
})
} }

View File

@ -976,3 +976,50 @@ func TestPullAutoMergeAfterCommitStatusSucceedAndApprovalForAgitFlow(t *testing.
unittest.AssertNotExistsBean(t, &pull_model.AutoMerge{PullID: pr.ID}) unittest.AssertNotExistsBean(t, &pull_model.AutoMerge{PullID: pr.ID})
}) })
} }
func TestPullNonMergeForAdminWithBranchProtection(t *testing.T) {
onGiteaRun(t, func(t *testing.T, u *url.URL) {
// create a pull request
session := loginUser(t, "user1")
forkedName := "repo1-1"
testRepoFork(t, session, "user2", "repo1", "user1", forkedName, "")
defer testDeleteRepository(t, session, "user1", forkedName)
testEditFile(t, session, "user1", forkedName, "master", "README.md", "Hello, World (Edited)\n")
testPullCreate(t, session, "user1", forkedName, false, "master", "master", "Indexer notifier test pull")
baseRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerName: "user2", Name: "repo1"})
forkedRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerName: "user1", Name: forkedName})
unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{
BaseRepoID: baseRepo.ID,
BaseBranch: "master",
HeadRepoID: forkedRepo.ID,
HeadBranch: "master",
})
// add protected branch for commit status
csrf := GetUserCSRFToken(t, session)
// Change master branch to protected
pbCreateReq := NewRequestWithValues(t, "POST", "/user2/repo1/settings/branches/edit", map[string]string{
"_csrf": csrf,
"rule_name": "master",
"enable_push": "true",
"enable_status_check": "true",
"status_check_contexts": "gitea/actions",
"block_admin_merge_override": "true",
})
session.MakeRequest(t, pbCreateReq, http.StatusSeeOther)
token := getTokenForLoggedInUser(t, session, auth_model.AccessTokenScopeWriteRepository)
mergeReq := NewRequestWithValues(t, "POST", "/api/v1/repos/user2/repo1/pulls/6/merge", map[string]string{
"_csrf": csrf,
"head_commit_id": "",
"merge_when_checks_succeed": "false",
"force_merge": "true",
"do": "rebase",
}).AddTokenAuth(token)
session.MakeRequest(t, mergeReq, http.StatusMethodNotAllowed)
})
}

View File

@ -2,31 +2,21 @@
Please also update the template file above if this vue is modified. Please also update the template file above if this vue is modified.
action status accepted: success, skipped, waiting, blocked, running, failure, cancelled, unknown action status accepted: success, skipped, waiting, blocked, running, failure, cancelled, unknown
--> -->
<script lang="ts"> <script lang="ts" setup>
import {SvgIcon} from '../svg.ts'; import {SvgIcon} from '../svg.ts';
export default { withDefaults(defineProps<{
components: {SvgIcon}, status: 'success' | 'skipped' | 'waiting' | 'blocked' | 'running' | 'failure' | 'cancelled' | 'unknown',
props: { size?: number,
status: { className?: string,
type: String, localeStatus?: string,
required: true, }>(), {
}, size: 16,
size: { className: undefined,
type: Number, localeStatus: undefined,
default: 16, });
},
className: {
type: String,
default: '',
},
localeStatus: {
type: String,
default: '',
},
},
};
</script> </script>
<template> <template>
<span class="tw-flex tw-items-center" :data-tooltip-content="localeStatus" v-if="status"> <span class="tw-flex tw-items-center" :data-tooltip-content="localeStatus" v-if="status">
<SvgIcon name="octicon-check-circle-fill" class="text green" :size="size" :class-name="className" v-if="status === 'success'"/> <SvgIcon name="octicon-check-circle-fill" class="text green" :size="size" :class-name="className" v-if="status === 'success'"/>

View File

@ -1,58 +1,56 @@
<script lang="ts"> <script lang="ts" setup>
// TODO: Switch to upstream after https://github.com/razorness/vue3-calendar-heatmap/pull/34 is merged // TODO: Switch to upstream after https://github.com/razorness/vue3-calendar-heatmap/pull/34 is merged
import {CalendarHeatmap} from '@silverwind/vue3-calendar-heatmap'; import {CalendarHeatmap} from '@silverwind/vue3-calendar-heatmap';
import {onMounted, ref} from 'vue';
import type {Value as HeatmapValue, Locale as HeatmapLocale} from '@silverwind/vue3-calendar-heatmap';
export default { defineProps<{
components: {CalendarHeatmap}, values?: HeatmapValue[];
props: { locale: {
values: { textTotalContributions: string;
type: Array, heatMapLocale: Partial<HeatmapLocale>;
default: () => [], noDataText: string;
}, tooltipUnit: string;
locale: { };
type: Object, }>();
default: () => {},
},
},
data: () => ({
colorRange: [
'var(--color-secondary-alpha-60)',
'var(--color-secondary-alpha-60)',
'var(--color-primary-light-4)',
'var(--color-primary-light-2)',
'var(--color-primary)',
'var(--color-primary-dark-2)',
'var(--color-primary-dark-4)',
],
endDate: new Date(),
}),
mounted() {
// work around issue with first legend color being rendered twice and legend cut off
const legend = document.querySelector('.vch__external-legend-wrapper');
legend.setAttribute('viewBox', '12 0 80 10');
legend.style.marginRight = '-12px';
},
methods: {
handleDayClick(e) {
// Reset filter if same date is clicked
const params = new URLSearchParams(document.location.search);
const queryDate = params.get('date');
// Timezone has to be stripped because toISOString() converts to UTC
const clickedDate = new Date(e.date - (e.date.getTimezoneOffset() * 60000)).toISOString().substring(0, 10);
if (queryDate && queryDate === clickedDate) { const colorRange = [
params.delete('date'); 'var(--color-secondary-alpha-60)',
} else { 'var(--color-secondary-alpha-60)',
params.set('date', clickedDate); 'var(--color-primary-light-4)',
} 'var(--color-primary-light-2)',
'var(--color-primary)',
'var(--color-primary-dark-2)',
'var(--color-primary-dark-4)',
];
params.delete('page'); const endDate = ref(new Date());
const newSearch = params.toString(); onMounted(() => {
window.location.search = newSearch.length ? `?${newSearch}` : ''; // work around issue with first legend color being rendered twice and legend cut off
}, const legend = document.querySelector<HTMLElement>('.vch__external-legend-wrapper');
}, legend.setAttribute('viewBox', '12 0 80 10');
}; legend.style.marginRight = '-12px';
});
function handleDayClick(e: Event & {date: Date}) {
// Reset filter if same date is clicked
const params = new URLSearchParams(document.location.search);
const queryDate = params.get('date');
// Timezone has to be stripped because toISOString() converts to UTC
const clickedDate = new Date(e.date.getTime() - (e.date.getTimezoneOffset() * 60000)).toISOString().substring(0, 10);
if (queryDate && queryDate === clickedDate) {
params.delete('date');
} else {
params.set('date', clickedDate);
}
params.delete('page');
const newSearch = params.toString();
window.location.search = newSearch.length ? `?${newSearch}` : '';
}
</script> </script>
<template> <template>
<div class="total-contributions"> <div class="total-contributions">

View File

@ -1,100 +1,65 @@
<script lang="ts"> <script lang="ts" setup>
import {SvgIcon} from '../svg.ts'; import {SvgIcon} from '../svg.ts';
import {GET} from '../modules/fetch.ts'; import {GET} from '../modules/fetch.ts';
import {getIssueColor, getIssueIcon} from '../features/issue.ts';
import {computed, onMounted, ref} from 'vue';
const {appSubUrl, i18n} = window.config; const {appSubUrl, i18n} = window.config;
export default { const loading = ref(false);
components: {SvgIcon}, const issue = ref(null);
data: () => ({ const renderedLabels = ref('');
loading: false, const i18nErrorOccurred = i18n.error_occurred;
issue: null, const i18nErrorMessage = ref(null);
renderedLabels: '',
i18nErrorOccurred: i18n.error_occurred,
i18nErrorMessage: null,
}),
computed: {
createdAt() {
return new Date(this.issue.created_at).toLocaleDateString(undefined, {year: 'numeric', month: 'short', day: 'numeric'});
},
body() { const createdAt = computed(() => new Date(issue.value.created_at).toLocaleDateString(undefined, {year: 'numeric', month: 'short', day: 'numeric'}));
const body = this.issue.body.replace(/\n+/g, ' '); const body = computed(() => {
if (body.length > 85) { const body = issue.value.body.replace(/\n+/g, ' ');
return `${body.substring(0, 85)}`; if (body.length > 85) {
} return `${body.substring(0, 85)}`;
return body; }
}, return body;
});
icon() { const root = ref<HTMLElement | null>(null);
if (this.issue.pull_request !== null) {
if (this.issue.state === 'open') {
if (this.issue.pull_request.draft === true) {
return 'octicon-git-pull-request-draft'; // WIP PR
}
return 'octicon-git-pull-request'; // Open PR
} else if (this.issue.pull_request.merged === true) {
return 'octicon-git-merge'; // Merged PR
}
return 'octicon-git-pull-request'; // Closed PR
} else if (this.issue.state === 'open') {
return 'octicon-issue-opened'; // Open Issue
}
return 'octicon-issue-closed'; // Closed Issue
},
color() { onMounted(() => {
if (this.issue.pull_request !== null) { root.value.addEventListener('ce-load-context-popup', (e: CustomEvent) => {
if (this.issue.pull_request.draft === true) { const data = e.detail;
return 'grey'; // WIP PR if (!loading.value && issue.value === null) {
} else if (this.issue.pull_request.merged === true) { load(data);
return 'purple'; // Merged PR }
} });
} });
if (this.issue.state === 'open') {
return 'green'; // Open Issue
}
return 'red'; // Closed Issue
},
},
mounted() {
this.$refs.root.addEventListener('ce-load-context-popup', (e) => {
const data = e.detail;
if (!this.loading && this.issue === null) {
this.load(data);
}
});
},
methods: {
async load(data) {
this.loading = true;
this.i18nErrorMessage = null;
try { async function load(data) {
const response = await GET(`${appSubUrl}/${data.owner}/${data.repo}/issues/${data.index}/info`); // backend: GetIssueInfo loading.value = true;
const respJson = await response.json(); i18nErrorMessage.value = null;
if (!response.ok) {
this.i18nErrorMessage = respJson.message ?? i18n.network_error; try {
return; const response = await GET(`${appSubUrl}/${data.owner}/${data.repo}/issues/${data.index}/info`); // backend: GetIssueInfo
} const respJson = await response.json();
this.issue = respJson.convertedIssue; if (!response.ok) {
this.renderedLabels = respJson.renderedLabels; i18nErrorMessage.value = respJson.message ?? i18n.network_error;
} catch { return;
this.i18nErrorMessage = i18n.network_error; }
} finally { issue.value = respJson.convertedIssue;
this.loading = false; renderedLabels.value = respJson.renderedLabels;
} } catch {
}, i18nErrorMessage.value = i18n.network_error;
}, } finally {
}; loading.value = false;
}
}
</script> </script>
<template> <template>
<div ref="root"> <div ref="root">
<div v-if="loading" class="tw-h-12 tw-w-12 is-loading"/> <div v-if="loading" class="tw-h-12 tw-w-12 is-loading"/>
<div v-if="!loading && issue !== null" class="tw-flex tw-flex-col tw-gap-2"> <div v-if="!loading && issue !== null" class="tw-flex tw-flex-col tw-gap-2">
<div class="tw-text-12">{{ issue.repository.full_name }} on {{ createdAt }}</div> <div class="tw-text-12">{{ issue.repository.full_name }} on {{ createdAt }}</div>
<div class="flex-text-block"> <div class="flex-text-block">
<svg-icon :name="icon" :class="['text', color]"/> <svg-icon :name="getIssueIcon(issue)" :class="['text', getIssueColor(issue)]"/>
<span class="issue-title tw-font-semibold tw-break-anywhere"> <span class="issue-title tw-font-semibold tw-break-anywhere">
{{ issue.title }} {{ issue.title }}
<span class="index">#{{ issue.number }}</span> <span class="index">#{{ issue.number }}</span>

View File

@ -1,40 +1,42 @@
<script lang="ts"> <script lang="ts" setup>
import {onMounted, onUnmounted} from 'vue';
import {loadMoreFiles} from '../features/repo-diff.ts'; import {loadMoreFiles} from '../features/repo-diff.ts';
import {diffTreeStore} from '../modules/stores.ts'; import {diffTreeStore} from '../modules/stores.ts';
export default { const store = diffTreeStore();
data: () => {
return {store: diffTreeStore()}; onMounted(() => {
}, document.querySelector('#show-file-list-btn').addEventListener('click', toggleFileList);
mounted() { });
document.querySelector('#show-file-list-btn').addEventListener('click', this.toggleFileList);
}, onUnmounted(() => {
unmounted() { document.querySelector('#show-file-list-btn').removeEventListener('click', toggleFileList);
document.querySelector('#show-file-list-btn').removeEventListener('click', this.toggleFileList); });
},
methods: { function toggleFileList() {
toggleFileList() { store.fileListIsVisible = !store.fileListIsVisible;
this.store.fileListIsVisible = !this.store.fileListIsVisible; }
},
diffTypeToString(pType) { function diffTypeToString(pType) {
const diffTypes = { const diffTypes = {
1: 'add', 1: 'add',
2: 'modify', 2: 'modify',
3: 'del', 3: 'del',
4: 'rename', 4: 'rename',
5: 'copy', 5: 'copy',
}; };
return diffTypes[pType]; return diffTypes[pType];
}, }
diffStatsWidth(adds, dels) {
return `${adds / (adds + dels) * 100}%`; function diffStatsWidth(adds, dels) {
}, return `${adds / (adds + dels) * 100}%`;
loadMoreData() { }
loadMoreFiles(this.store.linkLoadMore);
}, function loadMoreData() {
}, loadMoreFiles(store.linkLoadMore);
}; }
</script> </script>
<template> <template>
<ol class="diff-stats tw-m-0" ref="root" v-if="store.fileListIsVisible"> <ol class="diff-stats tw-m-0" ref="root" v-if="store.fileListIsVisible">
<li v-for="file in store.files" :key="file.NameHash"> <li v-for="file in store.files" :key="file.NameHash">

View File

@ -1,130 +1,137 @@
<script lang="ts"> <script lang="ts" setup>
import DiffFileTreeItem from './DiffFileTreeItem.vue'; import DiffFileTreeItem from './DiffFileTreeItem.vue';
import {loadMoreFiles} from '../features/repo-diff.ts'; import {loadMoreFiles} from '../features/repo-diff.ts';
import {toggleElem} from '../utils/dom.ts'; import {toggleElem} from '../utils/dom.ts';
import {diffTreeStore} from '../modules/stores.ts'; import {diffTreeStore} from '../modules/stores.ts';
import {setFileFolding} from '../features/file-fold.ts'; import {setFileFolding} from '../features/file-fold.ts';
import {computed, onMounted, onUnmounted} from 'vue';
const LOCAL_STORAGE_KEY = 'diff_file_tree_visible'; const LOCAL_STORAGE_KEY = 'diff_file_tree_visible';
export default { const store = diffTreeStore();
components: {DiffFileTreeItem},
data: () => {
return {store: diffTreeStore()};
},
computed: {
fileTree() {
const result = [];
for (const file of this.store.files) {
// Split file into directories
const splits = file.Name.split('/');
let index = 0;
let parent = null;
let isFile = false;
for (const split of splits) {
index += 1;
// reached the end
if (index === splits.length) {
isFile = true;
}
let newParent = {
name: split,
children: [],
isFile,
};
if (isFile === true) { const fileTree = computed(() => {
newParent.file = file; const result = [];
} for (const file of store.files) {
// Split file into directories
if (parent) { const splits = file.Name.split('/');
// check if the folder already exists let index = 0;
const existingFolder = parent.children.find( let parent = null;
(x) => x.name === split, let isFile = false;
); for (const split of splits) {
if (existingFolder) { index += 1;
newParent = existingFolder; // reached the end
} else { if (index === splits.length) {
parent.children.push(newParent); isFile = true;
}
} else {
const existingFolder = result.find((x) => x.name === split);
if (existingFolder) {
newParent = existingFolder;
} else {
result.push(newParent);
}
}
parent = newParent;
}
} }
const mergeChildIfOnlyOneDir = (entries) => { let newParent = {
for (const entry of entries) { name: split,
if (entry.children) { children: [],
mergeChildIfOnlyOneDir(entry.children); isFile,
} } as {
if (entry.children.length === 1 && entry.children[0].isFile === false) { name: string,
// Merge it to the parent children: any[],
entry.name = `${entry.name}/${entry.children[0].name}`; isFile: boolean,
entry.children = entry.children[0].children; file?: any,
}
}
}; };
// Merge folders with just a folder as children in order to
// reduce the depth of our tree.
mergeChildIfOnlyOneDir(result);
return result;
},
},
mounted() {
// Default to true if unset
this.store.fileTreeIsVisible = localStorage.getItem(LOCAL_STORAGE_KEY) !== 'false';
document.querySelector('.diff-toggle-file-tree-button').addEventListener('click', this.toggleVisibility);
this.hashChangeListener = () => { if (isFile === true) {
this.store.selectedItem = window.location.hash; newParent.file = file;
this.expandSelectedFile();
};
this.hashChangeListener();
window.addEventListener('hashchange', this.hashChangeListener);
},
unmounted() {
document.querySelector('.diff-toggle-file-tree-button').removeEventListener('click', this.toggleVisibility);
window.removeEventListener('hashchange', this.hashChangeListener);
},
methods: {
expandSelectedFile() {
// expand file if the selected file is folded
if (this.store.selectedItem) {
const box = document.querySelector(this.store.selectedItem);
const folded = box?.getAttribute('data-folded') === 'true';
if (folded) setFileFolding(box, box.querySelector('.fold-file'), false);
} }
},
toggleVisibility() { if (parent) {
this.updateVisibility(!this.store.fileTreeIsVisible); // check if the folder already exists
}, const existingFolder = parent.children.find(
updateVisibility(visible) { (x) => x.name === split,
this.store.fileTreeIsVisible = visible; );
localStorage.setItem(LOCAL_STORAGE_KEY, this.store.fileTreeIsVisible); if (existingFolder) {
this.updateState(this.store.fileTreeIsVisible); newParent = existingFolder;
}, } else {
updateState(visible) { parent.children.push(newParent);
const btn = document.querySelector('.diff-toggle-file-tree-button'); }
const [toShow, toHide] = btn.querySelectorAll('.icon'); } else {
const tree = document.querySelector('#diff-file-tree'); const existingFolder = result.find((x) => x.name === split);
const newTooltip = btn.getAttribute(visible ? 'data-hide-text' : 'data-show-text'); if (existingFolder) {
btn.setAttribute('data-tooltip-content', newTooltip); newParent = existingFolder;
toggleElem(tree, visible); } else {
toggleElem(toShow, !visible); result.push(newParent);
toggleElem(toHide, visible); }
}, }
loadMoreData() { parent = newParent;
loadMoreFiles(this.store.linkLoadMore); }
}, }
}, const mergeChildIfOnlyOneDir = (entries) => {
}; for (const entry of entries) {
if (entry.children) {
mergeChildIfOnlyOneDir(entry.children);
}
if (entry.children.length === 1 && entry.children[0].isFile === false) {
// Merge it to the parent
entry.name = `${entry.name}/${entry.children[0].name}`;
entry.children = entry.children[0].children;
}
}
};
// Merge folders with just a folder as children in order to
// reduce the depth of our tree.
mergeChildIfOnlyOneDir(result);
return result;
});
onMounted(() => {
// Default to true if unset
store.fileTreeIsVisible = localStorage.getItem(LOCAL_STORAGE_KEY) !== 'false';
document.querySelector('.diff-toggle-file-tree-button').addEventListener('click', toggleVisibility);
hashChangeListener();
window.addEventListener('hashchange', hashChangeListener);
});
onUnmounted(() => {
document.querySelector('.diff-toggle-file-tree-button').removeEventListener('click', toggleVisibility);
window.removeEventListener('hashchange', hashChangeListener);
});
function hashChangeListener() {
store.selectedItem = window.location.hash;
expandSelectedFile();
}
function expandSelectedFile() {
// expand file if the selected file is folded
if (store.selectedItem) {
const box = document.querySelector(store.selectedItem);
const folded = box?.getAttribute('data-folded') === 'true';
if (folded) setFileFolding(box, box.querySelector('.fold-file'), false);
}
}
function toggleVisibility() {
updateVisibility(!store.fileTreeIsVisible);
}
function updateVisibility(visible) {
store.fileTreeIsVisible = visible;
localStorage.setItem(LOCAL_STORAGE_KEY, store.fileTreeIsVisible);
updateState(store.fileTreeIsVisible);
}
function updateState(visible) {
const btn = document.querySelector('.diff-toggle-file-tree-button');
const [toShow, toHide] = btn.querySelectorAll('.icon');
const tree = document.querySelector('#diff-file-tree');
const newTooltip = btn.getAttribute(visible ? 'data-hide-text' : 'data-show-text');
btn.setAttribute('data-tooltip-content', newTooltip);
toggleElem(tree, visible);
toggleElem(toShow, !visible);
toggleElem(toHide, visible);
}
function loadMoreData() {
loadMoreFiles(store.linkLoadMore);
}
</script> </script>
<template> <template>
<div v-if="store.fileTreeIsVisible" class="diff-file-tree-items"> <div v-if="store.fileTreeIsVisible" class="diff-file-tree-items">
<!-- only render the tree if we're visible. in many cases this is something that doesn't change very often --> <!-- only render the tree if we're visible. in many cases this is something that doesn't change very often -->
@ -134,6 +141,7 @@ export default {
</div> </div>
</div> </div>
</template> </template>
<style scoped> <style scoped>
.diff-file-tree-items { .diff-file-tree-items {
display: flex; display: flex;

View File

@ -1,33 +1,41 @@
<script lang="ts"> <script lang="ts" setup>
import {SvgIcon} from '../svg.ts'; import {SvgIcon} from '../svg.ts';
import {diffTreeStore} from '../modules/stores.ts'; import {diffTreeStore} from '../modules/stores.ts';
import {ref} from 'vue';
export default { type File = {
components: {SvgIcon}, Name: string;
props: { NameHash: string;
item: { Type: number;
type: Object, IsViewed: boolean;
required: true, }
},
}, type Item = {
data: () => ({ name: string;
store: diffTreeStore(), isFile: boolean;
collapsed: false, file?: File;
}), children?: Item[];
methods: {
getIconForDiffType(pType) {
const diffTypes = {
1: {name: 'octicon-diff-added', classes: ['text', 'green']},
2: {name: 'octicon-diff-modified', classes: ['text', 'yellow']},
3: {name: 'octicon-diff-removed', classes: ['text', 'red']},
4: {name: 'octicon-diff-renamed', classes: ['text', 'teal']},
5: {name: 'octicon-diff-renamed', classes: ['text', 'green']}, // there is no octicon for copied, so renamed should be ok
};
return diffTypes[pType];
},
},
}; };
defineProps<{
item: Item,
}>();
const store = diffTreeStore();
const collapsed = ref(false);
function getIconForDiffType(pType) {
const diffTypes = {
1: {name: 'octicon-diff-added', classes: ['text', 'green']},
2: {name: 'octicon-diff-modified', classes: ['text', 'yellow']},
3: {name: 'octicon-diff-removed', classes: ['text', 'red']},
4: {name: 'octicon-diff-renamed', classes: ['text', 'teal']},
5: {name: 'octicon-diff-renamed', classes: ['text', 'green']}, // there is no octicon for copied, so renamed should be ok
};
return diffTypes[pType];
}
</script> </script>
<template> <template>
<!--title instead of tooltip above as the tooltip needs too much work with the current methods, i.e. not being loaded or staying open for "too long"--> <!--title instead of tooltip above as the tooltip needs too much work with the current methods, i.e. not being loaded or staying open for "too long"-->
<a <a

View File

@ -1,84 +1,83 @@
<script lang="ts"> <script lang="ts" setup>
import {computed, onMounted, onUnmounted, ref, watch} from 'vue';
import {SvgIcon} from '../svg.ts'; import {SvgIcon} from '../svg.ts';
import {toggleElem} from '../utils/dom.ts'; import {toggleElem} from '../utils/dom.ts';
const {csrfToken, pageData} = window.config; const {csrfToken, pageData} = window.config;
export default { const mergeForm = ref(pageData.pullRequestMergeForm);
components: {SvgIcon},
data: () => ({
csrfToken,
mergeForm: pageData.pullRequestMergeForm,
mergeTitleFieldValue: '', const mergeTitleFieldValue = ref('');
mergeMessageFieldValue: '', const mergeMessageFieldValue = ref('');
deleteBranchAfterMerge: false, const deleteBranchAfterMerge = ref(false);
autoMergeWhenSucceed: false, const autoMergeWhenSucceed = ref(false);
mergeStyle: '', const mergeStyle = ref('');
mergeStyleDetail: { // dummy only, these values will come from one of the mergeForm.mergeStyles const mergeStyleDetail = ref({
hideMergeMessageTexts: false, hideMergeMessageTexts: false,
textDoMerge: '', textDoMerge: '',
mergeTitleFieldText: '', mergeTitleFieldText: '',
mergeMessageFieldText: '', mergeMessageFieldText: '',
hideAutoMerge: false, hideAutoMerge: false,
}, });
mergeStyleAllowedCount: 0,
showMergeStyleMenu: false, const mergeStyleAllowedCount = ref(0);
showActionForm: false,
}),
computed: {
mergeButtonStyleClass() {
if (this.mergeForm.allOverridableChecksOk) return 'primary';
return this.autoMergeWhenSucceed ? 'primary' : 'red';
},
forceMerge() {
return this.mergeForm.canMergeNow && !this.mergeForm.allOverridableChecksOk;
},
},
watch: {
mergeStyle(val) {
this.mergeStyleDetail = this.mergeForm.mergeStyles.find((e) => e.name === val);
for (const elem of document.querySelectorAll('[data-pull-merge-style]')) {
toggleElem(elem, elem.getAttribute('data-pull-merge-style') === val);
}
},
},
created() {
this.mergeStyleAllowedCount = this.mergeForm.mergeStyles.reduce((v, msd) => v + (msd.allowed ? 1 : 0), 0);
let mergeStyle = this.mergeForm.mergeStyles.find((e) => e.allowed && e.name === this.mergeForm.defaultMergeStyle)?.name; const showMergeStyleMenu = ref(false);
if (!mergeStyle) mergeStyle = this.mergeForm.mergeStyles.find((e) => e.allowed)?.name; const showActionForm = ref(false);
this.switchMergeStyle(mergeStyle, !this.mergeForm.canMergeNow);
}, const mergeButtonStyleClass = computed(() => {
mounted() { if (mergeForm.value.allOverridableChecksOk) return 'primary';
document.addEventListener('mouseup', this.hideMergeStyleMenu); return autoMergeWhenSucceed.value ? 'primary' : 'red';
}, });
unmounted() {
document.removeEventListener('mouseup', this.hideMergeStyleMenu); const forceMerge = computed(() => {
}, return mergeForm.value.canMergeNow && !mergeForm.value.allOverridableChecksOk;
methods: { });
hideMergeStyleMenu() {
this.showMergeStyleMenu = false; watch(mergeStyle, (val) => {
}, mergeStyleDetail.value = mergeForm.value.mergeStyles.find((e) => e.name === val);
toggleActionForm(show) { for (const elem of document.querySelectorAll('[data-pull-merge-style]')) {
this.showActionForm = show; toggleElem(elem, elem.getAttribute('data-pull-merge-style') === val);
if (!show) return; }
this.deleteBranchAfterMerge = this.mergeForm.defaultDeleteBranchAfterMerge; });
this.mergeTitleFieldValue = this.mergeStyleDetail.mergeTitleFieldText;
this.mergeMessageFieldValue = this.mergeStyleDetail.mergeMessageFieldText; onMounted(() => {
}, mergeStyleAllowedCount.value = mergeForm.value.mergeStyles.reduce((v, msd) => v + (msd.allowed ? 1 : 0), 0);
switchMergeStyle(name, autoMerge = false) {
this.mergeStyle = name; let mergeStyle = mergeForm.value.mergeStyles.find((e) => e.allowed && e.name === mergeForm.value.defaultMergeStyle)?.name;
this.autoMergeWhenSucceed = autoMerge; if (!mergeStyle) mergeStyle = mergeForm.value.mergeStyles.find((e) => e.allowed)?.name;
}, switchMergeStyle(mergeStyle, !mergeForm.value.canMergeNow);
clearMergeMessage() {
this.mergeMessageFieldValue = this.mergeForm.defaultMergeMessage; document.addEventListener('mouseup', hideMergeStyleMenu);
}, });
},
}; onUnmounted(() => {
document.removeEventListener('mouseup', hideMergeStyleMenu);
});
function hideMergeStyleMenu() {
showMergeStyleMenu.value = false;
}
function toggleActionForm(show: boolean) {
showActionForm.value = show;
if (!show) return;
deleteBranchAfterMerge.value = mergeForm.value.defaultDeleteBranchAfterMerge;
mergeTitleFieldValue.value = mergeStyleDetail.value.mergeTitleFieldText;
mergeMessageFieldValue.value = mergeStyleDetail.value.mergeMessageFieldText;
}
function switchMergeStyle(name, autoMerge = false) {
mergeStyle.value = name;
autoMergeWhenSucceed.value = autoMerge;
}
function clearMergeMessage() {
mergeMessageFieldValue.value = mergeForm.value.defaultMergeMessage;
}
</script> </script>
<template> <template>
<!-- <!--
if this component is shown, either the user is an admin (can do a merge without checks), or they are a writer who has the permission to do a merge if this component is shown, either the user is an admin (can do a merge without checks), or they are a writer who has the permission to do a merge
@ -186,6 +185,7 @@ export default {
</div> </div>
</div> </div>
</template> </template>
<style scoped> <style scoped>
/* to keep UI the same, at the moment we are still using some Fomantic UI styles, but we do not use their scripts, so we need to fine tune some styles */ /* to keep UI the same, at the moment we are still using some Fomantic UI styles, but we do not use their scripts, so we need to fine tune some styles */
.ui.dropdown .menu.show { .ui.dropdown .menu.show {

View File

@ -1,68 +1,62 @@
<script lang="ts"> <script lang="ts" setup>
import {VueBarGraph} from 'vue-bar-graph'; import {VueBarGraph} from 'vue-bar-graph';
import {createApp} from 'vue'; import {computed, onMounted, ref} from 'vue';
const sfc = { const colors = ref({
components: {VueBarGraph}, barColor: 'green',
data: () => ({ textColor: 'black',
colors: { textAltColor: 'white',
barColor: 'green', });
textColor: 'black',
textAltColor: 'white',
},
// possible keys: // possible keys:
// * avatar_link: (...) // * avatar_link: (...)
// * commits: (...) // * commits: (...)
// * home_link: (...) // * home_link: (...)
// * login: (...) // * login: (...)
// * name: (...) // * name: (...)
activityTopAuthors: window.config.pageData.repoActivityTopAuthors || [], const activityTopAuthors = window.config.pageData.repoActivityTopAuthors || [];
}),
computed: {
graphPoints() {
return this.activityTopAuthors.map((item) => {
return {
value: item.commits,
label: item.name,
};
});
},
graphAuthors() {
return this.activityTopAuthors.map((item, idx) => {
return {
position: idx + 1,
...item,
};
});
},
graphWidth() {
return this.activityTopAuthors.length * 40;
},
},
mounted() {
const refStyle = window.getComputedStyle(this.$refs.style);
const refAltStyle = window.getComputedStyle(this.$refs.altStyle);
this.colors.barColor = refStyle.backgroundColor; const graphPoints = computed(() => {
this.colors.textColor = refStyle.color; return activityTopAuthors.map((item) => {
this.colors.textAltColor = refAltStyle.color; return {
}, value: item.commits,
}; label: item.name,
};
});
});
export function initRepoActivityTopAuthorsChart() { const graphAuthors = computed(() => {
const el = document.querySelector('#repo-activity-top-authors-chart'); return activityTopAuthors.map((item, idx) => {
if (el) { return {
createApp(sfc).mount(el); position: idx + 1,
} ...item,
} };
});
});
export default sfc; // activate the IDE's Vue plugin const graphWidth = computed(() => {
return activityTopAuthors.length * 40;
});
const styleElement = ref<HTMLElement | null>(null);
const altStyleElement = ref<HTMLElement | null>(null);
onMounted(() => {
const refStyle = window.getComputedStyle(styleElement.value);
const refAltStyle = window.getComputedStyle(altStyleElement.value);
colors.value = {
barColor: refStyle.backgroundColor,
textColor: refStyle.color,
textAltColor: refAltStyle.color,
};
});
</script> </script>
<template> <template>
<div> <div>
<div class="activity-bar-graph" ref="style" style="width: 0; height: 0;"/> <div class="activity-bar-graph" ref="styleElement" style="width: 0; height: 0;"/>
<div class="activity-bar-graph-alt" ref="altStyle" style="width: 0; height: 0;"/> <div class="activity-bar-graph-alt" ref="altStyleElement" style="width: 0; height: 0;"/>
<vue-bar-graph <vue-bar-graph
:points="graphPoints" :points="graphPoints"
:show-x-axis="true" :show-x-axis="true"

View File

@ -1,4 +1,4 @@
<script lang="ts"> <script lang="ts" setup>
import {SvgIcon} from '../svg.ts'; import {SvgIcon} from '../svg.ts';
import { import {
Chart, Chart,
@ -15,10 +15,12 @@ import {
startDaysBetween, startDaysBetween,
firstStartDateAfterDate, firstStartDateAfterDate,
fillEmptyStartDaysWithZeroes, fillEmptyStartDaysWithZeroes,
type DayData,
} from '../utils/time.ts'; } from '../utils/time.ts';
import {chartJsColors} from '../utils/color.ts'; import {chartJsColors} from '../utils/color.ts';
import {sleep} from '../utils.ts'; import {sleep} from '../utils.ts';
import 'chartjs-adapter-dayjs-4/dist/chartjs-adapter-dayjs-4.esm'; import 'chartjs-adapter-dayjs-4/dist/chartjs-adapter-dayjs-4.esm';
import {onMounted, ref} from 'vue';
const {pageData} = window.config; const {pageData} = window.config;
@ -34,114 +36,110 @@ Chart.register(
Filler, Filler,
); );
export default { defineProps<{
components: {ChartLine, SvgIcon}, locale: {
props: { loadingTitle: string;
locale: { loadingTitleFailed: string;
type: Object, loadingInfo: string;
required: true, };
}, }>();
},
data: () => ({ const isLoading = ref(false);
isLoading: false, const errorText = ref('');
errorText: '', const repoLink = ref(pageData.repoLink || []);
repoLink: pageData.repoLink || [], const data = ref<DayData[]>([]);
data: [],
}), onMounted(() => {
mounted() { fetchGraphData();
this.fetchGraphData(); });
},
methods: { async function fetchGraphData() {
async fetchGraphData() { isLoading.value = true;
this.isLoading = true; try {
try { let response: Response;
let response; do {
do { response = await GET(`${repoLink.value}/activity/code-frequency/data`);
response = await GET(`${this.repoLink}/activity/code-frequency/data`); if (response.status === 202) {
if (response.status === 202) { await sleep(1000); // wait for 1 second before retrying
await sleep(1000); // wait for 1 second before retrying
}
} while (response.status === 202);
if (response.ok) {
this.data = await response.json();
const weekValues = Object.values(this.data);
const start = weekValues[0].week;
const end = firstStartDateAfterDate(new Date());
const startDays = startDaysBetween(start, end);
this.data = fillEmptyStartDaysWithZeroes(startDays, this.data);
this.errorText = '';
} else {
this.errorText = response.statusText;
}
} catch (err) {
this.errorText = err.message;
} finally {
this.isLoading = false;
} }
}, } while (response.status === 202);
if (response.ok) {
data.value = await response.json();
const weekValues = Object.values(data.value);
const start = weekValues[0].week;
const end = firstStartDateAfterDate(new Date());
const startDays = startDaysBetween(start, end);
data.value = fillEmptyStartDaysWithZeroes(startDays, data.value);
errorText.value = '';
} else {
errorText.value = response.statusText;
}
} catch (err) {
errorText.value = err.message;
} finally {
isLoading.value = false;
}
}
toGraphData(data) { function toGraphData(data) {
return { return {
datasets: [ datasets: [
{ {
data: data.map((i) => ({x: i.week, y: i.additions})), data: data.map((i) => ({x: i.week, y: i.additions})),
pointRadius: 0, pointRadius: 0,
pointHitRadius: 0, pointHitRadius: 0,
fill: true, fill: true,
label: 'Additions', label: 'Additions',
backgroundColor: chartJsColors['additions'], backgroundColor: chartJsColors['additions'],
borderWidth: 0, borderWidth: 0,
tension: 0.3, tension: 0.3,
}, },
{ {
data: data.map((i) => ({x: i.week, y: -i.deletions})), data: data.map((i) => ({x: i.week, y: -i.deletions})),
pointRadius: 0, pointRadius: 0,
pointHitRadius: 0, pointHitRadius: 0,
fill: true, fill: true,
label: 'Deletions', label: 'Deletions',
backgroundColor: chartJsColors['deletions'], backgroundColor: chartJsColors['deletions'],
borderWidth: 0, borderWidth: 0,
tension: 0.3, tension: 0.3,
}, },
], ],
}; };
}, }
getOptions() { const options = {
return { responsive: true,
responsive: true, maintainAspectRatio: false,
maintainAspectRatio: false, animation: true,
animation: true, plugins: {
plugins: { legend: {
legend: { display: true,
display: true, },
}, },
}, scales: {
scales: { x: {
x: { type: 'time',
type: 'time', grid: {
grid: { display: false,
display: false, },
}, time: {
time: { minUnit: 'month',
minUnit: 'month', },
}, ticks: {
ticks: { maxRotation: 0,
maxRotation: 0, maxTicksLimit: 12,
maxTicksLimit: 12, },
}, },
}, y: {
y: { ticks: {
ticks: { maxTicksLimit: 6,
maxTicksLimit: 6, },
},
},
},
};
}, },
}, },
}; };
</script> </script>
<template> <template>
<div> <div>
<div class="ui header tw-flex tw-items-center tw-justify-between"> <div class="ui header tw-flex tw-items-center tw-justify-between">
@ -160,11 +158,12 @@ export default {
</div> </div>
<ChartLine <ChartLine
v-memo="data" v-if="data.length !== 0" v-memo="data" v-if="data.length !== 0"
:data="toGraphData(data)" :options="getOptions()" :data="toGraphData(data)" :options="options"
/> />
</div> </div>
</div> </div>
</template> </template>
<style scoped> <style scoped>
.main-graph { .main-graph {
height: 440px; height: 440px;
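The fetch loop above keeps polling while the backend answers 202 (data still being generated). A self-contained sketch of the same pattern using plain fetch; GET and sleep in the diff are the project's own helpers:

async function pollJson<T>(url: string, intervalMs = 1000): Promise<T> {
  let response: Response;
  do {
    response = await fetch(url);
    // 202 Accepted: the server is still computing the data, retry after a short delay
    if (response.status === 202) await new Promise((resolve) => setTimeout(resolve, intervalMs));
  } while (response.status === 202);
  if (!response.ok) throw new Error(response.statusText);
  return await response.json() as T;
}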
View File
@ -1,4 +1,4 @@
<script lang="ts"> <script lang="ts" setup>
import {SvgIcon} from '../svg.ts'; import {SvgIcon} from '../svg.ts';
import { import {
Chart, Chart,
@ -6,6 +6,7 @@ import {
BarElement, BarElement,
LinearScale, LinearScale,
TimeScale, TimeScale,
type ChartOptions,
} from 'chart.js'; } from 'chart.js';
import {GET} from '../modules/fetch.ts'; import {GET} from '../modules/fetch.ts';
import {Bar} from 'vue-chartjs'; import {Bar} from 'vue-chartjs';
@ -13,10 +14,12 @@ import {
startDaysBetween, startDaysBetween,
firstStartDateAfterDate, firstStartDateAfterDate,
fillEmptyStartDaysWithZeroes, fillEmptyStartDaysWithZeroes,
type DayData,
} from '../utils/time.ts'; } from '../utils/time.ts';
import {chartJsColors} from '../utils/color.ts'; import {chartJsColors} from '../utils/color.ts';
import {sleep} from '../utils.ts'; import {sleep} from '../utils.ts';
import 'chartjs-adapter-dayjs-4/dist/chartjs-adapter-dayjs-4.esm'; import 'chartjs-adapter-dayjs-4/dist/chartjs-adapter-dayjs-4.esm';
import {onMounted, ref} from 'vue';
const {pageData} = window.config; const {pageData} = window.config;
@ -30,95 +33,91 @@ Chart.register(
Tooltip, Tooltip,
); );
export default { defineProps<{
components: {Bar, SvgIcon}, locale: {
props: { loadingTitle: string;
locale: { loadingTitleFailed: string;
type: Object, loadingInfo: string;
required: true, };
}, }>();
},
data: () => ({ const isLoading = ref(false);
isLoading: false, const errorText = ref('');
errorText: '', const repoLink = ref(pageData.repoLink || []);
repoLink: pageData.repoLink || [], const data = ref<DayData[]>([]);
data: [],
}), onMounted(() => {
mounted() { fetchGraphData();
this.fetchGraphData(); });
},
methods: { async function fetchGraphData() {
async fetchGraphData() { isLoading.value = true;
this.isLoading = true; try {
try { let response: Response;
let response; do {
do { response = await GET(`${repoLink.value}/activity/recent-commits/data`);
response = await GET(`${this.repoLink}/activity/recent-commits/data`); if (response.status === 202) {
if (response.status === 202) { await sleep(1000); // wait for 1 second before retrying
await sleep(1000); // wait for 1 second before retrying
}
} while (response.status === 202);
if (response.ok) {
const data = await response.json();
const start = Object.values(data)[0].week;
const end = firstStartDateAfterDate(new Date());
const startDays = startDaysBetween(start, end);
this.data = fillEmptyStartDaysWithZeroes(startDays, data).slice(-52);
this.errorText = '';
} else {
this.errorText = response.statusText;
}
} catch (err) {
this.errorText = err.message;
} finally {
this.isLoading = false;
} }
}, } while (response.status === 202);
if (response.ok) {
const data = await response.json();
const start = Object.values(data)[0].week;
const end = firstStartDateAfterDate(new Date());
const startDays = startDaysBetween(start, end);
data.value = fillEmptyStartDaysWithZeroes(startDays, data).slice(-52);
errorText.value = '';
} else {
errorText.value = response.statusText;
}
} catch (err) {
errorText.value = err.message;
} finally {
isLoading.value = false;
}
}
toGraphData(data) { function toGraphData(data) {
return { return {
datasets: [ datasets: [
{ {
data: data.map((i) => ({x: i.week, y: i.commits})), data: data.map((i) => ({x: i.week, y: i.commits})),
label: 'Commits', label: 'Commits',
backgroundColor: chartJsColors['commits'], backgroundColor: chartJsColors['commits'],
borderWidth: 0, borderWidth: 0,
tension: 0.3, tension: 0.3,
}, },
], ],
}; };
}, }
getOptions() { const options = {
return { responsive: true,
responsive: true, maintainAspectRatio: false,
maintainAspectRatio: false, animation: true,
animation: true, scales: {
scales: { x: {
x: { type: 'time',
type: 'time', grid: {
grid: { display: false,
display: false, },
}, time: {
time: { minUnit: 'week',
minUnit: 'week', },
}, ticks: {
ticks: { maxRotation: 0,
maxRotation: 0, maxTicksLimit: 52,
maxTicksLimit: 52, },
}, },
}, y: {
y: { ticks: {
ticks: { maxTicksLimit: 6,
maxTicksLimit: 6, },
},
},
},
};
}, },
}, },
}; } satisfies ChartOptions;
</script> </script>
<template> <template>
<div> <div>
<div class="ui header tw-flex tw-items-center tw-justify-between"> <div class="ui header tw-flex tw-items-center tw-justify-between">
@ -137,7 +136,7 @@ export default {
</div> </div>
<Bar <Bar
v-memo="data" v-if="data.length !== 0" v-memo="data" v-if="data.length !== 0"
:data="toGraphData(data)" :options="getOptions()" :data="toGraphData(data)" :options="options"
/> />
</div> </div>
</div> </div>
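The options object above is typed with "satisfies ChartOptions" instead of a plain annotation. A generic illustration of the difference, using made-up types:

type Options = {responsive?: boolean; maintainAspectRatio?: boolean};

// "satisfies" checks the literal against Options without widening it
const opts = {responsive: true, maintainAspectRatio: false} satisfies Options;
// opts.responsive keeps the literal type true, and a misspelled key fails to compile;
// a ": Options" annotation would also catch the typo but would widen every property.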
View File
@ -1,78 +1,60 @@
<script lang="ts"> <script lang="ts" setup>
import {computed, onMounted, onUnmounted} from 'vue';
import {hideElem, showElem} from '../utils/dom.ts'; import {hideElem, showElem} from '../utils/dom.ts';
const sfc = { const props = defineProps<{
props: { isAdmin: boolean;
isAdmin: { noAccessLabel: string;
type: Boolean, readLabel: string;
required: true, writeLabel: string;
}, }>();
noAccessLabel: {
type: String,
required: true,
},
readLabel: {
type: String,
required: true,
},
writeLabel: {
type: String,
required: true,
},
},
computed: { const categories = computed(() => {
categories() { const categories = [
const categories = [ 'activitypub',
'activitypub', ];
]; if (props.isAdmin) {
if (this.isAdmin) { categories.push('admin');
categories.push('admin'); }
} categories.push(
categories.push( 'issue',
'issue', 'misc',
'misc', 'notification',
'notification', 'organization',
'organization', 'package',
'package', 'repository',
'repository', 'user');
'user'); return categories;
return categories; });
},
},
mounted() { onMounted(() => {
document.querySelector('#scoped-access-submit').addEventListener('click', this.onClickSubmit); document.querySelector('#scoped-access-submit').addEventListener('click', onClickSubmit);
}, });
unmounted() { onUnmounted(() => {
document.querySelector('#scoped-access-submit').removeEventListener('click', this.onClickSubmit); document.querySelector('#scoped-access-submit').removeEventListener('click', onClickSubmit);
}, });
methods: { function onClickSubmit(e) {
onClickSubmit(e) { e.preventDefault();
e.preventDefault();
const warningEl = document.querySelector('#scoped-access-warning'); const warningEl = document.querySelector('#scoped-access-warning');
// check that at least one scope has been selected // check that at least one scope has been selected
for (const el of document.querySelectorAll('.access-token-select')) { for (const el of document.querySelectorAll<HTMLInputElement>('.access-token-select')) {
if (el.value) { if (el.value) {
// Hide the error if it was visible from previous attempt. // Hide the error if it was visible from previous attempt.
hideElem(warningEl); hideElem(warningEl);
// Submit the form. // Submit the form.
document.querySelector('#scoped-access-form').submit(); document.querySelector<HTMLFormElement>('#scoped-access-form').submit();
// Don't show the warning. // Don't show the warning.
return; return;
} }
} }
// no scopes selected, show validation error // no scopes selected, show validation error
showElem(warningEl); showElem(warningEl);
}, }
},
};
export default sfc;
</script> </script>
<template> <template>
<div v-for="category in categories" :key="category" class="field tw-pl-1 tw-pb-1 access-token-category"> <div v-for="category in categories" :key="category" class="field tw-pl-1 tw-pb-1 access-token-category">
<label class="category-label" :for="'access-token-scope-' + category"> <label class="category-label" :for="'access-token-scope-' + category">
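A minimal sketch of the typed defineProps pattern these <script setup> conversions rely on; the prop names here are illustrative:

import {computed} from 'vue';

// defineProps is a compiler macro in <script setup>, so it needs no import
const props = defineProps<{isAdmin: boolean; noAccessLabel: string}>();

const categories = computed(() => (props.isAdmin ? ['activitypub', 'admin', 'issue'] : ['activitypub', 'issue']));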
View File
@ -3,14 +3,19 @@ import '@github/text-expander-element';
import $ from 'jquery'; import $ from 'jquery';
import {attachTribute} from '../tribute.ts'; import {attachTribute} from '../tribute.ts';
import {hideElem, showElem, autosize, isElemVisible} from '../../utils/dom.ts'; import {hideElem, showElem, autosize, isElemVisible} from '../../utils/dom.ts';
import {initEasyMDEPaste, initTextareaEvents} from './EditorUpload.ts'; import {
EventUploadStateChanged,
initEasyMDEPaste,
initTextareaEvents,
triggerUploadStateChanged,
} from './EditorUpload.ts';
import {handleGlobalEnterQuickSubmit} from './QuickSubmit.ts'; import {handleGlobalEnterQuickSubmit} from './QuickSubmit.ts';
import {renderPreviewPanelContent} from '../repo-editor.ts'; import {renderPreviewPanelContent} from '../repo-editor.ts';
import {easyMDEToolbarActions} from './EasyMDEToolbarActions.ts'; import {easyMDEToolbarActions} from './EasyMDEToolbarActions.ts';
import {initTextExpander} from './TextExpander.ts'; import {initTextExpander} from './TextExpander.ts';
import {showErrorToast} from '../../modules/toast.ts'; import {showErrorToast} from '../../modules/toast.ts';
import {POST} from '../../modules/fetch.ts'; import {POST} from '../../modules/fetch.ts';
import {initTextareaMarkdown} from './EditorMarkdown.ts'; import {EventEditorContentChanged, initTextareaMarkdown, triggerEditorContentChanged} from './EditorMarkdown.ts';
import {DropzoneCustomEventReloadFiles, initDropzone} from '../dropzone.ts'; import {DropzoneCustomEventReloadFiles, initDropzone} from '../dropzone.ts';
let elementIdCounter = 0; let elementIdCounter = 0;
@ -37,7 +42,34 @@ export function validateTextareaNonEmpty(textarea) {
return true; return true;
} }
class ComboMarkdownEditor { export class ComboMarkdownEditor {
static EventEditorContentChanged = EventEditorContentChanged;
static EventUploadStateChanged = EventUploadStateChanged;
public container : HTMLElement;
// TODO: use correct types to replace these "any" types
options: any;
tabEditor: HTMLElement;
tabPreviewer: HTMLElement;
easyMDE: any;
easyMDEToolbarActions: any;
easyMDEToolbarDefault: any;
textarea: HTMLTextAreaElement & {_giteaComboMarkdownEditor: any};
textareaMarkdownToolbar: HTMLElement;
textareaAutosize: any;
dropzone: HTMLElement;
attachedDropzoneInst: any;
previewUrl: string;
previewContext: string;
previewMode: string;
previewWiki: boolean;
constructor(container, options = {}) { constructor(container, options = {}) {
container._giteaComboMarkdownEditor = this; container._giteaComboMarkdownEditor = this;
this.options = options; this.options = options;
@ -63,14 +95,13 @@ class ComboMarkdownEditor {
setupContainer() { setupContainer() {
initTextExpander(this.container.querySelector('text-expander')); initTextExpander(this.container.querySelector('text-expander'));
this.container.addEventListener('ce-editor-content-changed', (e) => this.options?.onContentChanged?.(this, e));
} }
setupTextarea() { setupTextarea() {
this.textarea = this.container.querySelector('.markdown-text-editor'); this.textarea = this.container.querySelector('.markdown-text-editor');
this.textarea._giteaComboMarkdownEditor = this; this.textarea._giteaComboMarkdownEditor = this;
this.textarea.id = `_combo_markdown_editor_${String(elementIdCounter++)}`; this.textarea.id = `_combo_markdown_editor_${String(elementIdCounter++)}`;
this.textarea.addEventListener('input', (e) => this.options?.onContentChanged?.(this, e)); this.textarea.addEventListener('input', () => triggerEditorContentChanged(this.container));
this.applyEditorHeights(this.textarea, this.options.editorHeights); this.applyEditorHeights(this.textarea, this.options.editorHeights);
if (this.textarea.getAttribute('data-disable-autosize') !== 'true') { if (this.textarea.getAttribute('data-disable-autosize') !== 'true') {
@ -115,15 +146,21 @@ class ComboMarkdownEditor {
async setupDropzone() { async setupDropzone() {
const dropzoneParentContainer = this.container.getAttribute('data-dropzone-parent-container'); const dropzoneParentContainer = this.container.getAttribute('data-dropzone-parent-container');
if (dropzoneParentContainer) { if (!dropzoneParentContainer) return;
this.dropzone = this.container.closest(this.container.getAttribute('data-dropzone-parent-container'))?.querySelector('.dropzone'); this.dropzone = this.container.closest(this.container.getAttribute('data-dropzone-parent-container'))?.querySelector('.dropzone');
if (this.dropzone) this.attachedDropzoneInst = await initDropzone(this.dropzone); if (!this.dropzone) return;
}
this.attachedDropzoneInst = await initDropzone(this.dropzone);
// dropzone events
// * "processing" means a file is being uploaded
// * "queuecomplete" means all files have been uploaded
this.attachedDropzoneInst.on('processing', () => triggerUploadStateChanged(this.container));
this.attachedDropzoneInst.on('queuecomplete', () => triggerUploadStateChanged(this.container));
} }
dropzoneGetFiles() { dropzoneGetFiles() {
if (!this.dropzone) return null; if (!this.dropzone) return null;
return Array.from(this.dropzone.querySelectorAll('.files [name=files]'), (el) => el.value); return Array.from(this.dropzone.querySelectorAll<HTMLInputElement>('.files [name=files]'), (el) => el.value);
} }
dropzoneReloadFiles() { dropzoneReloadFiles() {
@ -137,8 +174,13 @@ class ComboMarkdownEditor {
this.attachedDropzoneInst.emit(DropzoneCustomEventReloadFiles); this.attachedDropzoneInst.emit(DropzoneCustomEventReloadFiles);
} }
isUploading() {
if (!this.dropzone) return false;
return this.attachedDropzoneInst.getQueuedFiles().length || this.attachedDropzoneInst.getUploadingFiles().length;
}
setupTab() { setupTab() {
const tabs = this.container.querySelectorAll('.tabular.menu > .item'); const tabs = this.container.querySelectorAll<HTMLElement>('.tabular.menu > .item');
// Fomantic Tab requires the "data-tab" to be globally unique. // Fomantic Tab requires the "data-tab" to be globally unique.
// So here it uses our defined "data-tab-for" and "data-tab-panel" to generate the "data-tab" attribute for Fomantic. // So here it uses our defined "data-tab-for" and "data-tab-panel" to generate the "data-tab" attribute for Fomantic.
@ -170,7 +212,7 @@ class ComboMarkdownEditor {
formData.append('mode', this.previewMode); formData.append('mode', this.previewMode);
formData.append('context', this.previewContext); formData.append('context', this.previewContext);
formData.append('text', this.value()); formData.append('text', this.value());
formData.append('wiki', this.previewWiki); formData.append('wiki', String(this.previewWiki));
const response = await POST(this.previewUrl, {data: formData}); const response = await POST(this.previewUrl, {data: formData});
const data = await response.text(); const data = await response.text();
renderPreviewPanelContent($(panelPreviewer), data); renderPreviewPanelContent($(panelPreviewer), data);
@ -237,24 +279,24 @@ class ComboMarkdownEditor {
easyMDEOpt.toolbar = this.parseEasyMDEToolbar(EasyMDE, easyMDEOpt.toolbar ?? this.easyMDEToolbarDefault); easyMDEOpt.toolbar = this.parseEasyMDEToolbar(EasyMDE, easyMDEOpt.toolbar ?? this.easyMDEToolbarDefault);
this.easyMDE = new EasyMDE(easyMDEOpt); this.easyMDE = new EasyMDE(easyMDEOpt);
this.easyMDE.codemirror.on('change', (...args) => {this.options?.onContentChanged?.(this, ...args)}); this.easyMDE.codemirror.on('change', () => triggerEditorContentChanged(this.container));
this.easyMDE.codemirror.setOption('extraKeys', { this.easyMDE.codemirror.setOption('extraKeys', {
'Cmd-Enter': (cm) => handleGlobalEnterQuickSubmit(cm.getTextArea()), 'Cmd-Enter': (cm) => handleGlobalEnterQuickSubmit(cm.getTextArea()),
'Ctrl-Enter': (cm) => handleGlobalEnterQuickSubmit(cm.getTextArea()), 'Ctrl-Enter': (cm) => handleGlobalEnterQuickSubmit(cm.getTextArea()),
Enter: (cm) => { Enter: (cm) => {
const tributeContainer = document.querySelector('.tribute-container'); const tributeContainer = document.querySelector<HTMLElement>('.tribute-container');
if (!tributeContainer || tributeContainer.style.display === 'none') { if (!tributeContainer || tributeContainer.style.display === 'none') {
cm.execCommand('newlineAndIndent'); cm.execCommand('newlineAndIndent');
} }
}, },
Up: (cm) => { Up: (cm) => {
const tributeContainer = document.querySelector('.tribute-container'); const tributeContainer = document.querySelector<HTMLElement>('.tribute-container');
if (!tributeContainer || tributeContainer.style.display === 'none') { if (!tributeContainer || tributeContainer.style.display === 'none') {
return cm.execCommand('goLineUp'); return cm.execCommand('goLineUp');
} }
}, },
Down: (cm) => { Down: (cm) => {
const tributeContainer = document.querySelector('.tribute-container'); const tributeContainer = document.querySelector<HTMLElement>('.tribute-container');
if (!tributeContainer || tributeContainer.style.display === 'none') { if (!tributeContainer || tributeContainer.style.display === 'none') {
return cm.execCommand('goLineDown'); return cm.execCommand('goLineDown');
} }
@ -314,13 +356,7 @@ export function getComboMarkdownEditor(el) {
return el?._giteaComboMarkdownEditor; return el?._giteaComboMarkdownEditor;
} }
export async function initComboMarkdownEditor(container, options = {}) { export async function initComboMarkdownEditor(container: HTMLElement, options = {}) {
if (container instanceof $) {
if (container.length !== 1) {
throw new Error('initComboMarkdownEditor: container must be a single element');
}
container = container[0];
}
if (!container) { if (!container) {
throw new Error('initComboMarkdownEditor: container is null'); throw new Error('initComboMarkdownEditor: container is null');
} }
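The editor now reports state changes through bubbling DOM CustomEvents instead of per-instance callback options. A sketch of the consumer side; the event name matches EventUploadStateChanged added above, while the selector is assumed:

const EventUploadStateChanged = 'ce-upload-state-changed';

function triggerUploadStateChanged(target: HTMLElement) {
  target.dispatchEvent(new CustomEvent(EventUploadStateChanged, {bubbles: true}));
}

// any ancestor of the editor container can react without holding the editor instance
const container = document.querySelector<HTMLElement>('.combo-markdown-editor');
container?.addEventListener(EventUploadStateChanged, () => {
  // e.g. toggle a submit button while dropzone files are queued or uploading
});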
View File
@ -1,5 +1,7 @@
export const EventEditorContentChanged = 'ce-editor-content-changed';
export function triggerEditorContentChanged(target) { export function triggerEditorContentChanged(target) {
target.dispatchEvent(new CustomEvent('ce-editor-content-changed', {bubbles: true})); target.dispatchEvent(new CustomEvent(EventEditorContentChanged, {bubbles: true}));
} }
function handleIndentSelection(textarea, e) { function handleIndentSelection(textarea, e) {
View File
@ -7,9 +7,16 @@ import {
DropzoneCustomEventUploadDone, DropzoneCustomEventUploadDone,
generateMarkdownLinkForAttachment, generateMarkdownLinkForAttachment,
} from '../dropzone.ts'; } from '../dropzone.ts';
import type CodeMirror from 'codemirror';
let uploadIdCounter = 0; let uploadIdCounter = 0;
export const EventUploadStateChanged = 'ce-upload-state-changed';
export function triggerUploadStateChanged(target) {
target.dispatchEvent(new CustomEvent(EventUploadStateChanged, {bubbles: true}));
}
function uploadFile(dropzoneEl, file) { function uploadFile(dropzoneEl, file) {
return new Promise((resolve) => { return new Promise((resolve) => {
const curUploadId = uploadIdCounter++; const curUploadId = uploadIdCounter++;
@ -18,7 +25,7 @@ function uploadFile(dropzoneEl, file) {
const onUploadDone = ({file}) => { const onUploadDone = ({file}) => {
if (file._giteaUploadId === curUploadId) { if (file._giteaUploadId === curUploadId) {
dropzoneInst.off(DropzoneCustomEventUploadDone, onUploadDone); dropzoneInst.off(DropzoneCustomEventUploadDone, onUploadDone);
resolve(); resolve(file);
} }
}; };
dropzoneInst.on(DropzoneCustomEventUploadDone, onUploadDone); dropzoneInst.on(DropzoneCustomEventUploadDone, onUploadDone);
@ -27,6 +34,8 @@ function uploadFile(dropzoneEl, file) {
} }
class TextareaEditor { class TextareaEditor {
editor : HTMLTextAreaElement;
constructor(editor) { constructor(editor) {
this.editor = editor; this.editor = editor;
} }
@ -61,6 +70,8 @@ class TextareaEditor {
} }
class CodeMirrorEditor { class CodeMirrorEditor {
editor: CodeMirror.EditorFromTextArea;
constructor(editor) { constructor(editor) {
this.editor = editor; this.editor = editor;
} }
View File
@ -1,5 +1,41 @@
import {matchEmoji, matchMention} from '../../utils/match.ts'; import {matchEmoji, matchMention, matchIssue} from '../../utils/match.ts';
import {emojiString} from '../emoji.ts'; import {emojiString} from '../emoji.ts';
import {svg} from '../../svg.ts';
import {parseIssueHref} from '../../utils.ts';
import {createElementFromAttrs, createElementFromHTML} from '../../utils/dom.ts';
import {getIssueColor, getIssueIcon} from '../issue.ts';
import {debounce} from 'perfect-debounce';
const debouncedSuggestIssues = debounce((key: string, text: string) => new Promise<{matched:boolean; fragment?: HTMLElement}>(async (resolve) => {
const {owner, repo, index} = parseIssueHref(window.location.href);
const matches = await matchIssue(owner, repo, index, text);
if (!matches.length) return resolve({matched: false});
const ul = document.createElement('ul');
ul.classList.add('suggestions');
for (const issue of matches) {
const li = createElementFromAttrs('li', {
role: 'option',
'data-value': `${key}${issue.id}`,
class: 'tw-flex tw-gap-2',
});
const icon = svg(getIssueIcon(issue), 16, ['text', getIssueColor(issue)].join(' '));
li.append(createElementFromHTML(icon));
const id = document.createElement('span');
id.textContent = issue.id.toString();
li.append(id);
const nameSpan = document.createElement('span');
nameSpan.textContent = issue.title;
li.append(nameSpan);
ul.append(li);
}
resolve({matched: true, fragment: ul});
}), 100);
export function initTextExpander(expander) { export function initTextExpander(expander) {
expander?.addEventListener('text-expander-change', ({detail: {key, provide, text}}) => { expander?.addEventListener('text-expander-change', ({detail: {key, provide, text}}) => {
@ -49,12 +85,14 @@ export function initTextExpander(expander) {
} }
provide({matched: true, fragment: ul}); provide({matched: true, fragment: ul});
} else if (key === '#') {
provide(debouncedSuggestIssues(key, text));
} }
}); });
expander?.addEventListener('text-expander-value', ({detail}) => { expander?.addEventListener('text-expander-value', ({detail}) => {
if (detail?.item) { if (detail?.item) {
// add a space after @mentions as it's likely the user wants one // add a space after @mentions and #issue as it's likely the user wants one
const suffix = detail.key === '@' ? ' ' : ''; const suffix = ['@', '#'].includes(detail.key) ? ' ' : '';
detail.value = `${detail.item.getAttribute('data-value')}${suffix}`; detail.value = `${detail.item.getAttribute('data-value')}${suffix}`;
} }
}); });
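A sketch of how such a debounced provider plugs into @github/text-expander-element; matchIssue and the svg/DOM helpers used above are the project's own, and searchIssues below is a stand-in:

import {debounce} from 'perfect-debounce';

type ExpanderResult = {matched: boolean; fragment?: HTMLElement};

// perfect-debounce returns a function that resolves with the latest call's result,
// which is exactly the promise shape that provide() accepts
const suggestIssues = debounce(async (key: string, text: string): Promise<ExpanderResult> => {
  const issues = await searchIssues(text); // stand-in for matchIssue(owner, repo, index, text)
  if (!issues.length) return {matched: false};
  const ul = document.createElement('ul');
  ul.classList.add('suggestions');
  for (const issue of issues) {
    const li = document.createElement('li');
    li.setAttribute('role', 'option');
    li.setAttribute('data-value', `${key}${issue.id}`);
    li.textContent = `${issue.id} ${issue.title}`;
    ul.append(li);
  }
  return {matched: true, fragment: ul};
}, 100);

async function searchIssues(_query: string): Promise<{id: number; title: string}[]> {
  return []; // placeholder; the real code queries the issues/suggestions endpoint
}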
View File
@ -128,10 +128,12 @@ export async function initDropzone(dropzoneEl) {
fileUuidDict = {}; fileUuidDict = {};
for (const attachment of respData) { for (const attachment of respData) {
const file = {name: attachment.name, uuid: attachment.uuid, size: attachment.size}; const file = {name: attachment.name, uuid: attachment.uuid, size: attachment.size};
const imgSrc = `${attachmentBaseLinkUrl}/${file.uuid}`;
dzInst.emit('addedfile', file); dzInst.emit('addedfile', file);
dzInst.emit('thumbnail', file, imgSrc);
dzInst.emit('complete', file); dzInst.emit('complete', file);
if (isImageFile(file.name)) {
const imgSrc = `${attachmentBaseLinkUrl}/${file.uuid}`;
dzInst.emit('thumbnail', file, imgSrc);
}
addCopyLink(file); // it is from server response, so no "type" addCopyLink(file); // it is from server response, so no "type"
fileUuidDict[file.uuid] = {submitted: true}; fileUuidDict[file.uuid] = {submitted: true};
const input = createElementFromAttrs('input', {name: 'files', type: 'hidden', id: `dropzone-file-${file.uuid}`, value: file.uuid}); const input = createElementFromAttrs('input', {name: 'files', type: 'hidden', id: `dropzone-file-${file.uuid}`, value: file.uuid});
View File
@ -0,0 +1,32 @@
import type {Issue} from '../types.ts';
export function getIssueIcon(issue: Issue) {
if (issue.pull_request) {
if (issue.state === 'open') {
if (issue.pull_request.draft === true) {
return 'octicon-git-pull-request-draft'; // WIP PR
}
return 'octicon-git-pull-request'; // Open PR
} else if (issue.pull_request.merged === true) {
return 'octicon-git-merge'; // Merged PR
}
return 'octicon-git-pull-request'; // Closed PR
} else if (issue.state === 'open') {
return 'octicon-issue-opened'; // Open Issue
}
return 'octicon-issue-closed'; // Closed Issue
}
export function getIssueColor(issue: Issue) {
if (issue.pull_request) {
if (issue.pull_request.draft === true) {
return 'grey'; // WIP PR
} else if (issue.pull_request.merged === true) {
return 'purple'; // Merged PR
}
}
if (issue.state === 'open') {
return 'green'; // Open Issue
}
return 'red'; // Closed Issue
}
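A quick usage sketch for the two helpers added above; the object literal is made up but matches the Issue shape added to types.ts:

// assuming getIssueIcon/getIssueColor are imported from the new issue.ts module above
const issue = {id: 1, title: 'Draft change', state: 'open' as const, pull_request: {draft: true, merged: false}};
getIssueIcon(issue);  // 'octicon-git-pull-request-draft'
getIssueColor(issue); // 'grey'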
View File
@ -3,6 +3,8 @@ import {hideElem, queryElems, showElem} from '../utils/dom.ts';
import {POST} from '../modules/fetch.ts'; import {POST} from '../modules/fetch.ts';
import {showErrorToast} from '../modules/toast.ts'; import {showErrorToast} from '../modules/toast.ts';
import {sleep} from '../utils.ts'; import {sleep} from '../utils.ts';
import RepoActivityTopAuthors from '../components/RepoActivityTopAuthors.vue';
import {createApp} from 'vue';
async function onDownloadArchive(e) { async function onDownloadArchive(e) {
e.preventDefault(); e.preventDefault();
@ -32,6 +34,13 @@ export function initRepoArchiveLinks() {
queryElems('a.archive-link[href]', (el) => el.addEventListener('click', onDownloadArchive)); queryElems('a.archive-link[href]', (el) => el.addEventListener('click', onDownloadArchive));
} }
export function initRepoActivityTopAuthorsChart() {
const el = document.querySelector('#repo-activity-top-authors-chart');
if (el) {
createApp(RepoActivityTopAuthors).mount(el);
}
}
export function initRepoCloneLink() { export function initRepoCloneLink() {
const $repoCloneSsh = $('#repo-clone-ssh'); const $repoCloneSsh = $('#repo-clone-ssh');
const $repoCloneHttps = $('#repo-clone-https'); const $repoCloneHttps = $('#repo-clone-https');
View File
@ -1,11 +1,12 @@
import $ from 'jquery'; import $ from 'jquery';
import {handleReply} from './repo-issue.ts'; import {handleReply} from './repo-issue.ts';
import {getComboMarkdownEditor, initComboMarkdownEditor} from './comp/ComboMarkdownEditor.ts'; import {getComboMarkdownEditor, initComboMarkdownEditor, ComboMarkdownEditor} from './comp/ComboMarkdownEditor.ts';
import {POST} from '../modules/fetch.ts'; import {POST} from '../modules/fetch.ts';
import {showErrorToast} from '../modules/toast.ts'; import {showErrorToast} from '../modules/toast.ts';
import {hideElem, showElem} from '../utils/dom.ts'; import {hideElem, showElem} from '../utils/dom.ts';
import {attachRefIssueContextPopup} from './contextpopup.ts'; import {attachRefIssueContextPopup} from './contextpopup.ts';
import {initCommentContent, initMarkupContent} from '../markup/content.ts'; import {initCommentContent, initMarkupContent} from '../markup/content.ts';
import {triggerUploadStateChanged} from './comp/EditorUpload.ts';
async function onEditContent(event) { async function onEditContent(event) {
event.preventDefault(); event.preventDefault();
@ -15,7 +16,7 @@ async function onEditContent(event) {
const renderContent = segment.querySelector('.render-content'); const renderContent = segment.querySelector('.render-content');
const rawContent = segment.querySelector('.raw-content'); const rawContent = segment.querySelector('.raw-content');
let comboMarkdownEditor; let comboMarkdownEditor : ComboMarkdownEditor;
const cancelAndReset = (e) => { const cancelAndReset = (e) => {
e.preventDefault(); e.preventDefault();
@ -79,9 +80,12 @@ async function onEditContent(event) {
comboMarkdownEditor = getComboMarkdownEditor(editContentZone.querySelector('.combo-markdown-editor')); comboMarkdownEditor = getComboMarkdownEditor(editContentZone.querySelector('.combo-markdown-editor'));
if (!comboMarkdownEditor) { if (!comboMarkdownEditor) {
editContentZone.innerHTML = document.querySelector('#issue-comment-editor-template').innerHTML; editContentZone.innerHTML = document.querySelector('#issue-comment-editor-template').innerHTML;
const saveButton = editContentZone.querySelector('.ui.primary.button');
comboMarkdownEditor = await initComboMarkdownEditor(editContentZone.querySelector('.combo-markdown-editor')); comboMarkdownEditor = await initComboMarkdownEditor(editContentZone.querySelector('.combo-markdown-editor'));
const syncUiState = () => saveButton.disabled = comboMarkdownEditor.isUploading();
comboMarkdownEditor.container.addEventListener(ComboMarkdownEditor.EventUploadStateChanged, syncUiState);
editContentZone.querySelector('.ui.cancel.button').addEventListener('click', cancelAndReset); editContentZone.querySelector('.ui.cancel.button').addEventListener('click', cancelAndReset);
editContentZone.querySelector('.ui.primary.button').addEventListener('click', saveAndRefresh); saveButton.addEventListener('click', saveAndRefresh);
} }
// Show write/preview tab and copy raw content as needed // Show write/preview tab and copy raw content as needed
@ -93,6 +97,7 @@ async function onEditContent(event) {
} }
comboMarkdownEditor.switchTabToEditor(); comboMarkdownEditor.switchTabToEditor();
comboMarkdownEditor.focus(); comboMarkdownEditor.focus();
triggerUploadStateChanged(comboMarkdownEditor.container);
} }
export function initRepoIssueCommentEdit() { export function initRepoIssueCommentEdit() {
View File
@ -3,7 +3,7 @@ import {htmlEscape} from 'escape-goat';
import {createTippy, showTemporaryTooltip} from '../modules/tippy.ts'; import {createTippy, showTemporaryTooltip} from '../modules/tippy.ts';
import {hideElem, showElem, toggleElem} from '../utils/dom.ts'; import {hideElem, showElem, toggleElem} from '../utils/dom.ts';
import {setFileFolding} from './file-fold.ts'; import {setFileFolding} from './file-fold.ts';
import {getComboMarkdownEditor, initComboMarkdownEditor} from './comp/ComboMarkdownEditor.ts'; import {ComboMarkdownEditor, getComboMarkdownEditor, initComboMarkdownEditor} from './comp/ComboMarkdownEditor.ts';
import {toAbsoluteUrl} from '../utils.ts'; import {toAbsoluteUrl} from '../utils.ts';
import {GET, POST} from '../modules/fetch.ts'; import {GET, POST} from '../modules/fetch.ts';
import {showErrorToast} from '../modules/toast.ts'; import {showErrorToast} from '../modules/toast.ts';
@ -483,9 +483,9 @@ export function initRepoPullRequestReview() {
await handleReply(this); await handleReply(this);
}); });
const $reviewBox = $('.review-box-panel'); const elReviewBox = document.querySelector('.review-box-panel');
if ($reviewBox.length === 1) { if (elReviewBox) {
const _promise = initComboMarkdownEditor($reviewBox.find('.combo-markdown-editor')); initComboMarkdownEditor(elReviewBox.querySelector('.combo-markdown-editor'));
} }
// The following part is only for diff views // The following part is only for diff views
@ -548,7 +548,7 @@ export function initRepoPullRequestReview() {
$td.find("input[name='line']").val(idx); $td.find("input[name='line']").val(idx);
$td.find("input[name='side']").val(side === 'left' ? 'previous' : 'proposed'); $td.find("input[name='side']").val(side === 'left' ? 'previous' : 'proposed');
$td.find("input[name='path']").val(path); $td.find("input[name='path']").val(path);
const editor = await initComboMarkdownEditor($td.find('.combo-markdown-editor')); const editor = await initComboMarkdownEditor($td[0].querySelector('.combo-markdown-editor'));
editor.focus(); editor.focus();
} catch (error) { } catch (error) {
console.error(error); console.error(error);
@ -669,20 +669,22 @@ export async function initSingleCommentEditor($commentForm) {
// pages: // pages:
// * normal new issue/pr page: no status-button, no comment-button (there is only a normal submit button which can submit empty content) // * normal new issue/pr page: no status-button, no comment-button (there is only a normal submit button which can submit empty content)
// * issue/pr view page: with comment form, has status-button and comment-button // * issue/pr view page: with comment form, has status-button and comment-button
const opts = {}; const editor = await initComboMarkdownEditor($commentForm[0].querySelector('.combo-markdown-editor'));
const statusButton = document.querySelector('#status-button'); const statusButton = document.querySelector<HTMLButtonElement>('#status-button');
const commentButton = document.querySelector('#comment-button'); const commentButton = document.querySelector<HTMLButtonElement>('#comment-button');
opts.onContentChanged = (editor) => { const syncUiState = () => {
const editorText = editor.value().trim(); const editorText = editor.value().trim(), isUploading = editor.isUploading();
if (statusButton) { if (statusButton) {
statusButton.textContent = statusButton.getAttribute(editorText ? 'data-status-and-comment' : 'data-status'); statusButton.textContent = statusButton.getAttribute(editorText ? 'data-status-and-comment' : 'data-status');
statusButton.disabled = isUploading;
} }
if (commentButton) { if (commentButton) {
commentButton.disabled = !editorText; commentButton.disabled = !editorText || isUploading;
} }
}; };
const editor = await initComboMarkdownEditor($commentForm.find('.combo-markdown-editor'), opts); editor.container.addEventListener(ComboMarkdownEditor.EventUploadStateChanged, syncUiState);
opts.onContentChanged(editor); // sync state of buttons with the initial content editor.container.addEventListener(ComboMarkdownEditor.EventEditorContentChanged, syncUiState);
syncUiState();
} }
export function initIssueTemplateCommentEditors($commentForm) { export function initIssueTemplateCommentEditors($commentForm) {
@ -690,16 +692,13 @@ export function initIssueTemplateCommentEditors($commentForm) {
// * new issue with issue template // * new issue with issue template
const $comboFields = $commentForm.find('.combo-editor-dropzone'); const $comboFields = $commentForm.find('.combo-editor-dropzone');
const initCombo = async ($combo) => { const initCombo = async (elCombo) => {
const $dropzoneContainer = $combo.find('.form-field-dropzone'); const $formField = $(elCombo.querySelector('.form-field-real'));
const $formField = $combo.find('.form-field-real'); const dropzoneContainer = elCombo.querySelector('.form-field-dropzone');
const $markdownEditor = $combo.find('.combo-markdown-editor'); const markdownEditor = elCombo.querySelector('.combo-markdown-editor');
const editor = await initComboMarkdownEditor($markdownEditor, { const editor = await initComboMarkdownEditor(markdownEditor);
onContentChanged: (editor) => { editor.container.addEventListener(ComboMarkdownEditor.EventEditorContentChanged, () => $formField.val(editor.value()));
$formField.val(editor.value());
},
});
$formField.on('focus', async () => { $formField.on('focus', async () => {
// deactivate all markdown editors // deactivate all markdown editors
@ -709,8 +708,8 @@ export function initIssueTemplateCommentEditors($commentForm) {
// activate this markdown editor // activate this markdown editor
hideElem($formField); hideElem($formField);
showElem($markdownEditor); showElem(markdownEditor);
showElem($dropzoneContainer); showElem(dropzoneContainer);
await editor.switchToUserPreference(); await editor.switchToUserPreference();
editor.focus(); editor.focus();
@ -718,7 +717,7 @@ export function initIssueTemplateCommentEditors($commentForm) {
}; };
for (const el of $comboFields) { for (const el of $comboFields) {
initCombo($(el)); initCombo(el);
} }
} }
View File
@ -50,7 +50,7 @@ function initTagNameEditor() {
} }
function initRepoReleaseEditor() { function initRepoReleaseEditor() {
const editor = document.querySelector('.repository.new.release .combo-markdown-editor'); const editor = document.querySelector<HTMLElement>('.repository.new.release .combo-markdown-editor');
if (!editor) { if (!editor) {
return; return;
} }
View File
@ -4,11 +4,11 @@ import {fomanticMobileScreen} from '../modules/fomantic.ts';
import {POST} from '../modules/fetch.ts'; import {POST} from '../modules/fetch.ts';
async function initRepoWikiFormEditor() { async function initRepoWikiFormEditor() {
const editArea = document.querySelector('.repository.wiki .combo-markdown-editor textarea'); const editArea = document.querySelector<HTMLTextAreaElement>('.repository.wiki .combo-markdown-editor textarea');
if (!editArea) return; if (!editArea) return;
const form = document.querySelector('.repository.wiki.new .ui.form'); const form = document.querySelector('.repository.wiki.new .ui.form');
const editorContainer = form.querySelector('.combo-markdown-editor'); const editorContainer = form.querySelector<HTMLElement>('.combo-markdown-editor');
let editor; let editor;
let renderRequesting = false; let renderRequesting = false;
View File
@ -2,7 +2,6 @@
import './bootstrap.ts'; import './bootstrap.ts';
import './htmx.ts'; import './htmx.ts';
import {initRepoActivityTopAuthorsChart} from './components/RepoActivityTopAuthors.vue';
import {initDashboardRepoList} from './components/DashboardRepoList.vue'; import {initDashboardRepoList} from './components/DashboardRepoList.vue';
import {initGlobalCopyToClipboardListener} from './features/clipboard.ts'; import {initGlobalCopyToClipboardListener} from './features/clipboard.ts';
@ -42,7 +41,7 @@ import {initRepoTemplateSearch} from './features/repo-template.ts';
import {initRepoCodeView} from './features/repo-code.ts'; import {initRepoCodeView} from './features/repo-code.ts';
import {initSshKeyFormParser} from './features/sshkey-helper.ts'; import {initSshKeyFormParser} from './features/sshkey-helper.ts';
import {initUserSettings} from './features/user-settings.ts'; import {initUserSettings} from './features/user-settings.ts';
import {initRepoArchiveLinks} from './features/repo-common.ts'; import {initRepoActivityTopAuthorsChart, initRepoArchiveLinks} from './features/repo-common.ts';
import {initRepoMigrationStatusChecker} from './features/repo-migrate.ts'; import {initRepoMigrationStatusChecker} from './features/repo-migrate.ts';
import { import {
initRepoSettingGitHook, initRepoSettingGitHook,
View File
@ -36,3 +36,13 @@ export type IssueData = {
type: string, type: string,
index: string, index: string,
} }
export type Issue = {
id: number;
title: string;
state: 'open' | 'closed';
pull_request?: {
draft: boolean;
merged: boolean;
};
};
View File
@ -1,8 +1,10 @@
import emojis from '../../../assets/emoji.json'; import emojis from '../../../assets/emoji.json';
import type {Issue} from '../features/issue.ts';
import {GET} from '../modules/fetch.ts';
const maxMatches = 6; const maxMatches = 6;
function sortAndReduce(map: Map<string, number>) { function sortAndReduce<T>(map: Map<T, number>): T[] {
const sortedMap = new Map(Array.from(map.entries()).sort((a, b) => a[1] - b[1])); const sortedMap = new Map(Array.from(map.entries()).sort((a, b) => a[1] - b[1]));
return Array.from(sortedMap.keys()).slice(0, maxMatches); return Array.from(sortedMap.keys()).slice(0, maxMatches);
} }
@ -27,11 +29,12 @@ export function matchEmoji(queryText: string): string[] {
return sortAndReduce(results); return sortAndReduce(results);
} }
export function matchMention(queryText: string): string[] { type MentionSuggestion = {value: string; name: string; fullname: string; avatar: string};
export function matchMention(queryText: string): MentionSuggestion[] {
const query = queryText.toLowerCase(); const query = queryText.toLowerCase();
// results is a map of weights, lower is better // results is a map of weights, lower is better
const results = new Map(); const results = new Map<MentionSuggestion, number>();
for (const obj of window.config.mentionValues ?? []) { for (const obj of window.config.mentionValues ?? []) {
const index = obj.key.toLowerCase().indexOf(query); const index = obj.key.toLowerCase().indexOf(query);
if (index === -1) continue; if (index === -1) continue;
@ -41,3 +44,13 @@ export function matchMention(queryText: string): string[] {
return sortAndReduce(results); return sortAndReduce(results);
} }
export async function matchIssue(owner: string, repo: string, issueIndexStr: string, query: string): Promise<Issue[]> {
const res = await GET(`${window.config.appSubUrl}/${owner}/${repo}/issues/suggestions?q=${encodeURIComponent(query)}`);
const issues: Issue[] = await res.json();
const issueIndex = parseInt(issueIndexStr);
// filter out issue with same id
return issues.filter((i) => i.id !== issueIndex);
}
View File
@ -42,14 +42,14 @@ export function firstStartDateAfterDate(inputDate: Date): number {
return resultDate.valueOf(); return resultDate.valueOf();
} }
type DayData = { export type DayData = {
week: number, week: number,
additions: number, additions: number,
deletions: number, deletions: number,
commits: number, commits: number,
} }
export function fillEmptyStartDaysWithZeroes(startDays: number[], data: DayData): DayData[] { export function fillEmptyStartDaysWithZeroes(startDays: number[], data: DayData[]): DayData[] {
const result = {}; const result = {};
for (const startDay of startDays) { for (const startDay of startDays) {
View File
@ -7,9 +7,15 @@ test('toAbsoluteLocaleDate', () => {
day: 'numeric', day: 'numeric',
})).toEqual('March 15, 2024'); })).toEqual('March 15, 2024');
expect(toAbsoluteLocaleDate('2024-03-15', 'de-DE', { expect(toAbsoluteLocaleDate('2024-03-15T01:02:03', 'de-DE', {
year: 'numeric', year: 'numeric',
month: 'long', month: 'long',
day: 'numeric', day: 'numeric',
})).toEqual('15. März 2024'); })).toEqual('15. März 2024');
expect(toAbsoluteLocaleDate('12345-03-15 01:02:03', '', {
year: 'numeric',
month: 'short',
day: 'numeric',
})).toEqual('Mar 15, 12345');
}); });
View File
@ -1,30 +1,28 @@
import {Temporal} from 'temporal-polyfill'; export function toAbsoluteLocaleDate(date: string, lang: string, opts: Intl.DateTimeFormatOptions) {
return new Date(date).toLocaleString(lang || [], opts);
export function toAbsoluteLocaleDate(dateStr, lang, opts) {
return Temporal.PlainDate.from(dateStr).toLocaleString(lang ?? [], opts);
} }
window.customElements.define('absolute-date', class extends HTMLElement { window.customElements.define('absolute-date', class extends HTMLElement {
static observedAttributes = ['date', 'year', 'month', 'weekday', 'day']; static observedAttributes = ['date', 'year', 'month', 'weekday', 'day'];
initialized = false;
update = () => { update = () => {
const year = this.getAttribute('year') ?? ''; const opt: Intl.DateTimeFormatOptions = {};
const month = this.getAttribute('month') ?? ''; for (const attr of ['year', 'month', 'weekday', 'day']) {
const weekday = this.getAttribute('weekday') ?? ''; if (this.getAttribute(attr)) opt[attr] = this.getAttribute(attr);
const day = this.getAttribute('day') ?? ''; }
const lang = this.closest('[lang]')?.getAttribute('lang') || const lang = this.closest('[lang]')?.getAttribute('lang') ||
this.ownerDocument.documentElement.getAttribute('lang') || ''; this.ownerDocument.documentElement.getAttribute('lang') || '';
// only use the first 10 characters, e.g. the `yyyy-mm-dd` part // only use the date part, it is guaranteed to be in ISO format (YYYY-MM-DDTHH:mm:ss.sssZ)
const dateStr = this.getAttribute('date').substring(0, 10); let date = this.getAttribute('date');
let dateSep = date.indexOf('T');
dateSep = dateSep === -1 ? date.indexOf(' ') : dateSep;
date = dateSep === -1 ? date : date.substring(0, dateSep);
if (!this.shadowRoot) this.attachShadow({mode: 'open'}); if (!this.shadowRoot) this.attachShadow({mode: 'open'});
this.shadowRoot.textContent = toAbsoluteLocaleDate(dateStr, lang, { this.shadowRoot.textContent = toAbsoluteLocaleDate(date, lang, opt);
...(year && {year}),
...(month && {month}),
...(weekday && {weekday}),
...(day && {day}),
});
}; };
attributeChangedCallback(_name, oldValue, newValue) { attributeChangedCallback(_name, oldValue, newValue) {