mirror of
https://github.com/go-gitea/gitea
synced 2025-02-03 21:57:50 +01:00
Merge branch 'main' into lunny/issue_dev
This commit is contained in:
commit
6feb5a1e17
3
go.mod
3
go.mod
@ -49,6 +49,7 @@ require (
|
||||
github.com/go-git/go-billy/v5 v5.5.0
|
||||
github.com/go-git/go-git/v5 v5.12.0
|
||||
github.com/go-ldap/ldap/v3 v3.4.6
|
||||
github.com/go-redsync/redsync/v4 v4.13.0
|
||||
github.com/go-sql-driver/mysql v1.8.1
|
||||
github.com/go-swagger/go-swagger v0.31.0
|
||||
github.com/go-testfixtures/testfixtures/v3 v3.11.0
|
||||
@ -218,7 +219,9 @@ require (
|
||||
github.com/gorilla/mux v1.8.1 // indirect
|
||||
github.com/gorilla/securecookie v1.1.2 // indirect
|
||||
github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 // indirect
|
||||
github.com/hashicorp/errwrap v1.1.0 // indirect
|
||||
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
|
||||
github.com/hashicorp/go-multierror v1.1.1 // indirect
|
||||
github.com/hashicorp/go-retryablehttp v0.7.7 // indirect
|
||||
github.com/hashicorp/hcl v1.0.0 // indirect
|
||||
github.com/imdario/mergo v0.3.16 // indirect
|
||||
|
19
go.sum
19
go.sum
@ -342,6 +342,14 @@ github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ
|
||||
github.com/go-openapi/validate v0.24.0 h1:LdfDKwNbpB6Vn40xhTdNZAnfLECL81w+VX3BumrGD58=
|
||||
github.com/go-openapi/validate v0.24.0/go.mod h1:iyeX1sEufmv3nPbBdX3ieNviWnOZaJ1+zquzJEf2BAQ=
|
||||
github.com/go-redis/redis v6.15.2+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA=
|
||||
github.com/go-redis/redis v6.15.9+incompatible h1:K0pv1D7EQUjfyoMql+r/jZqCLizCGKFlFgcHWWmHQjg=
|
||||
github.com/go-redis/redis v6.15.9+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA=
|
||||
github.com/go-redis/redis/v7 v7.4.1 h1:PASvf36gyUpr2zdOUS/9Zqc80GbM+9BDyiJSJDDOrTI=
|
||||
github.com/go-redis/redis/v7 v7.4.1/go.mod h1:JDNMw23GTyLNC4GZu9njt15ctBQVn7xjRfnwdHj/Dcg=
|
||||
github.com/go-redis/redis/v8 v8.11.5 h1:AcZZR7igkdvfVmQTPnu9WE37LRrO/YrBH5zWyjDC0oI=
|
||||
github.com/go-redis/redis/v8 v8.11.5/go.mod h1:gREzHqY1hg6oD9ngVRbLStwAWKhA0FEgq8Jd4h5lpwo=
|
||||
github.com/go-redsync/redsync/v4 v4.13.0 h1:49X6GJfnbLGaIpBBREM/zA4uIMDXKAh1NDkvQ1EkZKA=
|
||||
github.com/go-redsync/redsync/v4 v4.13.0/go.mod h1:HMW4Q224GZQz6x1Xc7040Yfgacukdzu7ifTDAKiyErQ=
|
||||
github.com/go-sql-driver/mysql v1.4.1/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
|
||||
github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y=
|
||||
github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
|
||||
@ -397,6 +405,8 @@ github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEW
|
||||
github.com/golang/snappy v0.0.2/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
|
||||
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/gomodule/redigo v1.8.9 h1:Sl3u+2BI/kk+VEatbj0scLdrFhjPmbxOc1myhDP41ws=
|
||||
github.com/gomodule/redigo v1.8.9/go.mod h1:7ArFNvsTjH8GMMzB4uy1snslv2BwmginuMs06a1uzZE=
|
||||
github.com/google/btree v1.1.2 h1:xf4v41cLI2Z6FxbKm+8Bu+m8ifhj15JuZ9sa0jZCMUU=
|
||||
github.com/google/btree v1.1.2/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4=
|
||||
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||
@ -449,10 +459,15 @@ github.com/h2non/gock v1.2.0 h1:K6ol8rfrRkUOefooBC8elXoaNGYkpp7y2qcxGG6BzUE=
|
||||
github.com/h2non/gock v1.2.0/go.mod h1:tNhoxHYW2W42cYkYb1WqzdbYIieALC99kpYr7rH/BQk=
|
||||
github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 h1:2VTzZjLZBgl62/EtslCrtky5vbi9dd7HrQPQIx6wqiw=
|
||||
github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542/go.mod h1:Ow0tF8D4Kplbc8s8sSb3V2oUCygFHVp8gC3Dn6U4MNI=
|
||||
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
|
||||
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
|
||||
github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
|
||||
github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ=
|
||||
github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=
|
||||
github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k=
|
||||
github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M=
|
||||
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
|
||||
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
|
||||
github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU=
|
||||
github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk=
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
|
||||
@ -674,6 +689,8 @@ github.com/quasoft/websspi v1.1.2/go.mod h1:HmVdl939dQ0WIXZhyik+ARdI03M6bQzaSEKc
|
||||
github.com/rcrowley/go-metrics v0.0.0-20190826022208-cac0b30c2563/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
|
||||
github.com/redis/go-redis/v9 v9.6.0 h1:NLck+Rab3AOTHw21CGRpvQpgTrAU4sgdCswqGtlhGRA=
|
||||
github.com/redis/go-redis/v9 v9.6.0/go.mod h1:hdY0cQFCN4fnSYT6TkisLufl/4W5UIXyv0b/CLO2V2M=
|
||||
github.com/redis/rueidis v1.0.19 h1:s65oWtotzlIFN8eMPhyYwxlwLR1lUdhza2KtWprKYSo=
|
||||
github.com/redis/rueidis v1.0.19/go.mod h1:8B+r5wdnjwK3lTFml5VtxjzGOQAC+5UmujoD12pDrEo=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0 h1:OdAsTTz6OkFY5QxjkYwrChwuRruF69c169dPK26NUlk=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
||||
github.com/rhysd/actionlint v1.7.1 h1:WJaDzyT1StBWVKGSsZPYnbV0HF9Y9/vD6KFdZQL42qE=
|
||||
@ -765,6 +782,8 @@ github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o
|
||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
|
||||
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/stvp/tempredis v0.0.0-20181119212430-b82af8480203 h1:QVqDTf3h2WHt08YuiTGPZLls0Wq99X9bWd0Q5ZSBesM=
|
||||
github.com/stvp/tempredis v0.0.0-20181119212430-b82af8480203/go.mod h1:oqN97ltKNihBbwlX8dLpwxCl3+HnXKV/R0e+sRLd9C8=
|
||||
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
|
||||
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
|
||||
github.com/syndtr/goleveldb v1.0.0 h1:fBdIW9lB4Iz0n9khmH8w27SJ3QEJ7+IgjPEwGSZiFdE=
|
||||
|
@ -28,7 +28,6 @@ func Test_SSHParsePublicKey(t *testing.T) {
|
||||
length int
|
||||
content string
|
||||
}{
|
||||
{"dsa-1024", false, "dsa", 1024, "ssh-dss AAAAB3NzaC1kc3MAAACBAOChCC7lf6Uo9n7BmZ6M8St19PZf4Tn59NriyboW2x/DZuYAz3ibZ2OkQ3S0SqDIa0HXSEJ1zaExQdmbO+Ux/wsytWZmCczWOVsaszBZSl90q8UnWlSH6P+/YA+RWJm5SFtuV9PtGIhyZgoNuz5kBQ7K139wuQsecdKktISwTakzAAAAFQCzKsO2JhNKlL+wwwLGOcLffoAmkwAAAIBpK7/3xvduajLBD/9vASqBQIHrgK2J+wiQnIb/Wzy0UsVmvfn8A+udRbBo+csM8xrSnlnlJnjkJS3qiM5g+eTwsLIV1IdKPEwmwB+VcP53Cw6lSyWyJcvhFb0N6s08NZysLzvj0N+ZC/FnhKTLzIyMtkHf/IrPCwlM+pV/M/96YgAAAIEAqQcGn9CKgzgPaguIZooTAOQdvBLMI5y0bQjOW6734XOpqQGf/Kra90wpoasLKZjSYKNPjE+FRUOrStLrxcNs4BeVKhy2PYTRnybfYVk1/dmKgH6P1YSRONsGKvTsH6c5IyCRG0ncCgYeF8tXppyd642982daopE7zQ/NPAnJfag= nocomment"},
|
||||
{"rsa-1024", false, "rsa", 1024, "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQDAu7tvIvX6ZHrRXuZNfkR3XLHSsuCK9Zn3X58lxBcQzuo5xZgB6vRwwm/QtJuF+zZPtY5hsQILBLmF+BZ5WpKZp1jBeSjH2G7lxet9kbcH+kIVj0tPFEoyKI9wvWqIwC4prx/WVk2wLTJjzBAhyNxfEq7C9CeiX9pQEbEqJfkKCQ== nocomment\n"},
|
||||
{"rsa-2048", false, "rsa", 2048, "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDMZXh+1OBUwSH9D45wTaxErQIN9IoC9xl7MKJkqvTvv6O5RR9YW/IK9FbfjXgXsppYGhsCZo1hFOOsXHMnfOORqu/xMDx4yPuyvKpw4LePEcg4TDipaDFuxbWOqc/BUZRZcXu41QAWfDLrInwsltWZHSeG7hjhpacl4FrVv9V1pS6Oc5Q1NxxEzTzuNLS/8diZrTm/YAQQ/+B+mzWI3zEtF4miZjjAljWd1LTBPvU23d29DcBmmFahcZ441XZsTeAwGxG/Q6j8NgNXj9WxMeWwxXV2jeAX/EBSpZrCVlCQ1yJswT6xCp8TuBnTiGWYMBNTbOZvPC4e0WI2/yZW/s5F nocomment"},
|
||||
{"ecdsa-256", false, "ecdsa", 256, "ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBFQacN3PrOll7PXmN5B/ZNVahiUIqI05nbBlZk1KXsO3d06ktAWqbNflv2vEmA38bTFTfJ2sbn2B5ksT52cDDbA= nocomment"},
|
||||
@ -172,7 +171,6 @@ func Test_calcFingerprint(t *testing.T) {
|
||||
fp string
|
||||
content string
|
||||
}{
|
||||
{"dsa-1024", false, "SHA256:fSIHQlpKMDsGPVAXI8BPYfRp+e2sfvSt1sMrPsFiXrc", "ssh-dss AAAAB3NzaC1kc3MAAACBAOChCC7lf6Uo9n7BmZ6M8St19PZf4Tn59NriyboW2x/DZuYAz3ibZ2OkQ3S0SqDIa0HXSEJ1zaExQdmbO+Ux/wsytWZmCczWOVsaszBZSl90q8UnWlSH6P+/YA+RWJm5SFtuV9PtGIhyZgoNuz5kBQ7K139wuQsecdKktISwTakzAAAAFQCzKsO2JhNKlL+wwwLGOcLffoAmkwAAAIBpK7/3xvduajLBD/9vASqBQIHrgK2J+wiQnIb/Wzy0UsVmvfn8A+udRbBo+csM8xrSnlnlJnjkJS3qiM5g+eTwsLIV1IdKPEwmwB+VcP53Cw6lSyWyJcvhFb0N6s08NZysLzvj0N+ZC/FnhKTLzIyMtkHf/IrPCwlM+pV/M/96YgAAAIEAqQcGn9CKgzgPaguIZooTAOQdvBLMI5y0bQjOW6734XOpqQGf/Kra90wpoasLKZjSYKNPjE+FRUOrStLrxcNs4BeVKhy2PYTRnybfYVk1/dmKgH6P1YSRONsGKvTsH6c5IyCRG0ncCgYeF8tXppyd642982daopE7zQ/NPAnJfag= nocomment"},
|
||||
{"rsa-1024", false, "SHA256:vSnDkvRh/xM6kMxPidLgrUhq3mCN7CDaronCEm2joyQ", "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQDAu7tvIvX6ZHrRXuZNfkR3XLHSsuCK9Zn3X58lxBcQzuo5xZgB6vRwwm/QtJuF+zZPtY5hsQILBLmF+BZ5WpKZp1jBeSjH2G7lxet9kbcH+kIVj0tPFEoyKI9wvWqIwC4prx/WVk2wLTJjzBAhyNxfEq7C9CeiX9pQEbEqJfkKCQ== nocomment\n"},
|
||||
{"rsa-2048", false, "SHA256:ZHD//a1b9VuTq9XSunAeYjKeU1xDa2tBFZYrFr2Okkg", "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDMZXh+1OBUwSH9D45wTaxErQIN9IoC9xl7MKJkqvTvv6O5RR9YW/IK9FbfjXgXsppYGhsCZo1hFOOsXHMnfOORqu/xMDx4yPuyvKpw4LePEcg4TDipaDFuxbWOqc/BUZRZcXu41QAWfDLrInwsltWZHSeG7hjhpacl4FrVv9V1pS6Oc5Q1NxxEzTzuNLS/8diZrTm/YAQQ/+B+mzWI3zEtF4miZjjAljWd1LTBPvU23d29DcBmmFahcZ441XZsTeAwGxG/Q6j8NgNXj9WxMeWwxXV2jeAX/EBSpZrCVlCQ1yJswT6xCp8TuBnTiGWYMBNTbOZvPC4e0WI2/yZW/s5F nocomment"},
|
||||
{"ecdsa-256", false, "SHA256:Bqx/xgWqRKLtkZ0Lr4iZpgb+5lYsFpSwXwVZbPwuTRw", "ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBFQacN3PrOll7PXmN5B/ZNVahiUIqI05nbBlZk1KXsO3d06ktAWqbNflv2vEmA38bTFTfJ2sbn2B5ksT52cDDbA= nocomment"},
|
||||
|
66
modules/globallock/globallock.go
Normal file
66
modules/globallock/globallock.go
Normal file
@ -0,0 +1,66 @@
|
||||
// Copyright 2024 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package globallock
|
||||
|
||||
import (
|
||||
"context"
|
||||
"sync"
|
||||
)
|
||||
|
||||
var (
|
||||
defaultLocker Locker
|
||||
initOnce sync.Once
|
||||
initFunc = func() {
|
||||
// TODO: read the setting and initialize the default locker.
|
||||
// Before implementing this, don't use it.
|
||||
} // define initFunc as a variable to make it possible to change it in tests
|
||||
)
|
||||
|
||||
// DefaultLocker returns the default locker.
|
||||
func DefaultLocker() Locker {
|
||||
initOnce.Do(func() {
|
||||
initFunc()
|
||||
})
|
||||
return defaultLocker
|
||||
}
|
||||
|
||||
// Lock tries to acquire a lock for the given key, it uses the default locker.
|
||||
// Read the documentation of Locker.Lock for more information about the behavior.
|
||||
func Lock(ctx context.Context, key string) (ReleaseFunc, error) {
|
||||
return DefaultLocker().Lock(ctx, key)
|
||||
}
|
||||
|
||||
// TryLock tries to acquire a lock for the given key, it uses the default locker.
|
||||
// Read the documentation of Locker.TryLock for more information about the behavior.
|
||||
func TryLock(ctx context.Context, key string) (bool, ReleaseFunc, error) {
|
||||
return DefaultLocker().TryLock(ctx, key)
|
||||
}
|
||||
|
||||
// LockAndDo tries to acquire a lock for the given key and then calls the given function.
|
||||
// It uses the default locker, and it will return an error if failed to acquire the lock.
|
||||
func LockAndDo(ctx context.Context, key string, f func(context.Context) error) error {
|
||||
release, err := Lock(ctx, key)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer release()
|
||||
|
||||
return f(ctx)
|
||||
}
|
||||
|
||||
// TryLockAndDo tries to acquire a lock for the given key and then calls the given function.
|
||||
// It uses the default locker, and it will return false if failed to acquire the lock.
|
||||
func TryLockAndDo(ctx context.Context, key string, f func(context.Context) error) (bool, error) {
|
||||
ok, release, err := TryLock(ctx, key)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
defer release()
|
||||
|
||||
if !ok {
|
||||
return false, nil
|
||||
}
|
||||
|
||||
return true, f(ctx)
|
||||
}
|
96
modules/globallock/globallock_test.go
Normal file
96
modules/globallock/globallock_test.go
Normal file
@ -0,0 +1,96 @@
|
||||
// Copyright 2024 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package globallock
|
||||
|
||||
import (
|
||||
"context"
|
||||
"os"
|
||||
"sync"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestLockAndDo(t *testing.T) {
|
||||
t.Run("redis", func(t *testing.T) {
|
||||
url := "redis://127.0.0.1:6379/0"
|
||||
if os.Getenv("CI") == "" {
|
||||
// Make it possible to run tests against a local redis instance
|
||||
url = os.Getenv("TEST_REDIS_URL")
|
||||
if url == "" {
|
||||
t.Skip("TEST_REDIS_URL not set and not running in CI")
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
oldDefaultLocker := defaultLocker
|
||||
oldInitFunc := initFunc
|
||||
defer func() {
|
||||
defaultLocker = oldDefaultLocker
|
||||
initFunc = oldInitFunc
|
||||
if defaultLocker == nil {
|
||||
initOnce = sync.Once{}
|
||||
}
|
||||
}()
|
||||
|
||||
initOnce = sync.Once{}
|
||||
initFunc = func() {
|
||||
defaultLocker = NewRedisLocker(url)
|
||||
}
|
||||
|
||||
testLockAndDo(t)
|
||||
require.NoError(t, defaultLocker.(*redisLocker).Close())
|
||||
})
|
||||
t.Run("memory", func(t *testing.T) {
|
||||
oldDefaultLocker := defaultLocker
|
||||
oldInitFunc := initFunc
|
||||
defer func() {
|
||||
defaultLocker = oldDefaultLocker
|
||||
initFunc = oldInitFunc
|
||||
if defaultLocker == nil {
|
||||
initOnce = sync.Once{}
|
||||
}
|
||||
}()
|
||||
|
||||
initOnce = sync.Once{}
|
||||
initFunc = func() {
|
||||
defaultLocker = NewMemoryLocker()
|
||||
}
|
||||
|
||||
testLockAndDo(t)
|
||||
})
|
||||
}
|
||||
|
||||
func testLockAndDo(t *testing.T) {
|
||||
const concurrency = 1000
|
||||
|
||||
ctx := context.Background()
|
||||
count := 0
|
||||
wg := sync.WaitGroup{}
|
||||
wg.Add(concurrency)
|
||||
for i := 0; i < concurrency; i++ {
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
err := LockAndDo(ctx, "test", func(ctx context.Context) error {
|
||||
count++
|
||||
|
||||
// It's impossible to acquire the lock inner the function
|
||||
ok, err := TryLockAndDo(ctx, "test", func(ctx context.Context) error {
|
||||
assert.Fail(t, "should not acquire the lock")
|
||||
return nil
|
||||
})
|
||||
assert.False(t, ok)
|
||||
assert.NoError(t, err)
|
||||
|
||||
return nil
|
||||
})
|
||||
require.NoError(t, err)
|
||||
}()
|
||||
}
|
||||
|
||||
wg.Wait()
|
||||
|
||||
assert.Equal(t, concurrency, count)
|
||||
}
|
38
modules/globallock/locker.go
Normal file
38
modules/globallock/locker.go
Normal file
@ -0,0 +1,38 @@
|
||||
// Copyright 2024 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package globallock
|
||||
|
||||
import (
|
||||
"context"
|
||||
)
|
||||
|
||||
type Locker interface {
|
||||
// Lock tries to acquire a lock for the given key, it blocks until the lock is acquired or the context is canceled.
|
||||
//
|
||||
// Lock returns a ReleaseFunc to release the lock, it cannot be nil.
|
||||
// It's always safe to call this function even if it fails to acquire the lock, and it will do nothing in that case.
|
||||
// And it's also safe to call it multiple times, but it will only release the lock once.
|
||||
// That's why it's called ReleaseFunc, not UnlockFunc.
|
||||
// But be aware that it's not safe to not call it at all; it could lead to a memory leak.
|
||||
// So a recommended pattern is to use defer to call it:
|
||||
// release, err := locker.Lock(ctx, "key")
|
||||
// if err != nil {
|
||||
// return err
|
||||
// }
|
||||
// defer release()
|
||||
//
|
||||
// Lock returns an error if failed to acquire the lock.
|
||||
// Be aware that even the context is not canceled, it's still possible to fail to acquire the lock.
|
||||
// For example, redis is down, or it reached the maximum number of tries.
|
||||
Lock(ctx context.Context, key string) (ReleaseFunc, error)
|
||||
|
||||
// TryLock tries to acquire a lock for the given key, it returns immediately.
|
||||
// It follows the same pattern as Lock, but it doesn't block.
|
||||
// And if it fails to acquire the lock because it's already locked, not other reasons like redis is down,
|
||||
// it will return false without any error.
|
||||
TryLock(ctx context.Context, key string) (bool, ReleaseFunc, error)
|
||||
}
|
||||
|
||||
// ReleaseFunc is a function that releases a lock.
|
||||
type ReleaseFunc func()
|
181
modules/globallock/locker_test.go
Normal file
181
modules/globallock/locker_test.go
Normal file
@ -0,0 +1,181 @@
|
||||
// Copyright 2024 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package globallock
|
||||
|
||||
import (
|
||||
"context"
|
||||
"os"
|
||||
"sync"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/go-redsync/redsync/v4"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestLocker(t *testing.T) {
|
||||
t.Run("redis", func(t *testing.T) {
|
||||
url := "redis://127.0.0.1:6379/0"
|
||||
if os.Getenv("CI") == "" {
|
||||
// Make it possible to run tests against a local redis instance
|
||||
url = os.Getenv("TEST_REDIS_URL")
|
||||
if url == "" {
|
||||
t.Skip("TEST_REDIS_URL not set and not running in CI")
|
||||
return
|
||||
}
|
||||
}
|
||||
oldExpiry := redisLockExpiry
|
||||
redisLockExpiry = 5 * time.Second // make it shorter for testing
|
||||
defer func() {
|
||||
redisLockExpiry = oldExpiry
|
||||
}()
|
||||
|
||||
locker := NewRedisLocker(url)
|
||||
testLocker(t, locker)
|
||||
testRedisLocker(t, locker.(*redisLocker))
|
||||
require.NoError(t, locker.(*redisLocker).Close())
|
||||
})
|
||||
t.Run("memory", func(t *testing.T) {
|
||||
locker := NewMemoryLocker()
|
||||
testLocker(t, locker)
|
||||
testMemoryLocker(t, locker.(*memoryLocker))
|
||||
})
|
||||
}
|
||||
|
||||
func testLocker(t *testing.T, locker Locker) {
|
||||
t.Run("lock", func(t *testing.T) {
|
||||
parentCtx := context.Background()
|
||||
release, err := locker.Lock(parentCtx, "test")
|
||||
defer release()
|
||||
|
||||
assert.NoError(t, err)
|
||||
|
||||
func() {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), time.Second)
|
||||
defer cancel()
|
||||
release, err := locker.Lock(ctx, "test")
|
||||
defer release()
|
||||
|
||||
assert.Error(t, err)
|
||||
}()
|
||||
|
||||
release()
|
||||
|
||||
func() {
|
||||
release, err := locker.Lock(context.Background(), "test")
|
||||
defer release()
|
||||
|
||||
assert.NoError(t, err)
|
||||
}()
|
||||
})
|
||||
|
||||
t.Run("try lock", func(t *testing.T) {
|
||||
parentCtx := context.Background()
|
||||
ok, release, err := locker.TryLock(parentCtx, "test")
|
||||
defer release()
|
||||
|
||||
assert.True(t, ok)
|
||||
assert.NoError(t, err)
|
||||
|
||||
func() {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), time.Second)
|
||||
defer cancel()
|
||||
ok, release, err := locker.TryLock(ctx, "test")
|
||||
defer release()
|
||||
|
||||
assert.False(t, ok)
|
||||
assert.NoError(t, err)
|
||||
}()
|
||||
|
||||
release()
|
||||
|
||||
func() {
|
||||
ok, release, _ := locker.TryLock(context.Background(), "test")
|
||||
defer release()
|
||||
|
||||
assert.True(t, ok)
|
||||
}()
|
||||
})
|
||||
|
||||
t.Run("wait and acquired", func(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
release, err := locker.Lock(ctx, "test")
|
||||
require.NoError(t, err)
|
||||
|
||||
wg := &sync.WaitGroup{}
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
started := time.Now()
|
||||
release, err := locker.Lock(context.Background(), "test") // should be blocked for seconds
|
||||
defer release()
|
||||
assert.Greater(t, time.Since(started), time.Second)
|
||||
assert.NoError(t, err)
|
||||
}()
|
||||
|
||||
time.Sleep(2 * time.Second)
|
||||
release()
|
||||
|
||||
wg.Wait()
|
||||
})
|
||||
|
||||
t.Run("multiple release", func(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
|
||||
release1, err := locker.Lock(ctx, "test")
|
||||
require.NoError(t, err)
|
||||
|
||||
release1()
|
||||
|
||||
release2, err := locker.Lock(ctx, "test")
|
||||
defer release2()
|
||||
require.NoError(t, err)
|
||||
|
||||
// Call release1 again,
|
||||
// it should not panic or block,
|
||||
// and it shouldn't affect the other lock
|
||||
release1()
|
||||
|
||||
ok, release3, err := locker.TryLock(ctx, "test")
|
||||
defer release3()
|
||||
require.NoError(t, err)
|
||||
// It should be able to acquire the lock;
|
||||
// otherwise, it means the lock has been released by release1
|
||||
assert.False(t, ok)
|
||||
})
|
||||
}
|
||||
|
||||
// testMemoryLocker does specific tests for memoryLocker
|
||||
func testMemoryLocker(t *testing.T, locker *memoryLocker) {
|
||||
// nothing to do
|
||||
}
|
||||
|
||||
// testRedisLocker does specific tests for redisLocker
|
||||
func testRedisLocker(t *testing.T, locker *redisLocker) {
|
||||
defer func() {
|
||||
// This case should be tested at the end.
|
||||
// Otherwise, it will affect other tests.
|
||||
t.Run("close", func(t *testing.T) {
|
||||
assert.NoError(t, locker.Close())
|
||||
_, err := locker.Lock(context.Background(), "test")
|
||||
assert.Error(t, err)
|
||||
})
|
||||
}()
|
||||
|
||||
t.Run("failed extend", func(t *testing.T) {
|
||||
release, err := locker.Lock(context.Background(), "test")
|
||||
defer release()
|
||||
require.NoError(t, err)
|
||||
|
||||
// It simulates that there are some problems with extending like network issues or redis server down.
|
||||
v, ok := locker.mutexM.Load("test")
|
||||
require.True(t, ok)
|
||||
m := v.(*redsync.Mutex)
|
||||
_, _ = m.Unlock() // release it to make it impossible to extend
|
||||
|
||||
// In current design, callers can't know the lock can't be extended.
|
||||
// Just keep this case to improve the test coverage.
|
||||
})
|
||||
}
|
67
modules/globallock/memory_locker.go
Normal file
67
modules/globallock/memory_locker.go
Normal file
@ -0,0 +1,67 @@
|
||||
// Copyright 2024 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package globallock
|
||||
|
||||
import (
|
||||
"context"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
type memoryLocker struct {
|
||||
locks sync.Map
|
||||
}
|
||||
|
||||
var _ Locker = &memoryLocker{}
|
||||
|
||||
func NewMemoryLocker() Locker {
|
||||
return &memoryLocker{}
|
||||
}
|
||||
|
||||
func (l *memoryLocker) Lock(ctx context.Context, key string) (ReleaseFunc, error) {
|
||||
if l.tryLock(key) {
|
||||
releaseOnce := sync.Once{}
|
||||
return func() {
|
||||
releaseOnce.Do(func() {
|
||||
l.locks.Delete(key)
|
||||
})
|
||||
}, nil
|
||||
}
|
||||
|
||||
ticker := time.NewTicker(time.Millisecond * 100)
|
||||
defer ticker.Stop()
|
||||
for {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return func() {}, ctx.Err()
|
||||
case <-ticker.C:
|
||||
if l.tryLock(key) {
|
||||
releaseOnce := sync.Once{}
|
||||
return func() {
|
||||
releaseOnce.Do(func() {
|
||||
l.locks.Delete(key)
|
||||
})
|
||||
}, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (l *memoryLocker) TryLock(_ context.Context, key string) (bool, ReleaseFunc, error) {
|
||||
if l.tryLock(key) {
|
||||
releaseOnce := sync.Once{}
|
||||
return true, func() {
|
||||
releaseOnce.Do(func() {
|
||||
l.locks.Delete(key)
|
||||
})
|
||||
}, nil
|
||||
}
|
||||
|
||||
return false, func() {}, nil
|
||||
}
|
||||
|
||||
func (l *memoryLocker) tryLock(key string) bool {
|
||||
_, loaded := l.locks.LoadOrStore(key, struct{}{})
|
||||
return !loaded
|
||||
}
|
137
modules/globallock/redis_locker.go
Normal file
137
modules/globallock/redis_locker.go
Normal file
@ -0,0 +1,137 @@
|
||||
// Copyright 2024 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package globallock
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"code.gitea.io/gitea/modules/nosql"
|
||||
|
||||
"github.com/go-redsync/redsync/v4"
|
||||
"github.com/go-redsync/redsync/v4/redis/goredis/v9"
|
||||
)
|
||||
|
||||
const redisLockKeyPrefix = "gitea:globallock:"
|
||||
|
||||
// redisLockExpiry is the default expiry time for a lock.
|
||||
// Define it as a variable to make it possible to change it in tests.
|
||||
var redisLockExpiry = 30 * time.Second
|
||||
|
||||
type redisLocker struct {
|
||||
rs *redsync.Redsync
|
||||
|
||||
mutexM sync.Map
|
||||
closed atomic.Bool
|
||||
extendWg sync.WaitGroup
|
||||
}
|
||||
|
||||
var _ Locker = &redisLocker{}
|
||||
|
||||
func NewRedisLocker(connection string) Locker {
|
||||
l := &redisLocker{
|
||||
rs: redsync.New(
|
||||
goredis.NewPool(
|
||||
nosql.GetManager().GetRedisClient(connection),
|
||||
),
|
||||
),
|
||||
}
|
||||
|
||||
l.extendWg.Add(1)
|
||||
l.startExtend()
|
||||
|
||||
return l
|
||||
}
|
||||
|
||||
func (l *redisLocker) Lock(ctx context.Context, key string) (ReleaseFunc, error) {
|
||||
return l.lock(ctx, key, 0)
|
||||
}
|
||||
|
||||
func (l *redisLocker) TryLock(ctx context.Context, key string) (bool, ReleaseFunc, error) {
|
||||
f, err := l.lock(ctx, key, 1)
|
||||
|
||||
var (
|
||||
errTaken *redsync.ErrTaken
|
||||
errNodeTaken *redsync.ErrNodeTaken
|
||||
)
|
||||
if errors.As(err, &errTaken) || errors.As(err, &errNodeTaken) {
|
||||
return false, f, nil
|
||||
}
|
||||
return err == nil, f, err
|
||||
}
|
||||
|
||||
// Close closes the locker.
|
||||
// It will stop extending the locks and refuse to acquire new locks.
|
||||
// In actual use, it is not necessary to call this function.
|
||||
// But it's useful in tests to release resources.
|
||||
// It could take some time since it waits for the extending goroutine to finish.
|
||||
func (l *redisLocker) Close() error {
|
||||
l.closed.Store(true)
|
||||
l.extendWg.Wait()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (l *redisLocker) lock(ctx context.Context, key string, tries int) (ReleaseFunc, error) {
|
||||
if l.closed.Load() {
|
||||
return func() {}, fmt.Errorf("locker is closed")
|
||||
}
|
||||
|
||||
options := []redsync.Option{
|
||||
redsync.WithExpiry(redisLockExpiry),
|
||||
}
|
||||
if tries > 0 {
|
||||
options = append(options, redsync.WithTries(tries))
|
||||
}
|
||||
mutex := l.rs.NewMutex(redisLockKeyPrefix+key, options...)
|
||||
if err := mutex.LockContext(ctx); err != nil {
|
||||
return func() {}, err
|
||||
}
|
||||
|
||||
l.mutexM.Store(key, mutex)
|
||||
|
||||
releaseOnce := sync.Once{}
|
||||
return func() {
|
||||
releaseOnce.Do(func() {
|
||||
l.mutexM.Delete(key)
|
||||
|
||||
// It's safe to ignore the error here,
|
||||
// if it failed to unlock, it will be released automatically after the lock expires.
|
||||
// Do not call mutex.UnlockContext(ctx) here, or it will fail to release when ctx has timed out.
|
||||
_, _ = mutex.Unlock()
|
||||
})
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (l *redisLocker) startExtend() {
|
||||
if l.closed.Load() {
|
||||
l.extendWg.Done()
|
||||
return
|
||||
}
|
||||
|
||||
toExtend := make([]*redsync.Mutex, 0)
|
||||
l.mutexM.Range(func(_, value any) bool {
|
||||
m := value.(*redsync.Mutex)
|
||||
|
||||
// Extend the lock if it is not expired.
|
||||
// Although the mutex will be removed from the map before it is released,
|
||||
// it still can be expired because of a failed extension.
|
||||
// If it happens, it does not need to be extended anymore.
|
||||
if time.Now().After(m.Until()) {
|
||||
return true
|
||||
}
|
||||
|
||||
toExtend = append(toExtend, m)
|
||||
return true
|
||||
})
|
||||
for _, v := range toExtend {
|
||||
// If it failed to extend, it will be released automatically after the lock expires.
|
||||
_, _ = v.Extend()
|
||||
}
|
||||
|
||||
time.AfterFunc(redisLockExpiry/2, l.startExtend)
|
||||
}
|
@ -45,7 +45,7 @@ func (p *PullRequest) GetContext() DownloaderContext { return p.Context }
|
||||
|
||||
// IsForkPullRequest returns true if the pull request from a forked repository but not the same repository
|
||||
func (p *PullRequest) IsForkPullRequest() bool {
|
||||
return p.Head.RepoPath() != p.Base.RepoPath()
|
||||
return p.Head.RepoFullName() != p.Base.RepoFullName()
|
||||
}
|
||||
|
||||
// GetGitRefName returns pull request relative path to head
|
||||
@ -62,8 +62,8 @@ type PullRequestBranch struct {
|
||||
OwnerName string `yaml:"owner_name"`
|
||||
}
|
||||
|
||||
// RepoPath returns pull request repo path
|
||||
func (p PullRequestBranch) RepoPath() string {
|
||||
// RepoFullName returns pull request repo full name
|
||||
func (p PullRequestBranch) RepoFullName() string {
|
||||
return fmt.Sprintf("%s/%s", p.OwnerName, p.RepoName)
|
||||
}
|
||||
|
||||
|
22
options/license/DocBook-Schema
Normal file
22
options/license/DocBook-Schema
Normal file
@ -0,0 +1,22 @@
|
||||
Copyright 1992-2011 HaL Computer Systems, Inc.,
|
||||
O'Reilly & Associates, Inc., ArborText, Inc., Fujitsu Software
|
||||
Corporation, Norman Walsh, Sun Microsystems, Inc., and the
|
||||
Organization for the Advancement of Structured Information
|
||||
Standards (OASIS).
|
||||
|
||||
Permission to use, copy, modify and distribute the DocBook schema
|
||||
and its accompanying documentation for any purpose and without fee
|
||||
is hereby granted in perpetuity, provided that the above copyright
|
||||
notice and this paragraph appear in all copies. The copyright
|
||||
holders make no representation about the suitability of the schema
|
||||
for any purpose. It is provided "as is" without expressed or implied
|
||||
warranty.
|
||||
|
||||
If you modify the DocBook schema in any way, label your schema as a
|
||||
variant of DocBook. See the reference documentation
|
||||
(http://docbook.org/tdg5/en/html/ch05.html#s-notdocbook)
|
||||
for more information.
|
||||
|
||||
Please direct all questions, bug reports, or suggestions for changes
|
||||
to the docbook@lists.oasis-open.org mailing list. For more
|
||||
information, see http://www.oasis-open.org/docbook/.
|
48
options/license/DocBook-XML
Normal file
48
options/license/DocBook-XML
Normal file
@ -0,0 +1,48 @@
|
||||
Copyright
|
||||
---------
|
||||
Copyright (C) 1999-2007 Norman Walsh
|
||||
Copyright (C) 2003 Jiří Kosek
|
||||
Copyright (C) 2004-2007 Steve Ball
|
||||
Copyright (C) 2005-2014 The DocBook Project
|
||||
Copyright (C) 2011-2012 O'Reilly Media
|
||||
|
||||
Permission is hereby granted, free of charge, to any person
|
||||
obtaining a copy of this software and associated documentation
|
||||
files (the ``Software''), to deal in the Software without
|
||||
restriction, including without limitation the rights to use,
|
||||
copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the
|
||||
Software is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
Except as contained in this notice, the names of individuals
|
||||
credited with contribution to this software shall not be used in
|
||||
advertising or otherwise to promote the sale, use or other
|
||||
dealings in this Software without prior written authorization
|
||||
from the individuals in question.
|
||||
|
||||
Any stylesheet derived from this Software that is publically
|
||||
distributed will be identified with a different name and the
|
||||
version strings in any derived Software will be changed so that
|
||||
no possibility of confusion between the derived package and this
|
||||
Software will exist.
|
||||
|
||||
Warranty
|
||||
--------
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL NORMAN WALSH OR ANY OTHER
|
||||
CONTRIBUTOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
Contacting the Author
|
||||
---------------------
|
||||
The DocBook XSL stylesheets are maintained by Norman Walsh,
|
||||
<ndw@nwalsh.com>, and members of the DocBook Project,
|
||||
<docbook-developers@sf.net>
|
96
options/license/Ubuntu-font-1.0
Normal file
96
options/license/Ubuntu-font-1.0
Normal file
@ -0,0 +1,96 @@
|
||||
-------------------------------
|
||||
UBUNTU FONT LICENCE Version 1.0
|
||||
-------------------------------
|
||||
|
||||
PREAMBLE
|
||||
This licence allows the licensed fonts to be used, studied, modified and
|
||||
redistributed freely. The fonts, including any derivative works, can be
|
||||
bundled, embedded, and redistributed provided the terms of this licence
|
||||
are met. The fonts and derivatives, however, cannot be released under
|
||||
any other licence. The requirement for fonts to remain under this
|
||||
licence does not require any document created using the fonts or their
|
||||
derivatives to be published under this licence, as long as the primary
|
||||
purpose of the document is not to be a vehicle for the distribution of
|
||||
the fonts.
|
||||
|
||||
DEFINITIONS
|
||||
"Font Software" refers to the set of files released by the Copyright
|
||||
Holder(s) under this licence and clearly marked as such. This may
|
||||
include source files, build scripts and documentation.
|
||||
|
||||
"Original Version" refers to the collection of Font Software components
|
||||
as received under this licence.
|
||||
|
||||
"Modified Version" refers to any derivative made by adding to, deleting,
|
||||
or substituting -- in part or in whole -- any of the components of the
|
||||
Original Version, by changing formats or by porting the Font Software to
|
||||
a new environment.
|
||||
|
||||
"Copyright Holder(s)" refers to all individuals and companies who have a
|
||||
copyright ownership of the Font Software.
|
||||
|
||||
"Substantially Changed" refers to Modified Versions which can be easily
|
||||
identified as dissimilar to the Font Software by users of the Font
|
||||
Software comparing the Original Version with the Modified Version.
|
||||
|
||||
To "Propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification and with or without charging
|
||||
a redistribution fee), making available to the public, and in some
|
||||
countries other activities as well.
|
||||
|
||||
PERMISSION & CONDITIONS
|
||||
This licence does not grant any rights under trademark law and all such
|
||||
rights are reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a
|
||||
copy of the Font Software, to propagate the Font Software, subject to
|
||||
the below conditions:
|
||||
|
||||
1) Each copy of the Font Software must contain the above copyright
|
||||
notice and this licence. These can be included either as stand-alone
|
||||
text files, human-readable headers or in the appropriate machine-
|
||||
readable metadata fields within text or binary files as long as those
|
||||
fields can be easily viewed by the user.
|
||||
|
||||
2) The font name complies with the following:
|
||||
(a) The Original Version must retain its name, unmodified.
|
||||
(b) Modified Versions which are Substantially Changed must be renamed to
|
||||
avoid use of the name of the Original Version or similar names entirely.
|
||||
(c) Modified Versions which are not Substantially Changed must be
|
||||
renamed to both (i) retain the name of the Original Version and (ii) add
|
||||
additional naming elements to distinguish the Modified Version from the
|
||||
Original Version. The name of such Modified Versions must be the name of
|
||||
the Original Version, with "derivative X" where X represents the name of
|
||||
the new work, appended to that name.
|
||||
|
||||
3) The name(s) of the Copyright Holder(s) and any contributor to the
|
||||
Font Software shall not be used to promote, endorse or advertise any
|
||||
Modified Version, except (i) as required by this licence, (ii) to
|
||||
acknowledge the contribution(s) of the Copyright Holder(s) or (iii) with
|
||||
their explicit written permission.
|
||||
|
||||
4) The Font Software, modified or unmodified, in part or in whole, must
|
||||
be distributed entirely under this licence, and must not be distributed
|
||||
under any other licence. The requirement for fonts to remain under this
|
||||
licence does not affect any document created using the Font Software,
|
||||
except any version of the Font Software extracted from a document
|
||||
created using the Font Software may only be distributed under this
|
||||
licence.
|
||||
|
||||
TERMINATION
|
||||
This licence becomes null and void if any of the above conditions are
|
||||
not met.
|
||||
|
||||
DISCLAIMER
|
||||
THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF
|
||||
COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE
|
||||
COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
|
||||
DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER
|
||||
DEALINGS IN THE FONT SOFTWARE.
|
@ -2465,6 +2465,18 @@ settings.thread_id=スレッドID
|
||||
settings.matrix.homeserver_url=ホームサーバー URL
|
||||
settings.matrix.room_id=ルーム ID
|
||||
settings.matrix.message_type=メッセージ種別
|
||||
settings.visibility.private.button=プライベートにする
|
||||
settings.visibility.private.text=プライベートに変更した場合、リポジトリを許可されたメンバーのみが閲覧できるようにするだけでなく、フォーク、ウォッチャー、スターとの関係を解除する可能性もあります。
|
||||
settings.visibility.private.bullet_title=<strong>プライベートに変更すると:</strong>
|
||||
settings.visibility.private.bullet_one=リポジトリを許可されたメンバーのみが閲覧できるようにします。
|
||||
settings.visibility.private.bullet_two=<strong>フォーク</strong>、<strong>ウォッチャー</strong>、<strong>スター</strong>との関係を解除する可能性があります。
|
||||
settings.visibility.public.button=公開する
|
||||
settings.visibility.public.text=公開に変更すると、リポジトリを誰でも閲覧できるようにします。
|
||||
settings.visibility.public.bullet_title=<strong>公開に変更すると:</strong>
|
||||
settings.visibility.public.bullet_one=リポジトリを誰でも閲覧できるようにします。
|
||||
settings.visibility.success=リポジトリの公開設定を変更しました。
|
||||
settings.visibility.error=リポジトリの公開設定の変更中にエラーが発生しました。
|
||||
settings.visibility.fork_error=フォークされたリポジトリの公開設定は変更できません。
|
||||
settings.archive.button=アーカイブ
|
||||
settings.archive.header=このリポジトリをアーカイブ
|
||||
settings.archive.text=リポジトリをアーカイブするとリポジトリ全体が読み出し専用となります。 ダッシュボードにも表示されなくなります。 新たなコミット、あるいは、イシューやプルリクエストの作成は、誰もできなくなります (あなたでさえも!)。
|
||||
|
@ -628,6 +628,7 @@ org_still_own_repo=Esta organização ainda possui um ou mais repositórios, eli
|
||||
org_still_own_packages=Esta organização ainda possui um ou mais pacotes, elimine-os primeiro.
|
||||
|
||||
target_branch_not_exist=O ramo de destino não existe.
|
||||
target_ref_not_exist=A referência de destino não existe %s
|
||||
|
||||
admin_cannot_delete_self=Não se pode auto-remover quando tem privilégios de administração. Remova esses privilégios primeiro.
|
||||
|
||||
@ -1273,6 +1274,7 @@ commit_graph.color=Colorido
|
||||
commit.contained_in=Este cometimento está contido em:
|
||||
commit.contained_in_default_branch=Este cometimento é parte do ramo principal
|
||||
commit.load_referencing_branches_and_tags=Carregar ramos e etiquetas que referenciem este cometimento
|
||||
commit.load_tags_failed=O carregamento das etiquetas falhou por causa de um erro interno
|
||||
blame=Responsabilidade
|
||||
download_file=Descarregar ficheiro
|
||||
normal_view=Vista normal
|
||||
@ -3700,6 +3702,11 @@ workflow.disable_success=A sequência de trabalho '%s' foi desabilitada com suce
|
||||
workflow.enable=Habilitar sequência de trabalho
|
||||
workflow.enable_success=A sequência de trabalho '%s' foi habilitada com sucesso.
|
||||
workflow.disabled=A sequência de trabalho está desabilitada.
|
||||
workflow.run=Executar sequência de trabalho
|
||||
workflow.not_found=A sequência de trabalho '%s' não foi encontrada.
|
||||
workflow.run_success=A sequência de trabalho '%s' foi executada com sucesso.
|
||||
workflow.from_ref=Usar sequência de trabalho de
|
||||
workflow.has_workflow_dispatch=Esta sequência de trabalho tem um despoletador de eventos workflow_dispatch.
|
||||
|
||||
need_approval_desc=É necessária aprovação para executar sequências de trabalho para a derivação do pedido de integração.
|
||||
|
||||
|
@ -206,7 +206,7 @@ buttons.list.unordered.tooltip=添加待办清单
|
||||
buttons.list.ordered.tooltip=添加编号列表
|
||||
buttons.list.task.tooltip=添加任务列表
|
||||
buttons.mention.tooltip=提及用户或团队
|
||||
buttons.ref.tooltip=引用一个问题或拉取请求
|
||||
buttons.ref.tooltip=引用一个问题或合并请求
|
||||
buttons.switch_to_legacy.tooltip=使用旧版编辑器
|
||||
buttons.enable_monospace_font=启用等宽字体
|
||||
buttons.disable_monospace_font=禁用等宽字体
|
||||
@ -1752,8 +1752,9 @@ compare.compare_head=比较
|
||||
pulls.desc=启用合并请求和代码评审。
|
||||
pulls.new=创建合并请求
|
||||
pulls.new.blocked_user=无法创建合并请求,因为您已被仓库所有者屏蔽。
|
||||
pulls.new.must_collaborator=您必须是仓库的协作者才能创建合并请求。
|
||||
pulls.edit.already_changed=无法保存对合并请求的更改。其内容似乎已被其他用户更改。 请刷新页面并重新编辑以避免覆盖他们的更改
|
||||
pulls.view=查看拉取请求
|
||||
pulls.view=查看合并请求
|
||||
pulls.compare_changes=创建合并请求
|
||||
pulls.allow_edits_from_maintainers=允许维护者编辑
|
||||
pulls.allow_edits_from_maintainers_desc=对基础分支有写入权限的用户也可以推送到此分支
|
||||
@ -1830,8 +1831,8 @@ pulls.wrong_commit_id=提交 id 必须在目标分支 上
|
||||
pulls.no_merge_desc=由于未启用合并选项,此合并请求无法被合并。
|
||||
pulls.no_merge_helper=在仓库设置中启用合并选项或者手工合并请求。
|
||||
pulls.no_merge_wip=这个合并请求无法合并,因为被标记为尚未完成的工作。
|
||||
pulls.no_merge_not_ready=此拉取请求尚未准备好合并,请检查审核状态和状态检查。
|
||||
pulls.no_merge_access=您无权合并此拉取请求。
|
||||
pulls.no_merge_not_ready=此合并请求尚未准备好合并,请检查审核状态和状态检查。
|
||||
pulls.no_merge_access=您无权合并此合并请求。
|
||||
pulls.merge_pull_request=创建合并提交
|
||||
pulls.rebase_merge_pull_request=变基后快进
|
||||
pulls.rebase_merge_commit_pull_request=变基后创建合并提交
|
||||
@ -1876,6 +1877,7 @@ pulls.cmd_instruction_checkout_title=检出
|
||||
pulls.cmd_instruction_checkout_desc=从你的仓库中检出一个新的分支并测试变更。
|
||||
pulls.cmd_instruction_merge_title=合并
|
||||
pulls.cmd_instruction_merge_desc=合并变更并更新到 Gitea 上
|
||||
pulls.cmd_instruction_merge_warning=警告:此操作不能合并该合并请求,因为“自动检测手动合并”未启用
|
||||
pulls.clear_merge_message=清除合并信息
|
||||
pulls.clear_merge_message_hint=清除合并消息只会删除提交消息内容,并保留生成的 git 附加内容,如“Co-Authored-By …”。
|
||||
|
||||
@ -1888,11 +1890,11 @@ pulls.auto_merge_cancel_schedule=取消自动合并
|
||||
pulls.auto_merge_not_scheduled=此合并请求没有计划自动合并。
|
||||
pulls.auto_merge_canceled_schedule=此合并请求的自动合并已取消。
|
||||
|
||||
pulls.auto_merge_newly_scheduled_comment=`已于 %[1]s 设置此拉取请求在所有检查成功后自动合并`
|
||||
pulls.auto_merge_newly_scheduled_comment=`已于 %[1]s 设置此合并请求在所有检查成功后自动合并`
|
||||
pulls.auto_merge_canceled_schedule_comment=`已于 %[1]s 取消了自动合并设置 `
|
||||
|
||||
pulls.delete.title=删除此拉取请求?
|
||||
pulls.delete.text=你真的要删除这个拉取请求吗? (这将永久删除所有内容。如果你打算将内容存档,请考虑关闭它)
|
||||
pulls.delete.title=删除此合并请求?
|
||||
pulls.delete.text=你真的要删除这个合并请求吗? (这将永久删除所有内容。如果你打算将内容存档,请考虑关闭它)
|
||||
|
||||
pulls.recently_pushed_new_branches=您已经于%[2]s推送了分支 <strong>%[1]s</strong>
|
||||
|
||||
@ -2125,7 +2127,7 @@ settings.allow_only_contributors_to_track_time=仅允许成员跟踪时间
|
||||
settings.pulls_desc=启用合并请求
|
||||
settings.pulls.ignore_whitespace=忽略空白冲突
|
||||
settings.pulls.enable_autodetect_manual_merge=启用自动检测手动合并 (注意:在某些特殊情况下可能发生错误判断)
|
||||
settings.pulls.allow_rebase_update=允许通过变基更新拉取请求分支
|
||||
settings.pulls.allow_rebase_update=允许通过变基更新合并请求分支
|
||||
settings.pulls.default_delete_branch_after_merge=默认合并后删除合并请求分支
|
||||
settings.pulls.default_allow_edits_from_maintainers=默认开启允许维护者编辑
|
||||
settings.releases_desc=启用发布
|
||||
@ -2375,7 +2377,7 @@ settings.protect_status_check_matched=匹配
|
||||
settings.protect_invalid_status_check_pattern=无效的状态检查规则:“%s”。
|
||||
settings.protect_no_valid_status_check_patterns=没有有效的状态检查规则。
|
||||
settings.protect_required_approvals=所需的批准:
|
||||
settings.protect_required_approvals_desc=只允许合并有足够审核人数的拉取请求。
|
||||
settings.protect_required_approvals_desc=只允许合并有足够审核人数的合并请求。
|
||||
settings.dismiss_stale_approvals=取消过时的批准
|
||||
settings.dismiss_stale_approvals_desc=当新的提交更改合并请求内容被推送到分支时,旧的批准将被撤销。
|
||||
settings.ignore_stale_approvals=忽略过期批准
|
||||
@ -2400,7 +2402,7 @@ settings.block_rejected_reviews=拒绝审核阻止了此合并
|
||||
settings.block_rejected_reviews_desc=如果官方审查人员要求作出改动,即使有足够的批准,合并也不允许。
|
||||
settings.block_on_official_review_requests=有官方审核阻止了代码合并
|
||||
settings.block_on_official_review_requests_desc=处于评审状态时,即使有足够的批准,也不能合并。
|
||||
settings.block_outdated_branch=如果拉取请求已经过时,阻止合并
|
||||
settings.block_outdated_branch=如果合并请求已经过时,阻止合并
|
||||
settings.block_outdated_branch_desc=当头部分支落后基础分支时,不能合并。
|
||||
settings.default_branch_desc=请选择一个默认的分支用于合并请求和提交:
|
||||
settings.merge_style_desc=合并方式
|
||||
|
4995
package-lock.json
generated
4995
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
52
package.json
52
package.json
@ -9,7 +9,7 @@
|
||||
"@citation-js/plugin-csl": "0.7.14",
|
||||
"@citation-js/plugin-software-formats": "0.6.1",
|
||||
"@github/markdown-toolbar-element": "2.2.3",
|
||||
"@github/relative-time-element": "4.4.2",
|
||||
"@github/relative-time-element": "4.4.3",
|
||||
"@github/text-expander-element": "2.7.1",
|
||||
"@mcaptcha/vanilla-glue": "0.1.0-alpha-3",
|
||||
"@primer/octicons": "19.11.0",
|
||||
@ -17,12 +17,12 @@
|
||||
"add-asset-webpack-plugin": "3.0.0",
|
||||
"ansi_up": "6.0.2",
|
||||
"asciinema-player": "3.8.0",
|
||||
"chart.js": "4.4.3",
|
||||
"chart.js": "4.4.4",
|
||||
"chartjs-adapter-dayjs-4": "1.0.4",
|
||||
"chartjs-plugin-zoom": "2.0.1",
|
||||
"clippie": "4.1.3",
|
||||
"css-loader": "7.1.2",
|
||||
"dayjs": "1.11.12",
|
||||
"dayjs": "1.11.13",
|
||||
"dropzone": "6.0.0-beta.2",
|
||||
"easymde": "2.18.0",
|
||||
"esbuild-loader": "4.2.2",
|
||||
@ -33,18 +33,18 @@
|
||||
"jquery": "3.7.1",
|
||||
"katex": "0.16.11",
|
||||
"license-checker-webpack-plugin": "0.2.1",
|
||||
"mermaid": "10.9.1",
|
||||
"mini-css-extract-plugin": "2.9.0",
|
||||
"mermaid": "11.0.2",
|
||||
"mini-css-extract-plugin": "2.9.1",
|
||||
"minimatch": "10.0.1",
|
||||
"monaco-editor": "0.50.0",
|
||||
"monaco-editor": "0.51.0",
|
||||
"monaco-editor-webpack-plugin": "7.1.0",
|
||||
"pdfobject": "2.3.0",
|
||||
"postcss": "8.4.40",
|
||||
"postcss": "8.4.41",
|
||||
"postcss-loader": "8.1.1",
|
||||
"postcss-nesting": "12.1.5",
|
||||
"postcss-nesting": "13.0.0",
|
||||
"sortablejs": "1.15.2",
|
||||
"swagger-ui-dist": "5.17.14",
|
||||
"tailwindcss": "3.4.7",
|
||||
"tailwindcss": "3.4.10",
|
||||
"temporal-polyfill": "0.2.5",
|
||||
"throttle-debounce": "5.0.2",
|
||||
"tinycolor2": "1.6.0",
|
||||
@ -54,20 +54,20 @@
|
||||
"typescript": "5.5.4",
|
||||
"uint8-to-base64": "0.2.0",
|
||||
"vanilla-colorful": "0.7.2",
|
||||
"vue": "3.4.35",
|
||||
"vue": "3.4.38",
|
||||
"vue-bar-graph": "2.1.0",
|
||||
"vue-chartjs": "5.3.1",
|
||||
"vue-loader": "17.4.2",
|
||||
"webpack": "5.93.0",
|
||||
"webpack": "5.94.0",
|
||||
"webpack-cli": "5.1.4",
|
||||
"wrap-ansi": "9.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint-community/eslint-plugin-eslint-comments": "4.3.0",
|
||||
"@playwright/test": "1.45.3",
|
||||
"@eslint-community/eslint-plugin-eslint-comments": "4.4.0",
|
||||
"@playwright/test": "1.46.1",
|
||||
"@stoplight/spectral-cli": "6.11.1",
|
||||
"@stylistic/eslint-plugin-js": "2.6.1",
|
||||
"@stylistic/stylelint-plugin": "3.0.0",
|
||||
"@stylistic/eslint-plugin-js": "2.6.5",
|
||||
"@stylistic/stylelint-plugin": "3.0.1",
|
||||
"@types/dropzone": "5.7.8",
|
||||
"@types/jquery": "3.5.30",
|
||||
"@types/katex": "0.16.7",
|
||||
@ -78,11 +78,11 @@
|
||||
"@types/throttle-debounce": "5.0.2",
|
||||
"@types/tinycolor2": "1.4.6",
|
||||
"@types/toastify-js": "1.12.3",
|
||||
"@typescript-eslint/eslint-plugin": "8.0.0",
|
||||
"@typescript-eslint/parser": "8.0.0",
|
||||
"@typescript-eslint/eslint-plugin": "8.3.0",
|
||||
"@typescript-eslint/parser": "8.3.0",
|
||||
"@vitejs/plugin-vue": "5.1.2",
|
||||
"eslint": "8.57.0",
|
||||
"eslint-import-resolver-typescript": "3.6.1",
|
||||
"eslint-import-resolver-typescript": "3.6.3",
|
||||
"eslint-plugin-array-func": "4.0.0",
|
||||
"eslint-plugin-deprecation": "3.0.0",
|
||||
"eslint-plugin-github": "5.0.1",
|
||||
@ -91,14 +91,14 @@
|
||||
"eslint-plugin-no-use-extend-native": "0.5.0",
|
||||
"eslint-plugin-playwright": "1.6.2",
|
||||
"eslint-plugin-regexp": "2.6.0",
|
||||
"eslint-plugin-sonarjs": "1.0.4",
|
||||
"eslint-plugin-sonarjs": "2.0.1",
|
||||
"eslint-plugin-unicorn": "55.0.0",
|
||||
"eslint-plugin-vitest": "0.4.1",
|
||||
"eslint-plugin-vitest-globals": "1.5.0",
|
||||
"eslint-plugin-vue": "9.27.0",
|
||||
"eslint-plugin-vue-scoped-css": "2.8.1",
|
||||
"eslint-plugin-wc": "2.1.0",
|
||||
"happy-dom": "14.12.3",
|
||||
"eslint-plugin-wc": "2.1.1",
|
||||
"happy-dom": "15.3.1",
|
||||
"markdownlint-cli": "0.41.0",
|
||||
"nolyfill": "1.0.39",
|
||||
"postcss-html": "1.7.0",
|
||||
@ -107,8 +107,8 @@
|
||||
"stylelint-declaration-strict-value": "1.10.6",
|
||||
"stylelint-value-no-unknown-custom-properties": "6.0.1",
|
||||
"svgo": "3.3.2",
|
||||
"type-fest": "4.23.0",
|
||||
"updates": "16.3.7",
|
||||
"type-fest": "4.26.0",
|
||||
"updates": "16.4.0",
|
||||
"vite-string-plugin": "1.3.4",
|
||||
"vitest": "2.0.5"
|
||||
},
|
||||
@ -131,6 +131,10 @@
|
||||
"object.values": "npm:@nolyfill/object.values@^1",
|
||||
"safe-regex-test": "npm:@nolyfill/safe-regex-test@^1",
|
||||
"string.prototype.includes": "npm:@nolyfill/string.prototype.includes@^1",
|
||||
"is-core-module": "npm:@nolyfill/is-core-module@^1"
|
||||
"is-core-module": "npm:@nolyfill/is-core-module@^1",
|
||||
"array.prototype.findlast": "npm:@nolyfill/array.prototype.findlast@^1",
|
||||
"array.prototype.tosorted": "npm:@nolyfill/array.prototype.tosorted@^1",
|
||||
"string.prototype.matchall": "npm:@nolyfill/string.prototype.matchall@^1",
|
||||
"string.prototype.repeat": "npm:@nolyfill/string.prototype.repeat@^1"
|
||||
}
|
||||
}
|
||||
|
10
poetry.lock
generated
10
poetry.lock
generated
@ -42,13 +42,13 @@ six = ">=1.13.0"
|
||||
|
||||
[[package]]
|
||||
name = "djlint"
|
||||
version = "1.34.1"
|
||||
version = "1.34.2"
|
||||
description = "HTML Template Linter and Formatter"
|
||||
optional = false
|
||||
python-versions = ">=3.8.0,<4.0.0"
|
||||
python-versions = "<4.0.0,>=3.8.0"
|
||||
files = [
|
||||
{file = "djlint-1.34.1-py3-none-any.whl", hash = "sha256:96ff1c464fb6f061130ebc88663a2ea524d7ec51f4b56221a2b3f0320a3cfce8"},
|
||||
{file = "djlint-1.34.1.tar.gz", hash = "sha256:db93fa008d19eaadb0454edf1704931d14469d48508daba2df9941111f408346"},
|
||||
{file = "djlint-1.34.2-py3-none-any.whl", hash = "sha256:4825389e395eb77371857c77f547fa5ebd1a644b1bc4fe9fed19d49a2786b9e5"},
|
||||
{file = "djlint-1.34.2.tar.gz", hash = "sha256:db9b2e59203a452b83532499bc243c749279090b905cc1f657973f78e7a31ddd"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@ -357,4 +357,4 @@ dev = ["doc8", "flake8", "flake8-import-order", "rstcheck[sphinx]", "sphinx"]
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = "^3.10"
|
||||
content-hash = "cd2ff218e9f27a464dfbc8ec2387824a90f4360e04c3f2e58cc375796b7df33a"
|
||||
content-hash = "af89bce0c442463621b6e536f9b94c31e188e1662c2caa84372c0858a2ee7d5c"
|
||||
|
@ -5,7 +5,7 @@ package-mode = false
|
||||
python = "^3.10"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
djlint = "1.34.1"
|
||||
djlint = "1.34.2"
|
||||
yamllint = "1.35.1"
|
||||
|
||||
[tool.djlint]
|
||||
|
@ -13,7 +13,6 @@ import (
|
||||
"code.gitea.io/gitea/models/organization"
|
||||
repo_model "code.gitea.io/gitea/models/repo"
|
||||
"code.gitea.io/gitea/modules/base"
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
"code.gitea.io/gitea/modules/markup"
|
||||
"code.gitea.io/gitea/modules/markup/markdown"
@ -42,6 +41,14 @@ func Home(ctx *context.Context) {
return
}

home(ctx, false)
}

func Repositories(ctx *context.Context) {
home(ctx, true)
}

func home(ctx *context.Context, viewRepositories bool) {
org := ctx.Org.Organization

ctx.Data["PageIsUserProfile"] = true
@ -101,10 +108,34 @@ func Home(ctx *context.Context) {
private := ctx.FormOptionalBool("private")
ctx.Data["IsPrivate"] = private

err := shared_user.LoadHeaderCount(ctx)
if err != nil {
ctx.ServerError("LoadHeaderCount", err)
return
}

opts := &organization.FindOrgMembersOpts{
OrgID: org.ID,
PublicOnly: ctx.Org.PublicMemberOnly,
ListOptions: db.ListOptions{Page: 1, PageSize: 25},
}
members, _, err := organization.FindOrgMembers(ctx, opts)
if err != nil {
ctx.ServerError("FindOrgMembers", err)
return
}
ctx.Data["Members"] = members
ctx.Data["Teams"] = ctx.Org.Teams
ctx.Data["DisableNewPullMirrors"] = setting.Mirror.DisableNewPull
ctx.Data["ShowMemberAndTeamTab"] = ctx.Org.IsMember || len(members) > 0

if !prepareOrgProfileReadme(ctx, viewRepositories) {
ctx.Data["PageIsViewRepositories"] = true
}

var (
repos []*repo_model.Repository
count int64
err error
)
repos, count, err = repo_model.SearchRepository(ctx, &repo_model.SearchRepoOptions{
ListOptions: db.ListOptions{
@ -129,29 +160,8 @@ func Home(ctx *context.Context) {
return
}

opts := &organization.FindOrgMembersOpts{
OrgID: org.ID,
PublicOnly: ctx.Org.PublicMemberOnly,
ListOptions: db.ListOptions{Page: 1, PageSize: 25},
}
members, _, err := organization.FindOrgMembers(ctx, opts)
if err != nil {
ctx.ServerError("FindOrgMembers", err)
return
}

ctx.Data["Repos"] = repos
ctx.Data["Total"] = count
ctx.Data["Members"] = members
ctx.Data["Teams"] = ctx.Org.Teams
ctx.Data["DisableNewPullMirrors"] = setting.Mirror.DisableNewPull
ctx.Data["PageIsViewRepositories"] = true

err = shared_user.LoadHeaderCount(ctx)
if err != nil {
ctx.ServerError("LoadHeaderCount", err)
return
}

pager := context.NewPagination(int(count), setting.UI.User.RepoPagingNum, page, 5)
pager.SetDefaultParams(ctx)
@ -173,18 +183,16 @@ func Home(ctx *context.Context) {
}
ctx.Data["Page"] = pager

ctx.Data["ShowMemberAndTeamTab"] = ctx.Org.IsMember || len(members) > 0

profileDbRepo, profileGitRepo, profileReadmeBlob, profileClose := shared_user.FindUserProfileReadme(ctx, ctx.Doer)
defer profileClose()
prepareOrgProfileReadme(ctx, profileGitRepo, profileDbRepo, profileReadmeBlob)

ctx.HTML(http.StatusOK, tplOrgHome)
}

func prepareOrgProfileReadme(ctx *context.Context, profileGitRepo *git.Repository, profileDbRepo *repo_model.Repository, profileReadme *git.Blob) {
if profileGitRepo == nil || profileReadme == nil {
return
func prepareOrgProfileReadme(ctx *context.Context, viewRepositories bool) bool {
profileDbRepo, profileGitRepo, profileReadme, profileClose := shared_user.FindUserProfileReadme(ctx, ctx.Doer)
defer profileClose()
ctx.Data["HasProfileReadme"] = profileReadme != nil

if profileGitRepo == nil || profileReadme == nil || viewRepositories {
return false
}

if bytes, err := profileReadme.GetBlobContent(setting.UI.MaxDisplayFileSize); err != nil {
@ -206,4 +214,7 @@ func prepareOrgProfileReadme(ctx *context.Context, profileGitRepo *git.Repositor
ctx.Data["ProfileReadme"] = profileContent
}
}

ctx.Data["PageIsViewOverview"] = true
return true
}
@ -54,9 +54,9 @@ func Members(ctx *context.Context) {
return
}

err = shared_user.LoadHeaderCount(ctx)
err = shared_user.RenderOrgHeader(ctx)
if err != nil {
ctx.ServerError("LoadHeaderCount", err)
ctx.ServerError("RenderOrgHeader", err)
return
}

@ -59,9 +59,9 @@ func Teams(ctx *context.Context) {
}
ctx.Data["Teams"] = ctx.Org.Teams

err := shared_user.LoadHeaderCount(ctx)
err := shared_user.RenderOrgHeader(ctx)
if err != nil {
ctx.ServerError("LoadHeaderCount", err)
ctx.ServerError("RenderOrgHeader", err)
return
}

@ -162,3 +162,15 @@ func LoadHeaderCount(ctx *context.Context) error {

return nil
}

func RenderOrgHeader(ctx *context.Context) error {
if err := LoadHeaderCount(ctx); err != nil {
return err
}

_, _, profileReadmeBlob, profileClose := FindUserProfileReadme(ctx, ctx.Doer)
defer profileClose()
ctx.Data["HasProfileReadme"] = profileReadmeBlob != nil

return nil
}
@ -883,10 +883,15 @@ func registerRoutes(m *web.Router) {
m.Post("/teams/{team}/action/repo/{action}", org.TeamsRepoAction)
}, context.OrgAssignment(true, false, true))

// require admin permission
m.Group("/{org}", func() {
m.Get("/teams/-/search", org.SearchTeam)
}, context.OrgAssignment(true, false, false, true))

// require owner permission
m.Group("/{org}", func() {
m.Get("/teams/new", org.NewTeam)
m.Post("/teams/new", web.Bind(forms.CreateTeamForm{}), org.NewTeamPost)
m.Get("/teams/-/search", org.SearchTeam)
m.Get("/teams/{team}/edit", org.EditTeam)
m.Post("/teams/{team}/edit", web.Bind(forms.CreateTeamForm{}), org.EditTeamPost)
m.Post("/teams/{team}/delete", org.DeleteTeam)
@ -995,6 +1000,8 @@ func registerRoutes(m *web.Router) {
}, context.PackageAssignment(), reqPackageAccess(perm.AccessModeRead))
}

m.Get("/repositories", org.Repositories)

m.Group("/projects", func() {
m.Group("", func() {
m.Get("", org.Projects)
@ -219,6 +219,10 @@ func Merge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.U
// Reset cached commit count
cache.Remove(pr.Issue.Repo.GetCommitsCountCacheKey(pr.BaseBranch, true))

return handleCloseCrossReferences(ctx, pr, doer)
}

func handleCloseCrossReferences(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User) error {
// Resolve cross references
refs, err := pr.ResolveCrossReferences(ctx)
if err != nil {
@ -542,5 +546,6 @@ func MergedManually(ctx context.Context, pr *issues_model.PullRequest, doer *use

notify_service.MergePullRequest(baseGitRepo.Ctx, doer, pr)
log.Info("manuallyMerged[%d]: Marked as manually merged into %s/%s by commit id: %s", pr.ID, pr.BaseRepo.Name, pr.BaseBranch, commitID)
return nil

return handleCloseCrossReferences(ctx, pr, doer)
}
@ -1,7 +1,12 @@
<div class="ui container">
<overflow-menu class="ui secondary pointing tabular borderless menu tw-mb-4">
<div class="overflow-menu-items">
<a class="{{if .PageIsViewRepositories}}active {{end}}item" href="{{$.Org.HomeLink}}">
{{if .HasProfileReadme}}
<a class="{{if .PageIsViewOverview}}active {{end}}item" href="{{$.Org.HomeLink}}">
{{svg "octicon-info"}} {{ctx.Locale.Tr "user.overview"}}
</a>
{{end}}
<a class="{{if .PageIsViewRepositories}}active {{end}}item" href="{{$.Org.HomeLink}}{{if .HasProfileReadme}}/-/repositories{{end}}">
{{svg "octicon-repo"}} {{ctx.Locale.Tr "user.repositories"}}
{{if .RepoCount}}
<div class="ui small label">{{.RepoCount}}</div>
52
types.d.ts
vendored
@ -10,22 +10,52 @@ declare module '*.css' {

declare let __webpack_public_path__: string;

interface Window {
config: import('./web_src/js/types.ts').Config;
$: typeof import('@types/jquery'),
jQuery: typeof import('@types/jquery'),
htmx: typeof import('htmx.org'),
_globalHandlerErrors: Array<ErrorEvent & PromiseRejectionEvent> & {
_inited: boolean,
push: (e: ErrorEvent & PromiseRejectionEvent) => void | number,
},
}

declare module 'htmx.org/dist/htmx.esm.js' {
const value = await import('htmx.org');
export default value;
}

declare module 'uint8-to-base64' {
export function encode(arrayBuffer: ArrayBuffer): string;
export function decode(base64str: string): ArrayBuffer;
}

declare module 'swagger-ui-dist/swagger-ui-es-bundle.js' {
const value = await import('swagger-ui-dist');
export default value.SwaggerUIBundle;
}

interface JQuery {
api: any, // fomantic
areYouSure: any, // jquery.are-you-sure
dimmer: any, // fomantic
dropdown: any; // fomantic
modal: any; // fomantic
tab: any; // fomantic
transition: any, // fomantic
}

interface JQueryStatic {
api: any, // fomantic
}

interface Element {
_tippy: import('tippy.js').Instance;
}

type Writable<T> = { -readonly [K in keyof T]: T[K] };

interface Window {
config: import('./web_src/js/types.ts').Config;
$: typeof import('@types/jquery'),
jQuery: typeof import('@types/jquery'),
htmx: Omit<typeof import('htmx.org/dist/htmx.esm.js').default, 'config'> & {
config?: Writable<typeof import('htmx.org').default.config>,
},
ui?: any,
_globalHandlerErrors: Array<ErrorEvent & PromiseRejectionEvent> & {
_inited: boolean,
push: (e: ErrorEvent & PromiseRejectionEvent) => void | number,
},
__webpack_public_path__: string;
}
@ -1,20 +1,21 @@
import {showErrorToast} from './modules/toast.ts';
import 'idiomorph/dist/idiomorph-ext.js'; // https://github.com/bigskysoftware/idiomorph#htmx
import type {HtmxResponseInfo} from 'htmx.org';

// https://github.com/bigskysoftware/idiomorph#htmx
import 'idiomorph/dist/idiomorph-ext.js';
type HtmxEvent = Event & {detail: HtmxResponseInfo};

// https://htmx.org/reference/#config
window.htmx.config.requestClass = 'is-loading';
window.htmx.config.scrollIntoViewOnBoost = false;

// https://htmx.org/events/#htmx:sendError
document.body.addEventListener('htmx:sendError', (event) => {
document.body.addEventListener('htmx:sendError', (event: HtmxEvent) => {
// TODO: add translations
showErrorToast(`Network error when calling ${event.detail.requestConfig.path}`);
});

// https://htmx.org/events/#htmx:responseError
document.body.addEventListener('htmx:responseError', (event) => {
document.body.addEventListener('htmx:responseError', (event: HtmxEvent) => {
// TODO: add translations
showErrorToast(`Error ${event.detail.xhr.status} when calling ${event.detail.requestConfig.path}`);
});
@ -98,12 +98,12 @@ initGiteaFomantic();
initDirAuto();
initSubmitEventPolyfill();

function callInitFunctions(functions) {
function callInitFunctions(functions: (() => any)[]) {
// Start performance trace by accessing a URL by "https://localhost/?_ui_performance_trace=1" or "https://localhost/?key=value&_ui_performance_trace=1"
// It is a quick check, no side effect so no need to do slow URL parsing.
const initStart = performance.now();
if (window.location.search.includes('_ui_performance_trace=1')) {
let results = [];
let results: {name: string, dur: number}[] = [];
for (const func of functions) {
const start = performance.now();
func();

@ -20,6 +20,7 @@ export async function renderMermaid() {
startOnLoad: false,
theme: isDarkTheme() ? 'dark' : 'neutral',
securityLevel: 'strict',
suppressErrorRendering: true,
});

for (const el of els) {
@ -1,12 +1,12 @@
import {AnsiUp} from 'ansi_up';

const replacements = [
const replacements: Array<[RegExp, string]> = [
[/\x1b\[\d+[A-H]/g, ''], // Move cursor, treat them as no-op
[/\x1b\[\d?[JK]/g, '\r'], // Erase display/line, treat them as a Carriage Return
];

// render ANSI to HTML
export function renderAnsi(line) {
export function renderAnsi(line: string): string {
// create a fresh ansi_up instance because otherwise previous renders can influence
// the output of future renders, because ansi_up is stateful and remembers things like
// unclosed opening tags for colors.

@ -8,7 +8,7 @@ window.addEventListener('load', async () => {

// Make the page's protocol be at the top of the schemes list
const proto = window.location.protocol.slice(0, -1);
spec.schemes.sort((a, b) => {
spec.schemes.sort((a: string, b: string) => {
if (a === proto) return -1;
if (b === proto) return 1;
return 0;
@ -17,7 +17,7 @@ test('svgParseOuterInner', () => {
test('SvgIcon', () => {
const root = document.createElement('div');
createApp({render: () => h(SvgIcon, {name: 'octicon-link', size: 24, class: 'base', className: 'extra'})}).mount(root);
const node = root.firstChild;
const node = root.firstChild as Element;
expect(node.nodeName).toEqual('svg');
expect(node.getAttribute('width')).toEqual('24');
expect(node.getAttribute('height')).toEqual('24');

@ -29,3 +29,10 @@ export type RequestData = string | FormData | URLSearchParams;
export type RequestOpts = {
data?: RequestData,
} & RequestInit;

export type IssueData = {
owner: string,
repo: string,
type: string,
index: string,
}
@ -95,23 +95,20 @@ test('toAbsoluteUrl', () => {
});

test('encodeURLEncodedBase64, decodeURLEncodedBase64', () => {
// TextEncoder is Node.js API while Uint8Array is jsdom API and their outputs are not
// structurally comparable, so we convert to array to compare. The conversion can be
// removed once https://github.com/jsdom/jsdom/issues/2524 is resolved.
const encoder = new TextEncoder();
const uint8array = encoder.encode.bind(encoder);

expect(encodeURLEncodedBase64(uint8array('AA?'))).toEqual('QUE_'); // standard base64: "QUE/"
expect(encodeURLEncodedBase64(uint8array('AA~'))).toEqual('QUF-'); // standard base64: "QUF+"

expect(Array.from(decodeURLEncodedBase64('QUE/'))).toEqual(Array.from(uint8array('AA?')));
expect(Array.from(decodeURLEncodedBase64('QUF+'))).toEqual(Array.from(uint8array('AA~')));
expect(Array.from(decodeURLEncodedBase64('QUE_'))).toEqual(Array.from(uint8array('AA?')));
expect(Array.from(decodeURLEncodedBase64('QUF-'))).toEqual(Array.from(uint8array('AA~')));
expect(new Uint8Array(decodeURLEncodedBase64('QUE/'))).toEqual(uint8array('AA?'));
expect(new Uint8Array(decodeURLEncodedBase64('QUF+'))).toEqual(uint8array('AA~'));
expect(new Uint8Array(decodeURLEncodedBase64('QUE_'))).toEqual(uint8array('AA?'));
expect(new Uint8Array(decodeURLEncodedBase64('QUF-'))).toEqual(uint8array('AA~'));

expect(encodeURLEncodedBase64(uint8array('a'))).toEqual('YQ'); // standard base64: "YQ=="
expect(Array.from(decodeURLEncodedBase64('YQ'))).toEqual(Array.from(uint8array('a')));
expect(Array.from(decodeURLEncodedBase64('YQ=='))).toEqual(Array.from(uint8array('a')));
expect(new Uint8Array(decodeURLEncodedBase64('YQ'))).toEqual(uint8array('a'));
expect(new Uint8Array(decodeURLEncodedBase64('YQ=='))).toEqual(uint8array('a'));
});

test('file detection', () => {
@ -1,13 +1,14 @@
import {encode, decode} from 'uint8-to-base64';
import type {IssueData} from './types.ts';

// transform /path/to/file.ext to file.ext
export function basename(path) {
export function basename(path: string): string {
const lastSlashIndex = path.lastIndexOf('/');
return lastSlashIndex < 0 ? path : path.substring(lastSlashIndex + 1);
}

// transform /path/to/file.ext to .ext
export function extname(path) {
export function extname(path: string): string {
const lastSlashIndex = path.lastIndexOf('/');
const lastPointIndex = path.lastIndexOf('.');
if (lastSlashIndex > lastPointIndex) return '';
@ -15,54 +16,54 @@ export function extname(path) {
}

// test whether a variable is an object
export function isObject(obj) {
export function isObject(obj: any): boolean {
return Object.prototype.toString.call(obj) === '[object Object]';
}

// returns whether a dark theme is enabled
export function isDarkTheme() {
export function isDarkTheme(): boolean {
const style = window.getComputedStyle(document.documentElement);
return style.getPropertyValue('--is-dark-theme').trim().toLowerCase() === 'true';
}

// strip <tags> from a string
export function stripTags(text) {
export function stripTags(text: string): string {
return text.replace(/<[^>]*>?/g, '');
}

export function parseIssueHref(href) {
export function parseIssueHref(href: string): IssueData {
const path = (href || '').replace(/[#?].*$/, '');
const [_, owner, repo, type, index] = /([^/]+)\/([^/]+)\/(issues|pulls)\/([0-9]+)/.exec(path) || [];
return {owner, repo, type, index};
}

// parse a URL, either relative '/path' or absolute 'https://localhost/path'
export function parseUrl(str) {
export function parseUrl(str: string): URL {
return new URL(str, str.startsWith('http') ? undefined : window.location.origin);
}

// return current locale chosen by user
export function getCurrentLocale() {
export function getCurrentLocale(): string {
return document.documentElement.lang;
}

// given a month (0-11), returns it in the documents language
export function translateMonth(month) {
export function translateMonth(month: number) {
return new Date(Date.UTC(2022, month, 12)).toLocaleString(getCurrentLocale(), {month: 'short', timeZone: 'UTC'});
}

// given a weekday (0-6, Sunday to Saturday), returns it in the documents language
export function translateDay(day) {
export function translateDay(day: number) {
return new Date(Date.UTC(2022, 7, day)).toLocaleString(getCurrentLocale(), {weekday: 'short', timeZone: 'UTC'});
}

// convert a Blob to a DataURI
export function blobToDataURI(blob) {
export function blobToDataURI(blob: Blob): Promise<string> {
return new Promise((resolve, reject) => {
try {
const reader = new FileReader();
reader.addEventListener('load', (e) => {
resolve(e.target.result);
resolve(e.target.result as string);
});
reader.addEventListener('error', () => {
reject(new Error('FileReader failed'));
@ -75,7 +76,7 @@ export function blobToDataURI(blob) {
}

// convert image Blob to another mime-type format.
export function convertImage(blob, mime) {
export function convertImage(blob: Blob, mime: string): Promise<Blob> {
return new Promise(async (resolve, reject) => {
try {
const img = new Image();
@ -104,7 +105,7 @@ export function convertImage(blob, mime) {
});
}

export function toAbsoluteUrl(url) {
export function toAbsoluteUrl(url: string): string {
if (url.startsWith('http://') || url.startsWith('https://')) {
return url;
}
@ -118,15 +119,15 @@ export function toAbsoluteUrl(url) {
}

// Encode an ArrayBuffer into a URLEncoded base64 string.
export function encodeURLEncodedBase64(arrayBuffer) {
export function encodeURLEncodedBase64(arrayBuffer: ArrayBuffer): string {
return encode(arrayBuffer)
.replace(/\+/g, '-')
.replace(/\//g, '_')
.replace(/=/g, '');
}

// Decode a URLEncoded base64 to an ArrayBuffer string.
export function decodeURLEncodedBase64(base64url) {
// Decode a URLEncoded base64 to an ArrayBuffer.
export function decodeURLEncodedBase64(base64url: string): ArrayBuffer {
return decode(base64url
.replace(/_/g, '/')
.replace(/-/g, '+'));
@ -135,20 +136,22 @@ export function decodeURLEncodedBase64(base64url) {
const domParser = new DOMParser();
const xmlSerializer = new XMLSerializer();

export function parseDom(text, contentType) {
export function parseDom(text: string, contentType: DOMParserSupportedType): Document {
return domParser.parseFromString(text, contentType);
}

export function serializeXml(node) {
export function serializeXml(node: Element | Node): string {
return xmlSerializer.serializeToString(node);
}

export const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
export function sleep(ms: number): Promise<void> {
return new Promise((resolve) => setTimeout(resolve, ms));
}

export function isImageFile({name, type}) {
export function isImageFile({name, type}: {name: string, type?: string}): boolean {
return /\.(jpe?g|png|gif|webp|svg|heic)$/i.test(name || '') || type?.startsWith('image/');
}

export function isVideoFile({name, type}) {
export function isVideoFile({name, type}: {name: string, type?: string}): boolean {
return /\.(mpe?g|mp4|mkv|webm)$/i.test(name || '') || type?.startsWith('video/');
}
@ -3,23 +3,23 @@ import type {ColorInput} from 'tinycolor2';

// Returns relative luminance for a SRGB color - https://en.wikipedia.org/wiki/Relative_luminance
// Keep this in sync with modules/util/color.go
function getRelativeLuminance(color: ColorInput) {
function getRelativeLuminance(color: ColorInput): number {
const {r, g, b} = tinycolor(color).toRgb();
return (0.2126729 * r + 0.7151522 * g + 0.072175 * b) / 255;
}

function useLightText(backgroundColor: ColorInput) {
function useLightText(backgroundColor: ColorInput): boolean {
return getRelativeLuminance(backgroundColor) < 0.453;
}

// Given a background color, returns a black or white foreground color that the highest
// contrast ratio. In the future, the APCA contrast function, or CSS `contrast-color` will be better.
// https://github.com/color-js/color.js/blob/eb7b53f7a13bb716ec8b28c7a56f052cd599acd9/src/contrast/APCA.js#L42
export function contrastColor(backgroundColor: ColorInput) {
export function contrastColor(backgroundColor: ColorInput): string {
return useLightText(backgroundColor) ? '#fff' : '#000';
}

function resolveColors(obj: Record<string, string>) {
function resolveColors(obj: Record<string, string>): Record<string, string> {
const styles = window.getComputedStyle(document.documentElement);
const getColor = (name: string) => styles.getPropertyValue(name).trim();
return Object.fromEntries(Object.entries(obj).map(([key, value]) => [key, getColor(value)]));
@ -266,10 +266,8 @@ export function initSubmitEventPolyfill() {
/**
* Check if an element is visible, equivalent to jQuery's `:visible` pseudo.
* Note: This function doesn't account for all possible visibility scenarios.
* @param {HTMLElement} element The element to check.
* @returns {boolean} True if the element is visible.
*/
export function isElemVisible(element: HTMLElement) {
export function isElemVisible(element: HTMLElement): boolean {
if (!element) return false;

return Boolean(element.offsetWidth || element.offsetHeight || element.getClientRects().length);
@ -1,6 +1,11 @@
export async function pngChunks(blob) {
type PngChunk = {
name: string,
data: Uint8Array,
}

export async function pngChunks(blob: Blob): Promise<PngChunk[]> {
const uint8arr = new Uint8Array(await blob.arrayBuffer());
const chunks = [];
const chunks: PngChunk[] = [];
if (uint8arr.length < 12) return chunks;
const view = new DataView(uint8arr.buffer);
if (view.getBigUint64(0) !== 9894494448401390090n) return chunks;
@ -19,9 +24,14 @@ export async function pngChunks(blob) {
return chunks;
}

type ImageInfo = {
width?: number,
dppx?: number,
}

// decode a image and try to obtain width and dppx. It will never throw but instead
// return default values.
export async function imageInfo(blob) {
export async function imageInfo(blob: Blob): Promise<ImageInfo> {
let width = 0, dppx = 1; // dppx: 1 dot per pixel for non-HiDPI screens

if (blob.type === 'image/png') { // only png is supported currently
@ -2,17 +2,17 @@ import emojis from '../../../assets/emoji.json';

const maxMatches = 6;

function sortAndReduce(map) {
function sortAndReduce(map: Map<string, number>) {
const sortedMap = new Map(Array.from(map.entries()).sort((a, b) => a[1] - b[1]));
return Array.from(sortedMap.keys()).slice(0, maxMatches);
}

export function matchEmoji(queryText) {
export function matchEmoji(queryText: string): string[] {
const query = queryText.toLowerCase().replaceAll('_', ' ');
if (!query) return emojis.slice(0, maxMatches).map((e) => e.aliases[0]);

// results is a map of weights, lower is better
const results = new Map();
const results = new Map<string, number>();
for (const {aliases} of emojis) {
const mainAlias = aliases[0];
for (const [aliasIndex, alias] of aliases.entries()) {
@ -27,7 +27,7 @@ export function matchEmoji(queryText) {
return sortAndReduce(results);
}

export function matchMention(queryText) {
export function matchMention(queryText: string): string[] {
const query = queryText.toLowerCase();

// results is a map of weights, lower is better
@ -1,16 +1,17 @@
import dayjs from 'dayjs';
import utc from 'dayjs/plugin/utc.js';
import {getCurrentLocale} from '../utils.ts';
import type {ConfigType} from 'dayjs';

dayjs.extend(utc);

/**
* Returns an array of millisecond-timestamps of start-of-week days (Sundays)
*
* @param startConfig The start date. Can take any type that `Date` accepts.
* @param endConfig The end date. Can take any type that `Date` accepts.
* @param startDate The start date. Can take any type that dayjs accepts.
* @param endDate The end date. Can take any type that dayjs accepts.
*/
export function startDaysBetween(startDate, endDate) {
export function startDaysBetween(startDate: ConfigType, endDate: ConfigType): number[] {
const start = dayjs.utc(startDate);
const end = dayjs.utc(endDate);

@ -21,7 +22,7 @@ export function startDaysBetween(startDate, endDate) {
current = current.add(1, 'day');
}

const startDays = [];
const startDays: number[] = [];
while (current.isBefore(end)) {
startDays.push(current.valueOf());
current = current.add(1, 'week');
@ -30,7 +31,7 @@ export function startDaysBetween(startDate, endDate) {
return startDays;
}

export function firstStartDateAfterDate(inputDate) {
export function firstStartDateAfterDate(inputDate: Date): number {
if (!(inputDate instanceof Date)) {
throw new Error('Invalid date');
}
@ -41,7 +42,14 @@ export function firstStartDateAfterDate(inputDate) {
return resultDate.valueOf();
}

export function fillEmptyStartDaysWithZeroes(startDays, data) {
type DayData = {
week: number,
additions: number,
deletions: number,
commits: number,
}

export function fillEmptyStartDaysWithZeroes(startDays: number[], data: DayData): DayData[] {
const result = {};

for (const startDay of startDays) {
@ -51,11 +59,11 @@ export function fillEmptyStartDaysWithZeroes(startDays, data) {
return Object.values(result);
}

let dateFormat;
let dateFormat: Intl.DateTimeFormat;

// format a Date object to document's locale, but with 24h format from user's current locale because this
// option is a personal preference of the user, not something that the document's locale should dictate.
export function formatDatetime(date) {
export function formatDatetime(date: Date | number): string {
if (!dateFormat) {
// TODO: replace `hour12` with `Intl.Locale.prototype.getHourCycles` once there is broad browser support
dateFormat = new Intl.DateTimeFormat(getCurrentLocale(), {
@ -1,12 +1,12 @@
export function pathEscapeSegments(s) {
export function pathEscapeSegments(s: string): string {
return s.split('/').map(encodeURIComponent).join('/');
}

function stripSlash(url) {
function stripSlash(url: string): string {
return url.endsWith('/') ? url.slice(0, -1) : url;
}

export function isUrl(url) {
export function isUrl(url: string): boolean {
try {
return stripSlash((new URL(url).href)).trim() === stripSlash(url).trim();
} catch {
@ -1,10 +1,16 @@
window.__webpack_public_path__ = '';

window.config = {
appUrl: 'http://localhost:3000/',
appSubUrl: '',
assetVersionEncoded: '',
assetUrlPrefix: '',
runModeIsProd: true,
customEmojis: {},
csrfToken: 'test-csrf-token-123456',
pageData: {},
i18n: {},
appSubUrl: '',
notificationSettings: {},
enableTimeTracking: true,
mentionValues: [
{key: 'user1 User 1', value: 'user1', name: 'user1', fullname: 'User 1', avatar: 'https://avatar1.com'},
{key: 'user2 User 2', value: 'user2', name: 'user2', fullname: 'User 2', avatar: 'https://avatar2.com'},
@ -14,4 +20,6 @@ window.config = {
{key: 'org6 User 6', value: 'org6', name: 'org6', fullname: 'User 6', avatar: 'https://avatar6.com'},
{key: 'org7 User 7', value: 'org7', name: 'org7', fullname: 'User 7', avatar: 'https://avatar7.com'},
],
mermaidMaxSourceCharacters: 5000,
i18n: {},
};